From 5af37d44331c11b9c9ed72057738b69bf4053ab9 Mon Sep 17 00:00:00 2001
From: coder-alpha
Date: Fri, 29 Mar 2019 19:36:57 -0400
Subject: [PATCH] Fixes and enhancements

Fixes:
- Thread Status for External Sources
- Bookmarking, Recent Watchlist, Config (in certain cases)
- Plugin would not load if Anime site was slow to respond
- Episode Numbering/Listing in certain cases for missing or oddly formatted naming, e.g. (-,+)
- Bug in sorting by file-size when the size value was not a float
- AutoPilot Queue item removal in certain cases
- Anime (Other Seasons, Recommended) menu
- Anime Episodes tag string
- Titles with numerical names
- Primewire was not tagging Show items correctly for use with AutoPilot
- Fix resolvers for files hosted on unsupported hosts
- Fix vidcloud host resolver for downloader resuming
- Fix mega host for unsupported files (folder links)
- Fix gvideo host for unsupported video files

New:
- Anime Base URL Option (Tools)
- External Listing API URL Option (Tools)
- AutoPilot Start/End Index for Episodes uses OMDB
- AutoPilot Update/Edit option
- Input Page No. option
- New native host Mp4upload (used by 9Anime)
- Save/Load Bookmarking via Bookmarks menu
- Device option to transcode IMDb vids (fixes audio issues)
- Global Options menu
- Global Option - Retry Failed Downloads (on restart)
- Global Option - Don't Refresh Library on Item Download
- AutoPilot additional options - Prefer Subtitles, Run via Scheduler and Smart Add
- Downloads resuming from other natively supported hosts
- New category Anime (Genre)
- New host xstreamcdn (moved from direct host)
- ThreadStatus menu
- Run Now option for AutoPilot Items
- Item's Video Page from AutoPilot menu
- Remove and Add to AutoPilot menu for Failed Downloads (for file removed/not found errors)
- WIP Sources/Providers selector (currently only shows Sources/Providers universally enabled)

Changes:
- Unplayable hosts will not show up
- AutoPilot Queue Management (restrict multiple runs)
- Downloads use a file-size check before proceeding to download
- Downloads - additional error handling
- Downloads - update/add as new menu for existing item
- Downloads - use a 'seq no' for resuming when multiple file versions are retrieved from a single url
- Generic Playback to honor transcode device options
- Clear Sources Cache before AutoPilot run
---
 Contents/Code/common.py | 122 +-
 Contents/Code/download.py | 1055 +++++++------
 Contents/Code/downloadsmenu.py | 1321 ++++++++++++----
 Contents/Code/interface.py | 28 +-
 Contents/Code/main.py | 916 +++++++++---
 Contents/Code/playback.py | 29 +-
 Contents/Code/tools.py | 207 ++-
 Contents/DefaultPrefs.json | 2 +-
 .../Shared/resources/lib/libraries/client.py | 59 +-
 .../Shared/resources/lib/libraries/control.py | 32 +-
 .../Shared/resources/lib/libraries/mega.py | 275 +++-
 .../resources/lib/resolvers/__init__.py | 33 +-
 .../resources/lib/resolvers/host_direct.py | 115 +-
 .../resources/lib/resolvers/host_gvideo.py | 153 +-
 .../resources/lib/resolvers/host_mega.py | 25 +-
 .../resources/lib/resolvers/host_mp4upload.py | 335 +++++
 .../resources/lib/resolvers/host_openload.py | 10 +-
 .../lib/resolvers/host_rapidvideo.py | 12 +-
 .../lib/resolvers/host_streamango.py | 16 +-
 .../resources/lib/resolvers/host_vidcloud.py | 92 +-
 .../resources/lib/resolvers/host_vidnode.py | 14 +-
 .../lib/resolvers/host_xstreamcdn.py | 328 ++++
 .../resources/lib/resolvers/host_youtube.py | 10 +-
 .../Shared/resources/lib/sources/__init__.py | 194 ++-
 .../resources/lib/sources/fmovies_mv_tv.py | 21 +-
 .../Shared/resources/lib/sources/gogoanime.py | 
1 - .../resources/lib/sources/gowatchseries_ca.py | 17 +- .../resources/lib/sources/nineanime_ca.py | 69 +- .../resources/lib/sources/primewire_mv_tv.py | 4 +- Contents/Resources/icon-floppyload.png | Bin 0 -> 2125 bytes Contents/Resources/icon-floppysave.png | Bin 0 -> 5214 bytes Contents/Resources/icon-global-options.png | Bin 0 -> 5099 bytes Contents/Services/Shared Code/jsunpack.pys | 144 ++ Contents/Services/Shared Code/misc.pys | 71 +- Contents/Services/URL/FMovies/ServiceCode.pys | 12 + README.md | 9 +- 36 files changed, 4135 insertions(+), 1596 deletions(-) create mode 100644 Contents/Libraries/Shared/resources/lib/resolvers/host_mp4upload.py create mode 100644 Contents/Libraries/Shared/resources/lib/resolvers/host_xstreamcdn.py create mode 100644 Contents/Resources/icon-floppyload.png create mode 100644 Contents/Resources/icon-floppysave.png create mode 100644 Contents/Resources/icon-global-options.png create mode 100644 Contents/Services/Shared Code/jsunpack.pys diff --git a/Contents/Code/common.py b/Contents/Code/common.py index dcd4d27..dfe034d 100644 --- a/Contents/Code/common.py +++ b/Contents/Code/common.py @@ -1,6 +1,6 @@ ################################################################################ TITLE = "FMoviesPlus" -VERSION = '0.76' # Release notation (x.y - where x is major and y is minor) +VERSION = '0.77' # Release notation (x.y - where x is major and y is minor) TAG = '' GITHUB_REPOSITORY = 'coder-alpha/FMoviesPlus.bundle' PREFIX = "/video/fmoviesplus" @@ -24,7 +24,10 @@ BASE_URL = "https://fmovies.taxi" BASE_URLS = ["https://bmovies.is","https://bmovies.to","https://bmovies.pro","https://bmovies.online","https://bmovies.club","https://bmovies.ru","https://fmovies.to","https://fmovies.is","https://fmovies.taxi","https://fmovies.se","https://ffmovies.ru"] - + +ES_API_URL = 'http://movies-v2.api-fetch.website' +EXT_LIST_URLS = ["http://movies-v2.api-fetch.website","http://tv-v2.api-fetch.website"] + JSEngines_ALLowed = ['Node'] Engine_OK = False try: @@ -59,6 +62,7 @@ CACHE_COOKIE = [] TOKEN_CODE = [] TO_GB = float(1024*1024*1024) +MIN_FILE_SIZE = 999999 DOWNLOAD_CHUNK_SIZE = 1.0 # in MB # Help videos on Patebin @@ -100,7 +104,7 @@ #INTERNAL_SOURCES_FILETYPE = [{'label':'Movie/Show','enabled': 'True'},{'label':'Trailer','enabled': 'True'},{'label':'Interviews','enabled': 'False'},{'label':'Behind the scenes','enabled': 'False'},{'label':'Music Video','enabled': 'False'},{'label':'Deleted Scenes','enabled': 'False'},{'label':'Misc.','enabled': 'False'}] #INTERNAL_SOURCES_SIZES = [{'label':'> 2GB','enabled': 'True','LL':2*TO_GB,'UL':100*TO_GB},{'label':'1GB - 2GB','enabled': 'True','LL':1*TO_GB,'UL':2*TO_GB},{'label':'0.5GB - 1GB','enabled': 'True','LL':0.5*TO_GB,'UL':1*TO_GB},{'label':'0GB - 0.5GB','enabled': 'True','LL':1,'UL':0.5*TO_GB},{'label':'0GB','enabled': 'False','LL':0,'UL':0}] -INTERNAL_SOURCES_SIZES_CONST = [{'label':'> 10GB','enabled': 'False','LL':10*TO_GB,'UL':1024*TO_GB},{'label':'5GB >= 10GB','enabled': 'True','LL':5*TO_GB,'UL':10*TO_GB},{'label':'2GB >= 5GB','enabled': 'True','LL':2*TO_GB,'UL':5*TO_GB},{'label':'1GB >= 2GB','enabled': 'True','LL':1*TO_GB,'UL':2*TO_GB},{'label':'0.5GB >= 1GB','enabled': 'True','LL':0.5*TO_GB,'UL':1*TO_GB},{'label':'0GB >= 0.5GB','enabled': 'True','LL':999999,'UL':0.5*TO_GB},{'label':'0GB','enabled': 'False','LL':0,'UL':999999}] +INTERNAL_SOURCES_SIZES_CONST = [{'label':'> 10GB','enabled': 'False','LL':10*TO_GB,'UL':1024*TO_GB},{'label':'5GB >= 10GB','enabled': 'True','LL':5*TO_GB,'UL':10*TO_GB},{'label':'2GB >= 
5GB','enabled': 'True','LL':2*TO_GB,'UL':5*TO_GB},{'label':'1GB >= 2GB','enabled': 'True','LL':1*TO_GB,'UL':2*TO_GB},{'label':'0.5GB >= 1GB','enabled': 'True','LL':0.5*TO_GB,'UL':1*TO_GB},{'label':'0GB >= 0.5GB','enabled': 'True','LL':MIN_FILE_SIZE,'UL':0.5*TO_GB},{'label':'0GB','enabled': 'False','LL':0,'UL':MIN_FILE_SIZE}] INTERNAL_SOURCES_QUALS_CONST = [{'label':'4K','enabled': 'True'},{'label':'1080p','enabled': 'True'},{'label':'720p','enabled': 'True'},{'label':'480p','enabled': 'True'},{'label':'360p','enabled': 'True'}] INTERNAL_SOURCES_RIPTYPE_CONST = [{'label':'BRRIP','enabled': 'True'},{'label':'3D-BRRIP','enabled': 'True'},{'label':'PREDVD','enabled': 'True'},{'label':'CAM','enabled': 'True'},{'label':'TS','enabled': 'True'},{'label':'SCR','enabled': 'True'},{'label':'UNKNOWN','enabled': 'True'}] INTERNAL_SOURCES_FILETYPE_CONST = [{'label':'Movie/Show','enabled':'True'},{'label':'Trailer','enabled':'True'},{'label':'Featurette','enabled':'False'},{'label':'Interviews','enabled':'False'},{'label':'Behind the scenes','enabled':'False'},{'label':'Music Video','enabled':'False'},{'label':'Deleted Scenes','enabled':'False'},{'label':'Misc.','enabled':'False'}] @@ -109,7 +113,7 @@ INTERNAL_SOURCES_RIPTYPE = list(INTERNAL_SOURCES_RIPTYPE_CONST) INTERNAL_SOURCES_FILETYPE = list(INTERNAL_SOURCES_FILETYPE_CONST) -DEVICE_OPTIONS = ['Dumb-Keyboard','List-View','Redirector','Simple-Emoji','Vibrant-Emoji','Multi-Link-View','Full-poster display','Use-PhantomJS','No-Extra-Page-Info','Use-FileSize-Sorting','Force-Transcoding','No-Extra-Page-Info (Anime)','Downloads-Listing','Retry-Failed-Downloads'] +DEVICE_OPTIONS = ['Dumb-Keyboard','List-View','Redirector','Simple-Emoji','Vibrant-Emoji','Multi-Link-View','Full-poster display','Use-PhantomJS','No-Extra-Page-Info','Use-FileSize-Sorting','Force-Transcoding','No-Extra-Page-Info (Anime)','Downloads-Listing','Force-Transcoding (IMDb)'] DEVICE_OPTION = {DEVICE_OPTIONS[0]:'The awesome Keyboard for Search impaired devices', DEVICE_OPTIONS[1]:'Force List-View of Playback page listing sources', DEVICE_OPTIONS[2]:'Required in certain cases - *Experimental (refer forum)', @@ -120,10 +124,15 @@ DEVICE_OPTIONS[7]:'Use PhantomJS - For parsing links. Binary download required', DEVICE_OPTIONS[8]:'No-Extra-Page-Info - Speeds up navigation by not downloading detailed item info', DEVICE_OPTIONS[9]:'Use-FileSize-Sorting - Uses FileSize instead of Resolution info provided by site which can be inaccurate', - DEVICE_OPTIONS[10]:'Force-Transcoding - Sets the item\'s container property to null in order to force transcoding by PMS', + DEVICE_OPTIONS[10]:'Force-Transcoding - Force transcoding by PMS for videos with audio/video issues', DEVICE_OPTIONS[11]:'No-Extra-Page-Info (Anime) - Speeds up navigation by not downloading detailed item info', DEVICE_OPTIONS[12]:'Downloads-Listing - Reverse the order of Downloads i.e. 
oldest entry on top', - DEVICE_OPTIONS[13]:'Retry-Failed-Downloads - On Plugin Initialization Retry Failed Downloads (Global Option)'} + DEVICE_OPTIONS[13]:'Force-Transcoding (IMDb) - Force transcoding IMDb videos by PMS'} + +GLOBAL_OPTIONS = ['Retry-Failed-Downloads','Dont-Refresh-Library-Downloads'] +GLOBAL_OPTION = {GLOBAL_OPTIONS[0]:'On Plugin Initialization Retry Failed Downloads', + GLOBAL_OPTIONS[1]:'Don\'t perform a Library Section Refresh after an Item is Downloaded'} + DEVICE_OPTION_CONSTRAINTS = {DEVICE_OPTIONS[2]:[{'Pref':'use_https_alt','Desc':'Use Alternate SSL/TLS','ReqValue':'disabled'}]} DEVICE_OPTION_CONSTRAINTS2 = {DEVICE_OPTIONS[5]:[{'Option':6,'ReqValue':False}], DEVICE_OPTIONS[6]:[{'Option':5,'ReqValue':False}]} DEVICE_OPTION_PROPOGATE_TO_CONTROL = {DEVICE_OPTIONS[7]:True} @@ -142,18 +151,22 @@ DOWNLOAD_TEMP = {} DOWNLOAD_AUTOPILOT_CONST = {'movie':[], 'show':[], 'extras':[]} DOWNLOAD_AUTOPILOT = {'movie':[], 'show':[], 'extras':[]} -DOWNLOAD_AUTOPILOT_STATUS = ['Processing','UnAvailable','In Download Queue','Waiting','Error'] +DOWNLOAD_AUTOPILOT_STATUS = ['Processing','UnAvailable','In Download Queue','Waiting','Error','Scheduled','Completed Pending Removal'] DOWNLOAD_FMP_EXT = '.FMPTemp' ANIME_SEARCH = [] ANIME_KEY = '9anime' -ANIME_DOM = 'to' +ANIME_DOMS = ['ru','to'] +ANIME_DOM = ANIME_DOMS[0] ANIME_URL = 'https://%s.%s' % (ANIME_KEY, ANIME_DOM) ANIME_SEARCH_URL = ANIME_URL + '/search?keyword=%s' -ES_API_URL = 'http://movies-v2.api-fetch.website' -ES_API_KEY = 'api-fetch' - -EXT_SITE_URLS = [ANIME_URL, ES_API_URL] +ES_API_KEY = 'fetch.website' +EXT_SITE_URLS = [ES_API_KEY] +for d in ANIME_DOMS: + u9 = 'https://%s.%s' % (ANIME_KEY, d) + EXT_SITE_URLS.append(u9) +for d in EXT_LIST_URLS: + EXT_SITE_URLS.append(d) ART = "art-default.jpg" ICON = "icon-fmovies.png" @@ -176,7 +189,9 @@ ICON_PREFS = "icon-prefs.png" ICON_UPDATE = "icon-update.png" ICON_UPDATE_NEW = "icon-update-new.png" +ICON_UPDATER = "icon-updater.png" ICON_DEVICE_OPTIONS = "icon-device-options.png" +ICON_GLOBAL_OPTIONS = "icon-global-options.png" ICON_OPTIONS = "icon-options.png" ICON_CLEAR = "icon-clear.png" ICON_DK_ENABLE = "icon-dumbKeyboardE.png" @@ -215,6 +230,11 @@ ICON_TOOLS = "icon-tools.png" ICON_WARNING = "icon-warning.png" ICON_SYSSTATUS = "icon-status.png" +ICON_FL_SAVE = "icon-floppysave.png" +ICON_FL_LOAD = "icon-floppyload.png" + +ICON_OPENLOAD = "http://i.imgur.com/OM7VzQs.png" +ICON_IMDB = "https://i.imgur.com/LqO2Fn0.png" MSG0 = "Please wait.." 
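
The LL/UL byte ranges in INTERNAL_SOURCES_SIZES_CONST above feed the source filter in FilterBasedOn further down, where this release's "sort using file-size" fix amounts to coercing the stored byte count to float before comparing it against each bucket's bounds. A minimal sketch of that bucket filter, with hypothetical sample data:

TO_GB = float(1024 * 1024 * 1024)

# Each enabled bucket keeps sources whose size falls in [LL, UL) bytes.
buckets = [
    {'label': '1GB >= 2GB', 'enabled': 'True', 'LL': 1 * TO_GB, 'UL': 2 * TO_GB},
    {'label': '0GB >= 0.5GB', 'enabled': 'True', 'LL': 999999, 'UL': 0.5 * TO_GB},
]

def filter_by_size(sources, buckets):
    kept = []
    for b in buckets:
        if str(b['enabled']) != 'True':
            continue
        # 'fs' may arrive as a string or an int, so coerce both sides to
        # float before comparing -- the bug this release fixes.
        kept += [s for s in sources
                 if float(b['LL']) <= float(s['fs']) < float(b['UL'])]
    return kept

sources = [{'name': 'hostA', 'fs': '1610612736'},   # 1.5 GB, as a string
           {'name': 'hostB', 'fs': 104857600}]      # 100 MB, as an int
print(filter_by_size(sources, buckets))             # both survive, one bucket each
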
MSG1 = "Please give some time for the Interface to Load & Initialize plugins" @@ -234,17 +254,22 @@ ALT_PLAYBACK_INLINE = True SEARCH_EXT_SOURCES_FROM_SEARCH_MENU = True CHECK_BASE_URL_REDIRECTION = True +CHECK_BASE_URL_REDIRECTION_HP = False +CHECK_9BASE_URL_REDIRECTION = True +CHECK_EXTLIST_URL_REDIRECTION = False DEV_BM_CONVERSION = False NO_MOVIE_INFO = False USE_CUSTOM_TIMEOUT = False -MY_CLOUD_DISABLED = False -FMOVIES_HOSTS_DISABLED = [''] -SERVER_PLACEHOLDER = 'FMOVIES' ENCRYPTED_URLS = False -DEV_DEBUG = False REFACTOR_WIP = True DOWNLOAD_ALL_SEASONS = True WBH = 'aHR0cHM6Ly9ob29rLmlvL2NvZGVyLWFscGhhL3Rlc3Q=' +SERVER_PLACEHOLDER = 'FMOVIES' +FMOVIES_SERVER_REMAP = {'Server F':'Google-F', 'Server G':'Google-G', 'F5 Beta':'PrettyFast'} +FMOVIES_HOSTS_DISABLED = ['MyCloud','PrettyFast'] +FMOVIES_HOSTS_UNPLAYABLE = True + +DEV_DEBUG = True if 'dev' in TAG else False #################################################################################################### # Get Key from a Dict using Val @@ -416,10 +441,10 @@ def isArrayValueInString(arr, mystr, toLowercase=True): for a in arr: if toLowercase == True: - if a.lower() in mystr.lower(): + if a.lower() in str(mystr).lower(): return True else: - if a in mystr: + if a in str(mystr): return True return False @@ -504,6 +529,42 @@ def FixUrlInconsistencies(url): pass return url + +#################################################################################################### +def FixUrlInconsistencies2(url): + + try: + url = url.replace('www.','') + except: + pass + + return url + +#################################################################################################### +def FixUrlInconsistencies3(url): + + try: + url = url.replace('.taxixixixi/','.taxi/') + url = url.replace('.taxixixi/','.taxi/') + url = url.replace('.taxixi/','.taxi/') + except: + pass + + return url + +#################################################################################################### +def getSelectedItems(item_as_dict_enc): + + try: + ret = '' + item = JSON.ObjectFromString(D(item_as_dict_enc)) + filter_extSources = [] + filter_extSources += [i for i in item.keys() if item[i] == True] + if len(filter_extSources) > 0: + ret = ', '.join(str(x) for x in filter_extSources) + return ret + except: + return ret #################################################################################################### def OrderBasedOn(srcs, use_host=True, use_filesize=False): @@ -578,7 +639,7 @@ def FilterBasedOn(srcs, use_quality=True, use_riptype=True, use_vidtype=True, us #Log(INTERNAL_SOURCES_SIZES) if use_filesize == True: filter_extSources = [] - for fs in INTERNAL_SOURCES_SIZES: filter_extSources += [i for i in srcs if (i['vidtype'].lower() in 'movie/show' and i['fs'] >= fs['LL'] and i['fs'] < fs['UL'] and str(fs['enabled'])=='True') or (i['vidtype'].lower() not in 'movie/show')] + for fs in INTERNAL_SOURCES_SIZES: filter_extSources += [i for i in srcs if (i['vidtype'].lower() in 'movie/show' and float(i['fs']) >= float(fs['LL']) and float(i['fs']) < float(fs['UL']) and str(fs['enabled'])=='True') or (i['vidtype'].lower() not in 'movie/show')] srcs = filter_extSources # filter sources based on enabled rip-type in INTERNAL_SOURCES_RIPTYPE @@ -797,7 +858,7 @@ def GetPageElements(url, headers=None, referer=None, timeout=15): except Exception as e: if error == '': - error = e + error = '%s' % e Log('ERROR common.py>GetPageElements: Error: %s URL: %s DATA: %s' % (error,url,page_data_string)) return page_data_elems, error @@ -891,6 +952,7 @@ 
def GetPageAsString(url, headers=None, timeout=15, referer=None): pass else: headers['Referer'] = url + headers['User-Agent'] = client.agent() if USE_COOKIES and ('fmovies' in url or 'bmovies' in url): cookies, error = make_cookie_str() @@ -986,7 +1048,25 @@ def OpenLoadUnpair(**kwargs): for m in msg: Log('OpenLoad UnPair: %s' % m) -###################################################################################### +#################################################################################################### + +def uidAltExists(uid, boolExt=False): + + items = [] + if Dict[uid] != None: + if boolExt == False: + items.append(uid) + else: + items.append([uid, 0]) + for i in range(1,10): + nuid = uid + '-%s' % i + if Dict[nuid] != None: + if boolExt == False: + items.append(nuid) + else: + items.append([nuid, i]) + + return items def makeUID(title, year='None', quality='None', source='None', url='None', season='None', episode='None'): return E(title+str(year)+str(quality)+str(source)+str(url)+str(season)+str(episode)) diff --git a/Contents/Code/download.py b/Contents/Code/download.py index ef4a1d4..65bc587 100644 --- a/Contents/Code/download.py +++ b/Contents/Code/download.py @@ -131,457 +131,542 @@ def setDownloadThrottler(self, d): def download(self, file_meta_enc): - file_meta = JSON.ObjectFromString(D(file_meta_enc)) - title = file_meta['title'] - url = file_meta['url'] - furl = url - durl = file_meta['durl'] - purl = file_meta['purl'] - year = file_meta['year'] - summary = file_meta['summary'] - thumb = file_meta['thumb'] - fs = file_meta['fs'] - fsBytes = file_meta['fsBytes'] - chunk_size = file_meta['chunk_size'] - quality = file_meta['quality'] - source = file_meta['source'] - uid = file_meta['uid'] - fid = file_meta['fid'] - type = file_meta['type'] - status = file_meta['status'] - startPos = file_meta['startPos'] - timeAdded = file_meta['timeAdded'] - first_time = file_meta['first_time'] - progress = file_meta['progress'] - path = file_meta['section_path'] - - try: - vidtype = file_meta['vidtype'].lower() - except: - vidtype = 'movie' - - try: - riptype = file_meta['riptype'] - if riptype == None: - riptype = 'BRRIP' - except: - riptype = 'BRRIP' - - headers = None - try: - params_enc = file_meta['params'] - params = json.loads(base64.b64decode(params_enc)) - if 'headers' in params.keys(): - headers = params['headers'] - except: - params = None - - file_meta['last_error'] = 'Unknown Error' - file_meta['error'] = 'Unknown Error' - purgeKey = uid - total_size_bytes = int(fsBytes) # in bytes - #Log("total_size_bytes : %s" % str(total_size_bytes)) + tuid = None + url = '' error = '' - - if common.DOWNLOAD_TEMP == None: - common.DOWNLOAD_TEMP = {} - - chunk_size_n = int(1024.0 * 1024.0 * float(common.DOWNLOAD_CHUNK_SIZE)) # in bytes - if chunk_size != chunk_size_n: - chunk_size = chunk_size_n - file_meta['chunk_size'] = chunk_size - - if 'file_ext' in file_meta: - file_ext = file_meta['file_ext'] - if file_ext == None: - file_ext = '.mp4' - else: - file_ext = '.mp4' - - source_meta = {} - f_meta = {} - - if vidtype == 'show': - item_folder_name = '%s' % (file_meta['title']) - else: - item_folder_name = '%s (%s)' % (file_meta['title'], file_meta['year']) - - fname, fname_e = create_fname(file_meta, vidtype, riptype, file_ext, fid + common.DOWNLOAD_FMP_EXT) - if fname == None: - raise fname_e + progress = 0 + startPos = 0 + purgeKey = '' + try: + file_meta = JSON.ObjectFromString(D(file_meta_enc)) + title = file_meta['title'] + url = file_meta['url'] + furl = url - tuid = 
common.id_generator(16) - common.control.AddThread('download', 'Download File: %s' % fname, time.time(), '2', False, tuid) + page_url = None + if 'page_url' in file_meta.keys(): + page_url = file_meta['page_url'] + + seq = 0 + if 'seq' in file_meta.keys(): + seq = file_meta['seq'] + + durl = file_meta['durl'] + purl = file_meta['purl'] + year = file_meta['year'] + summary = file_meta['summary'] + thumb = file_meta['thumb'] + fs = file_meta['fs'] + fsBytes = file_meta['fsBytes'] + chunk_size = file_meta['chunk_size'] + quality = file_meta['quality'] + source = file_meta['source'] + uid = file_meta['uid'] + fid = file_meta['fid'] + type = file_meta['type'] + status = file_meta['status'] + startPos = file_meta['startPos'] + timeAdded = file_meta['timeAdded'] + first_time = file_meta['first_time'] + progress = file_meta['progress'] + path = file_meta['section_path'] - abs_path = Core.storage.join_path(path, item_folder_name, fname) - directory = Core.storage.join_path(path, item_folder_name) - - file_meta['temp_file'] = abs_path - - startPos = verifyStartPos(startPos, abs_path) - - sub_url_t = None - if 'openload' in source.lower(): - furl, error, sub_url_t, page_html = common.host_openload.resolve(furl) - if error != '' or furl == None: - furl, error, sub_url_t, page_html = common.host_openload.resolve(durl) - if error != '' or furl == None: - Log('OpenLoad URL: %s' % furl) - Log('OpenLoad Error: %s' % error) - download_failed(url, error, progress, startPos, purgeKey) - common.control.RemoveThread(tuid) - return - elif 'rapidvideo' in source.lower(): - furl, error, sub_url_t = common.host_rapidvideo.resolve(furl) - if error != '' or furl == None: - furl, error, sub_url_t = common.host_rapidvideo.resolve(durl) - if error != '' or furl == None: - Log('RapidVideo URL: %s' % furl) - Log('RapidVideo Error: %s' % error) - download_failed(url, error, progress, startPos, purgeKey) - common.control.RemoveThread(tuid) - return - elif 'streamango' in source.lower(): - furl, error, sub_url_t = common.host_streamango.resolve(furl) - if error != '' or furl == None: - furl, error, sub_url_t = common.host_streamango.resolve(durl) - if error != '' or furl == None: - Log('Streamango URL: %s' % furl) - Log('Streamango Error: %s' % error) - download_failed(url, error, progress, startPos, purgeKey) - common.control.RemoveThread(tuid) - return - elif 'direct' in source.lower(): - furl, params_enc, error = common.host_direct.resolve(furl) - if error != '' or furl == None: - furl, params_enc, error = common.host_direct.resolve(durl) - if error != '' or furl == None: - Log('3donlinefilms URL: %s' % furl) - Log('3donlinefilms Error: %s' % error) - download_failed(url, error, progress, startPos, purgeKey) - common.control.RemoveThread(tuid) - return try: + vidtype = file_meta['vidtype'].lower() + except: + vidtype = 'movie' + + try: + riptype = file_meta['riptype'] + if riptype == None: + riptype = 'BRRIP' + except: + riptype = 'BRRIP' + + headers = None + try: + params_enc = file_meta['params'] params = json.loads(base64.b64decode(params_enc)) if 'headers' in params.keys(): headers = params['headers'] except: - pass - - if sub_url_t != None: - file_meta['sub_url'] = sub_url_t - - if Prefs['use_debug']: - Log('Save path: %s' % abs_path) - - # ToDo - # https://support.plex.tv/articles/200220677-local-media-assets-movies/ - fname, fname_e = create_fname(file_meta, vidtype, riptype, file_ext) - if fname == None: - raise fname_e - - final_abs_path = Core.storage.join_path(path, item_folder_name, fname) - - # subtitle - 
sub_fname = fname.replace(file_ext,'') + '.en.srt' - sub_file_path = Core.storage.join_path(directory, sub_fname) - - write_mode = 'wb' - chunk_speed = 0 - avg_speed = 0 - avg_speed_curr = 0 - eta = 0 - bytes_read = 0 - r = None + params = None + + file_meta['last_error'] = 'Unknown Error' + file_meta['error'] = 'Unknown Error' + purgeKey = uid + total_size_bytes = int(fsBytes) # in bytes + #Log("total_size_bytes : %s" % str(total_size_bytes)) + error = '' + + if common.DOWNLOAD_TEMP == None: + common.DOWNLOAD_TEMP = {} + + chunk_size_n = int(1024.0 * 1024.0 * float(common.DOWNLOAD_CHUNK_SIZE)) # in bytes + if chunk_size != chunk_size_n: + chunk_size = chunk_size_n + file_meta['chunk_size'] = chunk_size + + if 'file_ext' in file_meta: + file_ext = file_meta['file_ext'] + if file_ext == None: + file_ext = '.mp4' + else: + file_ext = '.mp4' + + source_meta = {} + f_meta = {} + + if vidtype == 'show': + item_folder_name = '%s' % (file_meta['title']) + else: + item_folder_name = '%s (%s)' % (file_meta['title'], file_meta['year']) - if source != 'mega': - time.sleep(2) # reduce 429 -- too many requests error - if common.USE_DOWNLOAD_RESUME_GEN == True and Core.storage.file_exists(abs_path): - if Prefs['use_debug']: - Log('**Resuming download from position: %s**' % startPos) - r = resume_download(furl, startPos, headers=headers) + fname, fname_e = create_fname(file_meta, vidtype, riptype, file_ext, fid + common.DOWNLOAD_FMP_EXT) + if fname == None: + raise fname_e - if WAIT_AND_RETRY_ON_429 == True and r.status_code == 429: - time.sleep(5) - r = resume_download(furl, startPos, headers=headers) + tuid = common.id_generator(16) + common.control.AddThread('download', 'Download File: %s' % fname, time.time(), '2', False, tuid) - if r.status_code != 200 and r.status_code != 206: + abs_path = Core.storage.join_path(path, item_folder_name, fname) + directory = Core.storage.join_path(path, item_folder_name) + + file_meta['temp_file'] = abs_path + + startPos = verifyStartPos(startPos, abs_path) + + sub_url_t = None + if 'openload' in source.lower(): + furl, error, sub_url_t, page_html = common.host_openload.resolve(furl) + # check if file-link valid using fs of 1MB + if error == '': + fs_r, error = common.client.getFileSize(furl, headers=headers, retError=True, retry429=True, cl=2) + if error != '' or furl == None or float(fs_r) < float(1024*1024): + furl, error, sub_url_t, page_html = common.host_openload.resolve(durl) + if error != '' or furl == None: + Log('OpenLoad URL-f: %s' % furl) + Log('OpenLoad URL-d: %s' % durl) + Log('OpenLoad Error: %s' % error) + download_failed(url, error, progress, startPos, purgeKey) + common.control.RemoveThread(tuid) + return + elif 'rapidvideo' in source.lower(): + furl, error, sub_url_t = common.host_rapidvideo.resolve(furl) + # check if file-link valid using fs of 1MB + if error == '': + fs_r, error = common.client.getFileSize(furl, headers=headers, retError=True, retry429=True, cl=2) + if error != '' or furl == None or float(fs_r) < float(1024*1024): + furl, error, sub_url_t = common.host_rapidvideo.resolve(durl) + if error != '' or furl == None: + Log('RapidVideo URL: %s' % furl) + Log('RapidVideo Error: %s' % error) + download_failed(url, error, progress, startPos, purgeKey) + common.control.RemoveThread(tuid) + return + elif 'streamango' in source.lower(): + furl, error, sub_url_t = common.host_streamango.resolve(furl) + # check if file-link valid using fs of 1MB + if error == '': + fs_r, error = common.client.getFileSize(furl, headers=headers, retError=True, 
retry429=True, cl=2) + if error != '' or furl == None or float(fs_r) < float(1024*1024): + furl, error, sub_url_t = common.host_streamango.resolve(durl) + if error != '' or furl == None: + Log('Streamango URL: %s' % furl) + Log('Streamango Error: %s' % error) + download_failed(url, error, progress, startPos, purgeKey) + common.control.RemoveThread(tuid) + return + elif 'direct' in source.lower(): + furl, error, params_enc = common.host_direct.resolve(furl) + if error != '' or furl == None or float(fs_r) < float(1024*1024): + furl, error, params_enc = common.host_direct.resolve(durl) + if error != '' or furl == None: + Log('Direct host URL: %s' % furl) + Log('Direct host Error: %s' % error) + download_failed(url, error, progress, startPos, purgeKey) + common.control.RemoveThread(tuid) + return + try: + params = json.loads(base64.b64decode(params_enc)) + if 'headers' in params.keys(): + headers = params['headers'] + except: + pass + else: + # check if file-link valid using fs of 1MB + fs_r, error = common.client.getFileSize(furl, headers=headers, retError=True, retry429=True, cl=2) + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('Url: %s | Curr. FileSize: %s | Orig. FileSize: %s' % (furl, fs_r, fsBytes)) + if fs_r != None and float(fs_r) > float(1024*1024): # 1MB + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('File Health Looks Good with current download url !') + else: + # ret_val_resolvers is always a tuple with first val. of returned url and second of error... + ret_val_resolvers = common.interface.getHostResolverMain().resolve(furl, page_url=page_url) + error = ret_val_resolvers[1] + if error == '' and furl == ret_val_resolvers[0]: + fs_r, error = common.client.getFileSize(furl, headers=headers, retError=True, retry429=True, cl=2) + if error != '' or float(fs_r) < float(1024*1024): + ret_val_resolvers = common.interface.getHostResolverMain().resolve(durl, page_url=page_url) + error = ret_val_resolvers[1] + if error == '': + try: + furl = ret_val_resolvers[0] + try: + if furl != None and len(furl) >= seq: + furl = furl[seq] + else: + furl = furl[0] + params = json.loads(base64.b64decode(ret_val_resolvers[2])) + if 'headers' in params.keys(): + headers = params['headers'] + except: + pass + fs_r, error = common.client.getFileSize(furl, headers=headers, retError=True, retry429=True, cl=2, timeout=30.0) + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('Url: %s | FileSize: %s | Error: %s' % (furl, fs_r, error)) + except Exception as e: + error = '%s' % e + if error != '' or float(fs_r) < float(1024*1024): + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('Host URL-f: %s' % furl) + Log('Host URL-d: %s' % durl) + Log('Host params: %s' % params) + Log('Host FileSize: %s' % fs_r) + Log('Host Error: %s' % error) + download_failed(url, error, progress, startPos, purgeKey) + common.control.RemoveThread(tuid) + return + else: + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('Host URL-f: %s' % furl) + Log('Host URL-d: %s' % durl) + + if sub_url_t != None: + file_meta['sub_url'] = sub_url_t + + if Prefs['use_debug']: + Log('Save path: %s' % abs_path) + Log('File url: %s' % furl) + + # ToDo + # https://support.plex.tv/articles/200220677-local-media-assets-movies/ + fname, fname_e = create_fname(file_meta, vidtype, riptype, file_ext) + if fname == None: + raise fname_e + + final_abs_path = Core.storage.join_path(path, item_folder_name, fname) + + # subtitle + sub_fname = fname.replace(file_ext,'') + '.en.srt' + sub_file_path = 
Core.storage.join_path(directory, sub_fname) + + write_mode = 'wb' + chunk_speed = 0 + avg_speed = 0 + avg_speed_curr = 0 + eta = 0 + bytes_read = 0 + r = None + err = '' + + if source != 'mega': + time.sleep(2) # reduce 429 -- too many requests error + if common.USE_DOWNLOAD_RESUME_GEN == True and Core.storage.file_exists(abs_path): if Prefs['use_debug']: - Log('Could not Resume (HTTP Code: %s) - New download' % str(r.status_code)) - r = request_download(furl, headers=headers) + Log('**Resuming download from position: %s**' % startPos) + r, err = resume_download(furl, startPos, headers=headers) + + if WAIT_AND_RETRY_ON_429 == True and (r == None or r.status_code == 429): + time.sleep(5) + r, err = resume_download(furl, startPos, headers=headers) + + if r == None or (r.status_code != 200 and r.status_code != 206): + if Prefs['use_debug']: + Log('Could not Resume (HTTP Code: %s) - New download' % str(r.status_code)) + r, err = request_download(furl, headers=headers) + else: + write_mode = 'ab' + bytes_read = startPos else: - write_mode = 'ab' - bytes_read = startPos - else: - if Prefs['use_debug']: - Log('**New download**') - r = request_download(furl, headers=headers) - - if WAIT_AND_RETRY_ON_429 == True and r.status_code == 429: - time.sleep(5) - r = request_download(furl, headers=headers) - - file_meta_temp = file_meta - file_meta_temp['status'] = common.DOWNLOAD_STATUS[1] - file_meta_temp['progress'] = '?' - file_meta_temp['chunk_speed'] = '?' - file_meta_temp['avg_speed'] = '?' - file_meta_temp['avg_speed_curr'] = '?' - file_meta_temp['eta'] = '?' + if Prefs['use_debug']: + Log('**New download**') + r, err = request_download(furl, headers=headers) + + if WAIT_AND_RETRY_ON_429 == True and (r == None or r.status_code == 429): + time.sleep(5) + r, err = request_download(furl, headers=headers) + + if r == None or err != '': + raise Exception(err) - common.DOWNLOAD_TEMP[purgeKey] = E(JSON.StringFromObject(file_meta_temp)) - Dict['DOWNLOAD_TEMP'] = E(JSON.StringFromObject(common.DOWNLOAD_TEMP)) - Dict[purgeKey] = E(JSON.StringFromObject(file_meta_temp)) - Dict.Save() - common.DOWNLOAD_STATS[purgeKey] = file_meta_temp - del QUEUE_RUN_ITEMS[purgeKey] + file_meta_temp = file_meta + file_meta_temp['status'] = common.DOWNLOAD_STATUS[1] + file_meta_temp['progress'] = '?' + file_meta_temp['chunk_speed'] = '?' + file_meta_temp['avg_speed'] = '?' + file_meta_temp['avg_speed_curr'] = '?' + file_meta_temp['eta'] = '?' 
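
The resolver branches above gate each download on a roughly 1 MB minimum reported by the host ("Downloads use a file-size check before proceeding to download"); in the plugin this goes through common.client.getFileSize with its retError/retry429/cl knobs. A standalone approximation of the idea using plain requests (function name and URL are hypothetical):

import requests

MIN_BYTES = 1024 * 1024  # the ~1 MB floor used as a link-health check

def probe_file_size(url, headers=None, timeout=15):
    # HEAD the link and trust Content-Length; returns (size, error).
    try:
        r = requests.head(url, headers=headers, allow_redirects=True, timeout=timeout)
        return int(r.headers.get('Content-Length', 0)), ''
    except Exception as e:
        return 0, '%s' % e

size, err = probe_file_size('https://example.com/video.mp4')
if err != '' or size < MIN_BYTES:
    print('link looks dead or truncated - fall back to the alternate url (durl)')
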
+ + common.DOWNLOAD_TEMP[purgeKey] = E(JSON.StringFromObject(file_meta_temp)) + Dict['DOWNLOAD_TEMP'] = E(JSON.StringFromObject(common.DOWNLOAD_TEMP)) + Dict[purgeKey] = E(JSON.StringFromObject(file_meta_temp)) + Dict.Save() + common.DOWNLOAD_STATS[purgeKey] = file_meta_temp + del QUEUE_RUN_ITEMS[purgeKey] - FMPdownloader = None - - try: + FMPdownloader = None + try: - if not os.path.exists(directory): - os.makedirs(directory) - while os.path.exists(directory) == False: - time.sleep(1) - except OSError as e: - raise Exception('%s' % e) - if source == 'mega' or r.status_code == 200 or r.status_code == 206: - - if source == 'mega': - megaDL = common.host_mega.mega.Mega() - megaDL.setBufferSize(chunk_size) - FMPdownloader = megaDL.download_url(furl) + try: + if not os.path.exists(directory): + os.makedirs(directory) + while os.path.exists(directory) == False: + time.sleep(1) + except OSError as e: + raise Exception('%s' % e) + if source == 'mega' or r.status_code == 200 or r.status_code == 206: - if common.USE_DOWNLOAD_RESUME_MEGA == True and Core.storage.file_exists(abs_path): - if Prefs['use_debug']: - Log('**Resuming download**') - dl_info = FMPdownloader.next() - furl = "%s/%s" % (dl_info['url'],dl_info['name']) - r = resume_download(furl, startPos, headers=headers) + if source == 'mega': + megaDL = common.host_mega.mega.Mega() + megaDL.setBufferSize(chunk_size) + FMPdownloader = megaDL.download_url(furl) - if r.status_code == 200 or r.status_code == 206: - FMPdownloader = r.iter_content(chunk_size) - write_mode = 'ab' - bytes_read = startPos + if common.USE_DOWNLOAD_RESUME_MEGA == True and Core.storage.file_exists(abs_path): + if Prefs['use_debug']: + Log('**Resuming download**') + dl_info = FMPdownloader.next() + furl = "%s/%s" % (dl_info['url'],dl_info['name']) + r = resume_download(furl, startPos, headers=headers) + + if r.status_code == 200 or r.status_code == 206: + FMPdownloader = r.iter_content(chunk_size) + write_mode = 'ab' + bytes_read = startPos + else: + if Prefs['use_debug']: + Log.Error('**Could not Resume (HTTP Code: %s) - New download**' % str(r.status_code)) else: if Prefs['use_debug']: - Log.Error('**Could not Resume (HTTP Code: %s) - New download**' % str(r.status_code)) + Log('**New download**') + FMPdownloader.next() # Start the download. else: - if Prefs['use_debug']: - Log('**New download**') - FMPdownloader.next() # Start the download. 
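
Both the generic and the mega paths above resume by reopening the connection at the current on-disk offset and appending; resume_download further down builds the Range header the same way. Stripped of the plugin plumbing, the pattern is roughly this sketch (destination path hypothetical):

import io, os, requests

def resume(url, dest, chunk_size=1024 * 1024):
    # Start from however many bytes already landed on disk.
    start = os.path.getsize(dest) if os.path.exists(dest) else 0
    headers = {'Range': 'bytes=%s-' % start} if start else {}
    r = requests.get(url, headers=headers, stream=True, timeout=30)
    # 206 means the server honoured the range; anything else restarts.
    mode = 'ab' if r.status_code == 206 else 'wb'
    with io.open(dest, mode) as f:
        for chunk in r.iter_content(chunk_size):
            f.write(chunk)
    r.close()
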
- else: - FMPdownloader = r.iter_content(chunk_size) - - try: - with io.open(abs_path, write_mode) as f: - - last_time = first_time_avg = time.time() - bytes_read_curr = 0 - - for chunk in FMPdownloader: + FMPdownloader = r.iter_content(chunk_size) + + try: + with io.open(abs_path, write_mode) as f: - f.write(chunk) + last_time = first_time_avg = time.time() + bytes_read_curr = 0 - if bytes_read == 0: - file_meta['first_time'] = time.time() - - chunk_size = float(len(chunk)) - bytes_read_curr += chunk_size - bytes_read += chunk_size - curr_time = time.time() - delta_time = (curr_time - last_time)+0.0001 # avoid delta_time == 0 - if bytes_read > 0: - self.dlthrottle.addBytes(chunk_size) - chunk_speed = round(chunk_size/float(delta_time * float(1000 * 1024)),2) - avg_speed = round(float(bytes_read)/float((time.time() - first_time) * float(1000 * 1024)),2) - avg_speed_curr = round(float(bytes_read_curr)/float((time.time() - first_time_avg) * float(1000 * 1024)),2) - rem_bytes = float(total_size_bytes) - float(bytes_read) - eta = round(float(((float(rem_bytes) / (1024.0*1024.0))/float(avg_speed_curr))/60.0), 2) - progress = round(float(100) * float(bytes_read)/float(total_size_bytes), 2) - last_time = curr_time + for chunk in FMPdownloader: - file_meta['status'] = common.DOWNLOAD_STATUS[1] - file_meta['progress'] = progress - file_meta['chunk_speed'] = chunk_speed - file_meta['avg_speed'] = avg_speed - file_meta['avg_speed_curr'] = avg_speed_curr - file_meta['eta'] = eta + f.write(chunk) - longstringObjs = JSON.ObjectFromString(D(Dict[purgeKey])) - action = longstringObjs['action'] - if action in [common.DOWNLOAD_ACTIONS[3]]: - Dict[purgeKey] = E(JSON.StringFromObject(file_meta)) - if action == common.DOWNLOAD_ACTIONS[0]: # cancel - try: - f.close() - except: - pass - try: - r.close() - except: - pass - end_download_by_user(title, url, purgeKey) - common.control.RemoveThread(tuid) - return - elif action == common.DOWNLOAD_ACTIONS[1]: # pause - while action == common.DOWNLOAD_ACTIONS[1]: - curr_time = time.time() - delta_time = (curr_time - last_time)+0.0001 # avoid delta_time == 0 + if bytes_read == 0: + file_meta['first_time'] = time.time() + + chunk_size = float(len(chunk)) + bytes_read_curr += chunk_size + bytes_read += chunk_size + curr_time = time.time() + delta_time = (curr_time - last_time)+0.0001 # avoid delta_time == 0 + if bytes_read > 0: + self.dlthrottle.addBytes(chunk_size) chunk_speed = round(chunk_size/float(delta_time * float(1000 * 1024)),2) avg_speed = round(float(bytes_read)/float((time.time() - first_time) * float(1000 * 1024)),2) avg_speed_curr = round(float(bytes_read_curr)/float((time.time() - first_time_avg) * float(1000 * 1024)),2) rem_bytes = float(total_size_bytes) - float(bytes_read) eta = round(float(((float(rem_bytes) / (1024.0*1024.0))/float(avg_speed_curr))/60.0), 2) progress = round(float(100) * float(bytes_read)/float(total_size_bytes), 2) + last_time = curr_time + + file_meta['status'] = common.DOWNLOAD_STATUS[1] file_meta['progress'] = progress file_meta['chunk_speed'] = chunk_speed file_meta['avg_speed'] = avg_speed file_meta['avg_speed_curr'] = avg_speed_curr + file_meta['eta'] = eta + + longstringObjs = JSON.ObjectFromString(D(Dict[purgeKey])) + action = longstringObjs['action'] + if action in [common.DOWNLOAD_ACTIONS[3]]: + Dict[purgeKey] = E(JSON.StringFromObject(file_meta)) + if action == common.DOWNLOAD_ACTIONS[0]: # cancel + try: + f.close() + except: + pass + try: + r.close() + except: + pass + end_download_by_user(title, url, purgeKey) + 
common.control.RemoveThread(tuid) + return + elif action == common.DOWNLOAD_ACTIONS[1]: # pause + while action == common.DOWNLOAD_ACTIONS[1]: + curr_time = time.time() + delta_time = (curr_time - last_time)+0.0001 # avoid delta_time == 0 + chunk_speed = round(chunk_size/float(delta_time * float(1000 * 1024)),2) + avg_speed = round(float(bytes_read)/float((time.time() - first_time) * float(1000 * 1024)),2) + avg_speed_curr = round(float(bytes_read_curr)/float((time.time() - first_time_avg) * float(1000 * 1024)),2) + rem_bytes = float(total_size_bytes) - float(bytes_read) + eta = round(float(((float(rem_bytes) / (1024.0*1024.0))/float(avg_speed_curr))/60.0), 2) + progress = round(float(100) * float(bytes_read)/float(total_size_bytes), 2) + file_meta['progress'] = progress + file_meta['chunk_speed'] = chunk_speed + file_meta['avg_speed'] = avg_speed + file_meta['avg_speed_curr'] = avg_speed_curr + common.DOWNLOAD_STATS[purgeKey] = file_meta + common.DOWNLOAD_STATS[purgeKey]['action'] = action + time.sleep(1) + longstringObjs = JSON.ObjectFromString(D(Dict[purgeKey])) + action = longstringObjs['action'] + #Log('Action: %s' % action) + + elif action == common.DOWNLOAD_ACTIONS[2]: # resume + common.DOWNLOAD_STATS[purgeKey]['action'] = common.DOWNLOAD_ACTIONS[4] + + elif action == common.DOWNLOAD_ACTIONS[3]: # postpone + try: + f.close() + except: + pass + try: + r.close() + except: + pass + postpone_download_by_user(title, url, progress, bytes_read, purgeKey) + common.control.RemoveThread(tuid) + return + else: + pass + + common.DOWNLOAD_STATS[purgeKey] = file_meta + + if self.dlthrottle.getThrottleState() == True and progress < 99 and action == common.DOWNLOAD_ACTIONS[4]: + last_state = file_meta['action'] + file_meta['action'] = common.DOWNLOAD_PROPS[2] + common.DOWNLOAD_STATS[purgeKey] = file_meta + while self.dlthrottle.getThrottleState() == True: + curr_time = time.time() + delta_time = (curr_time - last_time)+0.0001 # avoid delta_time == 0 + chunk_speed = round(chunk_size/float(delta_time * float(1000 * 1024)),2) + avg_speed = round(float(bytes_read)/float((time.time() - first_time) * float(1000 * 1024)),2) + avg_speed_curr = round(float(bytes_read_curr)/float((time.time() - first_time_avg) * float(1000 * 1024)),2) + eta = round(float(((float(rem_bytes) / (1024.0*1024.0))/float(avg_speed_curr))/60.0), 2) + progress = round(float(100) * float(bytes_read)/float(total_size_bytes), 2) + file_meta['progress'] = progress + file_meta['chunk_speed'] = chunk_speed + file_meta['avg_speed'] = avg_speed + file_meta['avg_speed_curr'] = avg_speed_curr + file_meta['eta'] = eta + + common.DOWNLOAD_STATS[purgeKey] = file_meta + time.sleep(0.1) + + file_meta['action'] = last_state common.DOWNLOAD_STATS[purgeKey] = file_meta - common.DOWNLOAD_STATS[purgeKey]['action'] = action - time.sleep(1) - longstringObjs = JSON.ObjectFromString(D(Dict[purgeKey])) - action = longstringObjs['action'] - #Log('Action: %s' % action) - elif action == common.DOWNLOAD_ACTIONS[2]: # resume - common.DOWNLOAD_STATS[purgeKey]['action'] = common.DOWNLOAD_ACTIONS[4] + if (progress < 5): + raise Exception('Error downloading file.') - elif action == common.DOWNLOAD_ACTIONS[3]: # postpone - try: - f.close() - except: - pass - try: - r.close() - except: - pass - postpone_download_by_user(title, url, progress, bytes_read, purgeKey) - common.control.RemoveThread(tuid) - return - else: + try: + f.close() + except: pass + progress = 100 + file_renamed_inc = True + c = 1 + exact_same_file = False - common.DOWNLOAD_STATS[purgeKey] = file_meta 
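
The cancel/pause/postpone ladder above works by re-reading, between chunks, an action flag that the menus flip in Dict[purgeKey]; the worker never blocks on anything except its own polling loop. A stripped-down model of that cooperative control, with hypothetical names:

import time

def download_loop(chunks, control, sink):
    # 'control' is a mutable mapping another thread can flip, standing in
    # for the encoded action field the plugin re-reads from Dict[purgeKey].
    for chunk in chunks:
        if control['action'] == 'cancel':
            return 'cancelled'
        while control['action'] == 'pause':
            time.sleep(1)                 # spin until resumed...
            if control['action'] == 'cancel':
                return 'cancelled'        # ...or cancelled mid-pause
        sink.write(chunk)
    return 'done'
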
- - if self.dlthrottle.getThrottleState() == True and progress < 99 and action == common.DOWNLOAD_ACTIONS[4]: - last_state = file_meta['action'] - file_meta['action'] = common.DOWNLOAD_PROPS[2] - common.DOWNLOAD_STATS[purgeKey] = file_meta - while self.dlthrottle.getThrottleState() == True: - curr_time = time.time() - delta_time = (curr_time - last_time)+0.0001 # avoid delta_time == 0 - chunk_speed = round(chunk_size/float(delta_time * float(1000 * 1024)),2) - avg_speed = round(float(bytes_read)/float((time.time() - first_time) * float(1000 * 1024)),2) - avg_speed_curr = round(float(bytes_read_curr)/float((time.time() - first_time_avg) * float(1000 * 1024)),2) - eta = round(float(((float(rem_bytes) / (1024.0*1024.0))/float(avg_speed_curr))/60.0), 2) - progress = round(float(100) * float(bytes_read)/float(total_size_bytes), 2) - file_meta['progress'] = progress - file_meta['chunk_speed'] = chunk_speed - file_meta['avg_speed'] = avg_speed - file_meta['avg_speed_curr'] = avg_speed_curr - file_meta['eta'] = eta + while (file_renamed_inc): + CRC_Of_Files = [] + while Core.storage.file_exists(final_abs_path): + try: + crc_file = common.md5(final_abs_path) + CRC_Of_Files.append(crc_file) + except Exception as error: + Log('Error download.py >>> CRC check : %s' % error) + + fname, fname_e = create_fname(file_meta, vidtype, riptype, file_ext, c=str(c)) + if fname == None: + raise fname_e - common.DOWNLOAD_STATS[purgeKey] = file_meta - time.sleep(0.1) + # new progressive name + final_abs_path = Core.storage.join_path(directory, fname) - file_meta['action'] = last_state - common.DOWNLOAD_STATS[purgeKey] = file_meta - - if (progress < 5): - raise Exception('Error downloading file.') + # subtitle + sub_fname = fname.replace(file_ext,'') + '.en.srt' + sub_file_path = Core.storage.join_path(directory, sub_fname) + + c += 1 + + try: + crc_new_file = common.md5(abs_path) + if crc_new_file in CRC_Of_Files: + Log('CRC New File %s:%s' % (crc_new_file, abs_path)) + Log('CRC Old File : %s' % (CRC_Of_Files)) + + exact_same_file = True + except Exception as error: + Log('Error download.py >>> CRC compare : %s' % error) + try: + if exact_same_file == True: + try: + Core.storage.remove_data_item(abs_path) + time.sleep(1) + except Exception as e: + Log('Error download.py >>> CRC based removal : %s' % e) + else: + os.rename(abs_path, final_abs_path) + download_subtitle(file_meta['sub_url'], sub_file_path, params=params) + + file_renamed_inc = False + + except Exception as error: + Log('Error download.py >>> %s : %s' % (error, final_abs_path)) + + if (c > 5): + raise Exception(error) + + # file_meta['status'] = common.DOWNLOAD_STATUS[2] + # file_meta['progress'] = progress + # Dict[purgeKey] = E(JSON.StringFromObject(file_meta)) + # Dict.Save() + download_completed(final_abs_path, file_meta['section_title'], file_meta['section_key'], purgeKey, exact_same_file) + + except Exception as error: + st = traceback.format_exc() + Log(st) try: f.close() except: pass - progress = 100 - file_renamed_inc = True - c = 1 - exact_same_file = False - - while (file_renamed_inc): - CRC_Of_Files = [] - while Core.storage.file_exists(final_abs_path): - try: - crc_file = common.md5(final_abs_path) - CRC_Of_Files.append(crc_file) - except Exception as error: - Log('Error download.py >>> CRC check : %s' % error) - - fname, fname_e = create_fname(file_meta, vidtype, riptype, file_ext, c=str(c)) - if fname == None: - raise fname_e - - # new progressive name - final_abs_path = Core.storage.join_path(directory, fname) - - # subtitle - sub_fname 
= fname.replace(file_ext,'') + '.en.srt' - sub_file_path = Core.storage.join_path(directory, sub_fname) - - c += 1 - - try: - crc_new_file = common.md5(abs_path) - if crc_new_file in CRC_Of_Files: - Log('CRC New File %s:%s' % (crc_new_file, abs_path)) - Log('CRC Old File : %s' % (CRC_Of_Files)) - - exact_same_file = True - except Exception as error: - Log('Error download.py >>> CRC compare : %s' % error) - try: - if exact_same_file == True: - try: - Core.storage.remove_data_item(abs_path) - time.sleep(1) - except Exception as e: - Log('Error download.py >>> CRC based removal : %s' % e) - else: - os.rename(abs_path, final_abs_path) - download_subtitle(file_meta['sub_url'], sub_file_path, params=params) - - file_renamed_inc = False - - except Exception as error: - Log('Error download.py >>> %s : %s' % (error, final_abs_path)) - - if (c > 5): - raise Exception(error) - - # file_meta['status'] = common.DOWNLOAD_STATUS[2] - # file_meta['progress'] = progress - # Dict[purgeKey] = E(JSON.StringFromObject(file_meta)) - # Dict.Save() - - download_completed(final_abs_path, file_meta['section_title'], file_meta['section_key'], purgeKey, exact_same_file) - - except Exception as error: - st = traceback.format_exc() - Log(st) - try: - f.close() - except: - pass - raise Exception(error) - else: - raise Exception('Error response - HTTP Code:%s' % r.status_code) - - except Exception as error: - error = '{}'.format(error) - Dict[purgeKey] = E(JSON.StringFromObject(file_meta)) - download_failed(url, error, progress, bytes_read, purgeKey) + raise Exception(error) + else: + raise Exception('Error response - HTTP Code:%s' % r.status_code) + + except Exception as error: + error = '%s' % error + Dict[purgeKey] = E(JSON.StringFromObject(file_meta)) + download_failed(url, error, progress, bytes_read, purgeKey) - if r != None: - try: - r.close() - except: - pass - - common.control.RemoveThread(tuid) + if r != None: + try: + r.close() + except: + pass + + common.control.RemoveThread(tuid) + except Exception as e: + error = '%s' % e + Log.Error(e) + download_failed(url, error, progress, startPos, purgeKey) + common.control.RemoveThread(tuid) ############################################################################################## @@ -671,7 +756,9 @@ def download_subtitle(url, sub_file_path, params=None): if Prefs['use_debug']: Log('Download Subtitle : %s to %s' % (url, sub_file_path)) r = None - r = request_download(url, headers=headers) + r, err = request_download(url, headers=headers) + if err != '' or r == None: + raise Exception(err) if r.status_code == 200: FMPdownloaderSub = r.iter_content(1024*64) with io.open(sub_file_path, 'wb') as f: @@ -686,14 +773,27 @@ def download_subtitle(url, sub_file_path, params=None): r.close() def request_download(url, headers=None): - return requests.get(url, headers=headers, stream=True, verify=False, allow_redirects=True, timeout=CONNECTION_TIMEOUT) + + try: + r = requests.get(url, headers=headers, stream=True, verify=False, allow_redirects=True, timeout=CONNECTION_TIMEOUT) + return r, '' + except Exception as e: + err = '%s' % e + Log.Error(e) + return None, err def resume_download(url, resume_byte_pos, headers=None): - resume_header = {'Range': 'bytes=%s-' % int(resume_byte_pos)} - if headers != None: - for h in headers.keys(): - resume_header[h] = headers[h] - return requests.get(url, headers=resume_header, stream=True, verify=False, allow_redirects=True, timeout=CONNECTION_TIMEOUT) + try: + resume_header = {'Range': 'bytes=%s-' % int(resume_byte_pos)} + if headers != 
None: + for h in headers.keys(): + resume_header[h] = headers[h] + r = requests.get(url, headers=resume_header, stream=True, verify=False, allow_redirects=True, timeout=CONNECTION_TIMEOUT) + return r, '' + except Exception as e: + err = '%s' % e + Log.Error(e) + return None, err def download_completed(final_abs_path, section_title, section_key, purgeKey, fileExists=False): @@ -717,7 +817,7 @@ def download_completed(final_abs_path, section_title, section_key, purgeKey, fil else: Log('Download Completed - %s' % final_abs_path) - if fileExists == False: + if fileExists == False and common.UsingOption(key=common.GLOBAL_OPTIONS[1], session='None') == False: Thread.Create(refresh_section, {}, section_title, section_key) Thread.Create(trigger_que_run) @@ -738,8 +838,13 @@ def download_failed(url, error, progress, startPos, purgeKey): file_meta['action'] = common.DOWNLOAD_PROPS[1] file_meta['progress'] = progress file_meta['startPos'] = startPos + file_meta['timeAdded'] = time.time() Dict[purgeKey] = E(JSON.StringFromObject(file_meta)) + uid = file_meta['uid'] + if uid in QUEUE_RUN_ITEMS.keys(): + del QUEUE_RUN_ITEMS[uid] + if purgeKey in common.DOWNLOAD_TEMP.keys(): del common.DOWNLOAD_TEMP[purgeKey] Dict['DOWNLOAD_TEMP'] = E(JSON.StringFromObject(common.DOWNLOAD_TEMP)) @@ -809,63 +914,78 @@ def postpone_download_by_user(title, url, progress, startPos, purgeKey): Thread.Create(trigger_que_run) - +trigger_que_run_singleton = [] def trigger_que_run(skip = []): + while len(trigger_que_run_singleton) > 0: + time.sleep(1.0) + + trigger_que_run_singleton.append(True) tuid = common.id_generator(16) common.control.AddThread('trigger_que_run', 'Updates Download Items Run/Status', time.time(), '2', False, tuid) time.sleep(3) items_for_que_run = [] Dict_Temp = {} - for each in Dict: - if 'Down5Split' in each: - try: - file_meta = JSON.ObjectFromString(D(Dict[each])) - if file_meta['uid'] not in skip: + + try: + try: + for each in Dict: + if 'Down5Split' in each: + try: + file_meta = JSON.ObjectFromString(D(Dict[each])) + if file_meta['uid'] not in skip: + if file_meta['status'] == common.DOWNLOAD_STATUS[0] and file_meta['action'] == common.DOWNLOAD_ACTIONS[4] and (time.time() - float(file_meta['timeAdded'])) > 0: + Dict_Temp[each] = Dict[each] + elif file_meta['status'] == common.DOWNLOAD_STATUS[0] and file_meta['action'] == common.DOWNLOAD_ACTIONS[3] and (time.time() - float(file_meta['timeAdded'])) > 0: + Dict_Temp[each] = Dict[each] + except Exception as e: + Log(e) + except Exception as e: + Log(e) + + save_dict = False + try: + for each in Dict_Temp: + try: + file_meta = JSON.ObjectFromString(D(Dict_Temp[each])) if file_meta['status'] == common.DOWNLOAD_STATUS[0] and file_meta['action'] == common.DOWNLOAD_ACTIONS[4] and (time.time() - float(file_meta['timeAdded'])) > 0: - Dict_Temp[each] = Dict[each] + EncTxt = Dict_Temp[each] + items_for_que_run.append({'label':str(file_meta['timeAdded']), 'data':EncTxt, 'uid':file_meta['uid']}) + QUEUE_RUN_ITEMS[file_meta['uid']] = False elif file_meta['status'] == common.DOWNLOAD_STATUS[0] and file_meta['action'] == common.DOWNLOAD_ACTIONS[3] and (time.time() - float(file_meta['timeAdded'])) > 0: - Dict_Temp[each] = Dict[each] - except Exception as e: - Log(e) - - save_dict = False - for each in Dict_Temp: - try: - file_meta = JSON.ObjectFromString(D(Dict_Temp[each])) - if file_meta['status'] == common.DOWNLOAD_STATUS[0] and file_meta['action'] == common.DOWNLOAD_ACTIONS[4] and (time.time() - float(file_meta['timeAdded'])) > 0: - EncTxt = Dict_Temp[each] - 
items_for_que_run.append({'label':str(file_meta['timeAdded']), 'data':EncTxt, 'uid':file_meta['uid']}) - QUEUE_RUN_ITEMS[file_meta['uid']] = False - elif file_meta['status'] == common.DOWNLOAD_STATUS[0] and file_meta['action'] == common.DOWNLOAD_ACTIONS[3] and (time.time() - float(file_meta['timeAdded'])) > 0: - file_meta['action'] = common.DOWNLOAD_ACTIONS[4] - EncTxt = E(JSON.StringFromObject(file_meta)) - Dict[each] = EncTxt - save_dict = True - items_for_que_run.append({'label':str(file_meta['timeAdded']), 'data':EncTxt, 'uid':file_meta['uid']}) - QUEUE_RUN_ITEMS[file_meta['uid']] = False + file_meta['action'] = common.DOWNLOAD_ACTIONS[4] + EncTxt = E(JSON.StringFromObject(file_meta)) + Dict[each] = EncTxt + save_dict = True + items_for_que_run.append({'label':str(file_meta['timeAdded']), 'data':EncTxt, 'uid':file_meta['uid']}) + QUEUE_RUN_ITEMS[file_meta['uid']] = False + except Exception as e: + Log(e) + if save_dict == True: + Dict.Save() except Exception as e: Log(e) - if save_dict == True: - Dict.Save() - - if len(items_for_que_run) > 0: - newlistSorted = sorted(items_for_que_run, key=lambda k: k['label'], reverse=False) - - for i in newlistSorted: - try: - time.sleep(1) - EncTxt = i['data'] - uid = i['uid'] - Thread.Create(do_download, {}, file_meta_enc=EncTxt) - while (uid in QUEUE_RUN_ITEMS.keys()): - time.sleep(0.2) - except Exception as e: - Log(e) - + + if len(items_for_que_run) > 0: + newlistSorted = sorted(items_for_que_run, key=lambda k: k['label'], reverse=False) + + for i in newlistSorted: + try: + time.sleep(1) + EncTxt = i['data'] + uid = i['uid'] + Thread.Create(do_download, {}, file_meta_enc=EncTxt) + while (uid in QUEUE_RUN_ITEMS.keys()): + time.sleep(2.0) + except Exception as e: + Log(e) + except Exception as e: + Log(e) + common.control.RemoveThread(tuid) - + del trigger_que_run_singleton[:] + def move_unfinished_to_failed(): common.DOWNLOAD_TEMP = Dict['DOWNLOAD_TEMP'] @@ -930,16 +1050,19 @@ def move_failed_to_queued(): items_to_change = [] - for each in Dict: - if 'Down5Split' in each: - try: - longstringObjs = JSON.ObjectFromString(D(Dict[each])) - if longstringObjs['status'] == common.DOWNLOAD_STATUS[3]: - items_to_change.append(each) - except Exception as e: - Log("============ move_failed_to_queued =============") - Log(e) - + try: + for each in Dict: + if 'Down5Split' in each: + try: + longstringObjs = JSON.ObjectFromString(D(Dict[each])) + if longstringObjs['status'] == common.DOWNLOAD_STATUS[3]: + items_to_change.append(each) + except Exception as e: + Log("============ move_failed_to_queued =============") + Log(e) + except Exception as e: + Log(e) + if len(items_to_change) > 0: if Prefs['use_debug']: Log("Moving Failed Downloads to Queued !") @@ -975,7 +1098,7 @@ def DownloadInit(): move_unfinished_to_failed() time.sleep(1) - if common.UsingOption(key=common.DEVICE_OPTIONS[13], session='None') == True: + if common.UsingOption(key=common.GLOBAL_OPTIONS[0], session='None') == True: move_failed_to_queued() time.sleep(1) dlt = DownloadThrottler() diff --git a/Contents/Code/downloadsmenu.py b/Contents/Code/downloadsmenu.py index f5e0b5e..9dbec07 100644 --- a/Contents/Code/downloadsmenu.py +++ b/Contents/Code/downloadsmenu.py @@ -12,15 +12,34 @@ REMOVE_ENTRY_WHEN_ALL_EPS_IN_DOWNLOADS = False SOURCE_SEARCH_TIMEOUT = float(5*60) # 5 min. 
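
trigger_que_run above enforces the changelog's "restrict multiple runs" by using a module-level list as a crude lock: late callers spin until the list is empty, and the running call appends a token on entry and clears it with del [:] on exit. The same idea in isolation; the try/finally here is an extra safety net of this sketch, not something the patch itself adds:

import time

_running = []  # empty list == idle (the trigger_que_run_singleton pattern)

def run_one_at_a_time(job):
    while _running:            # another queue run is in flight; wait
        time.sleep(1.0)
    _running.append(True)
    try:
        job()
    finally:
        del _running[:]        # release even if the job raises

A threading.Lock would close the small window between the emptiness check and the append, but the list keeps the guard state trivially inspectable, which matches the plugin's style.
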
+ITEM_FOR_UPDATE = {} + ####################################################################################################### @route(PREFIX + '/AddToAutoPilotDownloads') -def AddToAutoPilotDownloads(title, year, type, purl=None, thumb=None, summary=None, quality=None, file_size=None, riptype='BRRIP', season=None, season_end=None, episode_start=None, episode_end=None, vidtype=None, section_path=None, section_title=None, section_key=None, session=None, admin=False, all_seasons=False, **kwargs): +def AddToAutoPilotDownloads(title, year, type, purl=None, thumb=None, summary=None, quality=None, file_size=None, riptype='BRRIP', season=None, season_end=None, episode_start=None, episode_end=None, vidtype=None, section_path=None, section_title=None, section_key=None, session=None, admin=False, all_seasons=False, edit=False, mode=None, sub_mand=False, scheduled=False, smart_add=False, ssources=None, sproviders=None, **kwargs): try: admin = True if str(admin) == 'True' else False all_seasons = True if str(all_seasons) == 'True' else False + edit = True if str(edit) == 'True' else False + sub_mand = True if str(sub_mand) == 'True' else False + scheduled = True if str(scheduled) == 'True' else False + smart_add = True if str(smart_add) == 'True' else False + + if ssources == None: + ssources_s = [] + ssources_s += [i['name'] for i in common.INTERNAL_SOURCES if str(i['enabled']) == 'True'] + ssources = {k:True for k in ssources_s} + ssources = E(JSON.StringFromObject(ssources)) + + if sproviders == None: + sproviders_s = [] + sproviders_s += [i['name'] for i in common.OPTIONS_PROVIDERS if str(i['enabled']) == 'True'] + sproviders = {k:True for k in sproviders_s} + sproviders = E(JSON.StringFromObject(sproviders)) ret = False + retInAQ = False title = common.cleantitle.windows_filename(title) @@ -37,10 +56,25 @@ def AddToAutoPilotDownloads(title, year, type, purl=None, thumb=None, summary=No except: season_end = 1 + ts = '%s:%s' % (title,season) + if ts in ITEM_FOR_UPDATE.keys(): + if ITEM_FOR_UPDATE[ts] == False and (quality != None or file_size != None): + oc = ObjectContainer(title1='Item Added Already !', no_cache=common.isForceNoCache()) + oc.add(DirectoryObject(key = Callback(Downloads, title=" Downloads", session = session), title = "<< Downloads", thumb = common.GetThumb(R(common.ICON_DOWNLOADS), session=session))) + oc.add(DirectoryObject(key = Callback(main.MainMenu), title = '<< Main Menu', thumb = common.GetThumb(R(common.ICON), session=session))) + return oc + elif ITEM_FOR_UPDATE[ts] == True and (quality != None or file_size != None): + oc = ObjectContainer(title1='Item Updated Already !', no_cache=common.isForceNoCache()) + oc.add(DirectoryObject(key = Callback(Downloads, title="Downloads", session = session), title = "<< Downloads", thumb = common.GetThumb(R(common.ICON_DOWNLOADS), session=session))) + oc.add(DirectoryObject(key = Callback(main.MainMenu), title = '<< Main Menu', thumb = common.GetThumb(R(common.ICON), session=session))) + return oc + elif ITEM_FOR_UPDATE[ts] == True and quality == None or file_size == None: + del ITEM_FOR_UPDATE[ts] + + res = None if year == None: try: - res = common.interface.requestOMDB(title=title, ver=common.VERSION) - Log(res.content) + res = common.interface.requestOMDB(title=title, season=None if season==None else str(season), ver=common.VERSION) res_item = json.loads(json.dumps(res.content)) res_item = json.loads(res_item) year = res_item['Year'][:4] @@ -54,112 +88,241 @@ def AddToAutoPilotDownloads(title, year, type, purl=None, thumb=None, 
summary=No season_end = seasonNR else: season_end = season_end - + if year == None: - return main.MyMessage(title='Error', msg='OMDB is not reachable at the mmoment. Please try again after some time.') + return main.MyMessage(title='Error', msg='OMDB is not reachable at the moment. Please try again after some time.') - for i in common.DOWNLOAD_AUTOPILOT[type]: - if type == 'movie': - if i['title'] == title and i['year'] == year: - ret = True - break - elif type == 'show': - if i['short_title'] == title and int(i['season']) == int(season): - ret = True - break - - if ret == False: - for uid in Dict: - if 'Down5Split' in uid: - EncTxt = None - try: - EncTxt = Dict[uid] - EncTxt = JSON.ObjectFromString(D(EncTxt)) - except: - pass - try: - if EncTxt != None: - if type == 'movie': - if EncTxt['title'] == title and EncTxt['year'] == year: - ret = True - break - elif type == 'show': - if EncTxt['title'] == title and EncTxt['year'] == year and EncTxt['season'] == season: - ret = True - break - except: - pass + first_ep_idx = episode_start + last_ep_idx = episode_end + + if type=='show' and edit == False and (quality == None and file_size == None): + try: + if res == None: + res = common.interface.requestOMDB(title=title, season=str(season), ver=common.VERSION, timeout=7) + try: + episodesTot1 = len(json.loads(res.content)['Episodes']) + episodesTot2 = int(json.loads(res.content)['Episodes'][len(json.loads(res.content)['Episodes'])-1]['Episode']) + episode_end = last_ep_idx = max(episodesTot1,episodesTot2,int(last_ep_idx)) + except: + pass + except: + pass + + if ret == False and edit == False: + for i in common.DOWNLOAD_AUTOPILOT[type]: + if type == 'movie': + if i['title'] == title and i['year'] == year: + ret = True + retInAQ = True + break + elif type == 'show': + if i['short_title'] == title and int(i['season']) == int(season): + ret = True + retInAQ = True + last_ep_idx_t = int(i['episode_end']) + if int(last_ep_idx) <= int(last_ep_idx_t): + last_ep_idx = int(last_ep_idx_t)+1 + elif int(last_ep_idx) > int(last_ep_idx_t): + last_ep_idx = int(last_ep_idx) + if int(first_ep_idx) <= int(i['episode_end']): + first_ep_idx = int(i['episode_end'])+1 + + if ret == False: + for uid in Dict: + if 'Down5Split' in uid: + EncTxt = None + try: + EncTxt = Dict[uid] + EncTxt = JSON.ObjectFromString(D(EncTxt)) + except: + pass + try: + if EncTxt != None: + if type == 'movie': + if EncTxt['title'] == title and EncTxt['year'] == year: + ret = True + break + elif type == 'show': + if EncTxt['title'] == title and EncTxt['year'] == year and EncTxt['season'] == season: + ret = True + last_ep_idx_t = EncTxt['episode'] + if int(last_ep_idx) <= int(last_ep_idx_t): + last_ep_idx = int(last_ep_idx_t)+1 + elif int(last_ep_idx) > int(last_ep_idx_t): + last_ep_idx = int(last_ep_idx) + if int(first_ep_idx) <= int(EncTxt['episode']): + first_ep_idx = int(EncTxt['episode'])+1 + except: + pass if ret == True: - oc = ObjectContainer(title1='Item exists', no_cache=common.isForceNoCache()) - oc.add(DirectoryObject(key = Callback(main.MainMenu), title = '<< Main Menu', thumb = R(common.ICON))) - return oc - #return main.MyMessage(title='Return', msg='Item exists. 
Use back to Return to previous screen') + if edit == False: + if mode == None: + if retInAQ == True: + oc = ObjectContainer(title1='Add New or Replace Existing AutoPilot Items ?', no_cache=common.isForceNoCache()) + oc.add(DirectoryObject( + key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, edit=edit, mode='add', sub_mand=sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders), + title = "Add as New Items" + ) + ) + oc.add(DirectoryObject( + key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, edit=edit, mode='replace', sub_mand=sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders), + title = "Replace Existing Items" + ) + ) + return oc + else: + mode = 'add' + if mode == 'add' and type == 'show': + episode_start = first_ep_idx + episode_end = last_ep_idx + edit = True + elif edit == True: + pass + else: + pass + # oc = ObjectContainer(title1='Item exists', no_cache=common.isForceNoCache()) + # oc.add(DirectoryObject(key = Callback(Downloads, title="Downloads", session = session), title = "<< Downloads", thumb = common.GetThumb(R(common.ICON_DOWNLOADS), session=session))) + # oc.add(DirectoryObject(key = Callback(main.MainMenu), title = '<< Main Menu', thumb = common.GetThumb(R(common.ICON), session=session))) + # return oc + #return main.MyMessage(title='Return', msg='Item exists. 
Use back to Return to previous screen') - if quality == None and file_size == None: - oc = ObjectContainer(title1='Select Quality or FileSize', no_cache=common.isForceNoCache()) + if (quality == None and file_size == None) or (ret == True and edit == False): + if ret == True: + oc = ObjectContainer(title1='Update Quality or FileSize', no_cache=common.isForceNoCache()) + else: + oc = ObjectContainer(title1='Select Quality or FileSize', no_cache=common.isForceNoCache()) oc.add(DirectoryObject( - key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size='%s:%s'%(0,100*common.TO_GB), riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons), + key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size='%s:%s'%(0,100*common.TO_GB), riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, edit=edit, mode=mode, sub_mand=sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders), title = "Enabled: %s | File-Size: %s" % (common.GetEmoji(type=True if file_size=='%s:%s'%(0,100*common.TO_GB) else False, mode='simple', session=session), 'Largest Available File') ) ) for item in common.INTERNAL_SOURCES_SIZES: if item['enabled']: oc.add(DirectoryObject( - key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size='%s:%s'%(item['LL'],item['UL']), riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons), + key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size='%s:%s'%(item['LL'],item['UL']), riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, edit=edit, mode=mode, sub_mand=sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders), title = "Enabled: %s | File-Size: %s" % (common.GetEmoji(type=True if file_size=='%s:%s'%(item['LL'],item['UL']) else False, mode='simple', session=session), item['label']) ) ) for item in common.INTERNAL_SOURCES_QUALS: if item['enabled']: oc.add(DirectoryObject( - key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=item['label'], file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons), + key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, 
thumb=thumb, summary=summary, quality=item['label'], file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, edit=edit, mode=mode, sub_mand=sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders), title = "Enabled: %s | Quality: %s" % (common.GetEmoji(type=True if quality==item['label'] else False, mode='simple', session=session), item['label']) ) ) for item in common.INTERNAL_SOURCES_RIPTYPE: - if item['enabled'] and 'BRRIP' in item['label']: + if 'BRRIP' in item['label']: oc.add(DirectoryObject( - key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=item['label'], season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons), + key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=item['label'], season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, edit=edit, mode=mode, sub_mand=sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders), title = "Enabled: %s | Rip-Type: %s" % (common.GetEmoji(type=True if riptype==item['label'] else False, mode='simple', session=session), item['label']) ) ) + oc.add(DirectoryObject( + key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, edit=edit, mode=mode, sub_mand=not sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders), + title = "Prefer Source with Subtitle: %s" % common.GetEmoji(type=sub_mand, mode='simple', session=session) + ) + ) + oc.add(DirectoryObject( + key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, edit=edit, mode=mode, sub_mand=sub_mand, scheduled=not scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders), + title = "Run Later via Scheduler: %s" % common.GetEmoji(type=scheduled, mode='simple', session=session) + ) + ) + if type == 'show': + oc.add(DirectoryObject( + key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, 
episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, edit=edit, mode=mode, sub_mand=sub_mand, scheduled=scheduled, smart_add=not smart_add, ssources=ssources, sproviders=sproviders), + title = "Smart Add for To Be Aired Episodes: %s" % common.GetEmoji(type=smart_add, mode='simple', session=session) + ) + ) + oc.add(DirectoryObject( + key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, edit=edit, mode=mode, sub_mand=sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders), + title = "Sources: %s >>" % ('All' if ssources==None else common.getSelectedItems(ssources)) + ) + ) + oc.add(DirectoryObject( + key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, edit=edit, mode=mode, sub_mand=sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders), + title = "Providers: %s >>" % ('All' if sproviders==None else common.getSelectedItems(sproviders)) + ) + ) + oc.add(DirectoryObject(key = Callback(main.MainMenu), title = '<< Main Menu')) + if len(oc) == 0: return MC.message_container('Quality or FileSize', 'A Quality or FileSize selection needs to be enabled under Interface Options') else: return oc if section_path == None or section_title == None: - oc = ObjectContainer(title1='Select Location', no_cache=common.isForceNoCache()) + if ret == True or edit == True: + oc = ObjectContainer(title1='Update Location & Ep. Index', no_cache=common.isForceNoCache()) + else: + oc = ObjectContainer(title1='Select Location & Ep. 
Index', no_cache=common.isForceNoCache()) + for item in common.DOWNLOAD_OPTIONS[type]: if item['enabled']: oc.add(DirectoryObject( - key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=item['path'], section_title=item['title'], section_key=item['key'], session=session, admin=admin, all_seasons=all_seasons), + key = Callback(AddToAutoPilotDownloads, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=item['path'], section_title=item['title'], section_key=item['key'], session=session, admin=admin, all_seasons=all_seasons, edit=edit, mode=mode, sub_mand=sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders), + thumb = common.GetThumb(R(common.ICON_SAVE), session=session), title = '%s | %s' % (item['title'], item['path']) ) - ) + ) if len(oc) == 0: return MC.message_container('Download Sources', 'No Download Location set under Download Options') else: if type == 'show': - DumbKeyboard(PREFIX, oc, AddToAutoPilotDownloadsInputEp, dktitle = 'Ep. Start Index:%s' % episode_start, dkthumb=R(common.ICON_DK_ENABLE), dkNumOnly=True, dkHistory=False, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, ep_id='start') - DumbKeyboard(PREFIX, oc, AddToAutoPilotDownloadsInputEp, dktitle = 'Ep. End Index:%s' % episode_end, dkthumb=R(common.ICON_DK_ENABLE), dkNumOnly=True, dkHistory=False, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, ep_id='end') - oc.add(DirectoryObject(key = Callback(main.MainMenu), title = '<< Main Menu', thumb = R(common.ICON))) + DumbKeyboard(PREFIX, oc, AddToAutoPilotDownloadsInputEp, dktitle = 'Ep. Start Index:%s' % episode_start, dkthumb=common.GetThumb(R(common.ICON_DK_ENABLE), session=session), dkNumOnly=True, dkHistory=False, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, ep_id='start', edit=edit, mode=mode, sub_mand=sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders) + DumbKeyboard(PREFIX, oc, AddToAutoPilotDownloadsInputEp, dktitle = 'Ep. 
End Index:%s' % episode_end, dkthumb=common.GetThumb(R(common.ICON_DK_ENABLE), session=session), dkNumOnly=True, dkHistory=False, title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, ep_id='end', edit=edit, mode=mode, sub_mand=sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders) + oc.add(DirectoryObject(key = Callback(Downloads, title="Downloads", session = session), title = "<< Downloads", thumb = common.GetThumb(R(common.ICON_DOWNLOADS), session=session))) + oc.add(DirectoryObject(key = Callback(main.MainMenu), title = '<< Main Menu', thumb = common.GetThumb(R(common.ICON), session=session))) return oc + if type == 'show': + if int(episode_start) > int(episode_end): + return MC.message_container('Episode Index', 'Episode Start Index cannot be greater than End Index') + uid = common.makeUID(title, year, quality, file_size, purl, season, episode_start) if season_end == None: season_end = season if type == 'show': - item = {'title':title, 'year':year, 'season':season, 'season_end':season_end, 'episode':episode_start, 'thumb':thumb, 'summary':summary, 'episode_start':int(episode_start), 'episode_end':int(episode_end), 'quality':quality, 'file_size':file_size, 'riptype':riptype, 'vidtype':vidtype, 'section_path':section_path, 'section_title':section_title, 'section_key':section_key, 'admin':admin, 'timeAdded':time.time(), 'type':type, 'session':session, 'purl':purl, 'status':common.DOWNLOAD_AUTOPILOT_STATUS[3], 'fsBytes':0, 'uid':uid, 'all_seasons':all_seasons} + item = {'title':title, 'year':year, 'season':season, 'season_end':season_end, 'episode':int(episode_start), 'thumb':thumb, 'summary':summary, 'episode_start':int(episode_start), 'episode_end':int(episode_end), 'quality':quality, 'file_size':file_size, 'riptype':riptype, 'vidtype':vidtype, 'section_path':section_path, 'section_title':section_title, 'section_key':section_key, 'admin':admin, 'timeAdded':time.time(), 'first_time':time.time(), 'type':type, 'session':session, 'purl':purl, 'status':common.DOWNLOAD_AUTOPILOT_STATUS[3], 'fsBytes':0, 'uid':uid, 'all_seasons':all_seasons, 'sub_mand':sub_mand, 'scheduled':scheduled, 'smart_add':smart_add, 'smart_add_active':False, 'ssources':ssources, 'sproviders':sproviders} else: - item = {'title':title, 'year':year, 'season':season, 'season_end':season_end, 'episode':episode_start, 'thumb':thumb, 'summary':summary, 'quality':quality, 'file_size':file_size, 'riptype':riptype, 'vidtype':vidtype, 'section_path':section_path, 'section_title':section_title, 'section_key':section_key, 'admin':admin, 'timeAdded':time.time(), 'type':type, 'session':session, 'purl':purl, 'status':common.DOWNLOAD_AUTOPILOT_STATUS[3], 'fsBytes':0, 'uid':uid, 'all_seasons':all_seasons} + item = {'title':title, 'year':year, 'season':season, 'season_end':season_end, 'episode':episode_start, 'thumb':thumb, 'summary':summary, 'quality':quality, 'file_size':file_size, 'riptype':riptype, 'vidtype':vidtype, 'section_path':section_path, 'section_title':section_title, 'section_key':section_key, 'admin':admin, 'timeAdded':time.time(), 'first_time':time.time(), 'type':type, 'session':session, 'purl':purl, 'status':common.DOWNLOAD_AUTOPILOT_STATUS[3], 'fsBytes':0, 'uid':uid, 
'all_seasons':all_seasons, 'sub_mand':sub_mand, 'scheduled':scheduled, 'smart_add':smart_add, 'smart_add_active':False, 'ssources':ssources, 'sproviders':sproviders} + + if mode == 'replace': + save_bool = False + items_to_remove = [] + for i in common.DOWNLOAD_AUTOPILOT[type]: + if type == 'movie': + if i['title'] == title and i['year'] == year: + items_to_remove.append(i) + save_bool = True + break + elif type == 'show': + if i['short_title'] == title and int(i['season']) == int(season): + try: + assert int(episode_start) <= int(i['episode']) <= int(episode_end) + items_to_remove.append(i) + save_bool = True + except AssertionError: + pass + if save_bool == True: + for i in items_to_remove: + common.DOWNLOAD_AUTOPILOT[type].remove(i) + Dict['DOWNLOAD_AUTOPILOT'] = E(JSON.StringFromObject(common.DOWNLOAD_AUTOPILOT)) + Dict.Save() + time.sleep(3) Thread.Create(AutoPilotDownloadThread, {}, item) + ts = '%s:%s' % (title,season) + + if edit == True: + ITEM_FOR_UPDATE[ts] = True + return MC.message_container('Updated in AutoPilot Download Queue', 'The item has been Updated in AutoPilot Download Queue') + else: + ITEM_FOR_UPDATE[ts] = False + return MC.message_container('Added to AutoPilot Download Queue', 'The item has been Added to AutoPilot Download Queue') - return MC.message_container('Added to AutoPilot Download Queue', 'The item has been added to AutoPilot Download Queue') except Exception as e: err = '{}'.format(e) Log('ERROR: downloadsmenu.py > AddToAutoPilotDownloads: %s' % err) @@ -167,7 +330,7 @@ def AddToAutoPilotDownloads(title, year, type, purl=None, thumb=None, summary=No #################################################################################################### @route(PREFIX + "/AddToAutoPilotDownloadsInputEp") -def AddToAutoPilotDownloadsInputEp(query, title, year, type, purl=None, thumb=None, summary=None, quality=None, file_size=None, riptype='BRRIP', season=None, season_end=None, episode_start=None, episode_end=None, vidtype=None, section_path=None, section_title=None, section_key=None, session=None, admin=False, all_seasons=False, ep_id='start', **kwargs): +def AddToAutoPilotDownloadsInputEp(query, title, year, type, purl=None, thumb=None, summary=None, quality=None, file_size=None, riptype='BRRIP', season=None, season_end=None, episode_start=None, episode_end=None, vidtype=None, section_path=None, section_title=None, section_key=None, session=None, admin=False, all_seasons=False, ep_id='start', edit=False, mode=None, sub_mand=False, scheduled=False, smart_add=False, ssources=None, sproviders=None, **kwargs): if ep_id == 'start': try: @@ -180,13 +343,13 @@ def AddToAutoPilotDownloadsInputEp(query, title, year, type, purl=None, thumb=No except: episode_end = '1' - return AddToAutoPilotDownloads(title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons) + return AddToAutoPilotDownloads(title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, quality=quality, file_size=file_size, riptype=riptype, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, all_seasons=all_seasons, 
edit=edit, mode=mode, sub_mand=sub_mand, scheduled=scheduled, smart_add=smart_add, ssources=ssources, sproviders=sproviders) ####################################################################################################### def AutoPilotDownloadThread(item): tuid = common.id_generator(16) - common.control.AddThread('AutoPilotDownloadThread', 'Auto Pilot Download Thread', time.time(), '3', False, tuid) + common.control.AddThread('AutoPilotDownloadThread', 'Auto Pilot Download Thread > %s' % createAutoPilotThreadTitle(item), time.time(), '3', False, tuid) try: type = item['type'] @@ -233,11 +396,18 @@ def AutoPilotDownloadThread(item): common.DOWNLOAD_AUTOPILOT[type].append(item_x) SHOW_QUEUE.append(item_x) + + Dict['DOWNLOAD_AUTOPILOT'] = E(JSON.StringFromObject(common.DOWNLOAD_AUTOPILOT)) + Dict.Save() c=0 for i in SHOW_QUEUE: - common.DOWNLOAD_AUTOPILOT[type][c]['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[0] - AutoPilotDownloadThread1(i) + common.DOWNLOAD_AUTOPILOT[type][c]['timeAdded'] = time.time() + if 'scheduled' not in i.keys() or i['scheduled'] == False: + i['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[0] + AutoPilotDownloadThread1(i) + else: + i['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[5] c += 1 else: item_x = item.copy() @@ -246,7 +416,17 @@ def AutoPilotDownloadThread(item): item_x['watch_title'] = orig_title common.DOWNLOAD_AUTOPILOT[type].append(item_x) - AutoPilotDownloadThread1(item_x) + + Dict['DOWNLOAD_AUTOPILOT'] = E(JSON.StringFromObject(common.DOWNLOAD_AUTOPILOT)) + Dict.Save() + + c = len(common.DOWNLOAD_AUTOPILOT[type])-1 + common.DOWNLOAD_AUTOPILOT[type][c]['timeAdded'] = time.time() + if 'scheduled' not in item_x.keys() or item_x['scheduled'] == False: + item_x['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[0] + AutoPilotDownloadThread1(item_x) + else: + item_x['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[5] Dict['DOWNLOAD_AUTOPILOT'] = E(JSON.StringFromObject(common.DOWNLOAD_AUTOPILOT)) Dict.Save() @@ -257,18 +437,124 @@ def AutoPilotDownloadThread(item): common.control.RemoveThread(tuid) ####################################################################################################### -def AutoPilotDownloadThread1(item=None, runForWaiting=False): +def createAutoPilotThreadTitle(item): + + watch_title = 'AutoPilot Thread' + try: + type = item['type'] + year = item['year'] + if type == 'show': + orig_title = item['title'] + season = int(item['season']) + episode_start = int(item['episode_start']) + episode_end = int(item['episode_end']) + if int(episode_end) < 100: + watch_title = '%s S%s (E%02d-E%02d) (%s)' % (orig_title,season,episode_start,episode_end,year) + else: + watch_title = '%s S%s (E%03d-E%03d) (%s)' % (orig_title,season,episode_start,episode_end,year) + else: + watch_title = '%s (%s)' % (item['title'],year) + except Exception as e: + err = '{}'.format(e) + Log('ERROR: downloadsmenu.py > createAutoPilotThreadTitle: %s' % err) + return watch_title + +####################################################################################################### +@route(PREFIX + '/AutoPilotDownloadThreadCall') +def AutoPilotDownloadThreadCall(item=None): + i = None + err = '' + try: + i = JSON.ObjectFromString(D(item)) + if i == None: + raise Exception('Item is None') + else: + Thread.Create(AutoPilotDownloadThread1, {}, i, False, True) + time.sleep(3.0) + return MC.message_container('AutoPilotDownloadThread', 'Item is performing an AutoPilot Run') + except Exception as e: + err = '%s' % e + return MC.message_container('AutoPilotDownloadThread', 'Error 
in AutoPilot Run. %s' % err) + +####################################################################################################### +@route(PREFIX + '/AutoPilotDownloadCall') +def AutoPilotDownloadCall(item=None, uid=None, session=None): + i = None + err = '' + try: + longstringObjs = JSON.ObjectFromString(D(Dict[uid])) + if 'temp_file' in longstringObjs: + filepath = longstringObjs['temp_file'] + try: + Core.storage.remove_data_item(filepath) + except Exception as e: + Log("==== Temp File Deletion Error ====") + Log(e) + del Dict[uid] + Dict.Save() + except: + pass + try: + i = JSON.ObjectFromString(D(item)) + if i == None: + raise Exception('Item is None') + else: + title = i['title'] + year = i['year'] + type = i['type'] + purl = i['purl'] + thumb = i['thumb'] + summary = i['summary'] + season = i['season'] + season_end = i['season'] + episode_start = i['episode'] + episode_end = i['episode'] + vidtype = i['vidtype'] + section_path = i['section_path'] + section_title = i['section_title'] + section_key = i['section_key'] + provider = i['provider'] + source = i['source'] + + return AddToAutoPilotDownloads(title=title, year=year, type=type, purl=purl, thumb=thumb, summary=summary, season=season, season_end=season_end, episode_start=episode_start, episode_end=episode_end, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=True, edit=True, mode='add') + + except Exception as e: + err = '%s' % e + return MC.message_container('AutoPilotDownloadCall', 'Error in adding to AutoPilot. %s' % err) + +AutoPilotDownloadThread1_Singleton = [] +####################################################################################################### +def AutoPilotDownloadThread1(item=None, runForWaiting=False, viaRunNow=False): + + run_via_scheduler = False + tuid = common.id_generator(16) + if item == None: + run_via_scheduler = True + common.control.AddThread('AutoPilotDownloadThread1', 'Auto Pilot Download Thread > Scheduler', time.time(), '3', False, tuid) + + while len(AutoPilotDownloadThread1_Singleton) > 0: + time.sleep(1.0) + + AutoPilotDownloadThread1_Singleton.append(True) try: removeEntry = False removeEntry_item = None + items_for_removal = {} if item == None: # runs via Scheduler and after Initialization (plugin restart) for type in common.DOWNLOAD_AUTOPILOT.keys(): + items_for_removal[type] = [] for item in common.DOWNLOAD_AUTOPILOT[type]: - if (item['status'] != common.DOWNLOAD_AUTOPILOT_STATUS[2] and runForWaiting == False) or (runForWaiting == True and (item['status'] == common.DOWNLOAD_AUTOPILOT_STATUS[0] or item['status'] == common.DOWNLOAD_AUTOPILOT_STATUS[3])): + if (item['status'] == common.DOWNLOAD_AUTOPILOT_STATUS[2] or item['status'] == common.DOWNLOAD_AUTOPILOT_STATUS[6]): + items_for_removal[type].append(item) + elif 'smart_add_active' in item.keys() and 'first_time' in item.keys() and item['smart_add_active'] == True and float(time.time() - item['first_time']) > float(60*60*24*15): + items_for_removal[type].append(item) + if (item['status'] != common.DOWNLOAD_AUTOPILOT_STATUS[2] and runForWaiting == False) or (runForWaiting == True and (item['status'] == common.DOWNLOAD_AUTOPILOT_STATUS[0] or item['status'] == common.DOWNLOAD_AUTOPILOT_STATUS[3])) or (item['status'] == common.DOWNLOAD_AUTOPILOT_STATUS[0] and float(time.time() - item['timeAdded']) > float(60*60)): sources = None start_time = time.time() + item['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[0] + item['timeAdded'] = start_time if item['type'] 
== 'show': key = main.generatemoviekey(movtitle=None, year=item['year'], tvshowtitle=item['short_title'], season=item['season'], episode=str(item['episode'])) prog = common.interface.checkProgress(key) @@ -278,7 +564,7 @@ def AutoPilotDownloadThread1(item=None, runForWaiting=False): if (time.time() - start_time) > SOURCE_SEARCH_TIMEOUT: Log('ERROR: downloadsmenu.py > AutoPilotDownloadThread1: Source Searching Timeout Reached !') break - sources = common.interface.getExtSources(movtitle=None, year=item['year'], tvshowtitle=item['short_title'], season=item['season'], episode=str(item['episode']), proxy_options=common.OPTIONS_PROXY, provider_options=common.OPTIONS_PROVIDERS, key=key, maxcachetime=common.CACHE_EXPIRY_TIME, ver=common.VERSION, imdb_id=None, session=item['session'], timeout=SOURCE_SEARCH_TIMEOUT) + sources = common.interface.getExtSources(movtitle=None, year=item['year'], tvshowtitle=item['short_title'], season=item['season'], episode=str(item['episode']), proxy_options=common.OPTIONS_PROXY, provider_options=common.OPTIONS_PROVIDERS, key=key, maxcachetime=common.CACHE_EXPIRY_TIME, ver=common.VERSION, imdb_id=None, session=item['session'], timeout=SOURCE_SEARCH_TIMEOUT, forceRet=True) else: key = main.generatemoviekey(movtitle=item['title'], year=item['year'], tvshowtitle=None, season=None, episode=None) prog = common.interface.checkProgress(key) @@ -288,7 +574,7 @@ def AutoPilotDownloadThread1(item=None, runForWaiting=False): if (time.time() - start_time) > SOURCE_SEARCH_TIMEOUT: Log('ERROR: downloadsmenu.py > AutoPilotDownloadThread1: Source Searching Timeout Reached !') break - sources = common.interface.getExtSources(movtitle=item['title'], year=item['year'], tvshowtitle=None, season=None, episode=None, proxy_options=common.OPTIONS_PROXY, provider_options=common.OPTIONS_PROVIDERS, key=key, maxcachetime=common.CACHE_EXPIRY_TIME, ver=common.VERSION, imdb_id=None, session=item['session'], timeout=SOURCE_SEARCH_TIMEOUT) + sources = common.interface.getExtSources(movtitle=item['title'], year=item['year'], tvshowtitle=None, season=None, episode=None, proxy_options=common.OPTIONS_PROXY, provider_options=common.OPTIONS_PROVIDERS, key=key, maxcachetime=common.CACHE_EXPIRY_TIME, ver=common.VERSION, imdb_id=None, session=item['session'], timeout=SOURCE_SEARCH_TIMEOUT, forceRet=True) if sources != None: bool, fsBytes, removeEntry = AutoPilotDownloadThread2(item, sources) @@ -300,23 +586,37 @@ def AutoPilotDownloadThread1(item=None, runForWaiting=False): removeEntry_item = item if item['type'] != 'show' or REMOVE_ENTRY_WHEN_ALL_EPS_IN_DOWNLOADS == False: try: - common.DOWNLOAD_AUTOPILOT[item['type']].remove(item) + #common.DOWNLOAD_AUTOPILOT[item['type']].remove(item) + items_for_removal[item['type']].append(item) + item['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[6] except: pass else: item['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[1] + else: + item['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[4] if REMOVE_ENTRY_WHEN_ALL_EPS_IN_DOWNLOADS == True and removeEntry_item != None and removeEntry_item['type'] == 'show': for i in common.DOWNLOAD_AUTOPILOT['show']: if i['status'] == common.DOWNLOAD_AUTOPILOT_STATUS[2] and i['short_title'] == removeEntry_item['short_title'] and i['season'] == removeEntry_item['season']: try: - common.DOWNLOAD_AUTOPILOT[type].remove(i) + #common.DOWNLOAD_AUTOPILOT[type].remove(i) + items_for_removal[item['type']].append(i) + i['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[6] except: pass - else: # runs when added + else: # runs when added or via Run Now sources = 
None start_time = time.time() type = item['type'] + items_for_removal[type] = [] + + if viaRunNow == True: + for i in common.DOWNLOAD_AUTOPILOT[type]: + if i['uid'] == item['uid']: + i['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[0] + break + if type == 'show': key = main.generatemoviekey(movtitle=None, year=item['year'], tvshowtitle=item['short_title'], season=item['season'], episode=str(item['episode'])) prog = common.interface.checkProgress(key) @@ -326,7 +626,7 @@ def AutoPilotDownloadThread1(item=None, runForWaiting=False): if (time.time() - start_time) > SOURCE_SEARCH_TIMEOUT: Log('ERROR: downloadsmenu.py > AutoPilotDownloadThread1: Source Searching Timeout Reached !') break - sources = common.interface.getExtSources(movtitle=None, year=item['year'], tvshowtitle=item['short_title'], season=item['season'], episode=str(item['episode']), proxy_options=common.OPTIONS_PROXY, provider_options=common.OPTIONS_PROVIDERS, key=key, maxcachetime=common.CACHE_EXPIRY_TIME, ver=common.VERSION, imdb_id=None, session=item['session'], timeout=SOURCE_SEARCH_TIMEOUT) + sources = common.interface.getExtSources(movtitle=None, year=item['year'], tvshowtitle=item['short_title'], season=item['season'], episode=str(item['episode']), proxy_options=common.OPTIONS_PROXY, provider_options=common.OPTIONS_PROVIDERS, key=key, maxcachetime=common.CACHE_EXPIRY_TIME, ver=common.VERSION, imdb_id=None, session=item['session'], timeout=SOURCE_SEARCH_TIMEOUT, forceRet=True) else: key = main.generatemoviekey(movtitle=item['title'], year=item['year'], tvshowtitle=None, season=None, episode=None) prog = common.interface.checkProgress(key) @@ -336,7 +636,7 @@ def AutoPilotDownloadThread1(item=None, runForWaiting=False): if (time.time() - start_time) > SOURCE_SEARCH_TIMEOUT: Log('ERROR: downloadsmenu.py > AutoPilotDownloadThread1: Source Searching Timeout Reached !') break - sources = common.interface.getExtSources(movtitle=item['title'], year=item['year'], tvshowtitle=None, season=None, episode=None, proxy_options=common.OPTIONS_PROXY, provider_options=common.OPTIONS_PROVIDERS, key=key, maxcachetime=common.CACHE_EXPIRY_TIME, ver=common.VERSION, imdb_id=None, session=item['session'], timeout=SOURCE_SEARCH_TIMEOUT) + sources = common.interface.getExtSources(movtitle=item['title'], year=item['year'], tvshowtitle=None, season=None, episode=None, proxy_options=common.OPTIONS_PROXY, provider_options=common.OPTIONS_PROVIDERS, key=key, maxcachetime=common.CACHE_EXPIRY_TIME, ver=common.VERSION, imdb_id=None, session=item['session'], timeout=SOURCE_SEARCH_TIMEOUT, forceRet=True) if sources != None: bool, fsBytes, removeEntry = AutoPilotDownloadThread2(item, sources) @@ -348,26 +648,99 @@ def AutoPilotDownloadThread1(item=None, runForWaiting=False): removeEntry_item = item if type != 'show' or REMOVE_ENTRY_WHEN_ALL_EPS_IN_DOWNLOADS == False: try: - common.DOWNLOAD_AUTOPILOT[type].remove(item) + #common.DOWNLOAD_AUTOPILOT[type].remove(item) + items_for_removal[type].append(item) + item['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[6] except: pass else: item['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[1] + if viaRunNow == True: + for i in common.DOWNLOAD_AUTOPILOT[type]: + if i['uid'] == item['uid']: + i['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[1] + break + else: + item['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[4] + if viaRunNow == True: + for i in common.DOWNLOAD_AUTOPILOT[type]: + if i['uid'] == item['uid']: + i['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[4] + break if REMOVE_ENTRY_WHEN_ALL_EPS_IN_DOWNLOADS == True and 
removeEntry_item != None and removeEntry_item['type'] == 'show': for i in common.DOWNLOAD_AUTOPILOT['show']: if i['status'] == common.DOWNLOAD_AUTOPILOT_STATUS[2] and i['short_title'] == removeEntry_item['short_title'] and i['season'] == removeEntry_item['season']: try: - common.DOWNLOAD_AUTOPILOT[type].remove(i) + #common.DOWNLOAD_AUTOPILOT[type].remove(i) + items_for_removal[item['type']].append(i) + i['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[6] except: pass - + + items_for_smart_add = {} + + # remove completed entries + for type_r in items_for_removal.keys(): + items_for_smart_add[type_r] = [] + for item_r in items_for_removal[type_r]: + for item_i in common.DOWNLOAD_AUTOPILOT[type_r]: + if item_r['uid'] == item_i['uid']: + try: + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('Item for removal: %s S%s E%s' % (item_i['short_title'],item_i['season'],item_i['episode'])) + bool, lastep = verifyForSmart(item_i) + common.DOWNLOAD_AUTOPILOT[type_r].remove(item_i) + if bool == True: + item_i['episode'] = lastep + 1 + item_i['first_time'] = time.time() + item_i['smart_add_active'] = True + item_i['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[1] + item_i['title'] = '%s S%sE%s' % (item_i['short_title'], item_i['season'], item_i['episode']) + item_i['uid'] = common.makeUID(item_i['short_title'], item_i['year'], item_i['quality'], item_i['file_size'], item_i['purl'], item_i['season'], item_i['episode']) + if int(item_i['episode']) < 100: + item_i['watch_title'] = '%s S%sE%02d' % (item_i['short_title'],int(item_i['season']),int(item_i['episode'])) + else: + item_i['watch_title'] = '%s S%sE%03d' % (item_i['short_title'],int(item_i['season']),int(item_i['episode'])) + items_for_smart_add[type_r].append(item_i) + except Exception as e: + Log.Error('ERROR: downloadsmenu.py > AutoPilotDownloadThread1 > items_for_removal: %s' % e) + break + + for type_r in items_for_smart_add.keys(): + for item_a in items_for_smart_add[type_r]: + common.DOWNLOAD_AUTOPILOT[type_r].append(item_a) + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('Item for smart-add: %s S%s E%s' % (item_a['short_title'],item_a['season'],item_a['episode'])) + Dict['DOWNLOAD_AUTOPILOT'] = E(JSON.StringFromObject(common.DOWNLOAD_AUTOPILOT)) Dict.Save() except Exception as e: err = '{}'.format(e) Log('ERROR: downloadsmenu.py > AutoPilotDownloadThread1: %s' % err) + + if run_via_scheduler == True: + common.control.RemoveThread(tuid) + + del AutoPilotDownloadThread1_Singleton[:] + +####################################################################################################### +def verifyForSmart(item): + lastep = 0 + no_items = 0 + if 'smart_add' in item.keys() and item['smart_add'] == True: + for i in common.DOWNLOAD_AUTOPILOT['show']: + if item['short_title'] == i['short_title'] and item['season'] == i['season']: + no_items += 1 + if i['episode'] > lastep: + lastep = int(i['episode']) + + if no_items == 1: + return True, lastep + else: + return False, lastep + ####################################################################################################### def AutoPilotDownloadThread2(item, sources): @@ -376,48 +749,56 @@ def AutoPilotDownloadThread2(item, sources): sources = common.FilterBasedOn(sources) sources = common.OrderBasedOn(sources, use_filesize=True) - for s in sources: - try: - fsBytes = int(s['fs']) - fs = '%s GB' % str(round(float(s['fs'])/common.TO_GB, 3)) - except: - fsBytes = 0 - fs = None - - doSkip = False - removeEntry = True - eps = 0 - eps_done = 0 - - if item['riptype'] != s['rip']: - 
doSkip = True + loops = [0] + sub_bool = [False] + if ('sub_mand' in item.keys() and item['sub_mand'] == True): + sub_bool = [True, False] + loops = [0,1] + + for loop in loops: + for s in sources: + try: + fsBytes = int(s['fs']) + fs = '%s GB' % str(round(float(s['fs'])/common.TO_GB, 3)) + except: + fsBytes = 0 + fs = None - if doSkip == False: - if item['type'] != s['vidtype'].lower(): + doSkip = False + removeEntry = True + eps = 0 + eps_done = 0 + + if item['riptype'] != s['rip']: doSkip = True - - if doSkip == False: - if item['type'] == 'show': - for i in common.DOWNLOAD_AUTOPILOT['show']: - if item['short_title'] == i['short_title'] and item['season'] == i['season']: - eps += 1 - if item['short_title'] == i['short_title'] and item['season'] == i['season'] and item['status'] == common.DOWNLOAD_AUTOPILOT_STATUS[2]: - eps_done += 1 - if item['short_title'] == i['short_title'] and item['season'] == i['season'] and fsBytes == i['fsBytes']: - doSkip = True - - if eps - eps_done > 1 and REMOVE_ENTRY_WHEN_ALL_EPS_IN_DOWNLOADS == True: - removeEntry = False + + if doSkip == False: + if item['type'] != s['vidtype'].lower(): + doSkip = True + + if doSkip == False: + if item['type'] == 'show': + for i in common.DOWNLOAD_AUTOPILOT['show']: + if item['short_title'] == i['short_title'] and item['season'] == i['season']: + eps += 1 + if item['short_title'] == i['short_title'] and item['season'] == i['season'] and item['status'] == common.DOWNLOAD_AUTOPILOT_STATUS[2]: + eps_done += 1 + if item['short_title'] == i['short_title'] and item['season'] == i['season'] and fsBytes == i['fsBytes']: + doSkip = True + + if eps - eps_done > 1 and REMOVE_ENTRY_WHEN_ALL_EPS_IN_DOWNLOADS == True: + removeEntry = False - if doSkip == False: - if item['quality'] == s['quality']: - AutoPilotDownloadThread3(item, s, fsBytes, fs) - return True, fsBytes, removeEntry - elif item['file_size'] != None and fs != None: - i_fs = item['file_size'].split(':') - if fsBytes >= int(float(str(i_fs[0]))): - AutoPilotDownloadThread3(item, s, fsBytes, fs) - return True, fsBytes, removeEntry + if doSkip == False: + if (sub_bool[loop] == True and s['sub_url'] != None) or sub_bool[loop] == False: + if item['quality'] == s['quality']: + AutoPilotDownloadThread3(item, s, fsBytes, fs) + return True, fsBytes, removeEntry + elif item['file_size'] != None and fs != None: + i_fs = item['file_size'].split(':') + if fsBytes >= int(float(str(i_fs[0]))) and fsBytes < int(float(str(i_fs[1]))): + AutoPilotDownloadThread3(item, s, fsBytes, fs) + return True, fsBytes, removeEntry return False, 0, False except Exception as e: @@ -429,19 +810,20 @@ def AutoPilotDownloadThread2(item, sources): def AutoPilotDownloadThread3(item, s, fsBytes, fs): try: - AddToDownloadsList(title=item['short_title'] if item['type']=='show' else item['title'], purl=item['purl'], url=s['url'], durl=s['durl'], summary=item['summary'], thumb=item['thumb'], year=item['year'], quality=s['quality'], source=s['source'], source_meta={}, file_meta={}, type=item['type'], vidtype=item['vidtype'], resumable=s['resumeDownload'], sub_url=s['sub_url'], fsBytes=fsBytes, fs=fs, file_ext=s['file_ext'], mode=common.DOWNLOAD_MODE[0], section_path=item['section_path'], section_title=item['section_title'], section_key=item['section_key'], session=item['session'], admin=item['admin'], params=s['params'], riptype=s['rip'], season=item['season'], episode=item['episode'], provider=s['provider']) + AddToDownloadsList(title=item['short_title'] if item['type']=='show' else item['title'], 
purl=item['purl'], url=s['url'], durl=s['durl'], summary=item['summary'], thumb=item['thumb'], year=item['year'], quality=s['quality'], source=s['source'], source_meta={}, file_meta={}, type=item['type'], vidtype=item['vidtype'], resumable=s['resumeDownload'], sub_url=s['sub_url'], fsBytes=fsBytes, fs=fs, file_ext=s['file_ext'], mode=common.DOWNLOAD_MODE[0], section_path=item['section_path'], section_title=item['section_title'], section_key=item['section_key'], session=item['session'], admin=item['admin'], params=s['params'], riptype=s['rip'], season=item['season'], episode=item['episode'], provider=s['provider'], page_url=s['page_url'], seq=s['seq']) except Exception as e: err = '{}'.format(e) Log('ERROR: downloadsmenu.py > AutoPilotDownloadThread3: %s' % err) ####################################################################################################### @route(PREFIX + '/AddToDownloadsListPre') -def AddToDownloadsListPre(title, year, url, durl, purl, summary, thumb, quality, source, type, resumable, source_meta, file_meta, mode, sub_url=None, fsBytes=None, fs=None, file_ext=None, vidtype=None, section_path=None, section_title=None, section_key=None, session=None, admin=False, update=False, params=None, riptype=None, season=None, episode=None, provider=None, **kwargs): +def AddToDownloadsListPre(title, year, url, durl, purl, summary, thumb, quality, source, type, resumable, source_meta, file_meta, mode, sub_url=None, fsBytes=None, fs=None, file_ext=None, vidtype=None, section_path=None, section_title=None, section_key=None, session=None, admin=False, update=False, params=None, riptype=None, season=None, episode=None, provider=None, page_url=None, seq=0, force_add=False, uid_upd=None, **kwargs): try: admin = True if str(admin) == 'True' else False update = True if str(update) == 'True' else False resumable = True if str(resumable) == 'True' else False + force_add = True if str(force_add) == 'True' else False user = common.control.setting('%s-%s' % (session,'user')) bool = False @@ -478,7 +860,43 @@ def AddToDownloadsListPre(title, year, url, durl, purl, summary, thumb, quality, fs_i, err = common.client.getFileSize(vurl, retError=True, retry429=True, cl=2) else: fs_i, err = common.client.getFileSize(url, retError=True, retry429=True, cl=2) - + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('Url: %s | FileSize: %s | Error: %s' % (url, fs_i, err)) + try: + ret_val_resolvers = None + # check if file-link valid using fs of 1MB + if fs_i != None and float(fs_i) > float(1024*1024): # 1MB + pass + else: + # ret_val_resolvers is always a tuple with first val. of returned list of urls and second of error... 
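To make the resolve-and-probe step below easier to follow, here is a rough standalone sketch under stated assumptions: resolver stands in for common.interface.getHostResolverMain().resolve and is assumed to return a 3-tuple of (candidate urls, error string, base64-encoded params), as the comment above describes, and get_size stands in for common.client.getFileSize. seq selects one of several file versions resolved from a single url, falling back to the first version when the index is out of range.

import base64
import json

def probe_resolved_size(resolver, get_size, url, page_url=None, seq=0):
    # assumed shape: (urls, err, enc_params); not the plugin's exact API
    urls, err, enc_params = resolver(url, page_url=page_url)[:3]
    if err != '' or not urls:
        return None, err or 'no playable url resolved'
    seq = int(seq)  # route parameters arrive as strings
    vurl = urls[seq] if len(urls) > seq else urls[0]  # bounds-checked 'seq no' pick
    headers = None
    try:
        params = json.loads(base64.b64decode(enc_params))
        headers = params.get('headers')  # some hosts need these for the size probe
    except Exception:
        pass
    return get_size(vurl, headers=headers), ''

Probing the resolved URL's size before queueing is what backs the new file-size check before a download proceeds: anything under the 1 MB floor is treated as an unplayable or placeholder file.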
+ ret_val_resolvers = common.interface.getHostResolverMain().resolve(url, page_url=page_url)
+ err = ret_val_resolvers[1]
+ if err != '':
+ Log('Host URL: %s' % url)
+ Log('Host URL Resolved: %s' % ret_val_resolvers[0])
+ Log('Host Error: %s' % err)
+ else:
+ vurl = ret_val_resolvers[0]
+ headers = None
+ try:
+ if vurl != None and len(vurl) > int(seq):
+ vurl = vurl[int(seq)]
+ else:
+ vurl = vurl[0]
+ params = json.loads(base64.b64decode(ret_val_resolvers[2]))
+ if 'headers' in params.keys():
+ headers = params['headers']
+ except:
+ pass
+ fs_i, err = common.client.getFileSize(vurl, headers=headers, retError=True, retry429=True, cl=2)
+ if common.DEV_DEBUG == True and Prefs["use_debug"]:
+ Log('Host URL: %s | FileSize: %s | Host URL Resolved: %s' % (url, fs_i, vurl))
+ except Exception as e:
+ err = '%s' % e
+ if common.DEV_DEBUG == True and Prefs["use_debug"]:
+ Log(e)
+ Log('Host URLs: %s' % ret_val_resolvers[0])
+
if err != '':
return MC.message_container('Error', 'Error: %s. Please try again later when it becomes available.' % err)
@@ -489,7 +907,7 @@ def AddToDownloadsListPre(title, year, url, durl, purl, summary, thumb, quality,
fsBytes = 0
fs = '? GB'
- if int(fsBytes) < 100 * 1024:
+ if float(fsBytes) < float(1024*1024): # 1MB
return MC.message_container('FileSize Error', 'File reporting %s bytes cannot be downloaded. Please try again later when it becomes available.' % fsBytes)
except Exception as e:
@@ -497,9 +915,19 @@ def AddToDownloadsListPre(title, year, url, durl, purl, summary, thumb, quality,
return MC.message_container('Error', '%s. Sorry but file could not be added.' % e)
uid = 'Down5Split'+E(title+year+fs+quality+source+str(season)+str(episode))
- if Dict[uid] != None:
- EncTxt = Dict[uid]
+ uid_alts = common.uidAltExists(uid)
+
+ if uid_upd != None:
+ uid = uid_upd
+ else:
+ if len(uid_alts) > 0:
+ uid = uid_alts[len(uid_alts)-1]
+
+ if force_add == False and Dict[uid] != None:
+
+ EncTxt = Dict[uid]
EncTxt = JSON.ObjectFromString(D(EncTxt))
+
if admin == False and update == False:
return MC.message_container('Download Sources', 'Item exists in Downloads List')
elif admin == True and update == True and EncTxt['url'] != url:
@@ -512,8 +940,21 @@ def AddToDownloadsListPre(title, year, url, durl, purl, summary, thumb, quality,
return MC.message_container('Item Update', 'Item has been updated with new download url')
elif admin == True and update == False and EncTxt['url'] != url:
oc = ObjectContainer(title1='Item exists in Downloads List', no_cache=common.isForceNoCache())
- oc.add(DirectoryObject(key = Callback(AddToDownloadsListPre, title=title, purl=purl, url=url, durl=durl, summary=summary, thumb=thumb, year=year, quality=quality, source=source, source_meta=source_meta, file_meta=file_meta, type=type, resumable=resumable, sub_url=sub_url, fsBytes=fsBytes, fs=fs, file_ext=file_ext, mode=mode, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, update=True, params=params, riptype=riptype, season=season, episode=episode, provider=provider), title = 'Update this item'))
- oc.add(DirectoryObject(key = Callback(main.MyMessage, title='Return', msg='Use back to Return to previous screen'), title = 'Return'))
+
+ for u_i in uid_alts:
+ EncTxt_t = Dict[u_i]
+ EncTxt_t = JSON.ObjectFromString(D(EncTxt_t))
+ timestr_t = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(EncTxt_t['timeAdded'])))
+
+ if EncTxt_t['status'] == common.DOWNLOAD_STATUS[2]:
+ t_exists = 'Item in Completed Downloads %s | %s' %
(EncTxt_t['section_path'],timestr_t) + oc.add(DirectoryObject(title = '<< %s >>' % t_exists, key = Callback(main.MyMessage, title='Item Exists', msg=t_exists))) + else: + oc.add(DirectoryObject(key = Callback(AddToDownloadsListPre, title=title, purl=purl, url=url, durl=durl, summary=summary, thumb=thumb, year=year, quality=quality, source=source, source_meta=source_meta, file_meta=file_meta, type=type, resumable=resumable, sub_url=sub_url, fsBytes=fsBytes, fs=fs, file_ext=file_ext, mode=mode, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, update=True, params=params, riptype=riptype, season=season, episode=episode, provider=provider, page_url=page_url, seq=seq, uid_upd=u_i), title = 'Update item. %s | %s | %s' % (EncTxt_t['section_path'],EncTxt_t['fid'],timestr_t))) + + oc.add(DirectoryObject(key = Callback(AddToDownloadsListPre, title=title, purl=purl, url=url, durl=durl, summary=summary, thumb=thumb, year=year, quality=quality, source=source, source_meta=source_meta, file_meta=file_meta, type=type, resumable=resumable, sub_url=sub_url, fsBytes=fsBytes, fs=fs, file_ext=file_ext, mode=mode, vidtype=vidtype, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, update=False, params=params, riptype=riptype, season=season, episode=episode, provider=provider, page_url=page_url, seq=seq, force_add=True), title = 'Add as New < Item will be duplicated >')) + oc.add(DirectoryObject(key = Callback(Downloads, title=" Downloads", session = session), title = "<< Downloads")) + oc.add(DirectoryObject(key = Callback(main.MainMenu), title = '<< Main Menu')) return oc elif admin == True and update == True and EncTxt['url'] == url: return MC.message_container('Item Updated', 'Item url updated.') @@ -551,7 +992,7 @@ def AddToDownloadsListPre(title, year, url, durl, purl, summary, thumb, quality, Dict['DOWNLOAD_OPTIONS_SECTION_TEMP'][tuec][x] = common.DOWNLOAD_OPTIONS[x] Dict.Save() - return AddToDownloadsList(title=title, purl=purl, url=url, durl=durl, summary=summary, thumb=thumb, year=year, quality=quality, source=source, source_meta=source_meta, file_meta=file_meta, type=type, vidtype=vidtype, resumable=resumable, sub_url=sub_url, fsBytes=fsBytes, fs=fs, file_ext=file_ext, mode=mode, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, update=update, user=user,params=params, riptype=riptype, season=season, episode=episode, provider=provider) + return AddToDownloadsList(title=title, purl=purl, url=url, durl=durl, summary=summary, thumb=thumb, year=year, quality=quality, source=source, source_meta=source_meta, file_meta=file_meta, type=type, vidtype=vidtype, resumable=resumable, sub_url=sub_url, fsBytes=fsBytes, fs=fs, file_ext=file_ext, mode=mode, section_path=section_path, section_title=section_title, section_key=section_key, session=session, admin=admin, update=update, user=user,params=params, riptype=riptype, season=season, episode=episode, provider=provider, page_url=page_url, seq=seq, force_add=force_add) except Exception as e: err = '{}'.format(e) Log('Error AddToDownloadsListPre: %s' % err) @@ -560,11 +1001,12 @@ def AddToDownloadsListPre(title, year, url, durl, purl, summary, thumb, quality, ###################################################################################### # Adds a movie to the DownloadsList list using the (title + 'Down5Split') as a key for the url @route(PREFIX + 
"/addToDownloadsList") -def AddToDownloadsList(title, year, url, durl, purl, summary, thumb, quality, source, type, resumable, source_meta, file_meta, sub_url=None, fsBytes=None, fs=None, file_ext=None, vidtype=None, section_path=None, section_title=None, section_key=None, session=None, admin=False, update=False, user=None, params=None, riptype=None, season=None, episode=None, provider=None, **kwargs): +def AddToDownloadsList(title, year, url, durl, purl, summary, thumb, quality, source, type, resumable, source_meta, file_meta, sub_url=None, fsBytes=None, fs=None, file_ext=None, vidtype=None, section_path=None, section_title=None, section_key=None, session=None, admin=False, update=False, user=None, params=None, riptype=None, season=None, episode=None, provider=None, page_url=None, seq=0, force_add=False, **kwargs): admin = True if str(admin) == 'True' else False update = True if str(update) == 'True' else False resumable = True if str(resumable) == 'True' else False + force_add = True if str(force_add) == 'True' else False #Log(common.DOWNLOAD_OPTIONS_SECTION_TEMP) tuec = E(title+year+quality+source+url+str(season)+str(episode)) @@ -605,13 +1047,13 @@ def AddToDownloadsList(title, year, url, durl, purl, summary, thumb, quality, so LOCS.append(item) if len(LOCS) == 1: item = LOCS[0] - return AddToDownloadsList(title=title, year=year, url=url, durl=durl, purl=purl, summary=summary, thumb=thumb, fs=fs, fsBytes=fsBytes, file_ext=file_ext, quality=quality, source=source, source_meta=source_meta, file_meta=file_meta, type=type, vidtype=vidtype, resumable=resumable, sub_url=sub_url, section_path=item['path'], section_title=item['title'], section_key=item['key'], session=session, admin=admin, update=update, user=user, params=params, riptype=riptype, season=season, episode=episode, provider=provider) + return AddToDownloadsList(title=title, year=year, url=url, durl=durl, purl=purl, summary=summary, thumb=thumb, fs=fs, fsBytes=fsBytes, file_ext=file_ext, quality=quality, source=source, source_meta=source_meta, file_meta=file_meta, type=type, vidtype=vidtype, resumable=resumable, sub_url=sub_url, section_path=item['path'], section_title=item['title'], section_key=item['key'], session=session, admin=admin, update=update, user=user, params=params, riptype=riptype, season=season, episode=episode, provider=provider, page_url=page_url, seq=seq, force_add=force_add) else: oc = ObjectContainer(title1='Select Location', no_cache=common.isForceNoCache()) for item in DOWNLOAD_OPTIONS_SECTION_TEMP[type]: if item['enabled']: oc.add(DirectoryObject( - key = Callback(AddToDownloadsList, title=title, year=year, url=url, durl=durl, purl=purl, summary=summary, thumb=thumb, fs=fs, fsBytes=fsBytes, file_ext=file_ext, quality=quality, source=source, source_meta=source_meta, file_meta=file_meta, type=type, vidtype=vidtype, resumable=resumable, sub_url=sub_url, section_path=item['path'], section_title=item['title'], section_key=item['key'], session=session, admin=admin, update=update, user=user, params=params, riptype=riptype, season=season, episode=episode, provider=provider), + key = Callback(AddToDownloadsList, title=title, year=year, url=url, durl=durl, purl=purl, summary=summary, thumb=thumb, fs=fs, fsBytes=fsBytes, file_ext=file_ext, quality=quality, source=source, source_meta=source_meta, file_meta=file_meta, type=type, vidtype=vidtype, resumable=resumable, sub_url=sub_url, section_path=item['path'], section_title=item['title'], section_key=item['key'], session=session, admin=admin, update=update, user=user, 
params=params, riptype=riptype, season=season, episode=episode, provider=provider, page_url=page_url, seq=seq, force_add=force_add),
 							title = '%s | %s' % (item['title'], item['path'])
 						)
 					)
@@ -651,13 +1093,19 @@ def AddToDownloadsList(title, year, url, durl, purl, summary, thumb, quality, so
 	uid = 'Down5Split'+E(title+year+fs+quality+source+str(season)+str(episode))
 
 	if Dict[uid] != None:
-		if admin == True and update == True:
-			pass
+		if admin == True and force_add == True:
+			uid_c = 0
+			while Dict[uid] != None:
+				uid_c += 1
+				uid = 'Down5Split'+E(title+year+fs+quality+source+str(season)+str(episode))+'-%s' % str(uid_c)
 		else:
-			Dict['DOWNLOAD_OPTIONS_SECTION_TEMP'][tuec] = {}
-			Dict['DOWNLOAD_OPTIONS_SECTION_TEMP'][tuec]['Done'] = 'Done'
-			Dict.Save()
-			return MC.message_container('Download Sources', 'Item already in Downloads List')
+			if admin == True and update == True:
+				pass
+			else:
+				Dict['DOWNLOAD_OPTIONS_SECTION_TEMP'][tuec] = {}
+				Dict['DOWNLOAD_OPTIONS_SECTION_TEMP'][tuec]['Done'] = 'Done'
+				Dict.Save()
+				return MC.message_container('Download Sources', 'Item already in Downloads List')
 
 	if file_ext == None:
 		file_ext = '.mp4'
@@ -673,7 +1121,7 @@ def AddToDownloadsList(title, year, url, durl, purl, summary, thumb, quality, so
 	else:
 		watch_title = title
 
-	EncTxt = E(JSON.StringFromObject({'title':title, 'watch_title':watch_title, 'year':year, 'season':season, 'episode':episode, 'url':url, 'durl':durl, 'purl':purl, 'sub_url':sub_url, 'summary':summary, 'thumb':thumb, 'fsBytes':int(fsBytes), 'fs':fs, 'chunk_size':chunk_size, 'file_ext':file_ext, 'quality':quality, 'source':source, 'source_meta':source_meta, 'file_meta':file_meta, 'uid':uid, 'fid':fid, 'type':type, 'vidtype':vidtype, 'resumable':resumable, 'status':common.DOWNLOAD_STATUS[0], 'startPos':0, 'timeAdded':time.time(), 'first_time':time.time(), 'progress':0, 'chunk_speed':0,'avg_speed':0,'avg_speed_curr':0, 'eta':0, 'error':'', 'last_error':'Unknown Error', 'action':common.DOWNLOAD_ACTIONS[4],'section_path':section_path, 'section_title':section_title, 'section_key':section_key, 'user':user, 'params':params, 'riptype':riptype, 'provider':provider}))
+	EncTxt = E(JSON.StringFromObject({'title':title, 'watch_title':watch_title, 'year':year, 'season':season, 'episode':episode, 'url':url, 'durl':durl, 'purl':purl, 'sub_url':sub_url, 'summary':summary, 'thumb':thumb, 'fsBytes':int(fsBytes), 'fs':fs, 'chunk_size':chunk_size, 'file_ext':file_ext, 'quality':quality, 'source':source, 'source_meta':source_meta, 'file_meta':file_meta, 'uid':uid, 'fid':fid, 'type':type, 'vidtype':vidtype, 'resumable':resumable, 'status':common.DOWNLOAD_STATUS[0], 'startPos':0, 'timeAdded':time.time(), 'first_time':time.time(), 'progress':0, 'chunk_speed':0,'avg_speed':0,'avg_speed_curr':0, 'eta':0, 'error':'', 'last_error':'Unknown Error', 'action':common.DOWNLOAD_ACTIONS[4],'section_path':section_path, 'section_title':section_title, 'section_key':section_key, 'user':user, 'params':params, 'riptype':riptype, 'provider':provider, 'page_url':page_url, 'seq':int(seq)}))
 
 	Dict[uid] = EncTxt
 	Dict.Save()
@@ -697,209 +1145,165 @@ def AddToDownloadsList(title, year, url, durl, purl, summary, thumb, quality, so
 ######################################################################################
 # Loads Downloads from Dict.
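# A standalone sketch (hypothetical names, not part of the patch) of the per-status
# tally the Downloads route below builds before rendering its menu: every stored
# 'Down5Split*' entry is decoded and counted once per matching status, and the
# 'All' bucket deliberately matches every entry. The plugin itself decodes each
# entry with D() before JSON-parsing it; plain JSON strings stand in for that here.

import json

def tally_by_status(store, statuses, all_bucket):
    counts = dict((s, 0) for s in statuses)
    for key, blob in store.items():
        if 'Down5Split' not in key:
            continue
        try:
            item = json.loads(blob)
        except ValueError:
            continue  # undecodable entries are logged and skipped by the route
        for s in statuses:
            if item.get('status') == s or s == all_bucket:
                counts[s] += 1
    return counts

# e.g. tally_by_status({'Down5Split-abc': '{"status": "Queued"}'}, ['Queued', 'All'], 'All')
#      -> {'Queued': 1, 'All': 1}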
@route(PREFIX + "/downloads") -def Downloads(title, session = None, status = None, refresh = 0, isDir='N', **kwargs): +def Downloads(title, session = None, status = None, refresh = 0, isDir='N', item=None, **kwargs): if not common.interface.isInitialized(): return MC.message_container(common.MSG0, '%s. Progress %s%s (%s)' % (common.MSG1, common.interface.getProvidersInitStatus(), '%', common.interface.getCurrentProviderInProcess())) oc = ObjectContainer(title1=title, no_cache=common.isForceNoCache()) - if status == None: - N_status = {} - for dstatus in common.DOWNLOAD_STATUS: - c = 0 - if dstatus == common.DOWNLOAD_STATUS[6]: # AutoPilot Queue - for k in common.DOWNLOAD_AUTOPILOT.keys(): - #for i in common.DOWNLOAD_AUTOPILOT[k]: - # if i['status']==common.DOWNLOAD_AUTOPILOT_STATUS[0] and (time.time() - float(i['timeAdded']) > float(15*60)): - # i['status'] = common.DOWNLOAD_AUTOPILOT_STATUS[4] - - c += len(common.DOWNLOAD_AUTOPILOT[k]) - N_status[dstatus] = c - else: + try: + if status == None: + N_status = {} + for dstatus in common.DOWNLOAD_STATUS: c = 0 - for each in Dict: - if 'Down5Split' in each: - try: - longstringObjs = JSON.ObjectFromString(D(Dict[each])) - if longstringObjs['status'] == dstatus or dstatus == common.DOWNLOAD_STATUS[5]: # All - c += 1 - except Exception as e: - Log('ERROR: Downloads >> %s' % e) - N_status[dstatus] = c - for statusx in common.DOWNLOAD_STATUS: - oc.add(DirectoryObject( - key = Callback(Downloads, title="%s Downloads" % statusx, status = statusx, session = session, isDir='N'), - title = '%s (%s)' % (statusx, str(N_status[statusx])) + if dstatus == common.DOWNLOAD_STATUS[6]: # AutoPilot Queue + for k in common.DOWNLOAD_AUTOPILOT.keys(): + c += len(common.DOWNLOAD_AUTOPILOT[k]) + N_status[dstatus] = c + else: + c = 0 + for each in Dict: + if 'Down5Split' in each: + try: + longstringObjs = JSON.ObjectFromString(D(Dict[each])) + if longstringObjs['status'] == dstatus or dstatus == common.DOWNLOAD_STATUS[5]: # All + c += 1 + except Exception as e: + Log('ERROR: Downloads >> %s' % e) + N_status[dstatus] = c + for statusx in common.DOWNLOAD_STATUS: + oc.add(DirectoryObject( + key = Callback(Downloads, title="%s Downloads" % statusx, status = statusx, session = session, isDir='N'), + title = '%s (%s)' % (statusx, str(N_status[statusx])) + ) ) - ) - oc.add(DirectoryObject(key = Callback(Downloads, title="Downloads", session = session, refresh = int(refresh)+1), title = "Refresh")) - if int(refresh) > 0: + oc.add(DirectoryObject(key = Callback(Downloads, title="Downloads", session = session, refresh = int(refresh)+1), title = "Refresh")) oc.add(DirectoryObject(key = Callback(main.MainMenu), title = '<< Main Menu', thumb = R(common.ICON))) - return oc - - items_to_del = [] - doTrigger = False - - if status == common.DOWNLOAD_STATUS[6]: # Auto-Pilot - doSave = False - shows_array = {} - for k in common.DOWNLOAD_AUTOPILOT.keys(): - for i in common.DOWNLOAD_AUTOPILOT[k]: - try: - q_fs = i['quality'] if i['quality'] != None else i['file_size'] - rip = i['riptype'] + return oc + + items_to_del = [] + doTrigger = False + first_episode = 0 + last_episode = 1 + + if status == common.DOWNLOAD_STATUS[6]: # Auto-Pilot + doSave = False + shows_array = {} + for k in common.DOWNLOAD_AUTOPILOT.keys(): + for i in common.DOWNLOAD_AUTOPILOT[k]: try: - q_fs1 = q_fs.split(':') - q_fs_t = '%s GB - %s GB' % (str(round(float(q_fs1[0])/common.TO_GB, 3)), str(round(float(q_fs1[1])/common.TO_GB, 3))) - q_fs = q_fs_t - except: - pass + q_fs = i['quality'] if i['quality'] != None else 
i['file_size'] + rip = i['riptype'] + try: + q_fs1 = q_fs.split(':') + q_fs_t = '%s GB - %s GB' % (str(round(float(q_fs1[0])/common.TO_GB, 3)), str(round(float(q_fs1[1])/common.TO_GB, 3))) + q_fs = q_fs_t + except: + pass - timestr = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(i['timeAdded']))) - ooc = None - addShow = False - if k == 'show': - show_t = '%s:%s'%(i['short_title'],i['season']) - if isDir == 'N': - if show_t not in shows_array.keys(): - ooc = DirectoryObject(title=show_t, + timestr = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(i['timeAdded']))) + ooc = None + addShow = False + if k == 'show': + show_t = '%s:%s'%(i['short_title'],i['season']) + if isDir == 'N': + if show_t not in shows_array.keys(): + ooc = DirectoryObject(title=show_t, + thumb = common.GetThumb(i['thumb'], session=session), + summary = i['summary'], + tagline = timestr, + key = Callback(Downloads, title=show_t, session=session, status=status, isDir='Y', item=E(JSON.StringFromObject(i))) + ) + shows_array[show_t] = ooc + elif isDir == 'Y': + addShow = True + if isDir == 'Y' and show_t == title: + addShow = True + wtitle = '%s | %s | %s | %s | %s | %s' % (i['watch_title'], k.title(), rip, q_fs, i['status'], timestr) + last_episode = i['episode'] + first_episode_x = int(i['episode']) + if first_episode == 0: + first_episode = first_episode_x + if first_episode_x < first_episode: + first_episode = first_episode_x + Log('Item: %s %s %s' % (title, int(i['season']), int(i['episode']))) + elif k == 'extras': + if isDir == 'N': + addShow = True + wtitle = '%s (%s) | %s - %s | %s | %s | %s | %s' % (i['title'], i['year'], k.title(), i['vidtype'], rip, q_fs, i['status'], timestr) + else: + if isDir == 'N': + addShow = True + wtitle = '%s (%s) | %s | %s | %s | %s | %s' % (i['title'], i['year'], k.title(), rip, q_fs, i['status'], timestr) + + if ooc != None: + oc.add(ooc) + else: + if addShow == True: + #key = Callback(main.MyMessage, title='Info', msg=wtitle) + key = Callback(DownloadingFilesMenu, title=i['watch_title'], uid=i['uid'], session=session, status=status, autopilot=True, type=k) + + do = DirectoryObject( + title = wtitle, thumb = common.GetThumb(i['thumb'], session=session), summary = i['summary'], tagline = timestr, - key = Callback(Downloads, title=show_t, session=session, status=status, isDir='Y') + key = key ) - shows_array[show_t] = ooc - elif isDir == 'Y': - addShow = True - if isDir == 'Y' and show_t == title: - addShow = True - wtitle = '%s | %s | %s | %s | %s | %s' % (i['watch_title'], k.title(), rip, q_fs, i['status'], timestr) - elif k == 'extras': - if isDir == 'N': - addShow = True - wtitle = '%s (%s) | %s - %s | %s | %s | %s | %s' % (i['title'], i['year'], k.title(), i['vidtype'], rip, q_fs, i['status'], timestr) - else: - if isDir == 'N': - addShow = True - wtitle = '%s (%s) | %s | %s | %s | %s | %s' % (i['title'], i['year'], k.title(), rip, q_fs, i['status'], timestr) - - if ooc != None: - oc.add(ooc) - else: - if addShow == True: - #key = Callback(main.MyMessage, title='Info', msg=wtitle) - key = Callback(DownloadingFilesMenu, title=i['watch_title'], uid=i['uid'], session=session, status=status, autopilot=True, type=k) - - do = DirectoryObject( - title = wtitle, - thumb = common.GetThumb(i['thumb'], session=session), - summary = i['summary'], - tagline = timestr, - key = key - ) - oc.add(do) - except Exception as e: - Log("==============Downloads==============") - #Log(longstringObjs) - Log(e) - common.DOWNLOAD_AUTOPILOT[k].remove(i) - doSave = True + oc.add(do) + except 
Exception as e: + Log("==============Downloads==============") + #Log(longstringObjs) + Log(e) + common.DOWNLOAD_AUTOPILOT[k].remove(i) + doSave = True - if doSave == True: - Dict['DOWNLOAD_AUTOPILOT'] = E(JSON.StringFromObject(common.DOWNLOAD_AUTOPILOT)) - Dict.Save() - else: - for each in Dict: - if 'Down5Split' in each: - try: - longstringObjs = JSON.ObjectFromString(D(Dict[each])) - if 'watch_title' not in longstringObjs.keys(): - if longstringObjs['type'] == 'show': - try: - if int(longstringObjs['episode']) < 100: - longstringObjs['watch_title'] = '%s S%sE%02d' % (longstringObjs['title'],int(longstringObjs['season']),int(longstringObjs['episode'])) - else: - longstringObjs['watch_title'] = '%s S%sE%03d' % (longstringObjs['title'],int(longstringObjs['season']),int(longstringObjs['episode'])) - except Exception as e: - Log('Error in Downloads > %s' % e) - longstringObjs['watch_title'] = longstringObjs['title'] - else: - longstringObjs['watch_title'] = longstringObjs['title'] - - if longstringObjs['status'] == status or status == common.DOWNLOAD_STATUS[5]: # All - timestr = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(longstringObjs['timeAdded']))) - key = None - summary = longstringObjs['summary'] - has_sub = False if longstringObjs['sub_url'] == None else True - - if status == common.DOWNLOAD_STATUS[0]: # Queued - wtitle = '%s (%s) | %s | %s - %s | %s [%s] | %s - %s | %s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['provider'] if 'provider' in longstringObjs.keys() else 'N/A', longstringObjs['status'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', common.GetEmoji(type=has_sub, mode='simple', session=session)) - key = Callback(DownloadingFilesMenu, title=longstringObjs['watch_title'], uid=longstringObjs['uid'], choice=None, session=session, status=status) - elif status == common.DOWNLOAD_STATUS[1]: # Downloading - if each not in common.DOWNLOAD_STATS.keys() and len(common.DOWNLOAD_STATS.keys()) < int(Prefs['download_connections']): - longstringObjs['status'] = common.DOWNLOAD_STATUS[1] # Downloading - longstringObjs['action'] = common.DOWNLOAD_ACTIONS[4] # Start Download - Dict[each] = E(JSON.StringFromObject(longstringObjs)) - - #longstringObjs['status'] = common.DOWNLOAD_STATUS[1] - #common.DOWNLOAD_STATS[each] = Dict[each] - #doTrigger = True - - EncTxt = E(JSON.StringFromObject(longstringObjs)) - Thread.Create(download.do_download, {}, file_meta_enc=EncTxt) - elif each not in common.DOWNLOAD_STATS.keys(): - longstringObjs['status'] = common.DOWNLOAD_STATUS[0] # Queued - longstringObjs['action'] = common.DOWNLOAD_ACTIONS[4] # Start Download - Dict[each] = E(JSON.StringFromObject(longstringObjs)) - doTrigger = True - else: - longstringObjs = common.DOWNLOAD_STATS[each] - - try: - eta = float(longstringObjs['eta']) - except: - eta = '?' - - if eta == '?' or str(eta) == '0': - eta_str = 'calculating time' - elif eta < 0.1: - eta_str = 'almost done' - elif eta < 1: - eta_str = '%02d sec. remaining' % int(int(float(eta) * 60.0)) - elif eta > 60: - eta_str = '%s hr. %02d min. %02d sec. remaining' % (int(int(eta)/60), (float(int(int(eta)/60))-float(int((float(eta)/60.0)/100)*100)), int(60 * (float(eta) - float(int(eta))))) - else: - eta_str = '%s min. %02d sec. 
remaining' % (int(eta), int(60 * (float(eta) - float(int(eta))))) - - wtitle = '%s (%s) | %s | %s - %s | %s [%s] | %s - %s | %s | %s MB/s ~ %s MB/s ~ %s MB/s | %s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['provider'] if 'provider' in longstringObjs.keys() else 'N/A', longstringObjs['status'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', str(longstringObjs['chunk_speed']), str(longstringObjs['avg_speed_curr']), str(longstringObjs['avg_speed']), str(eta_str), common.GetEmoji(type=has_sub, mode='simple', session=session)) - key = Callback(DownloadingFilesMenu, title=longstringObjs['watch_title'], uid=longstringObjs['uid'], choice=None, session=session, status=status) - elif status == common.DOWNLOAD_STATUS[2]: # Completed - wtitle = '%s (%s) | %s | %s - %s | %s [%s] | %s - %s | %s | %s MB/s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['provider'] if 'provider' in longstringObjs.keys() else 'N/A', longstringObjs['status'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', str(longstringObjs['avg_speed_curr']), common.GetEmoji(type=has_sub, mode='simple', session=session)) - key = Callback(DownloadingFilesMenu, title=longstringObjs['watch_title'], uid=longstringObjs['uid'], choice=None, session=session, status=status) - elif status == common.DOWNLOAD_STATUS[3]: # Failed - err = longstringObjs['last_error'] if longstringObjs['error'] == '' else longstringObjs['error'] - wtitle = '%s (%s) | %s | %s - %s | %s | %s | %s - %s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], str(longstringObjs['progress'])+'%', longstringObjs['status'], err) - key = Callback(DownloadingFilesMenu, title=longstringObjs['watch_title'], uid=longstringObjs['uid'], choice=None, session=session, status=status) - summary = '%s | %s' % (wtitle, summary) - elif status == common.DOWNLOAD_STATUS[4]: # Requested - if 'user' in longstringObjs.keys() and longstringObjs['user'] != None and AuthTools.CheckAdmin() == True: - wtitle = '%s (%s) | %s | %s - %s | %s | %s (by %s) - %s | %s | %s MB/s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['status'], longstringObjs['user'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', str(longstringObjs['avg_speed_curr']), common.GetEmoji(type=has_sub, mode='simple', session=session)) + if doSave == True: + Dict['DOWNLOAD_AUTOPILOT'] = E(JSON.StringFromObject(common.DOWNLOAD_AUTOPILOT)) + Dict.Save() + else: + for each in Dict: + if 'Down5Split' in each: + try: + longstringObjs = JSON.ObjectFromString(D(Dict[each])) + if 'watch_title' not in longstringObjs.keys(): + if longstringObjs['type'] == 'show': + try: + if int(longstringObjs['episode']) < 100: + longstringObjs['watch_title'] = '%s S%sE%02d' % (longstringObjs['title'],int(longstringObjs['season']),int(longstringObjs['episode'])) + else: + longstringObjs['watch_title'] = '%s S%sE%03d' % 
(longstringObjs['title'],int(longstringObjs['season']),int(longstringObjs['episode'])) + except Exception as e: + Log('Error in Downloads > %s' % e) + longstringObjs['watch_title'] = longstringObjs['title'] else: - wtitle = '%s (%s) | %s | %s - %s | %s | %s - %s | %s | %s MB/s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['status'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', str(longstringObjs['avg_speed_curr']), common.GetEmoji(type=has_sub, mode='simple', session=session)) - key = Callback(DownloadingFilesMenu, title=longstringObjs['watch_title'], uid=longstringObjs['uid'], choice=None, session=session, status=status) - elif status == common.DOWNLOAD_STATUS[5]: # All - if longstringObjs['status'] == common.DOWNLOAD_STATUS[1]: # Downloading + longstringObjs['watch_title'] = longstringObjs['title'] + + if longstringObjs['status'] == status or status == common.DOWNLOAD_STATUS[5]: # All + timestr = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(longstringObjs['timeAdded']))) + key = None + summary = longstringObjs['summary'] + has_sub = False if longstringObjs['sub_url'] == None else True + + if status == common.DOWNLOAD_STATUS[0]: # Queued + wtitle = '%s (%s) | %s | %s - %s | %s [%s] | %s - %s | %s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['provider'] if 'provider' in longstringObjs.keys() else 'N/A', longstringObjs['status'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', common.GetEmoji(type=has_sub, mode='simple', session=session)) + key = Callback(DownloadingFilesMenu, title=longstringObjs['watch_title'], uid=longstringObjs['uid'], choice=None, session=session, status=status) + elif status == common.DOWNLOAD_STATUS[1]: # Downloading if each not in common.DOWNLOAD_STATS.keys() and len(common.DOWNLOAD_STATS.keys()) < int(Prefs['download_connections']): - longstringObjs['status'] = common.DOWNLOAD_STATUS[1] - longstringObjs['action'] = common.DOWNLOAD_ACTIONS[4] + longstringObjs['status'] = common.DOWNLOAD_STATUS[1] # Downloading + longstringObjs['action'] = common.DOWNLOAD_ACTIONS[4] # Start Download Dict[each] = E(JSON.StringFromObject(longstringObjs)) + #longstringObjs['status'] = common.DOWNLOAD_STATUS[1] + #common.DOWNLOAD_STATS[each] = Dict[each] + #doTrigger = True + EncTxt = E(JSON.StringFromObject(longstringObjs)) Thread.Create(download.do_download, {}, file_meta_enc=EncTxt) elif each not in common.DOWNLOAD_STATS.keys(): - longstringObjs['status'] = common.DOWNLOAD_STATUS[0] - longstringObjs['action'] = common.DOWNLOAD_ACTIONS[4] + longstringObjs['status'] = common.DOWNLOAD_STATUS[0] # Queued + longstringObjs['action'] = common.DOWNLOAD_ACTIONS[4] # Start Download Dict[each] = E(JSON.StringFromObject(longstringObjs)) doTrigger = True else: @@ -921,112 +1325,176 @@ def Downloads(title, session = None, status = None, refresh = 0, isDir='N', **kw else: eta_str = '%s min. %02d sec. 
remaining' % (int(eta), int(60 * (float(eta) - float(int(eta))))) - wtitle = '%s (%s) | %s | %s - %s | %s | %s - %s | %s | %s MB/s ~ %s MB/s ~ %s MB/s | %s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['status'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', str(longstringObjs['chunk_speed']), str(longstringObjs['avg_speed_curr']), str(longstringObjs['avg_speed']), str(eta_str), common.GetEmoji(type=has_sub, mode='simple', session=session)) - else: - wtitle = '%s (%s) | %s | %s - %s | %s | %s - %s | %s | %s MB/s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['status'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', str(longstringObjs['avg_speed_curr']), common.GetEmoji(type=has_sub, mode='simple', session=session)) + wtitle = '%s (%s) | %s | %s - %s | %s [%s] | %s - %s | %s | %s MB/s ~ %s MB/s ~ %s MB/s | %s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['provider'] if 'provider' in longstringObjs.keys() else 'N/A', longstringObjs['status'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', str(longstringObjs['chunk_speed']), str(longstringObjs['avg_speed_curr']), str(longstringObjs['avg_speed']), str(eta_str), common.GetEmoji(type=has_sub, mode='simple', session=session)) + key = Callback(DownloadingFilesMenu, title=longstringObjs['watch_title'], uid=longstringObjs['uid'], choice=None, session=session, status=status) + elif status == common.DOWNLOAD_STATUS[2]: # Completed + wtitle = '%s (%s) | %s | %s - %s | %s [%s] | %s - %s | %s | %s MB/s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['provider'] if 'provider' in longstringObjs.keys() else 'N/A', longstringObjs['status'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', str(longstringObjs['avg_speed_curr']), common.GetEmoji(type=has_sub, mode='simple', session=session)) + key = Callback(DownloadingFilesMenu, title=longstringObjs['watch_title'], uid=longstringObjs['uid'], choice=None, session=session, status=status) + elif status == common.DOWNLOAD_STATUS[3]: # Failed + err = longstringObjs['last_error'] if longstringObjs['error'] == '' else longstringObjs['error'] + wtitle = '%s (%s) | %s | %s - %s | %s | %s | %s - %s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], str(longstringObjs['progress'])+'%', longstringObjs['status'], err) + key = Callback(DownloadingFilesMenu, title=longstringObjs['watch_title'], uid=longstringObjs['uid'], choice=None, session=session, status=status) + summary = '%s | %s' % (wtitle, summary) + elif status == common.DOWNLOAD_STATUS[4]: # Requested + if 'user' in longstringObjs.keys() and longstringObjs['user'] != None and AuthTools.CheckAdmin() == True: + wtitle = '%s (%s) | %s | %s - %s | %s | %s (by %s) - %s | %s | %s MB/s | Subtitle:%s' % 
(longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['status'], longstringObjs['user'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', str(longstringObjs['avg_speed_curr']), common.GetEmoji(type=has_sub, mode='simple', session=session)) + else: + wtitle = '%s (%s) | %s | %s - %s | %s | %s - %s | %s | %s MB/s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['status'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', str(longstringObjs['avg_speed_curr']), common.GetEmoji(type=has_sub, mode='simple', session=session)) + key = Callback(DownloadingFilesMenu, title=longstringObjs['watch_title'], uid=longstringObjs['uid'], choice=None, session=session, status=status) + elif status == common.DOWNLOAD_STATUS[5]: # All + if longstringObjs['status'] == common.DOWNLOAD_STATUS[1]: # Downloading + if each not in common.DOWNLOAD_STATS.keys() and len(common.DOWNLOAD_STATS.keys()) < int(Prefs['download_connections']): + longstringObjs['status'] = common.DOWNLOAD_STATUS[1] + longstringObjs['action'] = common.DOWNLOAD_ACTIONS[4] + Dict[each] = E(JSON.StringFromObject(longstringObjs)) + + EncTxt = E(JSON.StringFromObject(longstringObjs)) + Thread.Create(download.do_download, {}, file_meta_enc=EncTxt) + elif each not in common.DOWNLOAD_STATS.keys(): + longstringObjs['status'] = common.DOWNLOAD_STATUS[0] + longstringObjs['action'] = common.DOWNLOAD_ACTIONS[4] + Dict[each] = E(JSON.StringFromObject(longstringObjs)) + doTrigger = True + else: + longstringObjs = common.DOWNLOAD_STATS[each] + + try: + eta = float(longstringObjs['eta']) + except: + eta = '?' + + if eta == '?' or str(eta) == '0': + eta_str = 'calculating time' + elif eta < 0.1: + eta_str = 'almost done' + elif eta < 1: + eta_str = '%02d sec. remaining' % int(int(float(eta) * 60.0)) + elif eta > 60: + eta_str = '%s hr. %02d min. %02d sec. remaining' % (int(int(eta)/60), (float(int(int(eta)/60))-float(int((float(eta)/60.0)/100)*100)), int(60 * (float(eta) - float(int(eta))))) + else: + eta_str = '%s min. %02d sec. 
remaining' % (int(eta), int(60 * (float(eta) - float(int(eta))))) + + wtitle = '%s (%s) | %s | %s - %s | %s | %s - %s | %s | %s MB/s ~ %s MB/s ~ %s MB/s | %s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['status'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', str(longstringObjs['chunk_speed']), str(longstringObjs['avg_speed_curr']), str(longstringObjs['avg_speed']), str(eta_str), common.GetEmoji(type=has_sub, mode='simple', session=session)) + else: + wtitle = '%s (%s) | %s | %s - %s | %s | %s - %s | %s | %s MB/s | Subtitle:%s' % (longstringObjs['watch_title'], longstringObjs['year'], longstringObjs['type'].title(), longstringObjs['fs'], longstringObjs['quality'], longstringObjs['source'], longstringObjs['status'], common.DOWNLOAD_ACTIONS_K[longstringObjs['action']], str(longstringObjs['progress'])+'%', str(longstringObjs['avg_speed_curr']), common.GetEmoji(type=has_sub, mode='simple', session=session)) + + key = Callback(DownloadingFilesMenu, title=longstringObjs['watch_title'], uid=longstringObjs['uid'], choice=None, session=session, status=longstringObjs['status']) - key = Callback(DownloadingFilesMenu, title=longstringObjs['watch_title'], uid=longstringObjs['uid'], choice=None, session=session, status=longstringObjs['status']) - - oc.add(DirectoryObject( - title = wtitle, - key = key, - thumb = common.GetThumb(longstringObjs['thumb'], session=session), - tagline = timestr, - summary = summary + oc.add(DirectoryObject( + title = wtitle, + key = key, + thumb = common.GetThumb(longstringObjs['thumb'], session=session), + tagline = timestr, + summary = summary + ) ) - ) - except Exception as e: - Log("==============Downloads==============") - #Log(longstringObjs) - Log(e) - #Log(common.DOWNLOAD_STATS) - #items_to_del.append(each) - - if len(items_to_del) > 0: - for each in items_to_del: - if each in common.DOWNLOAD_STATS.keys(): - del common.DOWNLOAD_STATS[each] - - try: - encoded_str = Dict[each] - decoded_str = D(encoded_str) - longstringObjs = JSON.ObjectFromString(decoded_str) - Log(longstringObjs) - if 'temp_file' in longstringObjs: - filepath = longstringObjs['temp_file'] - try: - Core.storage.remove_data_item(filepath) except Exception as e: - Log("=============ClearDownLoadSection Error============") + Log("==============Downloads==============") + #Log(longstringObjs) Log(e) - Log("Deleting: %s" % longstringObjs['watch_title']) - del Dict[each] - except: - Log("Deleting: %s" % each) - del Dict[each] - - Dict.Save() - - if doTrigger == True: - Thread.Create(download.trigger_que_run) + #Log(common.DOWNLOAD_STATS) + #items_to_del.append(each) + + if len(items_to_del) > 0: + for each in items_to_del: + if each in common.DOWNLOAD_STATS.keys(): + del common.DOWNLOAD_STATS[each] + + try: + encoded_str = Dict[each] + decoded_str = D(encoded_str) + longstringObjs = JSON.ObjectFromString(decoded_str) + Log(longstringObjs) + if 'temp_file' in longstringObjs: + filepath = longstringObjs['temp_file'] + try: + Core.storage.remove_data_item(filepath) + except Exception as e: + Log("=============ClearDownLoadSection Error============") + Log(e) + Log("Deleting: %s" % longstringObjs['watch_title']) + del Dict[each] + except: + Log("Deleting: %s" % each) + del Dict[each] + + Dict.Save() + + if doTrigger == True: + Thread.Create(download.trigger_que_run) - if len(oc) == 0: - return MC.message_container(title, 
'No %s section videos available' % status) + if len(oc) == 0: + return MC.message_container(title, 'No %s section videos available' % status) + + if isDir == 'Y': + oc.objects.sort(key=lambda obj: obj.title, reverse=False) + else: + oc.objects.sort(key=lambda obj: obj.tagline, reverse=not common.UsingOption(key=common.DEVICE_OPTIONS[12], session=session)) - oc.objects.sort(key=lambda obj: obj.tagline, reverse=not common.UsingOption(key=common.DEVICE_OPTIONS[12], session=session)) - - if status != None: - if status == common.DOWNLOAD_STATUS[3]: - oc.add(DirectoryObject( - title = 'Retry All Downloads', - key = Callback(RetryFailedDownloads, session=session), - summary = 'Retry Failed Downloads', - thumb = common.GetThumb(R(common.ICON_REFRESH), session=session) + if status != None: + if status == common.DOWNLOAD_STATUS[3]: + oc.add(DirectoryObject( + title = 'Retry All Downloads', + key = Callback(RetryFailedDownloads, session=session), + summary = 'Retry Failed Downloads', + thumb = common.GetThumb(R(common.ICON_REFRESH), session=session) + ) ) - ) - elif status == common.DOWNLOAD_STATUS[1]: - oc.add(DirectoryObject( - title = 'Pause %s Downloads' % status, - key = Callback(PauseDownloadingDownloads, session=session), - summary = 'Pause %s Download Entries' % status, - thumb = common.GetThumb(R(common.ICON_ENTER), session=session) + elif status == common.DOWNLOAD_STATUS[1]: + oc.add(DirectoryObject( + title = 'Pause %s Downloads' % status, + key = Callback(PauseDownloadingDownloads, session=session), + summary = 'Pause %s Download Entries' % status, + thumb = common.GetThumb(R(common.ICON_ENTER), session=session) + ) + ) + oc.add(DirectoryObject( + title = 'Postpone %s Downloads' % status, + key = Callback(PostponeDownloadingDownloads, session=session), + summary = 'Postpone %s Download Entries' % status, + thumb = common.GetThumb(R(common.ICON_ENTER), session=session) + ) + ) + if isDir == 'Y': + i = json.loads(D(item)) + oc.add(DirectoryObject( + title = 'Update Entry for %s' % title, + key = Callback(AddToAutoPilotDownloads, title=i['short_title'], year=i['year'], type=i['type'], purl=i['purl'], thumb=i['thumb'], summary=i['summary'], quality=None, file_size=None, riptype=i['riptype'], season=i['season'], season_end=i['season'], episode_start=int(first_episode), episode_end=int(last_episode), vidtype=i['vidtype'], section_path=None, section_title=None, section_key=None, session=session, admin=i['admin'], all_seasons=False, edit=False, mode=None), + summary = 'Update Entry for %s' % title, + thumb = common.GetThumb(R(common.ICON_UPDATER), session=session) + ) ) - ) oc.add(DirectoryObject( - title = 'Postpone %s Downloads' % status, - key = Callback(PostponeDownloadingDownloads, session=session), - summary = 'Postpone %s Download Entries' % status, - thumb = common.GetThumb(R(common.ICON_ENTER), session=session) + title = 'Refresh %s Downloads' % status, + key = Callback(Downloads,title=title, status=status, session=session, refresh=int(refresh)+1, isDir=isDir, item=item), + summary = 'Refresh %s Download Entries' % status, + thumb = common.GetThumb(R(common.ICON_REFRESH), session=session) ) ) - oc.add(DirectoryObject( - title = 'Refresh %s Downloads' % status, - key = Callback(Downloads,title=title, status=status, session=session, refresh=int(refresh)+1, isDir=isDir), - summary = 'Refresh %s Download Entries' % status, - thumb = common.GetThumb(R(common.ICON_REFRESH), session=session) - ) - ) - if isDir == 'Y': - oc.add(DirectoryObject( - title = 'Clear %s %s Downloads' % (title, 
status), - key = Callback(ClearDownLoadSection, status=status, session=session, dir=title), - summary = 'Remove %s %s Download Entries' % (title, status), + if isDir == 'Y': + oc.add(DirectoryObject( + title = 'Clear %s %s Downloads' % (title, status), + key = Callback(ClearDownLoadSection, status=status, session=session, dir=title), + summary = 'Remove %s %s Download Entries' % (title, status), + thumb = common.GetThumb(R(common.ICON_NOTOK), session=session) + ) + ) + else: + oc.add(DirectoryObject( + title = 'Clear %s Downloads' % status, + key = Callback(ClearDownLoadSection, status=status, session=session), + summary = 'Remove %s Download Entries' % status, thumb = common.GetThumb(R(common.ICON_NOTOK), session=session) ) ) - else: - oc.add(DirectoryObject( - title = 'Clear %s Downloads' % status, - key = Callback(ClearDownLoadSection, status=status, session=session), - summary = 'Remove %s Download Entries' % status, - thumb = common.GetThumb(R(common.ICON_NOTOK), session=session) - ) - ) - - #oc.objects.sort(key=lambda obj: obj.title, reverse=False) - oc.add(DirectoryObject(key = Callback(main.MainMenu), title = '<< Main Menu', thumb = R(common.ICON))) - - return oc + + #oc.objects.sort(key=lambda obj: obj.title, reverse=False) + oc.add(DirectoryObject(key = Callback(main.MainMenu), title = '<< Main Menu', thumb = R(common.ICON))) + + return oc + except Exception as e: + Log.Error(e) + return MC.message_container('Downloads', 'An error occurred. Please try again !') ###################################################################################### @route(PREFIX + "/DownloadingFilesMenu") @@ -1049,6 +1517,8 @@ def DownloadingFilesMenu(title, uid, choice=None, session=None, status=None, con elif choice == common.DOWNLOAD_ACTIONS[0] and confirm == True and uid == i['uid']: common.DOWNLOAD_AUTOPILOT[k].remove(i) + Dict['DOWNLOAD_AUTOPILOT'] = E(JSON.StringFromObject(common.DOWNLOAD_AUTOPILOT)) + Dict.Save() return MC.message_container('Removed', 'Item has been removed') else: if uid == i['uid']: @@ -1068,13 +1538,43 @@ def DownloadingFilesMenu(title, uid, choice=None, session=None, status=None, con else: wtitle = '%s (%s) | %s | %s | %s | %s' % (i['title'], i['year'], k.title(), q_fs, i['status'], timestr) - key = Callback(DownloadingFilesMenu, title=i['watch_title'], uid=i['uid'], choice=common.DOWNLOAD_ACTIONS[0], session=session, status=status, autopilot=autopilot, type=type) + oc.add(DirectoryObject( title = 'Delete Entry - %s' % i['watch_title'], thumb = common.GetThumb(i['thumb'], session=session), summary = 'Delete this entry from the Auto-Pilot list', tagline = timestr, - key = key + key = Callback(DownloadingFilesMenu, title=i['watch_title'], uid=i['uid'], choice=common.DOWNLOAD_ACTIONS[0], session=session, status=status, autopilot=autopilot, type=type) + ) + ) + oc.add(DirectoryObject( + title = 'Run Now - %s' % i['watch_title'], + thumb = common.GetThumb(R(common.ICON_ENTER), session=session), + summary = 'Perform an AutoPilot Run Now', + key = Callback(AutoPilotDownloadThreadCall, item=E(JSON.StringFromObject(i))) + ) + ) + if i['purl'] != None: + oc.add(DirectoryObject( + title = 'Video Page', + summary = 'Video Page: %s' % i['watch_title'], + key = Callback(main.EpisodeDetail, title=i['watch_title'], url=i['purl'], thumb=i['thumb'], session = session), + thumb = common.GetThumb(R(common.ICON_ENTER), session=session) + ) + ) + else: + oc.add(DirectoryObject( + title = 'Video Page (Unavailable)', + summary = 'Video Page: %s' % i['watch_title'], + key = 
Callback(main.MyMessage, title='Video Page', msg='This Video Page is Unavailable'), + thumb = common.GetThumb(R(common.ICON_ENTER), session=session) + ) + ) + oc.add(DirectoryObject( + title = 'Update Entry for %s' % i['watch_title'], + key = Callback(AddToAutoPilotDownloads, title=i['title'], year=i['year'], type=i['type'], purl=i['purl'], thumb=i['thumb'], summary=i['summary'], quality=None, file_size=None, riptype=i['riptype'], season=None, season_end=None, episode_start=None, episode_end=None, vidtype=i['vidtype'], section_path=None, section_title=None, section_key=None, session=session, admin=i['admin'], all_seasons=False, edit=False, mode=None), + summary = 'Update Entry for %s' % i['watch_title'], + thumb = common.GetThumb(R(common.ICON_UPDATER), session=session) ) ) return oc @@ -1180,6 +1680,15 @@ def DownloadingFilesMenu(title, uid, choice=None, session=None, status=None, con thumb = common.GetThumb(R(common.ICON_ENTER), session=session) ) ) + elif status == common.DOWNLOAD_STATUS[3]: + oc.add(DirectoryObject( + title = 'Remove and Add to AutoPilot', + key = Callback(AutoPilotDownloadCall, item=E(JSON.StringFromObject(longstringObjs)), uid=uid, session=session), + summary = 'Remove this from Downloads and Add to AutoPilot', + thumb = common.GetThumb(R(common.ICON_ENTER), session=session) + ) + ) + oc.add(DirectoryObject( title = 'Refresh', key = Callback(DownloadingFilesMenu, title=title, uid=uid, choice=choice, session=session, status=status, confirm=confirm, refresh=int(refresh)+1), diff --git a/Contents/Code/interface.py b/Contents/Code/interface.py index 84ca908..4e137a5 100644 --- a/Contents/Code/interface.py +++ b/Contents/Code/interface.py @@ -171,16 +171,24 @@ def searchOMDB(title, year=None, doSearch=False, ver=None): Log("interface.py>searchOMDB() >> : >>> %s" % (e)) return None -def requestOMDB(title, year=None, season=None, imdb=None, ver=None): +def requestOMDB(title, year=None, season=None, imdb=None, ver=None, timeout=None): try: if Prefs["use_debug"]: Log("OMDB Request: Title:%s Year:%s Season:%s imdb:%s" % (title,year,season,imdb)) + if timeout == None: + timeout = 10 + else: + try: + timeout = int(timeout) + except: + timeout = 10 + c = 0 res = None while res == None and c < 3: try: - res = omdb.request(t=title, y=year, Season=season, i=imdb, c=Prefs['ca_api_key'], ver=ver, r='json', timeout=10) + res = omdb.request(t=title, y=year, Season=season, i=imdb, c=Prefs['ca_api_key'], ver=ver, r='json', timeout=timeout) except Exception as e: c += 1 time.sleep(1.0) @@ -206,7 +214,7 @@ def getOMDB(title, year=None, season=None, episode=None, imdbid=None, ver=None): return res except Exception as e: - Log("interface.py>requestOMDB() >> : >>> %s" % (e)) + Log("interface.py>getOMDB() >> : >>> %s" % (e)) return None def clearSources(): @@ -319,6 +327,12 @@ def getProviders(encode=True): return E(JSON.StringFromObject(initA[0].getProviders())) +def getHostResolverMain(): + if wait_for_init() == False: + return + + return initA[0].getHostResolverMain() + def getProvidersInitStatus(): if wait_for_init() == False: return 0 @@ -396,11 +410,11 @@ def getHostsLoggerTxts(choice=None, dumpToLog=True): #Log(" === LOGGER txt END === ") return list(reversed(loggertxt)) -def getControlLoggerTxts(): +def getControlLoggerTxts(forceDump=False): if wait_for_init() == False: return loggertxt = [] - if Prefs["use_debug"]: + if Prefs["use_debug"] or forceDump == True: Log(" === CONTROL txt Start ===") for txt in control.loggertxt: loggertxt.append(txt) @@ -419,7 +433,7 @@ def 
checkKeyInThread(key=None): return initA[0].checkKeyInThread(key=key) -def getExtSources(movtitle=None, year=None, tvshowtitle=None, season=None, episode=None, proxy_options=None, provider_options=None, key=None, maxcachetime=0, ver=None, imdb_id=None, session=None, timeout=None): +def getExtSources(movtitle=None, year=None, tvshowtitle=None, season=None, episode=None, proxy_options=None, provider_options=None, key=None, maxcachetime=0, ver=None, imdb_id=None, session=None, timeout=None, forceRet=False): InterfaceThread[key] = True @@ -458,7 +472,7 @@ def getExtSources(movtitle=None, year=None, tvshowtitle=None, season=None, episo # if Prefs['use_debug']: # Log("Movie: %s" % movtitle) - while initA[0].checkProgress(key) != 100: + while initA[0].checkProgress(key) != 100 and forceRet == False: time.sleep(2) try: diff --git a/Contents/Code/main.py b/Contents/Code/main.py index cabcc25..4331fc2 100644 --- a/Contents/Code/main.py +++ b/Contents/Code/main.py @@ -23,7 +23,7 @@ CAT_WHATS_HOT = [] CAT_WHATS_HOT_REGULAR = ['Sizzlers','Most Favourited','Recommended','Most Watched This Week','Most Watched This Month','Latest Movies','Latest TV-Series','Requested Movies'] CAT_WHATS_HOT_ANIME = ['Newest (Anime)','Last Update (Anime)', 'Ongoing (Anime)'] -CAT_REGULAR = ['Anime','Movies','TV-Series','Top-IMDb','Most Watched','Sitemap Listing'] +CAT_REGULAR = ['Anime','Movies','TV-Series','Top-IMDb','Most Watched','Sitemap Listing','Anime (Genre)'] CAT_FILTERS = ['Release','Genre','Country','Filter Setup >>>'] CAT_GROUPS = ['What\'s Hot ?', 'Movies & TV-Series', 'Sort using...','News & Announcements'] @@ -51,7 +51,7 @@ def MainMenu(**kwargs): fmovies.BASE_URL = Prefs["new_base_url"] - if common.CHECK_BASE_URL_REDIRECTION == True: + if common.CHECK_BASE_URL_REDIRECTION == True and common.CHECK_BASE_URL_REDIRECTION_HP == True: try: RED_URL = common.client.getRedirectingUrl(fmovies.BASE_URL).strip("/") if RED_URL != None and 'http' in RED_URL and fmovies.BASE_URL != RED_URL: @@ -134,7 +134,8 @@ def PreCacheStuff(): except Exception as e: Log("Error in geturl : %s" % e) - tools.SetAnimeBaseUrl() + Thread.Create(tools.SetAnimeBaseUrl) + PRE_CACHE_URLS = [fmovies.BASE_URL, urlparse.urljoin(fmovies.BASE_URL, 'home'), urlparse.urljoin(fmovies.BASE_URL, fmovies.SITE_MAP), common.ANIME_URL] for url in PRE_CACHE_URLS: @@ -184,6 +185,7 @@ def SleepPersistAndUpdateCookie(**kwargs): if now.hour == int(Prefs['autopilot_schedule']) and Dict['Autopilot_Schedule_Complete'] != True: if Prefs["use_debug"]: Log("Running the Auto-Pilot Scheduled Run: %s" % time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))) + common.interface.clearSources() # clear sources cached before AutoPilot run Thread.Create(downloadsmenu.AutoPilotDownloadThread1) Dict['Autopilot_Schedule_Complete'] = True Dict.Save() @@ -424,6 +426,8 @@ def Options(session, refresh=0, **kwargs): oc.add(DirectoryObject(key = Callback(DeviceOptions, session=session), title = 'Device Options', thumb = R(common.ICON_DEVICE_OPTIONS), summary='Device Specific Options includes Enabling DumbKeyboard, Redirector and List View mode')) + oc.add(DirectoryObject(key = Callback(GlobalOptions, session=session), title = 'Global Options', thumb = R(common.ICON_GLOBAL_OPTIONS), summary='Global Options that apply universally')) + msg = '%s' % (len(common.CACHE)+len(common.CACHE_META)) smsg = msg extmemory = 0 @@ -440,7 +444,7 @@ def Options(session, refresh=0, **kwargs): oc.add(DirectoryObject(key = Callback(DownloadOptions, title="Download Options", session = session), title 
= "Download Options", thumb = R(common.ICON_DOWNLOADS))) - oc.add(DirectoryObject(key = Callback(ThreadsStatus, title="Threads Status"), title = "Threads Status", thumb = R(common.ICON_SYSSTATUS))) + oc.add(DirectoryObject(key = Callback(ThreadsStatus, title="Threads Status", session=session), title = "Threads Status", thumb = R(common.ICON_SYSSTATUS))) if common.interface.isInitialized(): oc.add(DirectoryObject(key = Callback(InterfaceOptions, session=session), title = 'Interface Options', thumb = R(common.ICON_PREFS), summary='Interface for Proxies, Hosts, Providers and Playback Quality')) @@ -474,9 +478,6 @@ def DeviceOptions(session, **kwargs): session_x = session summary = common.DEVICE_OPTION[key] - if key == common.DEVICE_OPTIONS[13]: # Retry-Failed-Downloads - session_x = 'None' - bool = False try: bool = False if (Dict['Toggle'+key+session_x] == None or Dict['Toggle'+key+session_x] == 'disabled') else True @@ -490,7 +491,52 @@ def DeviceOptions(session, **kwargs): oc.add(DirectoryObject(key=Callback(common.setDictVal, key=key, val=not bool, session=session_x), title = title_msg)) c += 1 except Exception as e: - Log('DeviceOptions Error: %s' % e) + Log('Device Options Error: %s' % e) + oc.add(DirectoryObject(key=Callback(common.setDictVal, key=key, val=not bool, session=session_x), title = key)) + c += 1 + except Exception as e: + err = '%s' % e + title_msg = "%02d). %s %s | %s" % (c, '-', key, err) + oc.add(DirectoryObject(key=Callback(MyMessage, 'Info', err), title = title_msg)) + c += 1 + Log('Device Options Critical Error: %s' % e) + + return oc + +###################################################################################### +@route(PREFIX + "/globaloptions") +def GlobalOptions(session, **kwargs): + + if AuthTools.CheckAdmin() == False: + return MC.message_container('Admin Access Only', 'Only the Admin can perform this action !') + + oc = ObjectContainer(title2='Global Options', no_cache=common.isForceNoCache()) + + c = 1 + + user = common.control.setting('%s-%s' % (session, 'user')) + if user != None: + summary = 'UserName: %s' % user + title_msg = "00). %s" % summary + oc.add(DirectoryObject(key=Callback(MyMessage, 'Info', summary), title = title_msg)) + + for key in sorted(common.GLOBAL_OPTIONS): + try: + try: + session_x = 'None' + summary = common.GLOBAL_OPTION[key] + + bool = False + try: + bool = False if (Dict['Toggle'+key+session_x] == None or Dict['Toggle'+key+session_x] == 'disabled') else True + except: + pass + + title_msg = "%02d). %s %s | %s" % (c, common.GetEmoji(type=bool, mode='simple', session=session), key, summary) + oc.add(DirectoryObject(key=Callback(common.setDictVal, key=key, val=not bool, session=session_x), title = title_msg)) + c += 1 + except Exception as e: + Log('Global Options Error: %s' % e) oc.add(DirectoryObject(key=Callback(common.setDictVal, key=key, val=not bool, session=session_x), title = key)) c += 1 except Exception as e: @@ -498,24 +544,28 @@ def DeviceOptions(session, **kwargs): title_msg = "%02d). 
%s %s | %s" % (c, '-', key, err) oc.add(DirectoryObject(key=Callback(MyMessage, 'Info', err), title = title_msg)) c += 1 - Log('DeviceOptions Critical Error: %s' % e) + Log('Global Options Critical Error: %s' % e) return oc ###################################################################################### @route(PREFIX + "/ThreadsStatus") -def ThreadsStatus(refresh=0, **kwargs): +def ThreadsStatus(session, refresh=0, **kwargs): oc = ObjectContainer(title2='Threads Status', no_cache=common.isForceNoCache()) for t in common.control.getThreads(): timestrx = t['start_time'] timestr = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(timestrx))) - title_msg = 'Name:%s | Type:%s | Start Time:%s | Desc.:%s' % (t['name'], t['type'], timestr, t['desc']) - oc.add(DirectoryObject(title = title_msg, summary = title_msg, tagline = timestr, key = Callback(MC.message_container, header="Thread %s" % t['name'], message="Does Nothing"))) + if t['thread'] == None: + title_msg = 'Name:%s | Type:%s | Start Time:%s | Desc.:%s' % (t['name'], t['type'], timestr, t['desc']) + oc.add(DirectoryObject(title = title_msg, key = Callback(MC.message_container, header="Thread %s" % t['name'], message="Does Nothing"))) + else: + title_msg = 'Name:%s | Type:%s | Start Time:%s | Alive:%s | Desc.:%s' % (t['name'], t['type'], timestr, common.GetEmoji(type=t['thread'].isAlive(), mode='simple', session=session), t['desc']) + oc.add(DirectoryObject(title = title_msg, key = Callback(MC.message_container, header="Thread %s" % t['name'], message="Does Nothing"))) if len(oc) > 0: - oc.add(DirectoryObject(title = 'Refresh', key = Callback(ThreadsStatus, refresh=int(refresh)+1))) - oc.add(DirectoryObject(key = Callback(MainMenu),title = '<< Main Menu',thumb = R(common.ICON))) + oc.add(DirectoryObject(title = 'Refresh', key = Callback(ThreadsStatus, session=session, refresh=int(refresh)+1))) + oc.add(DirectoryObject(key = Callback(MainMenu),title = '<< Main Menu',thumb = None)) return oc else: return MC.message_container('Threads Status', 'No Threads Currently Running !') @@ -772,7 +822,7 @@ def ControlLog(session=None, choice=None, **kwargs): Log(" === CONTROL LOGGER txt START === ") - loggertxt = common.interface.getControlLoggerTxts() + loggertxt = common.interface.getControlLoggerTxts(forceDump = True) loggertxt = list(reversed(loggertxt)) for title_msg in loggertxt: @@ -1981,9 +2031,10 @@ def SortMenu(title, session=None, **kwargs): ###################################################################################### @route(PREFIX + "/showcategory") -def ShowCategory(title, key=' ', urlpath=None, page_count='1', session=None, **kwargs): +def ShowCategory(title, key=' ', urlpath=None, page_count='1', session=None, is9anime = 'False', **kwargs): + + title = unicode(title) - is9anime = 'False' if urlpath != None: newurl = urlpath + '?page=%s' % page_count else: @@ -1993,8 +2044,11 @@ def ShowCategory(title, key=' ', urlpath=None, page_count='1', session=None, **k newurl = urlparse.urljoin(fmovies.BASE_URL , '/genre/' + key.lower() + '?page=%s' % page_count) elif title == CAT_FILTERS[2]: newurl = urlparse.urljoin(fmovies.BASE_URL , '/country/' + key.lower() + '?page=%s' % page_count) - elif title == CAT_REGULAR[0]: - newurl = urlparse.urljoin(common.ANIME_URL , '/newest' + '?page=%s' % page_count) + elif title == CAT_REGULAR[0] or title == CAT_REGULAR[6]: # Anime + if key == ' ': + newurl = urlparse.urljoin(common.ANIME_URL , '/newest' + '?page=%s' % page_count) + else: + newurl = urlparse.urljoin(common.ANIME_URL , 
'/genre/' + key.lower() + '?page=%s' % page_count) is9anime = 'True' elif title == CAT_REGULAR[1]: newurl = urlparse.urljoin(fmovies.BASE_URL , '/movies' + '?page=%s' % page_count) @@ -2034,13 +2088,29 @@ def ShowCategory(title, key=' ', urlpath=None, page_count='1', session=None, **k limit_y = int(page_count) * 50 for i in range(limit_x, limit_y): elems.append(elems_all[i]) - elif title == CAT_REGULAR[0]: # Anime - elems = page_data.xpath(".//*//div[@class='film-list']//div[@class='item']") - last_page_no = int(page_count) - try: - last_page_no = int(page_data.xpath(".//*//span[@class='total']//text()")[0]) - except: - pass + elif title == CAT_REGULAR[0] or title == CAT_REGULAR[6]: # Anime + if title == CAT_REGULAR[0] or key != ' ': + elems = page_data.xpath(".//*//div[@class='film-list']//div[@class='item']") + last_page_no = int(page_count) + try: + last_page_no = int(page_data.xpath(".//*//span[@class='total']//text()")[0]) + except: + pass + else: + elems = page_data.xpath(".//*//ul[@class='sub']//li//a") + oc = ObjectContainer(title2 = title, no_cache=common.isForceNoCache()) + for elem in elems: + gen = elem.xpath(".//@title")[0] + key = elem.xpath(".//@href")[0].replace('/genre/','').strip() + oc.add(DirectoryObject( + key = Callback(ShowCategory, title = title, key = key, session = session, is9anime = is9anime), + title = gen + ) + ) + if len(oc) > 0: + return oc + else: + return MC.message_container(title, 'Could not find %s listing' % title) else: elems = page_data.xpath(".//*[@id='body-wrapper']//div[@class='row movie-list']//div[@class='item']") last_page_no = int(page_count) @@ -2083,10 +2153,16 @@ def ShowCategory(title, key=' ', urlpath=None, page_count='1', session=None, **k eps_nos = '' title_eps_no = '' try: - eps_nos = elem.xpath(".//div[@class='status']//span//text()")[0] - eps_no_i = str(int(eps_nos.strip())) - title_eps_no = ' (Eps:'+eps_no_i+')' - eps_nos = ' Episodes: ' + eps_no_i + if is9anime == 'True': + eps_nos = elem.xpath(".//*//div[@class='ep']//text()")[0] + eps_no_i = eps_nos.strip() + title_eps_no = ' (%s)' % eps_no_i + eps_nos = title_eps_no + else: + eps_nos = elem.xpath(".//div[@class='status']//span//text()")[0] + eps_no_i = str(int(eps_nos.strip())) + title_eps_no = ' (Eps:'+eps_no_i+')' + eps_nos = ' Episodes: ' + eps_no_i except: pass try: @@ -2110,7 +2186,8 @@ def ShowCategory(title, key=' ', urlpath=None, page_count='1', session=None, **k thumb = R(common.ICON_NEXT) ) ) - + DumbKeyboard(PREFIX, oc, GetInput, dktitle = 'Input Page: (1-%s)' % last_page_no, dkthumb=common.GetThumb(R(common.ICON_DK_ENABLE), session=session), dkNumOnly=True, dkHistory=False, title=title, key=key, urlpath=urlpath, page_count=page_count, session=session, is9anime=is9anime, method='ShowCategory') + if common.UsingOption(key=common.DEVICE_OPTIONS[0], session=session): DumbKeyboard(PREFIX, oc, Search, dktitle = 'Search', @@ -2130,6 +2207,20 @@ def ShowCategory(title, key=' ', urlpath=None, page_count='1', session=None, **k ) return oc + +###################################################################################### +@route(PREFIX + "/getInput") +def GetInput(query, title, key, urlpath, page_count, session, method, is9anime='False', **kwargs): + + try: + int(query) + except: + query = page_count + + if method == 'ShowCategory': + return ShowCategory(title=title, key=key, urlpath=urlpath, page_count=query, session=session, is9anime=is9anime) + else: + return ShowCategory(title=title, key=key, urlpath=urlpath, page_count=query, session=session, is9anime=is9anime) 
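# A standalone sketch (hypothetical names, not part of the patch) of the page-input
# handling GetInput performs above. Both dispatch branches of GetInput currently
# resolve to ShowCategory, so the essential behaviour is: coerce the keyboard entry
# to an int and fall back to the page the user is already on when the input is not
# numeric. GetInput itself does no range check; the clamp below is an optional
# hardening against out-of-range entries.

def coerce_page(query, current_page, last_page):
    try:
        page = int(query)
    except (TypeError, ValueError):
        return int(current_page)  # non-numeric input keeps the current page
    return max(1, min(page, int(last_page)))  # clamp into the site's 1..last_page range

# e.g. coerce_page('7', current_page='3', last_page=12)   -> 7
#      coerce_page('abc', current_page='3', last_page=12) -> 3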
###################################################################################### @route(PREFIX + "/episodedetail") @@ -2164,10 +2255,11 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** serverid = None similar_reccos = [] tags = 'Not Available' + riptype = '' oc = ObjectContainer(title2 = title, no_cache=common.isForceNoCache()) - if dataEXS==None and dataEXSAnim==None and is9anime == 'False': + if dataEXS==None and dataEXSAnim==None and is9anime == 'False' and common.ES_API_KEY not in url: if Prefs["use_debug"]: Log("============================= Processing f/b - movies ===============================") @@ -2234,6 +2326,11 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** duration = int(page_data.xpath(".//*[@id='info']//div[@class='info col-md-19']//span[2]//b//text()")[0].strip('/episode').strip(' min')) except: duration = 'Not Available' + + try: + riptype = page_data.xpath(".//*[@id='info']//div[@class='info col-md-19']//span[@class='quality']//text()")[0].strip() + except: + riptype = '' try: genre0 = page_data.xpath(".//*[@id='info']//dl[@class='meta col-sm-12'][1]//dd[1]//a//text()") @@ -2311,7 +2408,6 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** try: summary = unicode(common.ascii_only(summary)) - #summary = unicode(str(summary).replace('"','').replace('\u00','')) except: summary = 'Not Available' @@ -2378,69 +2474,200 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** label = label.replace('Server F','Google-F') if 'Server G' in label: label = label.replace('Server G','Google-G') - + if label in common.FMOVIES_SERVER_REMAP.keys(): + label = label.replace(label,common.FMOVIES_SERVER_REMAP[label]) + if True: server_lab.append(label) + if common.DEV_DEBUG == True: + Log('-- %s --' % label) + items = server.xpath(".//ul//li") if len(items) > 1: isMovieWithMultiPart = True servers_list[label] = [] - servers_list[common.SERVER_PLACEHOLDER] = [] + if common.SERVER_PLACEHOLDER not in servers_list.keys(): + servers_list[common.SERVER_PLACEHOLDER] = [] c=0 for item in items: servers_list[label].append([]) servers_list[label][c]={} - servers_list[common.SERVER_PLACEHOLDER].append([]) - servers_list[common.SERVER_PLACEHOLDER][c]={} label_qual = item.xpath(".//a//text()")[0].strip() + label_val = item.xpath(".//a//@data-id")[0] servers_list[label][c]['quality'] = label_qual servers_list[label][c]['loc'] = label_val servers_list[label][c]['serverid'] = serverid - servers_list[common.SERVER_PLACEHOLDER][c] = servers_list[label][c] + + doFill = True + if isTvSeries == True and len(servers_list[common.SERVER_PLACEHOLDER]) > 0: + try: + for cx in servers_list[common.SERVER_PLACEHOLDER]: + if cx['quality'] == servers_list[label][c]['quality'] and cx['loc'] != '': + if common.DEV_DEBUG == True: + Log('%s == %s' % (int(cx['quality']), int(servers_list[label][c]['quality']))) + doFill = False + break + except: + pass + + if doFill == True and len(servers_list[label][c]) > 0: + if common.DEV_DEBUG == True: + Log('c = %s' % servers_list[label][c]) + if len(servers_list[common.SERVER_PLACEHOLDER]) <= c: + servers_list[common.SERVER_PLACEHOLDER].append([]) + servers_list[common.SERVER_PLACEHOLDER][c]={'loc':''} + + if servers_list[common.SERVER_PLACEHOLDER][c]['loc'] == '': + servers_list[common.SERVER_PLACEHOLDER][c] = servers_list[label][c] + c += 1 + + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('================= servers_list-0 
===============') + Log(servers_list) + + # servers_list_1 = {} + # for k in servers_list.keys(): + # servers_list_1[k]=[] + # for s in servers_list[k]: + # if len(s) != 0: + # servers_list_1[k].append(s) + # servers_list = servers_list_1 + + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('================= servers_list-1 ===============') + Log(servers_list) # label array of servers available - sort them so that presentation order is consistent server_lab = sorted(server_lab) - if len(server_lab) == 0: - server_lab.append(common.SERVER_PLACEHOLDER) + + #if len(server_lab) == 0: + server_lab.insert(0,common.SERVER_PLACEHOLDER) # remap server list - this way its easier to iterate for tv-show episodes servers_list_new = [] c=0 - + c_p=0 + try: + m_min = servers_list[common.SERVER_PLACEHOLDER][0]['quality'] + if '-' in m_min: + m_min = m_min.split('-') + m_min = m_min[0] + m_min = filter(lambda x: x.isdigit(), m_min) + m_min = int(m_min) + except: + m_min = 0 + try: + m_max = servers_list[common.SERVER_PLACEHOLDER][len(servers_list[common.SERVER_PLACEHOLDER])-1]['quality'] + if '-' in m_max: + m_max = m_max.split('-') + try: + m_max = str(int(m_max[1])) + except: + m_max = m_max[0] + m_max = filter(lambda x: x.isdigit(), m_max) + m_max = int(m_max)+1 + except: + m_max = 1 + + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('======== Fill missing %s - %s ========' % (min(m_min,1), max(len(servers_list[common.SERVER_PLACEHOLDER]),m_max))) + nos = 1-min(m_min,1) if len(servers_list) > 0: - for no in range(max(len(servers_list[common.SERVER_PLACEHOLDER]),len(servers_list['MyCloud']))): + for no in range(min(m_min,1), max(len(servers_list[common.SERVER_PLACEHOLDER]),m_max)): servers_list_new.append([]) - servers_list_new[c] = {} + servers_list_new[no-1+nos] = {} + skip_c = False for label in servers_list.keys(): - servers_list_new[c][label] = {} + if c > 99: + servers_list_new[no-1+nos][label] = {'quality':"%03d" % (no), 'loc':'', 'serverid':None} + else: + servers_list_new[no-1+nos][label] = {'quality':"%02d" % (no), 'loc':'', 'serverid':None} try: - if 'MyCloud' in servers_list_new[c].keys() and len(servers_list_new[c]['MyCloud'].keys()) > 0: - if servers_list_new[c]['MyCloud']['quality'] != servers_list[label][c]['quality']: - raise - servers_list_new[c][label] = {'quality':servers_list[label][c]['quality'], 'loc':servers_list[label][c]['loc'], 'serverid':servers_list[label][c]['serverid']} - except: - if c > 99: - servers_list_new[c][label] = {'quality':"%03d" % (c+1), 'loc':'', 'serverid':None} + fillBlank = True + skip_c = False + try: + sno = "%02d" % (no) if (no) <= 99 else "%03d" % (no) + if sno not in servers_list[common.SERVER_PLACEHOLDER][c]['quality']: + if common.DEV_DEBUG == True: + Log('%s -- %s' % (sno, servers_list[common.SERVER_PLACEHOLDER][c]['quality'])) + fillBlank = False + skip_c = True + q_lab = re.sub('[^0-9]+', '-', servers_list[common.SERVER_PLACEHOLDER][c]['quality']) + else: + if common.DEV_DEBUG == True: + Log('%s - %s' % (sno, servers_list[common.SERVER_PLACEHOLDER][c]['quality'])) + except Exception as e: + Log(e) + if common.DEV_DEBUG == True: + Log('%s <-> %s' % (sno, servers_list[common.SERVER_PLACEHOLDER][c]['quality'])) + pass + if fillBlank == True: + for c2 in range(0,len(servers_list[label])): + if servers_list[common.SERVER_PLACEHOLDER][c]['quality'] == servers_list[label][c2]['quality']: + + q_lab = servers_list[label][c2]['quality'] + if isTvSeries == True: + q_lab = q_lab.replace(' ','') + q_lab = re.sub('[^0-9]+', '-', q_lab) + if 
'-' in q_lab: + q_lab = q_lab.split('-') + q_lab = q_lab[0] + try: + str(int(q_lab)) + except: + q_lab = no + if common.DEV_DEBUG == True: + Log('q_lab : %s' % q_lab) + servers_list_new[no-1+nos][label] = {'quality':q_lab,'loc':servers_list[label][c2]['loc'],'serverid':servers_list[label][c2]['serverid']} + if common.DEV_DEBUG == True: + Log('Fill- %s' % servers_list_new[no-1+nos][label]) + break else: - servers_list_new[c][label] = {'quality':"%02d" % (c+1), 'loc':'', 'serverid':None} - c += 1 + pass + except: + pass + if skip_c == False: + c += 1 + else: + if common.DEV_DEBUG == True: + Log('Fill-- %s' % servers_list_new[no-1+nos][common.SERVER_PLACEHOLDER]) + q_lab = servers_list_new[no-1+nos][common.SERVER_PLACEHOLDER]['quality'] + if '-' in q_lab: + q_lab = q_lab.split('-') + try: + c_p = c_p + int(q_lab[1])-int(q_lab[0]) + except: + c += 1 + + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('================= servers_list_new-1B ===============') + Log(servers_list_new) - if common.MY_CLOUD_DISABLED == True: + if common.FMOVIES_HOSTS_UNPLAYABLE == True: for i in servers_list_new: - if 'MyCloud' in i.keys(): - del i['MyCloud'] - for i in server_lab: - if 'MyCloud' == i: - server_lab.remove(i) + for h_unp in common.FMOVIES_HOSTS_DISABLED: + if h_unp in i.keys(): + try: + del i[h_unp] + except: + pass + for h_unp in common.FMOVIES_HOSTS_DISABLED: + if h_unp in server_lab: + try: + server_lab.remove(h_unp) + except: + pass + if len(server_lab) == 0: + server_lab.append(common.SERVER_PLACEHOLDER) - if Prefs["use_debug"]: - Log('=================servers_list===============') - Log(servers_list) + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('================= servers_list_new-1 ===============') + Log(server_lab) Log(servers_list_new) ############################# Data ############################ @@ -2618,13 +2845,12 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** try: summary = unicode(common.ascii_only(summary)) - #summary = unicode(str(summary).replace('"','').replace('\u00','')) except: summary = 'Not Available' try: similar_reccos = [] - similar_reccos_elems = page_data.xpath(".//*[@id='movie']//div[@class='row']//div[@class='item']") + similar_reccos_elems = page_data.xpath(".//*//div[@class='film-list']//div[@class='item']") for elem in similar_reccos_elems: similar_reccos_name = elem.xpath(".//a[@class='name']//text()")[0] @@ -2632,8 +2858,8 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** thumb_t = elem.xpath(".//a[@class='poster']//@src")[0] similar_reccos_thumb = thumb_t if 'url' not in thumb_t else thumb_t.split('url=')[1] try: - eps_nos = elem.xpath(".//div[@class='status']//span//text()")[0] - eps_nos = ' Episodes: ' + str(int(eps_nos.strip())) + eps_nos = elem.xpath(".//div[@class='ep']//text()")[0] + eps_nos = ' (%s)' % eps_nos.strip() except: eps_nos = '' try: @@ -2677,6 +2903,8 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** label = label.replace('Server F','Google-F') if 'Server G' in label: label = label.replace('Server G','Google-G') + if label in common.FMOVIES_SERVER_REMAP.keys(): + label = label.replace(label,common.FMOVIES_SERVER_REMAP[label]) if True: server_lab.append(label) @@ -2710,12 +2938,12 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** servers_list_new = [] c=0 - if Prefs["use_debug"]: - Log('=================servers_list===============') + if common.DEV_DEBUG == True and Prefs["use_debug"]: 
+ Log('================= servers_list-2 ===============') Log(servers_list) if len(servers_list) > 0: - for no in range(max(len(servers_list[common.SERVER_PLACEHOLDER]),len(servers_list['MyCloud']))): + for no in range(max(len(servers_list[common.SERVER_PLACEHOLDER]),len(servers_list[server_lab[0]]))): servers_list_new.append([]) servers_list_new[c] = {} for label in servers_list: @@ -2730,13 +2958,48 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** servers_list_new[c][label] = {'quality':"%02d" % (c+1), 'loc':'', 'serverid':None} c += 1 + if common.FMOVIES_HOSTS_UNPLAYABLE == True: + for i in servers_list_new: + for h_unp in common.FMOVIES_HOSTS_DISABLED: + if h_unp in i.keys(): + try: + del i[h_unp] + except: + pass + for h_unp in common.FMOVIES_HOSTS_DISABLED: + if h_unp in server_lab: + try: + server_lab.remove(h_unp) + except: + pass + if len(server_lab) == 0: + server_lab.append(common.SERVER_PLACEHOLDER) + + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('=================Fix numbering===============') + ##### Fix numbering ##### - seq = range(1, len(servers_list_new)+1) - #Log(seq) - c=1 + try: + m_min = servers_list[common.SERVER_PLACEHOLDER][0]['quality'] + m_min = filter(lambda x: x.isdigit(), m_min) + m_min = int(m_min) + except: + m_min = 0 + try: + m_max = servers_list[common.SERVER_PLACEHOLDER][len(servers_list[common.SERVER_PLACEHOLDER])-1]['quality'] + m_max = filter(lambda x: x.isdigit(), m_max) + m_max = int(m_max)+1 + except: + m_max = 1 + + seq = range(min(1,m_min), max(len(servers_list_new),m_max)) + + c = 1 # min(1,m_min) if len(servers_list_new) > 0: - #Log(server_lab) - #Log(servers_list_new) + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log(seq) + Log(server_lab) + Log(servers_list_new) new_map = [] @@ -2763,8 +3026,8 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** eps_item[label]['loc'] = None if int(ep_c) > seq[c-1] and label == server_lab[len(server_lab)-1]: seq.remove(seq[c-1]) - if int(ep_c) in seq and label == server_lab[len(server_lab)-1]: - seq.remove(int(ep_c)) + #if int(ep_c) in seq and label == server_lab[len(server_lab)-1]: + # seq.remove(int(ep_c)) except: pass eps_items[label] = eps_item[label] @@ -2772,9 +3035,14 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** new_map.append(eps_items) servers_list_new = new_map + + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('================= servers_list-3 ===============') + Log(servers_list_new) + ############################ - if dataEXS != None or common.ES_API_URL.lower() in url: + if dataEXS != None or common.isArrayValueInString(common.EXT_LIST_URLS, url): if Prefs["use_debug"]: Log("============================= Processsing API-Fetch ===============================") @@ -2892,7 +3160,6 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** try: summary = unicode(common.ascii_only(summary)) - #summary = unicode(str(summary).replace('"','').replace('\u00','')) except: summary = 'Not Available' @@ -2954,10 +3221,13 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** det_Season = common.cleantitle.removeParanthesis(title).split(' ') SeasonN = 1 try: - SeasonN = int(det_Season[len(det_Season)-1]) - oc.title2 = title.replace(str(SeasonN), '(Season ' + str(SeasonN) + ')') + if ' ' in title: + SeasonN = int(det_Season[len(det_Season)-1]) + oc.title2 = '%s %s' % (title[:title.rfind(' ')], title[title.rfind(' 
'):].replace(str(SeasonN), '(Season ' + str(SeasonN) + ')')) + else: + oc.title2 = '%s %s' % (title, '(Season ' + str(SeasonN) + ')') except: - oc.title2 = title + oc.title2 = '%s %s' % (title, '(Season ' + str(SeasonN) + ')') c_not_missing = 1 for episode in episodes: @@ -3048,19 +3318,31 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** episodes_list_new.append(item) episodes_list_new = sorted(episodes_list_new, key=lambda k: k['ord_date'], reverse=False) - #Log(episodes_list_new) + + if common.DEV_DEBUG == True and Prefs["use_debug"]: + Log('================= episodes_list_new ===============') + Log(episodes_list_new) episodes_list = episodes_list_new ###################################################################################### eps_i = 1 + try: + eps_i = servers_list_new[0][common.SERVER_PLACEHOLDER]['quality'] + if '-' in eps_i: + eps_i = eps_i.split('-') + eps_i = eps_i[0] + eps_i = filter(lambda x: x.isdigit(), eps_i) + eps_i = int(eps_i) + except: + eps_i = 1 c_not_missing=-1 c=0 c2=0 for eps in servers_list_new: if '-' in eps[server_lab[0]]['quality'] and verify2partcond(eps[server_lab[0]]['quality']): # 2 part episode condition - qual_i = (int(eps[server_lab[0]]['quality'].split('-')[0])-eps_i) + qual_i = max(int(eps[server_lab[0]]['quality'].split('-')[0])-eps_i,0) eps_i += count2partcond(eps[server_lab[0]]['quality'])-1 try: if episodes_list[qual_i]['air_date'] == episodes_list[qual_i+1]['air_date']: @@ -3069,7 +3351,7 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** pass else: try: - qual_i = (int(eps[server_lab[0]]['quality'])-eps_i) + c2 + qual_i = max(int(eps[server_lab[0]]['quality'])-eps_i,0) + c2 except: qual_i = c_not_missing+1 + c2 eps_i = eps_i-1 + c2 @@ -3091,6 +3373,12 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** desc = common.ascii_only(desc) + try: + episodex = filter(lambda x: x.isdigit(), episode) + episode = episodex + except: + pass + try: oc.add(DirectoryObject( key = Callback(TvShowDetail, tvshow=title, title=title_s, url=url, servers_list_new=E(JSON.StringFromObject(servers_list_new[c])), server_lab=E(JSON.StringFromObject(server_lab)), summary=desc+'\n '+summary, thumb=thumb, art=art, year=year, rating=rating, duration=duration, genre=genre, directors=directors, roles=roles, serverts=serverts, session=session, season=SeasonN, episode=int(episode), imdb_id=imdb_id), @@ -3103,7 +3391,7 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** c_not_missing = qual_i c += 1 except Exception as e: - Log('ERROR init.py>EpisodeDetail>Tv1 %s, %s' % (e.args, title_s)) + Log('ERROR init.py>EpisodeDetail>Tv1 %s, %s %s' % (e.args, title, c)) pass if SeasonN > 0 or True: # enable for all - even if this might be a single season @@ -3130,16 +3418,25 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** det_Season = common.cleantitle.removeParanthesis(title).split(' ') SeasonN = 1 try: - SeasonN = int(det_Season[len(det_Season)-1]) - oc.title2 = title.replace(str(SeasonN), '(Season ' + str(SeasonN) + ')') + if ' ' in title: + SeasonN = int(det_Season[len(det_Season)-1]) + oc.title2 = '%s %s' % (title[:title.rfind(' ')], title[title.rfind(' '):].replace(str(SeasonN), '(Season ' + str(SeasonN) + ')')) + else: + oc.title2 = '%s %s' % (title, '(Season ' + str(SeasonN) + ')') except: - oc.title2 = title + oc.title2 = '%s %s' % (title, '(Season ' + str(SeasonN) + ')') c=0 + episode = None for eps in 
servers_list_new: try: episode = eps[server_lab[0]]['quality'] title_s = 'Ep:' + episode + try: + episodex = filter(lambda x: x.isdigit(), episode) + episode = episodex + except: + pass oc.add(DirectoryObject( key = Callback(TvShowDetail, tvshow=title, title=title_s, url=url, servers_list_new=E(JSON.StringFromObject(servers_list_new[c])), server_lab=E(JSON.StringFromObject(server_lab)), summary='Episode Summary Not Available.\n ' + summary, thumb=thumb, art=art, year=year, rating=rating, duration=duration, genre=genre, directors=directors, roles=roles, serverts=serverts, session=session, season=SeasonN, episode=int(episode), imdb_id=imdb_id), title = title_s, @@ -3150,7 +3447,7 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** ) c += 1 except Exception as e: - Log('ERROR init.py>EpisodeDetail>Tv2 %s, %s' % (e.args, title_s)) + Log('ERROR init.py>EpisodeDetail>Tv2 %s, %s %s' % (e.args, title, c)) pass if SeasonN > 0 or True: # enable for all - even if this might be a single season oc.add(DirectoryObject( @@ -3191,10 +3488,11 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** for eps in servers_list_new: try: episode = eps[server_lab[0]]['quality'] + labelx = '%s' % (' ('+riptype+')' if riptype != '' else '') title_s = episode oc.add(DirectoryObject( key = Callback(TvShowDetail, tvshow=title, title=title_s, url=url, servers_list_new=E(JSON.StringFromObject(servers_list_new[c])), server_lab=E(JSON.StringFromObject(server_lab)), summary=summary, thumb=thumb, art=art, year=year, rating=rating, duration=duration, genre=genre, directors=directors, roles=roles, serverts=serverts, session=session, season=SeasonN, episode=episode, treatasmovie=True, imdb_id=imdb_id), - title = title_s, + title = '%s%s' % (title_s, labelx), summary = summary, art = art, thumb = Resource.ContentsOfURLWithFallback(url = common.GetThumb(thumb, session=session), fallback = common.GetThumb(common.ICON_UNAV, session=session)) @@ -3202,7 +3500,7 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** ) c += 1 except Exception as e: - Log('ERROR init.py>EpisodeDetail>Movie1 %s, %s' % (e.args, title_s)) + Log('ERROR init.py>EpisodeDetail>Movie1 %s, %s %s' % (e.args, title, c)) pass if Prefs['disable_extsources'] == False and common.interface.isInitialized(): @@ -3243,7 +3541,10 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** else: # case for presenting movies if Prefs["use_debug"]: - Log("case for presenting movies") + if is9anime == 'True': + Log("case for presenting 9Anime movies") + else: + Log("case for presenting movies") if Prefs['disable_extsources'] == False: #Thread.Create(ExtSources, {}, movtitle=title, year=year, title=title, url=url, summary=summary, thumb=thumb, art=art, rating=rating, duration=duration, genre=genre, directors=directors, roles=roles) @@ -3268,6 +3569,7 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** del server_lab[idx] pair_required = False + title_s = None for label in server_lab: for label_i in servers_list[label]: url_s = label_i['loc'] @@ -3282,7 +3584,13 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** captcha = None dlt = None if server_info != None: - qual = common.getHighestQualityLabel(server_info, label_i['quality']) + lab_q = label_i['quality'] + try: + if is9anime == 'True' and int(lab_q) < 360: + lab_q = '480' + except: + pass + qual = common.getHighestQualityLabel(server_info, lab_q) 
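# Hedged sketch of the guard above (assumes 9anime 'quality' values under
# 360 are episode indices rather than real resolutions, so they are floored
# to a safe '480' label before the lookup):
#     lab_q = '23'                               # 9anime pseudo-quality
#     if is9anime == 'True' and int(lab_q) < 360:
#         lab_q = '480'                          # getHighestQualityLabel()
#                                                # then receives a resolution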
title_s = label + ' - ' + qual pair_required = False @@ -3328,7 +3636,7 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** if not Prefs['use_openload_pairing'] and 'openload' in host and common.is_uss_installed() and URLService.ServiceIdentifierForURL(server_info) != None: durl = server_info else: - durl = "fmovies://" + E(JSON.StringFromObject({"url":url, "server":server_info_t, "title":title, "summary":summary, "thumb":thumb, "art":art, "year":year, "rating":rating, "duration":str(duration), "genre":genre, "roles":roles, "directors":directors, "roles":roles, "isTargetPlay":str(isTargetPlay), "useSSL":Prefs["use_https_alt"], "isVideoOnline":str(isVideoOnline), "useRedirector": redirector_enabled, 'urldata':'','quality':qual, 'pairrequired':pair_required, "host":host, "openloadApiKey":None, "force_transcode":common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session)})) + durl = "fmovies://" + E(JSON.StringFromObject({"url":url, "server":server_info_t, "title":title, "summary":summary, "thumb":thumb, "art":art, "year":year, "rating":rating, "duration":str(duration), "genre":genre, "roles":roles, "directors":directors, "roles":roles, "isTargetPlay":str(isTargetPlay), "useSSL":Prefs["use_https_alt"], "isVideoOnline":str(isVideoOnline), "useRedirector": redirector_enabled, 'urldata':'','quality':qual, 'pairrequired':pair_required, "provider":"plugin", "host":host, "openloadApiKey":None, "force_transcode":common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session), "force_transcode_imdb":common.UsingOption(key=common.DEVICE_OPTIONS[13], session=session)})) vco = VideoClipObject( url = durl, @@ -3365,7 +3673,7 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** Log('ERROR init.py>EpisodeDetail>Movie2b %s, %s' % (e.args, (title + ' - ' + title_s))) pass else: - labelx = '' + labelx = '%s' % (' ('+riptype+')' if riptype != '' else '') if len(common.host_misc_resolvers.RAPIDVIDEO_CAPTCHA) > 0 and 'rapidvideo' in label.lower(): labelx = ' (Solve Captcha)' if common.UsingOption(common.DEVICE_OPTIONS[6], session=session): @@ -3483,10 +3791,9 @@ def EpisodeDetail(title, url, thumb, session, dataEXS=None, dataEXSAnim=None, ** ) ) - oc.add(DirectoryObject(key = Callback(MainMenu),title = '<< Main Menu',thumb = R(common.ICON))) + oc.add(DirectoryObject(key = Callback(MainMenu),title = '<< Main Menu',thumb = common.GetThumb(R(common.ICON), session=session))) return oc - #################################################################################################### @route(PREFIX + "/TvShowDetail") def TvShowDetail(tvshow, title, url, servers_list_new, server_lab, summary, thumb, art, year, rating, duration, genre, directors, roles, serverts, session, season=None, episode=None, treatasmovie=False, imdb_id=None, **kwargs): @@ -3535,7 +3842,7 @@ def TvShowDetail(tvshow, title, url, servers_list_new, server_lab, summary, thum for label in server_lab: url_s = servers_list_new[label]['loc'] serverid = servers_list_new[label]['serverid'] - if url_s != None: + if url_s != None and url_s != '': if common.UsingOption(common.DEVICE_OPTIONS[5], session=session): server_info,isTargetPlay, error, host, sub_url = fmovies.GetApiUrl(url=url, key=url_s, serverts=serverts, serverid=serverid, session=session) server_info_t = server_info @@ -3584,7 +3891,7 @@ def TvShowDetail(tvshow, title, url, servers_list_new, server_lab, summary, thum if not Prefs['use_openload_pairing'] and 'openload' in host and common.is_uss_installed() and 
URLService.ServiceIdentifierForURL(server_info) != None: durl = server_info else: - durl = "fmovies://" + E(JSON.StringFromObject({"url":url, "server":server_info_t, "title":title, "summary":summary, "thumb":thumb, "art":art, "year":year, "rating":rating, "duration":str(duration), "genre":genre, "directors":directors, "roles":roles, "isTargetPlay":str(isTargetPlay), "useSSL":Prefs["use_https_alt"], "isVideoOnline":str(isVideoOnline), "useRedirector": redirector_enabled, 'urldata':'', 'pairrequired':pair_required, "host":host, "openloadApiKey":None, "force_transcode":common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session)})) + durl = "fmovies://" + E(JSON.StringFromObject({"url":url, "server":server_info_t, "title":title, "summary":summary, "thumb":thumb, "art":art, "year":year, "rating":rating, "duration":str(duration), "genre":genre, "directors":directors, "roles":roles, "isTargetPlay":str(isTargetPlay), "useSSL":Prefs["use_https_alt"], "isVideoOnline":str(isVideoOnline), "useRedirector": redirector_enabled, 'urldata':'', 'pairrequired':pair_required, "provider":"plugin", "host":host, "openloadApiKey":None, "force_transcode":common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session), "force_transcode_imdb":common.UsingOption(key=common.DEVICE_OPTIONS[13], session=session)})) vco = None try: @@ -3682,7 +3989,6 @@ def VideoDetail(title, url, url_s, label_i_qual, label, serverts, thumb, summary try: summary = unicode(common.ascii_only(summary)) - #summary = unicode(str(summary).replace('"','').replace('\u00','')) except: summary = 'Not Available' @@ -3765,7 +4071,7 @@ def VideoDetail(title, url, url_s, label_i_qual, label, serverts, thumb, summary if not Prefs['use_openload_pairing'] and 'openload' in host and common.is_uss_installed() and URLService.ServiceIdentifierForURL(server_info) != None: durl = server_info else: - durl = "fmovies://" + E(JSON.StringFromObject({"url":url, "server":server_info_t, "title":title, "summary":summary, "thumb":thumb, "art":art, "year":year, "rating":rating, "duration":str(duration), "genre":genre, "roles":roles, "directors":directors, "roles":roles, "isTargetPlay":str(isTargetPlay), "useSSL":Prefs["use_https_alt"], "isVideoOnline":str(isVideoOnline), "useRedirector": redirector_enabled, 'urldata':'','quality':qual, 'pairrequired':pair_required, "host":host, "openloadApiKey":None, "force_transcode":common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session)})) + durl = "fmovies://" + E(JSON.StringFromObject({"url":url, "server":server_info_t, "title":title, "summary":summary, "thumb":thumb, "art":art, "year":year, "rating":rating, "duration":str(duration), "genre":genre, "roles":roles, "directors":directors, "roles":roles, "isTargetPlay":str(isTargetPlay), "useSSL":Prefs["use_https_alt"], "isVideoOnline":str(isVideoOnline), "useRedirector": redirector_enabled, 'urldata':'','quality':qual, 'pairrequired':pair_required, "provider":"plugin", "host":host, "openloadApiKey":None, "force_transcode":common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session), "force_transcode_imdb":common.UsingOption(key=common.DEVICE_OPTIONS[13], session=session)})) vco = None vco = VideoClipObject( @@ -3802,12 +4108,13 @@ def VideoDetail(title, url, url_s, label_i_qual, label, serverts, thumb, summary else: vvurls = [{'url':server_info, 'qual':qual}] + seq_no = 0 for vvv in vvurls: vv = vvv['url'] qualx = vvv['qual'] if Prefs['disable_downloader'] == False and AuthTools.CheckAdmin() == True: oc.add(DirectoryObject( - key = 
Callback(downloadsmenu.AddToDownloadsListPre, title=title if tvshowtitle==None else tvshowtitlecleaned, season=season, episode=episode, purl=url, url=vv, durl=vv, summary=summary, thumb=thumb, year=year, quality=qualx, source=host, type=libtype, vidtype=libtype.lower(), resumable=True, source_meta={}, file_meta={}, sub_url=sub_url, mode=common.DOWNLOAD_MODE[0], session=session, admin=True, provider='Plugin'), + key = Callback(downloadsmenu.AddToDownloadsListPre, title=title if tvshowtitle==None else tvshowtitlecleaned, season=season, episode=episode, purl=url, url=vv, durl=vv, summary=summary, thumb=thumb, year=year, quality=qualx, source=host, type=libtype, vidtype=libtype.lower(), resumable=True, source_meta={}, file_meta={}, sub_url=sub_url, mode=common.DOWNLOAD_MODE[0], session=session, admin=True, provider='Plugin', seq=seq_no), title = '%s | Add to Download Queue' % qualx, summary = 'Adds the current video to Download List', art = art, @@ -3816,13 +4123,14 @@ def VideoDetail(title, url, url_s, label_i_qual, label, serverts, thumb, summary ) elif Prefs['disable_downloader'] == False: oc.add(DirectoryObject( - key = Callback(downloadsmenu.AddToDownloadsListPre, title=title if tvshowtitle==None else tvshowtitlecleaned, season=season, episode=episode, purl=url, url=vv, durl=vv, summary=summary, thumb=thumb, year=year, quality=qualx, source=host, type=libtype, vidtype=libtype.lower(), resumable=True, source_meta={}, file_meta={}, sub_url=sub_url, mode=common.DOWNLOAD_MODE[1], session=session, admin=False, provider='Plugin'), + key = Callback(downloadsmenu.AddToDownloadsListPre, title=title if tvshowtitle==None else tvshowtitlecleaned, season=season, episode=episode, purl=url, url=vv, durl=vv, summary=summary, thumb=thumb, year=year, quality=qualx, source=host, type=libtype, vidtype=libtype.lower(), resumable=True, source_meta={}, file_meta={}, sub_url=sub_url, mode=common.DOWNLOAD_MODE[1], session=session, admin=False, provider='Plugin', seq=seq_no), title = '%s | Add to Request Queue' % qualx, summary = 'Adds the current video to Request List', art = art, thumb = common.GetThumb(R(common.ICON_REQUESTS), session=session) ) ) + seq_no += 1 else: host_source = 'gvideo' files = json.loads(server_info) @@ -3835,13 +4143,14 @@ def VideoDetail(title, url, url_s, label_i_qual, label, serverts, thumb, summary ftype = file['type'] sortable_list.append({'label': res, 'file':furl, 'type':ftype}) newlist = sorted(sortable_list, key=lambda k: k['label'], reverse=True) + seq_no = 0 for file in newlist: furl = file['file'] res = str(int(file['label']))+'p' ftype = file['type'] if Prefs['disable_downloader'] == False and AuthTools.CheckAdmin() == True: oc.add(DirectoryObject( - key = Callback(downloadsmenu.AddToDownloadsListPre, title=title if tvshowtitle==None else tvshowtitlecleaned, season=season, episode=episode, purl=url, url=furl, durl=furl, summary=summary, thumb=thumb, year=year, quality=res, source=host_source, type=libtype, vidtype=libtype.lower(), resumable=True, source_meta={}, file_meta={}, sub_url=sub_url, mode=common.DOWNLOAD_MODE[0], session=session, admin=True, provider='Plugin'), + key = Callback(downloadsmenu.AddToDownloadsListPre, title=title if tvshowtitle==None else tvshowtitlecleaned, season=season, episode=episode, purl=url, url=furl, durl=furl, summary=summary, thumb=thumb, year=year, quality=res, source=host_source, type=libtype, vidtype=libtype.lower(), resumable=True, source_meta={}, file_meta={}, sub_url=sub_url, mode=common.DOWNLOAD_MODE[0], session=session, admin=True, 
provider='Plugin', seq=seq_no), title = '%s | Add to Download Queue' % res, summary = 'Adds the current video to Download List', art = art, @@ -3850,13 +4159,14 @@ def VideoDetail(title, url, url_s, label_i_qual, label, serverts, thumb, summary ) elif Prefs['disable_downloader'] == False: oc.add(DirectoryObject( - key = Callback(downloadsmenu.AddToDownloadsListPre, title=title if tvshowtitle==None else tvshowtitlecleaned, season=season, episode=episode, purl=url, url=furl, durl=furl, summary=summary, thumb=thumb, year=year, quality=res, source=host_source, type=libtype, vidtype=libtype.lower(), resumable=True, source_meta={}, file_meta={}, sub_url=sub_url, mode=common.DOWNLOAD_MODE[1], session=session, admin=False, provider='Plugin'), + key = Callback(downloadsmenu.AddToDownloadsListPre, title=title if tvshowtitle==None else tvshowtitlecleaned, season=season, episode=episode, purl=url, url=furl, durl=furl, summary=summary, thumb=thumb, year=year, quality=res, source=host_source, type=libtype, vidtype=libtype.lower(), resumable=True, source_meta={}, file_meta={}, sub_url=sub_url, mode=common.DOWNLOAD_MODE[1], session=session, admin=False, provider='Plugin', seq=seq_no), title = '%s | Add to Request Queue' % res, summary = 'Adds the current video to Request List', art = art, thumb = common.GetThumb(R(common.ICON_REQUESTS), session=session) ) ) + seq_no += 1 except Exception as e: if Prefs["use_debug"]: @@ -3888,7 +4198,6 @@ def ExtSources(title, url, summary, thumb, art, rating, duration, genre, directo try: summary = unicode(common.ascii_only(summary)) - #summary = unicode(str(summary).replace('"','').replace('\u00','')) except: summary = 'Not Available' @@ -4042,7 +4351,7 @@ def ExtSources(title, url, summary, thumb, art, rating, duration, genre, directo else: title_msg = "%s | %s | %s | %s | %s | %s | %s" % (status, source['vidtype'], source['rip'], source['quality'], file_size, source['source'], source['provider']) - if common.DEV_DEBUG == True: + if common.DEV_DEBUG == True and Prefs["use_debug"] == True: Log("%s --- %s %s" % (title_msg, source['vidtype'], vidUrl)) my_i_hosts = common.interface.getHostsPlaybackSupport(encode=False) if source['source'] in my_i_hosts.keys(): @@ -4056,7 +4365,7 @@ def ExtSources(title, url, summary, thumb, art, rating, duration, genre, directo if not Prefs['use_openload_pairing'] and 'openload' in source['source'] and common.is_uss_installed() and URLService.ServiceIdentifierForURL(vidUrl) != None: durl = vidUrl else: - durl = "fmovies://" + E(JSON.StringFromObject({"url":url, "server":vidUrl, "title":title, "summary":summary, "thumb":thumb, "art":art, "year":year, "rating":rating, "duration":str(duration), "genre":genre, "directors":directors, "roles":roles, "isTargetPlay":str(isTargetPlay), "useSSL":Prefs["use_https_alt"], "isVideoOnline":str(isVideoOnline), "useRedirector": redirector_enabled, 'quality':source['quality'], 'urldata':urldata, 'params':params, 'pairrequired':pair_required, "host":source['source'], "openloadApiKey":None, "force_transcode":common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session)})) + durl = "fmovies://" + E(JSON.StringFromObject({"url":url, "server":vidUrl, "title":title, "summary":summary, "thumb":thumb, "art":art, "year":year, "rating":rating, "duration":str(duration), "genre":genre, "directors":directors, "roles":roles, "isTargetPlay":str(isTargetPlay), "useSSL":Prefs["use_https_alt"], "isVideoOnline":str(isVideoOnline), "useRedirector": redirector_enabled, 'quality':source['quality'], 'urldata':urldata, 
'params':params, 'pairrequired':pair_required, "provider":source['provider'], "host":source['source'], "openloadApiKey":None, "force_transcode":common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session), "force_transcode_imdb":common.UsingOption(key=common.DEVICE_OPTIONS[13], session=session)})) try: oc.append(VideoClipObject( url = durl, @@ -4088,7 +4397,11 @@ def ExtSources(title, url, summary, thumb, art, rating, duration, genre, directo gen_play = (title_msg + source['titleinfo'] + ' | (via Generic Playback)', summary, common.GetThumb(thumb, session=session), source['params'], duration, genre, vidUrl, source['quality'], watch_title) titlex, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title = gen_play try: - oc.append(playback.CreateVideoObject(url, titlex, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title)) + #url, title, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title, include_container=False, playDirect=False, force_transcode=False + force_transcode = common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session) + if source['provider'] == 'IMDb' and force_transcode == False: + force_transcode = common.UsingOption(key=common.DEVICE_OPTIONS[13], session=session) + oc.append(playback.CreateVideoObject(url, titlex, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title, force_transcode=force_transcode)) except Exception as e: if Prefs["use_debug"]: Log(gen_play) @@ -4103,7 +4416,11 @@ def ExtSources(title, url, summary, thumb, art, rating, duration, genre, directo gen_play = (title_msg + source['titleinfo'] + ' | (via Generic Playback)', summary, common.GetThumb(thumb, session=session), source['params'], duration, genre, vidUrl, source['quality'], watch_title) titlex, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title = gen_play try: - oc.append(playback.CreateVideoObject(url, titlex, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title)) + #url, title, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title, include_container=False, playDirect=False, force_transcode=False + force_transcode = common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session) + if source['provider'] == 'IMDb' and force_transcode == False: + force_transcode = common.UsingOption(key=common.DEVICE_OPTIONS[13], session=session) + oc.append(playback.CreateVideoObject(url, titlex, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title, force_transcode=force_transcode)) except Exception as e: if Prefs["use_debug"]: Log(gen_play) @@ -4115,7 +4432,11 @@ def ExtSources(title, url, summary, thumb, art, rating, duration, genre, directo for gen_play in generic_playback_links: titlex, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title = gen_play try: - oc.append(playback.CreateVideoObject(url, titlex, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title)) + #url, title, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title, include_container=False, playDirect=False, force_transcode=False + force_transcode = common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session) + if source['provider'] == 'IMDb' and force_transcode == False: + force_transcode = common.UsingOption(key=common.DEVICE_OPTIONS[13], session=session) + oc.append(playback.CreateVideoObject(url, titlex, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title, 
force_transcode=force_transcode)) except Exception as e: if Prefs["use_debug"]: Log(gen_play) @@ -4156,6 +4477,9 @@ def ExtSources(title, url, summary, thumb, art, rating, duration, genre, directo extSources_urlservice = common.OrderBasedOn(extSources_urlservice, use_host=False, use_filesize=common.UsingOption(key=common.DEVICE_OPTIONS[9], session=session)) c = len(extSources_urlservice) + if common.DEV_DEBUG == True and Prefs["use_debug"] == True: + Log(extSources_urlservice) + if cx > 0: ext_summmary = ', '.join('%s (%s)' % (x['label'],'enabled' if str(x['enabled']).lower()=='true' else 'disabled') for x in common.INTERNAL_SOURCES_FILETYPE if 'Movie/Show' not in x['label']) ocp = DirectoryObject(title = 'Extras (%s items)' % str(cx), key = Callback(PSExtSources, con_title='Extras (%s items)' % str(cx), extSources_play=E(JSON.StringFromObject(extExtrasSources_urlservice)), session=session, watch_title=watch_title, year=year, summary=summary, thumb=thumb, art=art, url=url, duration=duration, rating=rating, genre=genre, mode_trailer=True), summary=ext_summmary,thumb=R(common.ICON_PLEX)) @@ -4182,7 +4506,6 @@ def ExtSourcesDownload(title, url, summary, thumb, art, rating, duration, genre, try: summary = unicode(common.ascii_only(summary)) - #summary = unicode(str(summary).replace('"','').replace('\u00','')) except: summary = 'Not Available' @@ -4313,14 +4636,15 @@ def ExtSourcesDownload(title, url, summary, thumb, art, rating, duration, genre, watch_title_x = '%s - %s%s' % (watch_title, source['maininfo'], (' - ' + source['vidtype']) if source['vidtype'].lower() not in source['maininfo'].lower() else '') - if common.DEV_DEBUG == True: + if common.DEV_DEBUG == True and Prefs["use_debug"] == True: Log("%s --- %s" % (title_msg, vidUrl)) - my_i_hosts = common.interface.getHostsPlaybackSupport(encode=False) - if source['source'] in my_i_hosts.keys(): - Log('Playback: %s' % my_i_hosts[source['source']]) + # my_i_hosts = common.interface.getHostsPlaybackSupport(encode=False) + # if source['source'] in my_i_hosts.keys(): + # Log('Playback: %s' % my_i_hosts[source['source']]) - # all source links (not extras) that can be played via the code service - if vidUrl != None and source['enabled'] and source['allowsDownload'] and source['misc']['player'] == 'iplayer' and common.interface.getHostsPlaybackSupport(encode=False)[source['source']] or source['source'] == 'direct': + # all source links (not extras) that can be downloaded + # if vidUrl != None and source['enabled'] and source['allowsDownload'] and source['misc']['player'] == 'iplayer' and common.interface.getHostsPlaybackSupport(encode=False)[source['source']] or source['source'] == 'direct': + if vidUrl != None and source['enabled'] and source['allowsDownload']: try: libtype = 'movie' if tvshowtitle == None else 'show' @@ -4331,7 +4655,7 @@ def ExtSourcesDownload(title, url, summary, thumb, art, rating, duration, genre, downloadTitle = movtitle oc.add(DirectoryObject( - key = Callback(downloadsmenu.AddToDownloadsListPre, title=downloadTitle, purl=url, url=source['url'], durl=source['durl'], sub_url=source['sub_url'], summary=summary, thumb=thumb, year=year, fsBytes=fsBytes, fs=fs, file_ext=source['file_ext'], quality=source['quality'], source=source['source'], source_meta={}, file_meta={}, type=libtype, vidtype=vidtype, resumable=source['resumeDownload'], mode=mode, session=session, admin=True if mode==common.DOWNLOAD_MODE[0] else False, params=source['params'], riptype=source['rip'], season=season, episode=episode, provider=source['provider']), + 
key = Callback(downloadsmenu.AddToDownloadsListPre, title=downloadTitle, purl=url, url=source['url'], durl=source['durl'], sub_url=source['sub_url'], page_url=source['page_url'], summary=summary, thumb=thumb, year=year, fsBytes=fsBytes, fs=fs, file_ext=source['file_ext'], quality=source['quality'], source=source['source'], source_meta={}, file_meta={}, type=libtype, vidtype=vidtype, resumable=source['resumeDownload'], mode=mode, session=session, admin=True if mode==common.DOWNLOAD_MODE[0] else False, params=source['params'], riptype=source['rip'], season=season, episode=episode, provider=source['provider'], seq = source['seq']), title = title_msg, summary = 'Adds the current video to %s List' % 'Download' if mode==common.DOWNLOAD_MODE[0] else 'Request', art = art, @@ -4358,7 +4682,6 @@ def PSExtSources(extSources_play, con_title, session, watch_title, year, summary try: summary = unicode(common.ascii_only(summary)) - #summary = unicode(str(summary).replace('"','').replace('\u00','')) except: summary = 'Not Available' @@ -4450,7 +4773,7 @@ def PSExtSources(extSources_play, con_title, session, watch_title, year, summary else: title_msg = "%s | %s | %s | %s | %s | %s | %s" % (status, source['vidtype'], source['rip'], source['quality'], file_size, source['source'], source['provider']) - if common.DEV_DEBUG == True: + if common.DEV_DEBUG == True and Prefs["use_debug"] == True: Log("%s --- %s" % (title_msg, vidUrl)) Log("%s" % source) my_i_hosts = common.interface.getHostsPlaybackSupport(encode=False) @@ -4465,7 +4788,7 @@ def PSExtSources(extSources_play, con_title, session, watch_title, year, summary if not Prefs['use_openload_pairing'] and 'openload' in source['source'] and common.is_uss_installed() and URLService.ServiceIdentifierForURL(vidUrl) != None: durl = vidUrl else: - durl = "fmovies://" + E(JSON.StringFromObject({"url":url, "server":vidUrl, "title":watch_title, "summary":summary, "thumb":thumb, "art":art, "year":year, "rating":rating, "duration":str(duration), "genre":genre, "directors":None, "roles":None, "isTargetPlay":str(isTargetPlay), "useSSL":Prefs["use_https_alt"], "isVideoOnline":str(isVideoOnline), "useRedirector": redirector_enabled, 'quality':source['quality'], 'urldata':urldata, 'params':params, 'pairrequired':pair_required, "host":source['source'], "openloadApiKey":None, "force_transcode":common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session)})) + durl = "fmovies://" + E(JSON.StringFromObject({"url":url, "server":vidUrl, "title":watch_title, "summary":summary, "thumb":thumb, "art":art, "year":year, "rating":rating, "duration":str(duration), "genre":genre, "directors":None, "roles":None, "isTargetPlay":str(isTargetPlay), "useSSL":Prefs["use_https_alt"], "isVideoOnline":str(isVideoOnline), "useRedirector": redirector_enabled, 'quality':source['quality'], 'urldata':urldata, 'params':params, 'pairrequired':pair_required, "provider":source['provider'], "host":source['source'], "openloadApiKey":None, "force_transcode":common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session), "force_transcode_imdb":common.UsingOption(key=common.DEVICE_OPTIONS[13], session=session)})) try: oc.add(VideoClipObject( url = durl, @@ -4520,7 +4843,11 @@ def PSExtSources(extSources_play, con_title, session, watch_title, year, summary for gen_play in generic_playback_links: title, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title = gen_play try: - oc.add(playback.CreateVideoObject(url, title, summary, thumb, params, duration, genres, videoUrl, videoRes, 
watch_title)) + #url, title, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title, include_container=False, playDirect=False, force_transcode=False + force_transcode = common.UsingOption(key=common.DEVICE_OPTIONS[10], session=session) + if source['provider'] == 'IMDb' and force_transcode == False: + force_transcode = common.UsingOption(key=common.DEVICE_OPTIONS[13], session=session) + oc.add(playback.CreateVideoObject(url, title, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title, force_transcode=force_transcode)) except Exception as e: if Prefs["use_debug"]: Log(gen_play) @@ -4528,7 +4855,7 @@ def PSExtSources(extSources_play, con_title, session, watch_title, year, summary for o in ocx: oc.add(o) - + oc.add(DirectoryObject(key = Callback(MainMenu), title = '<< Main Menu', thumb = R(common.ICON))) return oc @@ -4603,7 +4930,7 @@ def SimilarRecommendations(title, similar_reccos, referer=None, is9anime = 'Fals thumbxx = common.GetThumb(thumb, session=session) oc.add(DirectoryObject( key = Callback(EpisodeDetail, title = name, url = loc, thumb = thumb, session = session, dataEXSAnim = dataEXSAnim), - title = name, + title = '%s%s' % (name, eps_nos), summary = GetMovieInfo(summary=summary, urlPath=more_info_link, referer=referer, session=session, is9anime=is9anime) + eps_nos, thumb = thumbxx if thumbxx != None else R(common.ICON_UNAV) ) @@ -4784,10 +5111,6 @@ def RecentWatchList(title, session=None, **kwargs): newlist = sorted(urls_list, key=lambda k: k['time'], reverse=True) - # m = re.findall(r'(.*?.)(bmovie|fmovie)', fmovies.BASE_URL) - # if len(m) > 0: - # fmovies_base = fmovies.BASE_URL.replace(m[0][0], '') - # fmovies_base = fmovies.BASE_URL.replace('https://www.','') fmovies_base = fmovies.BASE_URL.replace('https://','') c=0 @@ -4803,36 +5126,44 @@ def RecentWatchList(title, session=None, **kwargs): #Log("%s %s" % (stitle, url)) url = common.FixUrlInconsistencies(url) - url = url.replace('www.','') + url = common.FixUrlInconsistencies2(url) + url = common.FixUrlInconsistencies3(url) ES = '' - if common.ES_API_URL.lower() in longstring.lower(): + if common.isArrayValueInString(common.EXT_SITE_URLS, url) == True: + try: + if common.ES_API_URL not in url: + url = url.replace(url.split(common.ES_API_KEY)[0],common.ES_API_URL.split(common.ES_API_KEY)[0]) + except: + pass ES = common.EMOJI_EXT - if common.ANIME_URL.lower() in longstring.lower() or '9anime' in longstring.lower(): + + if common.ANIME_KEY.lower() in longstring.lower(): ES = common.EMOJI_ANIME + + if common.DEV_DEBUG == True and Prefs["use_debug"] == True: + Log("RECENT Title: %s" % stitle) + Log("RECENT URL: %s" % url) urlhost = common.client.getUrlHost(url) show = True - for u in common.BASE_URLS: - if url.replace(common.client.getUrlHost(u),fmovies_base) in items_in_recent or c > NO_OF_ITEMS_IN_RECENT_LIST: - items_to_del.append(each['key']) - show = False - break + if common.isArrayValueInString(common.EXT_SITE_URLS, url) == False: + for u in common.BASE_URLS: + if url.replace(common.client.getUrlHost(u),fmovies_base) in items_in_recent or c > NO_OF_ITEMS_IN_RECENT_LIST: + items_to_del.append(each['key']) + show = False + break - if show == True: - if 'fmovies.' in longstring or 'bmovies.' in longstring: + if show == True and url not in items_in_recent: + if 'fmovies.' in url or 'bmovies.' 
in url: url = url.replace(common.client.geturlhost(url),fmovies_base) - - #url = common.FixUrlInconsistencies(url) - - #Log("%s %s" % (stitle, url)) items_in_recent.append(url) oc.add(DirectoryObject( key=Callback(EpisodeDetail, title=stitle, url=url, thumb=thumb, session = session), - title= '%s%s' % (stitle,ES), + title= '%s%s' % (ES,stitle), thumb=thumb, tagline = timestr, summary=summary @@ -4841,6 +5172,7 @@ def RecentWatchList(title, session=None, **kwargs): c += 1 if c >= NO_OF_ITEMS_IN_RECENT_LIST or len(items_to_del) > 0: + Log('%s items to delete' % len(items_to_del)) for each in items_to_del: del Dict[each] Dict.Save() @@ -4880,10 +5212,10 @@ def ClearRecentWatchList(**kwargs): del Dict[watchlist] except Exception as e: Log.Error('Error Clearing Recent WatchList: %s' %str(e)) - continue + return MC.message_container("Recent WatchList", 'Error Clearing Recent WatchList: %s' %str(e)) Dict.Save() - return MC.message_container("My Recent WatchList", 'Your Recent WatchList list will be cleared soon.') + return MC.message_container("Recent WatchList", 'Your Recent WatchList list will be cleared soon.') ###################################################################################### # Converts old style bookmarks @@ -4911,7 +5243,8 @@ def convertbookmarks(**kwargs): thumb = longstring.split('Key5Split')[3] Delete_List.append(title+'-'+E(url)) - url = url.replace('www.','') + url = common.FixUrlInconsistencies2(url) + Covert_List[title+'-'+E(url)] = (title + 'Key5Split' + url +'Key5Split'+ summary + 'Key5Split' + thumb) if len(Delete_List) > 0: @@ -4941,76 +5274,124 @@ def Bookmarks(title, session = None, **kwargs): items_in_bm = [] items_to_del = [] + items_to_bm = [] for each in Dict: longstring = str(Dict[each]) - if (('fmovies.' in longstring or 'bmovies.' in longstring or '9anime.' in longstring) or common.isArrayValueInString(common.EXT_SITE_URLS, longstring) == True) and 'Key5Split' in longstring: - stitle = unicode(longstring.split('Key5Split')[0]) - url = longstring.split('Key5Split')[1] - summary = unicode(longstring.split('Key5Split')[2]) - thumb = longstring.split('Key5Split')[3] - - url = common.FixUrlInconsistencies(url) - url = url.replace('www.','') + if 'Key5Split' in longstring: - for u in common.BASE_URLS: - u = common.client.getUrlHost(u) - if u in url: - url = url.replace(u,fmovies_base) - break + if ('fmovies.' in longstring or 'bmovies.' in longstring or '9anime.' 
in longstring) or common.isArrayValueInString(common.EXT_SITE_URLS, longstring) == True: + + stitle = unicode(longstring.split('Key5Split')[0]) + url = longstring.split('Key5Split')[1] + summary = unicode(longstring.split('Key5Split')[2]) + thumb = longstring.split('Key5Split')[3] - #Log("BM : %s" % stitle) - #Log("BM : %s" % url) + eachUrl = None + try: + eachUrl = D(each.replace((stitle+'-'),'')) + except: + pass - if url not in items_in_bm: + url0 = url = common.FixUrlInconsistencies(url) + url = common.FixUrlInconsistencies2(url) + url = common.FixUrlInconsistencies3(url) - items_in_bm.append(url) - is9anime = 'False' - ES = '' - if common.ES_API_URL.lower() in url.lower(): - ES = common.EMOJI_EXT - if common.ANIME_URL.lower() in url.lower(): - ES = common.EMOJI_ANIME - is9anime = 'True' + # Fix incorrect BM's + if eachUrl != None and eachUrl != url0: + if common.DEV_DEBUG == True and Prefs["use_debug"] == True: + Log('Fixing URL : %s with %s' % (url0, url)) + items_to_bm.append([stitle, url, summary, thumb]) + items_to_del.append(each) - if fmovies.FILTER_PATH in url or '(All Seasons)' in stitle: - oc.add(DirectoryObject( - key=Callback(Search, query=stitle.replace(' (All Seasons)',''), session = session, mode='other seasons', thumb=thumb, summary=summary, is9anime=is9anime), - title='%s%s' % (stitle,ES), - thumb=thumb, - summary=summary + for u in common.BASE_URLS: + u = common.client.getUrlHost(u) + if u in url: + url = url.replace(u,fmovies_base) + break + + if common.ES_API_KEY in url: + try: + if common.ES_API_URL not in url: + url = url.replace(url.split(common.ES_API_KEY)[0],common.ES_API_URL.split(common.ES_API_KEY)[0]) + except: + pass + + if common.DEV_DEBUG == True and Prefs["use_debug"] == True: + Log("eachUrl : %s" % eachUrl) + Log("BM : %s" % stitle) + Log("BM : %s" % url) + + if url not in items_in_bm: + items_in_bm.append(url) + is9anime = 'False' + ES = '' + if common.isArrayValueInString(common.EXT_SITE_URLS, longstring) == True: + ES = common.EMOJI_EXT + if common.ANIME_KEY.lower() in url.lower(): + ES = common.EMOJI_ANIME + is9anime = 'True' + + if fmovies.FILTER_PATH in url or '(All Seasons)' in stitle: + oc.add(DirectoryObject( + key=Callback(Search, query=stitle.replace(' (All Seasons)',''), session = session, mode='other seasons', thumb=thumb, summary=summary, is9anime=is9anime), + title='%s%s' % (ES,stitle), + tagline=stitle, + thumb=thumb, + summary=summary + ) ) - ) - else: - oc.add(DirectoryObject( - key=Callback(EpisodeDetail, title=stitle, url=url, thumb=thumb, session = session), - title='%s%s' % (stitle,ES), - thumb=thumb, - summary=summary + else: + oc.add(DirectoryObject( + key=Callback(EpisodeDetail, title=stitle, url=url, thumb=thumb, session = session), + title='%s%s' % (ES,stitle), + tagline=stitle, + thumb=thumb, + summary=summary + ) ) - ) - else: - items_to_del.append(each) - + else: + items_to_del.append(each) + if len(items_to_del) > 0: for each in items_to_del: del Dict[each] Dict.Save() + + if len(items_to_bm) > 0: + for each in items_to_bm: + AddBookmark(each[0], each[1], each[2], each[3], True) + Dict.Save() - if len(oc) == 0: - return MC.message_container(title, 'No Bookmarked Videos Available') + #if len(oc) == 0: + # return MC.message_container(title, 'No Bookmarked Videos Available') - oc.objects.sort(key=lambda obj: obj.title, reverse=False) + if len(oc) > 0: + oc.objects.sort(key=lambda obj: obj.tagline, reverse=False) - #add a way to clear bookmarks list - oc.add(DirectoryObject( - key = Callback(ClearBookmarks), - title = "Clear 
Bookmarks", - thumb = R(common.ICON_QUEUE), - summary = "CAUTION! This will clear your entire bookmark list!" + if len(oc) > 0: + #add a way to clear bookmarks list + oc.add(DirectoryObject( + key = Callback(ClearBookmarks), + title = "Clear Bookmarks", + thumb = R(common.ICON_QUEUE), + summary = "CAUTION! This will clear your entire bookmark list!" + ) ) - ) + + if len(oc) > 0: + # save bookmarks + oc.add(DirectoryObject(key=Callback(tools.DevToolsC, title='save_bm'), + title=u'Save Bookmarks', + thumb = R(common.ICON_FL_SAVE), + summary=u'Save Bookmarks to the Resource dir. (file: bookmarks.json)')) + + # load bookmarks + oc.add(DirectoryObject(key=Callback(tools.DevToolsC, title='load_bm'), + title=u'Load Bookmarks', + thumb = R(common.ICON_FL_LOAD), + summary=u'Load Bookmarks from the Resource dir. (file: bookmarks.json)')) return oc @@ -5021,22 +5402,38 @@ def Check(title, url, **kwargs): longstring = Dict[title+'-'+E(url)] fmovies_urlhost = common.client.geturlhost(url) - #Log("%s --- %s --- %s" % (longstring, url, fmovies_urlhost)) + fmovies_movie_path = None + + try: + fmovies_movie_path = url.split(fmovies_urlhost)[1] + except: + pass + + for ext_url in common.EXT_SITE_URLS: + surl = url.replace(url.split(common.ES_API_KEY)[0],ext_url.split(common.ES_API_KEY)[0]) + longstring = Dict[title+'-'+E(surl)] + if longstring != None and common.ES_API_KEY in longstring: + return True if longstring != None and common.isArrayValueInString(common.EXT_SITE_URLS, longstring) == True: return True if longstring != None and url in longstring: return True - - + for u in common.BASE_URLS: surl = url.replace(fmovies_urlhost, common.client.geturlhost(u)) longstring = Dict[title+'-'+E(surl)] if longstring != None and surl in longstring: return True - surl = url.replace(fmovies_urlhost,'%s.%s' % (common.ANIME_KEY, common.ANIME_DOM)) + for d in common.ANIME_DOMS: + surl = url.replace(common.client.geturlhost(url),'%s.%s' % (common.ANIME_KEY, d)) + longstring = Dict[title+'-'+E(surl)] + if longstring != None and surl in longstring: + return True + + surl = url.replace(fmovies_urlhost,'%s.%s' % (common.ANIME_KEY, 'is')) longstring = Dict[title+'-'+E(surl)] if longstring != None and surl in longstring: return True @@ -5051,17 +5448,22 @@ def Check(title, url, **kwargs): ###################################################################################### # Adds a movie to the bookmarks list using the title as a key for the url @route(PREFIX + "/addbookmark") -def AddBookmark(title, url, summary, thumb, **kwargs): +def AddBookmark(title, url, summary, thumb, silent=False, **kwargs): url = common.FixUrlInconsistencies(url) + url = common.FixUrlInconsistencies3(url) if Check(title=title, url=url): return MC.message_container(title, 'This item has already been added to your bookmarks.') #Log("Added : %s %s" % (title, url)) Dict[title+'-'+E(url)] = (title + 'Key5Split' + url +'Key5Split'+ summary + 'Key5Split' + thumb) - Dict.Save() - return MC.message_container(title, 'This item has been added to your bookmarks.') + + silent = True if str(silent).lower() == 'true' else False + + if silent == False: + Dict.Save() + return MC.message_container(title, 'This item has been added to your bookmarks.') ###################################################################################### # Removes a movie to the bookmarks list using the title as a key for the url @@ -5069,30 +5471,69 @@ def AddBookmark(title, url, summary, thumb, **kwargs): def RemoveBookmark(title, url, **kwargs): url = 
common.FixUrlInconsistencies(url) + url = common.FixUrlInconsistencies3(url) + success = False + try: del Dict[title+'-'+E(url)] - except: - pass + success = True + except Exception as e: + Log(e) + + if success == False and 'All Seasons' in title: + try: + if 'type=series' not in url: + del Dict[title+'-'+E(url.replace('page=1','type=series&page=1'))] + success = True + else: + del Dict[title+'-'+E(url.replace('type=series&page=1','page=1'))] + success = True + except Exception as e: + Log(e) fmovies_urlhost = common.client.geturlhost(url) - for u in common.BASE_URLS: + if success == False: + for u in common.BASE_URLS: + try: + del Dict[title+'-'+E(url.replace(fmovies_urlhost,common.client.geturlhost(u)))] + success = True + break + except Exception as e: + if common.DEV_DEBUG == True and Prefs["use_debug"] == True: + Log(e) + pass + + if success == False: + for d in common.ANIME_DOMS: + try: + del Dict[title+'-'+E(url.replace(fmovies_urlhost,'%s.%s' % (common.ANIME_KEY, d)))] + success = True + break + except: + pass + + if success == False: try: - del Dict[title+'-'+E(url.replace(fmovies_urlhost,common.client.geturlhost(u)))] + del Dict[title+'-'+E(url.replace(fmovies_urlhost,'9anime.to'))] + success = True except: pass - - try: - del Dict[title+'-'+E(url.replace(fmovies_urlhost,'9anime.is'))] - except: - pass - try: - del Dict[title+'-'+E(url.replace(fmovies_urlhost,'9anime.to'))] - except: - pass + + if success == False: + for ext_url in common.EXT_SITE_URLS: + try: + surl = url.replace(url.split(common.ES_API_KEY)[0],ext_url.split(common.ES_API_KEY)[0]) + del Dict[title+'-'+E(surl)] + success = True + except: + pass - Dict.Save() - return MC.message_container(title, 'This item has been removed from your bookmarks.') + if success == True: + Dict.Save() + return MC.message_container(title, 'This item has been removed from your bookmarks.') + else: + return MC.message_container(title, 'Error in removing your bookmark.') ###################################################################################### # Clears the Dict that stores the bookmarks list @@ -5105,8 +5546,8 @@ def ClearBookmarks(**kwargs): remove_list = [] for each in Dict: try: - url = Dict[each] - if ('bmovies.' in url or 'fmovies.' in url or '9anime.' in url) or common.isArrayValueInString(common.EXT_SITE_URLS, url) == True and 'http' in url and 'RR44SS' not in url: + longstring = Dict[each] + if (('bmovies.' in longstring or 'fmovies.' in longstring or '9anime.' 
@@ -5105,8 +5546,8 @@ def ClearBookmarks(**kwargs):
     remove_list = []
     for each in Dict:
         try:
-            url = Dict[each]
-            if ('bmovies.' in url or 'fmovies.' in url or '9anime.' in url) or common.isArrayValueInString(common.EXT_SITE_URLS, url) == True and 'http' in url and 'RR44SS' not in url:
+            longstring = Dict[each]
+            if (('bmovies.' in longstring or 'fmovies.' in longstring or '9anime.' in longstring) or common.isArrayValueInString(common.EXT_SITE_URLS, longstring) == True) and 'http' in longstring and 'Key5Split' in longstring:
                 remove_list.append(each)
         except:
             continue
@@ -5116,7 +5557,7 @@ def ClearBookmarks(**kwargs):
             del Dict[bookmark]
         except Exception as e:
             Log.Error('Error Clearing Bookmarks: %s' %str(e))
-            continue
+            return MC.message_container("Bookmarks", 'Error Clearing Bookmarks: %s' %str(e))
 
     Dict.Save()
     return MC.message_container("Bookmarks", 'Your bookmark list will be cleared soon.')
@@ -5144,7 +5585,7 @@ def ClearSearches(**kwargs):
             del Dict[search_term]
         except Exception as e:
             Log.Error('Error Clearing Searches: %s' %str(e))
-            continue
+            return MC.message_container("Search Queue", 'Error Clearing Searches: %s' %str(e))
 
     Dict.Save()
     return MC.message_container("Search Queue", "Your Search Queue list will be cleared soon.")
@@ -5198,7 +5639,7 @@ def Search(query=None, surl=None, page_count='1', mode='default', thumb=None, su
             last_page_no = int(page_count)
             last_page_no = int(page_data.xpath(".//*//ul[@class='pagination'][1]//li[last()-1]//text()")[0])
         except Exception as e:
-            Log("__init.py__ > Search > Error: %s" % e)
+            Log("main.py > Search > Error: %s" % e)
             errorB = True
             pass
     no_elems = len(elems)
@@ -5303,7 +5744,7 @@ def Search(query=None, surl=None, page_count='1', mode='default', thumb=None, su
         if mode == 'other seasons' or mode == 'tag':
             oc.objects.sort(key=lambda obj: obj.title, reverse=False)
     except Exception as e:
-        Log('__init.py__ > Search Error: %s URL: %s' % (e, url))
+        Log('main.py > Search Error: %s URL: %s' % (e, url))
         pass
 
     oc_ext = []
@@ -5423,7 +5864,7 @@ def AnimeSearchExt(query=None, session=None, **kwargs):
                 summary = 'Available on item page.'
                 dobj = DirectoryObject(
                     key = Callback(EpisodeDetail, title=title, url=url, thumb=thumb, session=session, dataEXSAnim=url),
-                    title = '%s %s' % (common.EMOJI_ANIME, title),
+                    title = '%s%s' % (common.EMOJI_ANIME, title),
                     summary = summary,
                     thumb = Resource.ContentsOfURLWithFallback(url = thumb, fallback=common.ICON_UNAV)
                 )
@@ -5869,6 +6310,7 @@ def DoIMDBExtSources(title, year, type, imdbid, season=None, episode=None, episo
                 if int(episodeNr) + 9 == e:
                     if int(episodeNr) == 1 and autopilot_option_shown == False:
                         if episodesTot != None:
+                            autopilot_option_shown = True
                             episodesTot = int(episodesTot)
                             if Prefs['disable_downloader'] == False and AuthTools.CheckAdmin() == True:
                                 oc.add(DirectoryObject(
@@ -5883,7 +6325,18 @@ def DoIMDBExtSources(title, year, type, imdbid, season=None, episode=None, episo
                         title = 'Next Page >>',
                         thumb = R(common.ICON_NEXT)))
                     break
-
+            if autopilot_option_shown == False:
+                if episodesTot != None:
+                    autopilot_option_shown = True
+                    episodesTot = int(episodesTot)
+                    if Prefs['disable_downloader'] == False and AuthTools.CheckAdmin() == True:
+                        oc.add(DirectoryObject(
+                            key = Callback(downloadsmenu.AddToAutoPilotDownloads, title=x_title, thumb=x_thumb, summary=x_summary, purl=None, season=x_season, episode_start=1, episode_end=episodesTot, year=x_year, type='show', vidtype='show', session=session, admin=True),
+                            title = 'Add to AutoPilot Queue',
+                            summary = 'Adds Episodes (1 - %s) to the AutoPilot Queue for Downloading' % episodesTot,
+                            thumb = R(common.ICON_OTHERSOURCESDOWNLOAD_AUTO)
+                            )
+                        )
     elif final == False:
         try:
             CACHE_EXPIRY = 60 * int(Prefs["cache_expiry_time"])
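The Search hunk above keeps the pagination-probe pattern: seed last_page_no with the requested page, then try to read the real last page from the pagination bar. A sketch under the assumption that page_data is an lxml-parsed HTML document, as Search uses:

    # Sketch: last-page detection with a safe fallback (assumes lxml etree).
    def last_page(page_data, page_count='1'):
        last_page_no = int(page_count)          # fallback: the current page
        try:
            # the second-to-last <li> of the pagination bar holds the last page number
            last_page_no = int(page_data.xpath(
                ".//*//ul[@class='pagination'][1]//li[last()-1]//text()")[0])
        except Exception:
            pass                                # keep the fallback; mirrors the errorB path
        return last_page_no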
@@ -6626,7 +7079,7 @@ def ShowCategoryES(title, filter=None, page_count='1', last_page_no=None, sessio
             searchString = '%s/%s?sort=%s&order=%s&genre=%s' % (filter['type'], page_count, filter['sort'], filter['order'], genre)
 
     # Build Filter-Search Url
-    #http://movies-v2.api-fetch.website/movies/1?sort=trending&limit=50&year=2017&genre=Comedy&order=-1
+    # http://movies-v2.api-fetch.website/movies/1?sort=trending&limit=50&year=2017&genre=Comedy&order=-1
     apiUrl = common.ES_API_URL + '/%s' % urllib2.quote(searchString, safe='%/_-+=&?')
 
     if last_page_no == None:
@@ -6929,6 +7382,11 @@ def ValidatePrefs2(changed='True', **kwargs):
         common.CACHE_EXPIRY = 60 * int(Prefs["cache_expiry_time"])
     except:
         common.CACHE_EXPIRY = common.CACHE_EXPIRY_TIME
+
+    try:
+        common.control.debug = Prefs["use_debug"]
+    except:
+        pass
 
     if str(changed) == 'True':
         DumpPrefs(changed=changed)
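ShowCategoryES assembles the listing query first and percent-encodes it in one pass, whitelisting the URL punctuation via quote's safe parameter. A sketch using the sample host from the comment above:

    # Sketch: filter-URL assembly (Python 2 urllib2.quote, as the plugin uses).
    import urllib2

    def build_filter_url(api_base, vidtype, page, sort, order, genre):
        search_string = '%s/%s?sort=%s&order=%s&genre=%s' % (vidtype, page, sort, order, genre)
        # safe= keeps the query punctuation intact while encoding everything else
        return api_base + '/%s' % urllib2.quote(search_string, safe='%/_-+=&?')

    # build_filter_url('http://movies-v2.api-fetch.website', 'movies', 1, 'trending', '-1', 'Comedy')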
diff --git a/Contents/Code/playback.py b/Contents/Code/playback.py
index f6e58c4..cba567e 100644
--- a/Contents/Code/playback.py
+++ b/Contents/Code/playback.py
@@ -11,44 +11,45 @@
 
 ####################################################################################################
 @route(PREFIX+'/videoplayback')
-def CreateVideoObject(url, title, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title, include_container=False, playDirect=False, **kwargs):
+def CreateVideoObject(url, title, summary, thumb, params, duration, genres, videoUrl, videoRes, watch_title, include_container=False, playDirect=False, force_transcode=False, **kwargs):
 
     videoUrl = videoUrl.decode('unicode_escape')
     url = url if url != None else videoUrl
+    force_transcode = True if str(force_transcode) == 'True' else False
 
     if include_container:
         video = MovieObject(
-            key = Callback(CreateVideoObject, url=url, title=title, summary=summary, thumb=thumb, params=params, duration=duration, genres=genres, videoUrl=videoUrl, videoRes=videoRes, watch_title=watch_title, include_container=True, playDirect=playDirect),
+            key = Callback(CreateVideoObject, url=url, title=title, summary=summary, thumb=thumb, params=params, duration=duration, genres=genres, videoUrl=videoUrl, videoRes=videoRes, watch_title=watch_title, include_container=True, playDirect=playDirect, force_transcode=force_transcode),
             rating_key = url + title,
             title = title,
             summary = summary,
             thumb = thumb,
             items = [
                 MediaObject(
-                    container = Container.MP4,        # MP4, MKV, MOV, AVI
-                    video_codec = VideoCodec.H264,        # H264
-                    audio_codec = AudioCodec.AAC,        # ACC, MP3
-                    audio_channels = 2,        # 2, 6
-                    video_resolution = int(videoRes.replace('p','')),
-                    parts = [PartObject(key=Callback(PlayVideo,videoUrl=videoUrl, params=params, retResponse=include_container, url=url, title=title, summary=summary, thumb=thumb, watch_title=watch_title, playDirect=playDirect))],
+                    container = Container.MP4 if not force_transcode else None,        # MP4, MKV, MOV, AVI
+                    video_codec = VideoCodec.H264 if not force_transcode else None,        # H264
+                    audio_codec = AudioCodec.AAC if not force_transcode else None,        # AAC, MP3
+                    audio_channels = 2 if not force_transcode else None,        # 2, 6
+                    video_resolution = int(videoRes.replace('p','')) if not force_transcode else None,
+                    parts = [PartObject(key=Callback(PlayVideo,videoUrl=videoUrl, params=params, retResponse=include_container, url=url, title=title, summary=summary, thumb=thumb, watch_title=watch_title, playDirect=playDirect, force_transcode=force_transcode))],
                     optimized_for_streaming = True
                 )
             ]
         )
     else:
         video = VideoClipObject(
-            key = Callback(CreateVideoObject, url=url, title=title, summary=summary, thumb=thumb, params=params, duration=duration, genres=genres, videoUrl=videoUrl, videoRes=videoRes, watch_title=watch_title, include_container=True, playDirect=playDirect),
+            key = Callback(CreateVideoObject, url=url, title=title, summary=summary, thumb=thumb, params=params, duration=duration, genres=genres, videoUrl=videoUrl, videoRes=videoRes, watch_title=watch_title, include_container=True, playDirect=playDirect, force_transcode=force_transcode),
             rating_key = url + title,
             title = title,
             summary = summary,
             thumb = thumb,
             items = [
                 MediaObject(
-                    container = Container.MP4,        # MP4, MKV, MOV, AVI
-                    video_codec = VideoCodec.H264,        # H264
-                    audio_codec = AudioCodec.AAC,        # ACC, MP3
-                    audio_channels = 2,        # 2, 6
-                    video_resolution = int(videoRes.replace('p','')),
+                    container = Container.MP4 if not force_transcode else None,        # MP4, MKV, MOV, AVI
+                    video_codec = VideoCodec.H264 if not force_transcode else None,        # H264
+                    audio_codec = AudioCodec.AAC if not force_transcode else None,        # AAC, MP3
+                    audio_channels = 2 if not force_transcode else None,        # 2, 6
+                    video_resolution = int(videoRes.replace('p','')) if not force_transcode else None,
                     parts = [PartObject(key=Callback(PlayVideo,videoUrl=videoUrl, params=params, retResponse=include_container, url=url, title=title, summary=summary, thumb=thumb, watch_title=watch_title, playDirect=playDirect))],
                     optimized_for_streaming = True
                 )
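The force_transcode pattern above works by withholding media hints: when every container/codec attribute is None, the Plex server cannot assume the stream is direct-playable and falls back to probing and transcoding (which is what fixes the audio issues named in the changelog). A framework-free sketch, with plain strings standing in for the Container/VideoCodec/AudioCodec framework constants:

    # Sketch: None withholds a hint, pushing the server to probe/transcode.
    def media_hints(force_transcode, video_res='720p'):
        if force_transcode:
            return dict(container=None, video_codec=None, audio_codec=None,
                        audio_channels=None, video_resolution=None)
        return dict(container='mp4', video_codec='h264', audio_codec='aac',
                    audio_channels=2, video_resolution=int(video_res.replace('p', '')))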
diff --git a/Contents/Code/tools.py b/Contents/Code/tools.py
index f52d663..9d0dbc6 100644
--- a/Contents/Code/tools.py
+++ b/Contents/Code/tools.py
@@ -14,8 +14,6 @@
 TITLE = common.TITLE
 PREFIX = common.PREFIX
 
-ICON_TOOLS = "icon-tools.png"
-
 # general
 identifier = 'com.plexapp.plugins.fmoviesplus'
 prefix = common.PREFIX
@@ -113,6 +111,35 @@ def DevToolsC(title=None, header=None, message=None, session=None, **kwargs):
             ch = common.GetEmoji(type=True) if u == fmovies.BASE_URL else common.GetEmoji(type=False)
             oc.add(DirectoryObject(title='%s | Base URL : %s (set by redirection detector)' % (ch, u),key=Callback(SetBaseUrl, url=u)))
 
+        return oc
+    elif title == 'set_9base_url':
+        oc = ObjectContainer(title2='Set 9Anime Base URL')
+        base9_url_match = False
+        for d in common.ANIME_DOMS:
+            u = 'https://%s.%s' % (common.ANIME_KEY, d)
+            if u == common.ANIME_URL:
+                base9_url_match = True
+            ch = common.GetEmoji(type=True) if u == common.ANIME_URL else common.GetEmoji(type=False)
+            oc.add(DirectoryObject(title='%s | 9Anime Base URL : %s' % (ch, u),key=Callback(Set9BaseUrl, url=u)))
+        if base9_url_match == False:
+            u = common.ANIME_URL
+            ch = common.GetEmoji(type=True) if u == common.ANIME_URL else common.GetEmoji(type=False)
+            oc.add(DirectoryObject(title='%s | 9Anime Base URL : %s (set by redirection detector)' % (ch, u),key=Callback(Set9BaseUrl, url=u)))
+
+        return oc
+    elif title == 'set_ext_list_url':
+        oc = ObjectContainer(title2='Set External Listing URL')
+        ext_list_url_match = False
+        for u in common.EXT_LIST_URLS:
+            if u == common.ES_API_URL:
+                ext_list_url_match = True
+            ch = common.GetEmoji(type=True) if u == common.ES_API_URL else common.GetEmoji(type=False)
+            oc.add(DirectoryObject(title='%s | External Listing URL : %s' % (ch, u),key=Callback(SetExtListUrl, url=u)))
+        if ext_list_url_match == False:
+            u = common.ES_API_URL
+            ch = common.GetEmoji(type=True) if u == common.ES_API_URL else common.GetEmoji(type=False)
+            oc.add(DirectoryObject(title='%s | External Listing URL : %s (set by redirection detector)' % (ch, u),key=Callback(SetExtListUrl, url=u)))
+
         return oc
     elif title == 'openload_input_id':
         oc = ObjectContainer(title2='OpenLoad Video ID')
@@ -153,35 +180,43 @@ def DevToolsC(title=None, header=None, message=None, session=None, **kwargs):
     # oc.add(DirectoryObject(key=Callback(DevToolsC, title='plex_cache'),
    #      title=u'Reset {} Cache'.format(PLEX_CACHE_DIR),
-    #     thumb = R(ICON_TOOLS),
+    #     thumb = R(common.ICON_TOOLS),
    #      summary=u'Remove cached files from {} directory.'.format(caches_path)))
 
     oc.add(DirectoryObject(key=Callback(DevToolsC, title='save_bm'),
         title=u'Save Bookmarks',
-        thumb = R(ICON_TOOLS),
+        thumb = R(common.ICON_FL_SAVE),
         summary=u'Save Bookmarks to the Resource dir. (file: bookmarks.json)'))
     oc.add(DirectoryObject(key=Callback(DevToolsC, title='load_bm'),
         title=u'Load Bookmarks',
-        thumb = R(ICON_TOOLS),
+        thumb = R(common.ICON_FL_LOAD),
         summary=u'Load Bookmarks from the Resource dir. (file: bookmarks.json)'))
     oc.add(DirectoryObject(key=Callback(DevToolsC, title='save_config'),
         title=u'Save Config',
-        thumb = R(ICON_TOOLS),
+        thumb = R(common.ICON_FL_SAVE),
         summary=u'Save Config to the Resource dir. (file: config.json). Device Options (all clients), Bookmarks, Recent WatchList, SearchQue, Downloads and Interface Options can be saved and restored using Config file.'))
     oc.add(DirectoryObject(key=Callback(DevToolsC, title='load_config'),
         title=u'Load Config',
-        thumb = R(ICON_TOOLS),
+        thumb = R(common.ICON_FL_LOAD),
         summary=u'Load Config from the Resource dir. (file: config.json). Device Options (all clients), Bookmarks, Recent WatchList, SearchQue, Downloads and Interface Options can be saved and restored using Config file.'))
     oc.add(DirectoryObject(key=Callback(DevToolsC, title='check_externals'),
         title=u'Check Externals',
-        thumb = R(ICON_TOOLS),
+        thumb = R(common.ICON_TOOLS),
         summary=u'Check externals like PhantomJS and Cryptodome have been installed or not'))
     oc.add(DirectoryObject(key=Callback(DevToolsC, title='set_base_url'),
         title=u'Set Base URL',
-        thumb = R(ICON_TOOLS),
+        thumb = R(common.ICON_TOOLS),
         summary=u'Set the Base URL to be used by the Channel'))
+    oc.add(DirectoryObject(key=Callback(DevToolsC, title='set_9base_url'),
+        title=u'Set 9Anime Base URL',
+        thumb = R(common.ICON_TOOLS),
+        summary=u'Set the 9Anime Base URL to be used by the Channel'))
+    oc.add(DirectoryObject(key=Callback(DevToolsC, title='set_ext_list_url'),
+        title=u'Set External Listing URL',
+        thumb = R(common.ICON_TOOLS),
+        summary=u'Set the External Listing URL to be used by the Channel'))
     oc.add(DirectoryObject(key=Callback(DevToolsC, title='openload_input_id', session=session),
         title=u'OpenLoad Video ID',
-        thumb = R(ICON_TOOLS),
+        thumb = Resource.ContentsOfURLWithFallback(url = common.ICON_OPENLOAD, fallback=common.ICON_TOOLS),
         summary=u'OpenLoad Video ID'))
 
     cc = Dict['VSPAPI']['count']
@@ -195,7 +230,7 @@ def DevToolsC(title=None, header=None, message=None, session=None, **kwargs):
 
     oc.add(DirectoryObject(key=Callback(DevToolsC, title='imdb_input_id', session=session),
         title = videospider_msg,
-        thumb = R(ICON_TOOLS),
+        thumb = Resource.ContentsOfURLWithFallback(url = common.ICON_IMDB, fallback=common.ICON_TOOLS),
         summary = videospider_msg))
 
     oc.add(DirectoryObject(key = Callback(main.MainMenu), title = '<< Main Menu', thumb = R(common.ICON)))
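The two new DevTools menus follow the same selection-list pattern: one row per candidate URL with a check mark on the active one, plus a trailing row when the redirection detector picked a URL that is not in the preset list. A standalone sketch (the plugin renders the glyphs via common.GetEmoji):

    # Sketch: selection rows with an active-URL indicator.
    def build_url_rows(candidates, active_url):
        check, cross = u'\u2714', u'\u274c'     # stand-ins for common.GetEmoji
        rows = [(check if u == active_url else cross, u) for u in candidates]
        if active_url not in candidates:
            # the active URL came from the redirection detector, not the presets
            rows.append((check, '%s (set by redirection detector)' % active_url))
        return rows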
@@ -230,31 +265,34 @@ def ClearCache(itemname, timeout=None, **kwargs):
 
 ######################################################################################
 def SaveBookmarks(**kwargs):
 
-    fmovies_base = fmovies.BASE_URL.replace('https://','')
+    fmovies_base = fmovies.BASE_URL.replace('https://','').replace('http://','')
+    anime_base = common.ANIME_URL.replace('https://','').replace('http://','')
 
     items_in_bm = []
     for each in Dict:
         longstring = str(Dict[each])
-        if (('fmovies.' in longstring or 'bmovies.' in longstring) or common.isArrayValueInString(common.EXT_SITE_URLS, longstring) == True) and 'Key5Split' in longstring:
+        if (('fmovies.' in longstring or 'bmovies.' in longstring or '9anime.' in longstring) or common.isArrayValueInString(common.EXT_SITE_URLS, longstring) == True) and 'Key5Split' in longstring:
             stitle = unicode(longstring.split('Key5Split')[0])
             url = longstring.split('Key5Split')[1]
             summary = unicode(longstring.split('Key5Split')[2])
             thumb = longstring.split('Key5Split')[3]
 
             url = common.FixUrlInconsistencies(url)
-            url = url.replace('www.','')
+            url = common.FixUrlInconsistencies2(url)
+            url = common.FixUrlInconsistencies3(url)
 
-            #Log("BM : %s" % url)
+            if common.DEV_DEBUG == True and Prefs["use_debug"] == True:
+                Log("Save BM-1 : %s" % url)
 
-            for u in common.BASE_URLS:
-                u = common.client.getUrlHost(u)
-                if u in url:
-                    url = url.replace(common.client.getUrlHost(u),fmovies_base)
-                    break
+            if ('fmovies.' in url or 'bmovies.' in url):
+                url = url.replace(common.client.getUrlHost(url),fmovies_base)
+            elif ('9anime.' in url):
+                url = url.replace(common.client.getUrlHost(url),anime_base)
 
-            #Log("BM : %s" % url)
+            if common.DEV_DEBUG == True and Prefs["use_debug"] == True:
+                Log("Save BM-2 : %s" % url)
 
             if url not in items_in_bm:
                 items_in_bm.append({'title':stitle,'url':url,'summary':summary,'thumb':thumb})
@@ -276,6 +314,9 @@ def LoadBookmarks(**kwargs):
 
     bkup_file = Core.storage.join_path(resources_path, 'bookmarks.json')
 
+    fmovies_base = fmovies.BASE_URL.replace('https://','').replace('http://','')
+    anime_base = common.ANIME_URL.replace('https://','').replace('http://','')
+
     if Core.storage.file_exists(bkup_file) and (Core.storage.file_size(bkup_file) != 0):
         try:
             with io.open(bkup_file, 'r', encoding='utf8') as f:
@@ -294,6 +335,19 @@ def LoadBookmarks(**kwargs):
                 thumb = item['thumb']
 
                 url = common.FixUrlInconsistencies(url)
+                url = common.FixUrlInconsistencies2(url)
+                url = common.FixUrlInconsistencies3(url)
+
+                if common.DEV_DEBUG == True and Prefs["use_debug"] == True:
+                    Log("Load BM-1 : %s" % url)
+
+                if ('fmovies.' in url or 'bmovies.' in url):
+                    url = url.replace(common.client.getUrlHost(url),fmovies_base)
+                elif ('9anime.' in url):
+                    url = url.replace(common.client.getUrlHost(url),anime_base)
+
+                if common.DEV_DEBUG == True and Prefs["use_debug"] == True:
+                    Log("Load BM-2 : %s" % url)
 
                 Dict[title+'-'+E(url)] = (title + 'Key5Split' + url +'Key5Split'+ summary + 'Key5Split' + thumb)
 
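Both save and load normalize every stored URL onto the currently active base domain, so bookmarks survive the site's frequent domain moves. A standalone sketch of the rebasing step, with urlparse standing in for client.getUrlHost:

    # Sketch: rewrite a bookmark's host onto the active base (Python 2 urlparse).
    import urlparse

    def rebase_url(url, fmovies_base, anime_base):
        host = urlparse.urlparse(url).netloc    # what client.getUrlHost returns
        if 'fmovies.' in url or 'bmovies.' in url:
            return url.replace(host, fmovies_base)
        elif '9anime.' in url:
            return url.replace(host, anime_base)
        return url                              # external-site URLs keep their host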
@@ -307,7 +361,8 @@ def SaveConfig(**kwargs):
 ######################################################################################
 def SaveConfig(**kwargs):
 
-    fmovies_base = fmovies.BASE_URL.replace('https://','')
+    fmovies_base = fmovies.BASE_URL.replace('https://','').replace('http://','')
+    anime_base = common.ANIME_URL.replace('https://','').replace('http://','')
 
     config = {}
     items_in_recent = []
@@ -320,24 +375,26 @@ def SaveConfig(**kwargs):
 
     for each in Dict:
         longstring = str(Dict[each])
-        if (('fmovies.' in longstring or 'bmovies.' in longstring) or common.isArrayValueInString(common.EXT_SITE_URLS, longstring) == True) and 'Key5Split' in longstring:
+        if (('fmovies.' in longstring or 'bmovies.' in longstring or '9anime.' in longstring) or common.isArrayValueInString(common.EXT_SITE_URLS, longstring) == True) and 'Key5Split' in longstring:
             stitle = unicode(longstring.split('Key5Split')[0])
             url = longstring.split('Key5Split')[1]
             summary = unicode(longstring.split('Key5Split')[2])
             thumb = longstring.split('Key5Split')[3]
 
             url = common.FixUrlInconsistencies(url)
-            url = url.replace('www.','')
+            url = common.FixUrlInconsistencies2(url)
+            url = common.FixUrlInconsistencies3(url)
 
-            #Log("BM : %s" % url)
+            if common.DEV_DEBUG == True and Prefs["use_debug"] == True:
+                Log("Save Config BM-1 : %s" % url)
 
-            for u in common.BASE_URLS:
-                u = common.client.getUrlHost(u)
-                if u in url:
-                    url = url.replace(common.client.getUrlHost(u),fmovies_base)
-                    break
+            if ('fmovies.' in url or 'bmovies.' in url):
+                url = url.replace(common.client.getUrlHost(url),fmovies_base)
+            elif ('9anime.' in url):
+                url = url.replace(common.client.getUrlHost(url),anime_base)
 
-            #Log("BM : %s" % url)
+            if common.DEV_DEBUG == True and Prefs["use_debug"] == True:
+                Log("Save Config BM-2 : %s" % url)
 
             if url not in items_in_bm:
                 items_in_bm.append({'title':stitle,'url':url,'summary':summary,'thumb':thumb})
@@ -348,7 +405,7 @@ def SaveConfig(**kwargs):
 
     for each in Dict:
         longstring = str(Dict[each])
-        if (('fmovies.' in longstring or 'bmovies.' in longstring) or common.isArrayValueInString(common.EXT_SITE_URLS, longstring) == True) and 'RR44SS' in longstring:
+        if (('fmovies.' in longstring or 'bmovies.' in longstring or '9anime.' in longstring) or common.isArrayValueInString(common.EXT_SITE_URLS, longstring) == True) and 'RR44SS' in longstring:
             longstringsplit = longstring.split('RR44SS')
             urls_list.append({'key': each, 'time': longstringsplit[4], 'val': longstring})
 
@@ -356,8 +413,6 @@ def SaveConfig(**kwargs):
 
     newlist = sorted(urls_list, key=lambda k: k['time'], reverse=True)
 
-    fmovies_base = fmovies.BASE_URL.replace('https://','')
-
     for each in newlist:
         longstring = each['val']
 
@@ -368,26 +423,21 @@ def SaveConfig(**kwargs):
             thumb = longstringsplit[3]
             timestr = longstringsplit[4]
 
-            ES = ''
-            if common.ES_API_URL.lower() in longstring.lower():
-                ES = common.EMOJI_EXT
-            if common.ANIME_URL.lower() in longstring.lower():
-                ES = common.EMOJI_ANIME
-
-            show = True
             url = common.FixUrlInconsistencies(url)
-            url = url.replace('www.','')
+            url = common.FixUrlInconsistencies2(url)
+            url = common.FixUrlInconsistencies3(url)
 
-            #Log("BM : %s" % url)
+            if common.DEV_DEBUG == True and Prefs["use_debug"] == True:
+                Log("Save Config RECENT-1 : %s" % url)
 
-            for u in common.BASE_URLS:
-                u = common.client.getUrlHost(u)
-                if u in url:
-                    url = url.replace(common.client.getUrlHost(u),fmovies_base)
-                    break
+            if ('fmovies.' in url or 'bmovies.' in url):
+                url = url.replace(common.client.getUrlHost(url),fmovies_base)
+            elif ('9anime.' in url):
+                url = url.replace(common.client.getUrlHost(url),anime_base)
 
-            #Log("BM : %s" % url)
-
+            if common.DEV_DEBUG == True and Prefs["use_debug"] == True:
+                Log("Save Config RECENT-2 : %s" % url)
+
             if url not in items_in_recent:
                 items_in_recent.append(url)
                 items_in_recentlisting.append({'title':stitle, 'url':url, 'summary':summary, 'thumb':thumb, 'time':timestr})
@@ -440,6 +490,9 @@ def LoadConfig(**kwargs):
     file_read = None
     config = {}
 
+    fmovies_base = fmovies.BASE_URL.replace('https://','').replace('http://','')
+    anime_base = common.ANIME_URL.replace('https://','').replace('http://','')
+
     try:
         bkup_file = Core.storage.join_path(resources_path, 'config.json')
         if Core.storage.file_exists(bkup_file) and (Core.storage.file_size(bkup_file) != 0):
@@ -580,16 +633,66 @@ def SetBaseUrl(url):
     else:
         return MyMessage('Set Base URL','Base URL set to %s' % fmovies.BASE_URL)
 
+####################################################################################################
+@route(PREFIX+'/Set9BaseUrl')
+def Set9BaseUrl(url):
+    common.ANIME_URL = url
+    common.ANIME_SEARCH_URL = common.ANIME_URL + '/search?keyword=%s'
+
+    RED_URL = None
+    RED_Bool = False
+    if common.CHECK_9BASE_URL_REDIRECTION == True:
+        try:
+            RED_URL = common.client.getRedirectingUrl(common.ANIME_URL).strip("/")
+        except Exception as e:
+            Log("Error in geturl : %s" % e)
+
+        if RED_URL != None and 'http' in RED_URL and common.ANIME_URL != RED_URL:
+            Log("***9Anime Base URL has been overridden and set based on redirection: %s ***" % RED_URL)
+            common.ANIME_URL = RED_URL
+            RED_Bool = True
+            if common.ANIME_URL not in common.EXT_SITE_URLS:
+                common.EXT_SITE_URLS.append(common.ANIME_URL)
+
+    if RED_Bool == True:
+        return MyMessage('Set 9Anime Base URL','Base URL (Redirecting) set to %s' % common.ANIME_URL)
+    else:
+        return MyMessage('Set 9Anime Base URL','Base URL set to %s' % common.ANIME_URL)
+
+####################################################################################################
+@route(PREFIX+'/SetExtListUrl')
+def SetExtListUrl(url):
+    common.ES_API_URL = url
+
+    RED_URL = None
+    RED_Bool = False
+    if common.CHECK_EXTLIST_URL_REDIRECTION == True:
+        try:
+            RED_URL = common.client.getRedirectingUrl(common.ES_API_URL).strip("/")
+        except Exception as e:
+            Log("Error in geturl : %s" % e)
+
+        if RED_URL != None and 'http' in RED_URL and common.ES_API_URL != RED_URL:
+            Log("***External Listing URL has been overridden and set based on redirection: %s ***" % RED_URL)
+            common.ES_API_URL = RED_URL
+            RED_Bool = True
+
+    if RED_Bool == True:
+        return MyMessage('Set External Listing URL','External Listing URL (Redirecting) set to %s' % common.ES_API_URL)
+    else:
+        return MyMessage('Set External Listing URL','External Listing URL set to %s' % common.ES_API_URL)
+
 ####################################################################################################
 @route(PREFIX+'/SetAnimeBaseUrl')
 def SetAnimeBaseUrl():
-    common.ANIME_URL = 'https://%s.%s' % (common.ANIME_KEY, common.ANIME_DOM)
+
     ANIME_URL_T = common.client.getRedirectingUrl(common.ANIME_URL).strip("/")
     if ANIME_URL_T != None and 'http' in ANIME_URL_T and common.ANIME_URL != ANIME_URL_T:
         Log("***Base ANIME_URL has been overridden and set based on redirection: %s ***" % ANIME_URL_T)
         common.ANIME_URL = ANIME_URL_T
+
+    if common.ANIME_URL not in common.EXT_SITE_URLS:
+        common.EXT_SITE_URLS.append(common.ANIME_URL)
 
     common.ANIME_SEARCH_URL = common.ANIME_URL + '/search?keyword=%s'
-    common.EXT_SITE_URLS = [common.ANIME_URL, common.ES_API_URL]
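All three Set* routes share the redirection-detector idea: fetch the configured URL, and if the HTTP client recorded a redirect chain, adopt the final URL instead. A sketch with requests, which client.getRedirectingUrl wraps:

    # Sketch: adopt the post-redirect URL, keeping the configured one on error.
    import requests

    def detect_redirect(url, timeout=30):
        final_url = url
        try:
            r = requests.get(url, timeout=(timeout, timeout))
            if r.history:                      # at least one 3xx hop was followed
                final_url = r.url.strip('/')
        except Exception:
            pass                               # network failure: keep configured URL
        return final_url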
 
 ####################################################################################################
 @route(PREFIX+'/imdbID')
diff --git a/Contents/DefaultPrefs.json b/Contents/DefaultPrefs.json
index f9ea1fa..c69427b 100644
--- a/Contents/DefaultPrefs.json
+++ b/Contents/DefaultPrefs.json
@@ -1 +1 @@
-[ { "id": "new_base_url", "label": "Base site url", "type": "enum", "values": ["https://bmovies.is","https://bmovies.to","https://bmovies.pro","https://bmovies.online","https://bmovies.club","https://bmovies.ru","https://fmovies.to","https://fmovies.is","https://fmovies.se","https://fmovies.taxi","https://ffmovies.ru"], "default": "https://fmovies.taxi" }, { "id": "use_quick_init", "label": "Enable quick Initialization (Disable this when diagnosing issues)", "type": "bool", "default": "false" }, { "id": "cache_expiry_time", "label": "Cache Expiry Time (in mins.)", "type": "text", "default": "100" }, { "id": "dont_fetch_more_info", "label": "No Extra Info. for Nav. Pages (Speeds Up Navigation)", "type": "bool", "default": "false" }, { "id": "use_https_alt", "label": "Use Alternate SSL/TLS", "type": "bool", "default": "false" }, { "id": "use_web_proxy", "label": "Use SSL Web-Proxy", "type": "bool", "default": "false" }, { "id": "disable_extsources", "label": "Disable External Sources", "type": "bool", "default": "false" }, { "id": "disable_downloader", "label": "Disable Downloading Sources (Please read Plex Privacy policy)", "type": "bool", "default": "true" }, { "id": "control_concurrent_src_threads", "label": "Number of concurrent Source Searching Threads", "type": "enum", "values": ["2","3","4","5","6","7","8","9","10","12","14","16","18","20","25","30","40","50"], "default": "4" }, { "id": "download_connections", "label": "Number of concurrent Download Threads", "type": "enum", "values": ["1","2","3","4","5","6","7","8","9","10"], "default": "2" }, { "id": "download_speed_limit", "label": "Limit Aggregate Download Speed (KB/s)", "type": "enum", "values": ["0","128","256","512","1024","2048","5120","10240","20480","30720","40960","51200"], "default": "0" }, { "id": "autopilot_schedule", "label": "AutoPilot Scheduled Time to Run in the day", "type": "enum", "values": ["0","1","2","3","4","5","6","7","8","9","10","12","13","14","15","16","17","18","19","20","21","22","23"], "default": "4" }, { "id": "use_linkchecker", "label": "Use LinkChecker for Videos", "type": "bool", "default": "false" }, { "id": "ca_api_key", "label": "External Sources Search API Key", "type": "text", "option": "hidden", "secure": "true", "default": "UTBFeU1ERTM=" }, { "id": "control_flixanity_user_pass", "label": "Flixanity User:Pass (Signup: https://flixanity.mobi", "type": "text", "option": "hidden", "secure": "true", "default": "" }, { "id": "control_videospider_api_key", "label": "VideoSpider API Key (Signup: https://videospider.in/signup.php)", "type": "text", "option": "hidden", "secure": "true", "default": "" }, { "id": "use_openload_pairing", "label": "Use OpenLoad (PhantomJS is primary, API Login:Key is secondary followed by Pairing. Disabled uses USS as primary)", "type": "bool", "default": "true" }, { "id": "use_phantomjs", "label": "Use PhantomJS (Binary download required)", "type": "enum", "values": ["No","Yes - Threads Only","Yes - Universally"], "default": "No" }, { "id": "control_phantomjs_path", "label": "Absolute path to PhantomJS folder (folder containing the binary file)", "type": "text", "default": "" }, { "id": "plextv", "label": "Auth Admin through Plex.tv (else use localhost)", "type": "bool", "default": "false" }, { "id": "use_debug", "label": "Enable Debug Mode (might show IP and computer Username in Logs)", "type": "bool", "default": "false" } ]
\ No newline at end of file
+[ { "id": "new_base_url", "label": "Base site url", "type": "enum", "values": ["https://bmovies.is","https://bmovies.to","https://bmovies.pro","https://bmovies.online","https://bmovies.club","https://bmovies.ru","https://fmovies.to","https://fmovies.is","https://fmovies.se","https://fmovies.taxi","https://ffmovies.ru"], "default": "https://fmovies.taxi" }, { "id": "use_quick_init", "label": "Enable quick Initialization (Disable this when diagnosing issues)", "type": "bool", "default": "false" }, { "id": "cache_expiry_time", "label": "Cache Expiry Time (in mins.)", "type": "text", "default": "100" }, { "id": "dont_fetch_more_info", "label": "No Extra Info. for Nav. Pages (Speeds Up Navigation)", "type": "bool", "default": "false" }, { "id": "use_https_alt", "label": "Use Alternate SSL/TLS", "type": "bool", "default": "false" }, { "id": "use_web_proxy", "label": "Use SSL Web-Proxy", "type": "bool", "default": "false" }, { "id": "disable_extsources", "label": "Disable External Sources", "type": "bool", "default": "false" }, { "id": "disable_downloader", "label": "Disable Downloading Sources (Please read Plex Privacy policy)", "type": "bool", "default": "true" }, { "id": "control_concurrent_src_threads", "label": "Number of concurrent Source Searching Threads", "type": "enum", "values": ["2","3","4","5","6","7","8","9","10","12","14","16","18","20","25","30","40","50"], "default": "4" }, { "id": "download_connections", "label": "Number of concurrent Download Threads", "type": "enum", "values": ["1","2","3","4","5","6","7","8","9","10"], "default": "2" }, { "id": "download_speed_limit", "label": "Limit Aggregate Download Speed (KB/s)", "type": "enum", "values": ["0","128","256","512","1024","2048","5120","10240","20480","30720","40960","51200"], "default": "0" }, { "id": "autopilot_schedule", "label": "AutoPilot Scheduled Time to Run in the day", "type": "enum", "values": ["0","1","2","3","4","5","6","7","8","9","10","12","13","14","15","16","17","18","19","20","21","22","23"], "default": "4" }, { "id": "use_linkchecker", "label": "Use LinkChecker for Videos", "type": "bool", "default": "false" }, { "id": "ca_api_key", "label": "External Sources Search API Key", "type": "text", "option": "hidden", "secure": "true", "default": "UTBFeU1ERTM=" }, { "id": "control_flixanity_user_pass", "label": "Flixanity User:Pass (Signup: https://flixanity.mobi)", "type": "text", "option": "hidden", "secure": "true", "default": "" }, { "id": "control_videospider_api_key", "label": "VideoSpider API_Key:Secret_Key (Signup: https://videospider.in/signup.php)", "type": "text", "option": "hidden", "secure": "true", "default": "" }, { "id": "use_openload_pairing", "label": "Use OpenLoad (PhantomJS is primary, API Login:Key is secondary followed by Pairing. Disabled uses USS as primary)", "type": "bool", "default": "true" }, { "id": "use_phantomjs", "label": "Use PhantomJS (Binary download required)", "type": "enum", "values": ["No","Yes - Threads Only","Yes - Universally"], "default": "No" }, { "id": "control_phantomjs_path", "label": "Absolute path to PhantomJS folder (folder containing the binary file)", "type": "text", "default": "" }, { "id": "plextv", "label": "Auth Admin through Plex.tv (else use localhost)", "type": "bool", "default": "false" }, { "id": "use_debug", "label": "Enable Debug Mode (might show IP and computer Username in Logs)", "type": "bool", "default": "false" } ]
\ No newline at end of file
diff --git a/Contents/Libraries/Shared/resources/lib/libraries/client.py b/Contents/Libraries/Shared/resources/lib/libraries/client.py
index 1b8a6a2..b4e7ddd 100644
--- a/Contents/Libraries/Shared/resources/lib/libraries/client.py
+++ b/Contents/Libraries/Shared/resources/lib/libraries/client.py
@@ -329,6 +331,8 @@ def redirect_request(self, req, fp, code, msg, headers, newurl):
 
         elif output == 'geturl':
             result = response.geturl()
+            if result == None:
+                result = url
 
         elif output == 'headers':
             content = response.headers
@@ -358,12 +360,18 @@ def redirect_request(self, req, fp, code, msg, headers, newurl):
             setIP6()
         return
 
-def simpleCheck(link, headers={}, cookie={}, retError=False, retry429=False, cl=3):
+def simpleCheck(link, headers={}, cookie={}, retError=False, retry429=False, cl=3, timeout=None):
     try:
         code = '0'
         size = '0'
-        red_url = None
-        r = requests.get(link, headers=headers, cookies=cookie, stream=True, verify=False, allow_redirects=True)
+        red_url = None
+
+        try:
+            timeout = int(timeout)
+        except:
+            timeout = GLOBAL_TIMEOUT_FOR_HTTP_REQUEST
+
+        r = requests.get(link, headers=headers, cookies=cookie, stream=True, verify=False, allow_redirects=True, timeout=(timeout, timeout))
 
         if retry429 == True:
             c = 0
@@ -389,10 +397,17 @@ def simpleCheck(link, headers={}, cookie={}, retError=False, retry429=False, cl=
     else:
         return code, red_url, size
 
-def getRedirectingUrl(url, headers=None):
+
+def getRedirectingUrl(url, headers=None, timeout=None):
     red = url
+
+    try:
+        timeout = int(timeout)
+    except:
+        timeout = GLOBAL_TIMEOUT_FOR_HTTP_REQUEST
+
     try:
-        response = requests.get(url)
+        response = requests.get(url, timeout=(timeout, timeout))
         if headers != None:
             response.headers = headers
         if response.history:
@@ -401,9 +416,32 @@ def getRedirectingUrl(url, headers=None):
         pass
     return red
 
-def getFileSize(link, headers=None, retError=False, retry429=False, cl=3):
+def getFileSize(link, headers=None, retError=False, retry429=False, cl=3, timeout=None):
     try:
-        r = requests.get(link, headers=headers, stream=True, verify=False, allow_redirects=True)
+        try:
+            timeout = int(timeout)
+        except:
+            timeout = GLOBAL_TIMEOUT_FOR_HTTP_REQUEST
+
+        r = requests.get(link, headers=headers, stream=True, verify=False, allow_redirects=True, timeout=(timeout, timeout))
+
+        if 'Content-length' not in r.headers:
+            try:
+                # https://stackoverflow.com/questions/52044489/how-to-get-content-length-for-google-drive-download
+                if headers == None:
+                    headers = {'Range':'bytes=0-'}
+                else:
+                    headers['Range'] = 'bytes=0-'
+                r = requests.get(link,headers=headers,stream=True).headers['Content-Range']
+                #contleng=int(re.split('\W+',r))[-1]
+                size = int(r.partition('/')[-1])
+                #contrange=int(re.split('\W+',r))[-2]
+                if retError == True:
+                    return size, ''
+                else:
+                    return size
+            except:
+                pass
 
         if headers != None and 'Content-length' in headers:
             r.headers = headers
@@ -412,7 +450,7 @@ def getFileSize(link, headers=None, retError=False, retry429=False, cl=3):
             c = 0
             while r.status_code == 429 and c < cl:
                 time.sleep(5)
-                r = requests.get(link, headers=headers, stream=True, verify=False, allow_redirects=True)
+                r = requests.get(link, headers=headers, stream=True, verify=False, allow_redirects=True, timeout=(timeout, timeout))
                 if headers != None and 'Content-length' in headers:
                     r.headers = headers
                 c += 1
@@ -427,8 +465,11 @@ def getFileSize(link, headers=None, retError=False, retry429=False, cl=3):
         else:
             return size
     except Exception as e:
+        err = '{}'.format(e)
+        regex = re.compile('[()\\/!?;\'",]')
+        err = regex.sub('', err)
         if retError == True:
-            return 0, '{}'.format(e)
+            return 0, err
         else:
             return 0
 
diff --git a/Contents/Libraries/Shared/resources/lib/libraries/control.py b/Contents/Libraries/Shared/resources/lib/libraries/control.py
index 011c589..e275b8b 100644
--- a/Contents/Libraries/Shared/resources/lib/libraries/control.py
+++ b/Contents/Libraries/Shared/resources/lib/libraries/control.py
@@ -35,10 +35,13 @@
 setting_dict = {}
 control_json = {}
 doPrint = False
+debug = False
 
 phantomjs_choices = ["No","Yes - Threads Only","Yes - Universally"]
-ThreadsType = {'0':'Main', '1':'Interface', '2':'Download', '3':'AutoPilot' ,'4':'Provider', '5':'Host', '6':'Proxy'}
+ThreadsType = {'0':'Main', '1':'Interface', '2':'Download', '3':'AutoPilot' ,'4':'Provider', '5':'Host', '6':'Proxy', '7':'Thread'}
 Threads = []
+ThreadBlockOper = [False]
+
 
 def setting(key):
     if key in setting_dict.keys():
@@ -59,14 +62,29 @@ def set_setting(key, value):
 
     setting_dict[key] = value
 
-def AddThread(name, desc, start_time, type, persist_bool, uid):
-    Threads.append({'name':name, 'desc':desc, 'start_time':start_time, 'type':ThreadsType[type], 'persist':persist_bool, 'uid':uid})
+def AddThread(name, desc, start_time, type, persist_bool, uid, thread=None):
+    while ThreadBlockOper[0] == True:
+        time.sleep(0.1)
+    ThreadBlockOper[0] = True
+    try:
+        Threads.append({'name':name, 'desc':desc, 'start_time':start_time, 'type':ThreadsType[type], 'persist':persist_bool, 'uid':uid, 'thread':thread})
+    except Exception as e:
+        log2('Error in AddThread %s' % e, type='CRITICAL')
+
+    ThreadBlockOper[0] = False
 
 def RemoveThread(uid):
-    for t in Threads:
-        if t['uid'] == uid:
-            Threads.remove(t)
-            break
+    while ThreadBlockOper[0] == True:
+        time.sleep(0.1)
+    ThreadBlockOper[0] = True
+    try:
+        for t in Threads:
+            if t['uid'] == uid:
+                Threads.remove(t)
+                break
+    except Exception as e:
+        log2('Error in RemoveThread %s' % e, type='CRITICAL')
+
+    ThreadBlockOper[0] = False
 
 def getThreads():
     return Threads
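AddThread/RemoveThread now serialize access to the shared Threads list with a one-element flag list that works as a spin lock (a list, so the flag can be mutated without global statements); threading.Lock would be the stdlib equivalent of the same pattern. The shape of it:

    # Sketch: the busy-wait flag guarding the Threads registry.
    import time

    ThreadBlockOper = [False]

    def with_registry(oper):
        while ThreadBlockOper[0] == True:      # wait for the current writer
            time.sleep(0.1)
        ThreadBlockOper[0] = True
        try:
            oper()                             # mutate the shared Threads list
        finally:
            ThreadBlockOper[0] = False         # always release the flag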
diff --git a/Contents/Libraries/Shared/resources/lib/libraries/mega.py b/Contents/Libraries/Shared/resources/lib/libraries/mega.py
index 8e8ee76..9fdaf85 100644
--- a/Contents/Libraries/Shared/resources/lib/libraries/mega.py
+++ b/Contents/Libraries/Shared/resources/lib/libraries/mega.py
@@ -655,20 +655,72 @@ def _login(self):
                 encrypt_key(str_to_a32(tsid[:16]), self.master_key))
             if key_encrypted == tsid[-16:]:
                 self.sid = resp['tsid']
-
-    def _api_request(self, data):
-        params = {'id': self.sequence_num}
+
+    def getfile(self, file_id, file_key):
+        #key = base64_to_a32(file_key)
+        #k = (key[0] ^ key[4], key[1] ^ key[5], key[2] ^ key[6], key[3] ^ key[7])
+        #iv = key[4:6] + (0, 0)
+        #meta_mac = key[6:8]
+
+        file = self._api_request({'a': 'g', 'g': 1, 'p': file_id})
+        dl_url = file['g']
+        size = file['s']
+        print "Downloading (size: %d), url = %s" % (size, dl_url)
+
+        #attributes = base64_url_encode(file['at'])
+        #attributes = decrypt_attr(attributes, k)
+
+        #print "Downloading %s (size: %d), url = %s" % (attributes['n'], size, dl_url)
+        # return
+
+        # infile = urllib.urlopen(dl_url)
+        # outfile = open(attributes['n'], 'wb')
+        # decryptor = AES.new(a32_to_str(k), AES.MODE_CTR, counter = Counter.new(128, initial_value = ((iv[0] << 32) + iv[1]) << 64))
+
+        # file_mac = [0, 0, 0, 0]
+        # for chunk_start, chunk_size in sorted(get_chunks(file['s']).items()):
+        #     chunk = infile.read(chunk_size)
+        #     chunk = decryptor.decrypt(chunk)
+        #     outfile.write(chunk)
+
+        #     chunk_mac = [iv[0], iv[1], iv[0], iv[1]]
+        #     for i in xrange(0, len(chunk), 16):
+        #         block = chunk[i:i+16]
+        #         if len(block) % 16:
+        #             block += '\0' * (16 - (len(block) % 16))
+        #         block = str_to_a32(block)
+        #         chunk_mac = [chunk_mac[0] ^ block[0], chunk_mac[1] ^ block[1], chunk_mac[2] ^ block[2], chunk_mac[3] ^ block[3]]
+        #         chunk_mac = aes_cbc_encrypt_a32(chunk_mac, k)
+
+        #     file_mac = [file_mac[0] ^ chunk_mac[0], file_mac[1] ^ chunk_mac[1], file_mac[2] ^ chunk_mac[2], file_mac[3] ^ chunk_mac[3]]
+        #     file_mac = aes_cbc_encrypt_a32(file_mac, k)
+
+        # outfile.close()
+        # infile.close()
+
+        # if (file_mac[0] ^ file_mac[1], file_mac[2] ^ file_mac[3]) != meta_mac:
+        #     print "MAC mismatch"
+        # else:
+        #     print "MAC OK"
+
+        #getfile('RtQFAZZQ', 'OH8OnHm0VFw-9IzkYQa7VUdsjMp1G7hucXEk7QIZWvE')
+
+    def files(self, f, mk):
+        data = [{'a': "f", 'c': 1, 'r': 1, 'ca': 1}]
+        params = {'id': self.sequence_num, 'n':f}
         self.sequence_num += 1
 
-        if self.sid:
-            params.update({'sid': self.sid})
+        #if self.sid:
+        #    params.update({'sid': self.sid})
 
         #ensure input data is a list
         if not isinstance(data, list):
             data = [data]
 
-        url = '%s://g.api.%s/cs?%s' % (self.schema, self.domain, urllib.urlencode(params))
-
+        url = '%s://g.api.%s/cs?%s&domain=meganz&lang=en' % (self.schema, self.domain, urllib.urlencode(params))
+        #print url
+        #print 'https://g.api.mega.co.nz/cs?id=-1794764883&n=y6IGEQ6J&domain=meganz&lang=en'
+
         if 'use_client_lib' in self.options.keys() and self.options['use_client_lib']:
             hr = client.request(url, post=dump_json(data), timeout=self.timeout, httpsskip=False)
             json_resp = parse_json(hr)
@@ -679,20 +731,90 @@ def _api_request(self, data):
             raise RequestError('HTTP not OK: %s %s' % (hr.status, hr.reason))
         json_resp = parse_json(hr.read())
-
-        if isinstance(json_resp, int):
-            raise RequestError('%s (%s)' % (MEGA_ERRORS.get(json_resp), json_resp))
-        if isinstance(json_resp[0], int):
-            raise RequestError('%s (%s)' % (MEGA_ERRORS.get(json_resp[0]), json_resp[0]))
-        return json_resp[0]
+
+        #print data
+        #print json_resp
+
+        files = json_resp[0]
+
+        #print 'files: %s' % files
+
+        ret_files = []
+        filter_extSources = []
+
+        for file in files['f']:
+            if file['t'] == 0:
+
+                ret_files.append(file)
+
+                #p = file['p']
+                #h = file['h']
+                #self.getfile(p, k)
+            elif file['t'] == 1:
+                folder_id = file['h']
+            elif file['t'] == 2:
+                root_id = file['h']   # Root ("Cloud Drive")
+            elif file['t'] == 3:
+                inbox_id = file['h']   # Inbox
+            elif file['t'] == 4:
+                trashbin_id = file['h']   # Trash Bin
+
+        filter_extSources = sorted(ret_files, key=lambda k: k['s'], reverse=True)
+        return filter_extSources
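files() drives the folder variant of the Mega API ('a': 'f', with the folder id passed as the n= query parameter) and keeps only plain file nodes: per the code above, t == 0 is a file, 1 a folder, and 2/3/4 the Cloud Drive root, Inbox and Trash Bin. Since the result comes back sorted by size descending, a caller can take the first node as the main video; a hedged caller-side sketch:

    # Sketch: pick the largest file node from a Mega folder listing.
    def largest_file_in_folder(mega, folder_id, folder_key):
        nodes = mega.files(folder_id, folder_key)   # only t == 0 nodes, size-sorted
        if not nodes:
            return None
        return nodes[0]                             # biggest first ('s' is the size)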
+
+    def _api_request(self, data):
+        try:
+            params = {'id': self.sequence_num}
+            self.sequence_num += 1
+
+            if self.sid:
+                params.update({'sid': self.sid})
+
+            #ensure input data is a list
+            if not isinstance(data, list):
+                data = [data]
+
+            url = '%s://g.api.%s/cs?%s' % (self.schema, self.domain, urllib.urlencode(params))
+
+            if 'use_client_lib' in self.options.keys() and self.options['use_client_lib']:
+                hr = client.request(url, post=dump_json(data), timeout=self.timeout, httpsskip=False)
+                json_resp = parse_json(hr)
+            else:
+                hr = send_http_request(url, data=dump_json(data), timeout=self.timeout)
+
+                if hr.status != 200:
+                    raise RequestError('HTTP not OK: %s %s' % (hr.status, hr.reason))
+                json_resp = parse_json(hr.read())
+
+            #print data
+            #print json_resp
+
+            if isinstance(json_resp, int):
+                raise RequestError('%s (%s)' % (MEGA_ERRORS.get(json_resp), json_resp))
+            if isinstance(json_resp[0], int):
+                raise RequestError('%s (%s)' % (MEGA_ERRORS.get(json_resp[0]), json_resp[0]))
+            return json_resp[0]
+        except:
+            return None
 
     @classmethod
     def _parse_url(self, url):
         """Returns (file_id, file_key."""
+        self.folderSupport = True
         i = url.find('/#!')
-        if i < 0:
-            raise RequestError('Key missing from URL.')
-        path = url[i + 3:].split('!')
+        if i >= 0:
+            path = url[i + 3:].split('!')
+        elif i < 0 and self.folderSupport == False:
+            i = url.find('/#F!')
+            if i >= 0:
+                raise RequestError('Folder URL Not Supported Yet.')
+            else:
+                raise RequestError('Key missing from URL.')
+        elif i < 0:
+            i = url.find('/#F!')
+            path = url[i + 4:].split('!')
+
+        #print path
+
         return path[:2]
 
     @classmethod
@@ -718,6 +840,7 @@ def download_url(self, url):
         """
         if self.sid is None:
             self._login()
+
         file_id, file_key = self._parse_url(url)
         file_key = base64_to_a32(file_key)   # if is_public:
         file_data = self._api_request({'a': 'g', 'g': 1, 'p': file_id})
@@ -760,30 +883,37 @@ def download_url(self, url):
                             (yield_size, file_size))
 
     def file_info(self, url):
-        file_id, file_key = self._parse_url(url)
-        file_key = base64_to_a32(file_key)   # if is_public:
-        file_data = self._api_request({'a': 'g', 'g': 1, 'p': file_id})
-        k = (file_key[0] ^ file_key[4], file_key[1] ^ file_key[5],
-             file_key[2] ^ file_key[6], file_key[3] ^ file_key[7])
-        iv = file_key[4:6] + (0, 0)
-        meta_mac = file_key[6:8]
-
-        # Seems to happens sometime... When this occurs, files are
-        # inaccessible also in the official also in the official web app.
-        # Strangely, files can come back later.
-        if 'g' not in file_data:
-            raise RequestError('File not accessible now.')
-        file_url = file_data['g']   # Can be non-ASCII UTF-8.
-        file_size = int(file_data['s'])   # Was already an int.
-        attribs = base64_url_decode(file_data['at'])
-        attribs = decrypt_attr(attribs, k)
-        file_name = attribs['n']   # Can be non-ASCII UTF-8.
-        key_str = a32_to_str(k)
-        #assert len(key_str) == 16
-        iv_str = struct.pack('>LLLL', iv[0], iv[1], 0, 0)
-        #assert len(iv_str) == 16
+        try:
+            file_id, file_key = self._parse_url(url)
+            file_key = base64_to_a32(file_key)   # if is_public:
+            file_data = self._api_request({'a': 'g', 'g': 1, 'p': file_id})
 
-        return {'name': file_name, 'size': file_size, 'url': file_url, 'key': key_str, 'iv': iv_str, 'id': file_id}
+            if file_data == None:
+                raise RequestError('File no longer available.')
+
+            k = (file_key[0] ^ file_key[4], file_key[1] ^ file_key[5],
+                 file_key[2] ^ file_key[6], file_key[3] ^ file_key[7])
+            iv = file_key[4:6] + (0, 0)
+            meta_mac = file_key[6:8]
+
+            # Seems to happen sometimes... When this occurs, files are
+            # inaccessible also in the official web app.
+            # Strangely, files can come back later.
+            if 'g' not in file_data:
+                raise RequestError('File not accessible now.')
+            file_url = file_data['g']   # Can be non-ASCII UTF-8.
+            file_size = int(file_data['s'])   # Was already an int.
+            attribs = base64_url_decode(file_data['at'])
+            attribs = decrypt_attr(attribs, k)
+            file_name = attribs['n']   # Can be non-ASCII UTF-8.
+            key_str = a32_to_str(k)
+            #assert len(key_str) == 16
+            iv_str = struct.pack('>LLLL', iv[0], iv[1], 0, 0)
+            #assert len(iv_str) == 16
+
+            return {'name': file_name, 'size': file_size, 'url': file_url, 'key': key_str, 'iv': iv_str, 'id': file_id}, ''
+        except Exception as e:
+            return None, e
 
     def directDecode(self, chunk, key_str, iv_str):
         yield_size = 0
@@ -884,21 +1014,66 @@ def download_mega_url(url, mega):
     #     sys.exit(2 * bool(had_error))
 
 def get_mega_dl_link(mega_url):
+    #fix_ssl()
+    try:
+        err = ''
+        file_ext = None
+        file_url = mega_url
+        file_size = 0
+        mega = Mega()
+        log(type='INFO',method='get_mega_dl_link',err='created Mega service')
+
+        try:
+            login = mega._login()
+            log(type='INFO',method='get_mega_dl_link',err='anon login')
+        except:
+            raise Exception('Could not create anon login')
+
+        if '#F!' in mega_url:
+            parts = mega_url.split('!')
+            dl_infos = mega.files(parts[1],parts[2])
+            dl_info = dl_infos[0]
+            #print dl_info
+            file_size = dl_info['s']
+            err = 'Folder URL Not Supported Yet.'
+        else:
+            dl_info, err = mega.file_info(mega_url)
+            if err == '':
+                log(type='INFO',method='get_mega_dl_link',err='created Mega downloader')
+
+                file_url = "%s/%s" % (dl_info['url'],dl_info['name'])
+                file_ext = dl_info['name'].split('.')
+                i = dl_info['name'].rfind('.')
+                if i > 0:
+                    file_ext = dl_info['name'][i:]
+                else:
+                    file_ext = '.%s' % file_ext[1]
+                file_size = dl_info['size']
+
+        return file_url, file_size, file_ext, err
+    except Exception as e:
+        err = '%s' % e
+        log(type='CRITICAL-ERROR',method='get_mega_dl_link',err=err)
+        return mega_url, 0, '.mp4', err
+
+def files(f,k):
     #fix_ssl()
     mega = Mega()
-    log(type='INFO',method='get_mega_dl_link',err='created Mega service')
+    log(type='INFO',method='getfile',err='created Mega service')
     login = mega._login()
-    log(type='INFO',method='get_mega_dl_link',err='anon login')
-    dl_info = mega.file_info(mega_url)
-    log(type='INFO',method='get_mega_dl_link',err='created Mega downloader')
-
-    file_url = "%s/%s" % (dl_info['url'],dl_info['name'])
-    file_ext = dl_info['name'].split('.')
-    file_ext = '.%s' % file_ext[1]
-
-    file_size = dl_info['size']
+    log(type='INFO',method='getfile',err='anon login')
+    mega.files(f,k)
+    log(type='INFO',method='getfile',err='Success')
 
-    return file_url, file_size, file_ext
+def getfile(f,k):
+    #fix_ssl()
+    mega = Mega()
+    log(type='INFO',method='getfile',err='created Mega service')
+    login = mega._login()
+    log(type='INFO',method='getfile',err='anon login')
+    mega.getfile(f,k)
+    log(type='INFO',method='getfile',err='Success')
+
 def test():
     fix_ssl()
diff --git a/Contents/Libraries/Shared/resources/lib/resolvers/__init__.py b/Contents/Libraries/Shared/resources/lib/resolvers/__init__.py
index 2f381f6..dd592e1 100644
--- a/Contents/Libraries/Shared/resources/lib/resolvers/__init__.py
+++ b/Contents/Libraries/Shared/resources/lib/resolvers/__init__.py
@@ -93,17 +93,24 @@ def request(url):
 
     return None
 
-def resolve(url):
+def resolve(url, page_url=None):
+    err = ''
     try:
-        ret = url
-        urlhost = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
+        try:
+            urlhost = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
+        except:
+            urlhost = re.findall('([\w]+[.][\w]+).*$', urlparse.urlparse(url.strip().lower()).netloc)[0]
+            urlhost = urlhost.split('.')[1]
+
         for host in sourceHostsCall:
+            log("resolve > Searching %s in host (%s)" % (urlhost, host['name']), logToControl=False)
             if urlhost in host['host']:
-                ret = host['call'].resolve(url)
-                break
-        return ret
-    except:
-        return url
+                log("resolve > Found %s in host (%s)" % (urlhost, host['name']))
+                return host['call'].resolve(url, page_url=page_url)
+        return ([url], err, None)   # if its hosted on a different host, return with no error and file-size check will validate it
+    except Exception as e:
+        err = '{}'.format(e)
+        return (None, err, None)
 
 def resolveHostname(h):
     try:
@@ -135,7 +142,7 @@ def createMeta(url, provider, logo, quality, links, key, riptype=None, vidtype='
 
         for item in links:
             if url == item['orig_url']:
-                log("%s has already been processed" % url)
+                log("createMeta > %s has already been processed" % url)
                 return links
 
     quality = fixquality(quality)
@@ -157,13 +164,13 @@ def createMeta(url, provider, logo, quality, links, key, riptype=None, vidtype='
         else:
             riptype_def = riptype
 
         for host in sourceHostsCall:
-            log("Searching %s in host (%s)" % (urlhost, host['name']), logToControl=False)
+            log("createMeta > Searching %s in host (%s)" % (urlhost, host['name']), logToControl=False)
             if urlhost in host['host']:
-                log("Found %s in host (%s)" % (urlhost, host['name']))
+                log("createMeta > Found %s in host (%s)" % (urlhost, host['name']))
                 return host['call'].createMeta(url, provider, logo, quality, links, key, riptype_def, vidtype=vidtype, lang=lang, sub_url=sub_url, txt=txt, file_ext=file_ext, testing=testing, poster=poster, headers=headers, page_url=page_url)
 
-        log("urlhost '%s' not found in host/resolver plugins - creating generic meta for external services" % urlhost)
+        log("createMeta > urlhost '%s' not found in host/resolver plugins - creating generic meta for Plex services" % urlhost)
 
         quality = file_quality(url, quality)
 
@@ -172,7 +179,7 @@ def createMeta(url, provider, logo, quality, links, key, riptype=None, vidtype='
         else:
             type = riptype
 
-        links_m.append({'source':urlhost, 'maininfo':'', 'titleinfo':'', 'quality':quality, 'vidtype':vidtype, 'rip':type, 'provider':provider, 'orig_url':url, 'url':url, 'durl':url, 'urldata':urldata, 'params':params, 'logo':logo, 'online':'Unknown', 'allowsDownload':False, 'resumeDownload':False, 'allowsStreaming':True, 'key':key, 'enabled':True, 'fs':int(0), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':urlhost, 'misc':{'player':'eplayer', 'gp':False}})
+        links_m.append({'source':urlhost, 'maininfo':'', 'titleinfo':'', 'quality':quality, 'vidtype':vidtype, 'rip':type, 'provider':provider, 'orig_url':url, 'url':url, 'durl':url, 'urldata':urldata, 'params':params, 'logo':logo, 'online':'Unknown', 'allowsDownload':False, 'resumeDownload':False, 'allowsStreaming':True, 'key':key, 'enabled':True, 'fs':int(0), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':urlhost, 'page_url':page_url, 'misc':{'player':'eplayer', 'gp':False}, 'seq':0})
 
     except Exception as e:
         log(type='ERROR', err="createMeta : %s url: %s" % (e.args, url))
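Both resolve() and createMeta() dispatch on the registrable part of the hostname, with the new fallback branch handling netlocs the primary regex rejects. Extracted as a sketch (Python 2 urlparse):

    # Sketch: hostname extraction for resolver dispatch, with the fallback.
    import re
    import urlparse

    def urlhost_of(url):
        netloc = urlparse.urlparse(url.strip().lower()).netloc
        try:
            return re.findall('([\w]+[.][\w]+)$', netloc)[0]    # e.g. 'mp4upload.com'
        except IndexError:
            # multi-label hosts: mirror the fallback's second-label pick
            return re.findall('([\w]+[.][\w]+).*$', netloc)[0].split('.')[1]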
'documentarymania.com','3donlinefilms.com','3dmoviesfullhd.com','totaleclips.com','freedocufilms.com','vidcloud.icu/download','xstreamcdn.com'] - self.netloc = ['imdb.com','media-imdb.com','einthusan.tv','vimeocdn.com','apple.com','akamaized.net','micetop.us','vidcdn.pro','fbcdn.net','cmovieshd.com', 'vcstream.to', 'documentarymania.com','3donlinefilms.com','3dmoviesfullhd.com','totaleclips.com','freedocufilms.com','vidcloud.icu/download','xstreamcdn.com'] + self.host = ['imdb.com','media-imdb.com','einthusan.tv','vimeocdn.com','apple.com','akamaized.net','micetop.us','vidcdn.pro','fbcdn.net','cmovieshd.com', 'vcstream.to', 'documentarymania.com','3donlinefilms.com','3dmoviesfullhd.com','totaleclips.com','freedocufilms.com'] + self.netloc = ['imdb.com','media-imdb.com','einthusan.tv','vimeocdn.com','apple.com','akamaized.net','micetop.us','vidcdn.pro','fbcdn.net','cmovieshd.com', 'vcstream.to', 'documentarymania.com','3donlinefilms.com','3dmoviesfullhd.com','totaleclips.com','freedocufilms.com'] self.quality = '1080p' self.loggertxt = [] self.captcha = False @@ -106,16 +106,15 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= try: items = self.process(url, quality, riptype, headers, page_url) + seq = 0 for item in items: - if 'vidcloud.icu/download' in url: - durl = url - url = item['src'] - else: - url = item['src'] - durl = url + + vidurl = item['src'] + durl = url allowsStreaming = item['allowsStreaming'] + allowsDownload = item['allowsDownload'] quality = item['quality'] riptype = item['riptype'] fs = item['fs'] @@ -125,10 +124,11 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= try: log(type='INFO',method='createMeta', err=u'durl:%s ; res:%s; fs:%s' % (durl,quality,fs)) - files_ret.append({'source':self.name, 'maininfo':txt, 'titleinfo':'', 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':url, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'misc':{'player':'eplayer', 'gp':True}}) + files_ret.append({'source':self.name, 'maininfo':txt, 'titleinfo':'', 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'eplayer', 'gp':True}, 'seq':seq}) except Exception as e: log(type='ERROR',method='createMeta', err=u'%s' % e) - files_ret.append({'source':urlhost, 'maininfo':txt, 'titleinfo':'', 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':url, 'durl':url, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 
'subdomain':client.geturlhost(url), 'misc':{'player':'eplayer', 'gp':True}}) + files_ret.append({'source':urlhost, 'maininfo':txt, 'titleinfo':'', 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':url, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'eplayer', 'gp':True}, 'seq':seq}) + seq += 1 except Exception as e: log(type='ERROR', err="createMeta : %s" % e.args) @@ -145,8 +145,8 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= return links - def resolve(self, url): - return resolve(url) + def resolve(self, url, page_url=None, **kwargs): + return resolve(url, page_url=page_url) def resolveHostname(self, host): return self.name @@ -156,7 +156,7 @@ def testLink(self, url): def process(self, url, q, r, headers, page_url): items = [] - + try: if 'vcstream.to' in url: id = re.compile('//.+?/(?:embed|f)/([0-9a-zA-Z-_]+)').findall(url)[0] @@ -187,57 +187,7 @@ def process(self, url, q, r, headers, page_url): paramsx = {'headers':headers} params = client.b64encode(json.dumps(paramsx, encoding='utf-8')) - items.append({'quality':q, 'riptype':r, 'src':u, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True}) - - elif 'xstreamcdn.com' in url: - id = re.compile('//.+?/(?:embed|v)/([0-9a-zA-Z-_]+)').findall(url)[0] - headersx = {'Referer': url, 'User-Agent': client.agent()} - post_data = {'r':page_url, 'd':'www.xstreamcdn.com'} - api_url = 'https://www.xstreamcdn.com/api/source/%s' % id - page_data = client.request(api_url, post=client.encodePostData(post_data), headers=headersx) - - j_data = json.loads(page_data) - success = j_data['success'] - if success == False: - raise Exception('API returned error: %s | Data: %s' % (api_url, post_data)) - else: - srcs = j_data['data'] - for src in srcs: - q = src['label'] - u = src['file'] - fs = client.getFileSize(u, retry429=True, headers=headers) - online = check(u) - u = client.request(u, output='geturl') - urldata = client.b64encode(json.dumps('', encoding='utf-8')) - params = client.b64encode(json.dumps('', encoding='utf-8')) - if headers != None: - paramsx = {'headers':headers} - params = client.b64encode(json.dumps(paramsx, encoding='utf-8')) - - items.append({'quality':q, 'riptype':r, 'src':u, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True}) - - elif 'vidcloud.icu/download' in url: - headersx = {'Referer': url, 'User-Agent': client.agent()} - page_data, head, ret, cookie = client.request(url, output='extended', headers=headersx) - try: - cookie = re.findall(r'Set-Cookie:(.*)', str(ret), re.MULTILINE)[0].strip() - except: - pass - headersx['Cookie'] = cookie - mp4_vids = re.findall(r'\"(http.*?.mp4.*?)\"',page_data) - - for u in mp4_vids: - u = u.strip().replace(' ','%20').replace('&','&') - fs = client.getFileSize(u, headers=headersx) - q = qual_based_on_fs(q,fs) - online = check(u, headers=headersx) - urldata = client.b64encode(json.dumps('', encoding='utf-8')) - params = client.b64encode(json.dumps('', encoding='utf-8')) - if headersx != None: - paramsx = {'headers':headers} - params = client.b64encode(json.dumps(paramsx, encoding='utf-8')) - - 
-					items.append({'quality':q, 'riptype':r, 'src':u, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':False})
+				items.append({'quality':q, 'riptype':r, 'src':u, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':True})
 
 			elif '3donlinefilms.com' in url or '3dmoviesfullhd.com' in url or 'freedocufilms.com' in url:
 				data = urlparse.parse_qs(url)
@@ -265,10 +215,6 @@ def process(self, url, q, r, headers, page_url):
 					u = data['file'][0]
 					u = u.replace('//freedocufilms','//www.freedocufilms')
 
-				#print headers
-				#u = '%s?file=%s' % (data['file'][0], data['src_file'][0].replace(' ',''))
-				#print u
-
 				try:
 					ret = client.request(l0, post=client.encodePostData(post_data),headers=headers, output='extended', XHR=True, cookie=cookie)
 				except Exception as e:
@@ -291,7 +237,22 @@ def process(self, url, q, r, headers, page_url):
 					paramsx = {'headers':headers}
 					params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
 
-				items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':False})
+				items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':False, 'allowsDownload':True})
+			else:
+				fs = client.getFileSize(url, retry429=True, headers=headers)
+				if fs == None or int(fs) == 0:
+					fs = client.getFileSize(url, retry429=True)
+				q = qual_based_on_fs(q,fs)
+				online = check(url)
+				urldata = client.b64encode(json.dumps('', encoding='utf-8'))
+				params = client.b64encode(json.dumps('', encoding='utf-8'))
+				if headers != None:
+					paramsx = {'headers':headers}
+					params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
+				allowsDownload = True
+				if '.m3u8' in url:
+					allowsDownload = False
+				items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':allowsDownload})
 		except Exception as e:
 			log(type='ERROR',method='process', err=u'%s' % e)
@@ -307,7 +268,7 @@ def process(self, url, q, r, headers, page_url):
 			if headers != None:
 				paramsx = {'headers':headers}
 				params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
-			items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True})
+			items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':True})
 
 		return items
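
With allowsDownload now carried per item instead of read from the class-wide self.allowsDownload, every branch of process() emits the same dict shape that createMeta() consumes above. A minimal sketch of that contract (make_item is a hypothetical helper, not plugin code; the values are illustrative):

	# Hypothetical helper showing the per-source item contract from process().
	# '.m3u8' manifests stay streamable but are excluded from downloading,
	# mirroring the new else-branch above.
	def make_item(src, q='720p', riptype='BRRIP', fs=0, online=True, params='', urldata=''):
		return {
			'quality': q,                # '1080p' / '720p' / '480p' label
			'riptype': riptype,          # rip tag passed through to createMeta()
			'src': src,                  # resolved video URL
			'fs': fs,                    # file size in bytes (0 = unknown)
			'online': online,            # result of check(src)
			'params': params,            # b64-encoded JSON, e.g. request headers
			'urldata': urldata,          # b64-encoded JSON resolver payload
			'allowsStreaming': True,
			'allowsDownload': '.m3u8' not in src,
		}

@@ -356,28 +317,26 @@ def T3DonlineFilms(url):
 		except:
 			pass
 
-		# u = '%s?file=%s' % (data['file'][0], data['src_file'][0].replace(' ',''))
-
 		paramsx = {'headers':headers}
 		params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
 
 	except Exception as e:
 		error = '%s' % e
 
-	return u, params, error
+	return u, error, params
 
-def resolve(url):
+def resolve(url, page_url=None, **kwargs):
 
 	params = client.b64encode(json.dumps('', encoding='utf-8'))
 	error = ''
 	u = url
 
 	if '3donlinefilms.com' in url or '3dmoviesfullhd.com' in url or 'freedocufilms.com' in url:
-		u, params, error = T3DonlineFilms(url)
-		return u, params, error
+		u, error, params = T3DonlineFilms(url)
+		return (u, error, params)
	else:
		if check(url) == False:
-			return None, params, 'Error in check !'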
+ return (None, 'Error in check !', params) - return u, params, error + return (u, error, params) def check(url, headers=None, cookie=None): try: diff --git a/Contents/Libraries/Shared/resources/lib/resolvers/host_gvideo.py b/Contents/Libraries/Shared/resources/lib/resolvers/host_gvideo.py index 127b058..c308393 100644 --- a/Contents/Libraries/Shared/resources/lib/resolvers/host_gvideo.py +++ b/Contents/Libraries/Shared/resources/lib/resolvers/host_gvideo.py @@ -87,7 +87,6 @@ def __init__(self): self.working = self.testWorking()[0] self.resolver = self.testResolver() self.msg = '' - #self.checkGetLinkAPI() self.UA = client.USER_AGENT self.init = True log(type='INFO', method='init', err=' -- Initializing %s %s %s End --' % (name, self.ver, self.update_date)) @@ -116,16 +115,7 @@ def info(self): def getLog(self): self.loggertxt = loggertxt return self.loggertxt - - def checkGetLinkAPI(self): - print "entering checkGetLinkAPI" - http_res, content = client.request(url='http://api.getlinkdrive.com', output='response', use_web_proxy=False) - print 'http://api.getlinkdrive.com Response: %s' % http_res - if http_res in client.HTTP_GOOD_RESP_CODES or http_res in client.GOOGLE_HTTP_GOOD_RESP_CODES_1: - self.useGetLinkAPI = True - else: - self.useGetLinkAPI = False - + def testWorking(self): try: testUrls = self.testUrl() @@ -193,23 +183,11 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, showspli videoData, headers, content, cookie = getVideoMetaData(url, httpsskip) try: cookie += '; %s' % content['Set-Cookie'] - # cookie_s = cookie.split(';') - # cookie_n = [] - # for cook in cookie_s: - # cook = cook.strip() - # if '=' in cook and cook not in cookie_n: - # cookie_n.append(cook) - # cookie = ('; '.join(x for x in sorted(cookie_n))) cookie_value = client.search_regex(r"DRIVE_STREAM=([^;]+);", cookie, 'cookie val',group=1) domain = client.search_regex(r"https?://([^\/]+)", url, 'host val', group=1) cookie = 'DRIVE_STREAM=%s; path=/; domain=.%s;' % (cookie_value, domain) except: pass - #print cookie - - #cookie = urllib.quote_plus(cookie).replace('+','%20').replace('%2F','/') - # DRIVE_STREAM%3Dva1wsBbVn3A%3B%20path%3D/%3B%20domain%3D.docs.google.com%3B - # DRIVE_STREAM%3DtV76KFL8a6k%3B+path%3D%2F%3B+domain%3D.docs.google.com%3B params = {'headers':headers,'cookie':cookie} params = json.dumps(params, encoding='utf-8') @@ -229,69 +207,44 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, showspli if txt != '': titleinfo = txt ntitleinfo = titleinfo - - - + file_ext1 = None + seq = 0 enabled = True try: - #udata = urldata(url, videoData=videoData, usevideoData=True) + #udata = urldata(url, videoData=videoData, usevideoData=True) + allowsStreaming = self.allowsStreaming if 'google.com/file' in url: idstr = '%s' % (url.split('/preview')[0].split('/edit')[0].split('/view')[0]) idstr = idstr.split('/') id = idstr[len(idstr)-1] try: - durl, f_res, fs = getFileLink(id, httpsskip) + durl, f_res, fs, file_ext1, err = getFileLink(id, httpsskip=httpsskip) except: fs = 0 durl = None + if file_ext1 != None: + file_ext = file_ext1 + if file_ext not in ['.mp4','.mkv','.avi']: + ntitleinfo = '%s%s' % (txt+' ' if len(txt)>0 else '', file_ext+' file') + allowsStreaming = False if durl != None: - files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':ntitleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':type, 'provider':provider, 'url':durl, 'durl':durl, 'urldata':createurldata(durl,quality), 'params':params, 'logo':logo, 'online':isOnline, 
'allowsDownload':self.allowsDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':enabled, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(durl), 'misc':{'player':'iplayer', 'gp':False}}) + files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':ntitleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':type, 'provider':provider, 'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':createurldata(durl,quality), 'params':params, 'logo':logo, 'online':isOnline, 'allowsDownload':self.allowsDownload, 'allowsStreaming':allowsStreaming, 'key':key, 'enabled':enabled, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(durl), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq}) + seq += 1 else: fs = client.getFileSize(url, retry429=True) - files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':ntitleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':type, 'provider':provider, 'orig_url':orig_url, 'url':url, 'durl':url, 'urldata':urldata('',''), 'params':params, 'logo':logo, 'online':isOnline, 'allowsDownload':self.allowsDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':enabled, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'misc':{'player':'eplayer', 'gp':False}}) + files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':ntitleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':type, 'provider':provider, 'orig_url':orig_url, 'url':url, 'durl':url, 'urldata':urldata('',''), 'params':params, 'logo':logo, 'online':isOnline, 'allowsDownload':False, 'allowsStreaming':allowsStreaming, 'key':key, 'enabled':enabled, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'eplayer', 'gp':False}, 'seq':seq}) + seq += 1 else: fs = client.getFileSize(url, retry429=True) - files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':ntitleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':type, 'provider':provider, 'orig_url':orig_url, 'url':url, 'durl':url, 'urldata':urldata('',''), 'params':params, 'logo':logo, 'online':isOnline, 'allowsDownload':self.allowsDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':enabled, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'misc':{'player':'iplayer', 'gp':False}}) + files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':ntitleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':type, 'provider':provider, 'orig_url':orig_url, 'url':url, 'durl':url, 'urldata':urldata('',''), 'params':params, 'logo':logo, 'online':isOnline, 'allowsDownload':self.allowsDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':enabled, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq}) + seq += 1 except Exception as e: log(type='ERROR',method='createMeta-1', err=u'%s' % e) isGetlinkWork = False - try: - if useGetlinkAPI==True and isOnline and 'google.com/file' in url and self.useGetLinkAPI: - client.setIP4() - ntitleinfo = titleinfo + ' | (via GetLink 
API) ' - files = urldata(url) - files = client.b64decode(files) - filesJ = json.loads(files) - if len(filesJ) > 0: - for mfile in filesJ: - mfile = json.loads(mfile) - #print "mfile --- : %s" % mfile - furl = mfile['src'] - f2url = client.request(furl, followredirect=True, output='geturl') - if 'http' in f2url: - furl = f2url - #print "furl --- : %s" % furl - quality = file_quality(furl, mfile['res'], videoData)[0] - isOnlineT = check(furl, videoData, headers=headers, cookie=cookie)[0] - type = rip_type(furl, riptype) - else: - isOnlineT = 'Unknown' - - p = {'headers':'','cookie':''} - p = json.dumps(p, encoding='utf-8') - p = client.b64encode(p) - - fs = client.getFileSize(furl, retry429=True) - - files_ret.append({'source': self.name, 'maininfo':'', 'titleinfo':ntitleinfo, 'quality': quality, 'vidtype':vidtype, 'rip':type, 'provider': provider, 'url': furl, 'durl':furl, 'urldata':urldata('',''), 'params':p, 'logo': logo, 'online': isOnlineT, 'allowsDownload':self.allowsDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':enabled, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(furl), 'misc':{'player':'iplayer' , 'gp':False}}) - isGetlinkWork = True - client.setIP6() - except Exception as e: - log(type='ERROR',method='createMeta-2', err=u'%s' % e) try: if showsplit == True and isOnline and isGetlinkWork == False: @@ -309,7 +262,8 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, showspli fs = client.getFileSize(furl, retry429=True) - files_ret.append({'source': self.name, 'maininfo':'', 'titleinfo':ntitleinfo, 'quality': quality, 'vidtype':vidtype, 'rip':type, 'provider': provider, 'url': furl, 'durl':furl, 'urldata':createurldata(furl,quality), 'params':params, 'logo': logo, 'online': isOnlineT, 'allowsDownload':self.allowsDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':enabled, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(furl), 'misc':{'player':'iplayer', 'gp':False}}) + files_ret.append({'source': self.name, 'maininfo':'', 'titleinfo':ntitleinfo, 'quality': quality, 'vidtype':vidtype, 'rip':type, 'provider': provider, 'orig_url':orig_url, 'url': furl, 'durl':furl, 'urldata':createurldata(furl,quality), 'params':params, 'logo': logo, 'online': isOnlineT, 'allowsDownload':self.allowsDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':enabled, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(furl), 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq}) + seq += 1 except Exception as e: log(type='ERROR',method='createMeta-3', err=u'%s' % e) except Exception as e: @@ -329,8 +283,8 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, showspli return links - def resolve(self, url): - return resolve(url) + def resolve(self, url, page_url=None, **kwargs): + return resolve(url, page_url=page_url) def resolveHostname(self, host): return self.name @@ -338,11 +292,12 @@ def resolveHostname(self, host): def testLink(self, url): return check(url) -def resolve(url): +def resolve(url, page_url=None, **kwargs): - if check(url)[0] == False: return + if check(url)[0] == False: + return (None, 'File removed', None) - return url + return ([url], '', None) def getVideoMetaData(url, httpsskip=False): try: @@ -369,7 +324,16 @@ def check(url, videoData=None, 
headers=None, cookie=None, doPrint=True, httpsskip=False):
 		if videoData==None:
 			videoData = getVideoMetaData(url, httpsskip)[0]
 
-		if 'This+video+doesn%27t+exist' in videoData and 'Please+try+again+later' not in videoData:
+		if 'This+video+doesn%27t+exist' in videoData and 'Please+try+again+later' not in videoData:
+			if 'google.com/file' in url:
+				r_split = url.split('/')
+				id = r_split[len(r_split)-2]
+				durl, res, fs, file_ext, error = getFileLink(id, httpsskip=False)
+				if error != '':
+					log('FAIL', 'check', '%s : %s' % (error,url))
+					return (False, videoData)
+				elif float(fs) > (1024*1024):
+					return (True, videoData)
 			log('FAIL', 'check', 'This video doesn\'t exist : %s' % url)
 			return (False, videoData)
@@ -416,11 +380,12 @@ def check(url, videoData=None, headers=None, cookie=None, doPrint=True, httpsskip=False):
 		log('ERROR', 'check', '%s' % e, dolog=doPrint)
 		return (False, videoData)
 
-def getFileLink(id, httpsskip=False):
+def getFileLink(id, file_ext='.mp4', httpsskip=False):
 
 	st = time.time()
 	durl = 'https://drive.google.com/uc?export=view&id=%s' % id
 	fs = 0
+	error = ''
 
 	while 'drive.google.com' in durl and time.time() - st < 30:
 		#durl = 'https://drive.google.com/uc?export=view&id=0BxHDtiw8Swq7X0E5WUgzZTg2aE0'
@@ -428,6 +393,11 @@ def getFileLink(id, httpsskip=False):
 		#print headers
 		#print content
 		#print cookieD
+
+		if 'Too many users have viewed or downloaded this file recently' in respD:
+			error = 'Please try accessing the file again later'
+			break
+
 		try:
 			fs = re.findall(r' \((.*?)G\)', respD)[0]
 			fs = int(float(fs.strip()) * (1024*1024*1024))
@@ -437,17 +407,40 @@ def getFileLink(id, httpsskip=False):
 				fs = int(float(fs.strip()) * (1024*1024))
 			except:
 				fs = 0
-		confirm = re.findall(r'confirm.*?&', respD)[0]
-		durl = 'https://drive.google.com/uc?export=download&%sid=%s' % (confirm,id)
-		#print durl
-		durl = client.request(durl, headers=headersD, cookie=cookieD, followredirect=True, output='geturl', limit='0')
-		durl = durl.replace('?e=download','?e=file.mp4')
+
+		try:
+			file_ext_title = client.parseDOM(respD, 'meta', attrs = {'itemprop': 'name'}, ret='content')[0]
+			i = file_ext_title.rfind('.')
+			if i > 0:
+				file_ext = file_ext_title[i:]
+		except:
+			try:
+				file_ext_title = client.parseDOM(respD, 'span', attrs = {'class': 'uc-name-size'})[0]
+				file_ext_title = client.parseDOM(file_ext_title, 'a')[0]
+				i = file_ext_title.rfind('.')
+				if i > 0:
+					file_ext = file_ext_title[i:]
+			except:
+				pass
+
+		try:
+			confirm = re.findall(r'confirm.*?&', respD)[0]
+			durl = 'https://drive.google.com/uc?export=download&%sid=%s' % (confirm,id)
+			#print durl
+			durl = client.request(durl, headers=headersD, cookie=cookieD, followredirect=True, output='geturl', limit='0')
+			durl = durl.replace('?e=download','?e=file.mp4')
+			break
+		except Exception as e:
+			error = '%s' % e
+			break
+
+		time.sleep(15)
 
 	res = True
 	if 'drive.google.com' in durl:
 		res = False
 
-	return durl, res, fs
+	return durl, res, fs, file_ext, error
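
getFileLink() now loops on the Google Drive interstitial page: it bails out early on the daily-quota message, scrapes the displayed file name for an extension, and only then echoes the confirm token back to obtain the direct URL, sleeping 15s between attempts. For orientation, a standalone sketch of the same confirm-token dance (this uses the third-party requests package and a hypothetical helper name; it is an illustration of the flow, not plugin code):

	# Standalone illustration of the Drive "confirm" flow (assumes `requests`).
	import re, requests

	def drive_direct_url(file_id):
		s = requests.Session()
		page = s.get('https://drive.google.com/uc?export=download&id=%s' % file_id)
		if 'Too many users have viewed or downloaded this file' in page.text:
			return None, 'quota exceeded - retry later'
		m = re.search(r'confirm=([0-9A-Za-z_-]+)', page.text)
		if m is None:
			return page.url, ''   # small file: served directly, no interstitial
		# Large file: echo the confirm token back and follow the redirect.
		final = s.get('https://drive.google.com/uc?export=download&confirm=%s&id=%s'
		              % (m.group(1), file_id), allow_redirects=True, stream=True)
		return final.url, ''

def urldata(url, videoData=None, usevideoData=False):
	ret = ''
@@ -544,7 +537,7 @@ def rip_type(url, riptype):
 		type = riptype
 	type = type.lower()
 
-	if type == 'brrip' or type == 'ts' or type == 'cam' or type == 'scr':
+	if 'brrip' in type or type == 'ts' or type == 'cam' or type == 'scr':
 		pass
 	else:
 		type = 'unknown'
@@ -642,4 +635,4 @@ def log(type='INFO', method='undefined', err='', dolog=True, logToControl=False,
 		if control.doPrint == True and doPrint == True:
 			print msg
 	except Exception as e:
-		control.log('Error in Logging: %s >>> %s' % (msg,e))
+		control.log('Error in Logging: %s >>> %s' % (msg,e))
\ No newline at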
end of file diff --git a/Contents/Libraries/Shared/resources/lib/resolvers/host_mega.py b/Contents/Libraries/Shared/resources/lib/resolvers/host_mega.py index 8c6e67c..e8e0843 100644 --- a/Contents/Libraries/Shared/resources/lib/resolvers/host_mega.py +++ b/Contents/Libraries/Shared/resources/lib/resolvers/host_mega.py @@ -219,10 +219,17 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= params = client.b64encode(json.dumps('', encoding='utf-8')) online = check(url) titleinfo = txt + maininfo = '' fs = 0 try: - furl, fs, file_ext = mega.get_mega_dl_link(url) + furl, fs, file_ext1, err = mega.get_mega_dl_link(url) + if err != '': + raise Exception(err) + if file_ext1 != None: + file_ext = file_ext1 + if file_ext not in ['.mp4','.mkv','.avi']: + titleinfo = '%s%s' % (txt+' ' if len(txt)>0 else '', file_ext+' file') quality = qual_based_on_fs(quality, fs) @@ -232,13 +239,14 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= except Exception as e: online = False log('FAIL', 'createMeta-1', '%s - %s' % (url,e)) + maininfo = '*File Unavailable*' try: log(type='INFO',method='createMeta', err=u'durl:%s ; res:%s; fs:%s' % (url,quality,fs)) - files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':titleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'durl':url, 'url':url, 'urldata':urldata, 'params':params, 'logo':logo, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'online':online, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':self.netloc[0], 'misc':{'player':'iplayer', 'gp':False}}) + files_ret.append({'source':self.name, 'maininfo':maininfo, 'titleinfo':titleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'durl':url, 'url':url, 'urldata':urldata, 'params':params, 'logo':logo, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'online':online, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':self.netloc[0], 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':0}) except Exception as e: log('ERROR', 'createMeta-2', '%s - %s' % (url,e)) - files_ret.append({'source':urlhost, 'maininfo':'', 'titleinfo':titleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'durl':url, 'url':url, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':self.netloc[0], 'misc':{'player':'iplayer', 'gp':False}}) + files_ret.append({'source':urlhost, 'maininfo':maininfo, 'titleinfo':titleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'durl':url, 'url':url, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 
'poster':poster, 'subdomain':self.netloc[0], 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':0})
 		except Exception as e:
 			log('ERROR', 'createMeta', '%s' % e)
@@ -254,8 +262,8 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype=
 		return links
 
-	def resolve(self, url):
-		return resolve(url)
+	def resolve(self, url, page_url=None, **kwargs):
+		return resolve(url, page_url=page_url)
 
 	def resolveHostname(self, host):
 		return self.name
@@ -263,11 +271,12 @@ def resolveHostname(self, host):
 	def testLink(self, url):
 		return check(url)
 
-def resolve(url):
+def resolve(url, page_url=None, **kwargs):
 
-	if check(url) == False: return
+	if check(url) == False:
+		return (None, 'File removed', None)
 
-	return url
+	return ([url], '', None)
 
 def check(url, headers=None, cookie=None):
 	try:
diff --git a/Contents/Libraries/Shared/resources/lib/resolvers/host_mp4upload.py b/Contents/Libraries/Shared/resources/lib/resolvers/host_mp4upload.py
new file mode 100644
index 0000000..4104a1d
--- /dev/null
+++ b/Contents/Libraries/Shared/resources/lib/resolvers/host_mp4upload.py
@@ -0,0 +1,335 @@
+# -*- coding: utf-8 -*-
+
+#########################################################################################################
+#
+# Mp4upload scraper
+#
+#
+# Coder Alpha
+# https://github.com/coder-alpha
+#
+
+'''
+	This program is free software: you can redistribute it and/or modify
+	it under the terms of the GNU General Public License as published by
+	the Free Software Foundation, either version 3 of the License, or
+	(at your option) any later version.
+
+	This program is distributed in the hope that it will be useful,
+	but WITHOUT ANY WARRANTY; without even the implied warranty of
+	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+	GNU General Public License for more details.
+
+	You should have received a copy of the GNU General Public License
+	along with this program. If not, see <http://www.gnu.org/licenses/>.
+'''
+#########################################################################################################
+
+import re,urllib,json,time
+import os, sys, ast
+from resources.lib.libraries import client, control, jsunpack
+
+hdr = {
+	'User-Agent': client.USER_AGENT,
+	'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
+	'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
+	'Accept-Encoding': 'none',
+	'Accept-Language': 'en-US,en;q=0.8',
+	'Connection': 'keep-alive'}
+
+name = 'mp4upload'
+loggertxt = []
+
+class host:
+	def __init__(self):
+		del loggertxt[:]
+		self.ver = '0.0.1'
+		self.update_date = 'Mar. 
15, 2019' + log(type='INFO', method='init', err=' -- Initializing %s %s %s Start --' % (name, self.ver, self.update_date)) + self.init = False + self.logo = 'https://i.imgur.com/uqrHeB7.png' + self.name = name + self.host = ['mp4upload.com'] + self.netloc = ['mp4upload.com'] + self.quality = '720p' + self.loggertxt = [] + self.captcha = False + self.allowsDownload = True + self.resumeDownload = True + self.allowsStreaming = True + self.ac = False + self.pluginManagedPlayback = True + self.speedtest = 0 + testResults = self.testWorking() + self.working = testResults[0] + self.msg = testResults[1] + if self.working == False: + self.captcha = True + self.working = True + self.resolver = self.testResolver() + self.init = True + log(type='INFO', method='init', err=' -- Initializing %s %s %s End --' % (name, self.ver, self.update_date)) + + def info(self): + return { + 'name': self.name, + 'ver': self.ver, + 'date': self.update_date, + 'class': self.name, + 'speed': round(self.speedtest,3), + 'netloc': self.netloc, + 'host': self.host, + 'quality': self.quality, + 'logo': self.logo, + 'working': self.working, + 'resolver': self.resolver, + 'captcha': self.captcha, + 'msg': self.msg, + 'playbacksupport': self.pluginManagedPlayback, + 'a/c': self.ac, + 'streaming' : self.allowsStreaming, + 'downloading' : self.allowsDownload + } + + def getLog(self): + self.loggertxt = loggertxt + return self.loggertxt + + def testWorking(self): + try: + testUrls = self.testUrl() + bool = False + msg = '' + for testUrl in testUrls: + x1 = time.time() + bool = check(testUrl) + self.speedtest = time.time() - x1 + + if bool == True: + break + + log(method='testWorking', err='%s online status: %s' % (self.name, bool)) + return (bool, msg) + except Exception as e: + log(method='testWorking', err='%s online status: %s' % (self.name, bool)) + log(type='ERROR', method='testWorking', err=e) + return False, msg + + def testResolver(self): + try: + if control.setting('use_quick_init') == True: + log('INFO','testResolver', 'Disabled testing - Using Quick Init setting in Prefs.') + return False + testUrls = self.testUrl() + links = [] + bool = False + for testUrl in testUrls: + links = self.createMeta(testUrl, 'Test', '', '720p', links, 'testing', 'BRRIP') + if len(links) > 0: + bool = True + break + except Exception as e: + log(type='ERROR', method='testResolver', err=e) + + log(method='testResolver', err='%s parser status: %s' % (self.name, bool)) + return bool + + def testUrl(self): + return ['https://www.mp4upload.com/embed-8x467xhnq2y9.html'] + + def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext = '.mp4', testing=False, poster=None, headers=None, page_url=None): + + files_ret = [] + orig_url = url + + if testing == True: + links.append(url) + return links + + if control.setting('Host-%s' % name) == False: + log('INFO','createMeta','Host Disabled by User') + return links + + try: + urldata = client.b64encode(json.dumps('', encoding='utf-8')) + params = client.b64encode(json.dumps('', encoding='utf-8')) + + online = check(url) + vidurls, err, sub_url_t = getAllQuals(url, online) + + if vidurls == None: + log(type='ERROR',method='createMeta-1', err=u'%s' % err) + return links + + if sub_url_t != None: + sub_url = sub_url_t + + seq = 0 + for vv in vidurls: + durl = vv['page'] + vidurl = vv['file'] + if vidurl != None: + quality = vv['label'] + fs = vv['fs'] + + try: + log(type='INFO',method='createMeta', err=u'durl:%s ; res:%s; fs:%s' % 
(durl,quality,fs)) + files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'poster':poster, 'sub_url':sub_url, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq}) + except Exception as e: + log(type='ERROR',method='createMeta', err=u'%s' % e) + files_ret.append({'source':urlhost, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq}) + seq += 1 + except Exception as e: + log('ERROR', 'createMeta', '%s' % e) + + for fr in files_ret: + links.append(fr) + + if len(files_ret) > 0: + log('SUCCESS', 'createMeta', 'Successfully processed %s link >>> %s' % (provider, orig_url), dolog=self.init) + else: + log('FAIL', 'createMeta', 'Failed in processing %s link >>> %s' % (provider, orig_url), dolog=self.init) + + log('INFO', 'createMeta', 'Completed', dolog=self.init) + + return links + + def resolve(self, url, online=None, page_url=None, **kwargs): + return resolve(url, online=online, page_url=page_url) + + def resolveHostname(self, host): + return self.name + + def testLink(self, url): + return check(url) + +def resolve(url, online=None, page_url=None, **kwargs): + + try: + if online == None: + if check(url) == False: + raise Exception('Video not available') + elif online == False: + raise Exception('Video not available') + + video_url = None + err = '' + try: + page_link = url + page_data_string = client.request(page_link, httpsskip=True) + video_url, err = decode(page_data_string) + if video_url == None: + raise Exception(err) + except Exception as e: + err = e + log('ERROR', 'resolve', 'link > %s : %s' % (url, e), dolog=True) + + return (video_url, err, None) + + except Exception as e: + e = '{}'.format(e) + return (None, e, None) + +def decode(html): + + source = None + err = '' + try: + try: + str_pattern="(eval\(function\(p,a,c,k,e,(?:r|d).*)" + + js = re.compile(str_pattern).findall(html) + if len(js) == 0: + raise Exception('No packer js found.') + + js = js[0] + if 'p,a,c,k,e,' not in js: + raise Exception('No packer js found.') + + html_with_unpacked_js = jsunpack.unpack(js) + if html_with_unpacked_js == None: + raise Exception('Could not unpack js.') + + source = re.findall(r':\"(http.*.mp4)\"', html_with_unpacked_js) + except Exception as e: + log('ERROR', 'decode', '%s' % (e), dolog=True) + err = 'Mp4Upload Error: %s' % e + if source != None and len(source) == 0: + raise Exception('No mp4 Videos found !') + except Exception as e: + err = 'Mp4Upload Error: %s' % e + + return source, err + +def getAllQuals(url, online=None): + try: + if online == None: + if check(url) == False: + raise Exception('Video not 
available') + + page_data_string = client.request(url, httpsskip=True) + video_urls, err = decode(page_data_string) + + if video_urls == None: + raise Exception(err) + + video_url_a = [] + myheaders = {} + myheaders['User-Agent'] = client.agent() + myheaders['Referer'] = url + + for v in video_urls: + try: + fs = client.getFileSize(v, retry429=True) + qs = qual_based_on_fs(fs) + f_i = {'label': '%s' % qs, 'file':v, 'fs':fs, 'page':url} + video_url_a.append(f_i) + except: + pass + + video_urlf = video_url_a + return (video_urlf, '', None) + except Exception as e: + e = '{}'.format(e) + return (None, e, None) + +def qual_based_on_fs(fs): + q = '480p' + try: + if int(fs) > 2 * float(1024*1024*1024): + q = '1080p' + elif int(fs) > 1 * float(1024*1024*1024): + q = '720p' + except: + pass + return q + +def check(url, headers=None, cookie=None): + try: + http_res, red_url = client.request(url=url, output='responsecodeext', followredirect=True, headers=headers, cookie=cookie) + if http_res not in client.HTTP_GOOD_RESP_CODES: + return False + + page_data_string = client.request(url=url, headers=headers, cookie=cookie) + + if 'File Not Found' in page_data_string or 'File was deleted' in page_data_string: + return False + + return True + except: + return False + +def test(url): + return resolve(url) + +def log(type='INFO', method='undefined', err='', dolog=True, logToControl=False, doPrint=True): + try: + msg = '%s: %s > %s > %s : %s' % (time.ctime(time.time()), type, name, method, err) + if dolog == True: + loggertxt.append(msg) + if logToControl == True: + control.log(msg) + if control.doPrint == True and doPrint == True: + print msg + except Exception as e: + control.log('Error in Logging: %s >>> %s' % (msg,e)) diff --git a/Contents/Libraries/Shared/resources/lib/resolvers/host_openload.py b/Contents/Libraries/Shared/resources/lib/resolvers/host_openload.py index 910d3cc..9cf8ec1 100644 --- a/Contents/Libraries/Shared/resources/lib/resolvers/host_openload.py +++ b/Contents/Libraries/Shared/resources/lib/resolvers/host_openload.py @@ -279,10 +279,10 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= try: log(type='INFO',method='createMeta', err=u'pair:%s; online:%s; durl:%s ; res:%s; fs:%s' % (isPairRequired,online,vidurl,quality,fs)) - files_ret.append({'source':self.name, 'maininfo':pair, 'titleinfo':titleinfo, 'quality':file_quality(vidurl, quality, file_title), 'vidtype':vidtype, 'rip':rip_type(vidurl, riptype, file_title), 'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'misc':{'pair':isPairRequired, 'player':'iplayer', 'gp':False}}) + files_ret.append({'source':self.name, 'maininfo':pair, 'titleinfo':titleinfo, 'quality':file_quality(vidurl, quality, file_title), 'vidtype':vidtype, 'rip':rip_type(vidurl, riptype, file_title), 'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 
'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'pair':isPairRequired, 'player':'iplayer', 'gp':False}, 'seq':0}) except Exception as e: log(type='ERROR',method='createMeta-3', err=u'%s' % e) - files_ret.append({'source':urlhost, 'maininfo':pair, 'titleinfo':titleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'misc':{'pair':isPairRequired, 'player':'eplayer', 'gp':False}}) + files_ret.append({'source':urlhost, 'maininfo':pair, 'titleinfo':titleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'pair':isPairRequired, 'player':'eplayer', 'gp':False}, 'seq':0}) except Exception as e: log('ERROR', 'createMeta', '%s' % e) @@ -298,8 +298,8 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= return links - def resolve(self, url): - return resolve(url) + def resolve(self, url, embedpage=False, usePairing=True, session=None, page_url=None, **kwargs): + return resolve(url, embedpage=embedpage, usePairing=usePairing, session=session, page_url=page_url) def resolveHostname(self, host): return self.name @@ -319,7 +319,7 @@ def getVideoMetaData(url): print 'ERROR: %s' % e return res -def resolve(url, embedpage=False, usePairing=True, session=None): +def resolve(url, embedpage=False, usePairing=True, session=None, page_url=None, **kwargs): try: videoData = '' diff --git a/Contents/Libraries/Shared/resources/lib/resolvers/host_rapidvideo.py b/Contents/Libraries/Shared/resources/lib/resolvers/host_rapidvideo.py index 08b1b9e..972dd44 100644 --- a/Contents/Libraries/Shared/resources/lib/resolvers/host_rapidvideo.py +++ b/Contents/Libraries/Shared/resources/lib/resolvers/host_rapidvideo.py @@ -166,6 +166,7 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= if sub_url_t != None: sub_url = sub_url_t + seq = 0 for vv in vidurls: durl = vv['page'] vidurl, r1, r2 = resolve(durl, online) @@ -179,10 +180,11 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= try: log(type='INFO',method='createMeta', err=u'durl:%s ; res:%s; fs:%s' % (vidurl,quality,fs)) - files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'poster':poster, 'sub_url':sub_url, 'subdomain':client.geturlhost(url), 'misc':{'player':'iplayer', 'gp':False}}) + 
files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'poster':poster, 'sub_url':sub_url, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq}) except Exception as e: log(type='ERROR',method='createMeta', err=u'%s' % e) - files_ret.append({'source':urlhost, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'misc':{'player':'iplayer', 'gp':False}}) + files_ret.append({'source':urlhost, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq}) + seq += 1 except Exception as e: log('ERROR', 'createMeta', '%s' % e) @@ -198,8 +200,8 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= return links - def resolve(self, url): - return resolve(url) + def resolve(self, url, online=None, USE_POST=False, page_url=None, **kwargs): + return resolve(url, online=online, USE_POST=USE_POST, page_url=page_url) def resolveHostname(self, host): return self.name @@ -207,7 +209,7 @@ def resolveHostname(self, host): def testLink(self, url): return check(url) -def resolve(url, online=None, USE_POST=False): +def resolve(url, online=None, USE_POST=False, page_url=None, **kwargs): try: if online == None: diff --git a/Contents/Libraries/Shared/resources/lib/resolvers/host_streamango.py b/Contents/Libraries/Shared/resources/lib/resolvers/host_streamango.py index d5ffc2a..ca7685f 100644 --- a/Contents/Libraries/Shared/resources/lib/resolvers/host_streamango.py +++ b/Contents/Libraries/Shared/resources/lib/resolvers/host_streamango.py @@ -172,6 +172,7 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= if sub_url_t != None: sub_url = sub_url_t + seq = 0 for vv in vidurls: durl = vv['page'] vidurl, r1, r2 = resolve(durl, online) @@ -186,10 +187,11 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= try: log(type='INFO',method='createMeta', err=u'durl:%s ; res:%s; fs:%s' % (durl,quality,fs)) - files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 
'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'poster':poster, 'sub_url':sub_url, 'subdomain':client.geturlhost(url), 'misc':{'player':'iplayer', 'gp':False}}) + files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'poster':poster, 'sub_url':sub_url, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq}) except Exception as e: log(type='ERROR',method='createMeta', err=u'%s' % e) - files_ret.append({'source':urlhost, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'misc':{'player':'iplayer', 'gp':False}}) + files_ret.append({'source':urlhost, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq}) + seq += 1 except Exception as e: log('ERROR', 'createMeta', '%s' % e) @@ -205,8 +207,8 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype= return links - def resolve(self, url): - return resolve(url) + def resolve(self, url, online=None, page_url=None, **kwargs): + return resolve(url, online=online, page_url=page_url) def resolveHostname(self, host): return self.name @@ -214,7 +216,7 @@ def resolveHostname(self, host): def testLink(self, url): return check(url) -def resolve(url, online=None): +def resolve(url, online=None, page_url=None, **kwargs): try: if online == None: @@ -234,8 +236,8 @@ def resolve(url, online=None): if video_url[len(video_url)-1] == '@': video_url = video_url[:-1] except Exception as e: - err = r - log('ERROR', 'resolve', 'link > %s : %s' % (url, e), dolog=self.init) + err = '%s' % e + log('ERROR', 'resolve', 'link > %s : %s' % (url, e), dolog=True) return (video_url, err, None) diff --git a/Contents/Libraries/Shared/resources/lib/resolvers/host_vidcloud.py b/Contents/Libraries/Shared/resources/lib/resolvers/host_vidcloud.py index 7959d79..756dbc3 100644 --- a/Contents/Libraries/Shared/resources/lib/resolvers/host_vidcloud.py +++ b/Contents/Libraries/Shared/resources/lib/resolvers/host_vidcloud.py @@ -158,7 +158,51 @@ def createMeta(self, url, provider, logo, quality, links, 
key, riptype, vidtype=
 
 		try:
 			if 'vidcloud.icu/load' in url:
-				raise Exception('No mp4 Video found')
+				raise Exception('No mp4 Video found')
+			elif 'vidcloud.icu/download' in url:
+				headersx = {'Referer': url, 'User-Agent': client.agent()}
+				page_data, head, ret, cookie = client.request(url, output='extended', headers=headersx)
+				try:
+					cookie = re.findall(r'Set-Cookie:(.*)', str(ret), re.MULTILINE)[0].strip()
+				except:
+					pass
+				headersx['Cookie'] = cookie
+				mp4_vids = re.findall(r'\"(http.*?.mp4.*?)\"',page_data)
+				items = []
+				for u in mp4_vids:
+					u = u.strip().replace(' ','%20').replace('&amp;','&')
+					fs = client.getFileSize(u, headers=headersx)
+					q = qual_based_on_fs(quality,fs)
+					online = check(u, headers=headersx)
+					urldata = client.b64encode(json.dumps('', encoding='utf-8'))
+					params = client.b64encode(json.dumps('', encoding='utf-8'))
+					if headersx != None:
+						paramsx = {'headers':headersx}
+						params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
+
+					items.append({'quality':q, 'riptype':riptype, 'src':u, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':False})
+
+				seq = 0
+				for item in items:
+
+					durl = url
+					vidurl = item['src']
+					allowsStreaming = item['allowsStreaming']
+					quality = item['quality']
+					riptype = item['riptype']
+					fs = item['fs']
+					online = item['online']
+					params = item['params']
+					urldata = item['urldata']
+
+					try:
+						log(type='INFO',method='createMeta', err=u'durl:%s ; res:%s; fs:%s' % (durl,quality,fs))
+						files_ret.append({'source':self.name, 'maininfo':txt, 'titleinfo':'', 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':True}, 'seq':seq})
+					except Exception as e:
+						log(type='ERROR',method='createMeta', err=u'%s' % e)
+						files_ret.append({'source':urlhost, 'maininfo':txt, 'titleinfo':'', 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':True}, 'seq':seq})
+					seq += 1
+			elif url != None:
 				online = True
 				result = client.request(orig_url, httpsskip=True)
@@ -187,13 +231,13 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype=
 						mp4_vids = re.findall(r'http.*?mp4',res)
 						if len(mp4_vids) > 0:
 							try:
-								files_ret = resolvers.createMeta(u, provider, logo, quality, files_ret, key, poster=poster, riptype=riptype, vidtype=vidtype, sub_url=sub_url, testing=testing, headers=headers, page_url=page_url, urlhost='vidcloud.icu/download')
+								files_ret = resolvers.createMeta(u, provider, logo, quality, files_ret, key, poster=poster, riptype=riptype, vidtype=vidtype, sub_url=sub_url, testing=testing, headers=headers, page_url=page_url, urlhost='vidcloud.icu')
 							except Exception as e:
 								log(type='ERROR',method='createMeta', err=u'%s' % e)
 					elif len(mp4_vids) == 0 and video_url == vids[len(vids)-1] and len(files_ret) == 0:
-						raise Exception('No mp4 Video\'s found')
+						raise Exception('No mp4 Video found')
 		except Exception as e:
-			log('ERROR', 'createMeta', '%s' % e)
+			log('FAIL', 'createMeta', '%s' % e)
 
 		for fr in files_ret:
 			links.append(fr)
@@ -207,8 +251,8 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype=
 		return links
 
-	def resolve(self, url):
-		return resolve(url)
+	def resolve(self, url, online=None, page_url=None, **kwargs):
+		return resolve(url, online=online, page_url=page_url)
 
 	def resolveHostname(self, host):
 		return self.name
@@ -216,18 +260,50 @@ def resolveHostname(self, host):
 	def testLink(self, url):
 		return check(url)
 
-def resolve(url, online=None):
+def resolve(url, online=None, page_url=None, **kwargs):
 
 	try:
 		if online == None:
 			if check(url) == False:
 				raise Exception('Video not available')
+
+		video_url = None
+		headersx = {'Referer': url, 'User-Agent': client.agent()}
+		page_data, head, ret, cookie = client.request(url, output='extended', headers=headersx)
+		try:
+			cookie = re.findall(r'Set-Cookie:(.*)', str(ret), re.MULTILINE)[0].strip()
+		except:
+			pass
+		headersx['Cookie'] = cookie
+		mp4_vids = re.findall(r'\"(http.*?.mp4.*?)\"',page_data)
+		items = []
+		for u in mp4_vids:
+			u = u.strip().replace(' ','%20').replace('&amp;','&')
+			items.append(u)
 
-		return (url, '', None)
+		if len(items) > 0:
+			video_url = items
+		else:
+			raise Exception('Video not available')
+
+		paramsx = {'headers':headersx}
+		params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
+
+		return (video_url, '', params)
 
 	except Exception as e:
 		e = '{}'.format(e)
 		return (None, e, None)
+
+def qual_based_on_fs(q,fs):
+	try:
+		if int(fs) > 2 * float(1024*1024*1024):
+			q = '1080p'
+		elif int(fs) > 1 * float(1024*1024*1024):
+			q = '720p'
+	except:
+		pass
+	return q
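
qual_based_on_fs() (a one-argument variant also ships in the new mp4upload and xstreamcdn hosts) falls back to file size alone when a page exposes no quality label: above 2 GiB it assumes 1080p, above 1 GiB 720p, otherwise it keeps the caller's default. Illustrative calls against the implementation above:

	# Illustrative only - thresholds as implemented above.
	print qual_based_on_fs('480p', 3 * 1024 * 1024 * 1024)    # '1080p' (3 GiB)
	print qual_based_on_fs('480p', 1536 * 1024 * 1024)        # '720p'  (1.5 GiB)
	print qual_based_on_fs('480p', 700 * 1024 * 1024)         # '480p'  (default kept)

def check(url, headers=None, cookie=None):
	try:
diff --git a/Contents/Libraries/Shared/resources/lib/resolvers/host_vidnode.py b/Contents/Libraries/Shared/resources/lib/resolvers/host_vidnode.py
index f723cc5..8726570 100644
--- a/Contents/Libraries/Shared/resources/lib/resolvers/host_vidnode.py
+++ b/Contents/Libraries/Shared/resources/lib/resolvers/host_vidnode.py
@@ -174,12 +174,11 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype=
 			if sub_url_t != None:
 				sub_url = sub_url_t
 
+			seq = 0
 			for vv in vidurls:
 				durl = vv['page']
 				vidurl, r1, r2 = resolve(durl, online)
 
-				print vidurl
-
 				if vidurl == None:
 					log(type='ERROR',method='createMeta', err=u'%s' % r1)
 				else:
@@ -193,10 +192,11 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype=
 
 					try:
 						log(type='INFO',method='createMeta', err=u'durl:%s ; res:%s; fs:%s' % (vidurl,quality,fs))
-						files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 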
'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'poster':poster, 'sub_url':sub_url, 'subdomain':client.geturlhost(url), 'misc':{'player':'iplayer', 'gp':False}})
+						files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'poster':poster, 'sub_url':sub_url, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq})
 					except Exception as e:
 						log(type='ERROR',method='createMeta', err=u'%s' % e)
-						files_ret.append({'source':urlhost, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'misc':{'player':'iplayer', 'gp':False}})
+						files_ret.append({'source':urlhost, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq})
+					seq += 1
 		except Exception as e:
 			log('ERROR', 'createMeta', '%s' % e)
@@ -212,8 +212,8 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype=
 		return links
 
-	def resolve(self, url):
-		return resolve(url)
+	def resolve(self, url, online=None, page_url=None, **kwargs):
+		return resolve(url, online=online, page_url=page_url)
 
 	def resolveHostname(self, host):
 		return self.name
@@ -221,7 +221,7 @@ def resolveHostname(self, host):
 	def testLink(self, url):
 		return check(url)
 
-def resolve(url, online=None):
+def resolve(url, online=None, page_url=None, **kwargs):
 
 	try:
 		if online == None:
diff --git a/Contents/Libraries/Shared/resources/lib/resolvers/host_xstreamcdn.py b/Contents/Libraries/Shared/resources/lib/resolvers/host_xstreamcdn.py
new file mode 100644
index 0000000..5015aba
--- /dev/null
+++ b/Contents/Libraries/Shared/resources/lib/resolvers/host_xstreamcdn.py
@@ -0,0 +1,328 @@
+# -*- coding: utf-8 -*-
+
+#########################################################################################################
+#
+# Xstreamcdn scraper
+#
+#
+# Coder Alpha
+# https://github.com/coder-alpha
+#
+
+'''
+	This program is free software: you can redistribute it and/or modify
+	it under the terms of the GNU General Public License as published by
+	the Free Software Foundation, either version 3 of the License, or
+	(at your option) any later version.
+
+	This program is distributed in the hope that it will be useful,
+	but WITHOUT ANY WARRANTY; without even the implied warranty of
+	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+	GNU General Public License for more details.
+
+	You should have received a copy of the GNU General Public License
+	along with this program. If not, see <http://www.gnu.org/licenses/>.
+''' +######################################################################################################### + +import re,urllib,json,time +import os, sys, ast +from resources.lib.libraries import client, control, jsunpack + +hdr = { + 'User-Agent': client.USER_AGENT, + 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', + 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3', + 'Accept-Encoding': 'none', + 'Accept-Language': 'en-US,en;q=0.8', + 'Connection': 'keep-alive'} + +name = 'xstreamcdn' +loggertxt = [] + +class host: + def __init__(self): + del loggertxt[:] + self.ver = '0.0.1' + self.update_date = 'Mar. 19, 2019' + log(type='INFO', method='init', err=' -- Initializing %s %s %s Start --' % (name, self.ver, self.update_date)) + self.init = False + self.logo = 'https://i.imgur.com/b89saq1.png' + self.name = name + self.host = ['xstreamcdn.com'] + self.netloc = ['xstreamcdn.com'] + self.quality = '720p' + self.loggertxt = [] + self.captcha = False + self.allowsDownload = True + self.resumeDownload = True + self.allowsStreaming = True + self.ac = False + self.pluginManagedPlayback = False + self.speedtest = 0 + testResults = self.testWorking() + self.working = testResults[0] + self.msg = testResults[1] + if self.working == False: + self.captcha = True + self.working = True + self.resolver = self.testResolver() + self.init = True + log(type='INFO', method='init', err=' -- Initializing %s %s %s End --' % (name, self.ver, self.update_date)) + + def info(self): + return { + 'name': self.name, + 'ver': self.ver, + 'date': self.update_date, + 'class': self.name, + 'speed': round(self.speedtest,3), + 'netloc': self.netloc, + 'host': self.host, + 'quality': self.quality, + 'logo': self.logo, + 'working': self.working, + 'resolver': self.resolver, + 'captcha': self.captcha, + 'msg': self.msg, + 'playbacksupport': self.pluginManagedPlayback, + 'a/c': self.ac, + 'streaming' : self.allowsStreaming, + 'downloading' : self.allowsDownload + } + + def getLog(self): + self.loggertxt = loggertxt + return self.loggertxt + + def testWorking(self): + try: + testUrls = self.testUrl() + bool = False + msg = '' + for testUrl in testUrls: + x1 = time.time() + bool = check(testUrl['url']) + self.speedtest = time.time() - x1 + + if bool == True: + break + + log(method='testWorking', err='%s online status: %s' % (self.name, bool)) + return (bool, msg) + except Exception as e: + log(method='testWorking', err='%s online status: %s' % (self.name, bool)) + log(type='ERROR', method='testWorking', err=e) + return False, msg + + def testResolver(self): + try: + if control.setting('use_quick_init') == True: + log('INFO','testResolver', 'Disabled testing - Using Quick Init setting in Prefs.') + return False + testUrls = self.testUrl() + links = [] + bool = False + for testUrl in testUrls: + links = self.createMeta(testUrl['url'], 'Test', '', '720p', links, 'testing', 'BRRIP', page_url=testUrl['page_url']) + if len(links) > 0: + bool = True + break + except Exception as e: + log(type='ERROR', method='testResolver', err=e) + + log(method='testResolver', err='%s parser status: %s' % (self.name, bool)) + return bool + + def testUrl(self): + return [{'url':'https://www.xstreamcdn.com/v/3qo1y3pj2oy','page_url':'https://www1.gowatchseries.co/triple-frontier-episode-0'}] + + def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext = '.mp4', testing=False, poster=None, headers=None, page_url=None): + + files_ret = [] + orig_url = 
+	def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext = '.mp4', testing=False, poster=None, headers=None, page_url=None):
+
+		files_ret = []
+		orig_url = url
+
+		if testing == True:
+			links.append(url)
+			return links
+
+		if control.setting('Host-%s' % name) == False:
+			log('INFO','createMeta','Host Disabled by User')
+			return links
+
+		if '#' in url:
+			url = url.split('#')[0]
+
+		try:
+			online = check(url)
+			vidurls, err, sub_url_t = getAllQuals(url, page_url, online)
+
+			if vidurls == None:
+				log(type='ERROR',method='createMeta-1', err=u'%s' % err)
+				return links
+
+			if sub_url_t != None:
+				sub_url = sub_url_t
+
+			seq = 0
+			for vv in vidurls: #{'quality':q, 'src':u, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata}
+				durl = url
+				vidurl = vv['src']
+
+				if vidurl != None:
+					quality = vv['quality']
+					fs = int(vv['fs'])
+					online = vv['online']
+					params = vv['params']
+					urldata = vv['urldata']
+
+					try:
+						log(type='INFO',method='createMeta', err=u'durl:%s ; res:%s; fs:%s' % (durl,quality,fs))
+						files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'poster':poster, 'sub_url':sub_url, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':True}, 'seq':seq})
+					except Exception as e:
+						log(type='ERROR',method='createMeta', err=u'%s' % e)
+						files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':True}, 'seq':seq})
+					seq += 1
+		except Exception as e:
+			log('ERROR', 'createMeta', '%s' % e)
+
+		for fr in files_ret:
+			links.append(fr)
+
+		if len(files_ret) > 0:
+			log('SUCCESS', 'createMeta', 'Successfully processed %s link >>> %s' % (provider, orig_url), dolog=self.init)
+		else:
+			log('FAIL', 'createMeta', 'Failed in processing %s link >>> %s' % (provider, orig_url), dolog=self.init)
+
+		log('INFO', 'createMeta', 'Completed', dolog=self.init)
+
+		return links
+
+	def resolve(self, url, page_url=None, online=None, **kwargs):
+		return resolve(url, page_url=page_url, online=online)
+
+	def resolveHostname(self, host):
+		return self.name
+
+	def testLink(self, url):
+		return check(url)
+
+def resolve(url, page_url=None, online=None, **kwargs):
+
+	try:
+		if online == None:
+			if check(url) == False:
+				raise Exception('Video not available')
+		elif online == False:
+			raise Exception('Video not available')
+
+		video_url = None
+		err = ''
+		try:
+			video_url, err = decode(url, page_url)
+			if video_url == None or len(video_url) == 0:
+				raise Exception(err)
+		except Exception as e:
+			err = e
+			log('ERROR', 'resolve', 'link > %s : %s' % (url, e), dolog=True)
+
+		return (video_url, err, None)
+
+	except Exception as e:
+		e = '{}'.format(e)
+		return (None, e, None)
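Caller-side, the module-level resolve() above keeps the (video_url, err, extra) triple used across these resolvers; a short usage sketch with the URL/page pair from testUrl():

# Usage sketch of resolve(); the URL and page_url come from testUrl() above.
video_url, err, _ = resolve('https://www.xstreamcdn.com/v/3qo1y3pj2oy',
		page_url='https://www1.gowatchseries.co/triple-frontier-episode-0')
if video_url == None:
	print 'resolve failed: %s' % err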
+def decode(url,page_url):
+	items = []
+	err = ''
+	try:
+		id = re.compile('//.+?/(?:embed|v)/([0-9a-zA-Z-_]+)').findall(url)[0]
+		headersx = {'Referer': url, 'User-Agent': client.agent()}
+		post_data = {'r':page_url, 'd':'www.xstreamcdn.com'}
+		api_url = 'https://www.xstreamcdn.com/api/source/%s' % id
+		page_data = client.request(api_url, post=client.encodePostData(post_data), headers=headersx)
+
+		j_data = json.loads(page_data)
+		success = j_data['success']
+		if success == False:
+			raise Exception('API returned error: %s | Data: %s' % (api_url, post_data))
+		else:
+			srcs = j_data['data']
+			for src in srcs:
+				q = src['label']
+				u = src['file']
+				fs = client.getFileSize(u, retry429=True, headers=headersx)
+				online = check(u)
+				u1 = client.request(u, output='geturl')
+				if u1 != None:
+					u = u1
+				urldata = client.b64encode(json.dumps('', encoding='utf-8'))
+				paramsx = {'headers':headersx}
+				params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
+
+				items.append({'quality':q, 'src':u, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata})
+		if len(items) == 0:
+			raise Exception('No videos found !')
+	except Exception as e:
+		err = 'xstreamcdn Error: %s' % e
+
+	return items, err
+
+def getAllQuals(url, page_url, online=None):
+	try:
+		if online == None:
+			if check(url) == False:
+				raise Exception('Video not available')
+
+		video_urls, err = decode(url, page_url)
+
+		if video_urls == None or len(video_urls) == 0 or err != '':
+			raise Exception(err)
+
+		video_urlf = video_urls
+		return (video_urlf, '', None)
+	except Exception as e:
+		e = '{}'.format(e)
+		return (None, e, None)
+
+def qual_based_on_fs(fs):
+	q = '480p'
+	try:
+		if int(fs) > 2 * float(1024*1024*1024):
+			q = '1080p'
+		elif int(fs) > 1 * float(1024*1024*1024):
+			q = '720p'
+	except:
+		pass
+	return q
+
+def check(url, headers=None, cookie=None):
+	try:
+		http_res, red_url = client.request(url=url, output='responsecodeext', followredirect=True, headers=headers, cookie=cookie)
+		if http_res not in client.HTTP_GOOD_RESP_CODES:
+			return False
+
+		page_data_string = client.request(url=url, headers=headers, cookie=cookie)
+
+		if 'File Not Found' in page_data_string or 'File was deleted' in page_data_string:
+			return False
+
+		return True
+	except:
+		return False
+
+def test(url, page_url=None):
+	return resolve(url, page_url)
+
+def log(type='INFO', method='undefined', err='', dolog=True, logToControl=False, doPrint=True):
+	try:
+		msg = '%s: %s > %s > %s : %s' % (time.ctime(time.time()), type, name, method, err)
+		if dolog == True:
+			loggertxt.append(msg)
+			if logToControl == True:
+				control.log(msg)
+		if control.doPrint == True and doPrint == True:
+			print msg
+	except Exception as e:
+		control.log('Error in Logging: %s >>> %s' % (msg,e))
diff --git a/Contents/Libraries/Shared/resources/lib/resolvers/host_youtube.py b/Contents/Libraries/Shared/resources/lib/resolvers/host_youtube.py
index 8217ec0..625d916 100644
--- a/Contents/Libraries/Shared/resources/lib/resolvers/host_youtube.py
+++ b/Contents/Libraries/Shared/resources/lib/resolvers/host_youtube.py
@@ -160,10 +160,10 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype=
 			fs = 5*1024*1024*1024
 
 		try:
-			files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':'', 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':url, 'durl':url, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':self.netloc[0], 'misc':{'player':'eplayer', 'gp':False}})
+			files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':'', 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':url, 'durl':url, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':self.netloc[0], 'page_url':page_url, 'misc':{'player':'eplayer', 'gp':False}, 'seq':0})
 		except Exception as e:
 			log(type='ERROR',method='createMeta', err=u'%s' % e)
-			files_ret.append({'source':urlhost, 'maininfo':'', 'titleinfo':'', 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':url, 'durl':url, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':self.netloc[0], 'misc':{'player':'eplayer', 'gp':False}})
+			files_ret.append({'source':urlhost, 'maininfo':'', 'titleinfo':'', 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':url, 'durl':url, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':self.netloc[0], 'page_url':page_url, 'misc':{'player':'eplayer', 'gp':False}, 'seq':0})
 		except Exception as e:
 			log('ERROR', 'createMeta', '%s' % e)
@@ -178,8 +178,8 @@ def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype=
 		log('INFO', 'createMeta', 'Completed', dolog=self.init)
 
 		return links
 
-	def resolve(self, url):
-		return resolve(url)
+	def resolve(self, url, page_url=None, **kwargs):
+		return resolve(url, page_url=page_url)
 
 	def resolveHostname(self, host):
 		return self.name
@@ -187,7 +187,7 @@ def resolveHostname(self, host):
 	def testLink(self, url):
 		return check(url)
 
-def resolve(url):
+def resolve(url, page_url=None, **kwargs):
 
 	if check(url) == False:
 		return
diff --git a/Contents/Libraries/Shared/resources/lib/sources/__init__.py b/Contents/Libraries/Shared/resources/lib/sources/__init__.py
index 9c3628e..4a34c0a 100644
--- a/Contents/Libraries/Shared/resources/lib/sources/__init__.py
+++ b/Contents/Libraries/Shared/resources/lib/sources/__init__.py
@@ -72,6 +72,9 @@ def getHosts(self):
 		hosts = resolvers.info()
 		return hosts
 
+	def getHostResolverMain(self):
+		return resolvers
+
 	def hostsCaller(self):
 		return resolvers.sourceHostsCall
 
@@ -167,9 +170,6 @@ def initProviders(self):
 
 	def getSources(self, name, title, year, imdb, tmdb, tvdb, tvrage, season, episode, tvshowtitle, alter, date, proxy_options, provider_options, key, session):
 
-		tuid = control.id_generator(16)
-		control.AddThread('getSources', 'Initializing Search Item in Providers', time.time(), '1', False, tuid)
-
 		try:
 			sourceDict = []
 			self.getSourcesAlive = True
@@ -186,14 +186,15 @@ def getSources(self, name, title, year, imdb, tmdb, tvdb, tvrage, season, episod
 			self.threadSlots[key] = []
 			pos = 0
 
 			if content == 'movie':
-				log(err='Initializing Search for Movie: %s' % title)
+				log(err='Initializing Search for Movie: %s (%s)' % (title, year))
 				title = cleantitle.normalize(title)
 				for source in myProviders:
 					try:
 						source_name = 'Unknown source (import error)'
 						source_name = source['name']
-						log(err='Queing Search for Movie: %s (%s) in Provider %s' % (title,year,source_name))
-						thread_i = workers.Thread(self.getMovieSource, title, year, imdb, proxy_options, key, re.sub('_mv_tv$|_mv$|_tv$', '', source['name']), source['call'])
+						log(err='Queuing Search for Movie: %s (%s) in Provider %s' % (title,year,source_name))
+						#thread_i = workers.Thread(self.getMovieSource, title, year, imdb, proxy_options, key, re.sub('_mv_tv$|_mv$|_tv$', '', source_name), source['call'])
+						thread_i = workers.Thread(self.getMovieSource, title, year, imdb, proxy_options, key, source_name, source['call'])
 						self.threads[key].append(thread_i)
 						self.threadSlots[key].append({'thread':thread_i, 'status':'idle', 'pos':pos, 'source':source_name})
 						pos += 1
@@ -214,8 +215,9 @@ def getSources(self, name, title, year, imdb, tmdb, tvdb, tvrage, season, episod
 					try:
 						source_name = 'Unknown source (import error)'
 						source_name = source['name']
-						log(err='Queing Search for Show: %s S%sE%s in Provider %s' % (tvshowtitle,season,episode,source_name))
-						thread_i = workers.Thread(self.getEpisodeSource, title, year, imdb, tvdb, season, episode, tvshowtitle, date, proxy_options, key, re.sub('_mv_tv$|_mv$|_tv$', '', source_name), source['call'])
+						log(err='Queuing Search for Show: %s S%sE%s in Provider %s' % (tvshowtitle,season,episode,source_name))
+						#thread_i = workers.Thread(self.getEpisodeSource, title, year, imdb, tvdb, season, episode, tvshowtitle, date, proxy_options, key, re.sub('_mv_tv$|_mv$|_tv$', '', source_name), source['call'])
+						thread_i = workers.Thread(self.getEpisodeSource, title, year, imdb, tvdb, season, episode, tvshowtitle, date, proxy_options, key, source_name, source['call'])
 						self.threads[key].append(thread_i)
 						self.threadSlots[key].append({'thread':thread_i, 'status':'idle', 'pos':pos, 'source':source_name})
 						pos += 1
@@ -225,27 +227,33 @@ def getSources(self, name, title, year, imdb, tmdb, tvdb, tvrage, season, episod
 
 			thread_ex = workers.Thread(self.executeThreads, key)
 			thread_ex.start()
-
+			self.executeThreadsStatus(key, thread_ex)
+
 			#sourceLabel = [re.sub('_mv_tv$|_mv$|_tv$', '', i) for i in sourceDict]
 			#sourceLabel = [re.sub('v\d+$', '', i).upper() for i in sourceLabel]
 
 			#time.sleep(0.5)
 			self.getSourcesAlive = False
-
-			control.RemoveThread(tuid)
+
 			return self.sources
 		except Exception as e:
 			log(type='ERROR-CRITICAL', err='getSources - %s' % e)
 			# self.purgeSourcesKey(key=key)
-
-			control.RemoveThread(tuid)
+
 			return self.sources
+
+	def executeThreadsStatus(self, key, thread):
+		tuid = control.id_generator(16)
+		try:
+			title = cleantitle.title_from_key(key)
+			control.AddThread('executeThreadsStatus', 'Provider Search Manage Thread: %s' % title, time.time(), '1', False, tuid, thread)
+			while thread != None and thread.isAlive():
+				time.sleep(1.0)
+		except Exception as e:
+			log(type='ERROR-CRITICAL', err='executeThreadsStatus - %s' % e)
+		control.RemoveThread(tuid)
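executeThreads below is a small bounded scheduler: every provider search sits in a slot whose status moves idle -> active -> done-marked -> done, and idle threads are only started while the active count stays under the control_concurrent_src_threads preference. A stripped-down, self-contained sketch of that pattern (simplified names and dummy workers; not the plugin's actual code):

# Stripped-down sketch of the slot-scheduler pattern used by executeThreads();
# the statuses and active-count gate mirror the logic below, the workers are dummies.
import time, threading

slots = [{'status': 'idle', 'thread': threading.Thread(target=time.sleep, args=(0.5,))} for _ in range(8)]
MAX_ACTIVE = 4

while True:
	for s in slots:
		if s['status'] == 'active' and not s['thread'].isAlive():
			s['status'] = 'done'              # done-marked/done collapsed into one step
	if len([s for s in slots if s['status'] == 'done']) == len(slots):
		break
	active = len([s for s in slots if s['status'] == 'active'])
	for s in slots:
		if s['status'] == 'idle' and active < MAX_ACTIVE:
			s['thread'].start()
			s['status'] = 'active'
			active += 1
	time.sleep(0.2)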
 
 	def executeThreads(self, key):
-
-		tuid = control.id_generator(16)
-		control.AddThread('executeThreads', 'Provider Search Manage Thread', time.time(), '1', False, tuid)
-
 		try:
 			title = cleantitle.title_from_key(key)
 			log(type='SUCCESS', err='Starting Threads ! : %s' % title)
@@ -257,36 +265,34 @@ def executeThreads(self, key):
 				for s1 in self.threadSlots[key]:
 					if s1['status'] == 'active':
 						active += 1
-					if 'done' in s1['status']:
-						done += 1
 					if s1['status'] == 'idle':
 						idle += 1
 					if s1['status'] == 'done-marked':
-						log(type='SUCCESS', err='Completed thread : %s > %s in %ss.' % (title, s1['source'], round(s1['e_time']-s1['s_time'], 2)))
-						s1['status'] = 'done'
+						log(type='SUCCESS', err='Completed Thread: %s > %s in %ss.' % (title, s1['source'], round(s1['e_time']-s1['s_time'], 2)))
 						control.RemoveThread(s1['tuid'])
+						s1['status'] = 'done'
+					if s1['status'] == 'done':
+						done += 1
 
 				if done == len(self.threadSlots[key]):
 					log(type='SUCCESS', err='Completed Threads ! : %s with %s sources' % (title, len(self.sourcesFilter(key=key))))
 					control.savePermStore()
-					control.RemoveThread(tuid)
 					return
 
 				if s['status'] == 'idle' and active < int(control.setting('control_concurrent_src_threads')):
-					s['thread'].start()
+					log(type='SUCCESS', err='Starting Thread: %s > %s' % (title, s['source']))
 					s['status'] = 'active'
 					s['s_time'] = time.time()
-					log(type='SUCCESS', err='Starting thread : %s > %s' % (title, s['source']))
 					tuid2 = control.id_generator(16)
-					control.AddThread('executeThreads', 'Provider Search Thread: %s > %s' % (title, s['source']), time.time(), '4', False, tuid2)
+					control.AddThread('executeThreads', 'Provider Search Thread: %s > %s' % (title, s['source']), time.time(), '4', False, tuid2, s['thread'])
 					s['tuid'] = tuid2
+					s['thread'].start()
 
-				time.sleep(0.1)
+				time.sleep(1.0)
 
 			time.sleep(1.0)
 		except Exception as e:
 			log(type='ERROR-CRITICAL', err='Thread Title %s - %s' % (title,e))
 
 		control.savePermStore()
-		control.RemoveThread(tuid)
 
 	def checkProgress(self, key=None):
@@ -297,10 +303,10 @@ def checkProgress(self, key=None):
 				for s in self.threadSlots[key]:
 					if x == s['thread'] and 'done' in s['status']:
 						c += 1
-
+
 			if len(self.threads[key]) == 0:
 				return 100
-
+
 			return float(int(float((float(c)/float(len(self.threads[key])))*100.0))*100)/100.0
 		else:
 			filtered = [i for i in self.sources if i['key'] == key]
@@ -310,20 +316,25 @@ def getDescProgress(self, key=None):
-		str = []
-		if key in self.threads.keys():
-			for s in self.threadSlots[key]:
-				if 'done' in s['status']:
-					str.append('%s (%ss. %s)' % (s['source'], round(s['e_time']-s['s_time'], 2), u'\u2713'))
-				elif s['status'] == 'idle':
-					str.append('%s (%ss. %s)' % (s['source'], '0.00', u'\u21AD'))
-				elif s['status'] == 'active' and 's_time' not in s.keys():
-					str.append('%s (%ss. %s)' % (s['source'], round(0.01, 2), u'\u21AF'))
-				elif s['status'] == 'active' and 's_time' in s.keys():
-					str.append('%s (%ss. %s)' % (s['source'], round(time.time()-s['s_time'], 2), u'\u21AF'))
-
-		ret_str = (' ,'.join(x for x in str))
-		return ret_str
+		try:
+			str = []
+			if key in self.threads.keys():
+				for s in self.threadSlots[key]:
+					if 'done' in s['status']:
+						str.append('%s (%ss. %s)' % (s['source'], round(s['e_time']-s['s_time'], 2), u'\u2713'))
+					elif s['status'] == 'idle':
+						str.append('%s (%ss. %s)' % (s['source'], '0.00', u'\u21AD'))
+					elif s['status'] == 'active' and 's_time' not in s.keys():
+						str.append('%s (%ss. %s)' % (s['source'], round(0.01, 2), u'\u21AF'))
+					elif s['status'] == 'active' and 's_time' in s.keys():
+						str.append('%s (%ss. %s)' % (s['source'], round(time.time()-s['s_time'], 2), u'\u21AF'))
+
+			ret_str = (' ,'.join(x for x in str))
+			return ret_str
+		except Exception as e:
+			log(type='ERROR-CRITICAL', err='getDescProgress - %s' % e)
+			log(type='ERROR-CRITICAL', err='getDescProgress - %s' % s)
+			return 'Error retrieving status ! %s' % e.args
 
 	def getETAProgress(self, key=None, type='movie'):
 
@@ -362,18 +373,31 @@ def getMovieSource(self, title, year, imdb, proxy_options, key, source, call):
 		except:
 			pass
 
-		for s in self.threadSlots[key]:
-			if s['source'] == source:
-				s['status'] = 'done-marked'
-				s['e_time'] = time.time()
-				if 'movie' in self.providersTimer[source].keys():
-					self.providersTimer[source]['movie'] += s['e_time'] - s['s_time']
-					self.providersTimer[source]['movie'] = self.providersTimer[source]['movie']/2
-				else:
-					self.providersTimer[source]['movie'] = s['e_time'] - s['s_time']
-				control.control_json[source]['movie'] = self.providersTimer[source]['movie']
-				break
-
+		doneMarked = False
+		try:
+			s_in_threadSlots = self.threadSlots[key]
+			for s in s_in_threadSlots:
+				if s['source'] == source:
+					try:
+						s['e_time'] = time.time()
+						if 'movie' in self.providersTimer[source].keys():
+							self.providersTimer[source]['movie'] += s['e_time'] - s['s_time']
+							self.providersTimer[source]['movie'] = self.providersTimer[source]['movie']/2
+						else:
+							self.providersTimer[source]['movie'] = s['e_time'] - s['s_time']
+						control.control_json[source]['movie'] = self.providersTimer[source]['movie']
+					except:
+						pass
+					s['status'] = 'done-marked'
+					doneMarked = True
+					log(type='INFO', err='getMovieSource-done-marked: %s > %s (%s)' % (source, title, year), logToControl=control.debug)
+					#break
+			if doneMarked == False:
+				log(type='ERROR-CRITICAL', err='getMovieSource: %s' % s_in_threadSlots, logToControl=control.debug)
+		except Exception as e:
+			log(type='ERROR-CRITICAL', err='getMovieSource: %s > %s (%s)' % (e, title, year), logToControl=control.debug)
+
+		log(type='INFO', err='getMovieSource: Completed %s > %s (%s)' % (source, title, year), logToControl=control.debug)
 
 	def getEpisodeSource(self, title, year, imdb, tvdb, season, episode, tvshowtitle, date, proxy_options, key, source, call):
@@ -400,17 +424,31 @@ def getEpisodeSource(self, title, year, imdb, tvdb, season, episode, tvshowtitle
 		except:
 			pass
 
-		for s in self.threadSlots[key]:
-			if s['source'] == source:
-				s['status'] = 'done-marked'
-				s['e_time'] = time.time()
-				if 'tv' in self.providersTimer[source].keys():
-					self.providersTimer[source]['tv'] += s['e_time'] - s['s_time']
-					self.providersTimer[source]['tv'] = self.providersTimer[source]['tv']/2
-				else:
-					self.providersTimer[source]['tv'] = s['e_time'] - s['s_time']
-				control.control_json[source]['tv'] = self.providersTimer[source]['tv']
-				break
+		doneMarked = False
+		try:
+			s_in_threadSlots = self.threadSlots[key]
+			for s in s_in_threadSlots:
+				if s['source'] == source:
+					try:
+						s['e_time'] = time.time()
+						if 'tv' in self.providersTimer[source].keys():
+							self.providersTimer[source]['tv'] += s['e_time'] - s['s_time']
+							self.providersTimer[source]['tv'] = self.providersTimer[source]['tv']/2
+						else:
+							self.providersTimer[source]['tv'] = s['e_time'] - s['s_time']
+						control.control_json[source]['tv'] = self.providersTimer[source]['tv']
+					except:
+						pass
+					s['status'] = 'done-marked'
+					doneMarked = True
+					log(type='INFO', err='getEpisodeSource-done-marked: %s > %s (S%sE%s)' % (source, tvshowtitle, season, episode), logToControl=control.debug)
+					#break
+			if doneMarked == False:
+				log(type='ERROR-CRITICAL', err='getEpisodeSource: %s' % s_in_threadSlots, logToControl=control.debug)
+		except Exception as e:
+			log(type='ERROR-CRITICAL', err='getEpisodeSource: %s > %s (S%sE%s)' % (e, tvshowtitle, season, episode), logToControl=control.debug)
+
+		log(type='INFO', err='getEpisodeSource: Completed %s > %s (S%sE%s)' % (source, tvshowtitle, season, episode), logToControl=control.debug)
 
 	def clearSources(self, key=None):
 		try:
@@ -419,39 +457,47 @@ def clearSources(self, key=None):
 			self.threads.clear()
 			self.threadSlots.clear()
 			self.threads = {}
-			self.threadSlots = {}
+			self.threadSlots = {}
+			log(type='INFO', err='clearSources performed at %s' % time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
 		except Exception as e:
 			log(type='ERROR', err='clearSources : %s' % e)
 
 	def purgeSources(self, maxcachetimeallowed=0, override=False):
 		try:
 			filtered = []
+			purgedItems = []
 			maxcachetimeallowed = float(maxcachetimeallowed)
 			curr_time = time.time()
 			if override == True:
 				pass
 			else:
-				# if cache time < 2min; then get the sources from last 2min. otherwise it will always return 0 sources
-				if maxcachetimeallowed < 2*60:
-					maxcachetimeallowed = 2*60
+				# if cache time < 5min; then get the sources from last 5min. otherwise it will always return 0 sources
+				if maxcachetimeallowed < 5*60:
+					maxcachetimeallowed = 5*60
 
 			for i in self.sources:
 				if (i['ts'] + float(maxcachetimeallowed)) >= curr_time:
 					filtered.append(i)
 
 			for k in self.threads:
 				if self.checkKeyInThread(k) == True and self.checkProgress(k) == 100:
+					purgedItems.append(k)
 					del self.threads[k]
 					del self.threadSlots[k]
 
 			del self.sources[:]
 			for i in filtered:
 				self.sources.append(i)
+
+			if len(purgedItems) > 0 or len(filtered) > 0 or control.debug == True:
+				log(type='INFO', err='purgeSources performed at %s' % time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
+				log(type='INFO', err='purgeSources purged items %s' % (', '.join(cleantitle.title_from_key(x) for x in purgedItems)))
 		except Exception as e:
-			log(type='ERROR', err='clearSources : %s' % e)
+			log(type='ERROR', err='purgeSources : %s' % e)
 
 	def purgeSourcesKey(self, key=None, maxcachetimeallowed=0):
 		try:
 			bool = False
 			filtered = []
+			purgedItems = []
 			curr_time = time.time()
 			if key == None:
 				return bool
@@ -467,10 +513,14 @@ def purgeSourcesKey(self, key=None, maxcachetimeallowed=0):
 					bool = True
 
 			if self.checkKeyInThread(key) == True and self.checkProgress(key) == 100:
+				purgedItems.append(key)
 				del self.threads[key]
 				del self.threadSlots[key]
 				bool = True
-
+
+			if len(purgedItems) > 0 or len(filtered) > 0 or control.debug == True:
+				log(type='INFO', err='purgeSourcesKey performed at %s' % time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
+				log(type='INFO', err='purgeSourcesKey purged items %s' % (', '.join(cleantitle.title_from_key(x) for x in purgedItems)))
 		except Exception as e:
 			log(type='ERROR', err='purgeSourcesKey : %s' % e)
 			bool = False
diff --git a/Contents/Libraries/Shared/resources/lib/sources/fmovies_mv_tv.py b/Contents/Libraries/Shared/resources/lib/sources/fmovies_mv_tv.py
index 48617eb..70e20e3 100644
--- a/Contents/Libraries/Shared/resources/lib/sources/fmovies_mv_tv.py
+++ b/Contents/Libraries/Shared/resources/lib/sources/fmovies_mv_tv.py
@@ -51,7 +51,7 @@ def __init__(self):
 		self.init = False
 		self.disabled = False
 		self.TOKEN_KEY = []
-		self.base_link_alts =
['https://fmovies.taxi','https://bmovies.pro','https://bmovies.is','https://bmovies.to','https://bmovies.club','https://bmovies.online','https://bmovies.ru','https://fmovies.to','https://fmovies.is','https://fmovies.se'] + self.base_link_alts = ['https://fmovies.taxi','https://bmovies.pro','https://bmovies.club','https://bmovies.ru','https://fmovies.to','https://fmovies.ru'] self.base_link = self.base_link_alts[0] self.grabber_api = "grabber-api/" self.search_link = '/sitemap' @@ -80,9 +80,9 @@ def __init__(self): self.siteonline = self.testSite() self.testparser = 'Unknown' self.testparser = self.testParser() - self.initAndSleepThread() self.firstRunDisabled = False self.init = True + self.initAndSleepThread() log(type='INFO', method='init', err=' -- Initializing %s %s %s End --' % (name, self.ver, self.update_date)) def info(self): @@ -145,11 +145,18 @@ def initAndSleepThread(self): thread_i.start() def InitSleepThread(self): - while True: - time.sleep(60*100) - self.siteonline = self.testSite() - self.testparser = self.testParser() - self.initAndSleep() + try: + while self.init == True: + tuid = control.id_generator(16) + control.AddThread('%s-InitSleepThread' % self.name, 'Persists & Monitors Provider Requirements (Every 60 mins.)', time.time(), '4', True, tuid) + time.sleep(60*60) + self.siteonline = self.testSite() + self.testparser = self.testParser() + self.initAndSleep() + control.RemoveThread(tuid) + except Exception as e: + log('ERROR','InitSleepThread', '%s' % e) + control.RemoveThread(tuid) def initAndSleep(self): try: diff --git a/Contents/Libraries/Shared/resources/lib/sources/gogoanime.py b/Contents/Libraries/Shared/resources/lib/sources/gogoanime.py index 6f66106..fa4b7d5 100644 --- a/Contents/Libraries/Shared/resources/lib/sources/gogoanime.py +++ b/Contents/Libraries/Shared/resources/lib/sources/gogoanime.py @@ -240,7 +240,6 @@ def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_o url = urlparse.urljoin(self.base_link, url) - #r = client.request(url) req = proxies.request(url, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, IPv4=True) r = client.parseDOM(req, 'iframe', ret='src') diff --git a/Contents/Libraries/Shared/resources/lib/sources/gowatchseries_ca.py b/Contents/Libraries/Shared/resources/lib/sources/gowatchseries_ca.py index c24aaa9..020a619 100644 --- a/Contents/Libraries/Shared/resources/lib/sources/gowatchseries_ca.py +++ b/Contents/Libraries/Shared/resources/lib/sources/gowatchseries_ca.py @@ -126,11 +126,18 @@ def initAndSleepThread(self): thread_i.start() def InitSleepThread(self): - while True: - time.sleep(60*100) - self.siteonline = self.testSite() - self.testparser = self.testParser() - self.initAndSleep() + try: + while self.init == True: + tuid = control.id_generator(16) + control.AddThread('%s-InitSleepThread' % self.name, 'Persists & Monitors Provider Requirements (Every 60 mins.)', time.time(), '4', True, tuid) + time.sleep(60*60) + self.siteonline = self.testSite() + self.testparser = self.testParser() + self.initAndSleep() + control.RemoveThread(tuid) + except Exception as e: + log('ERROR','InitSleepThread', '%s' % e) + control.RemoveThread(tuid) def initAndSleep(self): try: diff --git a/Contents/Libraries/Shared/resources/lib/sources/nineanime_ca.py b/Contents/Libraries/Shared/resources/lib/sources/nineanime_ca.py index de0e23d..e6d79db 100644 --- a/Contents/Libraries/Shared/resources/lib/sources/nineanime_ca.py +++ b/Contents/Libraries/Shared/resources/lib/sources/nineanime_ca.py @@ -37,15 +37,15 @@ 
class source: def __init__(self): del loggertxt[:] - self.ver = '0.1.1' - self.update_date = 'Feb. 22, 2019' + self.ver = '0.1.2' + self.update_date = 'Mar. 15, 2019' log(type='INFO', method='init', err=' -- Initializing %s %s %s Start --' % (name, self.ver, self.update_date)) self.init = False self.serverts = None self.disabled = False self.TOKEN_KEY = [] self.FLAGS = {} - self.base_link_alts = ['https://9anime.to','https://www1.9anime.to','https://9anime.is'] + self.base_link_alts = ['https://9anime.ru','https://9anime.to'] self.base_link = self.base_link_alts[0] self.grabber_api = "grabber-api/" self.search_link = '/sitemap' @@ -71,9 +71,9 @@ def __init__(self): self.siteonline = self.testSite() self.testparser = 'Unknown' self.testparser = self.testParser() - self.initAndSleepThread() self.firstRunDisabled = False self.init = True + self.initAndSleepThread() log(type='INFO', method='init', err=' -- Initializing %s %s %s End --' % (name, self.ver, self.update_date)) def info(self): @@ -126,11 +126,20 @@ def testSiteAlts(self, site): def initAndSleepThread(self): thread_i = workers.Thread(self.InitSleepThread) thread_i.start() - + def InitSleepThread(self): - while True: - time.sleep(60*10) # 10 min - self.initAndSleep() + try: + while self.init == True: + tuid = control.id_generator(16) + control.AddThread('%s-InitSleepThread' % self.name, 'Persists & Monitors Provider Requirements (Every 60 mins.)', time.time(), '4', True, tuid) + time.sleep(60*60) + self.siteonline = self.testSite() + self.testparser = self.testParser() + self.initAndSleep() + control.RemoveThread(tuid) + except Exception as e: + log('ERROR','InitSleepThread', '%s' % e) + control.RemoveThread(tuid) def initAndSleep(self): try: @@ -149,13 +158,13 @@ def initAndSleep(self): self.headers['User-Agent'] = ua #get cf cookie - cookie1 = proxies.request(url=t_base_link, headers=self.headers, output='cookie', use_web_proxy=self.proxyrequired, httpsskip=True) + cookie1 = proxies.request(url=t_base_link, headers=self.headers, output='cookie', use_web_proxy=self.proxyrequired, httpsskip=True, timeout=7) self.headers['Cookie'] = cookie1 # get reqkey cookie try: token_url = urlparse.urljoin(t_base_link, self.token_link) - r1 = proxies.request(token_url, headers=self.headers, httpsskip=True) + r1 = proxies.request(token_url, headers=self.headers, httpsskip=True, timeout=7) if r1 == None: raise Exception('%s not reachable !' % token_url) reqkey = self.decodeJSFCookie(r1) @@ -189,7 +198,7 @@ def initAndSleep(self): hash_url = urlparse.urljoin(t_base_link, self.hash_menu_link) hash_url = hash_url + '?' 
+ urllib.urlencode(query) - r1, headers, content, cookie2 = proxies.request(hash_url, headers=self.headers, limit='0', output='extended', httpsskip=True) + r1, headers, content, cookie2 = proxies.request(hash_url, headers=self.headers, limit='0', output='extended', httpsskip=True, timeout=7) #cookie = cookie1 + '; ' + cookie2 + '; user-info=null; reqkey=' + reqkey cookie = '%s; %s; user-info=null; reqkey=%s' % (cookie1 , cookie2 , reqkey) @@ -200,9 +209,9 @@ def initAndSleep(self): log('ERROR','initAndSleep', '%s' % e) def getSetServerTs(self): - geturl = proxies.request('https://fmovies.taxi/home', output='geturl') - res = proxies.request(geturl) try: + geturl = proxies.request('https://fmovies.taxi', output='geturl', httpsskip=True, timeout=7) + res = proxies.request(geturl, httpsskip=True, timeout=7) myts1 = re.findall(r'data-ts="(.*?)"', res)[0] myts = str(int(myts1)) return myts @@ -300,7 +309,7 @@ def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_o return sources myts = str(((int(time.time())/3600)*3600)) - log('INFO','get_sources-1', 'url: %s' % url, dolog=False) + log('INFO','get_sources-1', 'url: %s' % url, dolog=control.debug) token_error = False urls = [] @@ -326,7 +335,7 @@ def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_o search_url = search_url + '?' + urllib.urlencode(query) result = proxies.request(search_url, headers=self.headers, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, httpsskip=True) - log('INFO','get_sources-2', '%s' % search_url, dolog=False) + log('INFO','get_sources-2', '%s' % search_url, dolog=control.debug) rs = client.parseDOM(result, 'div', attrs = {'class': '[^"]*film-list[^"]*'})[0] #print rs @@ -388,7 +397,7 @@ def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_o try: url, episode = re.compile('(.+?)\?episode=(\d*)$').findall(url)[0] except: pass - log('INFO','get_sources-3', url, dolog=False) + log('INFO','get_sources-3', url, dolog=control.debug) referer = url result = resultT = proxies.request(url, headers=self.headers, limit='0', proxy_options=proxy_options, use_web_proxy=self.proxyrequired, httpsskip=True) @@ -411,7 +420,7 @@ def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_o if result != None: break except Exception as e: - log('FAIL','get_sources-3', '%s : %s' % (url,e), dolog=False) + log('FAIL','get_sources-3', '%s : %s' % (url,e), dolog=control.debug) if result == None: log('FAIL','get_sources','Could not find a matching title: %s' % cleantitle.title_from_key(key)) @@ -428,13 +437,13 @@ def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_o raise Exception('Could not decode ts') else: myts = str(int(resp)) - log('INFO','get_sources-3', 'could not parse ts ! will try and use decoded : %s' % myts, dolog=False) + log('INFO','get_sources-3', 'could not parse ts ! will try and use decoded : %s' % myts, dolog=control.debug) except: if self.serverts != None: myts = str(self.serverts) - log('INFO','get_sources-3', 'could not parse ts ! will use borrowed one : %s' % myts, dolog=False) + log('INFO','get_sources-3', 'could not parse ts ! will use borrowed one : %s' % myts, dolog=control.debug) else: - log('INFO','get_sources-3', 'could not parse ts ! will use generated one : %s' % myts, dolog=False) + log('INFO','get_sources-3', 'could not parse ts ! 
will use generated one : %s' % myts, dolog=control.debug) trailers = [] links_m = [] @@ -499,7 +508,7 @@ def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_o headers['Referer'] = urlparse.urljoin(url, s[0]) headers['Cookie'] = self.headers['Cookie'] - log('INFO','get_sources-4', '%s' % hash_url, dolog=False) + log('INFO','get_sources-4', '%s' % hash_url, dolog=control.debug) result = proxies.request(hash_url, headers=headers, limit='0', proxy_options=proxy_options, use_web_proxy=self.proxyrequired, httpsskip=True) result = json.loads(result) @@ -537,7 +546,7 @@ def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_o quality = '480p' #riptype = 'CAM' - log('INFO','get_sources-5', result, dolog=False) + log('INFO','get_sources-5', result, dolog=control.debug) if result['target'] != "-": pass @@ -584,7 +593,7 @@ def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_o if grabber!=None and not grabber.startswith('http'): grabber = 'http:'+grabber - log('INFO','get_sources-6', grabber, dolog=False) + log('INFO','get_sources-6', grabber, dolog=control.debug) result = proxies.request(grabber, headers=headers, referer=url, limit='0', proxy_options=proxy_options, use_web_proxy=self.proxyrequired, httpsskip=True) @@ -611,7 +620,7 @@ def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_o links_m = resolvers.createMeta(target, self.name, self.logo, quality, links_m, key, riptype, sub_url=sub_url, testing=testing) except Exception as e: - log('FAIL', 'get_sources-7','%s' % e, dolog=False) + log('FAIL', 'get_sources-7','%s' % e, dolog=control.debug) sources += [l for l in links_m] @@ -634,7 +643,7 @@ def resolve(self, url): except: return - def get_servers(self, page_url, proxy_options=None): + def get_servers(self, page_url, proxy_options=None): T_BASE_URL = self.base_link T_BASE_URL = 'https://%s' % client.geturlhost(page_url) page_id = page_url.rsplit('.', 1)[1] @@ -645,10 +654,10 @@ def get_servers(self, page_url, proxy_options=None): html = '
%s
' % json.loads(result)['html'].replace('\n','').replace('\\','') return html - def r01(self, t, e, token_error=False, code_use=False): + def r01(self, t, e, token_error=False, use_code=False): i = 0 n = 0 - if code_use == True: + if use_code == True: for i in range(0, max(len(t), len(e))): if i < len(e): n += ord(e[i]) @@ -767,7 +776,7 @@ def getVidToken(self): unpacked_code = '' cch = '' if len(self.TOKEN_KEY) == 0: - all_js_pack_code = proxies.request(all_js_url, use_web_proxy=self.proxyrequired, httpsskip=True) + all_js_pack_code = proxies.request(all_js_url, use_web_proxy=self.proxyrequired, httpsskip=True, timeout=7) unpacked_code = jsunpack.unpack(all_js_pack_code) cch = re.findall(r'%s' % client.b64decode('ZnVuY3Rpb25cKFthLXpdLFthLXpdLFthLXpdXCl7XCJ1c2Ugc3RyaWN0XCI7ZnVuY3Rpb24gW2Etel1cKFwpe3JldHVybiAoLio/KX0='), unpacked_code)[0] token_key = re.findall(r'%s=.*?\"(.*?)\"' % cch, unpacked_code)[0] @@ -781,7 +790,7 @@ def getVidToken(self): try: if len(self.TOKEN_KEY) == 0: - token_key = proxies.request(self.TOKEN_KEY_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True) + token_key = proxies.request(self.TOKEN_KEY_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True, timeout=7) if token_key !=None and token_key != '': #cookie_dict.update({'token_key':token_key}) self.TOKEN_KEY.append(token_key) @@ -790,7 +799,7 @@ def getVidToken(self): log('ERROR', 'getVidToken-2','%s' % e, dolog=False) try: - fm_flags = proxies.request(self.FLAGS_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True) + fm_flags = proxies.request(self.FLAGS_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True, timeout=7) if fm_flags !=None and fm_flags != '': fm_flags = json.loads(fm_flags) #cookie_dict.update({'token_key':token_key}) diff --git a/Contents/Libraries/Shared/resources/lib/sources/primewire_mv_tv.py b/Contents/Libraries/Shared/resources/lib/sources/primewire_mv_tv.py index 8303d61..b7ba85c 100644 --- a/Contents/Libraries/Shared/resources/lib/sources/primewire_mv_tv.py +++ b/Contents/Libraries/Shared/resources/lib/sources/primewire_mv_tv.py @@ -456,7 +456,9 @@ def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_o loc = url.replace(self.base_link+'/','') url = testjs(result, self.base_link, loc) + vidtype = 'Movie' if 'season' in url: + vidtype = 'Show' url = url.replace('=tv-','=watch-').replace('/season','&season') url = url.replace('season-','season=').replace('-episode-','&episode=') log('INFO', 'get_sources-1B',url, dolog=False) @@ -543,7 +545,7 @@ def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_o riptype = riptypex quality = '480p' - links_m = resolvers.createMeta(url, self.name, self.logo, quality, links_m, key, poster=poster, riptype=riptype, testing=testing) + links_m = resolvers.createMeta(url, self.name, self.logo, quality, links_m, key, vidtype=vidtype, poster=poster, riptype=riptype, testing=testing) except: pass diff --git a/Contents/Resources/icon-floppyload.png b/Contents/Resources/icon-floppyload.png new file mode 100644 index 0000000000000000000000000000000000000000..546a72a389e89a741abe2ae064964be696a8b1c9 GIT binary patch literal 2125 zcmV-T2(tHyP)AA zWi^ac2xT86szpzvWw44hsQ>@~0d!JMQvg8b*k%9#2ewH>K~#8N?7%Sr0002OAp57b z#tv8)0Kkc>W0eMiC=8<(#z5`|LfMO7e1S-PZRx-c zP%qL8au;+X1H1v~0jBf-On(bNB>-Iu0DDrv=oGew+W;Np$pH7V2?zky5#U?~5Kn-r z{RLzxhB_mveP&z1DP*#<{WPC zeGcFnDr$h!Xae~Y4strcUH2PB0Yu9B+bBTzzls|L&>{u^?GHu9qy!Mb#2&yW0N@&x zgajZ&U``qc^GZ8R`V 
zKvPG6RqSv&fNNA(1NgKKpam070=NYODnK@rk_zxT6){{xk7KaB1JI5X=hlZvlM=x1 zPADV5B9Z|zkweOx5D#=)3U&VY796t#~t7Q-QwQ>Y1j=QH9vJ1j4fR%P-cx@>%=D z66SfP5)AYJfM}`_*fKy@c{EyLga&NhP?Z*q6dKb z$G?L&ufR+#ctrpMzhMOe3CB1K0=a#Axqk-+?uabrAIUcaF!Q^^S0J!n?|>eF{PF^?qenp4lD4O~cit%8a67c9HlwgdBAyQkb9q1z{xwC-J3LzXUo7wgb~mqWWd&E%7CrU z(gn6YTL$V6Acjd;)$h(I_-CN{{K-!W0UQuF9BKl^$A$pon1Sm4UxGk((d+`MfDuW+ z$~Oa6z%vk7TzF;RMP0xTP2tE#K;NJyAQ=-FZaoC_<&FSx?{mOd0pTX0xw`}Mib?>( zC^k|Caez7p)-tdIHUnmV2L9AHT(_XC&zS)uC8q;8mNJ+Fq&ZLt;Oc@^BnJxn+pdEH z93lKm2D)PEd0t~ztBqxn7AUiMukUJgMtUNQ|y(V0R%fO}1v?EIC zlW-Y$GKV|)w}5jX5zZvdIY4(nbZ1>>rk-;k+92YbggqVyYz9(J2hzF9fMUp&feA_m zYz{0?k{hlJEPxz9VHh3I+ChMfAYeS^bs&5DP!Swp5)kwSg}(!dByhaFE#TJ|ssrCO z0d?fr1z}&)dCh@Sl7MX0oP_u4K(_0^$mdybNXo$O%jz^!*U+AZ$(q2YTkC;w32i;l z4KN4fy5#>|6ZV)_#W}YF{ND@s-;n$(88~C^0-Qz9KstwN5ip3JffOiOwvNxEXW)6; zhPetDn#c}}+BPi)FsL~oA_z!Zx3NC}FswPy6+zB=7mNVa{udJYFy<|o?AgA2F$Zv7 z0${uhz~qYRqH_R?cP#=A3IM7EfDT1bgac^0v=@wQPHnD?L3Ck z5f8QO>;*a=>cj8$=0E@d0Dxi8{izQyg%T(o006)N9_J31ZzW8_00000NkvXXu0mjf D=CPzX literal 0 HcmV?d00001 diff --git a/Contents/Resources/icon-floppysave.png b/Contents/Resources/icon-floppysave.png new file mode 100644 index 0000000000000000000000000000000000000000..151fda563f6690d1834830ee08582ba321c17cc1 GIT binary patch literal 5214 zcmcII`8(9#_pced?1rpkY$0SASwhCx2E|w-$)FJ4w(%wuV=Gg(WH)5&NtTc{jFPfv zkX<5s$iAD;^!efYJfA<{d!KWkbIyJ4J@-84o^zgaE&*e1%+4yr3IG7R3EIF406;Vn z1TceXXn;#snTA;W(GEBO;As0hLFw`w!n8%kAS-ig8m0~3Ql=*WzD9YpQAcYe0H{K;&szMd`v!0VmK=xPei)%BxJnC2ZwB?fSvh&W zS8`!zU|qA!c6oWsio?DELc%dSSnf7PcVKhsT5tca<}^pdc5yrjil@ci0+m3T$?C4EpE zG*-}}BmZ~?-fdrbpMt2wz$I8onxzdxt1sSfoa$0H%GP((@pZbVj<~?CF^g6V5`+Or zp>`M*i^7{RHDpGErJ8*MI;QLf(jzZ&^qe4s%h`aLD{Jm`8)3P1`t!Q4bLgM`^_z=# zfS7^t&>+Vh01R_U;kA>^K6uU~iwD8r?bMR`a#v+jcmL>ljg0veA}L4P0tkRbwHmmr#8&>D`9(S0k0+hP>eC#a;FX9`I*%6` zg29`G7=~Vi2CdKdxrg3cXg~U~Xb9}!lpIBP&jaKvHiF{A_RCw>0mtdw8HYD^<%&HO ziPg^g2Vc}r|C~3rn4b1I+2n;306fv7u_LZV&YCXn2bOeYO2;#iW>7rppOP3nT+(QU z4Iy>dC5L%6S2d=wv*_uWO;>Q8Bn8MC`N#<_V1POe(1j-NEY6FF$Y4tbS~9I@Jge5h#X?2aVUv^T!>}>?i;W#bFl{ zeL2nl@pBg4RR$JpUSs0?!kctdixv) zLS|L^P1-V8;_BMk2X7B&YQaO0hRjfm>W8;s^IB&k4y!LSPT%nSzP0GvaM1ZZS3O)1 zk8}AkY&~8i+4lI%0L=t za}xKb*FHO2gbU+wR!x)vfM`9GHtN5SH#iIs<>N3;cem`6==?9lQWE)c{;QJogRs>Z zN6sT5JWhqfmw8azjQG@{wK9{JtQo~useL%m|Mw2~CsW+ajYv9w^PaF|+IjoGq#hKH zGvmYKXz2d{yrTvzu)ZxyJ7t(A^1YKQIx5bvYl7V+85_ zjz?G-n*%(aZ)wtGQ)Z{ibd9@HfbNVi-rvMn!P*j0#JKdo0EHqJb4doDx^(UelwuB& z;xC%_*Ys%12u3ZZq>R)}&`kM6Vl`{f<@-bD=sX0#Pze`R7Ugj?WlYJ?p*>Sn|JAIu zHN|kimTYAKEJ)!YcVxPa>aF1rpax1meN2>jeMR^7r9ud5TDCk}cOh_!8WbrggU5+u zRO+1CInko_y7^50=HRTvY__G+G&cMpH-0NQ!Y6x8Mfl0}?i3BSW+pg)`#LFB&%8Lv z*U0M$hM`@218DoWucHtrvgF<-xA-SnLkGxNKucEs8PbG9Jk>1m(QTJ2znL$VW<}X6 za3TRsURGu@Tf)w@r3Y;fZM?DF>9OGWP;2{dai(VWw`VV+u67(LhQCvuOh~N-&-m}t zqo2GJr)PuX)2nMk^ZO$g*Q_qhn?|X&O2mh9+o9c1?5*H;+8@VyuFl2;6yeZV8FqG7m{qT$aYZRO`bb0iB7iE`fxpd+;= z^rga5v|P=6OhlQNsJ|z@pWYa@{qpli zYZV-n6W|uE$eH370KhfqWqXc0g4M__@u;bRP4?kf<<#K^Hv>oS{doZlSf;iVGI9VvAX3}%6^GUFQS2>f-{fZn| zhMs+E0t1iBo%M;j;ggY6329db22N~)3848zQj_~SDz1$utAD&uovh+19+}Ze=%BBa zD#2!p$T2Y3M1=w?sZp}oM4ESRzagI4ZiMrr3U8fbLMy~@6}(l{Y0uecLh>-zLKq02 zc}Par4=dV7&<*Pz+lGHW9zecoa+3d_;B8Kwi9&Rn3UeDOi^-ocORv>OS=~$U@$IzO z`mC@xAQTYpfJ@rgG#NVOdKgVQV(HvtAFVp^x6<73N^fpf6jK3mvZ(4gh{@|-# zAZ53*-J25K%oi6<@rJT-5V zx0pKldGT_iaAf3KUyDaY?MD=|e45&Ibpt*;3F-G3V)Bxy=Cw`#xP*;8BM4g>rWJ8- zYclfW{K~AZGvDJzs*%T?k7;};!^p>jQ3pa;ogp0?5Udn4*|IVDvhUvEWMbo?Yym3* zS-567R4LC18$@G8x{91}As3P(BnfhCrR*>`C$>odY0C?C#~PBD-W{Bjz17<5<;<6x zwBI3D3IMFkY@FChlX&XWItqyu!A7WHa-9qXRNhzTT{0gcoARKtEMF-_4`&D>3U5ts 
zp3z-co0vNr!F{Nemc*6ys#|CipvS1#Az@_v;m2qGAyrdN@v(w1z_c9+C0{ZXErA7e zQ+{=-Qy=%uR7I>vn&r)LupsTlgDbs3P~_TIQFmg^4X1s#wO@C4t0Rj~8Kl^_uyK&u z8E<@pbiZ0LKSPpin#PA+ch~%w)2ByiUlRLtl#$Q83+X!Uc(T-JqbiO$pc-87Rtb+o_06C?*XJz!YxeRSv33%ycmtDqH!G%1;5#klJLzk= zv@_p-?6$*r>m8%3vc&UwL(lB}xP|FjXj12|{zoIHzTxN-x ze4%vzYw7z<3R5gxce_3?h@&%m8iYdn+s*QY% zS2m7F3j%+DUN!>J^TDq+Zsij8sS9Cqx$AMQag+(WA&-c~QOqMI|8{g*_pcOJG!7?m zhr071aqIW+R8_Bzue7p^VUDtA>xt5+n_HV>~YI^LHqBgikzE|C$zk&!arIWUoS*kO zb##DCiPcz0w(`&kW#amV6HjJyK+)KVq&#=rL9K8_{cVA(b{$jRl7aPug*neRk4PS0=r#c&{kNp~vu)rSp4Vb-x9`opHzL^a zkih!bdN|%oqGvyr+| zlFFCMJ|$N-Ws4n-7*`YvwO#Sa76HS|W0UMzi;&8 zq=qdNpPMJ`(jAL^Zb?0!JNsk7<;|}9>%oEpotHrarrc$Hbbm+~BA2jKQfw!scq(*e zb8G#=iO(~_+;>ydkk8^1Z%8{#%0uY4!0-T zz%`Yp;?m-!knUIMZ>a7!rgAMp#;x=hW0Z%ImNF3AG%K2@KHSnL0k3wkERF0pLYImt z&;r&T(WQz0q>Q`eFPWxY&4#U5<0UH;uI@TNQ`q!*7Rl+D9IbKA>dXEvWlu94s1l`I zgt_06ezEKvx{;}1L?zs(bO_yW2_{CmywwKEM&IGOCt$C7pIR($ojn_n(RM1FSkfV? z-xnM_+4_p+b$rr&YQ&z47|NSVkx#5IPR1n8aoAo-R~;{wf--WF1u6AW;?g~RrcY4~ zd&@uNpbOPr*hx%XZL%IhD!7uigHu4XHxXv*K)@ai&pyAm>$s+%-feV>^obX>Lu&g# z@d#Hl^?We{R)kgEUf%dUj?8E8P0Gp0?qW3V&858&nhwBf)@xF?&I`N3FvraN_Z=}6 zO2=}Vfj(>77YKZ_pnXu$7Y`EwprTUix6bBv%;{%!hkRs~J@%vImO-tia*nq6l~C2;J}acqB?t++&KAOjT)*8v`Xd6 z8I78M(M$Jd%O;1;T-KhDbMsT!vo8k^^S+pclz($8733C%+vPJ&Ck*yaDGMaNTz=Vf zjpI$3yi&78(?55j&jUScHFt(rJT5@->!|`_iOZiMMjaK67h3+{)@CTfKdOTBi`!;d zT1NXnGu+POZ-(M|OTKZKuz&#``z4Mr=ezT3;gh=xNdOl|$eRyO(msHd~u)Pk*frZXTtfSJPltP6ljt6dH%Cj8W*uDW+xP zW&@KF9ZY$4Y7I(@lnblv)du_u4Ee_26Paz79w0{|AhJD*cnt1}s^l0qk9QxPzmq;! zjFT+xHo%N**X~bR6jF<;2HhZu4nTW#4j38Tvs|_;za@IM=e`*jXaOC6&$cQ*8q`7c zh?+{gE>0S>Vs)Lp3q(}eX-R|>UnZSf{54(x38c~Sv2BdBNC#^^ZO6)Tod@L8z%UZ| z!4@s}!Lr$MQs0x5qVs6ne<7i+D->W=NW@)l08HO@RnDeqL{22~zIFu_|E#XO1tG&B zj88FYB$hw&fr; z!YLj@2ZLtec$z@A42&>_=!jjv$l5Sk!9Mhb6=pfK9m4!kg;u8`;D-z4N3c@1=!(l_ z2E>Y{;V-{@Pn~@q1o4{c;om6r3Ib76_|-G?E{u3C+e`a%TGYVw0Zc$e^PkK}+q@RE zNDAGepQ zeo`^VY!#8$=A!MhDf~?_ggYC#(}Mc5BYlcJZ1&}8L!$zFr0B9Dm$mKHkybYOiCi2H zE4o|$`SXzc-x1+4*3$g51B{*fk!+CjT(HR>k>51w;BK9=pG_-zb^A}|W!6vqBXz() zRZ~BA1x56_i50`1Gp&t%zy}!|w;Vd!*dc@C4c{Ffzu$>dst~eYtGuP)78i~)N2Q}m zys**>pE!L_2QnSlYVTgVd59sEy|XAUc@&Q8eUyHyq>ku&I%~*pqR>yPwM02f9}`oocrA8ebwB|kds}Q9RR?2)JWd~ z0C*>b0UEUvSgqRHc7n3GiKPKl^f~6kFKx#6rkNGd>e0Nf3uii}CRUk>b>GaJCjxtB zwzszjey>+gETs<3Lyw0?W@ldkKnyvmuWK2WHJgpQCU#1wMQ6I->*!($JC@s2-8d}< zd0A8_A6AId%Nvkww23=)C6Sxa^W-#t`zd)=smhlz#!;4%1y>HdbqVch2vsLoiF?qx z+dUzi#}(n%5RP9`bfTkUxMAg8$DP0*Id|56y<0i%emh$;uxss|Z{xxim)}c4_1jdv z)(w5q-tDeUKH^AO#s=zU%B|F0xW*$Fdp3e+0ynwCpS8`=qE$wi@1u?1p1rfuKeRo? 
zI|j#Y8$9&581?pk#_cGb@Qd2vRFO56y_rG_s>BtC?K`G7f0I^Tro5_BvX3Zn;_rCT zW%JvS1YZG?2!;fABpagfkUk(#{t?vf8T=#t=fFknzss*6`FWE6GZ+8wlm4mn?;`(o znDWoU|Ec=_ib*MC82v9he``d|Vf&lgf4KNB??Jj0OAq72$ObBb#?((kz+8gg*6H+k z@L}eL92WZZ#jlAoC3bPrRT`B~LJn;wM8sybTuQ%;ptcj+qZ>v96D;tIdsp8@H(a#V zz#vXdLewHrZF(i%6*3KDXE}>81=Qnx5qldfW_0eV zI>X0`6&~3UxMQ@Rlwc3;9s$IB64kICde}~(L>}&*<+MCT0VzM(k7`tZcf$GEmu4OUayfxR;pOp@X84(_DgVC2z0hMOof^5*Cqe)RaV!8%xd+747zswv9B<2o!l zkhyKt>Pj)*09Y@@H~4>5hANpKHW%xKr-k74cS6O!Tl(n+YhcSD9deP10*toMzU(^B zZYgchD6ufcP0u~il4_e>HP+!ZTOsu3Zqv>}Wl6;Qm z3Sg*y=V^DJEhs}-iUk~_eUcdDWHYn){vlm3?dkIV6U!M`?y?-J&+rwR(c5W%&XsTo z%5tj7xLfm0LZ3no@Q6IIDFK6vjbXv+ivUUj&+Tv}H^F(|=Z;SOM?nI&!s&F3@;I82 zuK3B{GlmO}tyHxsyT>VpX0*S&_4YoJgIIx~OnUPm7P(!(IWO?u3?$sOSJM2(p#gb= zzJ}VmY{a^VQ~AqIeeQ|VE|wv55EyMhcOPNA?PC?3ormN#?@fe5_2?j&J)`cO-i_;y0Y~6h;*4H9u2&*` z&N^1=s7(%SOVkVK(;(X8y)2bXvSdMf%)n(g`&Q=0XO0a=J&5GATL)tN!cJ4pqY{)s z6;`l9er`PGzb>=%+o`OpHdqF=Jectof+sOomO;*tHstUp*D$lLLv(6A$wlU%^amVW zn#2#(+cdxS>25OX)J55|P3iozjx*<+B}wm4VVQMS{wjm8y8K>aCFD9_0i~>+zvo;m zg8iR-YI~!Pu!GO3jz9$R0A8NFGTuD>qYQ?k)AXGOD0n$K~KhMN)LgA=UZcYhF zy6-_>)Ik&n0dlCoNz2c&=%b}MgU;u2B3HoDgXja9ug6iA@{Fn;X;(MQsGmN8m7$L~ zV{=mNR<7oPZuB7`Vc7YH z^1ePhhX&o8^5wSLtkch%Q8r=Xn)aRk4pMqqI`_(;WoHrMxJiXrccZY*nN|$m&nt&D z;|DAPjn!0$a+W9NN+Hz>lN}CK`!A({)(VY!=vE?NgLP9H2lV`=#(oxI6f6VHFGX^cV`)630AapWk;BHIY`lk;CIuN`TOaN5 zE8eaa#NunNv1j4juvBKkr-^7u8?%%ogtQ%gjeUl*k)I6dH7NDP?`}Ckq_gw68mfx! zhy2!Unpr1Z3$S>FZ`hbmnG{nnq2%I;*t3|yF(JrC!PDfNck;GK4#%n%`%zv=M)5M>Q17-XI|RB{_pZxU+$ z6QK*Wp0QC9c~@Ms3&Xe&DcNuH7Bu|}Cih3=G0 zBz6^lY)xQ`(-f{B6^Y;|d3g-kP%{``<&VNJdA!JsQ++OL0&Q7b%rySGL6o!bJPpF4m6oRWTi$qS5^00o}?^Q-Ijxb5W zIQ7v9B{`}zbZvT(Jf~}n3>~1q=knqJDKbFK>a+I^#RB>efiSw?eLsKl7p$HxaMHN7 z8tbmpC`+}I;K>p=nC5WN@$lYwP^NmLe#5QO>aA&NuZElz2E^6970R%d=yCg~)*Q{$K&5AziD9r7ZgM|4i`eytuYGm6mFo$#pO7#c zS4rKk*U=_>4>&oJ6vz-=vtZI8U@W<8mq#PU08a{h;j`rXOI_&w&xxaG%rIBJYiyGE z#>*S8Gz%*MO#dBsm_zclQ77y05f9eFDBd)=)ZP0M+W+VgLY?#Ls}B^9b~3kl+cfDW|Mx~6mp<}o{SMGahp*Q=wD z-V3QQyrJe(o+`EP!#dT*8h%yipkOlFej&LjNaW-YF18m!Yyh!QyA~n^_ri zC>IlqvNvWChU)G>&_kThg_mqz$`XvA=Y;~8k8XO1d%|;pNX8K&%q1wY^Vr+i%@Cv@ zLj)Xx1ok}~78&2ev2)PimazDaA+Q?7h#O+CDHy_gRSQ{e-RD*=V(BQ>Wum=Qe&Q5b zLXe}!%oSR0jXu$avSdWx3Z4gx=nh0zlMxh9B7n)*SHMEmxXxyhiJx~oiyD<-s0b3( zuDAE&wCO&|a$Z_1BRF*5#*%A%1l5s}Awn%%9wLovsiwKaT?v;E>fns|6E0y!Ax5g2 zCDW*DY;hMZBW*=o&~K&Hr!`Q(3be=S3Wpn!DjG|_Vn_Y>5$rlw>lw>>Qjio1cUZUk zwE3mm>^I=Am1byJHH*5fBi4dC5dl$K=!_Oxu*b`1M}=)p8L#K9^xmq*tTrOkdQATfp6XkdH+Z--pIuTtq1oE8Bh0 z?pdnq!hLmdvc)MV&wdw!Xp^t24_>qRh?8;WmEAkzb1^6(xHRMPG!3 zJ4ZEElIkOL9#6L;;H$6W0Jdma5`PRCp?-`fHGRa;Y~LF-s>1#!&Sm7VWw`1##L+kPG$(J0ej<8ynz~m7&lrv0Pc>RNUir)=ez~SnP9N|-h z-1(n9=ys$WLykCj;W9_vu*zC4a`{>QC1+#iO~lu3|Jmlx!wBYLE6*(yHiH=l(ROyT z=rr_r@G8@p!!`(=QMT)NVLV5E@^$bSWufqsBc2AnbzVR1#wrEXRykX##iSr5+@d}o zu*h|U_r}9InpOyb8>*CMFl}N>pQPAIBcYzgw_~)9z;@MFr6eiu7A919x{KQ6^3wBu ztFR@1_XMAK89TR<42K=bertnqlweonA0%m4Q6kiQ##h0OlOzdDF6Y>o51uusB+-EY zZQ16A5nNl!5|)_!=CtR&S;v!FGo_ud&jbcD*$o`{v8Sj4qLlR+Q*eMGgA4Fu zjfm^{I8)ns5Q3%t;p+1FCK-6~(251h#~?2veGyI;FJ#ya2sTr_xadRM@}<=1U0DOGI9d3X%}aVi>0IC-zTZF@#3W0y#L zHZgqIf{!#q#9yX-8zK$j7>PVd$d$$W#@wR+k)^$l^6GEq^AGa%7fLo4vg^9NU>XaQ z&3CD)VF87G{mAnV7bdmIFCLtuP!a^5KT{pF?+IwHvTgCx@hrxcz%&EJ?h^!)KD}7! 
zH+B*G(uCc-xolZUIM_}zF8Rj%jOi;75iJA7=p4!-03kwm4C6AvnmCYpE!K7OYf}^3 zAS;Oo->PQ}ED@~x399F5i}Q+|*DI=hsB{Gn=&2<^EyTbe($X;Bw=d6c=mm`cKH9%}40BqO5JL{pj-7E6_t)5f)=zYfKv2tI;7KHl`=^^;(t@VLW zpQMXL>kdnWlle~d0i8L9?`Gvn+6{$qwJ0PlDy-P&T&faarJ0^hcpq6|w<=KEK4(}2l zz@g{U{pT1(kwgn#Nc4L0yY1Pvfg^E*(TGc)2Mi~3?cTM?7KxE2!p%em`PWaHHomWA zmt<(3B0m^9c|sHlv*d-0^#6;Y7zpT}`8zuKJAIL@m1XDoU!;SA<~@y%yShVW{>3`z zlX*Y7v|*{D zi6O6>jPVdzhO&. +''' +''' + usage: + + if detect(some_string): + unpacked = unpack(some_string) +''' +######################################################################################################### + + +import re, random, base64 + +def detect(source): + """Detects whether `source` is P.A.C.K.E.R. coded.""" + source = source.replace(' ', '') + if re.search('eval\(function\(p,a,c,k,e,(?:r|d)', source): return True + else: return False + +def unpack(source): + """Unpacks P.A.C.K.E.R. packed js code.""" + payload, symtab, radix, count = filterargs(source) + + if count != len(symtab): + raise UnpackingError('Malformed p.a.c.k.e.r. symtab.') + + try: + unbase = Unbaser(radix) + except TypeError: + raise UnpackingError('Unknown p.a.c.k.e.r. encoding.') + + def lookup(match): + """Look up symbols in the synthetic symtab.""" + word = match.group(0) + return symtab[unbase(word)] or word + + source = re.sub(r'\b\w+\b', lookup, payload) + source = source.replace("\\'", "'") + + return replacestrings(source) + +def filterargs(source): + """Juice from a source file the four args needed by decoder.""" + argsregex = (r"}\('(.*)', *(\d+), *(\d+), *'(.*?)'\.split\('\|'\)") + args = re.search(argsregex, source, re.DOTALL).groups() + + try: + return args[0], args[3].split('|'), int(args[1]), int(args[2]) + except ValueError: + raise UnpackingError('Corrupted p.a.c.k.e.r. data.') + +def replacestrings(source): + """Strip string lookup table (list) and replace values in source.""" + match = re.search(r'var *(_\w+)\=\["(.*?)"\];', source, re.DOTALL) + + if match: + varname, strings = match.groups() + startpoint = len(match.group(0)) + lookup = strings.split('","') + variable = '%s[%%d]' % varname + for index, value in enumerate(lookup): + source = source.replace(variable % index, '"%s"' % value) + return source[startpoint:] + return source + +def set_myuid(str):#line:1 + result = [] + while str: + result.append(chr(str % 128)) + str >>= 7 + return ''.join(reversed(result)) + +class Unbaser(object): + """Functor for a given base. 
Will efficiently convert
+	strings to natural numbers."""
+	ALPHABET = {
+		62: '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ',
+		95: (' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+			'[\]^_`abcdefghijklmnopqrstuvwxyz{|}~')
+	}
+
+	def __init__(self, base):
+		self.base = base
+
+		# If base can be handled by int() builtin, let it do it for us
+		if 2 <= base <= 36:
+			self.unbase = lambda string: int(string, base)
+		else:
+			if base < 62:
+				self.ALPHABET[base] = self.ALPHABET[62][0:base]
+			elif 62 < base < 95:
+				self.ALPHABET[base] = self.ALPHABET[95][0:base]
+			# Build conversion dictionary cache
+			try:
+				self.dictionary = dict((cipher, index) for index, cipher in enumerate(self.ALPHABET[base]))
+			except KeyError:
+				raise TypeError('Unsupported base encoding.')
+
+			self.unbase = self.dictunbaser
+
+	def __call__(self, string):
+		return self.unbase(string)
+
+	def dictunbaser(self, string):
+		"""Decodes a value to an integer."""
+		ret = 0
+		for index, cipher in enumerate(string[::-1]):
+			ret += (self.base ** index) * self.dictionary[cipher]
+		return ret
+
+class UnpackingError(Exception):
+	"""Badly packed source or general error. Argument is a
+	meaningful description."""
+	pass
+
+def test():
+	test = '''eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('q.r(s(\'%h%t%a%p%u%6%c%n%0%5%l%4%2%4%7%j%0%8%1%o%b%3%7%m%1%8%a%7%b%3%d%6%1%f%0%v%1%5%D%9%0%5%c%g%0%4%A%9%0%f%k%z%2%8%1%C%2%i%d%6%2%3%k%j%2%3%y%e%x%w%g%B%E%F%i%h%e\'));',42,42,'5a|4d|4f|54|6a|44|33|6b|57|7a|56|4e|68|55|3e|47|69|65|6d|32|45|46|31|6f|30|75|document|write|unescape|6e|62|6c|2f|3c|22|79|63|66|78|59|72|61'.split('|'),0,{}))'''
+	print unpack(test)
+
+#test()
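The unpacker is driven just as the usage docstring at the top of the file says; a short sketch (the packed.js input file is hypothetical, and the module is assumed importable as jsunpack), mirroring how decode_mp4upload() in misc.pys below feeds it a scraped eval(function(p,a,c,k,e,d)...) blob:

# Sketch of driving the unpacker; 'packed.js' is a hypothetical input file and
# the plain import assumes the module is reachable as jsunpack.
import jsunpack

packed = open('packed.js').read()
if jsunpack.detect(packed):
	print jsunpack.unpack(packed)       # the decoded javascript source
else:
	print 'not P.A.C.K.E.R. encoded'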
diff --git a/Contents/Services/Shared Code/misc.pys b/Contents/Services/Shared Code/misc.pys
index 3584a81..b42b59a 100644
--- a/Contents/Services/Shared Code/misc.pys
+++ b/Contents/Services/Shared Code/misc.pys
@@ -16,10 +16,14 @@ import sys,urllib2,HTMLParser,urllib,urlparse
 import random, time, cookielib
 import base64
 
+# import Shared Code
+import jsunpack as JSUNPACK
+
 from __builtin__ import eval
 
 USE_RESTRICT_RAPIDVIDEO = True
 RAPIDVIDEO_CAPTCHA = []
+USE_RESTRICT_MP4UPLOAD = True
 
 #-------------------------------------------------------------------------------------------------------------
 # Enforce IPv4 for GetlinkAPI nks Google li
@@ -53,7 +57,7 @@ IP_OVERIDE = True
 RE_SUB1 = Regex(r'(?m)(^[^\#])')
 RE_SOURCES = Regex(r'(?m)(^.+?\d+/(\d+).+$)')
 
-supported_hosts = ['mycloud.to','mcloud.to','rapidvideo.com','streamango.com','rapidvideo','streamango']
+supported_hosts = ['mycloud.to','mcloud.to','rapidvideo.com','streamango.com','rapidvideo','streamango','mp4upload']
 
 USE_POST = []
 RV_COOKIES = ['__cfduid=dda567790eb0b331fd9a8191dec20619e1534810793; PHPSESSID=5v3cqu54ml4rtsdfaejo533o17']
@@ -185,7 +189,35 @@ def resolve(url, https_skip=True, test=False, strip_url=True):
 			except:
 				pass
 
+			if len(video_url_a) > 0:
+				video_urlf = video_url_a
+			else:
+				raise Exception('No Data found in page')
+
+		elif 'mp4upload.' in ourl:
+			myheaders = {}
+			myheaders['User-Agent'] = agent()
+			myheaders['Referer'] = ourl
+			myParams['headers'] = myheaders
+
+			page_data_string = request(ourl, headers=myheaders, httpsskip=True)
+			if 'File Not Found' in page_data_string or 'File was deleted' in page_data_string:
+				raise Exception('File not available')
+			else:
+				video_url, err = decode_mp4upload(page_data_string)
+				if err != '':
+					raise Exception(err)
+
+				for v in video_url:
+					f_i = {'file':v, 'label':'720'}
+					video_url_a.append(f_i)
+					if USE_RESTRICT_MP4UPLOAD == True:
+						break
+
+			if len(video_url_a) > 0:
 				video_urlf = video_url_a
+			else:
+				raise Exception('No Data found in page')
 
 		elif 'mycloud.' in ourl or 'mcloud.' in ourl:
 			#Log('============ MyCloud URL ==================')
@@ -318,12 +350,49 @@ def error(url, https_skip):
 			error = 'RapidVideo %s requires captcha verification' % url
 			RAPIDVIDEO_CAPTCHA.append('')
 			del RV_COOKIES[:]
+		elif ('mp4upload.' in ourl) and "File Not Found" in page_data_string:
+			error = 'Video Not Found'
+		elif ('mp4upload.' in ourl) and "File was deleted" in page_data_string:
+			error = 'Video removed or blocked.'
 		else:
 			error = 'Page returned None'
 	except:
 		error = 'Page could not be retrieved'
 
 	return error
+
+####################################################################################################
+def decode_mp4upload(html):
+
+	source = None
+	err = ''
+	try:
+		try:
+			str_pattern="(eval\(function\(p,a,c,k,e,(?:r|d).*)"
+
+			js = re.compile(str_pattern).findall(html)
+			if len(js) == 0:
+				raise Exception('No packer js found.')
+
+			js = js[0]
+			if 'p,a,c,k,e,' not in js:
+				raise Exception('No packer js found.')
+
+			html_with_unpacked_js = JSUNPACK.unpack(js)
+			if html_with_unpacked_js == None:
+				raise Exception('Could not unpack js.')
+
+			source = re.findall(r':\"(http.*.mp4)\"', html_with_unpacked_js)
+		except Exception as e:
+			err = 'Mp4Upload Error: %s' % e
+			Log(err)
+		if source != None and len(source) == 0:
+			raise Exception('No mp4 Videos found !')
+	except Exception as e:
+		err = 'Mp4Upload Error: %s' % e
+
+	return source, err
 
 ####################################################################################################
 def decode_streamango(html):
diff --git a/Contents/Services/URL/FMovies/ServiceCode.pys b/Contents/Services/URL/FMovies/ServiceCode.pys
index 8830bac..6518791 100644
--- a/Contents/Services/URL/FMovies/ServiceCode.pys
+++ b/Contents/Services/URL/FMovies/ServiceCode.pys
@@ -126,7 +126,13 @@ def MediaObjectsForURL(url, **kwargs):
 	https_skip = data['useSSL']
 	pairrequired = False
 	direct_play = True
+	provider = 'plugin'
 
+	try:
+		provider = data['provider']
+	except:
+		pass
+
 	try:
 		isTargetPlay = data['isTargetPlay']
 	except:
@@ -136,6 +142,12 @@ def MediaObjectsForURL(url, **kwargs):
 		direct_play = not data['force_transcode']
 	except:
 		pass
+
+	if direct_play == True and provider == 'IMDb':
+		try:
+			direct_play = not data['force_transcode_imdb']
+		except:
+			pass
 
 	try:
 		openloadApiKey = data['control_openload_api_key']
diff --git a/README.md b/README.md
index ce03e19..e9aa4be 100644
--- a/README.md
+++ b/README.md
@@ -26,6 +26,7 @@ System Requirements
 - Android M (Samsung Galaxy S6)
 - iOS (Apple iPhone6)
 - Chromecast
+ - Roku
 
 How To Install
 ==============
@@ -55,4 +56,10 @@ Acknowledgements
 - [coryo123](https://forums.plex.tv/discussion/194503) for DumbTools-for-Plex
 - [Twoure](https://github.com/Twoure) for [AuthTools](https://github.com/Twoure/KissNetwork.bundle) and valuable technical insights and suggestions for JS routines, etc.
 - [mikew](https://github.com/mikew) for SS-Plex the inspiration for the Downloader
-- [Pip Longrun](https://github.com/piplongrun) for TrailerAddict API support & other trailer agents
\ No newline at end of file
+- [Pip Longrun](https://github.com/piplongrun) for TrailerAddict API support & other trailer agents
+- [JamminR](https://github.com/JamminR) & [OldPapa](https://github.com/Oldpapa) for Documentation & Support
+- Parts of the code based/ported from Specto and Exodus (Kodi addons)
+
+Donate & Support
+================
+[![Donate & Support](https://www.paypalobjects.com/en_US/i/btn/btn_donateCC_LG.gif)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=3T3FQDKDZHZ9L)