Skip to content

Commit

Permalink
Added the anime mobile site as a fallback for video playback
Browse files Browse the repository at this point in the history
  • Loading branch information
Twoure committed May 28, 2017
1 parent 005eabc commit 7130447
Show file tree
Hide file tree
Showing 7 changed files with 92 additions and 81 deletions.
8 changes: 4 additions & 4 deletions Contents/Code/DevTools.py
Original file line number Diff line number Diff line change
Expand Up @@ -388,20 +388,20 @@ def DevToolsH(title=None, header=None, message=None):

oc = ObjectContainer(title2='Header Tools', header=header, message=message)

fttl = [n for (n,u) in KCore.util.base_url_list_tuple]
if title:
header = 'Header Tools'
if title == 'Header_Dict':
Thread.Create(ResetCustomDict, file_to_reset=title)
message = 'Resetting {}. New values for {} will be written soon'.format(title, title)

return DevToolsH(header=header, message=message, title=None)
elif ( title == 'Anime' or title == 'Cartoon'
or title == 'Drama' or title == 'Manga' or title == 'Comic' ):
elif title in fttl:
Log('\n----------Updating {} Headers in Header_Dict----------'.format(title))

for (h_name, h_url) in KCore.util.base_url_list_tuple:
if h_name == title:
Headers.get_headers_for_url(h_url, update=True)
Headers.get_headers_for_url(h_url, update=True, mobile='Mobile' in h_name)
break

message = 'Updated {} Headers.'.format(title)
Expand All @@ -410,7 +410,7 @@ def DevToolsH(title=None, header=None, message=None):
oc.add(DirectoryObject(key=Callback(DevToolsH, title='Header_Dict'),
title='Reset Header_Dict File',
summary='Create backup of old Header_Dict, delete current, create new and fill with fresh headers. Remember Creating Header_Dict takes time, so the channel may timeout on the client while rebuilding. Do not worry. Exit channel and refresh client. The channel should load normally now.'))
for name in sorted(KCore.util.tt_list):
for name in sorted(fttl):
oc.add(DirectoryObject(key=Callback(DevToolsH, title=name),
title='Update {} Headers'.format(name),
summary='Update {} Headers Only in the \"Header_Dict\" file.'.format(name)))
Expand Down
4 changes: 3 additions & 1 deletion Contents/Code/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,9 @@ def MainMenu():
cp_match = True if Client.Platform in KCore.core.list_view_clients else False

data = list()
for t, u in KCore.util.base_url_list_tuple:
for (t, u) in KCore.util.base_url_list_tuple:
if 'Mobile' in t:
continue
thumb = 'icon-{}.png'.format(t.lower())
rthumb = None if cp_match else R(thumb)
art = 'art-{}.jpg'.format(t.lower())
Expand Down
3 changes: 2 additions & 1 deletion Contents/Services/Shared Code/kbase.pys
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@ class core(object):
prefix = '/video/kissnetwork'
channel_title = 'KissNetwork'
list_view_clients = ['Android', 'iOS']
user_agent = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.110 Safari/537.36'
user_agent = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'
user_agent_mobile = 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Mobile Safari/537.36'

class cache(object):
# storage
Expand Down
96 changes: 54 additions & 42 deletions Contents/Services/Shared Code/kheaders.pys
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ try:
import cfscrape
except ImportError, e:
cfscrape = None
Log.Critical('* <kheaders.import>: cfscrape import ERROR: {}'.format(e))
Log.Critical('* <kheaders.import[CRITICAL]>: cfscrape import ERROR: {}'.format(e))

####################################################################################################
def CFTest(kind):
Expand All @@ -30,13 +30,14 @@ def CFTest(kind):
class KissHeaders(object):
def __init__(self):
self.user_agent = core.user_agent
self.header_file = os.path.join(paths.data_path, 'Header_Dict')
self.user_agent_mobile = core.user_agent_mobile
self.header_file = os.path.join(paths.data_path, cache.header_dict)
self.header_data = dict()
self.event = Event()

def save_dict(self):
if not self.header_data:
Log.Error('* <KissHeaders.save_dict> -error: No header content to Save')
Log.Error('* <KissHeaders.save_dict[ERROR]>: No header content to Save')
return False

with open(self.header_file, 'wb') as f:
Expand All @@ -45,7 +46,7 @@ class KissHeaders(object):
if os.path.isfile(self.header_file) and os.stat(self.header_file).st_size != 0:
Log.Debug('* <KissHeaders.save_dict>: Header file Saved')
return True
Log.Error('* <KissHeaders.save_dict> -error: Header file NOT Saved')
Log.Error('* <KissHeaders.save_dict[ERROR]>: Header file NOT Saved')
return False

def load_dict(self):
Expand All @@ -62,73 +63,79 @@ class KissHeaders(object):

# file does not exist or is empty, create new file and fill
if not self.create_dict():
Log.Error('* <KissHeaders.load_dict> -error: cannot create new header dict')
Log.Error('* <KissHeaders.load_dict[ERROR]>: cannot create new header dict')
return False
if not self.load_dict():
Log.Error('* <KissHeaders.load_dict> -error: tried to re-save header dict but failed.')
Log.Error('* <KissHeaders.load_dict[ERROR]>: tried to re-save header dict but failed.')
return False
return bool(self.header_data)

def set_header(self, url):
def set_header(self, url, mobile=False):
base_url = util.get_base_url(url)
type_title = util.get_tt(url)
base_url = base_url+"/M" if (mobile and 'kissanime' in base_url) else base_url
tt = util.get_tt(url)
tt = "{}_Mobile".format(tt) if mobile else tt
userAgent = self.user_agent_mobile if mobile else self.user_agent
try:
try:
cookie, user_agent = cfscrape.get_cookie_string(url=base_url, user_agent=self.user_agent)
cookie, user_agent = cfscrape.get_cookie_string(url=base_url, user_agent=userAgent)
r_cf_clearance = Regex(r'cf_clearance\=.*\-(\d+)\-(\d+)').search(cookie)
except:
Log.Exception(u'* <KissHeaders.set_header[cfscrape.get_cookie_string]> -error: >>>')
Log.Exception(u'* <KissHeaders.set_header[cfscrape.get_cookie_string][EXCEPTION]>: >>>')
cookie = 'na'
user_agent = self.user_agent
user_agent = userAgent
r_cf_clearance = None

if r_cf_clearance:
date = int(r_cf_clearance.group(1))
expire = date + int(r_cf_clearance.group(2))
else:
expire = int(Datetime.TimestampFromDatetime(Datetime.Now() + Datetime.Delta(days=364)))
Log.Warn(u'* <KissHeaders.set_header> -error: Cannot calculate expire time for {}.'.format(base_url))
Log.Warn(u'* <KissHeaders.set_header[WARN]>: Cannot calculate expire time for {}.'.format(base_url))
if 'kim' in base_url:
expire = int(Datetime.TimestampFromDatetime(Datetime.Now() + Datetime.Delta(hours=1)))

return {
type_title: {
tt: {
'cookie': cookie, 'user-agent': user_agent, 'referer': base_url,
'expire': '{}'.format(expire)
}
}
except:
Log.Exception(u'* <KissHeaders.set_header> -error: >>>')
Log.Exception(u'* <KissHeaders.set_header[EXCEPTION]>: >>>')
return {}

def create_dict(self):
Log.Debug('* <KissHeaders.create_dict>: Creating New Header Dict')
for item in util.base_url_list_tuple:
self.header_data.update(self.set_header(item[1]))
m = 'Mobile' in item[0]
self.header_data.update(self.set_header(item[1], mobile=m))
if not self.save_dict():
Log.Error('* <KissHeaders.create_dict> -error: failed to save new header dict')
Log.Error('* <KissHeaders.create_dict[ERROR]>: failed to save new header dict')
return False
return True

def get_headers_for_url(self, url, update=False):
type_title = util.get_tt(url)
def get_headers_for_url(self, url, update=False, mobile=False):
tt = util.get_tt(url)
tt = "{}_Mobile".format(tt) if mobile else tt
if not self.header_data:
Log("* <KissHeaders.get_headers_for_url>: Loading Header Dict")
self.load_dict()
if not self.header_data:
Log.Error(u'* <KissHeaders.get_headers_for_url> -error: Cannot load {} header, because header file does not exist'.format(type_title))
Log.Error(u'* <KissHeaders.get_headers_for_url[ERROR]>: Cannot load {} header, because header file does not exist'.format(tt))
return {}

base_url = util.get_base_url(url)
if not type_title:
base_url = base_url+"/M" if (mobile and 'kissanime' in base_url) else base_url
if not tt:
Log(u"* <KissHeaders.get_headers_for_url>: '{}' is NOT a Kiss URL. Returning default headers.".format(base_url))
return {
'user-agent': core.user_agent,
'referer': base_url
}

current_timestamp = int(Datetime.TimestampFromDatetime(Datetime.Now()))
if len(self.header_data) >= 1:
if (len(self.header_data) == 10):
def check_update_instance(tt, cts, up1=False, up2=False):
expire = None
if up1:
Expand All @@ -146,63 +153,68 @@ class KissHeaders(object):
raise KeyError(u"<KissHeaders.get_headers_for_url> -error: '{}' is NOT within Header_Dict".format(tt))
return False, expire

update2, expire = check_update_instance(type_title, current_timestamp, update)
update2, expire = check_update_instance(tt, current_timestamp, update)
if update2:
Log.Debug(u'* <KissHeaders.get_headers_for_url>: {} cookies expired. Collecting fresh cookies.'.format(type_title))
Log.Debug(u'* <KissHeaders.get_headers_for_url>: {} cookies expired. Collecting fresh cookies.'.format(tt))

self.header_data.update(self.set_header(base_url))
self.header_data.update(self.set_header(base_url, mobile=mobile))

Log.Debug('* <KissHeaders.get_headers_for_url>: Updated {} Header to >>'.format(type_title))
Log.Debug('* {}'.format(self.header_data[type_title]))
Log.Debug('* <KissHeaders.get_headers_for_url>: Updated {} Header to >>'.format(tt))
Log.Debug('* {}'.format(self.header_data[tt]))

self.save_dict()
Log.Debug('* <KissHeaders.get_headers_for_url>: New Cookies saved for {} Header'.format(base_url))
elif expire:
current_datetime = Datetime.FromTimestamp(current_timestamp)
expire_datetime = Datetime.FromTimestamp(expire)
deltatime = str(expire_datetime - current_datetime)
Log.Debug(u'* <KissHeaders.get_headers_for_url>: {} cookies expire in {}'.format(type_title, deltatime))
Log.Debug(u'* <KissHeaders.get_headers_for_url>: {} cookies expire in {}'.format(tt, deltatime))
else:
Log.Warn('* <KissHeaders.get_headers_for_url>: No Expire time within {} cookies'.format(type_title))
Log.Warn('* <KissHeaders.get_headers_for_url[WARN]>: No Expire time within {} cookies'.format(tt))
else:
self.create_dict()

# setup headers to return, do not want date in header field
return {
'cookie': self.header_data[type_title]['cookie'],
'user-agent': self.header_data[type_title]['user-agent'],
'referer': self.header_data[type_title]['referer']
'cookie': self.header_data[tt]['cookie'],
'user-agent': self.header_data[tt]['user-agent'],
'referer': self.header_data[tt]['referer']
}

def check_all_headers(self):
if not self.header_data:
self.load_dict()
if not self.header_data:
Log.Error('* <KissHeaders.check_all_headers> -error: Cannot check all headers, because header file does not exist')
Log.Error('* <KissHeaders.check_all_headers[ERROR]>: Cannot check all headers, because header file does not exist')
return False
if (len(self.header_data) != 10):
Log.Warn("* <KissHeaders.check_all_headers[WARN]>: Found {} headers, but need 10. Pulling fresh headers for all Sites.".format(len(self.header_data)))
self.create_dict()

updated = False
for (type_title, base_url) in util.base_url_list_tuple:
site_pref = 'kissasian' if type_title == 'Drama' else 'kiss{}'.format(type_title.lower())
if len(self.header_data) > 1 and Prefs[site_pref]:
expire = int(self.header_data[type_title]['expire'])
for (tt, base_url) in util.base_url_list_tuple:
ttb = tt.split('_')[0]
mobile = 'Mobile' in tt
site_pref = 'kissasian' if ttb == 'Drama' else 'kiss{}'.format(ttb.lower())
if Prefs[site_pref]:
expire = int(self.header_data[tt]['expire'])
current_timestamp = int(Datetime.TimestampFromDatetime(Datetime.Now()))
if current_timestamp >= expire:
updated = True
Log.Debug(u'* <KissHeaders.check_all_headers>: {} cookies expired. Collecting fresh cookies.'.format(type_title))
self.header_data.update(self.set_header(base_url))
Log.Debug(u'* <KissHeaders.check_all_headers>: {} cookies expired. Collecting fresh cookies.'.format(tt))
self.header_data.update(self.set_header(base_url, mobile=mobile))
else:
if 'expire' in self.header_data[type_title].keys():
if 'expire' in self.header_data[tt].keys():
current_datetime = Datetime.FromTimestamp(current_timestamp)
expire_datetime = Datetime.FromTimestamp(expire)
deltatime = str(expire_datetime - current_datetime)
Log.Debug(u'* <KissHeaders.check_all_headers>: {} cookies expire in {}'.format(type_title, deltatime))
Log.Debug(u'* <KissHeaders.check_all_headers>: {} cookies expire in {}'.format(tt, deltatime))
else:
Log.Warn(u'* <KissHeaders.check_all_headers>: No Expire time within {} cookies'.format(type_title))
Log.Warn(u'* <KissHeaders.check_all_headers[WARN]>: No Expire time within {} cookies'.format(tt))

if updated:
if not self.save_dict():
Log.Error('* <KissHeaders.check_all_headers> -error: Failed to save new header data to file')
Log.Error('* <KissHeaders.check_all_headers[ERROR]>: Failed to save new header data to file')
self.event.set()
Log('* <KissHeaders.check_all_headers>: Finished checking all headers')
return updated
Expand Down
17 changes: 14 additions & 3 deletions Contents/Services/Shared Code/knetwork.pys
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ class network(object):
else:
reason = req.reason

if Regex(r'(/recaptcha)').search(req.text) or ('/Special/AreYouHuman/' in req.url):
if Regex(r'(/recaptcha)').search(req.text) or ('/Special/AreYouHuman' in req.url):
http_error_msg = u'%s Server Error: %s for url: %s' % (req.status_code, 'Captcha Present!', ourl)
elif util.is_kiss_url(ourl) and req.history:
tt = util.get_tt(ourl)
Expand Down Expand Up @@ -72,6 +72,18 @@ class network(object):
return self.HTTPRequest(url, headers, data, follow_redirects, method, count)
self.raise_error_for_req(req)
except Exception as e:
if ('kissanime' in url) and ('id=' in url) and ('Captcha Present!' in str(e)) and (count < 1):
# try again but with mobile site
count += 1
Log.Warn("* <network.HTTPRequest[WARN]>: detected captcha video page, trying mobile url instead")
headers = Headers.get_headers_for_url(url, mobile=True)
eid = Regex(r'id\=(\d+)').search(url)
if not eid:
Log.Error("* <network.HTTPRequest[ERROR]>: cannot find episode id for POST values")
return None
data = {'eID': eid.group(1)}
url = util.get_base_url(url) + '/Mobile/GetEpisode'
return self.HTTPRequest(url, headers, data, follow_redirects, 'POST', count)
Log.Error(u"* <network.request> -error: cannot handle '{}' >>>\n{}".format(url, e))
return None

Expand Down Expand Up @@ -156,7 +168,6 @@ class network(object):
Save images and videos
Limit to videos for now...
"""
#tt = util.get_tt(url) if util.get_tt(url) else 'Unknown'
filepath = self.stream_fpath(hash_name)

if self.check_stream_fp(hash_name):
Expand All @@ -173,8 +184,8 @@ class network(object):

self.setup_hash_fp(hash_name, filepath)
with open(filepath, 'wb') as f:
for chunk in res.iter_content(chunk_size=1024):
#for chunk in res.iter_content():
for chunk in res.iter_content(chunk_size=1024):
if not chunk:
continue # filter out keep-alive new chunks
f.write(chunk)
Expand Down
18 changes: 11 additions & 7 deletions Contents/Services/Shared Code/kutil.pys
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ import os
import sys
import pickle
import shutil
import datetime
from io import open
from kbase import core, cache, paths
from kdomain import domain
Expand Down Expand Up @@ -235,8 +234,8 @@ class util(object):
except: return version

def datetime_to_utc(self, dt):
n = datetime.datetime.now().replace(microsecond=0)
nutc = datetime.datetime.utcnow().replace(microsecond=0)
n = Datetime.Now().replace(microsecond=0)
nutc = Datetime.UTCNow().replace(microsecond=0)
if n < nutc:
return dt + (nutc - n)
elif n == nutc:
Expand All @@ -247,8 +246,8 @@ class util(object):
if os.path.exists(path):
ts = os.path.getmtime(path)
if utc:
return datetime_to_utc(datetime.datetime.fromtimestamp(ts)).replace(microsecond=0)
return datetime.datetime.fromtimestamp(ts).replace(microsecond=0)
return self.datetime_to_utc(Datetime.FromTimestamp(ts)).replace(microsecond=0)
return Datetime.Fromtimestamp(ts).replace(microsecond=0)
return False

def is_kiss_url(self, url):
Expand All @@ -265,13 +264,18 @@ class util(object):

@property
def base_url_list_tuple(self):
return [ (self.get_tt(u), u) for u in self.base_url_list ]
tmp = list()
for (n,u) in [ (self.get_tt(u), u) for u in self.base_url_list ]:
tmp.append((n,u))
u = u + '/M' if (n == 'Anime') else u
tmp.append(("{}_Mobile".format(n),u))
return tmp

def base_url(self, kind):
return [u for t, u in self.base_url_list_tuple if t == kind][0]

def search_url(self, kind):
return self.base_url(kind) + '/Search/' + kind + '?keyword={}'
return self.base_url(kind) + '/Search/' + kind.split('_')[0] + '?keyword={}'

@property
def search_url_list(self):
Expand Down
Loading

0 comments on commit 7130447

Please sign in to comment.