fix wstream and animesaturn

mac12m99
2021-05-15 10:38:41 +02:00
parent f8647df9e0
commit 87541fb1e4
3 changed files with 31 additions and 8 deletions


@@ -8,7 +8,27 @@ from core import support
 from platformcode import config
 host = support.config.get_channel_url()
-headers={'X-Requested-With': 'XMLHttpRequest'}
+__channel__ = 'animesaturn'
+cookie = support.config.get_setting('cookie', __channel__)
+headers = {'X-Requested-With': 'XMLHttpRequest', 'Cookie': cookie}
+
+
+def get_cookie(data):
+    global cookie, headers
+    cookie = support.match(data, patron=r'document.cookie="([^\s]+)').match
+    support.config.set_setting('cookie', cookie, __channel__)
+    headers = [['Cookie', cookie]]
+
+
+def get_data(item):
+    # support.dbg()
+    # url = support.match(item.url, headers=headers, follow_redirects=True, only_headers=True).url
+    data = support.match(item.url, headers=headers, follow_redirects=True).data
+    if 'ASCookie' in data:
+        get_cookie(data)
+        data = get_data(item)
+    return data
 
 
 @support.menu
 def mainlist(item):
@@ -96,6 +116,9 @@ def peliculas(item):
     action = 'check'
     page = None
     post = "page=" + str(item.page if item.page else 1) if item.page and int(item.page) > 1 else None
+    data = get_data(item)
+    # debug = True
     if item.args == 'top':
         data = item.other
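For context: the animesaturn change above, like the analogous channel change in the next file, handles a cookie challenge. When the site returns an 'ASCookie' (or 'AWCookieVerify') interstitial instead of the real page, get_cookie() extracts the value from the inline document.cookie="..." snippet, persists it via support.config.set_setting, and get_data() retries the request with that Cookie header. The following is a minimal, self-contained sketch of the same flow using the standard requests and re modules rather than KoD's support/httptools helpers; the function name fetch_with_cookie and the single-retry guard are assumptions for illustration, only the 'ASCookie' marker and the document.cookie regex come from the diff.

import re
import requests

def fetch_with_cookie(url, cookie='', retries=1):
    # Hypothetical helper, not the addon's API: request the page, detect the
    # cookie-challenge interstitial, extract the cookie and retry once.
    headers = {'X-Requested-With': 'XMLHttpRequest'}
    if cookie:
        headers['Cookie'] = cookie
    data = requests.get(url, headers=headers, allow_redirects=True).text
    if 'ASCookie' in data and retries > 0:
        # The challenge page sets the cookie via an inline JS snippet,
        # e.g. document.cookie="ASCookie=..."; pull it out and retry.
        match = re.search(r'document.cookie="([^\s]+)', data)
        if match:
            return fetch_with_cookie(url, cookie=match.group(1), retries=retries - 1)
    return data

Unlike this sketch, the channel also stores the extracted cookie in the addon settings, so later runs start with a valid Cookie header instead of hitting the challenge page every time.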


@@ -12,9 +12,9 @@ cookie = support.config.get_setting('cookie', __channel__)
 headers = [['Cookie', cookie]]
 
 
-def get_cookie():
+def get_cookie(data):
     global cookie, headers
-    cookie = support.match(host, patron=r'document.cookie="([^\s]+)').match
+    cookie = support.match(data, patron=r'document.cookie="([^\s]+)').match
     support.config.set_setting('cookie', cookie, __channel__)
     headers = [['Cookie', cookie]]
@@ -24,8 +24,8 @@ def get_data(item):
     url = httptools.downloadpage(item.url, headers=headers, follow_redirects=True, only_headers=True).url
     data = support.match(url, headers=headers, follow_redirects=True).data
     if 'AWCookieVerify' in data:
-        get_cookie()
-        get_data(item)
+        get_cookie(data)
+        data = get_data(item)
     return data


@@ -19,12 +19,12 @@ errorsStr = ['Sorry this file is not longer available', 'Sorry this video is una
 def test_video_exists(page_url):
     logger.debug("(page_url='%s')" % page_url)
 
     disable_directIP = False
-    if 'swvideoid' in page_url: disable_directIP = True
-    resp = httptools.downloadpage(page_url, verify=False, disable_directIP=disable_directIP, follow_redirects=False)
+    # if 'swvideoid' in page_url: disable_directIP = True
+    resp = httptools.downloadpage(page_url.replace('https:', 'http:'), verify=False, disable_directIP=disable_directIP, follow_redirects=False)
     while resp.headers.get('location'):
         page_url = resp.headers.get('location')
-        resp = httptools.downloadpage(page_url, verify=False, disable_directIP=disable_directIP, follow_redirects=False)
+        resp = httptools.downloadpage(page_url.replace('https:', 'http:'), verify=False, disable_directIP=disable_directIP, follow_redirects=False)
 
     global data, real_url
     data = resp.data
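The wstream change is independent of the cookie handling: test_video_exists() now forces plain http and follows redirects by hand, issuing every request with follow_redirects=False and chasing the Location header in a loop, downgrading each hop from https: to http:. A rough equivalent with requests, under the assumption of a hypothetical resolve_final_page helper and an arbitrary hop limit (neither is part of the commit):

import requests

def resolve_final_page(page_url, max_hops=10):
    # Illustrative sketch: chase Location headers manually, downgrading every
    # hop to plain http, as the loop above does with httptools.downloadpage.
    for _ in range(max_hops):
        resp = requests.get(page_url.replace('https:', 'http:'),
                            verify=False, allow_redirects=False)
        location = resp.headers.get('location')
        if not location:
            return resp.text
        page_url = location
    raise RuntimeError('too many redirects')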