migliorie wstream

This commit is contained in:
marco
2020-01-09 23:02:34 +01:00
parent 35503691a5
commit f302f3081a
2 changed files with 14 additions and 9 deletions

View File

@@ -11,10 +11,10 @@ from platformcode import logger, config
def test_video_exists(page_url):
    """Report whether the video at *page_url* is (assumed) available.

    The remote existence probe is deliberately disabled: the function
    answers affirmatively without contacting the server, so playback is
    always attempted.

    Args:
        page_url: URL of the Akvideo video page to check.

    Returns:
        tuple: ``(True, "")`` unconditionally — (exists, error_message).
    """
    logger.info("(page_url='%s')" % page_url)
    # NOTE(review): the download-and-inspect probe ("File was deleted" /
    # "Page Cannot Be Found" scan) that used to follow was unreachable
    # after this early return; the dead duplicate was removed here.
    return True, ""
def get_video_url(page_url, premium=False, user="", password="", video_password=""):

View File

@@ -11,6 +11,7 @@ headers = [['User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20
def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
global data
data = httptools.downloadpage(page_url).data
if "Not Found" in data or "File was deleted" in data:
return False, config.get_localized_string(70449) % 'Wstream'
@@ -21,14 +22,18 @@ def test_video_exists(page_url):
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("[Wstream] url=" + page_url)
video_urls = []
global data
if '/streaming.php' in page_url or 'html' in page_url:
code = httptools.downloadpage(page_url, headers=headers, follow_redirects=False).headers['location'].split('/')[-1].replace('.html','')
logger.info('WCODE='+code)
page_url = 'https://wstream.video/video.php?file_code=' + code
try:
code = httptools.downloadpage(page_url, headers=headers, follow_redirects=False).headers['location'].split('/')[-1].replace('.html','')
logger.info('WCODE='+code)
page_url = 'https://wstream.video/video.php?file_code=' + code
data = httptools.downloadpage(page_url, headers=headers, follow_redirects=True).data
except:
pass
code = page_url.split('=')[-1]
data = httptools.downloadpage(page_url, headers=headers, follow_redirects=False).data
ID = scrapertools.find_single_match(data, r'''input\D*id=(?:'|")([^'"]+)(?:'|")''')
post = urllib.urlencode({ID: code})