fix MaxStream

Author: fatshotty
Date: 2021-06-25 19:26:25 +02:00
Committer: mac12m99
Parent: d4a82d00eb
Commit: 1fe780d7a8
2 changed files with 54 additions and 15 deletions

Changed file 1 of 2: server URL patterns (JSON)

@@ -4,12 +4,8 @@
"ignore_urls": [], "ignore_urls": [],
"patterns": [ "patterns": [
{ {
"pattern": "https?://maxstream.video/(?:e/)?([a-z0-9]+)", "pattern": "https?://maxstream.video/(?:e/|embed-|cast/)?([a-z0-9]+)",
"url": "https://maxstream.video/\\1" "url": "https://maxstream.video/cast/\\1"
},
{
"pattern": "https?://maxstream.video/embed-([a-z0-9]+).html",
"url": "https://maxstream.video/\\1"
} }
] ]
}, },
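For reference, a quick standalone check of what the consolidated pattern covers: the single regex replaces the two previous entries and matches plain, /e/, embed- and /cast/ links, with every match rewritten to the /cast/ form. The sample URLs and the script below are illustrative only, not part of the commit:

import re

PATTERN = r"https?://maxstream.video/(?:e/|embed-|cast/)?([a-z0-9]+)"

# Hypothetical sample URLs; only the video id is captured, so the old
# "embed-<id>.html" form is still recognised without a dedicated entry.
samples = [
    "https://maxstream.video/abc123",
    "https://maxstream.video/e/abc123",
    "https://maxstream.video/embed-abc123.html",
    "https://maxstream.video/cast/abc123",
]

for url in samples:
    m = re.match(PATTERN, url)
    if m:
        # Same rewrite as the new "url" template: everything goes to /cast/<id>.
        print("https://maxstream.video/cast/" + m.group(1))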

Changed file 2 of 2: the MaxStream connector (Python)

@@ -1,22 +1,65 @@
-from core import httptools
-from core import scrapertools, support
-from lib import jsunpack
-from platformcode import logger, config
+# -*- coding: utf-8 -*-
+# --------------------------------------------------------
+# Conector MaxStream
+# --------------------------------------------------------
+from core import httptools, scrapertools, support
+from platformcode import logger, config
+import ast, sys
+
+if sys.version_info[0] >= 3:
+    import urllib.parse as urlparse
+else:
+    import urlparse
 
 def test_video_exists(page_url):
     logger.debug("(page_url='%s')" % page_url)
     global data
     data = httptools.downloadpage(page_url).data
 
-    if "File Not Found" in data or "File was deleted" in data:
+    if "file was deleted" in data:
         return False, config.get_localized_string(70449) % "MaxStream"
     return True, ""
 
 def get_video_url(page_url, premium=False, user="", password="", video_password=""):
     logger.debug("url=" + page_url)
-    global data
-    packed = scrapertools.find_single_match(data, r'(eval.*?)</script>')
-    unpacked = jsunpack.unpack(packed)
-    return support.get_jwplayer_mediaurl(unpacked, 'MaxStream')
+    video_urls = []
+    url_video = ''
+
+    lastIndexStart = data.rfind('<script>')
+    lastIndexEnd = data.rfind('</script>')
+    script = data[ (lastIndexStart + len('<script>')):lastIndexEnd ]
+
+    match = scrapertools.find_single_match(script, r'(\[[^\]]+\])[^\{]*\{[^\(]+\(parseInt\(value\)\s?-\s?([0-9]+)')
+    if match:
+        char_codes = ast.literal_eval(match[0])
+        hidden_js = "".join([chr(c - int(match[1])) for c in char_codes])
+
+        newurl = scrapertools.find_single_match(hidden_js, r'\$.get\(\'([^\']+)')
+        castpage = httptools.downloadpage(newurl, headers={'x-requested-with': 'XMLHttpRequest', 'Referer': page_url }).data
+        url_video = scrapertools.find_single_match(castpage, r"cc\.cast\('(http[s]?.[^']+)'")
+    else:
+        logger.debug('Something wrong: no url found before that :(')
+
+    if url_video:
+        import random, string
+        parse = urlparse.urlparse(url_video)
+        video_urls.append(['mp4 [MaxStream]', url_video])
+        try:
+            r1 = ''.join(random.choice(string.ascii_letters + string.digits) for i in range(19))
+            r2 = ''.join(random.choice(string.ascii_letters + string.digits) for i in range(19))
+            r3 = ''.join(random.choice(string.ascii_letters + string.digits) for i in range(19))
+            video_urls.append(['m3u8 [MaxStream]', '{}://{}/hls/{},{},{},{},.urlset/master.m3u8'.format(parse.scheme, parse.netloc, parse.path.split('/')[1], r1, r2, r3)])
+            # video_urls.append(['m3u8 [MaxStream]', '{}://{}/hls/{},wpsc2hllm5g5fkjvslq,4jcc2hllm5gzykkkgha,fmca2hllm5jtpb7cj5q,.urlset/master.m3u8'.format(parse.scheme, parse.netloc, parse.path.split('/')[1])])
+        except:
+            logger.debug('Something wrong: Impossible get HLS stream')
+
+    return video_urls
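The rewritten get_video_url relies on the page hiding the real player call as an array of character codes shifted by a constant: the connector grabs the last <script> block, subtracts the offset from every code to recover the hidden JS, follows the $.get(...) URL it finds there, and finally reads the mp4 URL out of cc.cast('...') on the returned cast page. Below is a minimal, self-contained sketch of just the decode step, using the same regex as the connector but a made-up char-code array and offset (the decoded string is invented for illustration):

import ast
import re

# Invented example of the obfuscated snippet found in the last <script> block.
# The offset 39 and the char codes are fabricated; they decode to "$.get('/cast')".
script = ("var hidden = ''; [75, 85, 142, 140, 155, 79, 78, 86, 138, 136, 154, 155, 78, 80]"
          ".forEach(function(value){ hidden += String.fromCharCode(parseInt(value) - 39); });")

# Same capture groups the connector uses: the array literal and the numeric offset.
m = re.search(r'(\[[^\]]+\])[^\{]*\{[^\(]+\(parseInt\(value\)\s?-\s?([0-9]+)', script)
if m:
    char_codes = ast.literal_eval(m.group(1))
    offset = int(m.group(2))
    hidden_js = "".join(chr(c - offset) for c in char_codes)
    print(hidden_js)  # -> $.get('/cast'); the connector then requests that URL via XHR

The HLS entry is then guessed by rebuilding the mp4 path as /hls/<first path segment>,<token>,<token>,<token>,.urlset/master.m3u8 with three random 19-character tokens, which suggests the CDN does not validate the token values; if that guess fails, the plain mp4 entry is still returned.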