Merge pull request #372 from pipcat/master

Fix for vidoza and platformtools
Authored by Alfa on 2018-07-25 12:02:31 -05:00, committed by GitHub
5 changed files with 39 additions and 26 deletions

View File

@@ -28,7 +28,7 @@ ficherocookies = os.path.join(config.get_data_path(), "cookies.dat")
# Default headers, used if nothing else is specified
default_headers = dict()
default_headers["User-Agent"] = "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3163.100 Safari/537.36"
default_headers["User-Agent"] = "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3163.100 Safari/537.36"
default_headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"
default_headers["Accept-Language"] = "es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3"
default_headers["Accept-Charset"] = "UTF-8"

View File

@@ -127,13 +127,12 @@ def render_items(itemlist, parent_item):
if 'anime' in channeltools.get_channel_parameters(parent_item.channel)['categories']:
anime = True
# Iterate over the itemlist
unify_enabled = config.get_setting('unify')
#logger.debug('unify_enabled: %s' % unify_enabled)
# Iterate over the itemlist
for item in itemlist:
try:
channel_parameters = channeltools.get_channel_parameters(item.channel)
except:
pass
#logger.debug(item)
# If the item has no category, use the parent item's
if item.category == "":
@@ -143,9 +142,7 @@ def render_items(itemlist, parent_item):
if item.fanart == "":
item.fanart = parent_item.fanart
if genre:
valid_genre = True
thumb = get_thumb(item.title, auto=True)
if thumb != '':
@@ -155,12 +152,7 @@ def render_items(itemlist, parent_item):
valid_genre = True
unify_enabled = config.get_setting('unify')
#logger.debug('unify_enabled: %s' % unify_enabled)
if unify_enabled and not channel_parameters['adult'] and 'skip_unify' not in channel_parameters:
if unify_enabled:
# Format the title with unify
item = unify.title_format(item)
else:
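
Both platformtools hunks deal with when unify.title_format() is applied while walking the itemlist: the 'unify' setting is read once, and the fuller of the two 'if' lines also skips adult channels and channels that declare skip_unify. A self-contained sketch of that pattern, with config, channeltools and unify replaced by stand-in callables since only fragments of render_items() are visible in this diff:

    # Illustrative sketch only; the stand-ins below are not the real modules.
    def render_titles(itemlist, get_setting, get_channel_parameters, title_format):
        unify_enabled = get_setting('unify')             # read the setting once
        rendered = []
        for item in itemlist:
            params = get_channel_parameters(item['channel'])
            # mirror the fuller condition shown in the hunk above
            if unify_enabled and not params.get('adult') and 'skip_unify' not in params:
                item = title_format(item)                # unified title formatting
            rendered.append(item)
        return rendered

    items = [{'channel': 'somechannel', 'title': 'Some Show 1x01'}]
    print(render_titles(items,
                        get_setting=lambda key: True,
                        get_channel_parameters=lambda channel: {'adult': False},
                        title_format=lambda it: dict(it, title='[B]%s[/B]' % it['title'])))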

View File

@@ -7,8 +7,6 @@ from core import scrapertools
from lib import jsunpack
from platformcode import logger
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:58.0) Gecko/20100101 Firefox/58.0'}
def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)

View File

@@ -30,14 +30,24 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
packed = scrapertools.find_single_match(data, "<script type=[\"']text/javascript[\"']>(eval.*?)</script>")
unpacked = jsunpack.unpack(packed)
url = scrapertools.find_single_match(unpacked, "(?:src):\\\\'([^\\\\]+.mp4)\\\\'")
itemlist.append([".mp4" + " [powvideo]", decode_powvideo_url(url)])
a, b = scrapertools.find_single_match(data, "\['splice'\]\(0x([0-9a-fA-F]*),0x([0-9a-fA-F]*)\);")
if a and b:
url = decode_powvideo_url(url, int(a, 16), int(b, 16))
else:
logger.debug('splice not detected! Check the decode scheme...')
itemlist.append([".mp4" + " [powvideo]", url])
itemlist.sort(key=lambda x: x[0], reverse=True)
return itemlist
def decode_powvideo_url(url):
def decode_powvideo_url(url, desde, num):
tria = re.compile('[0-9a-z]{40,}', re.IGNORECASE).findall(url)[0]
gira = tria[::-1]
x = gira[:1] + gira[2:]
if desde == 0:
x = gira[num:]
else:
x = gira[:desde] + gira[(desde+num):]
return re.sub(tria, x, url)
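
The powvideo fix scrapes the two hexadecimal splice() arguments from the page and feeds them to decode_powvideo_url(), which now reverses the long token embedded in the URL and removes num characters starting at position desde, instead of always dropping the same fixed character. A self-contained rerun of that helper on a fabricated URL (the host name and token below are invented purely for illustration):

    import re

    # Same logic as the new decode_powvideo_url() above, with comments added.
    def decode_powvideo_url(url, desde, num):
        # grab the long alphanumeric token embedded in the URL
        tria = re.compile('[0-9a-z]{40,}', re.IGNORECASE).findall(url)[0]
        gira = tria[::-1]                            # reverse it
        if desde == 0:
            x = gira[num:]                           # splice(0, num): drop the first num chars
        else:
            x = gira[:desde] + gira[(desde + num):]  # splice(desde, num): drop num chars at desde
        return re.sub(tria, x, url)

    token = 'abcdef0123456789abcdef0123456789abcdef012345'   # made-up 45-char token
    url = 'https://host.example/%s/v.mp4' % token
    print(decode_powvideo_url(url, int('2', 16), int('3', 16)))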

View File

@@ -3,6 +3,7 @@
from core import httptools
from core import scrapertools
from platformcode import logger
from core import jsontools
def test_video_exists(page_url):
@@ -20,13 +21,25 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
logger.info("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
video_urls = []
matches = scrapertools.find_multiple_matches(data, 'src\s*:\s*"([^"]+)".*?label:\'([^\']+)\'')
for media_url, calidad in matches:
ext = media_url[-4:]
video_urls.append(["%s %s [vidoza]" % (ext, calidad), media_url])
s = scrapertools.find_single_match(data, 'sourcesCode\s*:\s*(\[\{.*?\}\])')
s = s.replace('src:', '"src":').replace('file:', '"file":').replace('type:', '"type":').replace('label:', '"label":').replace('res:', '"res":')
try:
data = jsontools.load(s)
for enlace in data:
if 'src' in enlace or 'file' in enlace:
url = enlace['src'] if 'src' in enlace else enlace['file']
tit = ''
if 'label' in enlace: tit += '[%s]' % enlace['label']
if 'res' in enlace: tit += '[%s]' % enlace['res']
if tit == '' and 'type' in enlace: tit = enlace['type']
if tit == '': tit = '.mp4'
video_urls.append(["%s [vidoza]" % tit, url])
except:
logger.debug('json not detected: %s' % s)
pass
video_urls.reverse()
for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
return video_urls
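
The vidoza rewrite replaces the brittle src/label regex with a more tolerant path: grab the sourcesCode array from the player setup, quote its bare JavaScript keys so it parses as JSON, then read src/file plus label, res or type from each entry. A self-contained sketch of that parsing step on a fabricated page fragment, using the standard json module in place of Alfa's jsontools wrapper:

    import json
    import re

    # Fabricated fragment standing in for the downloaded player page.
    data = 'jwplayer("vplayer").setup({sourcesCode: [{src: "https://host.example/v.mp4", type: "video/mp4", label: "720p", res: "720"}]});'

    s = re.search(r'sourcesCode\s*:\s*(\[\{.*?\}\])', data).group(1)
    # quote the bare keys so the JS object literal becomes valid JSON
    for key in ('src', 'file', 'type', 'label', 'res'):
        s = s.replace('%s:' % key, '"%s":' % key)

    video_urls = []
    for enlace in json.loads(s):
        url = enlace.get('src') or enlace.get('file')
        tit = ''.join('[%s]' % enlace[k] for k in ('label', 'res') if k in enlace)
        if not tit:
            tit = enlace.get('type', '.mp4')
        if url:
            video_urls.append(['%s [vidoza]' % tit, url])
    print(video_urls)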