@@ -14,7 +14,7 @@ from lib import jsunpack
from platformcode import config, logger

host = "http://www.asialiveaction.com"
host = "https://asialiveaction.com"

IDIOMAS = {'Japones': 'Japones'}
list_language = IDIOMAS.values()
@@ -26,9 +26,9 @@ def mainlist(item):
autoplay.init(item.channel, list_servers, list_quality)
itemlist = list()
itemlist.append(Item(channel=item.channel, action="lista", title="Peliculas",
url=urlparse.urljoin(host, "/category/pelicula"), type='pl', pag=1))
url=urlparse.urljoin(host, "/pelicula"), type='pl'))
itemlist.append(Item(channel=item.channel, action="lista", title="Series",
url=urlparse.urljoin(host, "/category/serie"), type='sr', pag=1))
url=urlparse.urljoin(host, "/serie"), type='sr'))
itemlist.append(Item(channel=item.channel, action="category", title="Géneros", url=host, cat='genre'))
itemlist.append(Item(channel=item.channel, action="category", title="Calidad", url=host, cat='quality'))
itemlist.append(Item(channel=item.channel, action="category", title="Orden Alfabético", url=host, cat='abc'))
@@ -58,7 +58,7 @@ def category(item):
for scrapedurl,scrapedtitle in matches:
if scrapedtitle != 'Próximas Películas':
if not scrapedurl.startswith("http"): scrapedurl = host + scrapedurl
itemlist.append(item.clone(action=action, title=scrapedtitle, url=scrapedurl, type='cat', pag=0))
itemlist.append(item.clone(action=action, title=scrapedtitle, url=scrapedurl, type='cat'))
return itemlist
@@ -88,7 +88,6 @@ def search(item, texto):
logger.info()
texto = texto.replace(" ", "+")
item.url = item.url + texto
item.pag = 0
if texto != '':
return lista(item)
@@ -119,12 +118,13 @@ def lista_a(item):
itemlist = []
data = httptools.downloadpage(item.url).data
patron = '(?is)Num">.*?href="([^"]+)".*?'
patron += 'src="([^"]+)".*?>.*?'
patron += 'data-src="([^"]+)".*?>.*?'
patron += '<strong>([^<]+)<.*?'
patron += '<td>([^<]+)<.*?'
patron += 'href.*?>([^"]+)<\/a>'
matches = scrapertools.find_multiple_matches(data, patron)
for scrapedurl, scrapedthumbnail, scrapedtitle, scrapedyear, scrapedtype in matches:
if not scrapedthumbnail.startswith("http"): scrapedthumbnail = "https:" + scrapedthumbnail
action = "findvideos"
if "Serie" in scrapedtype: action = "episodios"
itemlist.append(item.clone(action=action, title=scrapedtitle, contentTitle=scrapedtitle, contentSerieName=scrapedtitle, url=scrapedurl, thumbnail=scrapedthumbnail,
@@ -140,14 +140,14 @@ def lista(item):
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)

patron = '<article .*?">'
patron += '<a href="([^"]+)"><.*?><figure.*?>' #scrapedurl
patron += '<img.*?src="([^"]+)".*?>.*?' #scrapedthumbnail
patron += '<h3 class=".*?">([^"]+)<\/h3>' #scrapedtitle
patron += '<span.*?>([^"]+)<\/span>.+?' #scrapedyear
patron += '<a.+?>([^"]+)<\/a>' #scrapedtype
patron = '(?is)class="TPost C">.*?href="([^"]+)".*?' #scrapedurl
patron += 'lazy-src="([^"]+)".*?>.*?' #scrapedthumbnail
patron += 'title">([^<]+)<.*?' #scrapedtitle
patron += 'year">([^<]+)<.*?' #scrapedyear
patron += 'href.*?>([^"]+)<\/a>' #scrapedtype
matches = scrapertools.find_multiple_matches(data, patron)
for scrapedurl, scrapedthumbnail, scrapedtitle, scrapedyear, scrapedtype in matches:
if not scrapedthumbnail.startswith("http"): scrapedthumbnail = "https:" + scrapedthumbnail
title="%s - %s" % (scrapedtitle,scrapedyear)

new_item = Item(channel=item.channel, title=title, url=scrapedurl, thumbnail=scrapedthumbnail,

@@ -158,16 +158,12 @@ def lista(item):
else:
new_item.contentTitle = scrapedtitle
new_item.action = 'findvideos'

itemlist.append(new_item)

itemlist.append(new_item)
tmdb.set_infoLabels_itemlist(itemlist, seekTmdb=True)

#pagination
pag = item.pag + 1
url_next_page = item.url+"/page/"+str(pag)+"/"
if len(itemlist)>19:
itemlist.append(item.clone(title="Siguiente >>", url=url_next_page, action='lista', pag=pag))
url_next_page = scrapertools.find_single_match(data, 'rel="next" href="([^"]+)"')
if len(itemlist)>0 and url_next_page:
itemlist.append(item.clone(title="Siguiente >>", url=url_next_page, action='lista'))
return itemlist
@@ -189,14 +185,16 @@ def findvideos(item):
data1 = httptools.downloadpage(url, headers={"Referer":url1}).data
url = scrapertools.find_single_match(data1, 'src: "([^"]+)"')
if "embed.php" not in url:
itemlist.append(item.clone(action = "play", title = "Ver en %s (" + language + ")", language = language, url = url))
if url:
itemlist.append(item.clone(action = "play", title = "Ver en %s (" + language + ")", language = language, url = url))
continue
data1 = httptools.downloadpage(url).data
packed = scrapertools.find_single_match(data1, "(?is)eval\(function\(p,a,c,k,e.*?</script>")
unpack = jsunpack.unpack(packed)
urls = scrapertools.find_multiple_matches(unpack, '"file":"([^"]+).*?label":"([^"]+)')
for url2, quality in urls:
itemlist.append(item.clone(action = "play", title = "Ver en %s (" + quality + ") (" + language + ")", language = language, url = url2))
if url2:
itemlist.append(item.clone(action = "play", title = "Ver en %s (" + quality + ") (" + language + ")", language = language, url = url2))
# Segundo grupo de enlaces
matches = scrapertools.find_multiple_matches(data, '<span><a rel="nofollow" target="_blank" href="([^"]+)"')
for url in matches:

@@ -212,7 +210,8 @@ def findvideos(item):
language = "Sub. Español"
matches2 = scrapertools.find_multiple_matches(ser, 'href="([^"]+)')
for url2 in matches2:
itemlist.append(item.clone(action = "play", title = "Ver en %s (" + quality + ") (" + language + ")", language = language, url = url2))
if url2:
itemlist.append(item.clone(action = "play", title = "Ver en %s (" + quality + ") (" + language + ")", language = language, url = url2))
itemlist = servertools.get_servers_itemlist(itemlist, lambda i: i.title % i.server.capitalize())
# Requerido para FilterTools
itemlist = filtertools.get_links(itemlist, item, list_language)
@@ -242,7 +242,7 @@ def findvideos(item):
else:
title = ''
url = scrapertools.find_single_match(new_data, "src='([^']+)'")
url = get_url(url.replace('\\/', '/'))
url = get_url(url)
if url:
itemlist.append(item.clone(title ='%s'+title, url=url, action='play',
language=IDIOMAS[language], text_color = ""))

@@ -255,7 +255,7 @@ def findvideos(item):
title = ''
new_data = httptools.downloadpage(hidden_url).data
url = scrapertools.find_single_match(new_data, 'id="link" href="([^"]+)"')
url = get_url(url.replace('\\/', '/'))
url = get_url(url)
if url:
itemlist.append(Item(channel=item.channel, title='%s'+title, url=url, action='play', quality=quality,
language=IDIOMAS[language], infoLabels=item.infoLabels, text_color = ""))

@@ -280,6 +280,7 @@ def findvideos(item):

def get_url(url):
logger.info()
url = url.replace('\\/', '/')
if "cinetux.me" in url:
d1 = httptools.downloadpage(url).data
if "mail" in url or "drive" in url or "ok.cinetux" in url or "mp4/" in url:

@@ -288,6 +289,8 @@ def get_url(url):
url = scrapertools.find_single_match(d1, '<iframe src="([^"]+)') + id
if "drive" in url:
url += "/preview"
if "FFFFFF" in url:
url = scrapertools.find_single_match(d1, 'class="cta" href="([^"]+)"')
else:
url = scrapertools.find_single_match(d1, 'document.location.replace\("([^"]+)')
url = url.replace("povwideo","powvideo")
@@ -3,6 +3,7 @@
from core import httptools
from core import scrapertools
from core import servertools
from core import tmdb
from core.item import Item
from platformcode import config, logger
from channelselector import get_thumb

@@ -68,6 +69,7 @@ def sub_search(item):
if "ver-" not in scrapedurl:
continue
year = scrapertools.find_single_match(scrapedtitle, "\d{4}")
contentTitle = scrapedtitle.replace(scrapertools.find_single_match('\[.+', scrapedtitle),"")
contentTitle = scrapedtitle.replace("(%s)" %year,"").replace("Ver","").strip()
itemlist.append(Item(action = "findvideos",
channel = item.channel,

@@ -77,6 +79,7 @@ def sub_search(item):
thumbnail = scrapedthumbnail,
url = scrapedurl,
))
tmdb.set_infoLabels_itemlist(itemlist, True)
return itemlist
@@ -89,11 +92,11 @@ def generos(item):
matches = scrapertools.find_multiple_matches(data, patron)
for genero, scrapedurl in matches:
title = scrapertools.htmlclean(genero)
url = item.url + scrapedurl
if not item.url.startswith("http"): scrapedurl = item.url + scrapedurl
itemlist.append(Item(channel = item.channel,
action = 'peliculas',
title = title,
url = url,
url = scrapedurl,
viewmode = "movie",
first=0))
itemlist = sorted(itemlist, key=lambda item: item.title)
@@ -124,19 +127,21 @@ def peliculas(item):
title = scrapedtitle + " " + plot
if not scrapedurl.startswith("http"):
scrapedurl = item.url + scrapedurl
itemlist.append(Item(channel = item.channel,
action = 'findvideos',
title = title,
url = scrapedurl,
thumbnail = scrapedthumbnail,
plot = plot,
year = scrapertools.find_single_match(scrapedurl, "\-(\d{4})\-")
contentTitle = scrapedtitle.replace(scrapertools.find_single_match('\[.+', scrapedtitle),"")
itemlist.append(Item(action = 'findvideos',
channel = item.channel,
contentTitle = scrapedtitle,
contentType = "movie",
infoLabels = {"year":year},
language=language,
quality=quality
plot = plot,
quality=quality,
title = title,
thumbnail = scrapedthumbnail,
url = scrapedurl
))
tmdb.set_infoLabels_itemlist(itemlist, True)
#paginacion

url_next_page = item.url
first = last
if next:
@@ -149,9 +154,9 @@ def findvideos(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
item.plot = scrapertools.find_single_match(data, '<div class="entry">(.*?)<div class="iframes">')
item.plot = scrapertools.htmlclean(item.plot).strip()
item.contentPlot = item.plot
#item.plot = scrapertools.find_single_match(data, '<div class="entry">(.*?)<div class="iframes">')
#item.plot = scrapertools.htmlclean(item.plot).strip()
#item.contentPlot = item.plot
patron = '<strong>Ver película online.*?>.*?>([^<]+)'
scrapedopcion = scrapertools.find_single_match(data, patron)
titulo_opcional = scrapertools.find_single_match(scrapedopcion, ".*?, (.*)").upper()

@@ -167,14 +172,12 @@ def findvideos(item):
urls = scrapertools.find_multiple_matches(datos, '(?:src|href)="([^"]+)')
titulo = "Ver en %s " + titulo_opcion
for url in urls:
itemlist.append(Item(channel = item.channel,
action = "play",
contentThumbnail = item.thumbnail,
fulltitle = item.contentTitle,
itemlist.append(item.clone(action = "play",
title = titulo,
url = url
))
itemlist = servertools.get_servers_itemlist(itemlist, lambda i: i.title % i.server.capitalize())
#tmdb.set_infoLabels_itemlist(itemlist, True)
if itemlist:
if config.get_videolibrary_support():
itemlist.append(Item(channel = item.channel, action = ""))
@@ -5,7 +5,7 @@
"adult": false,
"language": ["cast", "lat", "vose"],
"fanart": "https://i.postimg.cc/qvFCZNKT/Alpha-652355392-large.jpg",
"thumbnail": "https://hdfilmologia.com/templates/gorstyle/images/logo.png",
"thumbnail": "https://hdfilmologia.com/templates/hdfilmologia/images/logo.png",
"banner": "",
"categories": [
"movie",
@@ -179,7 +179,7 @@ def genres(item):
logger.info()
itemlist = []

data = httptools.downloadpage(item.url).data
data = httptools.downloadpage(item.url)
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)

patron = '<li class="myli"><a href="/([^"]+)">([^<]+)</a>'

@@ -221,12 +221,11 @@ def findvideos(item):

data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|amp;|#038;|\(.*?\)|\s{2}| ", "", data)

patron = '(\w+)src\d+="([^"]+)"'
patron = '>([^<]+)</a></li><li><a class="src_tab" id="[^"]+" data-src="([^"]+)"'
matches = re.compile(patron, re.DOTALL).findall(data)

for lang, url in matches:

lang = re.sub(r"1|2|3|4", "", lang)
server = servertools.get_server_from_url(url)
if 'dropbox' in url:
server = 'dropbox'

@@ -243,9 +242,9 @@ def findvideos(item):
for key in matches:
url = 'https://www.dropbox.com/s/%s?dl=1' % (key)
server = 'dropbox'
languages = {'l': '[COLOR cornflowerblue](LAT)[/COLOR]',
'e': '[COLOR green](CAST)[/COLOR]',
's': '[COLOR red](VOS)[/COLOR]'}
languages = {'Latino': '[COLOR cornflowerblue](LAT)[/COLOR]',
'Castellano': '[COLOR green](CAST)[/COLOR]',
'Subtitulado': '[COLOR red](VOS)[/COLOR]'}
if lang in languages:
lang = languages[lang]
@@ -10,7 +10,6 @@ import urlparse

from platformcode import logger
from decimal import Decimal
from js2py.internals import seval


class Cloudflare:

@@ -47,25 +46,50 @@ class Cloudflare:
logger.debug("Metodo #2 (headers): NO disponible")
self.header_data = {}

def solve_cf(self, body, domain):
k = re.compile('<div style="display:none;visibility:hidden;" id=".*?">(.*?)<\/div>', re.DOTALL).findall(body)
k1 = re.compile('function\(p\){var p = eval\(eval.*?atob.*?return \+\(p\)}\(\)', re.DOTALL).findall(body)
if k1:
body = body.replace(k1[0], k[0])
js = re.search(r"setTimeout\(function\(\){\s+(var "
"s,t,o,p,b,r,e,a,k,i,n,g,f.+?\r?\n[\s\S]+?a\.value =.+?)\r?\n", body).group(1)
js = re.search(
r"setTimeout\(function\(\){\s+(var s,t,o,p,b,r,e,a,k,i,n,g,f.+?\r?\n[\s\S]+?a\.value =.+?)\r?\n",
body
).group(1)

js = re.sub(r"a\.value = ((.+).toFixed\(10\))?", r"\1", js)
js = re.sub(r'(e\s=\sfunction\(s\)\s{.*?};)', '', js, flags=re.DOTALL|re.MULTILINE)
js = re.sub(r"\s{3,}[a-z](?: = |\.).+", "", js).replace("t.length", str(len(domain)))
js = js.replace('; 121', '')
reemplazar = re.compile('(?is)function\(p\)\{return eval.*?\+p\+"\)"\)}', re.DOTALL).findall(js)
if reemplazar:
js = js.replace(reemplazar[0],'t.charCodeAt')
js = re.sub(r"[\n\\']", "", js)
js = 'a = {{}}; t = "{}";{}'.format(domain, js)
result = seval.eval_js_vm(js)
jsEnv = """
var t = "{domain}";
var g = String.fromCharCode;
o = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
e = function(s) {{
s += "==".slice(2 - (s.length & 3));
var bm, r = "", r1, r2, i = 0;
for (; i < s.length;) {{
bm = o.indexOf(s.charAt(i++)) << 18 | o.indexOf(s.charAt(i++)) << 12 | (r1 = o.indexOf(s.charAt(i++))) << 6 | (r2 = o.indexOf(s.charAt(i++)));
r += r1 === 64 ? g(bm >> 16 & 255) : r2 === 64 ? g(bm >> 16 & 255, bm >> 8 & 255) : g(bm >> 16 & 255, bm >> 8 & 255, bm & 255);
}}
return r;
}};
function italics (str) {{ return '<i>' + this + '</i>'; }};
var document = {{
getElementById: function () {{
return {{'innerHTML': '{innerHTML}'}};
}}
}};
{js}
"""
innerHTML = re.search('<div(?: [^<>]*)? id="([^<>]*?)">([^<>]*?)<\/div>', body , re.MULTILINE | re.DOTALL)
innerHTML = innerHTML.group(2).replace("'", r"\'") if innerHTML else ""
import js2py
from jsc import jsunc
js = jsunc(jsEnv.format(domain=domain, innerHTML=innerHTML, js=js))
def atob(s):
return base64.b64decode('{}'.format(s)).decode('utf-8')
js2py.disable_pyimport()
context = js2py.EvalJs({'atob': atob})
result = context.eval(js)
return float(result)


@property
def wait_time(self):
@@ -10,25 +10,6 @@ from core import httptools
from platformcode import logger


def downloadpage(url, post=None, headers=None, follow_redirects=True, timeout=None, header_to_get=None):
response = httptools.downloadpage(url, post=post, headers=headers, follow_redirects=follow_redirects,
timeout=timeout)
if header_to_get:
return response.headers.get(header_to_get)
else:
return response.data


def downloadpageGzip(url):
response = httptools.downloadpage(url, add_referer=True)
return response.data


def getLocationHeaderFromResponse(url):
response = httptools.downloadpage(url, only_headers=True)
return response.headers.get("location")


def get_header_from_response(url, header_to_get="", post=None, headers=None):
header_to_get = header_to_get.lower()
response = httptools.downloadpage(url, post=post, headers=headers, only_headers=True)

@@ -48,11 +29,6 @@ def printMatches(matches):
i = i + 1


def get_match(data, patron, index=0):
matches = re.findall(patron, data, flags=re.DOTALL)
return matches[index]


def find_single_match(data, patron, index=0):
try:
matches = re.findall(patron, data, flags=re.DOTALL)
@@ -18,10 +18,6 @@ def printMatches(matches):
i = i + 1


def get_match(data, patron, index=0):
return find_single_match(data, patron, index=0)


def find_single_match(data, patron, index=0):
try:
matches = re.findall(patron, data, flags=re.DOTALL)
@@ -155,7 +155,7 @@ def limited(func):
inf = float('inf')


def Literal(type, value, raw, regex=None):
def Literal(type, value, raw, regex=None, comments=None):
if regex: # regex
return 'JsRegExp(%s)' % repr(compose_regex(value))
elif value is None: # null

@@ -165,12 +165,12 @@ def Literal(type, value, raw, regex=None):
return 'Js(%s)' % repr(value) if value != inf else 'Js(float("inf"))'


def Identifier(type, name):
def Identifier(type, name, comments=None):
return 'var.get(%s)' % repr(name)


@limited
def MemberExpression(type, computed, object, property):
def MemberExpression(type, computed, object, property, comments=None):
far_left = trans(object)
if computed: # obj[prop] type accessor
# may be literal which is the same in every case so we can save some time on conversion

@@ -183,12 +183,12 @@ def MemberExpression(type, computed, object, property):
return far_left + '.get(%s)' % prop


def ThisExpression(type):
def ThisExpression(type, comments=None):
return 'var.get(u"this")'


@limited
def CallExpression(type, callee, arguments):
def CallExpression(type, callee, arguments, comments=None):
arguments = [trans(e) for e in arguments]
if callee['type'] == 'MemberExpression':
far_left = trans(callee['object'])

@@ -210,14 +210,14 @@ def CallExpression(type, callee, arguments):
# ========== ARRAYS ============


def ArrayExpression(type, elements): # todo fix null inside problem
def ArrayExpression(type, elements, comments=None): # todo fix null inside problem
return 'Js([%s])' % ', '.join(trans(e) if e else 'None' for e in elements)


# ========== OBJECTS =============


def ObjectExpression(type, properties):
def ObjectExpression(type, properties, comments=None):
name = inline_stack.require('Object')
elems = []
after = ''
@@ -241,7 +241,7 @@ def ObjectExpression(type, properties):
return name


def Property(type, kind, key, computed, value, method, shorthand):
def Property(type, kind, key, computed, value, method, shorthand, comments=None):
if shorthand or computed:
raise NotImplementedError(
'Shorthand and Computed properties not implemented!')

@@ -256,7 +256,7 @@ def Property(type, kind, key, computed, value, method, shorthand):


@limited
def UnaryExpression(type, operator, argument, prefix):
def UnaryExpression(type, operator, argument, prefix, comments=None):
a = trans(
argument, standard=True
) # unary involve some complex operations so we cant use line shorteners here

@@ -271,7 +271,7 @@ def UnaryExpression(type, operator, argument, prefix):


@limited
def BinaryExpression(type, operator, left, right):
def BinaryExpression(type, operator, left, right, comments=None):
a = trans(left)
b = trans(right)
# delegate to our friends

@@ -279,7 +279,7 @@ def BinaryExpression(type, operator, left, right):


@limited
def UpdateExpression(type, operator, argument, prefix):
def UpdateExpression(type, operator, argument, prefix, comments=None):
a = trans(
argument, standard=True
) # also complex operation involving parsing of the result so no line length reducing here

@@ -287,7 +287,7 @@ def UpdateExpression(type, operator, argument, prefix):


@limited
def AssignmentExpression(type, operator, left, right):
def AssignmentExpression(type, operator, left, right, comments=None):
operator = operator[:-1]
if left['type'] == 'Identifier':
if operator:

@@ -319,12 +319,12 @@ six


@limited
def SequenceExpression(type, expressions):
def SequenceExpression(type, expressions, comments=None):
return reduce(js_comma, (trans(e) for e in expressions))


@limited
def NewExpression(type, callee, arguments):
def NewExpression(type, callee, arguments, comments=None):
return trans(callee) + '.create(%s)' % ', '.join(
trans(e) for e in arguments)

@@ -332,7 +332,7 @@ def NewExpression(type, callee, arguments):
@limited
def ConditionalExpression(
type, test, consequent,
alternate): # caused plenty of problems in my home-made translator :)
alternate, comments=None): # caused plenty of problems in my home-made translator :)
return '(%s if %s else %s)' % (trans(consequent), trans(test),
trans(alternate))
@@ -340,49 +340,49 @@ def ConditionalExpression(
# =========== STATEMENTS =============


def BlockStatement(type, body):
def BlockStatement(type, body, comments=None):
return StatementList(
body) # never returns empty string! In the worst case returns pass\n


def ExpressionStatement(type, expression):
def ExpressionStatement(type, expression, comments=None):
return trans(expression) + '\n' # end expression space with new line


def BreakStatement(type, label):
def BreakStatement(type, label, comments=None):
if label:
return 'raise %s("Breaked")\n' % (get_break_label(label['name']))
else:
return 'break\n'


def ContinueStatement(type, label):
def ContinueStatement(type, label, comments=None):
if label:
return 'raise %s("Continued")\n' % (get_continue_label(label['name']))
else:
return 'continue\n'


def ReturnStatement(type, argument):
def ReturnStatement(type, argument, comments=None):
return 'return %s\n' % (trans(argument)
if argument else "var.get('undefined')")


def EmptyStatement(type):
def EmptyStatement(type, comments=None):
return 'pass\n'


def DebuggerStatement(type):
def DebuggerStatement(type, comments=None):
return 'pass\n'


def DoWhileStatement(type, body, test):
def DoWhileStatement(type, body, test, comments=None):
inside = trans(body) + 'if not %s:\n' % trans(test) + indent('break\n')
result = 'while 1:\n' + indent(inside)
return result


def ForStatement(type, init, test, update, body):
def ForStatement(type, init, test, update, body, comments=None):
update = indent(trans(update)) if update else ''
init = trans(init) if init else ''
if not init.endswith('\n'):

@@ -398,7 +398,7 @@ def ForStatement(type, init, test, update, body):
return result


def ForInStatement(type, left, right, body, each):
def ForInStatement(type, left, right, body, each, comments=None):
res = 'for PyJsTemp in %s:\n' % trans(right)
if left['type'] == "VariableDeclaration":
addon = trans(left) # make sure variable is registered

@@ -417,7 +417,7 @@ def ForInStatement(type, left, right, body, each):
return res


def IfStatement(type, test, consequent, alternate):
def IfStatement(type, test, consequent, alternate, comments=None):
# NOTE we cannot do elif because function definition inside elif statement would not be possible!
IF = 'if %s:\n' % trans(test)
IF += indent(trans(consequent))

@@ -427,7 +427,7 @@ def IfStatement(type, test, consequent, alternate):
return IF + ELSE


def LabeledStatement(type, label, body):
def LabeledStatement(type, label, body, comments=None):
# todo consider using smarter approach!
inside = trans(body)
defs = ''

@@ -448,7 +448,7 @@ def LabeledStatement(type, label, body):
return defs + inside


def StatementList(lis):
def StatementList(lis, comments=None):
if lis: # ensure we don't return empty string because it may ruin indentation!
code = ''.join(trans(e) for e in lis)
return code if code else 'pass\n'

@@ -456,7 +456,7 @@ def StatementList(lis):
return 'pass\n'


def PyimportStatement(type, imp):
def PyimportStatement(type, imp, comments=None):
lib = imp['name']
jlib = 'PyImport_%s' % lib
code = 'import %s as %s\n' % (lib, jlib)
@@ -471,7 +471,7 @@ def PyimportStatement(type, imp):
return code


def SwitchStatement(type, discriminant, cases):
def SwitchStatement(type, discriminant, cases, comments=None):
#TODO there will be a problem with continue in a switch statement.... FIX IT
code = 'while 1:\n' + indent('SWITCHED = False\nCONDITION = (%s)\n')
code = code % trans(discriminant)

@@ -491,12 +491,12 @@ def SwitchStatement(type, discriminant, cases):
return code


def ThrowStatement(type, argument):
def ThrowStatement(type, argument, comments=None):
return 'PyJsTempException = JsToPyException(%s)\nraise PyJsTempException\n' % trans(
argument)


def TryStatement(type, block, handler, handlers, guardedHandlers, finalizer):
def TryStatement(type, block, handler, handlers, guardedHandlers, finalizer, comments=None):
result = 'try:\n%s' % indent(trans(block))
# complicated catch statement...
if handler:

@@ -516,13 +516,13 @@ def TryStatement(type, block, handler, handlers, guardedHandlers, finalizer):
return result


def LexicalDeclaration(type, declarations, kind):
def LexicalDeclaration(type, declarations, kind, comments=None):
raise NotImplementedError(
'let and const not implemented yet but they will be soon! Check github for updates.'
)


def VariableDeclarator(type, id, init):
def VariableDeclarator(type, id, init, comments=None):
name = id['name']
# register the name if not already registered
Context.register(name)

@@ -531,21 +531,21 @@ def VariableDeclarator(type, id, init):
return ''


def VariableDeclaration(type, declarations, kind):
def VariableDeclaration(type, declarations, kind, comments=None):
code = ''.join(trans(d) for d in declarations)
return code if code else 'pass\n'


def WhileStatement(type, test, body):
def WhileStatement(type, test, body, comments=None):
result = 'while %s:\n' % trans(test) + indent(trans(body))
return result


def WithStatement(type, object, body):
def WithStatement(type, object, body, comments=None):
raise NotImplementedError('With statement not implemented!')


def Program(type, body):
def Program(type, body, comments=None):
inline_stack.reset()
code = ''.join(trans(e) for e in body)
# here add hoisted elements (register variables and define functions)

@@ -559,7 +559,7 @@ def Program(type, body):


def FunctionDeclaration(type, id, params, defaults, body, generator,
expression):
expression, comments=None):
if generator:
raise NotImplementedError('Generators not supported')
if defaults:

@@ -610,7 +610,7 @@ def FunctionDeclaration(type, id, params, defaults, body, generator,


def FunctionExpression(type, id, params, defaults, body, generator,
expression):
expression, comments=None):
if generator:
raise NotImplementedError('Generators not supported')
if defaults:
plugin.video.alfa/lib/jsc.py (new file, 83 lines)
@@ -0,0 +1,83 @@
MAPPING = {
'a': '(false+"")[1]',
'b': '([]["entries"]()+"")[2]',
'c': '([]["fill"]+"")[3]',
'd': '(undefined+"")[2]',
'e': '(true+"")[3]',
'f': '(false+"")[0]',
'g': '(false+[0]+String)[20]',
'h': '(+(101))["to"+String["name"]](21)[1]',
'i': '([false]+undefined)[10]',
'j': '([]["entries"]()+"")[3]',
'k': '(+(20))["to"+String["name"]](21)',
'l': '(false+"")[2]',
'm': '(Number+"")[11]',
'n': '(undefined+"")[1]',
'o': '(true+[]["fill"])[10]',
'p': '(+(211))["to"+String["name"]](31)[1]',
'q': '(+(212))["to"+String["name"]](31)[1]',
'r': '(true+"")[1]',
's': '(false+"")[3]',
't': '(true+"")[0]',
'u': '(undefined+"")[0]',
'v': '(+(31))["to"+String["name"]](32)',
'w': '(+(32))["to"+String["name"]](33)',
'x': '(+(101))["to"+String["name"]](34)[1]',
'y': '(NaN+[Infinity])[10]',
'z': '(+(35))["to"+String["name"]](36)',
'A': '(+[]+Array)[10]',
'B': '(+[]+Boolean)[10]',
'C': 'Function("return escape")()(("")["italics"]())[2]',
'D': 'Function("return escape")()([]["fill"])["slice"]("-1")',
'E': '(RegExp+"")[12]',
'F': '(+[]+Function)[10]',
'G': '(false+Function("return Date")()())[30]',
'I': '(Infinity+"")[0]',
'M': '(true+Function("return Date")()())[30]',
'N': '(NaN+"")[0]',
'O': '(NaN+Function("return{}")())[11]',
'R': '(+[]+RegExp)[10]',
'S': '(+[]+String)[10]',
'T': '(NaN+Function("return Date")()())[30]',
'U': '(NaN+Function("return{}")()["to"+String["name"]]["call"]())[11]',
' ': '(NaN+[]["fill"])[11]',
'"': '("")["fontcolor"]()[12]',
'%': 'Function("return escape")()([]["fill"])[21]',
'&': '("")["link"](0+")[10]',
'(': '(undefined+[]["fill"])[22]',
')': '([0]+false+[]["fill"])[20]',
'+': '(+(+!+[]+(!+[]+[])[!+[]+!+[]+!+[]]+[+!+[]]+[+[]]+[+[]])+[])[2]',
',': '([]["slice"]["call"](false+"")+"")[1]',
'-': '(+(.+[0000000001])+"")[2]',
'.': '(+(+!+[]+[+!+[]]+(!![]+[])[!+[]+!+[]+!+[]]+[!+[]+!+[]]+[+[]])+[])[+!+[]]',
'/': '(false+[0])["italics"]()[10]',
':': '(RegExp()+"")[3]',
';': '("")["link"](")[14]',
'<': '("")["italics"]()[0]',
'=': '("")["fontcolor"]()[11]',
'>': '("")["italics"]()[2]',
'?': '(RegExp()+"")[2]',
'[': '([]["entries"]()+"")[0]',
']': '([]["entries"]()+"")[22]',
'{': '(true+[]["fill"])[20]',
'}': '([]["fill"]+"")["slice"]("-1")'
}

SIMPLE = {
'false': '![]',
'true': '!![]',
'undefined': '[][[]]',
'NaN': '+[![]]',
'Infinity': '+(+!+[]+(!+[]+[])[!+[]+!+[]+!+[]]+[+!+[]]+[+[]]+[+[]]+[+[]])' # +"1e1000"
}

def jsunc(jscString):

for key in sorted(MAPPING, key=lambda k: len(MAPPING[k]), reverse=True):
if MAPPING.get(key) in jscString:
jscString = jscString.replace(MAPPING.get(key), '"{}"'.format(key))

for key in sorted(SIMPLE, key=lambda k: len(SIMPLE[k]), reverse=True):
if SIMPLE.get(key) in jscString:
jscString = jscString.replace(SIMPLE.get(key), '{}'.format(key))
return jscString
@@ -4,7 +4,7 @@
"ignore_urls": [],
"patterns": [
{
"pattern": "((?:fembed|divload).com/v/[A-z0-9_-]+)",
"pattern": "((?:fembed|divload).com/(?:f|v)/[A-z0-9_-]+)",
"url": "https://www.\\1"
}
]

@@ -16,6 +16,7 @@ def test_video_exists(page_url):
def get_video_url(page_url, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
video_urls = []
page_url = page_url.replace("/f/","/v/")
page_url = page_url.replace("/v/","/api/source/")
data = httptools.downloadpage(page_url, post={}).data
data = jsontools.load(data)
@@ -16,7 +16,7 @@
"url": "http://docs.google.com/get_video_info?docid=\\1"
},
{
"pattern": "(?s)https://(?:docs|drive).google.com/file/d/([^/]+)/(?:preview|edit)",
"pattern": "(?s)(?:https|http)://(?:docs|drive).google.com/file/d/([^/]+)/(?:preview|edit|view)",
"url": "http://docs.google.com/get_video_info?docid=\\1"
},
{
plugin.video.alfa/servers/videobb.json (new file, 42 lines)
@@ -0,0 +1,42 @@
{
"active": true,
"find_videos": {
"ignore_urls": [],
"patterns": [
{
"pattern": "videobb.ru/v/([A-z0-9]+)",
"url": "https://videobb.ru/api/source/\\1"
}
]
},
"free": true,
"id": "videobb",
"name": "videobb",
"settings": [
{
"default": false,
"enabled": true,
"id": "black_list",
"label": "@60654",
"type": "bool",
"visible": true
},
{
"default": 0,
"enabled": true,
"id": "favorites_servers_list",
"label": "@60655",
"lvalues": [
"No",
"1",
"2",
"3",
"4",
"5"
],
"type": "list",
"visible": false
}
],
"thumbnail": "https://www.cinetux.to/videobb/logo.jpg"
}
plugin.video.alfa/servers/videobb.py (new file, 31 lines)
@@ -0,0 +1,31 @@
# -*- coding: utf-8 -*-
# --------------------------------------------------------
# Conector videobb By Alfa development Group
# --------------------------------------------------------

from core import httptools
from core import scrapertools
from core import jsontools
from platformcode import logger


def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
if "no longer exists" in data:
return False, "[videobb] El video ha sido borrado"
return True, ""


def get_video_url(page_url, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
video_urls = []
id = scrapertools.find_single_match("v/(\w+)", page_url)
post = "r=&d=videobb.ru"
data = httptools.downloadpage(page_url, post=post).data
data = jsontools.load(data)["data"]
for url in data:
video_urls.append([url["label"] + "p [videobb]", url["file"]])
logger.info("Intel11 %s" %data)

return video_urls
@@ -4,8 +4,8 @@
"ignore_urls": [],
"patterns": [
{
"pattern": "(?i)(https://vidlox.(?:tv|me)/embed-.*?.html)",
"url": "\\1"
"pattern": "(?i)(vidlox.(?:tv|me)/embed-\\w+)",
"url": "https://\\1.html"
}
]
},