KoD 0.8
- lots of under-the-hood improvements, initial support for the upcoming Kodi 19
- new viewing mode for the next episode
- fixed wstream by adding a window for solving the reCaptcha
- added a "report a problem" section under Help
- other fixes and assorted improvements to channels and servers
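Much of the "initial Kodi 19 support" consists of Python 2/3 compatibility work that recurs throughout the diff below: "except Exception, ex" becomes "except Exception as ex", dict.has_key(key) becomes "key in dict", and version-dependent imports are wrapped in try/except. A minimal sketch of the urlparse import idiom the changed channels use (the diff itself uses a bare except; ImportError is the narrower form):

try:
    import urlparse                     # Python 2 / Kodi <= 18
except ImportError:
    import urllib.parse as urlparse     # Python 3 / Kodi 19

print(urlparse.urljoin('https://www.cb01.uno/', '/film/'))  # works on both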
addon.xml
@@ -1,4 +1,4 @@
<addon id="plugin.video.kod" name="Kodi on Demand" provider-name="KOD Team" version="0.7.2">
<addon id="plugin.video.kod" name="Kodi on Demand" provider-name="KOD Team" version="0.8">
<requires>
<import addon="xbmc.python" version="2.1.0"/>
<import addon="script.module.libtorrent" optional="true"/>
@@ -19,12 +19,11 @@
<screenshot>resources/media/themes/ss/2.png</screenshot>
<screenshot>resources/media/themes/ss/3.png</screenshot>
</assets>
<news>- - aggiunto raiplay
- agigunto d.s.d.a (ex documentaristreamingda)
- svariati fix ai canali (eurostreaming, streamtime, piratestreaming, altadefinizioneclick)
- la videoteca ora può essere messa nelle unità di rete
- aggiunto server upstream
- altri piccoli fix vari</news>
<news>- tanti miglioramenti "sotto il cofano", supporto iniziale al futuro kodi 19
- Nuova modalità di visualizzazione per episodio successivo
- fixato wstream tramite l'aggiunta della finestra per risolvere il reCaptcha
- aggiunta sezione "segnala un problema" in Aiuto
- altri fix e migliorie varie a canali e server</news>
<description lang="it">Naviga velocemente sul web e guarda i contenuti presenti</description>
<disclaimer>[COLOR red]The owners and submitters to this addon do not host or distribute any of the content displayed by these addons nor do they have any affiliation with the content providers.[/COLOR]
[COLOR yellow]Kodi © is a registered trademark of the XBMC Foundation. We are not connected to or in any other way affiliated with Kodi, Team Kodi, or the XBMC Foundation. Furthermore, any software, addons, or products offered by us will receive no support in official Kodi channels, including the Kodi forums and various social networks.[/COLOR]</disclaimer>

@@ -1,17 +1,17 @@
{
"altadefinizione01": "https://www.altadefinizione01.tel",
"altadefinizione01_link": "https://altadefinizione01.cam",
"altadefinizioneclick": "https://altadefinizione.style",
"altadefinizione01": "https://altadefinizione01.media",
"altadefinizione01_link": "https://altadefinizione01.kim",
"altadefinizioneclick": "https://altadefinizione.style",
"animeforce": "https://ww1.animeforce.org",
"animeleggendari": "https://animepertutti.com",
"animesaturn": "https://animesaturn.com",
"animesaturn": "https://www.animesaturn.com",
"animestream": "https://www.animeworld.it",
"animesubita": "http://www.animesubita.org",
"animetubeita": "http://www.animetubeita.com",
"animeunity": "https://www.animeunity.it",
"animeworld": "https://www.animeworld.cc",
"casacinema": "https://www.casacinema.biz",
"casacinemaInfo": "https://casacinema.kim",
"animeworld": "https://www.animeworld.tv",
"casacinema": "https://www.casacinema.bid",
"casacinemaInfo": "https://casacinema.blue",
"cb01anime": "https://www.cineblog01.ink",
"cinetecadibologna": "http://cinestore.cinetecadibologna.it",
"dreamsub": "https://dreamsub.stream",
@@ -19,13 +19,13 @@
"fastsubita": "https://fastsubita.com",
"filmgratis": "https://www.filmaltadefinizione.org",
"filmigratis": "https://filmigratis.org",
"filmpertutti": "https://www.filmpertutti.casa",
"filmsenzalimiticc": "https://www.filmsenzalimiti.monster",
"filmpertutti": "https://www.filmpertutti.date",
"filmsenzalimiticc": "https://www.filmsenzalimiti.london",
"filmstreaming01": "https://filmstreaming01.com",
"guardarefilm": "https://www.guardarefilm.red",
"guardaserie_stream": "https://guardaserie.store",
"guardaserieclick": "https://www.guardaserie.media",
"ilgeniodellostreaming": "https://ilgeniodellostreaming.si",
"ilgeniodellostreaming": "https://ilgeniodellostreaming.si",
"italiaserie": "https://italiaserie.org",
"mondoserietv": "https://mondoserietv.com",
"netfreex": "https://www.netfreex.pro",
@@ -33,12 +33,11 @@
"polpotv": "https://polpo.tv",
"pufimovies": "https://pufimovies.com",
"raiplay": "https://www.raiplay.it",
"seriehd": "https://www.seriehd.watch",
"seriehd": "https://seriehd.click",
"serietvonline": "https://serietvonline.icu",
"serietvsubita": "http://serietvsubita.xyz",
"serietvu": "https://www.serietvu.link",
"streamingaltadefinizione": "https://www.popcornstream.best",
"streamtime": "https://t.me/s/StreamTime",
"streamtime": "https://t.me/s/StreamTime",
"tantifilm": "https://www.tantifilm.eu",
"toonitalia": "https://toonitalia.org",
"vedohd": "https://vedohd.uno",

@@ -44,8 +44,6 @@

# per l'uso dei decoratori, per i log, e funzioni per siti particolari
from core import support
# se non si fa uso di findhost()
from platformcode import config

# in caso di necessità
#from core import scrapertools, httptools, servertools, tmdb
@@ -54,30 +52,22 @@ from core.item import Item # per newest

##### fine import

# impostazioni variabili o def findhost()

# se necessaria la variabile __channel__
# da cancellare se non utilizzata
__channel__ = "id nel json"
# da cancellare se si utilizza findhost()
host = config.get_channel_url('id nel json' OR __channel__) # <-- ATTENZIONE
headers = [['Referer', host]]

# Inizio findhost() - da cancellare se usato l'altro metodo
#impostati dinamicamente da findhost()
host = ""
headers = ""

# se il sito ha un link per ottenere l'url corretto in caso di oscuramenti
# la funzione deve ritornare l'indirizzo corretto, verrà chiamata solo se necessario (link primario irraggiungibile)
def findhost():
global host, headers
# da adattare alla bisogna...
permUrl = httptools.downloadpage('INSERIRE-URL-QUI', follow_redirects=False).headers
host = 'https://www.'+permUrl['location'].replace('https://www.google.it/search?q=site:', '')
# cancellare host non utilizzato
host = scrapertools.find_single_match(permUrl, r'<div class="elementor-button-wrapper"> <a href="([^"]+)"')
headers = [['Referer', host]]
# così le imposta una volta per tutte
### fine findhost
def findhost():
permUrl = httptools.downloadpage('https://www.cb01.uno/', follow_redirects=False).headers
if 'google' in permUrl['location']:
host = permUrl['location'].replace('https://www.google.it/search?q=site:', '')
else:
host = permUrl['location']
return host

# se si usa findhost
host = config.get_channel_url(findhost)
# se non si usa (metti l'url in channels.json)
host = config.get_channel_url()
headers = [['Referer', host]]

# server di esempio...
list_servers = ['supervideo', 'streamcherry','rapidvideo', 'streamango', 'openload']
@@ -141,25 +131,24 @@ def mainlist(item):
nome = [('', ['', '', '', ''])]
return locals()

# Legenda known_keys per i groups nei patron
# known_keys = ['url', 'title', 'title2', 'season', 'episode', 'thumb', 'quality',
# 'year', 'plot', 'duration', 'genere', 'rating', 'type', 'lang']
# url = link relativo o assoluto alla pagina titolo film/serie
# title = titolo Film/Serie/Anime/Altro
# title2 = titolo dell'episodio Serie/Anime/Altro
# season = stagione in formato numerico
# episode = numero episodio, in formato numerico.
# thumb = linkrealtivo o assoluto alla locandina Film/Serie/Anime/Altro
# quality = qualità indicata del video
# year = anno in formato numerico (4 cifre)
# duration = durata del Film/Serie/Anime/Altro
# genere = genere del Film/Serie/Anime/Altro. Es: avventura, commedia
# rating = punteggio/voto in formato numerico
# type = tipo del video. Es. movie per film o tvshow per le serie. Di solito sono discrimanti usati dal sito
# lang = lingua del video. Es: ITA, Sub-ITA, Sub, SUB ITA.
# AVVERTENZE: Se il titolo è trovato nella ricerca TMDB/TVDB/Altro allora le locandine e altre info non saranno quelle recuperate nel sito.!!!!

@support.scrape
def peliculas(item):
support.log(item)
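For orientation, a minimal channel assembled from the template above might look like the sketch below (hypothetical: the patron and the channel's entry in channels.json are made up; host resolution uses the channels.json method instead of findhost()):

from core import support
from platformcode import config

host = config.get_channel_url()     # url taken from channels.json
headers = [['Referer', host]]
list_servers = ['supervideo']

@support.scrape
def peliculas(item):
    # groups named after the known_keys described in the legend above
    patron = r'<a href="(?P<url>[^"]+)" title="(?P<title>[^"]+)">'
    return locals()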

@@ -9,6 +9,7 @@ from core import scrapertools, httptools, servertools, tmdb, support
from core.item import Item
from lib import unshortenit
from platformcode import logger, config
from lib.concurrent import futures

def findhost():
@@ -35,7 +36,7 @@ def mainlist(item):
('HD', ['', 'menu', 'Film HD Streaming']),
('Generi', ['', 'menu', 'Film per Genere']),
('Anni', ['', 'menu', 'Film per Anno']),
('Paese', ['', 'menu', 'Film per Paese']),
('Paese', ['', 'menu', 'Film per Paese']),
('Ultimi Aggiornati',['/lista-film-ultimi-100-film-aggiornati/', 'peliculas', 'newest']),
('Ultimi Aggiunti', ['/lista-film-ultimi-100-film-aggiunti/', 'peliculas', 'newest'])
]
@@ -63,7 +64,7 @@ def menu(item):

# @support.scrape
# def newest(categoria):
#
#
# # debug = True
# patron = r'<a href="?(?P<url>[^">]+)"?>(?P<title>[^<([]+)(?:\[(?P<lang>Sub-ITA|B/N|SUB-ITA)\])?\s*(?:\[(?P<quality>HD|SD|HD/3D)\])?\s*\((?P<year>[0-9]{4})\)<\/a>'

@@ -93,7 +94,7 @@ def menu(item):

def newest(categoria):
support.log(categoria)

item = support.Item()
try:
if categoria == "series":
@@ -170,36 +171,28 @@ def episodios(item):

def findvideos(item):

if item.contentType == "episode":
return findvid_serie(item)

def load_links(itemlist, re_txt, color, desc_txt, quality=""):
streaming = scrapertools.find_single_match(data, re_txt).replace('"', '')
support.log('STREAMING',streaming)
support.log('STREAMING=', streaming)
# patron = '<td><a.*?href=(.*?) (?:target|rel)[^>]+>([^<]+)<'
patron = '<td><a.*?href=([^ ]+) [^>]+>([^<]+)<'
matches = re.compile(patron, re.DOTALL).findall(streaming)
for scrapedurl, scrapedtitle in matches:
logger.debug("##### findvideos %s ## %s ## %s ##" % (desc_txt, scrapedurl, scrapedtitle))
itemlist.append(
Item(channel=item.channel,
action="play",
title=scrapedtitle,
url=scrapedurl,
server=scrapedtitle,
fulltitle=item.fulltitle,
thumbnail=item.thumbnail,
show=item.show,
quality=quality,
contentType=item.contentType,
folder=False))
def load_links(urls, re_txt, desc_txt, quality=""):
if re_txt:
streaming = scrapertools.find_single_match(data, re_txt).replace('"', '')
support.log('STREAMING',streaming)
matches = support.match(streaming, patron = r'<td><a.*?href=([^ ]+) [^>]+>[^<]+<').matches
with futures.ThreadPoolExecutor() as executor:
u = [executor.submit(final_links, match) for match in matches]
for res in futures.as_completed(u):
if res.result():
urls.append(res.result())
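# final_links() (defined further down in this file) unshortens each scraped
# link; submitting every match to the ThreadPoolExecutor resolves the links
# in parallel instead of one at a time.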
# for url in matches:
# # logger.debug("##### findvideos %s ## %s ## %s ##" % (desc_txt, url, server))
# urls.append(final_links(url))

support.log()

itemlist = []
itemlist = urls = []

# Carica la pagina
data = httptools.downloadpage(item.url).data
@@ -213,22 +206,22 @@ def findvideos(item):
QualityStr = scrapertools.decodeHtmlentities(match.group(1))

# Estrae i contenuti - Streaming
load_links(itemlist, '<strong>Streamin?g:</strong>(.*?)cbtable', "orange", "Streaming", "SD")
load_links(urls, '<strong>Streamin?g:</strong>(.*?)cbtable', "Streaming", "SD")

# Estrae i contenuti - Streaming HD
load_links(itemlist, '<strong>Streamin?g HD[^<]+</strong>(.*?)cbtable', "yellow", "Streaming HD", "HD")
load_links(urls, '<strong>Streamin?g HD[^<]+</strong>(.*?)cbtable', "Streaming HD", "HD")

# Estrae i contenuti - Streaming 3D
load_links(itemlist, '<strong>Streamin?g 3D[^<]+</strong>(.*?)cbtable', "pink", "Streaming 3D")

itemlist=support.server(item, itemlist=itemlist)
load_links(urls, '<strong>Streamin?g 3D[^<]+</strong>(.*?)cbtable', "Streaming 3D")

itemlist=support.server(item, urls)
if itemlist and QualityStr:
itemlist.insert(0,
Item(channel=item.channel,
action="",
title="[COLOR orange]%s[/COLOR]" % QualityStr,
title=support.typo(QualityStr,'[] color kod bold'),
folder=False))

return itemlist

# Estrae i contenuti - Download
@@ -239,68 +232,47 @@ def findvideos(item):

def findvid_serie(item):
def load_vid_series(html, item, itemlist, blktxt):
logger.info('HTML' + html)
patron = r'<a href="([^"]+)"[^=]+="_blank"[^>]+>(?!<!--)(.*?)</a>'
def load_vid_series(html, item, urls, blktxt=''):
# logger.info('HTML' + html)
# patron = r'<a href="([^"]+)"[^=]+="_blank"[^>]+>(?!<!--)(.*?)</a>'
# Estrae i contenuti
matches = re.compile(patron, re.DOTALL).finditer(html)
for match in matches:
scrapedurl = match.group(1)
scrapedtitle = match.group(2)
# title = item.title + " [COLOR blue][" + scrapedtitle + "][/COLOR]"
itemlist.append(
Item(channel=item.channel,
action="play",
title=scrapedtitle,
url=scrapedurl,
server=scrapedtitle,
fulltitle=item.fulltitle,
show=item.show,
contentType=item.contentType,
folder=False))
# matches = re.compile(patron, re.DOTALL).finditer(html)
matches = support.match(html, patron = r'<a href="([^"]+)"[^=]+="_blank"[^>]+>(?!<!--).*?</a>').matches
with futures.ThreadPoolExecutor() as executor:
u = [executor.submit(final_links, match) for match in matches]
for res in futures.as_completed(u):
if res.result():
urls.append(res.result())
# for url, server in matches:
# urls.append(final_links(url))

support.log()

itemlist = []
lnkblk = []
lnkblkp = []
urls = []

data = item.url

# First blocks of links
if data[0:data.find('<a')].find(':') > 0:
lnkblk.append(data[data.find(' - ') + 3:data[0:data.find('<a')].find(':') + 1])
lnkblkp.append(data.find(' - ') + 3)
else:
lnkblk.append(' ')
lnkblkp.append(data.find('<a'))
# Blocks with split
blk=re.split(r"(?:>\s*)?([A-Za-z\s0-9]*):\s*<",data,re.S)
blktxt=""
for b in blk:
if b[0:3]=="a h" or b[0:4]=="<a h":
load_vid_series("<%s>"%b, item, urls, blktxt)
blktxt=""
elif len(b.strip())>1:
blktxt=b.strip()

# Find new blocks of links
patron = r'<a\s[^>]+>[^<]+</a>([^<]+)'
matches = re.compile(patron, re.DOTALL).finditer(data)
for match in matches:
sep = match.group(1)
if sep != ' - ':
lnkblk.append(sep)

i = 0
if len(lnkblk) > 1:
for lb in lnkblk[1:]:
lnkblkp.append(data.find(lb, lnkblkp[i] + len(lnkblk[i])))
i = i + 1

for i in range(0, len(lnkblk)):
if i == len(lnkblk) - 1:
load_vid_series(data[lnkblkp[i]:], item, itemlist, lnkblk[i])
else:
load_vid_series(data[lnkblkp[i]:lnkblkp[i + 1]], item, itemlist, lnkblk[i])

return support.server(item, itemlist=itemlist)
return support.server(item, urls)

def play(item):
def final_links(url):
support.log()
itemlist = []
item= Item()
item.url = url
### Handling new cb01 wrapper
if host[9:] + "/film/" in item.url:
iurl = httptools.downloadpage(item.url, only_headers=True, follow_redirects=False).headers.get("location", "")
@@ -324,9 +296,9 @@ def play(item):
data, c = unshortenit.unwrap_30x_only(data)
else:
data = scrapertools.find_single_match(data, r'<a href="([^"]+)".*?class="btn-wrapper">.*?licca.*?</a>')

logger.debug("##### play go.php data ##\n%s\n##" % data)
else:
data = support.swzz_get_url(item)

return servertools.find_video_items(data=data)
return data

@@ -14,7 +14,10 @@ list_quality = ['default']

def findhost():
permUrl = httptools.downloadpage('https://www.cinemalibero.online/', follow_redirects=False).headers
import urlparse
try:
import urlparse
except:
import urllib.parse as urlparse
p = list(urlparse.urlparse(permUrl['location'].replace('https://www.google.com/search?q=site:', '')))
if not p[0]:
p[0] = 'https'

@@ -3,14 +3,7 @@
# Ringraziamo Icarus crew
# Canale per cinetecadibologna
# ------------------------------------------------------------

import re

import urlparse

from core import httptools, scrapertools
from core.item import Item
from platformcode import logger, config

from core import support

@@ -110,6 +110,7 @@ def episodios(item):
def findvideos(item):
itemlist = []
support.log()
# support.dbg()

matches = support.match(item, patron=r'href="([^"]+)"', patronBlock=r'<div style="white-space: (.*?)<div id="main-content"')

@@ -118,27 +119,37 @@ def findvideos(item):
item.contentType = 'tvshow'
return episodios(item)

# matches.matches.sort()
support.log('VIDEO')
for url in matches.matches:
lang = url.split('/')[-2]
if 'ita' in lang.lower():
language = 'ITA'
if 'sub' in lang.lower():
language = 'Sub-' + language
quality = url.split('/')[-1]

if 'vvvvid' in matches.data:
itemlist.append(
support.Item(channel=item.channel,
action="play",
contentType=item.contentType,
title=language,
url=url,
contentLanguage = language,
quality = quality,
order = quality.replace('p','').zfill(4),
server='directo',
))
support.Item(channel=item.channel,
action="play",
contentType=item.contentType,
title='vvvid',
url=support.match(matches.data, patron=r'(http://www.vvvvid[^"]+)').match,
server='vvvvid',
))
else:
# matches.matches.sort()
support.log('VIDEO')
for url in matches.matches:
lang = url.split('/')[-2]
if 'ita' in lang.lower():
language = 'ITA'
if 'sub' in lang.lower():
language = 'Sub-' + language
quality = url.split('/')[-1]

itemlist.append(
support.Item(channel=item.channel,
action="play",
contentType=item.contentType,
title=language,
url=url,
contentLanguage = language,
quality = quality,
order = quality.replace('p','').zfill(4),
server='directo',
))

itemlist.sort(key=lambda x: (x.title, x.order), reverse=False)
return support.server(item, itemlist=itemlist)
@@ -3,10 +3,8 @@
# Ringraziamo Icarus crew
# Canale per documentaristreamingda
# ------------------------------------------------------------
import re
import urlparse

from core import httptools, scrapertools, servertools, support
from core import support
from core.item import Item
from platformcode import logger, config

@@ -4,7 +4,10 @@
# ------------------------------------------------------------
import re

import urlparse
try:
import urlparse
except:
import urllib.parse as urlparse

from core import scrapertools, servertools, httptools
from core import tmdb

@@ -69,7 +69,6 @@ def getmainlist(view="thumb_"):
itemlist.append(Item(title=config.get_localized_string(30100), channel="setting", action="mainlist",
thumbnail=get_thumb(thumb_setting, view),
category=config.get_localized_string(30100), viewmode="list"))

itemlist.append(Item(title=config.get_localized_string(30104) + " (v" + config.get_addon_version(with_fix=True) + ")", channel="help", action="mainlist",
thumbnail=get_thumb("help.png", view),
category=config.get_localized_string(30104), viewmode="list"))
@@ -109,7 +108,7 @@ def getchanneltypes(view="thumb_"):

itemlist.append(Item(title=config.get_localized_string(70685), channel="community", action="mainlist", view=view,
category=title, channel_type="all", thumbnail=get_thumb("channels_community.png", view),
category=config.get_localized_string(70685), channel_type="all", thumbnail=get_thumb("channels_community.png", view),
viewmode="thumbnails"))
return itemlist

@@ -150,9 +149,9 @@ def filterchannels(category, view="thumb_"):
if channel_parameters["channel"] == 'community':
continue

# si el canal no es compatible, no se muestra
if not channel_parameters["compatible"]:
continue
# # si el canal no es compatible, no se muestra
# if not channel_parameters["compatible"]:
# continue

# Si no es un canal lo saltamos
if not channel_parameters["channel"]:

@@ -3,10 +3,9 @@
# channeltools - Herramientas para trabajar con canales
# ------------------------------------------------------------

import os

import jsontools
from __future__ import absolute_import

from core import jsontools
from platformcode import config, logger

DEFAULT_UPDATE_URL = "/channels/"
@@ -14,6 +13,7 @@ dict_channels_parameters = dict()

remote_path = 'https://raw.githubusercontent.com/kodiondemand/media/master/'

def is_adult(channel_name):
logger.info("channel_name=" + channel_name)
channel_parameters = get_channel_parameters(channel_name)
@@ -27,6 +27,7 @@ def is_enabled(channel_name):

def get_channel_parameters(channel_name):
from core import filetools
global dict_channels_parameters

if channel_name not in dict_channels_parameters:
@@ -35,20 +36,22 @@ def get_channel_parameters(channel_name):
# logger.debug(channel_parameters)
if channel_parameters:
# cambios de nombres y valores por defecto
channel_parameters["title"] = channel_parameters.pop("name") + (' [DEPRECATED]' if channel_parameters.has_key('deprecated') and channel_parameters['deprecated'] else '')
channel_parameters["title"] = channel_parameters.pop("name") + (' [DEPRECATED]' if 'deprecated' in channel_parameters and channel_parameters['deprecated'] else '')
channel_parameters["channel"] = channel_parameters.pop("id")

# si no existe el key se declaran valor por defecto para que no de fallos en las funciones que lo llaman
channel_parameters["adult"] = channel_parameters.get("adult", False)
logger.info(channel_parameters["adult"])
if channel_parameters["adult"]:
channel_parameters["update_url"] = channel_parameters.get("update_url", DEFAULT_UPDATE_URL+'porn/')
channel_parameters["update_url"] = channel_parameters.get("update_url",
DEFAULT_UPDATE_URL + 'porn/')
else:
channel_parameters["update_url"] = channel_parameters.get("update_url", DEFAULT_UPDATE_URL)
channel_parameters["language"] = channel_parameters.get("language", ["all"])
## channel_parameters["adult"] = channel_parameters.get("adult", False)
## channel_parameters["adult"] = channel_parameters.get("adult", False)
channel_parameters["active"] = channel_parameters.get("active", False)
channel_parameters["include_in_global_search"] = channel_parameters.get("include_in_global_search", False)
channel_parameters["include_in_global_search"] = channel_parameters.get("include_in_global_search",
False)
channel_parameters["categories"] = channel_parameters.get("categories", list())

channel_parameters["thumbnail"] = channel_parameters.get("thumbnail", "")
@@ -57,57 +60,27 @@ def get_channel_parameters(channel_name):

# Imagenes: se admiten url y archivos locales dentro de "resources/images"
if channel_parameters.get("thumbnail") and "://" not in channel_parameters["thumbnail"]:
channel_parameters["thumbnail"] = os.path.join(remote_path, 'resources', "thumb", channel_parameters["thumbnail"])
channel_parameters["thumbnail"] = filetools.join(remote_path, "resources", "thumb", channel_parameters["thumbnail"])
if channel_parameters.get("banner") and "://" not in channel_parameters["banner"]:
channel_parameters["banner"] = os.path.join(remote_path, 'resources', "banner", channel_parameters["banner"])
channel_parameters["banner"] = filetools.join(remote_path, "resources", "banner", channel_parameters["banner"])
if channel_parameters.get("fanart") and "://" not in channel_parameters["fanart"]:
channel_parameters["fanart"] = os.path.join(remote_path, 'resources', "fanart", channel_parameters["fanart"])
channel_parameters["fanart"] = filetools.join(remote_path, "resources", channel_parameters["fanart"])

# Obtenemos si el canal tiene opciones de configuración
channel_parameters["has_settings"] = False
if 'settings' in channel_parameters:
# if not isinstance(channel_parameters['settings'], list):
# channel_parameters['settings'] = [channel_parameters['settings']]

# if "include_in_global_search" in channel_parameters['settings']:
# channel_parameters["include_in_global_search"] = channel_parameters['settings']
# ["include_in_global_search"].get('default', False)
#
# found = False
# for el in channel_parameters['settings']:
# for key in el.items():
# if 'include_in' not in key:
# channel_parameters["has_settings"] = True
# found = True
# break
# if found:
# break
channel_parameters['settings'] = get_default_settings(channel_name)
for s in channel_parameters['settings']:
if 'id' in s:
if s['id'] == "include_in_global_search":
channel_parameters["include_in_global_search"] = True
elif s['id'] == "filter_languages":
channel_parameters["filter_languages"] = s.get('lvalues',[])
channel_parameters["filter_languages"] = s.get('lvalues', [])
elif s['id'].startswith("include_in_"):
channel_parameters["has_settings"] = True

del channel_parameters['settings']

# Compatibilidad
if 'compatible' in channel_parameters:
# compatible python
python_compatible = True
if 'python' in channel_parameters["compatible"]:
import sys
python_condition = channel_parameters["compatible"]['python']
if sys.version_info < tuple(map(int, (python_condition.split(".")))):
python_compatible = False
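# e.g. "compatible": {"python": "3"} in the channel json yields the tuple (3,);
# on a Python 2 interpreter sys.version_info < (3,) is True, so the channel
# is flagged as not compatible.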

channel_parameters["compatible"] = python_compatible
else:
channel_parameters["compatible"] = True

dict_channels_parameters[channel_name] = channel_parameters

else:
@@ -115,13 +88,12 @@ def get_channel_parameters(channel_name):
# lanzamos la excepcion y asi tenemos los valores básicos
raise Exception

except Exception, ex:
except Exception as ex:
logger.error(channel_name + ".json error \n%s" % ex)
channel_parameters = dict()
channel_parameters["channel"] = ""
channel_parameters["adult"] = False
channel_parameters['active'] = False
channel_parameters["compatible"] = True
channel_parameters["language"] = ""
channel_parameters["update_url"] = DEFAULT_UPDATE_URL
return channel_parameters
@@ -131,25 +103,26 @@ def get_channel_parameters(channel_name):

def get_channel_json(channel_name):
# logger.info("channel_name=" + channel_name)
import filetools
from core import filetools
channel_json = None
try:
channel_path = filetools.join(config.get_runtime_path(), "channels", channel_name + ".json")
if not os.path.isfile(channel_path):
if not filetools.isfile(channel_path):
channel_path = filetools.join(config.get_runtime_path(), 'channels', "porn", channel_name + ".json")
if not os.path.isfile(channel_path):
if not filetools.isfile(channel_path):
channel_path = filetools.join(config.get_runtime_path(), "specials", channel_name + ".json")
if not os.path.isfile(channel_path):
if not filetools.isfile(channel_path):
channel_path = filetools.join(config.get_runtime_path(), "servers", channel_name + ".json")
if not os.path.isfile(channel_path):
channel_path = filetools.join(config.get_runtime_path(), "servers", "debriders", channel_name + ".json")
if not filetools.isfile(channel_path):
channel_path = filetools.join(config.get_runtime_path(), "servers", "debriders",
channel_name + ".json")

if filetools.isfile(channel_path):
# logger.info("channel_data=" + channel_path)
channel_json = jsontools.load(filetools.read(channel_path))
# logger.info("channel_json= %s" % channel_json)

except Exception, ex:
except Exception as ex:
template = "An exception of type %s occured. Arguments:\n%r"
message = template % (type(ex).__name__, ex.args)
logger.error(" %s" % message)
@@ -174,6 +147,7 @@ def get_channel_controls_settings(channel_name):

return list_controls, dict_settings

def get_lang(channel_name):
channel = __import__('channels.%s' % channel_name, fromlist=["channels.%s" % channel_name])
list_language = [config.get_localized_string(70522)]
@@ -199,16 +173,17 @@ def get_lang(channel_name):
list_language.append(lang)
return list_language

def get_default_settings(channel_name):
import filetools
from core import filetools
default_path = filetools.join(config.get_runtime_path(), 'default_channel_settings' + '.json')
default_file = jsontools.load(filetools.read(default_path))

channel_path = filetools.join(config.get_runtime_path(),'channels',channel_name + '.json')
adult_path = filetools.join(config.get_runtime_path(),'channels', 'porn', channel_name + '.json')
channel_path = filetools.join(config.get_runtime_path(), 'channels', channel_name + '.json')
adult_path = filetools.join(config.get_runtime_path(), 'channels', 'porn', channel_name + '.json')

# from core.support import dbg; dbg()
if os.path.exists(channel_path) or os.path.exists(adult_path):
if filetools.exists(channel_path) or filetools.exists(adult_path):
default_controls = default_file['settings']
default_controls_renumber = default_file['renumber']
channel_json = get_channel_json(channel_name)
@@ -217,33 +192,43 @@ def get_default_settings(channel_name):
channel_language = channel_json['language']
channel_controls = channel_json['settings']
categories = channel_json['categories']
not_active = channel_json['not_active'] if channel_json.has_key('not_active') else []
default_off = channel_json['default_off'] if channel_json.has_key('default_off') else []
not_active = channel_json['not_active'] if 'not_active' in channel_json else []
default_off = channel_json['default_off'] if 'default_off' in channel_json else []

# Apply default configurations if they do not exist
for control in default_controls:
if control['id'] not in str(channel_controls):
if 'include_in_newest' in control['id'] and 'include_in_newest' not in not_active and control['id'] not in not_active:
if 'include_in_newest' in control['id'] and 'include_in_newest' not in not_active and control[
'id'] not in not_active:
label = control['id'].split('_')
label = label[-1]
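# e.g. 'include_in_newest_peliculas'.split('_')[-1] -> 'peliculas'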
if label == 'peliculas':
if 'movie' in categories:
control['label'] = config.get_localized_string(70727) + ' - ' + config.get_localized_string(30122)
control['default'] = False if ('include_in_newest' in default_off) or ('include_in_newest_peliculas' in default_off) else True
control['label'] = config.get_localized_string(70727) + ' - ' + config.get_localized_string(
30122)
control['default'] = False if ('include_in_newest' in default_off) or (
'include_in_newest_peliculas' in default_off) else True
channel_controls.append(control)
else: pass
else:
pass
elif label == 'series':
if 'tvshow' in categories:
control['label'] = config.get_localized_string(70727) + ' - ' + config.get_localized_string(30123)
control['default'] = False if ('include_in_newest' in default_off) or ('include_in_newest_series' in default_off) else True
control['label'] = config.get_localized_string(70727) + ' - ' + config.get_localized_string(
30123)
control['default'] = False if ('include_in_newest' in default_off) or (
'include_in_newest_series' in default_off) else True
channel_controls.append(control)
else: pass
else:
pass
elif label == 'anime':
if 'anime' in categories:
control['label'] = config.get_localized_string(70727) + ' - ' + config.get_localized_string(30124)
control['default'] = False if ('include_in_newest' in default_off) or ('include_in_newest_anime' in default_off) else True
control['label'] = config.get_localized_string(70727) + ' - ' + config.get_localized_string(
30124)
control['default'] = False if ('include_in_newest' in default_off) or (
'include_in_newest_anime' in default_off) else True
channel_controls.append(control)
else: pass
else:
pass

else:
control['label'] = config.get_localized_string(70727) + ' - ' + label.capitalize()
@@ -259,13 +244,15 @@ def get_default_settings(channel_name):
for control in default_controls_renumber:
if control['id'] not in str(channel_controls):
channel_controls.append(control)
else: pass
else:
pass
else:
return get_channel_json(channel_name).get('settings', list())
return channel_controls

def get_channel_setting(name, channel, default=None):
from core import filetools
"""
Retorna el valor de configuracion del parametro solicitado.

@@ -288,13 +275,15 @@ def get_channel_setting(name, channel, default=None):
@rtype: any

"""
file_settings = os.path.join(config.get_data_path(), "settings_channels", channel + "_data.json")
file_settings = filetools.join(config.get_data_path(), "settings_channels", channel + "_data.json")
dict_settings = {}
dict_file = {}
if os.path.exists(file_settings):
if channel not in ['trakt']: def_settings = get_default_settings(channel)

if filetools.exists(file_settings):
# Obtenemos configuracion guardada de ../settings/channel_data.json
try:
dict_file = jsontools.load(open(file_settings, "rb").read())
dict_file = jsontools.load(filetools.read(file_settings))
if isinstance(dict_file, dict) and 'settings' in dict_file:
dict_settings = dict_file['settings']
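# <channel>_data.json is expected to look like {"settings": {"include_in_global_search": true, ...}};
# only the inner "settings" dict is used from here on.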
except EnvironmentError:
@@ -313,9 +302,7 @@ def get_channel_setting(name, channel, default=None):
dict_file['settings'] = dict_settings
# Creamos el archivo ../settings/channel_data.json
json_data = jsontools.dump(dict_file)
try:
open(file_settings, "wb").write(json_data)
except EnvironmentError:
if not filetools.write(file_settings, json_data, silent=True):
logger.error("ERROR al salvar el archivo: %s" % file_settings)

# Devolvemos el valor del parametro local 'name' si existe, si no se devuelve default
@@ -323,7 +310,7 @@ def get_channel_setting(name, channel, default=None):

def set_channel_setting(name, value, channel):
import filetools
from core import filetools
"""
Fija el valor de configuracion del parametro indicado.

@@ -346,36 +333,22 @@ def set_channel_setting(name, value, channel):

"""
# Creamos la carpeta si no existe
if not os.path.exists(os.path.join(config.get_data_path(), "settings_channels")):
os.mkdir(os.path.join(config.get_data_path(), "settings_channels"))
if not filetools.exists(filetools.join(config.get_data_path(), "settings_channels")):
filetools.mkdir(filetools.join(config.get_data_path(), "settings_channels"))

file_settings = os.path.join(config.get_data_path(), "settings_channels", channel + "_data.json")
file_settings = filetools.join(config.get_data_path(), "settings_channels", channel + "_data.json")
dict_settings = {}
if channel not in ['trakt']: def_settings = get_default_settings(channel)

dict_file = None

if os.path.exists(file_settings):
if filetools.exists(file_settings):
# Obtenemos configuracion guardada de ../settings/channel_data.json
try:
dict_file = jsontools.load(open(file_settings, "r").read())
dict_file = jsontools.load(filetools.read(file_settings))
dict_settings = dict_file.get('settings', {})
except EnvironmentError:
logger.error("ERROR al leer el archivo: %s" % file_settings)

if os.path.isfile(filetools.join(config.get_runtime_path(), "channels", channel + ".json")):

# delete unused Settings
def_keys = []
del_keys = []
for key in def_settings:
def_keys.append(key['id'])
for key in dict_settings:
if key not in def_keys:
del_keys.append(key)
for key in del_keys:
del dict_settings[key]

dict_settings[name] = value

# comprobamos si existe dict_file y es un diccionario, sino lo creamos
@@ -385,10 +358,8 @@ def set_channel_setting(name, value, channel):
dict_file['settings'] = dict_settings

# Creamos el archivo ../settings/channel_data.json
try:
json_data = jsontools.dump(dict_file)
open(file_settings, "w").write(json_data)
except EnvironmentError:
json_data = jsontools.dump(dict_file)
if not filetools.write(file_settings, json_data, silent=True):
logger.error("ERROR al salvar el archivo: %s" % file_settings)
return None

@@ -20,8 +20,8 @@ metodos:
from __future__ import division
from future import standard_library
standard_library.install_aliases()
from future.builtins import range
from future.builtins import object
from builtins import range
from builtins import object
from past.utils import old_div
#from builtins import str
import sys
@@ -243,7 +243,7 @@ class Downloader(object):

# Abrimos en modo "a+" para que cree el archivo si no existe, luego en modo "r+b" para poder hacer seek()
self.file = filetools.file_open(filetools.join(self._path, self._filename), "a+", vfs=VFS)
if self.file: self.file.close()
if self.file: self.file.close()
self.file = filetools.file_open(filetools.join(self._path, self._filename), "r+b", vfs=VFS)
if not self.file:
return
@@ -258,7 +258,7 @@ class Downloader(object):
self.__get_download_info__()

try:
logger.info("Initialized Download: Parts: %s | Path: %s | Archive: %s | Size: %s" % \
logger.info("Descarga inicializada: Partes: %s | Ruta: %s | Archivo: %s | Tamaño: %s" % \
(str(len(self._download_info["parts"])), self._pathencode('utf-8'), \
self._filenameencode('utf-8'), str(self._download_info["size"])))
except:

@@ -110,6 +110,8 @@ def limpia_nombre_excepto_1(s):
stripped = ''.join(c for c in s if c in validchars)
# Convierte a iso
s = stripped.encode("iso-8859-1")
if PY3:
s = s.decode('utf-8')
return s

@@ -129,7 +131,7 @@ def getfilefromtitle(url, title):
logger.info("platform=" + plataforma)

# nombrefichero = xbmc.makeLegalFilename(title + url[-4:])
from . import scrapertools
from core import scrapertools

nombrefichero = title + scrapertools.get_filename_from_url(url)[-4:]
logger.info("filename=%s" % nombrefichero)
@@ -169,7 +171,10 @@ def downloadbest(video_urls, title, continuar=False):
for elemento in invertida:
# videotitle = elemento[0]
url = elemento[1]
logger.info("Downloading option " + title + " " + url.encode('ascii', 'ignore'))
if not PY3:
logger.info("Downloading option " + title + " " + url.encode('ascii', 'ignore'))
else:
logger.info("Downloading option " + title + " " + url.encode('ascii', 'ignore').decode('utf-8'))

# Calcula el fichero donde debe grabar
try:
@@ -621,7 +626,7 @@ def downloadfileGzipped(url, pathfichero):
break
except:
reintentos += 1
logger.info("ERROR in block download, retry %dd" % reintentos)
logger.info("ERROR in block download, retry %d" % reintentos)
for line in sys.exc_info():
logger.error("%s" % line)

@@ -660,7 +665,7 @@ def GetTitleFromFile(title):
# Imprime en el log lo que va a descartar
logger.info("title=" + title)
plataforma = config.get_system_platform()
logger.info("platform=" + plataforma)
logger.info("plataform=" + plataforma)

# nombrefichero = xbmc.makeLegalFilename(title + url[-4:])
nombrefichero = title
@@ -678,7 +683,7 @@ def downloadIfNotModifiedSince(url, timestamp):

# Convierte la fecha a GMT
fecha_formateada = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(timestamp))
logger.info("DateFormat=%s" % fecha_formateada)
logger.info("fechaFormateada=%s" % fecha_formateada)

# Comprueba si ha cambiado
inicio = time.clock()
@@ -700,11 +705,11 @@ def downloadIfNotModifiedSince(url, timestamp):
except urllib.error.URLError as e:
# Si devuelve 304 es que no ha cambiado
if hasattr(e, 'code'):
logger.info("HTTP response code: %d" % e.code)
logger.info("HTTP response code : %d" % e.code)
if e.code == 304:
logger.info("It has not changed")
updated = False
# Agarra los errores con codigo de respuesta del servidor externo solicitado
# Agarra los errores con codigo de respuesta del servidor externo solicitado
else:
for line in sys.exc_info():
logger.error("%s" % line)
@@ -814,6 +819,7 @@ def download_all_episodes(item, channel, first_episode="", preferred_server="vid

for mirror_item in mirrors_itemlist:
logger.info("mirror=" + mirror_item.title)

if "(Italiano)" in mirror_item.title:
idioma = "(Italiano)"
codigo_idioma = "it"
@@ -885,8 +891,8 @@ def download_all_episodes(item, channel, first_episode="", preferred_server="vid

def episodio_ya_descargado(show_title, episode_title):
from . import scrapertools
ficheros = os.listdir(".")
from core import scrapertools
ficheros = filetools.listdir(".")

for fichero in ficheros:
# logger.info("fichero="+fichero)

@@ -87,7 +87,7 @@ def encode(path, _samba=False):
if scrapertools.find_single_match(path, '(^\w+:\/\/)') or _samba:
path = path.encode("utf-8", "ignore")
else:
if fs_encoding:
if fs_encoding and not PY3:
path = path.encode(fs_encoding, "ignore")

return path
@@ -133,13 +133,13 @@ def read(path, linea_inicio=0, total_lineas=None, whence=0, silent=False, vfs=Tr
try:
linea_inicio = int(linea_inicio)
except:
logger.error('Read: ERROR de linea_inicio: %s' % str(linea_inicio))
logger.error('Read: Start_line ERROR: %s' % str(linea_inicio))
linea_inicio = 0
if total_lineas != None and not isinstance(total_lineas, int):
try:
total_lineas = int(total_lineas)
except:
logger.error('Read: ERROR de total_lineas: %s' % str(total_lineas))
logger.error('Read: ERROR of total_lineas: %s' % str(total_lineas))
total_lineas = None
if xbmc_vfs and vfs:
if not exists(path): return False
@@ -151,7 +151,7 @@ def read(path, linea_inicio=0, total_lineas=None, whence=0, silent=False, vfs=Tr
except:
return False
f.seek(linea_inicio, whence)
logger.debug('POSICIÓN de comienzo de lectura, tell(): %s' % f.seek(0, 1))
logger.debug('POSITION of beginning of reading,, tell(): %s' % f.seek(0, 1))
if total_lineas == None:
total_lineas = 0
data = f.read(total_lineas)
@@ -169,15 +169,15 @@ def read(path, linea_inicio=0, total_lineas=None, whence=0, silent=False, vfs=Tr
f.close()
except:
if not silent:
logger.error("ERROR al leer el archivo: %s" % path)
logger.error("ERROR reading file: %s" % path)
logger.error(traceback.format_exc())
return False

else:
if not PY3:
return "".join(data)
return unicode("".join(data))
else:
return b"".join(data)
return unicode(b"".join(data))

def write(path, data, mode="wb", silent=False, vfs=True):
@@ -226,20 +226,20 @@ def file_open(path, mode="r", silent=False, vfs=True):
if xbmc_vfs and vfs:
if 'r' in mode and '+' in mode:
mode = mode.replace('r', 'w').replace('+', '')
logger.debug('Open MODE cambiado a: %s' % mode)
logger.debug('Open MODE changed to: %s' % mode)
if 'a' in mode:
mode = mode.replace('a', 'w').replace('+', '')
logger.debug('Open MODE cambiado a: %s' % mode)
logger.debug('Open MODE changed to: %s' % mode)
return xbmcvfs.File(path, mode)
elif path.lower().startswith("smb://"):
return samba.smb_open(path, mode)
else:
return open(path, mode)
except:
logger.error("ERROR al abrir el archivo: %s, %s" % (path, mode))
logger.error("ERROR when opening file: %s, %s" % (path, mode))
if not silent:
logger.error(traceback.format_exc())
platformtools.dialog_notification("Error al abrir", path)
platformtools.dialog_notification("Error Opening", path)
return False

@@ -258,7 +258,7 @@ def file_stat(path, silent=False, vfs=True):
return xbmcvfs.Stat(path)
raise
except:
logger.error("File_Stat no soportado: %s" % path)
logger.error("File_Stat not supported: %s" % path)
if not silent:
logger.error(traceback.format_exc())
return False
@@ -283,9 +283,9 @@ def rename(path, new_name, silent=False, strict=False, vfs=True):
dest = encode(join(dirname(path_end), new_name))
result = xbmcvfs.rename(path, dest)
if not result and not strict:
logger.error("ERROR al RENOMBRAR el archivo: %s. Copiando y borrando" % path)
logger.error("ERROR RENAME file: %s. Copying and deleting" % path)
if not silent:
dialogo = platformtools.dialog_progress("Copiando archivo", "")
dialogo = platformtools.dialog_progress("Copying file", "")
result = xbmcvfs.copy(path, dest)
if not result:
return False
@@ -298,10 +298,10 @@ def rename(path, new_name, silent=False, strict=False, vfs=True):
new_name = encode(new_name, False)
os.rename(path, os.path.join(os.path.dirname(path), new_name))
except:
logger.error("ERROR al renombrar el archivo: %s" % path)
logger.error("ERROR when renaming the file: %s" % path)
if not silent:
logger.error(traceback.format_exc())
platformtools.dialog_notification("Error al renombrar", path)
platformtools.dialog_notification("Error renaming", path)
return False
else:
return True
@@ -324,9 +324,9 @@ def move(path, dest, silent=False, strict=False, vfs=True):
dest = encode(dest)
result = xbmcvfs.rename(path, dest)
if not result and not strict:
logger.error("ERROR al MOVER el archivo: %s. Copiando y borrando" % path)
logger.error("ERROR when MOVING the file: %s. Copying and deleting" % path)
if not silent:
dialogo = platformtools.dialog_progress("Copiando archivo", "")
dialogo = platformtools.dialog_progress("Copying file", "")
result = xbmcvfs.copy(path, dest)
if not result:
return False
@@ -349,7 +349,7 @@ def move(path, dest, silent=False, strict=False, vfs=True):
dialogo = platformtools.dialog_progress("Copiando archivo", "")
return copy(path, dest) == True and remove(path) == True
except:
logger.error("ERROR al mover el archivo: %s a %s" % (path, dest))
logger.error("ERROR when moving file: %s to %s" % (path, dest))
if not silent:
logger.error(traceback.format_exc())
return False
@@ -376,7 +376,7 @@ def copy(path, dest, silent=False, vfs=True):
if not silent:
dialogo = platformtools.dialog_progress("Copiando archivo", "")
return bool(xbmcvfs.copy(path, dest))

fo = file_open(path, "rb")
fd = file_open(dest, "wb")
if fo and fd:
@@ -398,7 +398,7 @@ def copy(path, dest, silent=False, vfs=True):
if not silent:
dialogo.close()
except:
logger.error("ERROR al copiar el archivo: %s" % path)
logger.error("ERROR when copying the file: %s" % path)
if not silent:
logger.error(traceback.format_exc())
return False
@@ -420,13 +420,13 @@ def exists(path, silent=False, vfs=True):
result = bool(xbmcvfs.exists(path))
if not result and not path.endswith('/') and not path.endswith('\\'):
result = bool(xbmcvfs.exists(join(path, ' ').rstrip()))
return result
return result
elif path.lower().startswith("smb://"):
return samba.exists(path)
else:
return os.path.exists(path)
except:
logger.error("ERROR al comprobar la ruta: %s" % path)
logger.error("ERROR when checking the path: %s" % path)
if not silent:
logger.error(traceback.format_exc())
return False
@@ -458,7 +458,7 @@ def isfile(path, silent=False, vfs=True):
else:
return os.path.isfile(path)
except:
logger.error("ERROR al comprobar el archivo: %s" % path)
logger.error("ERROR when checking file: %s" % path)
if not silent:
logger.error(traceback.format_exc())
return False
@@ -490,7 +490,7 @@ def isdir(path, silent=False, vfs=True):
else:
return os.path.isdir(path)
except:
logger.error("ERROR al comprobar el directorio: %s" % path)
logger.error("ERROR when checking the directory: %s" % path)
if not silent:
logger.error(traceback.format_exc())
return False
@@ -517,7 +517,7 @@ def getsize(path, silent=False, vfs=True):
else:
return os.path.getsize(path)
except:
logger.error("ERROR al obtener el tamaño: %s" % path)
logger.error("ERROR when getting the size: %s" % path)
if not silent:
logger.error(traceback.format_exc())
return long(0)
@@ -540,10 +540,10 @@ def remove(path, silent=False, vfs=True):
else:
os.remove(path)
except:
logger.error("ERROR al eliminar el archivo: %s" % path)
logger.error("ERROR deleting file: %s" % path)
if not silent:
logger.error(traceback.format_exc())
platformtools.dialog_notification("Error al eliminar el archivo", path)
platformtools.dialog_notification("ERROR deleting file", path)
return False
else:
return True
@@ -580,10 +580,10 @@ def rmdirtree(path, silent=False, vfs=True):
import shutil
shutil.rmtree(path, ignore_errors=True)
except:
logger.error("ERROR al eliminar el directorio: %s" % path)
logger.error("ERROR deleting directory: %s" % path)
if not silent:
logger.error(traceback.format_exc())
platformtools.dialog_notification("Error al eliminar el directorio", path)
platformtools.dialog_notification("ERROR deleting directory", path)
return False
else:
return not exists(path)
@@ -608,10 +608,10 @@ def rmdir(path, silent=False, vfs=True):
else:
os.rmdir(path)
except:
logger.error("ERROR al eliminar el directorio: %s" % path)
logger.error("ERROR deleting directory: %s" % path)
if not silent:
logger.error(traceback.format_exc())
platformtools.dialog_notification("Error al eliminar el directorio", path)
platformtools.dialog_notification("ERROR deleting directory", path)
return False
else:
return True
@@ -641,10 +641,10 @@ def mkdir(path, silent=False, vfs=True):
else:
os.mkdir(path)
except:
logger.error("ERROR al crear el directorio: %s" % path)
logger.error("ERROR when creating directory: %s" % path)
if not silent:
logger.error(traceback.format_exc())
platformtools.dialog_notification("Error al crear el directorio", path)
platformtools.dialog_notification("ERROR when creating directory", path)
return False
else:
return True
@@ -724,7 +724,7 @@ def listdir(path, silent=False, vfs=True):
else:
return decode(os.listdir(path))
except:
logger.error("ERROR al leer el directorio: %s" % path)
logger.error("ERROR when reading the directory: %s" % path)
if not silent:
logger.error(traceback.format_exc())
return False
@@ -740,14 +740,13 @@ def join(*paths):
list_path = []
if paths[0].startswith("/"):
list_path.append("")

for path in paths:
if path:
if xbmc_vfs:
if xbmc_vfs and type(path) != str:
path = encode(path)
list_path += path.replace("\\", "/").strip("/").split("/")

if scrapertools.find_single_match(paths[0], '(^\w+:\/\/)'):
if scrapertools.find_single_match(paths[0], r'(^\w+:\/\/)'):
return str("/".join(list_path))
else:
return str(os.sep.join(list_path))
|
||||
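The join() hunk above rebuilds paths segment by segment: every separator is normalized to "/", and only plain local paths fall back to os.sep. A minimal standalone sketch of the same idea (hypothetical helper, not part of the commit):

    import os
    import re

    def join_paths(*paths):
        # A leading "" keeps absolute local paths absolute after the join.
        list_path = []
        if paths[0].startswith("/"):
            list_path.append("")
        for path in paths:
            if path:
                list_path += path.replace("\\", "/").strip("/").split("/")
        # VFS URLs (smb://, nfs://, ...) must always be joined with "/";
        # local paths use the platform separator.
        if re.match(r'^\w+:\/\/', paths[0]):
            return "/".join(list_path)
        return os.sep.join(list_path)

    # join_paths("smb://server/share", "sub\\dir", "file.txt")
    # -> "smb://server/share/sub/dir/file.txt"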
@@ -812,8 +811,8 @@ def remove_tags(title):
return title_without_tags
else:
return title




def remove_smb_credential(path):
"""
devuelve el path sin contraseña/usuario para paths de SMB
@@ -823,10 +822,10 @@ def remove_smb_credential(path):
@rtype: str
"""
logger.info()


if not scrapertools.find_single_match(path, '(^\w+:\/\/)'):
return path


protocol = scrapertools.find_single_match(path, '(^\w+:\/\/)')
path_without_credentials = scrapertools.find_single_match(path, '^\w+:\/\/(?:[^;\n]+;)?(?:[^:@\n]+[:|@])?(?:[^@\n]+@)?(.*?$)')
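The credential-stripping regex above can be exercised on its own. A small sketch (assumes plain re instead of scrapertools):

    import re

    SMB_RE = r'^\w+:\/\/(?:[^;\n]+;)?(?:[^:@\n]+[:|@])?(?:[^@\n]+@)?(.*?$)'

    def strip_credentials(path):
        m = re.match(r'(^\w+:\/\/)', path)
        if not m:
            return path  # not a remote path, nothing to strip
        protocol = m.group(1)
        rest = re.match(SMB_RE, path).group(1)
        return protocol + rest

    # strip_credentials("smb://user:pass@192.168.1.10/share")
    # -> "smb://192.168.1.10/share"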
@@ -31,7 +31,7 @@ cookies_file = os.path.join(config.get_data_path(), "cookies.dat")
default_headers = dict()
default_headers["User-Agent"] = "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
default_headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"
default_headers["Accept-Language"] = "es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3"
default_headers["Accept-Language"] = "it-IT,it;q=0.8,en-US;q=0.5,en;q=0.3"
default_headers["Accept-Charset"] = "UTF-8"
default_headers["Accept-Encoding"] = "gzip"

@@ -255,13 +255,11 @@ def downloadpage(url, **opt):
domain = urlparse.urlparse(url).netloc
CF = False
if domain in ['www.guardaserie.media', 'casacinema.space', 'wstream.video', 'akvideo.stream', 'backin.net',
'dreamsub.stream', 'altadefinizione-nuovo.link', 'ilgeniodellostreaming.si', 'www.piratestreaming.gratis']:
'dreamsub.stream', 'altadefinizione-nuovo.link', 'ilgeniodellostreaming.si', 'www.piratestreaming.gratis',
'altadefinizione.style']:
from lib import cloudscraper
session = cloudscraper.create_scraper()
CF = True
elif opt.get('session', False):
session = opt['session'] # same session to speed up search
logger.info('same session')
else:
from lib import requests
session = requests.session()
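The hunk above picks one of three HTTP sessions: a cloudscraper session for Cloudflare-protected domains, a caller-supplied session reused across searches, or a fresh requests session. A minimal sketch of that selection (assumes the public requests/cloudscraper packages rather than the bundled lib copies):

    import requests
    try:
        import cloudscraper
    except ImportError:
        cloudscraper = None

    CF_DOMAINS = {'wstream.video', 'akvideo.stream'}  # illustrative subset

    def pick_session(domain, reuse=None):
        # Cloudflare domains need the challenge-solving scraper.
        if cloudscraper and domain in CF_DOMAINS:
            return cloudscraper.create_scraper()
        # Reusing a session keeps cookies and the TCP connection alive.
        if reuse is not None:
            return reuse
        return requests.session()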
@@ -360,6 +358,7 @@ def downloadpage(url, **opt):
timeout=opt['timeout'])
except Exception as e:
from lib import requests
req = requests.Response()
if not opt.get('ignore_response_code', False) and not proxy_data.get('stat', ''):
response['data'] = ''
response['sucess'] = False
@@ -371,7 +370,6 @@ def downloadpage(url, **opt):
show_infobox(info_dict)
return type('HTTPResponse', (), response)
else:
req = requests.Response()
req.status_code = str(e)

else:
@@ -384,6 +382,10 @@ def downloadpage(url, **opt):

response['data'] = req.content
response['url'] = req.url

if type(response['data']) != str:
response['data'] = response['data'].decode('UTF-8')

if not response['data']:
response['data'] = ''
try:

80
core/item.py
@@ -3,12 +3,23 @@
# Item is the object we use for representing data
# --------------------------------------------------------------------------------

#from builtins import str
from future.builtins import object
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

if PY3:
#from future import standard_library
#standard_library.install_aliases()
import urllib.parse as urllib # Es muy lento en PY2. En PY3 es nativo
from html.parser import HTMLParser
else:
import urllib # Usamos el nativo de PY2 que es más rápido
from HTMLParser import HTMLParser

import base64
import copy
import os
import urllib

from HTMLParser import HTMLParser

from core import jsontools as json

@@ -58,12 +69,12 @@ class InfoLabels(dict):
elif key == 'code':
code = []
# Añadir imdb_id al listado de codigos
if 'imdb_id' in super(InfoLabels, self).keys() and super(InfoLabels, self).__getitem__('imdb_id'):
if 'imdb_id' in list(super(InfoLabels, self).keys()) and super(InfoLabels, self).__getitem__('imdb_id'):
code.append(super(InfoLabels, self).__getitem__('imdb_id'))

# Completar con el resto de codigos
for scr in ['tmdb_id', 'tvdb_id', 'noscrap_id']:
if scr in super(InfoLabels, self).keys() and super(InfoLabels, self).__getitem__(scr):
if scr in list(super(InfoLabels, self).keys()) and super(InfoLabels, self).__getitem__(scr):
value = "%s%s" % (scr[:-2], super(InfoLabels, self).__getitem__(scr))
code.append(value)

@@ -78,21 +89,21 @@ class InfoLabels(dict):

elif key == 'mediatype':
# "list", "movie", "tvshow", "season", "episode"
if 'tvshowtitle' in super(InfoLabels, self).keys() \
if 'tvshowtitle' in list(super(InfoLabels, self).keys()) \
and super(InfoLabels, self).__getitem__('tvshowtitle') != "":
if 'episode' in super(InfoLabels, self).keys() and super(InfoLabels, self).__getitem__('episode') != "":
if 'episode' in list(super(InfoLabels, self).keys()) and super(InfoLabels, self).__getitem__('episode') != "":
return 'episode'

if 'episodeName' in super(InfoLabels, self).keys() \
if 'episodeName' in list(super(InfoLabels, self).keys()) \
and super(InfoLabels, self).__getitem__('episodeName') != "":
return 'episode'

if 'season' in super(InfoLabels, self).keys() and super(InfoLabels, self).__getitem__('season') != "":
if 'season' in list(super(InfoLabels, self).keys()) and super(InfoLabels, self).__getitem__('season') != "":
return 'season'
else:
return 'tvshow'

elif 'title' in super(InfoLabels, self).keys() and super(InfoLabels, self).__getitem__('title') != "":
elif 'title' in list(super(InfoLabels, self).keys()) and super(InfoLabels, self).__getitem__('title') != "":
return 'movie'

else:
@@ -104,7 +115,7 @@ class InfoLabels(dict):

def tostring(self, separador=', '):
ls = []
dic = dict(super(InfoLabels, self).items())
dic = dict(list(super(InfoLabels, self).items()))

for i in sorted(dic.items()):
i_str = str(i)[1:-1]
@@ -158,6 +169,7 @@ class Item(object):
Función llamada al modificar cualquier atributo del item, modifica algunos atributos en función de los datos
modificados.
"""
if PY3: name = self.toutf8(name)
value = self.toutf8(value)
if name == "__dict__":
for key in value:
@@ -313,9 +325,13 @@ class Item(object):
valor = dic[var].tostring(',\r\t\t')
else:
valor = dic[var].tostring()
elif PY3 and isinstance(dic[var], bytes):
valor = "'%s'" % dic[var].decode('utf-8')
else:
valor = str(dic[var])

if PY3 and isinstance(var, bytes):
var = var.decode('utf-8')
ls.append(var + "= " + valor)

return separator.join(ls)
@@ -327,12 +343,12 @@ class Item(object):

Uso: url = item.tourl()
"""
dump = json.dump(self.__dict__)
dump = json.dump(self.__dict__).encode("utf8")
# if empty dict
if not dump:
# set a str to avoid b64encode fails
dump = ""
return urllib.quote(base64.b64encode(dump))
dump = "".encode("utf8")
return str(urllib.quote(base64.b64encode(dump)))

def fromurl(self, url):
"""
@@ -367,6 +383,7 @@ class Item(object):
return self
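tourl() serializes the item as JSON, base64-encodes it and URL-quotes it, so an Item can travel inside a plugin:// URL; fromurl() reverses the steps. A standalone round-trip sketch (hypothetical, PY3-only, plain stdlib instead of core.jsontools):

    import base64
    import json
    from urllib.parse import quote, unquote

    def to_url(d):
        dump = json.dumps(d).encode("utf8")
        return quote(base64.b64encode(dump))

    def from_url(url):
        raw = base64.b64decode(unquote(url))
        return json.loads(raw.decode("utf8"))

    # d = {"channel": "example", "action": "findvideos"}
    # from_url(to_url(d)) == d  ->  True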
def tojson(self, path=""):
from core import filetools
"""
Crea un JSON a partir del item, para guardar archivos de favoritos, lista de descargas, etc...
Si se especifica un path, te lo guarda en la ruta especificada, si no, devuelve la cadena json
@@ -377,11 +394,13 @@ class Item(object):
@type path: str
"""
if path:
open(path, "wb").write(json.dump(self.__dict__))
#open(path, "wb").write(json.dump(self.__dict__))
res = filetools.write(path, json.dump(self.__dict__))
else:
return json.dump(self.__dict__)

def fromjson(self, json_item=None, path=""):
from core import filetools
"""
Genera un item a partir de un archivo JSON
Si se especifica un path, lee directamente el archivo, si no, lee la cadena de texto pasada.
@@ -394,8 +413,9 @@ class Item(object):
@type path: str
"""
if path:
if os.path.exists(path):
json_item = open(path, "rb").read()
if filetools.exists(path):
#json_item = open(path, "rb").read()
json_item = filetools.read(path)
else:
json_item = {}
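The two hunks above route all item persistence through filetools so that JSON files can also live on VFS paths (smb://, nfs://). A hedged sketch of the same save/load pair (hypothetical names, local filesystem only):

    import json
    import os

    def save_item(d, path):
        # filetools.write() equivalent for a plain local path.
        with open(path, "w") as f:
            f.write(json.dumps(d))

    def load_item(path):
        if os.path.exists(path):
            with open(path) as f:
                return json.loads(f.read())
        return {}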
@@ -436,6 +456,8 @@ class Item(object):
unicode_title = unicode(value, "utf8", "ignore")
return HTMLParser().unescape(unicode_title).encode("utf8")
except:
if PY3 and isinstance(value, bytes):
value = value.decode("utf8")
return value

def toutf8(self, *args):
@@ -447,13 +469,18 @@ class Item(object):
else:
value = self.__dict__

if type(value) == unicode:
return value.encode("utf8")
if isinstance(value, unicode):
value = value.encode("utf8")
if PY3: value = value.decode("utf8")
return value

elif type(value) == str:
elif not PY3 and isinstance(value, str):
return unicode(value, "utf8", "ignore").encode("utf8")

elif type(value) == list:
elif PY3 and isinstance(value, bytes):
return value.decode("utf8")

elif isinstance(value, list):
for x, key in enumerate(value):
value[x] = self.toutf8(value[x])
return value
@@ -461,11 +488,12 @@ class Item(object):
elif isinstance(value, dict):
newdct = {}
for key in value:
v = self.toutf8(value[key])
if type(key) == unicode:
key = key.encode("utf8")
value_unc = self.toutf8(value[key])
key_unc = self.toutf8(key)
#if isinstance(key, unicode):
# key = key.encode("utf8")

newdct[key] = v
newdct[key_unc] = value_unc

if len(args) > 0:
if isinstance(value, InfoLabels):

@@ -10,24 +10,28 @@ from platformcode import logger
try:
import json
except:
logger.info("json incluido en el interprete **NO** disponible")
logger.info("json included in the interpreter **NOT** available")

try:
import simplejson as json
except:
logger.info("simplejson incluido en el interprete **NO** disponible")
logger.info("simplejson included in the interpreter **NOT** available")
try:
from lib import simplejson as json
except:
logger.info("simplejson en el directorio lib **NO** disponible")
logger.error("No se ha encontrado un parser de JSON valido")
logger.info("simplejson in lib directory **NOT** available")
logger.error("A valid JSON parser was not found")
json = None
else:
logger.info("Usando simplejson en el directorio lib")
logger.info("Using simplejson in the lib directory")
else:
logger.info("Usando simplejson incluido en el interprete")
else:
logger.info("Usando json incluido en el interprete")
logger.info("Using simplejson included in the interpreter")
# ~ else:
# ~ logger.info("Usando json incluido en el interprete")

import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int


def load(*args, **kwargs):
@@ -37,7 +41,7 @@ def load(*args, **kwargs):
try:
value = json.loads(*args, **kwargs)
except:
logger.error("**NO** se ha podido cargar el JSON")
logger.error("**NOT** able to load the JSON")
logger.error(traceback.format_exc())
value = {}

@@ -46,12 +50,12 @@ def load(*args, **kwargs):

def dump(*args, **kwargs):
if not kwargs:
kwargs = {"indent": 4, "skipkeys": True, "sort_keys": True, "ensure_ascii": False}
kwargs = {"indent": 4, "skipkeys": True, "sort_keys": True, "ensure_ascii": True}

try:
value = json.dumps(*args, **kwargs)
except:
logger.error("**NO** se ha podido cargar el JSON")
logger.error("JSON could **NOT** be saved")
logger.error(traceback.format_exc())
value = ""
return value
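load() and dump() wrap the JSON parser so a malformed file degrades to an empty value instead of raising. A minimal standalone sketch of the same defensive wrappers (stdlib json, hypothetical logging):

    import json
    import logging

    def safe_load(text):
        try:
            return json.loads(text)
        except ValueError:
            logging.exception("could not load the JSON")
            return {}

    def safe_dump(obj, **kwargs):
        kwargs.setdefault("indent", 4)
        kwargs.setdefault("skipkeys", True)
        kwargs.setdefault("sort_keys", True)
        kwargs.setdefault("ensure_ascii", True)
        try:
            return json.dumps(obj, **kwargs)
        except (TypeError, ValueError):
            logging.exception("could not save the JSON")
            return ""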
@@ -59,11 +63,15 @@ def dump(*args, **kwargs):

def to_utf8(dct):
if isinstance(dct, dict):
return dict((to_utf8(key), to_utf8(value)) for key, value in dct.iteritems())
return dict((to_utf8(key), to_utf8(value)) for key, value in dct.items())
elif isinstance(dct, list):
return [to_utf8(element) for element in dct]
elif isinstance(dct, unicode):
return dct.encode('utf-8')
dct = dct.encode("utf8")
if PY3: dct = dct.decode("utf8")
return dct
elif PY3 and isinstance(dct, bytes):
return dct.decode('utf-8')
else:
return dct
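to_utf8() walks dicts and lists recursively and normalizes every string it finds. The same shape, sketched for PY3 only (bytes are decoded, everything else passes through):

    def normalize_utf8(obj):
        # Recurse into containers, decode raw bytes, leave the rest alone.
        if isinstance(obj, dict):
            return {normalize_utf8(k): normalize_utf8(v) for k, v in obj.items()}
        if isinstance(obj, list):
            return [normalize_utf8(e) for e in obj]
        if isinstance(obj, bytes):
            return obj.decode("utf-8")
        return obj

    # normalize_utf8({b"title": [b"caf\xc3\xa9", 1]}) -> {"title": ["café", 1]}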
@@ -124,18 +132,18 @@ def check_to_backup(data, fname, dict_data):
logger.info()

if not dict_data:
logger.error("Error al cargar el json del fichero %s" % fname)
logger.error("Error loading json from file %s" % fname)

if data != "":
# se crea un nuevo fichero
from core import filetools
title = filetools.write("%s.bk" % fname, data)
if title != "":
logger.error("Ha habido un error al guardar el fichero: %s.bk" % fname)
logger.error("There was an error saving the file: %s.bk" % fname)
else:
logger.debug("Se ha guardado una copia con el nombre: %s.bk" % fname)
logger.debug("A copy with the name has been saved: %s.bk" % fname)
else:
logger.debug("Está vacío el fichero: %s" % fname)
logger.debug("The file is empty: %s" % fname)


def update_node(dict_node, name_file, node, path=None):
@@ -175,18 +183,18 @@ def update_node(dict_node, name_file, node, path=None):
# es un dict
if dict_data:
if node in dict_data:
logger.debug(" existe el key %s" % node)
logger.debug(" the key exists %s" % node)
dict_data[node] = dict_node
else:
logger.debug(" NO existe el key %s" % node)
logger.debug(" The key does NOT exist %s" % node)
new_dict = {node: dict_node}
dict_data.update(new_dict)
else:
logger.debug(" NO es un dict")
logger.debug(" It is NOT a dict")
dict_data = {node: dict_node}
json_data = dump(dict_data)
result = filetools.write(fname, json_data)
except:
logger.error("No se ha podido actualizar %s" % fname)
logger.error("Could not update %s" % fname)

return result, json_data
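update_node() reads a JSON file, replaces (or creates) one top-level key and writes the file back. A compact sketch of that read-modify-write cycle (stdlib only, hypothetical helper name):

    import json

    def update_json_node(path, node, new_value):
        try:
            with open(path) as f:
                data = json.load(f)
        except (IOError, ValueError):
            data = {}
        if not isinstance(data, dict):
            data = {}
        data[node] = new_value  # replace or create the node
        with open(path, "w") as f:
            json.dump(data, f, indent=4, sort_keys=True)
        return data

    # update_json_node("channels.json", "url", "https://example.org")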
@@ -1,5 +1,10 @@
# -*- coding: utf-8 -*-

import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

#from builtins import str
from core.item import InfoLabels
from platformcode import config, logger
from platformcode import platformtools
@@ -46,7 +51,7 @@ def find_and_set_infoLabels(item):
try:
scraper = __import__('core.%s' % scraper_actual, fromlist=["core.%s" % scraper_actual])
except ImportError:
exec "import core." + scraper_actual + " as scraper"
exec("import core." + scraper_actual + " as scraper")
except:
import traceback
logger.error(traceback.format_exc())
@@ -99,9 +104,9 @@ def find_and_set_infoLabels(item):
return True
# raise

elif list_opciones_cuadro[index] in scrapers_disponibles.values():
elif list_opciones_cuadro[index] in list(scrapers_disponibles.values()):
# Obtener el nombre del modulo del scraper
for k, v in scrapers_disponibles.items():
for k, v in list(scrapers_disponibles.items()):
if list_opciones_cuadro[index] == v:
if scrapers_disponibles[scraper_actual] not in list_opciones_cuadro:
list_opciones_cuadro.append(scrapers_disponibles[scraper_actual])
@@ -111,7 +116,7 @@ def find_and_set_infoLabels(item):
scraper = None
scraper = __import__('core.%s' % scraper_actual, fromlist=["core.%s" % scraper_actual])
except ImportError:
exec "import core." + scraper_actual + " as scraper_module"
exec("import core." + scraper_actual + " as scraper_module")
break

logger.error("Error al importar el modulo scraper %s" % scraper_actual)
@@ -175,7 +180,7 @@ def cuadro_completar(item):

if not dict_default[c[0]] or dict_default[c[0]] == 'None' or dict_default[c[0]] == 0:
dict_default[c[0]] = ''
elif isinstance(dict_default[c[0]], (int, float, long)):
elif isinstance(dict_default[c[0]], (int, float)) or (not PY3 and isinstance(dict_default[c[0]], (int, float, long))):
# Si es numerico lo convertimos en str
dict_default[c[0]] = str(dict_default[c[0]])

@@ -204,7 +209,7 @@ def callback_cuadro_completar(item, dict_values):
if dict_values.get("title", None):
# Adaptar dict_values a infoLabels validos
dict_values['mediatype'] = ['movie', 'tvshow'][dict_values['mediatype']]
for k, v in dict_values.items():
for k, v in list(dict_values.items()):
if k in dict_default and dict_default[k] == dict_values[k]:
del dict_values[k]


@@ -1,17 +1,36 @@
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------------------
# Scraper tools v2 for reading and processing web elements
# Scraper tools for reading and processing web elements
# --------------------------------------------------------------------------------

#from future import standard_library
#standard_library.install_aliases()
#from builtins import str
#from builtins import chr
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

import re
import time

import urlparse

# from core import httptools
from core.entities import html5
from platformcode import logger


# def get_header_from_response(url, header_to_get="", post=None, headers=None):
#     header_to_get = header_to_get.lower()
#     response = httptools.downloadpage(url, post=post, headers=headers, only_headers=True)
#     return response.headers.get(header_to_get)


# def read_body_and_headers(url, post=None, headers=None, follow_redirects=False, timeout=None):
#     response = httptools.downloadpage(url, post=post, headers=headers, follow_redirects=follow_redirects,
#                                       timeout=timeout)
#     return response.data, response.headers


def printMatches(matches):
i = 0
for match in matches:
@@ -89,7 +108,10 @@ def unescape(text):
else:
# named entity
try:
import htmlentitydefs
if PY3:
import html.entities as htmlentitydefs
else:
import htmlentitydefs
text = unichr(htmlentitydefs.name2codepoint[text[1:-1]]).encode("utf-8")
except KeyError:
logger.error("keyerror")
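The hunk above resolves named HTML entities through html.entities on PY3 and htmlentitydefs on PY2. A PY3-only sketch of the lookup:

    from html.entities import name2codepoint

    def resolve_entity(entity):
        # entity comes in as e.g. "&ntilde;"
        try:
            return chr(name2codepoint[entity[1:-1]])
        except KeyError:
            return entity  # unknown entity, leave it untouched

    # resolve_entity("&ntilde;") -> "ñ"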
@@ -103,6 +125,50 @@ def unescape(text):
# Convierte los codigos html "&ntilde;" y lo reemplaza por "ñ" caracter unicode utf-8


# def decodeHtmlentities(string):
#     string = entitiesfix(string)
#     entity_re = re.compile("&(#?)(\d{1,5}|\w{1,8});")

#     def substitute_entity(match):
#         if PY3:
#             from html.entities import name2codepoint as n2cp
#         else:
#             from htmlentitydefs import name2codepoint as n2cp
#         ent = match.group(2)
#         if match.group(1) == "#":
#             return unichr(int(ent)).encode('utf-8')
#         else:
#             cp = n2cp.get(ent)

#             if cp:
#                 return unichr(cp).encode('utf-8')
#             else:
#                 return match.group()

#     return entity_re.subn(substitute_entity, string)[0]


# def entitiesfix(string):
#     # Las entidades comienzan siempre con el símbolo & , y terminan con un punto y coma ( ; ).
#     string = string.replace("&aacute;", "á")
#     string = string.replace("&eacute;", "é")
#     string = string.replace("&iacute;", "í")
#     string = string.replace("&oacute;", "ó")
#     string = string.replace("&uacute;", "ú")
#     string = string.replace("&Aacute;", "Á")
#     string = string.replace("&Eacute;", "É")
#     string = string.replace("&Iacute;", "Í")
#     string = string.replace("&Oacute;", "Ó")
#     string = string.replace("&Uacute;", "Ú")
#     string = string.replace("&uuml;", "ü")
#     string = string.replace("&Uuml;", "Ü")
#     string = string.replace("&ntilde;", "ñ")
#     string = string.replace("&iquest;", "¿")
#     string = string.replace("&iexcl;", "¡")
#     string = string.replace(";;", ";")
#     return string


def htmlclean(cadena):
cadena = re.compile("<!--.*?-->", re.DOTALL).sub("", cadena)

@@ -292,8 +358,12 @@ def remove_show_from_title(title, show):
return title


# scrapertools.get_filename_from_url(media_url)[-4:]
def get_filename_from_url(url):
if PY3:
import urllib.parse as urlparse # Es muy lento en PY2. En PY3 es nativo
else:
import urlparse # Usamos el nativo de PY2 que es más rápido

parsed_url = urlparse.urlparse(url)
try:
filename = parsed_url.path
@@ -311,6 +381,11 @@ def get_filename_from_url(url):


def get_domain_from_url(url):
if PY3:
import urllib.parse as urlparse # Es muy lento en PY2. En PY3 es nativo
else:
import urlparse # Usamos el nativo de PY2 que es más rápido

parsed_url = urlparse.urlparse(url)
try:
filename = parsed_url.netloc
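Both helpers above lean on urlparse; the PY3 branch simply aliases urllib.parse to the old name. A PY3-only sketch of the two lookups:

    from urllib.parse import urlparse

    def filename_from_url(url):
        # Last path component, e.g. ".../video.mp4" -> "video.mp4"
        return urlparse(url).path.rsplit("/", 1)[-1]

    def domain_from_url(url):
        return urlparse(url).netloc

    # filename_from_url("https://example.org/media/video.mp4") -> "video.mp4"
    # domain_from_url("https://example.org/media/video.mp4") -> "example.org"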
@@ -3,18 +3,32 @@
# Server management
# --------------------------------------------------------------------------------

import os
from __future__ import division
from __future__ import absolute_import
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

if PY3:
#from future import standard_library
#standard_library.install_aliases()
import urllib.parse as urlparse # Es muy lento en PY2. En PY3 es nativo
else:
import urlparse # Usamos el nativo de PY2 que es más rápido

from future.builtins import range
from past.utils import old_div

import datetime
import re
import time

import filetools
import urlparse

from core import filetools
from core import httptools
from core import jsontools
from core.item import Item
from platformcode import config, logger
from platformcode import platformtools
# from servers.decrypters import zcrypt
from lib import unshortenit

dict_servers_parameters = {}
@@ -80,7 +94,7 @@ def get_servers_itemlist(itemlist, fnc=None, sort=False):
@type sort: bool
"""
# Recorre los servidores
for serverid in get_servers_list().keys():
for serverid in list(get_servers_list().keys()):
server_parameters = get_server_parameters(serverid)

# Recorre los patrones
@@ -105,18 +119,18 @@ def get_servers_itemlist(itemlist, fnc=None, sort=False):
item.url = url

# Eliminamos los servidores desactivados
itemlist = filter(lambda i: not i.server or is_server_enabled(i.server), itemlist)
#itemlist = filter(lambda i: not i.server or is_server_enabled(i.server), itemlist)
# Filtrar si es necesario
itemlist = filter_servers(itemlist)

for item in itemlist:
# Asignamos "directo" en caso de que el server no se encuentre en pelisalcarta
# Asignamos "directo" en caso de que el server no se encuentre en Alfa
if not item.server and item.url:
item.server = 'directo'
item.server = "directo"

if fnc:
item.title = fnc(item)

# Filtrar si es necesario
itemlist = filter_servers(itemlist)

# Ordenar segun favoriteslist si es necesario
if sort:
@@ -137,7 +151,8 @@ def findvideos(data, skip=False):
logger.info()
devuelve = []
skip = int(skip)
servers_list = get_servers_list().keys()
servers_list = list(get_servers_list().keys())


# Ordenar segun favoriteslist si es necesario
servers_list = sort_servers(servers_list)
@@ -145,8 +160,8 @@ def findvideos(data, skip=False):

# Ejecuta el findvideos en cada servidor activo
for serverid in servers_list:
if not is_server_enabled(serverid):
continue
'''if not is_server_enabled(serverid):
continue'''
if config.get_setting("filter_servers") == True and config.get_setting("black_list", server=serverid):
is_filter_servers = True
continue
@@ -167,6 +182,8 @@ def findvideosbyserver(data, serverid):
return []

server_parameters = get_server_parameters(serverid)
if not server_parameters["active"]:
return []
devuelve = []
if "find_videos" in server_parameters:
# Recorre los patrones
@@ -229,6 +246,8 @@ def resolve_video_urls_for_playing(server, url, video_password="", muestra_dialo

# Si el vídeo es "directo" o "local", no hay que buscar más
if server == "directo" or server == "local":
if isinstance(video_password, list):
return video_password, len(video_password) > 0, "<br/>".join(error_messages)
logger.info("Server: %s, la url es la buena" % server)
video_urls.append(["%s [%s]" % (urlparse.urlparse(url)[2][-4:], server), url])

@@ -309,7 +328,7 @@ def resolve_video_urls_for_playing(server, url, video_password="", muestra_dialo

# Muestra el progreso
if muestra_dialogo:
progreso.update((100 / len(opciones)) * opciones.index(opcion), config.get_localized_string(70180) % server_name)
progreso.update((old_div(100, len(opciones))) * opciones.index(opcion), config.get_localized_string(70180) % server_name)

# Modo free
if opcion == "free":
@@ -377,7 +396,7 @@ def get_server_name(serverid):
serverid = serverid.lower().split(".")[0]

# Obtenemos el listado de servers
server_list = get_servers_list().keys()
server_list = list(get_servers_list().keys())

# Si el nombre está en la lista
if serverid in server_list:
@@ -445,25 +464,25 @@ def get_server_parameters(server):
if server not in dict_servers_parameters:
try:
# Servers
if os.path.isfile(os.path.join(config.get_runtime_path(), "servers", server + ".json")):
path = os.path.join(config.get_runtime_path(), "servers", server + ".json")
if filetools.isfile(filetools.join(config.get_runtime_path(), "servers", server + ".json")):
path = filetools.join(config.get_runtime_path(), "servers", server + ".json")

# Debriders
elif os.path.isfile(os.path.join(config.get_runtime_path(), "servers", "debriders", server + ".json")):
path = os.path.join(config.get_runtime_path(), "servers", "debriders", server + ".json")
elif filetools.isfile(filetools.join(config.get_runtime_path(), "servers", "debriders", server + ".json")):
path = filetools.join(config.get_runtime_path(), "servers", "debriders", server + ".json")
#
#Cuando no está bien definido el server en el canal (no existe conector), muestra error por no haber "path" y se tiene que revisar el canal
#
data = filetools.read(path)
dict_server = jsontools.load(data)
dict_server = jsontools.load(filetools.read(path))

# Imagenes: se admiten url y archivos locales dentro de "resources/images"
if dict_server.get("thumbnail") and "://" not in dict_server["thumbnail"]:
dict_server["thumbnail"] = os.path.join("https://raw.githubusercontent.com/kodiondemand/media/master/resources/servers", dict_server["thumbnail"])
dict_server["thumbnail"] = filetools.join(config.get_runtime_path(), "resources", "media",
"servers", dict_server["thumbnail"])
for k in ['premium', 'id']:
dict_server[k] = dict_server.get(k, list())

if type(dict_server[k]) == str:
if isinstance(dict_server[k], str):
dict_server[k] = [dict_server[k]]

if "find_videos" in dict_server:
@@ -497,7 +516,7 @@ def get_server_json(server_name):
server_json = jsontools.load(filetools.read(server_path))
# logger.info("server_json= %s" % server_json)

except Exception, ex:
except Exception as ex:
template = "An exception of type %s occured. Arguments:\n%r"
message = template % (type(ex).__name__, ex.args)
logger.error(" %s" % message)
@@ -554,16 +573,16 @@ def get_server_setting(name, server, default=None):

"""
# Creamos la carpeta si no existe
if not os.path.exists(os.path.join(config.get_data_path(), "settings_servers")):
os.mkdir(os.path.join(config.get_data_path(), "settings_servers"))
if not filetools.exists(filetools.join(config.get_data_path(), "settings_servers")):
filetools.mkdir(filetools.join(config.get_data_path(), "settings_servers"))

file_settings = os.path.join(config.get_data_path(), "settings_servers", server + "_data.json")
file_settings = filetools.join(config.get_data_path(), "settings_servers", server + "_data.json")
dict_settings = {}
dict_file = {}
if os.path.exists(file_settings):
if filetools.exists(file_settings):
# Obtenemos configuracion guardada de ../settings/channel_data.json
try:
dict_file = jsontools.load(open(file_settings, "rb").read())
dict_file = jsontools.load(filetools.read(file_settings))
if isinstance(dict_file, dict) and 'settings' in dict_file:
dict_settings = dict_file['settings']
except EnvironmentError:
@@ -580,10 +599,7 @@ def get_server_setting(name, server, default=None):
dict_settings = default_settings
dict_file['settings'] = dict_settings
# Creamos el archivo ../settings/channel_data.json
json_data = jsontools.dump(dict_file)
try:
open(file_settings, "wb").write(json_data)
except EnvironmentError:
if not filetools.write(file_settings, jsontools.dump(dict_file)):
logger.info("ERROR al salvar el archivo: %s" % file_settings)

# Devolvemos el valor del parametro local 'name' si existe, si no se devuelve default
@@ -592,18 +608,18 @@ def get_server_setting(name, server, default=None):

def set_server_setting(name, value, server):
# Creamos la carpeta si no existe
if not os.path.exists(os.path.join(config.get_data_path(), "settings_servers")):
os.mkdir(os.path.join(config.get_data_path(), "settings_servers"))
if not filetools.exists(filetools.join(config.get_data_path(), "settings_servers")):
filetools.mkdir(filetools.join(config.get_data_path(), "settings_servers"))

file_settings = os.path.join(config.get_data_path(), "settings_servers", server + "_data.json")
file_settings = filetools.join(config.get_data_path(), "settings_servers", server + "_data.json")
dict_settings = {}

dict_file = None

if os.path.exists(file_settings):
if filetools.exists(file_settings):
# Obtenemos configuracion guardada de ../settings/channel_data.json
try:
dict_file = jsontools.load(open(file_settings, "r").read())
dict_file = jsontools.load(filetools.read(file_settings))
dict_settings = dict_file.get('settings', {})
except EnvironmentError:
logger.info("ERROR al leer el archivo: %s" % file_settings)
@@ -617,10 +633,7 @@ def set_server_setting(name, value, server):
dict_file['settings'] = dict_settings

# Creamos el archivo ../settings/channel_data.json
try:
json_data = jsontools.dump(dict_file)
open(file_settings, "w").write(json_data)
except EnvironmentError:
if not filetools.write(file_settings, jsontools.dump(dict_file)):
logger.info("ERROR al salvar el archivo: %s" % file_settings)
return None

@@ -636,11 +649,10 @@ def get_servers_list():
@rtype: dict
"""
server_list = {}
for server in os.listdir(os.path.join(config.get_runtime_path(), "servers")):
for server in filetools.listdir(filetools.join(config.get_runtime_path(), "servers")):
if server.endswith(".json") and not server == "version.json":
server_parameters = get_server_parameters(server)
if server_parameters["active"] == True:
server_list[server.split(".")[0]] = server_parameters
server_list[server.split(".")[0]] = server_parameters

return server_list

@@ -654,7 +666,7 @@ def get_debriders_list():
@rtype: dict
"""
server_list = {}
for server in os.listdir(os.path.join(config.get_runtime_path(), "servers", "debriders")):
for server in filetools.listdir(filetools.join(config.get_runtime_path(), "servers", "debriders")):
if server.endswith(".json"):
server_parameters = get_server_parameters(server)
if server_parameters["active"] == True:
@@ -678,6 +690,7 @@ def sort_servers(servers_list):
else:
servers_list = sorted(servers_list,
key=lambda x: config.get_setting("favorites_servers_list", server=x) or 100)

return servers_list


@@ -689,18 +702,26 @@ def filter_servers(servers_list):
u objetos Item. En cuyo caso es necesario q tengan un atributo item.server del tipo str.
:return: Lista del mismo tipo de objetos que servers_list filtrada en funcion de la Lista Negra.
"""
#Eliminamos los inactivos
if servers_list:
servers_list = [i for i in servers_list if not i.server or is_server_enabled(i.server)]


if servers_list and config.get_setting('filter_servers'):
if isinstance(servers_list[0], Item):
servers_list_filter = filter(lambda x: not config.get_setting("black_list", server=x.server), servers_list)
servers_list_filter = [x for x in servers_list if not config.get_setting("black_list", server=x.server)]
else:
servers_list_filter = filter(lambda x: not config.get_setting("black_list", server=x), servers_list)
servers_list_filter = [x for x in servers_list if not config.get_setting("black_list", server=x)]

# Si no hay enlaces despues de filtrarlos
if servers_list_filter or not platformtools.dialog_yesno(config.get_localized_string(60000),
config.get_localized_string(60010),
config.get_localized_string(70281)):
servers_list = servers_list_filter


if config.get_setting("favorites_servers") == True:
servers_list = sort_servers(servers_list)

return servers_list
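filter_servers() moved from filter() to list comprehensions so the result stays a real list on Python 3, where filter() returns a lazy iterator that would break the later indexing and dialogs. A tiny sketch of the difference:

    servers = ["okru", "wstream", "upstream"]
    black_list = {"wstream"}

    filtered = filter(lambda s: s not in black_list, servers)
    # PY3: <filter object ...> -- filtered[0] would raise TypeError

    filtered = [s for s in servers if s not in black_list]
    # always a plain list: ["okru", "upstream"]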
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -715,21 +736,31 @@ def check_list_links(itemlist, numero='', timeout=3):
El parámetro timeout indica un tope de espera para descargar la página
"""
numero = ((int(numero) + 1) * 5) if numero != '' else 10
for it in itemlist:
if numero > 0 and it.server != '' and it.url != '':
verificacion = check_video_link(it.url, it.server, timeout)
it.title = verificacion + ' ' + it.title.strip()
logger.info('VERIFICATION= '+ verificacion)
it.alive = verificacion
numero -= 1
from lib.concurrent import futures
with futures.ThreadPoolExecutor() as executor:
checked = []
for it in itemlist:
if numero > 0 and it.server != '' and it.url != '':
checked.append(executor.submit(check_video_link, it, timeout))
numero -= 1
for link in futures.as_completed(checked):
res = link.result()
if res:
it = res[0]
verificacion = res[1]
it.title = verificacion + ' ' + it.title.strip()
logger.info('VERIFICATION= ' + verificacion)
it.alive = verificacion
return itemlist
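The rewrite above checks every link in parallel with a thread pool instead of one by one; each future returns an (item, status) pair so results can be applied as they complete. A self-contained sketch of the pattern (stdlib concurrent.futures, dummy check function):

    from concurrent.futures import ThreadPoolExecutor, as_completed

    def check_link(url, timeout=3):
        # placeholder for the real network probe
        return url, "Ok"

    urls = ["http://a.example", "http://b.example"]
    with ThreadPoolExecutor() as executor:
        pending = [executor.submit(check_link, u) for u in urls]
        for future in as_completed(pending):
            url, status = future.result()
            print(status, url)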
def check_video_link(url, server, timeout=3):
"""
Comprueba si el enlace a un video es valido y devuelve un string de 2 posiciones con la verificacion.
:param url, server: Link y servidor
:return: str(2) '??':No se ha podido comprobar. 'Ok':Parece que el link funciona. 'NO':Parece que no funciona.
def check_video_link(item, timeout=3):
"""
Comprueba si el enlace a un video es valido y devuelve un string de 2 posiciones con la verificacion.
:param url, server: Link y servidor
:return: str(2) '??':No se ha podido comprobar. 'Ok':Parece que el link funciona. 'NO':Parece que no funciona.
"""
url = item.url
server = item.server

NK = "[COLOR 0xFFF9B613][B]" + u"\u2022".encode('utf-8') + "[/B][/COLOR]"
OK = "[COLOR 0xFF00C289][B]" + u"\u2022".encode('utf-8') + "[/B][/COLOR]"
@@ -744,7 +775,7 @@ def check_video_link(url, server, timeout=3):
except:
server_module = None
logger.info("[check_video_link] No se puede importar el servidor! %s" % server)
return NK
return item, NK

if hasattr(server_module, 'test_video_exists'):
ant_timeout = httptools.HTTPTOOLS_DEFAULT_DOWNLOAD_TIMEOUT
@@ -764,7 +795,7 @@ def check_video_link(url, server, timeout=3):

finally:
httptools.HTTPTOOLS_DEFAULT_DOWNLOAD_TIMEOUT = ant_timeout # Restaurar tiempo de descarga
return resultado
return item, resultado

logger.info("[check_video_link] No hay test_video_exists para servidor: %s" % server)
return NK
return item, NK

@@ -10,8 +10,10 @@ from concurrent import futures
try:
import urllib.request as urllib
import urllib.parse as urlparse
from urllib.parse import urlencode
except ImportError:
import urllib, urlparse
from urllib import urlencode

from channelselector import thumb
from core import httptools, scrapertools, servertools, tmdb, channeltools
@@ -157,7 +159,8 @@ def scrapeLang(scraped, lang, longtitle):
return language, longtitle

def cleantitle(title):
cleantitle = scrapertools.htmlclean(scrapertools.decodeHtmlentities(title).replace('"', "'").replace('×', 'x').replace('–', '-')).strip()
if type(title) != str: title.decode('UTF-8')
cleantitle = title.replace('"', "'").replace('×', 'x').replace('–', '-').strip()
return cleantitle

def scrapeBlock(item, args, block, patron, headers, action, pagination, debug, typeContentDict, typeActionDict, blacklist, search, pag, function, lang):
@@ -192,16 +195,17 @@ def scrapeBlock(item, args, block, patron, headers, action, pagination, debug, t
for i, match in enumerate(matches):
if pagination and (pag - 1) * pagination > i and not search: continue  # pagination
if pagination and i >= pag * pagination and not search: break  # pagination
listGroups = match.keys()
match = match.values()
# listGroups = match.keys()
# match = match.values()

if len(listGroups) > len(match):  # to fix a bug
match = list(match)
match.extend([''] * (len(listGroups) - len(match)))
# if len(listGroups) > len(match):  # to fix a bug
#     match = list(match)
#     match.extend([''] * (len(listGroups) - len(match)))

scraped = {}
for kk in known_keys:
val = match[listGroups.index(kk)] if kk in listGroups else ''
val = match[kk] if kk in match else ''
# val = match[listGroups.index(kk)] if kk in listGroups else ''
if val and (kk == "url" or kk == 'thumb') and 'http' not in val:
val = scrapertools.find_single_match(item.url, 'https?://[a-z0-9.-]+') + (val if val.startswith('/') else '/' + val)
scraped[kk] = val
@@ -294,8 +298,10 @@ def scrapeBlock(item, args, block, patron, headers, action, pagination, debug, t
other = scraped['other'] if scraped['other'] else ''
)

for lg in list(set(listGroups).difference(known_keys)):
it.__setattr__(lg, match[listGroups.index(lg)])
# for lg in list(set(listGroups).difference(known_keys)):
#     it.__setattr__(lg, match[listGroups.index(lg)])
for lg in list(set(match.keys()).difference(known_keys)):
it.__setattr__(lg, match[lg])

if 'itemHook' in args:
it = args['itemHook'](it)
@@ -367,7 +373,7 @@ def scrape(func):

log('PATRON= ', patron)
if not data:
page = httptools.downloadpage(item.url, headers=headers, ignore_response_code=True, session=item.session)
page = httptools.downloadpage(item.url, headers=headers, ignore_response_code=True)
# if url may be changed and channel has findhost to update
if (not page.data or scrapertools.get_domain_from_url(page.url) != scrapertools.get_domain_from_url(item.url)) and 'findhost' in func.__globals__:
host = func.__globals__['findhost']()
@@ -376,8 +382,7 @@ def scrape(func):
jsontools.update_node(host, func.__module__.split('.')[-1], 'url')
parse[1] = scrapertools.get_domain_from_url(host)
item.url = urlparse.urlunparse(parse)
page = httptools.downloadpage(item.url, headers=headers, ignore_response_code=True,
session=item.session)
page = httptools.downloadpage(item.url, headers=headers, ignore_response_code=True)
data = page.data.replace("'", '"')
data = re.sub('\n|\t', ' ', data)
data = re.sub(r'>\s+<', '> <', data)
@@ -468,7 +473,7 @@ def dooplay_get_links(item, host):
ret = []

for type, post, nume, title, server in matches:
postData = urllib.urlencode({
postData = urlencode({
"action": "doo_player_ajax",
"post": post,
"nume": nume,
@@ -582,7 +587,7 @@ def swzz_get_url(item):
elif 'https://stayonline.pro' in item.url:
id = item.url.split('/')[-2]
reqUrl = 'https://stayonline.pro/ajax/linkView.php'
p = urllib.urlencode({"id": id})
p = urlencode({"id": id})
data = httptools.downloadpage(reqUrl, post=p).data
try:
import json
@@ -699,9 +704,9 @@ def menu(func):
if global_search:
menuItem(itemlist, filename, config.get_localized_string(70741) % '… bold', 'search', host + dictUrl['search'])


autoplay.init(item.channel, list_servers, list_quality)
autoplay.show_option(item.channel, itemlist)
if 'get_channel_results' not in inspect.stack()[1][3]:
autoplay.init(item.channel, list_servers, list_quality)
autoplay.show_option(item.channel, itemlist)
channel_config(item, itemlist)

return itemlist
@@ -744,7 +749,7 @@ def typo(string, typography=''):
if '{}' in string:
string = '{' + re.sub(r'\s\{\}','',string) + '}'
if 'submenu' in string:
string = u"\u2022\u2022 ".encode('utf-8') + re.sub(r'\ssubmenu','',string)
string = "•• " + re.sub(r'\ssubmenu','',string)
if 'color' in string:
color = scrapertools.find_single_match(string, 'color ([a-z]+)')
if color == 'kod' or '': color = kod_color
@@ -758,7 +763,7 @@ def typo(string, typography=''):
if '--' in string:
string = ' - ' + re.sub(r'\s--','',string)
if 'bullet' in string:
string = '[B]' + u"\u2022".encode('utf-8') + '[/B] ' + re.sub(r'\sbullet','',string)
string = '[B]' + "•" + '[/B] ' + re.sub(r'\sbullet','',string)

return string

@@ -766,10 +771,33 @@ def typo(string, typography=''):
def match(item_url_string, **args):
'''
match is a function that combines httptools and scraper tools:

supports all httptools and the following args:
@param item_url_string: if it's an item download the page item.url, if it's a URL download the page, if it's a string pass it to scrapertools
@type item_url_string: item or str
@param string: force item_url_string to be a string
@type string: bool
@param patronBlock: find first element in patron
@type patronBlock: str
@param patronBlocks: find multiple matches
@type patronBlocks: str or list
@param debugBlock: regex101.com for debug
@type debugBlock: bool
@param patron: find multiple matches on block, blocks or data
@type patron: str or list
@param debug: regex101.com for debug
@type debug: bool

Returns an item with the following keys:
data: data of the webpage
block: first block
blocks: all the blocks
match: first match
matches: all the matches
'''
log(item_url_string)

matches = []
matches = blocks = []
url = None
# arguments allowed for scrape
patron = args.get('patron', None)
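The docstring above describes the match() helper. A hedged usage sketch of the documented call shape (hypothetical patterns; assumes the helper is imported as support, as in the KoD tree):

    from core import support

    # Download item.url, cut out the listing block, then extract
    # url/title pairs from it.
    m = support.match(item,
                      patronBlock=r'<ul class="posts">(.*?)</ul>',
                      patron=r'href="([^"]+)"[^>]*>([^<]+)')

    for url, title in m.matches:
        ...
    # m.data holds the raw page, m.block the first extracted block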
@@ -778,12 +806,15 @@ def match(item_url_string, **args):
debug = args.get('debug', False)
debugBlock = args.get('debugBlock', False)
string = args.get('string', False)

# remove scrape arguments
args = dict([(key, val) for key, val in args.items() if key not in ['patron', 'patronBlock', 'patronBlocks', 'debug', 'debugBlock', 'string']])
# dbg()

# check type of item_url_string
if type(item_url_string) == str:
if item_url_string.startswith('http') and not string: url = item_url_string
if string:
data = item_url_string
elif type(item_url_string) == str:
if item_url_string.startswith('http'): url = item_url_string
else : data = item_url_string
else:
# if item_url_string is an item use item.url as url
@@ -803,7 +834,9 @@ def match(item_url_string, **args):
if patronBlock:
blocks = [scrapertools.find_single_match(data, patronBlock)]
elif patronBlocks:
blocks = scrapertools.find_multiple_matches(data, patronBlock)
if type(patronBlock) == str: patron = [patronBlock]
for p in patronBlock:
blocks += scrapertools.find_multiple_matches(data, p)
else:
blocks = [data]

@@ -1010,7 +1043,7 @@ def server(item, data='', itemlist=[], headers='', AutoPlay=True, CheckLinks=Tru

item.title = typo(item.contentTitle.strip(),'bold') if item.contentType == 'movie' or (config.get_localized_string(30161) in item.title) else item.title

videoitem.plot= typo(videoitem.title, 'bold')
videoitem.plot= typo(videoitem.title, 'bold') + typo(videoitem.quality, '_ [] bold')
videoitem.title = item.title + (typo(videoitem.title, '_ color kod [] bold') if videoitem.title else "") + (typo(videoitem.quality, '_ color kod []') if videoitem.quality else "")
videoitem.fulltitle = item.fulltitle
videoitem.show = item.show
@@ -1036,7 +1069,7 @@ def controls(itemlist, item, AutoPlay=True, CheckLinks=True, down_load=True):
channel_node = autoplay_node.get(item.channel, {})
settings_node = channel_node.get('settings', {})
AP = get_setting('autoplay') or settings_node['active']
HS = config.get_setting('hide_servers') or (settings_node['hide_servers'] if settings_node.has_key('hide_server') else False)
HS = config.get_setting('hide_servers') or (settings_node['hide_servers'] if 'hide_server' in settings_node else False)

if CL and not AP:
if get_setting('checklinks', item.channel):

114
core/tmdb.py
@@ -1,10 +1,26 @@
# -*- coding: utf-8 -*-

#from future import standard_library
#standard_library.install_aliases()
#from builtins import str
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

if PY3:
import urllib.parse as urllib # Es muy lento en PY2. En PY3 es nativo
else:
import urllib # Usamos el nativo de PY2 que es más rápido

from future.builtins import range
from future.builtins import object

import ast

import copy
import re
import sqlite3
import time
import urllib

import xbmcaddon

@@ -37,8 +53,8 @@ def_lang = addon.getSetting('language')
# tmdb.set_infoLabels(item, seekTmdb = True)
#
# Obtener datos basicos de una pelicula:
# Antes de llamar al metodo set_infoLabels el titulo a buscar debe estar en item.fulltitle
# o en item.contentTitle y el año en item.infoLabels['year'].
# Antes de llamar al metodo set_infoLabels el titulo a buscar debe estar en item.contentTitle
# y el año en item.infoLabels['year'].
#
# Obtener datos basicos de una serie:
# Antes de llamar al metodo set_infoLabels el titulo a buscar debe estar en item.show o en
@@ -73,7 +89,6 @@ def_lang = addon.getSetting('language')
otmdb_global = None
fname = filetools.join(config.get_data_path(), "kod_db.sqlite")


def create_bd():
conn = sqlite3.connect(fname)
c = conn.cursor()
@@ -160,7 +175,7 @@ def cache_response(fn):
conn = sqlite3.connect(fname, timeout=15)
c = conn.cursor()
url = re.sub('&year=-', '', args[0])
# logger.error('la url %s' % url)
if PY3: url = str.encode(url)
url_base64 = base64.b64encode(url)
c.execute("SELECT response, added FROM tmdb_cache WHERE url=?", (url_base64,))
row = c.fetchone()
@@ -171,7 +186,9 @@ def cache_response(fn):
# si no se ha obtenido información, llamamos a la funcion
if not result:
result = fn(*args)
result_base64 = base64.b64encode(str(result))
result = str(result)
if PY3: result = str.encode(result)
result_base64 = base64.b64encode(result)
c.execute("INSERT OR REPLACE INTO tmdb_cache (url, response, added) VALUES (?, ?, ?)",
(url_base64, result_base64, time.time()))
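cache_response() memoizes TMDB lookups in SQLite, keyed on the base64 of the request URL; on PY3 both key and value must be encoded to bytes before b64encode, which is exactly what the hunk adds. A standalone sketch of the same caching decorator (stdlib only, hypothetical table name):

    import base64
    import sqlite3
    import time

    DB = "cache.sqlite"

    def cached(fn):
        def wrapper(url):
            conn = sqlite3.connect(DB, timeout=15)
            c = conn.cursor()
            c.execute("CREATE TABLE IF NOT EXISTS cache "
                      "(url TEXT PRIMARY KEY, response TEXT, added REAL)")
            key = base64.b64encode(url.encode())
            c.execute("SELECT response FROM cache WHERE url=?", (key,))
            row = c.fetchone()
            if row:
                result = base64.b64decode(row[0]).decode()
            else:
                result = fn(url)
                value = base64.b64encode(str(result).encode())
                c.execute("INSERT OR REPLACE INTO cache (url, response, added) "
                          "VALUES (?, ?, ?)", (key, value, time.time()))
                conn.commit()
            conn.close()
            return result
        return wrapper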
@@ -375,17 +392,19 @@ def set_infoLabels_item(item, seekTmdb=True, idioma_busqueda=def_lang, lock=None
|
||||
# ... buscar datos temporada
|
||||
item.infoLabels['mediatype'] = 'season'
|
||||
temporada = otmdb_global.get_temporada(numtemporada)
|
||||
if not isinstance(temporada, dict):
|
||||
temporada = ast.literal_eval(temporada.decode('utf-8'))
|
||||
|
||||
if temporada:
|
||||
# Actualizar datos
|
||||
__leer_datos(otmdb_global)
|
||||
item.infoLabels['title'] = temporada['name'] if temporada.has_key('name') else ''
|
||||
if temporada.has_key('overview') and temporada['overview']:
|
||||
item.infoLabels['title'] = temporada['name'] if 'name' in temporada else ''
|
||||
if 'overview' in temporada and temporada['overview']:
|
||||
item.infoLabels['plot'] = temporada['overview']
|
||||
if temporada.has_key('air_date') and temporada['air_date']:
|
||||
if 'air_date' in temporada and temporada['air_date']:
|
||||
date = temporada['air_date'].split('-')
|
||||
item.infoLabels['aired'] = date[2] + "/" + date[1] + "/" + date[0]
|
||||
if temporada.has_key('poster_path') and temporada['poster_path']:
|
||||
if 'poster_path' in temporada and temporada['poster_path']:
|
||||
item.infoLabels['poster_path'] = 'http://image.tmdb.org/t/p/original' + temporada['poster_path']
|
||||
item.thumbnail = item.infoLabels['poster_path']
|
||||
|
||||
@@ -445,12 +464,8 @@ def set_infoLabels_item(item, seekTmdb=True, idioma_busqueda=def_lang, lock=None
|
||||
# Busqueda de pelicula por titulo...
|
||||
if item.infoLabels['year'] or item.infoLabels['filtro']:
|
||||
# ...y año o filtro
|
||||
if item.contentTitle:
|
||||
titulo_buscado = item.contentTitle
|
||||
else:
|
||||
titulo_buscado = item.fulltitle
|
||||
|
||||
otmdb = Tmdb(texto_buscado=titulo_buscado, tipo=tipo_busqueda, idioma_busqueda=idioma_busqueda,
|
||||
searched_title = item.contentTitle if item.contentTitle else item.fulltitle
|
||||
otmdb = Tmdb(texto_buscado=searched_title, tipo=tipo_busqueda, idioma_busqueda=idioma_busqueda,
|
||||
filtro=item.infoLabels.get('filtro', {}), year=item.infoLabels['year'])
|
||||
if otmdb is not None:
|
||||
if otmdb.get_id() and config.get_setting("tmdb_plus_info", default=False):
|
||||
@@ -492,7 +507,7 @@ def find_and_set_infoLabels(item):
|
||||
title = title.replace(year, "").strip()
|
||||
item.infoLabels['year'] = year[1:-1]
|
||||
|
||||
if not item.infoLabels.get("tmdb_id"):
|
||||
if not item.infoLabels.get("tmdb_id") or not item.infoLabels.get("tmdb_id")[0].isdigit():
|
||||
if not item.infoLabels.get("imdb_id"):
|
||||
otmdb_global = Tmdb(texto_buscado=title, tipo=tipo_busqueda, year=item.infoLabels['year'])
|
||||
else:
|
||||
@@ -588,7 +603,6 @@ def get_genres(type):
return genres.dic_generos[lang]


# Helper class
class ResultDictDefault(dict):
# Python 2.4
@@ -606,7 +620,7 @@ class ResultDictDefault(dict):
return list()
elif key == 'images_posters':
posters = dict()
if 'images' in super(ResultDictDefault, self).keys() and \
if 'images' in list(super(ResultDictDefault, self).keys()) and \
'posters' in super(ResultDictDefault, self).__getitem__('images'):
posters = super(ResultDictDefault, self).__getitem__('images')['posters']
super(ResultDictDefault, self).__setattr__("images_posters", posters)
@@ -615,7 +629,7 @@ class ResultDictDefault(dict):

elif key == "images_backdrops":
backdrops = dict()
if 'images' in super(ResultDictDefault, self).keys() and \
if 'images' in list(super(ResultDictDefault, self).keys()) and \
'backdrops' in super(ResultDictDefault, self).__getitem__('images'):
backdrops = super(ResultDictDefault, self).__getitem__('images')['backdrops']
super(ResultDictDefault, self).__setattr__("images_backdrops", backdrops)
@@ -624,7 +638,7 @@ class ResultDictDefault(dict):

elif key == "images_profiles":
profiles = dict()
if 'images' in super(ResultDictDefault, self).keys() and \
if 'images' in list(super(ResultDictDefault, self).keys()) and \
'profiles' in super(ResultDictDefault, self).__getitem__('images'):
profiles = super(ResultDictDefault, self).__getitem__('images')['profiles']
super(ResultDictDefault, self).__setattr__("images_profiles", profiles)
@@ -640,7 +654,7 @@ class ResultDictDefault(dict):

def tostring(self, separador=',\n'):
ls = []
for i in super(ResultDictDefault, self).items():
for i in list(super(ResultDictDefault, self).items()):
i_str = str(i)[1:-1]
if isinstance(i[0], str):
old = i[0] + "',"
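The repeated keys()/items() to list(...) changes above are the standard Python 3 port: these methods now return lazy view objects instead of lists. The list() wrapper is only needed when the code indexes the result or mutates the dict while iterating; a plain membership test such as 'images' in d is already fine on both interpreters. A small self-contained sketch:

    d = {'images': {}, 'id': 1}

    keys = list(d.keys())         # materialize: safe to index, or to mutate d afterwards
    first = keys[0]

    for k, v in list(d.items()):  # snapshot, so deleting inside the loop is safe
        if not v:
            del d[k]
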
@@ -899,12 +913,16 @@ class Tmdb(object):
logger.info("[Tmdb.py] Filling in dictionary of genres")

resultado = cls.get_json(url)
if not isinstance(resultado, dict):
resultado = ast.literal_eval(resultado.decode('utf-8'))
lista_generos = resultado["genres"]

for i in lista_generos:
cls.dic_generos[idioma][tipo][str(i["id"])] = i["name"]
except:
logger.error("Error generating dictionaries")
import traceback
logger.error(traceback.format_exc())

def __by_id(self, source='tmdb'):

@@ -926,6 +944,8 @@ class Tmdb(object):

logger.info("[Tmdb.py] Searching %s:\n%s" % (buscando, url))
resultado = self.get_json(url)
if not isinstance(resultado, dict):
resultado = ast.literal_eval(resultado.decode('utf-8'))

if resultado:
if source != "tmdb":
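The isinstance(resultado, dict) guard added after every get_json() call covers the case where the HTTP layer hands back raw bytes instead of a parsed object; ast.literal_eval then parses the decoded text safely, since unlike eval it only accepts Python literals. A hedged sketch of the same idea as a helper (the byte payload is a stand-in, not real API output):

    import ast

    def ensure_dict(resultado):
        # get_json() usually returns a dict; on some platforms it may
        # return the raw utf-8 bytes of the response instead.
        if not isinstance(resultado, dict):
            resultado = ast.literal_eval(resultado.decode('utf-8'))
        return resultado

    print(ensure_dict(b"{'total_results': 0}"))  # -> {'total_results': 0}
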
@@ -942,14 +962,14 @@ class Tmdb(object):
else:
# The search returned no results
msg = "The search of %s gave no results" % buscando
# logger.debug(msg)
logger.debug(msg)

def __search(self, index_results=0, page=1):
self.result = ResultDictDefault()
results = []
total_results = 0
text_simple = self.busqueda_texto.lower()
text_quote = urllib.quote(text_simple)
total_results = 0
total_pages = 0
buscando = ""

@@ -957,15 +977,17 @@ class Tmdb(object):
# http://api.themoviedb.org/3/search/movie?api_key=a1ab8b8669da03637a4b98fa39c39228&query=superman&language=es
# &include_adult=false&page=1
url = ('http://api.themoviedb.org/3/search/%s?api_key=a1ab8b8669da03637a4b98fa39c39228&query=%s&language=%s'
'&include_adult=%s&page=%s' % (self.busqueda_tipo, text_quote.replace(' ', '%20'),
'&include_adult=%s&page=%s' % (self.busqueda_tipo, text_quote,
self.busqueda_idioma, self.busqueda_include_adult, page))

if self.busqueda_year:
url += '&year=%s' % self.busqueda_year

buscando = self.busqueda_texto.capitalize()
logger.info("[Tmdb.py] Searching %s on page %s:\n%s" % (buscando, page, url))
logger.info("[Tmdb.py] Buscando %s en pagina %s:\n%s" % (buscando, page, url))
resultado = self.get_json(url)
if not isinstance(resultado, dict):
resultado = ast.literal_eval(resultado.decode('utf-8'))

total_results = resultado.get("total_results", 0)
total_pages = resultado.get("total_pages", 0)
@@ -973,11 +995,13 @@ class Tmdb(object):
if total_results > 0:
results = resultado["results"]

if self.busqueda_filtro and results:
if self.busqueda_filtro and total_results > 1:
# TODO: document this part
for key, value in dict(self.busqueda_filtro).items():
for key, value in list(dict(self.busqueda_filtro).items()):
for r in results[:]:
if key not in r or r[key] != value:
if not r[key]:
r[key] = str(r[key])
if key not in r or value not in r[key]:
results.remove(r)
total_results -= 1
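The filter loop above walks results[:] (a shallow copy) precisely so that results.remove(r) is safe while iterating; removing items from the list you are currently iterating over would skip elements. A minimal sketch with made-up data:

    results = [{'year': '2019'}, {'year': '2020'}, {'year': '2019'}]

    for r in results[:]:          # iterate over a copy...
        if r.get('year') != '2019':
            results.remove(r)     # ...so mutating the original is safe

    print(results)                # -> [{'year': '2019'}, {'year': '2019'}]
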
@@ -1015,7 +1039,7 @@ class Tmdb(object):
type_search = self.discover.get('url', '')
if type_search:
params = []
for key, value in self.discover.items():
for key, value in list(self.discover.items()):
if key != "url":
params.append(key + "=" + str(value))
# http://api.themoviedb.org/3/discover/movie?api_key=a1ab8b8669da03637a4b98fa39c39228&query=superman&language=es
@@ -1024,6 +1048,8 @@ class Tmdb(object):

logger.info("[Tmdb.py] Searching %s:\n%s" % (type_search, url))
resultado = self.get_json(url)
if not isinstance(resultado, dict):
resultado = ast.literal_eval(resultado.decode('utf-8'))

total_results = resultado.get("total_results", -1)
total_pages = resultado.get("total_pages", 1)
@@ -1036,7 +1062,7 @@ class Tmdb(object):
results = resultado["results"]
if self.busqueda_filtro and results:
# TODO: document this part
for key, value in dict(self.busqueda_filtro).items():
for key, value in list(dict(self.busqueda_filtro).items()):
for r in results[:]:
if key not in r or r[key] != value:
results.remove(r)
@@ -1184,6 +1210,8 @@ class Tmdb(object):
(self.busqueda_tipo, self.busqueda_id, self.busqueda_idioma))

resultado = self.get_json(url)
if not isinstance(resultado, dict):
resultado = ast.literal_eval(resultado.decode('utf-8'))

if 'overview' in resultado:
self.result['overview'] = resultado['overview']
@@ -1316,6 +1344,8 @@ class Tmdb(object):
logger.info("[Tmdb.py] Searching " + buscando)
try:
self.temporada[numtemporada] = self.get_json(url)
if not isinstance(self.temporada[numtemporada], dict):
self.temporada[numtemporada] = ast.literal_eval(self.temporada[numtemporada].decode('utf-8'))

except:
logger.error("Unable to get the season")
@@ -1356,6 +1386,8 @@ class Tmdb(object):
return {}

temporada = self.get_temporada(numtemporada)
if not isinstance(temporada, dict):
temporada = ast.literal_eval(temporada.decode('utf-8'))
if not temporada:
# An error occurred
return {}
@@ -1388,9 +1420,9 @@ class Tmdb(object):
dic_aux = dict((i['id'], i) for i in ret_dic["temporada_crew"])
for e in temporada["episodes"]:
for crew in e['crew']:
if crew['id'] not in dic_aux.keys():
if crew['id'] not in list(dic_aux.keys()):
dic_aux[crew['id']] = crew
ret_dic["temporada_crew"] = dic_aux.values()
ret_dic["temporada_crew"] = list(dic_aux.values())

# Get the episode data if applicable
if capitulo != -1:
@@ -1429,6 +1461,8 @@ class Tmdb(object):
% (self.busqueda_tipo, self.result['id'], self.busqueda_idioma)

dict_videos = self.get_json(url)
if not isinstance(dict_videos, dict):
dict_videos = ast.literal_eval(dict_videos.decode('utf-8'))

if dict_videos['results']:
dict_videos['results'] = sorted(dict_videos['results'], key=lambda x: (x['type'], x['size']))
@@ -1440,6 +1474,8 @@ class Tmdb(object):
% (self.busqueda_tipo, self.result['id'])

dict_videos = self.get_json(url)
if not isinstance(dict_videos, dict):
dict_videos = ast.literal_eval(dict_videos.decode('utf-8'))

if dict_videos['results']:
dict_videos['results'] = sorted(dict_videos['results'], key=lambda x: (x['type'], x['size']))
@@ -1481,13 +1517,13 @@ class Tmdb(object):
if not origen:
origen = self.result

if 'credits' in origen.keys():
if 'credits' in list(origen.keys()):
dic_origen_credits = origen['credits']
origen['credits_cast'] = dic_origen_credits.get('cast', [])
origen['credits_crew'] = dic_origen_credits.get('crew', [])
del origen['credits']

items = origen.items()
items = list(origen.items())

# Season/episode information
if ret_infoLabels['season'] and self.temporada.get(ret_infoLabels['season']):
@@ -1496,14 +1532,14 @@ class Tmdb(object):
if ret_infoLabels['episode']:
episodio = ret_infoLabels['episode']

items.extend(self.get_episodio(ret_infoLabels['season'], episodio).items())
items.extend(list(self.get_episodio(ret_infoLabels['season'], episodio).items()))

# logger.info("ret_infoLabels" % ret_infoLabels)

for k, v in items:
if not v:
continue
elif type(v) == str:
elif isinstance(v, str):
v = re.sub(r"\n|\r|\t", "", v)
# fix
if v == "None":
@@ -1517,7 +1553,7 @@ class Tmdb(object):

elif k == 'runtime': #Duration for movies
ret_infoLabels['duration'] = int(v) * 60


elif k == 'episode_run_time': #Duration for episodes
try:
for v_alt in v: #It comes as a list (?!)
@@ -1572,7 +1608,7 @@ class Tmdb(object):

elif k == 'credits_cast' or k == 'temporada_cast' or k == 'episodio_guest_stars':
dic_aux = dict((name, character) for (name, character) in l_castandrole)
l_castandrole.extend([(p['name'], p['character']) for p in v if p['name'] not in dic_aux.keys()])
l_castandrole.extend([(p['name'], p['character']) for p in v if p['name'] not in list(dic_aux.keys())])

elif k == 'videos':
if not isinstance(v, list):
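The type(v) == str to isinstance(v, str) change in this hunk is more than style: isinstance also accepts subclasses and is the check both interpreters are optimized for. A tiny sketch:

    v = "Breaking Bad\n"

    if isinstance(v, str):            # preferred: also matches str subclasses
        v = v.replace("\n", "")

    class Title(str):                 # illustrative subclass
        pass

    print(isinstance(Title("x"), str))   # True
    print(type(Title("x")) == str)       # False
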
86
core/tvdb.py
@@ -7,9 +7,14 @@
# of the addon and also of Kodi.
# ------------------------------------------------------------

import re
from future import standard_library
standard_library.install_aliases()
#from builtins import str
from future.builtins import object

import urllib2
import urllib.request, urllib.error, urllib.parse

import re

from core import jsontools
from core import scrapertools
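This header is the python-future port pattern: standard_library.install_aliases() makes the Python 3 module layout (urllib.request, urllib.error, urllib.parse) importable on Python 2 as well, so a single code path serves both interpreters. A hedged, self-contained sketch of the idiom; it assumes the bundled python-future package is importable, and the URL is a placeholder:

    from future import standard_library
    standard_library.install_aliases()

    import urllib.request, urllib.error, urllib.parse

    params = urllib.parse.urlencode({'q': 'some title'})
    req = urllib.request.Request('https://example.org/search?' + params,
                                 headers={'User-Agent': 'KoD'})
    try:
        response = urllib.request.urlopen(req)
        html = response.read()
        response.close()
    except urllib.error.HTTPError as err:
        print('HTTP error %s' % err.code)
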
@@ -218,7 +223,7 @@ def set_infoLabels_item(item):
break

_next = list_episodes['links']['next']
if type(_next) == int:
if isinstance(_next, int):
page = _next
else:
break
@@ -330,7 +335,7 @@ def completar_codigos(item):
break


class Tvdb:
class Tvdb(object):
def __init__(self, **kwargs):

self.__check_token()
@@ -398,12 +403,12 @@ class Tvdb:
params = {"apikey": apikey}

try:
req = urllib2.Request(url, data=jsontools.dump(params), headers=DEFAULT_HEADERS)
response = urllib2.urlopen(req)
req = urllib.request.Request(url, data=jsontools.dump(params), headers=DEFAULT_HEADERS)
response = urllib.request.urlopen(req)
html = response.read()
response.close()

except Exception, ex:
except Exception as ex:
message = "An exception of type %s occurred. Arguments:\n%s" % (type(ex).__name__, repr(ex.args))
logger.error("error en: %s" % message)
@@ -426,12 +431,12 @@ class Tvdb:
url = HOST + "/refresh_token"

try:
req = urllib2.Request(url, headers=DEFAULT_HEADERS)
response = urllib2.urlopen(req)
req = urllib.request.Request(url, headers=DEFAULT_HEADERS)
response = urllib.request.urlopen(req)
html = response.read()
response.close()

except urllib2.HTTPError, err:
except urllib.error.HTTPError as err:
logger.error("err.code es %s" % err.code)
# a 401 error means the token has expired, so we have to call login again
if err.code == 401:
@@ -439,7 +444,7 @@ class Tvdb:
else:
raise

except Exception, ex:
except Exception as ex:
message = "An exception of type %s occurred. Arguments:\n%s" % (type(ex).__name__, repr(ex.args))
logger.error("error en: %s" % message)

@@ -525,19 +530,18 @@ class Tvdb:
params = {"airedSeason": "%s" % season, "airedEpisode": "%s" % episode}

try:
import urllib
params = urllib.urlencode(params)
params = urllib.parse.urlencode(params)

url = HOST + "/series/%s/episodes/query?%s" % (_id, params)
DEFAULT_HEADERS["Accept-Language"] = lang
logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS))

req = urllib2.Request(url, headers=DEFAULT_HEADERS)
response = urllib2.urlopen(req)
req = urllib.request.Request(url, headers=DEFAULT_HEADERS)
response = urllib.request.urlopen(req)
html = response.read()
response.close()

except Exception, ex:
except Exception as ex:
message = "An exception of type %s occurred. Arguments:\n%s" % (type(ex).__name__, repr(ex.args))
logger.error("error en: %s" % message)

@@ -595,12 +599,12 @@ class Tvdb:
url = HOST + "/series/%s/episodes?page=%s" % (_id, page)
logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS))

req = urllib2.Request(url, headers=DEFAULT_HEADERS)
response = urllib2.urlopen(req)
req = urllib.request.Request(url, headers=DEFAULT_HEADERS)
response = urllib.request.urlopen(req)
html = response.read()
response.close()

except Exception, ex:
except Exception as ex:
message = "An exception of type %s occurred. Arguments:\n%s" % (type(ex).__name__, repr(ex.args))
logger.error("error en: %s" % message)

@@ -682,13 +686,13 @@ class Tvdb:
try:
DEFAULT_HEADERS["Accept-Language"] = lang
logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS))
req = urllib2.Request(url, headers=DEFAULT_HEADERS)
response = urllib2.urlopen(req)
req = urllib.request.Request(url, headers=DEFAULT_HEADERS)
response = urllib.request.urlopen(req)
html = response.read()
response.close()

except Exception, ex:
if type(ex) == urllib2.HTTPError:
except Exception as ex:
if isinstance(ex, urllib.error.HTTPError):
logger.debug("code es %s " % ex.code)

message = "An exception of type %s occurred. Arguments:\n%s" % (type(ex).__name__, repr(ex.args))
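Note the corrected check above: the extracted diff read "isinstance(ex, urllib).HTTPError:", which cannot work because urllib is a module, not a class, and the second argument of isinstance must be the exception class itself. A sketch of the intended Python 3 form:

    import urllib.request, urllib.error

    try:
        urllib.request.urlopen('https://example.org/missing')
    except Exception as ex:
        if isinstance(ex, urllib.error.HTTPError):   # class, not module
            print('HTTP status %s' % ex.code)
        else:
            raise
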
@@ -741,20 +745,19 @@ class Tvdb:
elif zap2it_id:
params["zap2itId"] = zap2it_id

import urllib
params = urllib.urlencode(params)
params = urllib.parse.urlencode(params)

DEFAULT_HEADERS["Accept-Language"] = lang
url = HOST + "/search/series?%s" % params
logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS))

req = urllib2.Request(url, headers=DEFAULT_HEADERS)
response = urllib2.urlopen(req)
req = urllib.request.Request(url, headers=DEFAULT_HEADERS)
response = urllib.request.urlopen(req)
html = response.read()
response.close()

except Exception, ex:
if type(ex) == urllib2.HTTPError:
except Exception as ex:
if isinstance(ex, urllib.error.HTTPError):
logger.debug("code es %s " % ex.code)

message = "An exception of type %s occurred. Arguments:\n%s" % (type(ex).__name__, repr(ex.args))
@@ -835,15 +838,15 @@ class Tvdb:

try:
DEFAULT_HEADERS["Accept-Language"] = lang
req = urllib2.Request(url, headers=DEFAULT_HEADERS)
req = urllib.request.Request(url, headers=DEFAULT_HEADERS)
logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS))

response = urllib2.urlopen(req)
response = urllib.request.urlopen(req)
html = response.read()
response.close()

except Exception, ex:
if type(ex) == urllib2.HTTPError:
except Exception as ex:
if isinstance(ex, urllib.error.HTTPError):
logger.debug("code es %s " % ex.code)

message = "An exception of type %s occurred. Arguments:\n%s" % (type(ex).__name__, repr(ex.args))
@@ -905,18 +908,17 @@ class Tvdb:

try:

import urllib
params = urllib.urlencode(params)
params = urllib.parse.urlencode(params)
DEFAULT_HEADERS["Accept-Language"] = lang
url = HOST + "/series/%s/images/query?%s" % (_id, params)
logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS))

req = urllib2.Request(url, headers=DEFAULT_HEADERS)
response = urllib2.urlopen(req)
req = urllib.request.Request(url, headers=DEFAULT_HEADERS)
response = urllib.request.urlopen(req)
html = response.read()
response.close()

except Exception, ex:
except Exception as ex:
message = "An exception of type %s occurred. Arguments:\n%s" % (type(ex).__name__, repr(ex.args))
logger.error("error en: %s" % message)

@@ -946,8 +948,8 @@ class Tvdb:
DEFAULT_HEADERS["Accept-Language"] = lang
logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS))

req = urllib2.Request(url, headers=DEFAULT_HEADERS)
response = urllib2.urlopen(req)
req = urllib.request.Request(url, headers=DEFAULT_HEADERS)
response = urllib.request.urlopen(req)
html = response.read()
response.close()

@@ -1039,7 +1041,7 @@ class Tvdb:
# origen['credits_crew'] = dic_origen_credits.get('crew', [])
# del origen['credits']

items = origen.items()
items = list(origen.items())

for k, v in items:
if not v:
@@ -1118,7 +1120,7 @@ class Tvdb:

elif k == 'cast':
dic_aux = dict((name, character) for (name, character) in l_castandrole)
l_castandrole.extend([(p['name'], p['role']) for p in v if p['name'] not in dic_aux.keys()])
l_castandrole.extend([(p['name'], p['role']) for p in v if p['name'] not in list(dic_aux.keys())])

else:
logger.debug("Atributos no añadidos: %s=%s" % (k, v))

@@ -3,6 +3,11 @@
# Common Library Tools
# ------------------------------------------------------------

#from builtins import str
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

import errno
import math
import traceback
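This header defines a PY3 flag and re-creates the Python 2 names (unicode, unichr, long) on Python 3, so legacy branches keep compiling; call sites then branch once, as save_movie() does in the next hunk. A minimal sketch of the idiom (sanitize() is a hypothetical helper that mirrors the base_name logic, minus filetools.validate_path):

    import sys

    PY3 = False
    if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

    def sanitize(name):
        # On Py2 the value is a utf-8 byte string and must round-trip through
        # unicode(); on Py3 str is already text, so no re-encoding is needed.
        if not PY3:
            return unicode(name.replace('/', '-'), "utf8").encode("utf8")
        return name.replace('/', '-')
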
@@ -130,7 +135,10 @@ def save_movie(item):
else:
base_name = item.contentTitle

base_name = unicode(filetools.validate_path(base_name.replace('/', '-')), "utf8").encode("utf8")
if not PY3:
base_name = unicode(filetools.validate_path(base_name.replace('/', '-')), "utf8").encode("utf8")
else:
base_name = filetools.validate_path(base_name.replace('/', '-'))

if config.get_setting("lowerize_title", "videolibrary") == 0:
base_name = base_name.lower()
@@ -191,9 +199,12 @@ def save_movie(item):

# If the emergency-url option is enabled, it is added to the movie after the channel's Findvideos has run
try:
headers = {}
if item.headers:
headers = item.headers
channel = generictools.verify_channel(item.channel)
if config.get_setting("emergency_urls", channel) in [1, 3]:
item = emergency_urls(item, None, json_path)
item = emergency_urls(item, None, json_path, headers=headers)
if item_nfo.emergency_urls and not isinstance(item_nfo.emergency_urls, dict):
del item_nfo.emergency_urls
if not item_nfo.emergency_urls:
@@ -224,7 +235,7 @@ def save_movie(item):
return 0, 0, -1

def filter_list(episodelist, action=None, path=None):
if path: path = path.decode('utf8')
# if path: path = path.decode('utf8')
# import xbmc
# if xbmc.getCondVisibility('system.platform.windows') > 0: path = path.replace('smb:','').replace('/','\\')
channel_prefs = {}
@@ -397,17 +408,29 @@ def save_tvshow(item, episodelist):
return 0, 0, -1, path

_id = item.infoLabels['code'][0]
if not item.infoLabels['code'][0] or item.infoLabels['code'][0] == 'None':
if item.infoLabels['code'][1] and item.infoLabels['code'][1] != 'None':
_id = item.infoLabels['code'][1]
elif item.infoLabels['code'][2] and item.infoLabels['code'][2] != 'None':
_id = item.infoLabels['code'][2]
else:
logger.error("NO ENCONTRADO EN SCRAPER O NO TIENE code: " + item.url
+ ' / ' + item.infoLabels['code'])
return 0, 0, -1, path

if config.get_setting("original_title_folder", "videolibrary") == 1 and item.infoLabels['originaltitle']:
base_name = item.infoLabels[u'originaltitle']
base_name = item.infoLabels['originaltitle']
elif item.infoLabels['tvshowtitle']:
base_name = item.infoLabels[u'tvshowtitle']
base_name = item.infoLabels['tvshowtitle']
elif item.infoLabels['title']:
base_name = item.infoLabels[u'title']
base_name = item.infoLabels['title']
else:
base_name = u'%s' % item.contentSerieName
base_name = item.contentSerieName

base_name = unicode(filetools.validate_path(base_name.replace('/', '-')), "utf8").encode("utf8")
if not PY3:
base_name = unicode(filetools.validate_path(base_name.replace('/', '-')), "utf8").encode("utf8")
else:
base_name = filetools.validate_path(base_name.replace('/', '-'))

if config.get_setting("lowerize_title", "videolibrary") == 0:
base_name = base_name.lower()
@@ -415,7 +438,7 @@ def save_tvshow(item, episodelist):
for raiz, subcarpetas, ficheros in filetools.walk(TVSHOWS_PATH):
for c in subcarpetas:
code = scrapertools.find_single_match(c, '\[(.*?)\]')
if code and code in item.infoLabels['code']:
if code and code != 'None' and code in item.infoLabels['code']:
path = filetools.join(raiz, c)
_id = code
break
@@ -425,7 +448,7 @@ def save_tvshow(item, episodelist):
logger.info("Creating series directory: " + path)
try:
filetools.mkdir(path)
except OSError, exception:
except OSError as exception:
if exception.errno != errno.EEXIST:
raise

@@ -518,7 +541,7 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):
news_in_playcounts = {}

# List every file of the series up front, so we avoid checking one by one whether each exists
raiz, carpetas_series, ficheros = filetools.walk(path).next()
raiz, carpetas_series, ficheros = next(filetools.walk(path))
ficheros = [filetools.join(path, f) for f in ficheros]

nostrm_episodelist = []
@@ -550,7 +573,11 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):
tags = []
if config.get_setting("enable_filter", "videolibrary"):
tags = [x.strip() for x in config.get_setting("filters", "videolibrary").lower().split(",")]

for e in episodelist:
headers = {}
if e.headers:
headers = e.headers
if tags != [] and tags != None and any(tag in e.title.lower() for tag in tags):
continue

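Two portable idioms from this hunk, sketched with made-up values: iterator.next() no longer exists in Python 3, so the first walk() entry is taken with the builtin next(); and the user's comma-separated filter words are tested in one any() generator expression (os.walk stands in for the addon's filetools.walk):

    import os

    raiz, carpetas, ficheros = next(os.walk('.'))   # builtin next() works on Py2 and Py3

    tags = [x.strip() for x in "720p, vose".lower().split(",")]
    title = "Episode 1x02 [VOSE]"
    if tags and any(tag in title.lower() for tag in tags):
        print("skipped by filter")
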
@@ -567,31 +594,34 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):
if overwrite: #but only if the .json files are being overwritten
json_epi = Item().fromjson(filetools.read(json_path)) #Read the .json
if json_epi.emergency_urls: #if the emergency urls exist...
e.emergency_urls = json_epi.emergency_urls #... copy them
else: #and if not...
e = emergency_urls(e, channel, json_path) #... generate them
e.emergency_urls = json_epi.emergency_urls #... copy them
else: #and if not...
e = emergency_urls(e, channel, json_path, headers=headers) #... generate them
else:
e = emergency_urls(e, channel, json_path) #If the episode does not exist, generate the urls
if e.emergency_urls: #If we already have urls...
e = emergency_urls(e, channel, json_path, headers=headers) #If the episode does not exist, generate the urls
if e.emergency_urls: #If we already have urls...
emergency_urls_succ = True #... it is a success and we will mark the .nfo
elif emergency_urls_stat == 2 and e.contentType == 'episode': #Delete emergency urls?
elif emergency_urls_stat == 2 and e.contentType == 'episode': #Delete emergency urls?
if e.emergency_urls: del e.emergency_urls
emergency_urls_succ = True #... it is a success and we will mark the .nfo
elif emergency_urls_stat == 3 and e.contentType == 'episode': #Update emergency urls?
if not silent:
p_dialog.update(0, 'Cacheando enlaces y archivos .torrent...', e.title) #progress dialog
e = emergency_urls(e, channel, json_path) #generate the urls
if e.emergency_urls: #If we already have urls...
e = emergency_urls(e, channel, json_path, headers=headers) #generate the urls
if e.emergency_urls: #If we already have urls...
emergency_urls_succ = True #... it is a success and we will mark the .nfo

if not e.infoLabels["tmdb_id"] or (serie.infoLabels["tmdb_id"] and e.infoLabels["tmdb_id"] != serie.infoLabels["tmdb_id"]): #in multichannel series, the infolabels of...

if not e.infoLabels["tmdb_id"] or (serie.infoLabels["tmdb_id"] and e.infoLabels["tmdb_id"] != serie.infoLabels["tmdb_id"]): #in multichannel series, the infolabels of...
e.infoLabels = serie.infoLabels #... the current channel prevails, not the original one's
e.contentSeason, e.contentEpisodeNumber = season_episode.split("x")
if e.videolibray_emergency_urls:
del e.videolibray_emergency_urls
if e.channel_redir:
del e.channel_redir #... and the redirection marks are deleted
new_episodelist.append(e)
except:
if e.contentType == 'episode':
logger.error("Unable to save %s emergency urls in the video library" % e.contentTitle)
logger.error(traceback.format_exc())
continue

# There is no episode list, nothing to save
@@ -600,18 +630,35 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):
return 0, 0, 0

# float fix, because division misbehaves in python 2.x
t = float(100) / len(new_episodelist)
try:
t = float(100) / len(new_episodelist)
except:
t = 0

last_season_episode = ''
for i, e in enumerate(scraper.sort_episode_list(new_episodelist)):
if not silent:
p_dialog.update(int(math.ceil((i + 1) * t)), config.get_localized_string(60064), e.title)

high_sea = e.contentSeason
high_epi = e.contentEpisodeNumber
if scrapertools.find_single_match(e.title, '[a|A][l|L]\s*(\d+)'):
high_epi = int(scrapertools.find_single_match(e.title, 'al\s*(\d+)'))
max_sea = e.infoLabels["number_of_seasons"]
max_epi = 0
if e.infoLabels["number_of_seasons"] and (e.infoLabels["temporada_num_episodios"] or e.infoLabels["number_of_seasons"] == 1):
if e.infoLabels["number_of_seasons"] == 1 and e.infoLabels["number_of_episodes"]:
max_epi = e.infoLabels["number_of_episodes"]
else:
max_epi = e.infoLabels["temporada_num_episodios"]

season_episode = "%sx%s" % (e.contentSeason, str(e.contentEpisodeNumber).zfill(2))
strm_path = filetools.join(path, "%s.strm" % season_episode)
nfo_path = filetools.join(path, "%s.nfo" % season_episode)
json_path = filetools.join(path, ("%s [%s].json" % (season_episode, e.channel)).lower())

if season_episode in nostrm_episodelist:
logger.error('Error in the structure of the Video Library: Serie ' + serie.contentSerieName + ' ' + season_episode)
continue
strm_exists = strm_path in ficheros
nfo_exists = nfo_path in ficheros
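Each episode is materialized as three sibling files named after the SxEE pair, which is why the guard above can test existence with plain path membership. A sketch of the naming scheme; all values are made up, and os.path stands in for the addon's filetools:

    import os.path

    season, episode, channel = 2, 3, "eurostreaming"
    season_episode = "%sx%s" % (season, str(episode).zfill(2))      # "2x03"

    show_dir = "/library/Serie [tt123]"
    strm_path = os.path.join(show_dir, "%s.strm" % season_episode)
    nfo_path = os.path.join(show_dir, "%s.nfo" % season_episode)
    json_path = os.path.join(show_dir,
                             ("%s [%s].json" % (season_episode, channel)).lower())
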
@@ -659,8 +706,10 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):
if not item_nfo:
head_nfo, item_nfo = read_nfo(nfo_path)

if not e.infoLabels["tmdb_id"] or (item_nfo.infoLabels["tmdb_id"] and e.infoLabels["tmdb_id"] != item_nfo.infoLabels["tmdb_id"]): #in multichannel series, the infolabels of...
e.infoLabels = item_nfo.infoLabels #... the current channel prevails, not the original one's
# In multichannel series, the current channel's infolabels prevails over the original one's
if not e.infoLabels["tmdb_id"] or (item_nfo.infoLabels["tmdb_id"] \
and e.infoLabels["tmdb_id"] != item_nfo.infoLabels["tmdb_id"]):
e.infoLabels = item_nfo.infoLabels

if filetools.write(json_path, e.tojson()):
if not json_exists:
@@ -688,10 +737,12 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):
if not silent and p_dialog.iscanceled():
break

#logger.debug('high_sea x high_epi: %sx%s' % (str(high_sea), str(high_epi)))
#logger.debug('max_sea x max_epi: %sx%s' % (str(max_sea), str(max_epi)))
if not silent:
p_dialog.close()

if news_in_playcounts:
if news_in_playcounts or emergency_urls_succ or serie.infoLabels["status"] == "Ended" or serie.infoLabels["status"] == "Canceled":
# If there are new episodes, mark them as unwatched in tvshow.nfo ...
tvshow_path = filetools.join(path, "tvshow.nfo")
try:
@@ -703,16 +754,27 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):
if emergency_urls_succ:
if tvshow_item.emergency_urls and not isinstance(tvshow_item.emergency_urls, dict):
del tvshow_item.emergency_urls
if emergency_urls_stat in [1, 3]: #Save/update links operation
if emergency_urls_stat in [1, 3]: #Save/update links operation
if not tvshow_item.emergency_urls:
tvshow_item.emergency_urls = dict()
tvshow_item.emergency_urls.update({serie.channel: True})
elif emergency_urls_stat == 2: #Delete links operation
if tvshow_item.library_urls.get(serie.channel, False):
tvshow_item.emergency_urls.update({serie.channel: True})
elif emergency_urls_stat == 2: #Delete links operation
if tvshow_item.emergency_urls and tvshow_item.emergency_urls.get(serie.channel, False):
tvshow_item.emergency_urls.pop(serie.channel, None) #delete the entry from the .nfo
tvshow_item.emergency_urls.pop(serie.channel, None) #delete the entry from the .nfo

if tvshow_item.active == 30:
tvshow_item.active = 1
if tvshow_item.infoLabels["tmdb_id"] == serie.infoLabels["tmdb_id"]:
tvshow_item.infoLabels = serie.infoLabels
tvshow_item.infoLabels["title"] = tvshow_item.infoLabels["tvshowtitle"]

if max_sea == high_sea and max_epi == high_epi and (tvshow_item.infoLabels["status"] == "Ended"
or tvshow_item.infoLabels["status"] == "Canceled") and insertados == 0 and fallidos == 0:
tvshow_item.active = 0 # ... we will not update it any more
logger.debug("%s [%s]: serie 'Terminada' o 'Cancelada'. Se desactiva la actualización periódica" % \
(serie.contentSerieName, serie.channel))

update_last = datetime.date.today()
tvshow_item.update_last = update_last.strftime('%Y-%m-%d')
update_next = datetime.date.today() + datetime.timedelta(days=int(tvshow_item.active))
@@ -819,10 +881,10 @@ def add_tvshow(item, channel=None):

if not channel:
try:
#channel = __import__('channels.%s' % item.channel, fromlist=["channels.%s" % item.channel])
# channel = __import__('channels.%s' % item.channel, fromlist=["channels.%s" % item.channel])
channel = __import__('specials.%s' % channel_alt, fromlist=["specials.%s" % channel_alt])
except ImportError:
exec "import channels." + item.channel + " as channel"
exec("import channels." + item.channel + " as channel")

#To disambiguate titles, TMDB is made to ask for the title actually wanted
#The user can pick the title among those offered on the first screen
@@ -836,15 +898,15 @@ def add_tvshow(item, channel=None):
# del item.tmdb_stat #Clear the status so it is not written to the video library

# Get the episode list
#if item.channel == 'community':
itemlist = getattr(channel, item.action)(item)

global magnet_caching
magnet_caching = False
insertados, sobreescritos, fallidos, path = save_tvshow(item, itemlist)

if not insertados and not sobreescritos and not fallidos:
platformtools.dialog_ok(config.get_localized_string(30131), config.get_localized_string(60067))
logger.error("The %s series could not be added to the video library. Could not get any episode"
% item.show)
logger.error("The %s series could not be added to the video library. Could not get any episode" % item.show)

elif fallidos == -1:
platformtools.dialog_ok(config.get_localized_string(30131), config.get_localized_string(60068))
@@ -856,8 +918,7 @@ def add_tvshow(item, channel=None):

else:
platformtools.dialog_ok(config.get_localized_string(30131), config.get_localized_string(60070))
logger.info("%s episodes of the %s series have been added to the video library" %
(insertados, item.show))
logger.info("%s episodes of the %s series have been added to the video library" % (insertados, item.show))
if config.is_xbmc():
if config.get_setting("sync_trakt_new_tvshow", "videolibrary"):
import xbmc
@@ -872,10 +933,16 @@ def add_tvshow(item, channel=None):
xbmc_videolibrary.sync_trakt_addon(path)


def emergency_urls(item, channel=None, path=None):
def emergency_urls(item, channel=None, path=None, headers={}):
logger.info()
import re
"""
from servers import torrent
try:
magnet_caching_e = magnet_caching
except:
magnet_caching_e = True

"""
We call the channel's Findvideos with "item.videolibray_emergency_urls = True" set, to obtain in
"item.emergency_urls" the list of lists of tuples of the torrent links and direct-server links for that episode or movie.
List [0] must always hold the torrent links, if any. If the .torrents are to be cached, the lookup runs against that list.
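A sketch of the lookup handshake the docstring describes: the caller sets a marker attribute on the item, the channel's findvideos() sees it and returns the links instead of rendering them, and the caller removes the marker afterwards. SimpleNamespace and this findvideos body are illustrative stand-ins for the addon's Item class and channel modules, not their real implementations:

    from types import SimpleNamespace

    def findvideos(item):
        links = [[("magnet:?xt=...", "torrent")], [("https://host/file", "direct")]]
        if getattr(item, 'videolibray_emergency_urls', False):
            item.emergency_urls = links       # lookup mode: just hand the links back
        return item

    item = SimpleNamespace(channel='demo')
    item.videolibray_emergency_urls = True    # mark as "lookup"
    item = findvideos(item)
    del item.videolibray_emergency_urls       # clear the mark afterwards
    print(item.emergency_urls[0])             # list [0] holds the torrent links
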
@@ -890,17 +957,28 @@ def emergency_urls(item, channel=None, path=None):
if hasattr(channel, 'findvideos'): #If the channel has "findvideos"...
item.videolibray_emergency_urls = True #... mark it as "lookup"
channel_save = item.channel #... save the original channel in case of fail-over in Newpct1
category_save = item.category #... save the original category in case of fail-over or redirection in Newpct1
if item.channel_redir: #... if there is a redir, temporarily restore the alternative channel
item.channel = scrapertools.find_single_match(item.url, 'http.?\:\/\/(?:www.)?(\w+)\.\w+\/').lower()
item.category = scrapertools.find_single_match(item.url, 'http.?\:\/\/(?:www.)?(\w+)\.\w+\/').capitalize()
item_res = getattr(channel, 'findvideos')(item) #... run Findvideos
item_res.channel = channel_save #... restore the original channel in case of fail-over in Newpct1
item_res.category = channel_save.capitalize() #... and the category
item_res.category = category_save #... restore the original category in case of fail-over or redirection in Newpct1
item.category = category_save #... restore the original category in case of fail-over or redirection in Newpct1
del item_res.videolibray_emergency_urls #... and the lookup mark is deleted
if item.videolibray_emergency_urls:
del item.videolibray_emergency_urls #... and the original lookup mark is deleted
except:
logger.error('ERROR when processing the title in Findvideos del Canal: ' + item.channel + ' / ' + item.title)
logger.error(traceback.format_exc())
item.channel = channel_save #... restore the original channel in case of fail-over or redirection in Newpct1
item.category = category_save #... restore the original category in case of fail-over or redirection in Newpct1
item_res = item.clone() #If there was an error, return the original Item
if item_res.videolibray_emergency_urls:
del item_res.videolibray_emergency_urls #... and the lookup mark is deleted

if item.videolibray_emergency_urls:
del item.videolibray_emergency_urls #... and the original lookup mark is deleted

#If the user has enabled the "emergency_urls_torrents" option, the .torrent files of each title will be downloaded
else: #If the links were cached successfully...
try:
@@ -921,7 +999,9 @@ def emergency_urls(item, channel=None, path=None):
if item_res.post: post = item_res.post
for url in item_res.emergency_urls[0]: #Loop over the emergency urls...
torrents_path = re.sub(r'(?:\.\w+$)', '_%s.torrent' % str(i).zfill(2), path)
path_real = caching_torrents(url, referer, post, torrents_path=torrents_path) #... to download the .torrents
path_real = ''
if magnet_caching_e or not url.startswith('magnet'):
path_real = torrent.caching_torrents(url, referer, post, torrents_path=torrents_path, headers=headers) #... to download the .torrents
if path_real: #If it succeeded...
item_res.emergency_urls[0][i-1] = path_real.replace(videolibrary_path, '') #save the relative "path"
i += 1
@@ -944,140 +1024,3 @@ def emergency_urls(item, channel=None, path=None):

#logger.debug(item_res.emergency_urls)
return item_res #Return the Item updated with the emergency links

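The new guard only calls the torrent cacher when magnet caching is on or the link is not a magnet at all, since a magnet link carries no .torrent file to download. Sketch:

    def should_cache(url, magnet_caching):
        # magnet links have no .torrent payload; they are only
        # "cached" when the magnet_caching switch is enabled
        return magnet_caching or not url.startswith('magnet')

    print(should_cache('magnet:?xt=urn:btih:...', False))    # False -> skipped
    print(should_cache('https://host/file.torrent', False))  # True  -> downloaded
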
def caching_torrents(url, referer=None, post=None, torrents_path=None, timeout=10, lookup=False, data_torrent=False):
if torrents_path != None:
logger.info("path = " + torrents_path)
else:
logger.info()
if referer and post:
logger.info('REFERER: ' + referer)
from core import httptools
torrent_file = ''
headers = {'Content-Type': 'application/x-www-form-urlencoded', 'Referer': referer} #Needed for the .torrent POST

"""
Downloads the .torrent of the received url into the received path, and decodes it
Returns the real path of the .torrent, or an empty path if the operation did not succeed
"""

videolibrary_path = config.get_videolibrary_path() #Compute the absolute path from the video library
if torrents_path == None:
if not videolibrary_path:
torrents_path = ''
if data_torrent:
return (torrents_path, torrent_file)
return torrents_path #On error, return the empty "path"
torrents_path = filetools.join(videolibrary_path, 'temp_torrents_Alfa', 'cliente_torrent_Alfa.torrent') #temporary download path
if '.torrent' not in torrents_path:
torrents_path += '.torrent' #path where the .torrent is left
torrents_path_encode = filetools.encode(torrents_path) #utf-8 encode of the path

if url.endswith(".rar") or url.startswith("magnet:"): #Not a .torrent file
logger.error('It is not a Torrent file: ' + url)
torrents_path = ''
if data_torrent:
return (torrents_path, torrent_file)
return torrents_path #On error, return the empty "path"

try:
#Download the .torrent
if referer and post: #Download with POST
response = httptools.downloadpage(url, headers=headers, post=post, follow_redirects=False, timeout=timeout)
else: #Download without POST
response = httptools.downloadpage(url, timeout=timeout)
if not response.sucess:
logger.error('.Torrent file not found: ' + url)
torrents_path = ''
if data_torrent:
return (torrents_path, torrent_file)
return torrents_path #On error, return the empty "path"
torrent_file = response.data

if "used CloudFlare" in torrent_file: #If it goes through CloudFlare, use this process
response = httptools.downloadpage("http://anonymouse.org/cgi-bin/anon-www.cgi/" + url.strip(), timeout=timeout)
if not response.sucess:
logger.error('Archivo .torrent no encontrado: ' + url)
torrents_path = ''
if data_torrent:
return (torrents_path, torrent_file)
return torrents_path #On error, return the empty "path"
torrent_file = response.data

#If it is a .ZIP file, try to extract the contents
if torrent_file.startswith("PK"):
logger.info('It is a .ZIP file: ' + url)

torrents_path_zip = filetools.join(videolibrary_path, 'temp_torrents_zip') #Working folder
torrents_path_zip = filetools.encode(torrents_path_zip)
torrents_path_zip_file = filetools.join(torrents_path_zip, 'temp_torrents_zip.zip') #Name of the .zip

import time
filetools.rmdirtree(torrents_path_zip) #Delete the temporary folder
time.sleep(1) #We must wait, otherwise it fails
filetools.mkdir(torrents_path_zip) #Create it again

if filetools.write(torrents_path_zip_file, torrent_file): #Save the .zip
torrent_file = '' #Clear the in-memory contents
try: #Extract the .zip
from core import ziptools
unzipper = ziptools.ziptools()
unzipper.extract(torrents_path_zip_file, torrents_path_zip)
except:
import xbmc
xbmc.executebuiltin('XBMC.Extract("%s", "%s")' % (torrents_path_zip_file, torrents_path_zip))
time.sleep(1)

import os
for root, folders, files in os.walk(torrents_path_zip): #Walk the folder to read the .torrent
for file in files:
if file.endswith(".torrent"):
input_file = filetools.join(root, file) #name of the .torrent
torrent_file = filetools.read(input_file) #read the .torrent

filetools.rmdirtree(torrents_path_zip) #Delete the temporary folder
#If it is not a .torrent file (RAR, HTML,..., empty) raise an error
if not scrapertools.find_single_match(torrent_file, '^d\d+:.*?\d+:'):
logger.error('It is not a Torrent file: ' + url)
torrents_path = ''
if data_torrent:
return (torrents_path, torrent_file)
return torrents_path #On error, return the empty "path"

#Save the .torrent
if not lookup:
if not filetools.write(torrents_path_encode, torrent_file):
logger.error('ERROR: Unwritten .torrent file: ' + torrents_path_encode)
torrents_path = '' #On error, return the empty "path"
torrent_file = '' #... and the .torrent buffer
if data_torrent:
return (torrents_path, torrent_file)
return torrents_path
except:
torrents_path = '' #On error, return the empty "path"
torrent_file = '' #... and the .torrent buffer
logger.error('ERROR: .Torrent download process failed: ' + url + ' / ' + torrents_path_encode)
logger.error(traceback.format_exc())

#logger.debug(torrents_path)
if data_torrent:
return (torrents_path, torrent_file)
return torrents_path


def verify_url_torrent(url, timeout=5):
"""
Checks whether the .torrent file the url points to is available, by downloading it to a temporary area
Input: url
Output: True or False depending on the result of the operation
"""

if not url or url == 'javascript:;': #If the url comes in empty...
return False #... return with an error
torrents_path = caching_torrents(url, timeout=timeout, lookup=True) #Download the .torrent
if torrents_path: #If it succeeded...
return True
else:
return False
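The '^d\d+:.*?\d+:' check above is a cheap bencode sniff: a valid .torrent is a bencoded dictionary, so the payload must start with "d" followed by a length-prefixed string key. Sketch:

    import re

    def looks_like_torrent(payload):
        # bencoded dict: 'd' + '<len>:<key>' ...  e.g. d8:announce...
        return bool(re.match(r'^d\d+:.*?\d+:', payload, re.DOTALL))

    print(looks_like_torrent('d8:announce40:http://tracker...'))  # True
    print(looks_like_torrent('<html>blocked</html>'))             # False
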
@@ -3,22 +3,26 @@
# Zip Tools
# --------------------------------------------------------------------------------

import io
import os
from builtins import object
import sys
PY3 = False
VFS = True
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int; VFS = False

import zipfile

from platformcode import config, logger
from core import filetools


class ziptools:
class ziptools(object):
def extract(self, file, dir, folder_to_extract="", overwrite_question=False, backup=False):
logger.info("file=%s" % file)
logger.info("dir=%s" % dir)

if not dir.endswith(':') and not os.path.exists(dir):
os.mkdir(dir)
if not dir.endswith(':') and not filetools.exists(dir):
filetools.mkdir(dir)

file = io.FileIO(file)
zf = zipfile.ZipFile(file)
if not folder_to_extract:
self._createstructure(file, dir)
@@ -30,60 +34,66 @@ class ziptools:
if not name.endswith('/'):
logger.info("no es un directorio")
try:
(path, filename) = os.path.split(os.path.join(dir, name))
(path, filename) = filetools.split(filetools.join(dir, name))
logger.info("path=%s" % path)
logger.info("name=%s" % name)
if folder_to_extract:
if path != os.path.join(dir, folder_to_extract):
if path != filetools.join(dir, folder_to_extract):
break
else:
os.makedirs(path)
filetools.mkdir(path)
except:
pass
if folder_to_extract:
outfilename = os.path.join(dir, filename)
outfilename = filetools.join(dir, filename)

else:
outfilename = os.path.join(dir, name)
outfilename = filetools.join(dir, name)
logger.info("outfilename=%s" % outfilename)
try:
if os.path.exists(outfilename) and overwrite_question:
if filetools.exists(outfilename) and overwrite_question:
from platformcode import platformtools
dyesno = platformtools.dialog_yesno("El archivo ya existe",
"El archivo %s a descomprimir ya existe" \
", ¿desea sobrescribirlo?" \
% os.path.basename(outfilename))
% filetools.basename(outfilename))
if not dyesno:
break
if backup:
import time
import shutil
hora_folder = "Copia seguridad [%s]" % time.strftime("%d-%m_%H-%M", time.localtime())
backup = os.path.join(config.get_data_path(), 'backups', hora_folder, folder_to_extract)
if not os.path.exists(backup):
os.makedirs(backup)
shutil.copy2(outfilename, os.path.join(backup, os.path.basename(outfilename)))
backup = filetools.join(config.get_data_path(), 'backups', hora_folder, folder_to_extract)
if not filetools.exists(backup):
filetools.mkdir(backup)
filetools.copy(outfilename, filetools.join(backup, filetools.basename(outfilename)))

outfile = open(outfilename, 'wb')
outfile.write(zf.read(nameo))
if not filetools.write(outfilename, zf.read(nameo), silent=True, vfs=VFS): #TRUNCATES at the END in Kodi 19 with VFS
logger.error("Error en fichero " + nameo)
except:
import traceback
logger.error(traceback.format_exc())
logger.error("Error en fichero " + nameo)

try:
zf.close()
except:
logger.info("Error cerrando .zip " + file)

def _createstructure(self, file, dir):
self._makedirs(self._listdirs(file), dir)

def create_necessary_paths(filename):
try:
(path, name) = os.path.split(filename)
os.makedirs(path)
(path, name) = filetools.split(filename)
filetools.mkdir(path)
except:
pass

def _makedirs(self, directories, basedir):
for dir in directories:
curdir = os.path.join(basedir, dir)
if not os.path.exists(curdir):
os.mkdir(curdir)
curdir = filetools.join(basedir, dir)
if not filetools.exists(curdir):
filetools.mkdir(curdir)

def _listdirs(self, file):
zf = zipfile.ZipFile(file)
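The whole extract path migrates from os.path/open to the addon's filetools wrappers, so archives can also be written through Kodi's virtual file system (network shares); the vfs=VFS flag keeps the direct-write behaviour on Kodi 19, where VFS writes were observed to truncate files. A hedged sketch of the substitution; it only runs inside the addon, and the filetools.write signature follows the call visible in this diff:

    # Before: local filesystem only
    # outfile = open(outfilename, 'wb'); outfile.write(data)

    # After: route through the addon's filetools layer (handles smb://, nfs://, ...)
    from core import filetools

    def write_entry(outfilename, data, VFS=True):
        if not filetools.write(outfilename, data, silent=True, vfs=VFS):
            raise IOError("could not write " + outfilename)
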
@@ -12,7 +12,7 @@ from platformcode import config, logger
logger.info("init...")

librerias = xbmc.translatePath(os.path.join(config.get_runtime_path(), 'lib'))
sys.path.insert(0, librerias)
sys.path.append(librerias)

if not config.dev_mode():
from platformcode import updater

12
lib/builtins/__init__.py
Normal file
@@ -0,0 +1,12 @@
from __future__ import absolute_import
import sys
__future_module__ = True

if sys.version_info[0] < 3:
from __builtin__ import *
# Overwrite any old definitions with the equivalent future.builtins ones:
from future.builtins import *
else:
raise ImportError('This package should not be accessible on Python 3. '
'Either you are trying to run from the python-future src folder '
'or your installation of python-future is corrupted.')
@@ -1,4 +1,6 @@
# https://github.com/VeNoMouS/cloudscraper/tree/master
import logging
import os
import re
import sys
import ssl
@@ -9,6 +11,14 @@ try:
except ImportError:
import copy_reg as copyreg

try:
from HTMLParser import HTMLParser
except ImportError:
if sys.version_info >= (3, 4):
import html
else:
from html.parser import HTMLParser

from copy import deepcopy
from time import sleep
from collections import OrderedDict
@@ -31,13 +41,17 @@ except ImportError:
pass

try:
from urlparse import urlparse
from urlparse import urlparse, urljoin
except ImportError:
from urllib.parse import urlparse
from urllib.parse import urlparse, urljoin

# Add exceptions path
sys.path.append(os.path.join(os.path.dirname(__file__), 'exceptions'))
import cloudflare_exceptions # noqa: E402

# ------------------------------------------------------------------------------- #

__version__ = '1.2.19'
__version__ = '1.2.24'

# ------------------------------------------------------------------------------- #

@@ -91,6 +105,7 @@ class CloudScraper(Session):
'allow_brotli',
True if 'brotli' in sys.modules.keys() else False
)

self.user_agent = User_Agent(
allow_brotli=self.allow_brotli,
browser=kwargs.pop('browser', None)
@@ -107,13 +122,16 @@ class CloudScraper(Session):
# Set a random User-Agent if no custom User-Agent has been set
# ------------------------------------------------------------------------------- #
self.headers = self.user_agent.headers
if not self.cipherSuite:
self.cipherSuite = self.user_agent.cipherSuite

if isinstance(self.cipherSuite, list):
self.cipherSuite = ':'.join(self.cipherSuite)

self.mount(
'https://',
CipherSuiteAdapter(
cipherSuite=':'.join(self.user_agent.cipherSuite)
if not self.cipherSuite else ':'.join(self.cipherSuite)
if isinstance(self.cipherSuite, list) else self.cipherSuite
cipherSuite=self.cipherSuite
)
)

@@ -138,6 +156,20 @@ class CloudScraper(Session):
except ValueError as e:
print("Debug Error: {}".format(getattr(e, 'message', e)))

# ------------------------------------------------------------------------------- #
# Unescape / decode html entities
# ------------------------------------------------------------------------------- #

@staticmethod
def unescape(html_text):
if sys.version_info >= (3, 0):
if sys.version_info >= (3, 4):
return html.unescape(html_text)

return HTMLParser().unescape(html_text)

return HTMLParser().unescape(html_text)

# ------------------------------------------------------------------------------- #
# Decode Brotli on older versions of urllib3 manually
# ------------------------------------------------------------------------------- #
@@ -186,7 +218,7 @@ class CloudScraper(Session):
sys.tracebacklimit = 0
_ = self._solveDepthCnt
self._solveDepthCnt = 0
raise RuntimeError(
raise cloudflare_exceptions.Cloudflare_Loop_Protection(
"!!Loop Protection!! We have tried to solve {} time(s) in a row.".format(_)
)

@@ -269,7 +301,7 @@ class CloudScraper(Session):
def is_Challenge_Request(self, resp):
if self.is_Firewall_Blocked(resp):
sys.tracebacklimit = 0
raise RuntimeError('Cloudflare has blocked this request (Code 1020 Detected).')
raise cloudflare_exceptions.Cloudflare_Block('Cloudflare has blocked this request (Code 1020 Detected).')

if self.is_reCaptcha_Challenge(resp) or self.is_IUAM_Challenge(resp):
return True
@@ -280,17 +312,18 @@ class CloudScraper(Session):
# Try to solve cloudflare javascript challenge.
# ------------------------------------------------------------------------------- #

@staticmethod
def IUAM_Challenge_Response(body, url, interpreter):
def IUAM_Challenge_Response(self, body, url, interpreter):
try:
challengeUUID = re.search(
r'id="challenge-form" action="(?P<challengeUUID>\S+)"',
body, re.M | re.DOTALL
).groupdict().get('challengeUUID', '')

payload = OrderedDict(re.findall(r'name="(r|jschl_vc|pass)"\svalue="(.*?)"', body))

except AttributeError:
sys.tracebacklimit = 0
raise RuntimeError(
raise cloudflare_exceptions.Cloudflare_Error_IUAM(
"Cloudflare IUAM detected, unfortunately we can't extract the parameters correctly."
)

@@ -301,7 +334,7 @@ class CloudScraper(Session):
interpreter
).solveChallenge(body, hostParsed.netloc)
except Exception as e:
raise RuntimeError(
raise cloudflare_exceptions.Cloudflare_Error_IUAM(
'Unable to parse Cloudflare anti-bots page: {}'.format(
getattr(e, 'message', e)
)
@@ -311,7 +344,7 @@ class CloudScraper(Session):
'url': '{}://{}{}'.format(
hostParsed.scheme,
hostParsed.netloc,
challengeUUID
self.unescape(challengeUUID)
),
'data': payload
}
@@ -320,8 +353,7 @@ class CloudScraper(Session):
# Try to solve the reCaptcha challenge via 3rd party.
# ------------------------------------------------------------------------------- #

@staticmethod
def reCaptcha_Challenge_Response(provider, provider_params, body, url):
def reCaptcha_Challenge_Response(self, provider, provider_params, body, url):
try:
payload = re.search(
r'(name="r"\svalue="(?P<r>\S+)"|).*?challenge-form" action="(?P<challengeUUID>\S+)".*?'
@@ -330,7 +362,7 @@ class CloudScraper(Session):
).groupdict()
except (AttributeError):
sys.tracebacklimit = 0
raise RuntimeError(
raise cloudflare_exceptions.Cloudflare_Error_reCaptcha(
"Cloudflare reCaptcha detected, unfortunately we can't extract the parameters correctly."
)

@@ -339,7 +371,7 @@ class CloudScraper(Session):
'url': '{}://{}{}'.format(
hostParsed.scheme,
hostParsed.netloc,
payload.get('challengeUUID', '')
self.unescape(payload.get('challengeUUID', ''))
),
'data': OrderedDict([
('r', payload.get('r', '')),
@@ -377,7 +409,7 @@ class CloudScraper(Session):

if not self.recaptcha or not isinstance(self.recaptcha, dict) or not self.recaptcha.get('provider'):
sys.tracebacklimit = 0
raise RuntimeError(
raise cloudflare_exceptions.Cloudflare_reCaptcha_Provider(
"Cloudflare reCaptcha detected, unfortunately you haven't loaded an anti reCaptcha provider "
"correctly via the 'recaptcha' parameter."
)
@@ -413,7 +445,7 @@ class CloudScraper(Session):
self.delay = delay
except (AttributeError, ValueError):
sys.tracebacklimit = 0
raise RuntimeError("Cloudflare IUAM possibly malformed, issue extracting delay value.")
raise cloudflare_exceptions.Cloudflare_Error_IUAM("Cloudflare IUAM possibly malformed, issue extracting delay value.")

sleep(self.delay)

@@ -473,34 +505,25 @@ class CloudScraper(Session):
return challengeSubmitResponse
else:
cloudflare_kwargs = deepcopy(kwargs)
cloudflare_kwargs['headers'] = updateAttr(
cloudflare_kwargs,
'headers',
{'Referer': challengeSubmitResponse.url}
)

if not urlparse(challengeSubmitResponse.headers['Location']).netloc:
cloudflare_kwargs['headers'] = updateAttr(
cloudflare_kwargs,
'headers',
{'Referer': '{}://{}'.format(urlParsed.scheme, urlParsed.netloc)}
)
return self.request(
resp.request.method,
'{}://{}{}'.format(
urlParsed.scheme,
urlParsed.netloc,
challengeSubmitResponse.headers['Location']
),
**cloudflare_kwargs
redirect_location = urljoin(
challengeSubmitResponse.url,
challengeSubmitResponse.headers['Location']
)
else:
redirectParsed = urlparse(challengeSubmitResponse.headers['Location'])
cloudflare_kwargs['headers'] = updateAttr(
cloudflare_kwargs,
'headers',
{'Referer': '{}://{}'.format(redirectParsed.scheme, redirectParsed.netloc)}
)
return self.request(
resp.request.method,
challengeSubmitResponse.headers['Location'],
**cloudflare_kwargs
)
redirect_location = challengeSubmitResponse.headers['Location']

return self.request(
resp.request.method,
redirect_location,
**cloudflare_kwargs
)

# ------------------------------------------------------------------------------- #
# We shouldn't be here...
@@ -561,7 +584,7 @@ class CloudScraper(Session):
|
||||
break
|
||||
else:
|
||||
sys.tracebacklimit = 0
|
||||
raise RuntimeError(
|
||||
raise cloudflare_exceptions.Cloudflare_Error_IUAM(
|
||||
"Unable to find Cloudflare cookies. Does the site actually "
|
||||
"have Cloudflare IUAM (I'm Under Attack Mode) enabled?"
|
||||
)
|
||||
|
||||
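The switch to urljoin above is what makes relative Location headers resolve correctly. A minimal sketch of the behaviour, standard library only, with made-up URLs:

    # Python 3 (on Python 2: from urlparse import urljoin)
    from urllib.parse import urljoin

    # A relative Location is resolved against the URL that produced the redirect:
    urljoin('https://example.com/cdn-cgi/l/chk_jschl?x=1', '/index.php')
    # -> 'https://example.com/index.php'

    # An absolute Location passes through unchanged, so one code path covers both:
    urljoin('https://example.com/a', 'https://other.example/b')
    # -> 'https://other.example/b'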
0 lib/cloudscraper/exceptions/__init__.py Normal file
31 lib/cloudscraper/exceptions/cloudflare_exceptions.py Normal file
@@ -0,0 +1,31 @@
# ------------------------------------------------------------------------------- #


class Cloudflare_Loop_Protection(Exception):
    """
    Raise error for recursive depth protection
    """


class Cloudflare_Block(Exception):
    """
    Raise error for Cloudflare 1020 block
    """


class Cloudflare_Error_IUAM(Exception):
    """
    Raise error for problem extracting IUAM parameters from Cloudflare payload
    """


class Cloudflare_Error_reCaptcha(Exception):
    """
    Raise error for problem extracting reCaptcha parameters from Cloudflare payload
    """


class Cloudflare_reCaptcha_Provider(Exception):
    """
    Raise error for reCaptcha from Cloudflare, no provider loaded.
    """
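With these classes in place, callers can react to a specific Cloudflare failure instead of a bare RuntimeError. A hedged usage sketch, assuming this vendored cloudscraper keeps the usual create_scraper() factory and the package path shown above (adjust both to the real tree):

    import cloudscraper  # vendored here under lib/cloudscraper
    from cloudscraper.exceptions import cloudflare_exceptions

    scraper = cloudscraper.create_scraper()
    try:
        html = scraper.get('https://example.com').text
    except cloudflare_exceptions.Cloudflare_reCaptcha_Provider:
        # No anti-reCaptcha provider was configured via the 'recaptcha' parameter.
        html = None
    except cloudflare_exceptions.Cloudflare_Error_IUAM:
        # The IUAM page could not be parsed; retry later or report the failure.
        html = None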
49 lib/cloudscraper/exceptions/reCaptcha_exceptions.py Normal file
@@ -0,0 +1,49 @@
# ------------------------------------------------------------------------------- #


class reCaptcha_Service_Unavailable(Exception):
    """
    Raise error for external services that cannot be reached
    """


class reCaptcha_Error_From_API(Exception):
    """
    Raise error for error from API response.
    """


class reCaptcha_Account_Error(Exception):
    """
    Raise error for reCaptcha provider account problem.
    """


class reCaptcha_Timeout(Exception):
    """
    Raise error for reCaptcha provider taking too long.
    """


class reCaptcha_Bad_Parameter(NotImplementedError):
    """
    Raise error for bad or missing Parameter.
    """


class reCaptcha_Bad_Job_ID(Exception):
    """
    Raise error for invalid job id.
    """


class reCaptcha_Report_Error(Exception):
    """
    Raise error for reCaptcha provider unable to report bad solve.
    """


class reCaptcha_Import_Error(Exception):
    """
    Raise error for reCaptcha, cannot import a module.
    """
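These exceptions are consistently raised right after sys.tracebacklimit = 0. Setting the limit to zero makes an uncaught exception print as (roughly) a single line instead of a multi-frame traceback, which keeps the Kodi log readable. A self-contained sketch, with a stand-in for one of the classes above:

    import sys

    class reCaptcha_Bad_Parameter(NotImplementedError):
        """Stand-in for the class defined above."""

    sys.tracebacklimit = 0
    raise reCaptcha_Bad_Parameter("2Captcha: Missing api_key parameter.")
    # Uncaught, this prints approximately just:
    #   reCaptcha_Bad_Parameter: 2Captcha: Missing api_key parameter.
    # rather than the usual "Traceback (most recent call last): ..." frames.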
@@ -1,13 +1,17 @@
from __future__ import absolute_import

import requests
+import reCaptcha_exceptions

try:
    import polling
except ImportError:
    import sys
    sys.tracebacklimit = 0
-    raise RuntimeError("Please install the python module 'polling' via pip or download it from https://github.com/justiniso/polling/")
+    raise reCaptcha_exceptions.reCaptcha_Import_Error(
+        "Please install the python module 'polling' via pip or download it from "
+        "https://github.com/justiniso/polling/"
+    )

from . import reCaptcha

@@ -24,7 +28,7 @@ class captchaSolver(reCaptcha):
    @staticmethod
    def checkErrorStatus(response, request_type):
        if response.status_code in [500, 502]:
-            raise RuntimeError('2Captcha: Server Side Error {}'.format(response.status_code))
+            raise reCaptcha_exceptions.reCaptcha_Service_Unavailable('2Captcha: Server Side Error {}'.format(response.status_code))

        errors = {
            'in.php': {
@@ -71,16 +75,23 @@ class captchaSolver(reCaptcha):
        }

        if response.json().get('status') is False and response.json().get('request') in errors.get(request_type):
-            raise RuntimeError('{} {}'.format(response.json().get('request'), errors.get(request_type).get(response.json().get('request'))))
+            raise reCaptcha_exceptions.reCaptcha_Error_From_API(
+                '{} {}'.format(
+                    response.json().get('request'),
+                    errors.get(request_type).get(response.json().get('request'))
+                )
+            )

    # ------------------------------------------------------------------------------- #

    def reportJob(self, jobID):
        if not jobID:
-            raise RuntimeError("2Captcha: Error bad job id to request reCaptcha.")
+            raise reCaptcha_exceptions.reCaptcha_Bad_Job_ID(
+                "2Captcha: Error bad job id to request reCaptcha."
+            )

        def _checkRequest(response):
-            if response.status_code in [200, 303] and response.json().get('status') == 1:
+            if response.ok and response.json().get('status') == 1:
                return response

            self.checkErrorStatus(response, 'res.php')
@@ -105,7 +116,9 @@ class captchaSolver(reCaptcha):
        if response:
            return True
        else:
-            raise RuntimeError("2Captcha: Error - Failed to report bad reCaptcha solve.")
+            raise reCaptcha_exceptions.reCaptcha_Report_Error(
+                "2Captcha: Error - Failed to report bad reCaptcha solve."
+            )

    # ------------------------------------------------------------------------------- #

@@ -114,7 +127,7 @@ class captchaSolver(reCaptcha):
            raise RuntimeError("2Captcha: Error bad job id to request reCaptcha.")

        def _checkRequest(response):
-            if response.status_code in [200, 303] and response.json().get('status') == 1:
+            if response.ok and response.json().get('status') == 1:
                return response

            self.checkErrorStatus(response, 'res.php')
@@ -139,13 +152,15 @@ class captchaSolver(reCaptcha):
        if response:
            return response.json().get('request')
        else:
-            raise RuntimeError("2Captcha: Error failed to solve reCaptcha.")
+            raise reCaptcha_exceptions.reCaptcha_Timeout(
+                "2Captcha: Error failed to solve reCaptcha."
+            )

    # ------------------------------------------------------------------------------- #

    def requestSolve(self, site_url, site_key):
        def _checkRequest(response):
-            if response.status_code in [200, 303] and response.json().get("status") == 1 and response.json().get('request'):
+            if response.ok and response.json().get("status") == 1 and response.json().get('request'):
                return response

            self.checkErrorStatus(response, 'in.php')
@@ -173,7 +188,9 @@ class captchaSolver(reCaptcha):
        if response:
            return response.json().get('request')
        else:
-            raise RuntimeError('2Captcha: Error no job id was returned.')
+            raise reCaptcha_exceptions.reCaptcha_Bad_Job_ID(
+                '2Captcha: Error no job id was returned.'
+            )

    # ------------------------------------------------------------------------------- #

@@ -181,7 +198,9 @@ class captchaSolver(reCaptcha):
        jobID = None

        if not reCaptchaParams.get('api_key'):
-            raise ValueError("2Captcha: Missing api_key parameter.")
+            raise reCaptcha_exceptions.reCaptcha_Bad_Parameter(
+                "2Captcha: Missing api_key parameter."
+            )

        self.api_key = reCaptchaParams.get('api_key')

@@ -196,9 +215,13 @@ class captchaSolver(reCaptcha):
            if jobID:
                self.reportJob(jobID)
        except polling.TimeoutException:
-            raise RuntimeError("2Captcha: reCaptcha solve took to long and also failed reporting the job.")
+            raise reCaptcha_exceptions.reCaptcha_Timeout(
+                "2Captcha: reCaptcha solve took to long and also failed reporting the job the job id {}.".format(jobID)
+            )

-        raise RuntimeError("2Captcha: reCaptcha solve took to long to execute, aborting.")
+        raise reCaptcha_exceptions.reCaptcha_Timeout(
+            "2Captcha: reCaptcha solve took to long to execute job id {}, aborting.".format(jobID)
+        )


# ------------------------------------------------------------------------------- #
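Note the recurring status check change: 'status_code in [200, 303]' became 'response.ok'. In requests, ok is True for any status code below 400, so the new check accepts every success and redirect status instead of just those two. A self-contained illustration, no network needed:

    import requests

    r = requests.Response()
    r.status_code = 204              # a success status, but not 200 or 303

    r.status_code in [200, 303]      # False: the old check rejected it
    r.ok                             # True: ok is simply status_code < 400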
202 lib/cloudscraper/reCaptcha/9kw.py Normal file
@@ -0,0 +1,202 @@
from __future__ import absolute_import

import re
import requests
import reCaptcha_exceptions

try:
    import polling
except ImportError:
    import sys
    sys.tracebacklimit = 0
    raise reCaptcha_exceptions.reCaptcha_Import_Error(
        "Please install the python module 'polling' via pip or download it from "
        "https://github.com/justiniso/polling/"
    )

from . import reCaptcha


class captchaSolver(reCaptcha):

    def __init__(self):
        super(captchaSolver, self).__init__('9kw')
        self.host = 'https://www.9kw.eu/index.cgi'
        self.maxtimeout = 180
        self.session = requests.Session()

    # ------------------------------------------------------------------------------- #

    @staticmethod
    def checkErrorStatus(response):
        if response.status_code in [500, 502]:
            raise reCaptcha_exceptions.reCaptcha_Service_Unavailable(
                '9kw: Server Side Error {}'.format(response.status_code)
            )

        error_codes = {
            1: 'No API Key available.',
            2: 'No API key found.',
            3: 'No active API key found.',
            4: 'API Key has been disabled by the operator. ',
            5: 'No user found.',
            6: 'No data found.',
            7: 'Found No ID.',
            8: 'found No captcha.',
            9: 'No image found.',
            10: 'Image size not allowed.',
            11: 'credit is not sufficient.',
            12: 'what was done.',
            13: 'No answer contain.',
            14: 'Captcha already been answered.',
            15: 'Captcha to quickly filed.',
            16: 'JD check active.',
            17: 'Unknown problem.',
            18: 'Found No ID.',
            19: 'Incorrect answer.',
            20: 'Do not timely filed (Incorrect UserID).',
            21: 'Link not allowed.',
            22: 'Prohibited submit.',
            23: 'Entering prohibited.',
            24: 'Too little credit.',
            25: 'No entry found.',
            26: 'No Conditions accepted.',
            27: 'No coupon code found in the database.',
            28: 'Already unused voucher code.',
            29: 'maxTimeout under 60 seconds.',
            30: 'User not found.',
            31: 'An account is not yet 24 hours in system.',
            32: 'An account does not have the full rights.',
            33: 'Plugin needed a update.',
            34: 'No HTTPS allowed.',
            35: 'No HTTP allowed.',
            36: 'Source not allowed.',
            37: 'Transfer denied.',
            38: 'Incorrect answer without space',
            39: 'Incorrect answer with space',
            40: 'Incorrect answer with not only numbers',
            41: 'Incorrect answer with not only A-Z, a-z',
            42: 'Incorrect answer with not only 0-9, A-Z, a-z',
            43: 'Incorrect answer with not only [0-9,- ]',
            44: 'Incorrect answer with not only [0-9A-Za-z,- ]',
            45: 'Incorrect answer with not only coordinates',
            46: 'Incorrect answer with not only multiple coordinates',
            47: 'Incorrect answer with not only data',
            48: 'Incorrect answer with not only rotate number',
            49: 'Incorrect answer with not only text',
            50: 'Incorrect answer with not only text and too short',
            51: 'Incorrect answer with not enough chars',
            52: 'Incorrect answer with too many chars',
            53: 'Incorrect answer without no or yes',
            54: 'Assignment was not found.'
        }

        if response.text.startswith('{'):
            if response.json().get('error'):
                raise reCaptcha_exceptions.reCaptcha_Error_From_API(error_codes.get(int(response.json().get('error'))))
        else:
            error_code = int(re.search(r'^00(?P<error_code>\d+)', response.text).groupdict().get('error_code', 0))
            if error_code:
                raise reCaptcha_exceptions.reCaptcha_Error_From_API(error_codes.get(error_code))

    # ------------------------------------------------------------------------------- #

    def requestJob(self, jobID):
        if not jobID:
            raise reCaptcha_exceptions.reCaptcha_Bad_Job_ID(
                "9kw: Error bad job id to request reCaptcha against."
            )

        def _checkRequest(response):
            if response.ok and response.json().get('answer') != 'NO DATA':
                return response

            self.checkErrorStatus(response)

            return None

        response = polling.poll(
            lambda: self.session.get(
                self.host,
                params={
                    'apikey': self.api_key,
                    'action': 'usercaptchacorrectdata',
                    'id': jobID,
                    'info': 1,
                    'json': 1
                }
            ),
            check_success=_checkRequest,
            step=10,
            timeout=(self.maxtimeout + 10)
        )

        if response:
            return response.json().get('answer')
        else:
            raise reCaptcha_exceptions.reCaptcha_Timeout("9kw: Error failed to solve reCaptcha.")

    # ------------------------------------------------------------------------------- #

    def requestSolve(self, site_url, site_key):
        def _checkRequest(response):
            if response.ok and response.text.startswith('{') and response.json().get('captchaid'):
                return response

            self.checkErrorStatus(response)

            return None

        response = polling.poll(
            lambda: self.session.post(
                self.host,
                data={
                    'apikey': self.api_key,
                    'action': 'usercaptchaupload',
                    'interactive': 1,
                    'file-upload-01': site_key,
                    'oldsource': 'recaptchav2',
                    'pageurl': site_url,
                    'maxtimeout': self.maxtimeout,
                    'json': 1
                },
                allow_redirects=False
            ),
            check_success=_checkRequest,
            step=5,
            timeout=(self.maxtimeout + 10)
        )

        if response:
            return response.json().get('captchaid')
        else:
            raise reCaptcha_exceptions.reCaptcha_Bad_Job_ID('9kw: Error no valid job id was returned.')

    # ------------------------------------------------------------------------------- #

    def getCaptchaAnswer(self, site_url, site_key, reCaptchaParams):
        jobID = None

        if not reCaptchaParams.get('api_key'):
            raise reCaptcha_exceptions.reCaptcha_Bad_Parameter("9kw: Missing api_key parameter.")

        self.api_key = reCaptchaParams.get('api_key')

        if reCaptchaParams.get('maxtimeout'):
            self.maxtimeout = reCaptchaParams.get('maxtimeout')

        if reCaptchaParams.get('proxy'):
            self.session.proxies = reCaptchaParams.get('proxies')

        try:
            jobID = self.requestSolve(site_url, site_key)
            return self.requestJob(jobID)
        except polling.TimeoutException:
            raise reCaptcha_exceptions.reCaptcha_Timeout(
                "9kw: reCaptcha solve took to long to execute 'captchaid' {}, aborting.".format(jobID)
            )


# ------------------------------------------------------------------------------- #


captchaSolver()
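The provider modules all share the same polling pattern: polling.poll() keeps calling the target until check_success returns something truthy, waits step seconds between attempts, and raises polling.TimeoutException once timeout is exceeded. Reduced to a runnable skeleton (the canned answers stand in for the provider's HTTP responses):

    import itertools
    import polling

    attempts = itertools.chain(['NO DATA', 'NO DATA'], itertools.repeat('solved-token'))

    answer = polling.poll(
        lambda: next(attempts),                   # stands in for session.get(...).json().get('answer')
        check_success=lambda r: r != 'NO DATA',   # poll until this predicate is truthy
        step=1,                                   # seconds between attempts
        timeout=30                                # then polling.TimeoutException is raised
    )
    # answer == 'solved-token' after the third attempt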
@@ -1,12 +1,16 @@
from __future__ import absolute_import

import sys
+import reCaptcha_exceptions

try:
    from python_anticaptcha import AnticaptchaClient, NoCaptchaTaskProxylessTask
except ImportError:
    sys.tracebacklimit = 0
-    raise RuntimeError("Please install the python module 'python_anticaptcha' via pip or download it from https://github.com/ad-m/python-anticaptcha")
+    raise reCaptcha_exceptions.reCaptcha_Import_Error(
+        "Please install the python module 'python_anticaptcha' via pip or download it from "
+        "https://github.com/ad-m/python-anticaptcha"
+    )

from . import reCaptcha

@@ -16,9 +20,11 @@ class captchaSolver(reCaptcha):
    def __init__(self):
        super(captchaSolver, self).__init__('anticaptcha')

    # ------------------------------------------------------------------------------- #

    def getCaptchaAnswer(self, site_url, site_key, reCaptchaParams):
        if not reCaptchaParams.get('api_key'):
-            raise ValueError("reCaptcha provider 'anticaptcha' was not provided an 'api_key' parameter.")
+            raise reCaptcha_exceptions.reCaptcha_Bad_Parameter("anticaptcha: Missing api_key parameter.")

        client = AnticaptchaClient(reCaptchaParams.get('api_key'))

@@ -29,10 +35,14 @@ class captchaSolver(reCaptcha):

        if not hasattr(client, 'createTaskSmee'):
            sys.tracebacklimit = 0
-            raise RuntimeError("Please upgrade 'python_anticaptcha' via pip or download it from https://github.com/ad-m/python-anticaptcha")
+            raise reCaptcha_exceptions.reCaptcha_Import_Error(
+                "Please upgrade 'python_anticaptcha' via pip or download it from https://github.com/ad-m/python-anticaptcha"
+            )

        job = client.createTaskSmee(task)
        return job.get_solution_response()


# ------------------------------------------------------------------------------- #

captchaSolver()
@@ -2,13 +2,17 @@ from __future__ import absolute_import

import json
import requests
+import reCaptcha_exceptions

try:
    import polling
except ImportError:
    import sys
    sys.tracebacklimit = 0
-    raise RuntimeError("Please install the python module 'polling' via pip or download it from https://github.com/justiniso/polling/")
+    raise reCaptcha_exceptions.reCaptcha_Import_Error(
+        "Please install the python module 'polling' via pip or download it from "
+        "https://github.com/justiniso/polling/"
+    )

from . import reCaptcha

@@ -20,7 +24,7 @@ class captchaSolver(reCaptcha):
        self.host = 'http://api.dbcapi.me/api'
        self.session = requests.Session()

    # ------------------------------------------------------------------------------- #

    @staticmethod
    def checkErrorStatus(response):
@@ -34,21 +38,21 @@ class captchaSolver(reCaptcha):
        )

        if response.status_code in errors:
-            raise RuntimeError(errors.get(response.status_code))
+            raise reCaptcha_exceptions.reCaptcha_Service_Unavailable(errors.get(response.status_code))

    # ------------------------------------------------------------------------------- #

    def login(self, username, password):
        self.username = username
        self.password = password

        def _checkRequest(response):
-            if response.status_code == 200:
+            if response.ok:
                if response.json().get('is_banned'):
-                    raise RuntimeError('DeathByCaptcha: Your account is banned.')
+                    raise reCaptcha_exceptions.reCaptcha_Account_Error('DeathByCaptcha: Your account is banned.')

                if response.json().get('balanace') == 0:
-                    raise RuntimeError('DeathByCaptcha: insufficient credits.')
+                    raise reCaptcha_exceptions.reCaptcha_Account_Error('DeathByCaptcha: insufficient credits.')

                return response

@@ -72,11 +76,13 @@ class captchaSolver(reCaptcha):

        self.debugRequest(response)

    # ------------------------------------------------------------------------------- #

    def reportJob(self, jobID):
        if not jobID:
-            raise RuntimeError("DeathByCaptcha: Error bad job id to report failed reCaptcha.")
+            raise reCaptcha_exceptions.reCaptcha_Bad_Job_ID(
+                "DeathByCaptcha: Error bad job id to report failed reCaptcha."
+            )

        def _checkRequest(response):
            if response.status_code == 200:
@@ -103,16 +109,20 @@ class captchaSolver(reCaptcha):
        if response:
            return True
        else:
-            raise RuntimeError("DeathByCaptcha: Error report failed reCaptcha.")
+            raise reCaptcha_exceptions.reCaptcha_Report_Error(
+                "DeathByCaptcha: Error report failed reCaptcha."
+            )

    # ------------------------------------------------------------------------------- #

    def requestJob(self, jobID):
        if not jobID:
-            raise RuntimeError("DeathByCaptcha: Error bad job id to request reCaptcha.")
+            raise reCaptcha_exceptions.reCaptcha_Bad_Job_ID(
+                "DeathByCaptcha: Error bad job id to request reCaptcha."
+            )

        def _checkRequest(response):
-            if response.status_code in [200, 303] and response.json().get('text'):
+            if response.ok and response.json().get('text'):
                return response

            self.checkErrorStatus(response)
@@ -132,13 +142,15 @@ class captchaSolver(reCaptcha):
        if response:
            return response.json().get('text')
        else:
-            raise RuntimeError("DeathByCaptcha: Error failed to solve reCaptcha.")
+            raise reCaptcha_exceptions.reCaptcha_Timeout(
+                "DeathByCaptcha: Error failed to solve reCaptcha."
+            )

    # ------------------------------------------------------------------------------- #

    def requestSolve(self, site_url, site_key):
        def _checkRequest(response):
-            if response.status_code in [200, 303] and response.json().get("is_correct") and response.json().get('captcha'):
+            if response.ok and response.json().get("is_correct") and response.json().get('captcha'):
                return response

            self.checkErrorStatus(response)
@@ -168,16 +180,20 @@ class captchaSolver(reCaptcha):
        if response:
            return response.json().get('captcha')
        else:
-            raise RuntimeError('DeathByCaptcha: Error no job id was returned.')
+            raise reCaptcha_exceptions.reCaptcha_Bad_Job_ID(
+                'DeathByCaptcha: Error no job id was returned.'
+            )

    # ------------------------------------------------------------------------------- #

    def getCaptchaAnswer(self, site_url, site_key, reCaptchaParams):
        jobID = None

        for param in ['username', 'password']:
            if not reCaptchaParams.get(param):
-                raise ValueError("DeathByCaptcha: Missing '{}' parameter.".format(param))
+                raise reCaptcha_exceptions.reCaptcha_Bad_Parameter(
+                    "DeathByCaptcha: Missing '{}' parameter.".format(param)
+                )
            setattr(self, param, reCaptchaParams.get(param))

        if reCaptchaParams.get('proxy'):
@@ -191,9 +207,13 @@ class captchaSolver(reCaptcha):
            if jobID:
                self.reportJob(jobID)
        except polling.TimeoutException:
-            raise RuntimeError("DeathByCaptcha: reCaptcha solve took to long and also failed reporting the job.")
+            raise reCaptcha_exceptions.reCaptcha_Timeout(
+                "DeathByCaptcha: reCaptcha solve took to long and also failed reporting the job id {}.".format(jobID)
+            )

-        raise RuntimeError("DeathByCaptcha: reCaptcha solve took to long to execute, aborting.")
+        raise reCaptcha_exceptions.reCaptcha_Timeout(
+            "DeathByCaptcha: reCaptcha solve took to long to execute job id {}, aborting.".format(jobID)
+        )


# ------------------------------------------------------------------------------- #
@@ -47,7 +47,7 @@ class User_Agent():
        for browser in user_agents:
            for release in user_agents[browser]['releases']:
                for platform in ['mobile', 'desktop']:
-                    if re.search(self.custom, ' '.join(user_agents[browser]['releases'][release]['User-Agent'][platform])):
+                    if re.search(re.escape(self.custom), ' '.join(user_agents[browser]['releases'][release]['User-Agent'][platform])):
                        self.browser = browser
                        self.loadHeaders(user_agents, release)
                        self.headers['User-Agent'] = self.custom
@@ -74,10 +74,11 @@ class User_Agent():
            sys.tracebacklimit = 0
            raise RuntimeError("Sorry you can't have mobile and desktop disabled at the same time.")

-        user_agents = json.load(
-            open(os.path.join(os.path.dirname(__file__), 'browsers.json'), 'r'),
-            object_pairs_hook=OrderedDict
-        )
+        with open(os.path.join(os.path.dirname(__file__), 'browsers.json'), 'r') as fp:
+            user_agents = json.load(
+                fp,
+                object_pairs_hook=OrderedDict
+            )

        if self.custom:
            if not self.tryMatchCustom(user_agents):
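The re.escape change matters because a custom User-Agent string is full of regex metacharacters; unescaped, characters like '+' and '.' are interpreted as pattern syntax. A quick illustration with a made-up UA string:

    import re

    custom = 'MyKodi+Agent/1.0'
    # Unescaped, '+' means "one or more of the preceding character",
    # so the pattern never matches the literal string:
    re.search(custom, 'MyKodi+Agent/1.0')             # None
    re.search(re.escape(custom), 'MyKodi+Agent/1.0')  # matches the UA literally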
@@ -13,9 +13,14 @@
                "TLS_CHACHA20_POLY1305_SHA256",
                "ECDHE-ECDSA-AES128-GCM-SHA256",
                "ECDHE-RSA-AES128-GCM-SHA256",
                "ECDHE-ECDSA-AES256-GCM-SHA384",
                "ECDHE-RSA-AES256-GCM-SHA384",
                "ECDHE-ECDSA-CHACHA20-POLY1305",
                "ECDHE-RSA-CHACHA20-POLY1305",
                "ECDHE-RSA-AES256-SHA",
                "AES128-GCM-SHA256",
                "AES256-GCM-SHA384",
                "AES128-SHA",
                "AES256-SHA"
            ],
            "releases": {
@@ -12814,10 +12819,15 @@
                "ECDHE-ECDSA-AES128-GCM-SHA256",
                "ECDHE-RSA-AES128-GCM-SHA256",
                "ECDHE-ECDSA-CHACHA20-POLY1305",
                "ECDHE-RSA-CHACHA20-POLY1305",
                "ECDHE-ECDSA-AES256-GCM-SHA384",
                "ECDHE-RSA-AES256-GCM-SHA384",
                "ECDHE-ECDSA-AES256-SHA",
                "ECDHE-ECDSA-AES128-SHA",
                "ECDHE-RSA-AES256-SHA",
                "DHE-RSA-AES128-SHA",
                "DHE-RSA-AES256-SHA",
                "AES128-SHA",
                "AES256-SHA"
            ],
            "releases": {
@@ -28,6 +28,7 @@ from __future__ import division
from __future__ import absolute_import
from future.builtins import range
from future.builtins import bytes
from future.builtins import str

__all__ = [
    'body_decode',
0 lib/future/backports/test/pystone.py Normal file → Executable file
@@ -11,9 +11,9 @@ an application may want to handle an exception like a regular
response.
"""
from __future__ import absolute_import, division, unicode_literals
-from ... import standard_library
+from future import standard_library

-from . import response as urllib_response
+from future.backports.urllib import response as urllib_response


__all__ = ['URLError', 'HTTPError', 'ContentTooShortError']

@@ -87,7 +87,7 @@ def clear_cache():
# decoding and encoding. If valid use cases are
# presented, we may relax this by using latin-1
# decoding internally for 3.3
-_implicit_encoding = 'utf8'
+_implicit_encoding = 'ascii'
_implicit_errors = 'strict'

def _noop(obj):
@@ -122,7 +122,7 @@ class _ResultMixinStr(object):
    """Standard approach to encoding parsed results from str to bytes"""
    __slots__ = ()

-    def encode(self, encoding='utf8', errors='strict'):
+    def encode(self, encoding='ascii', errors='strict'):
        return self._encoded_counterpart(*(x.encode(encoding, errors) for x in self))


@@ -130,7 +130,7 @@ class _ResultMixinBytes(object):
    """Standard approach to decoding parsed results from bytes to str"""
    __slots__ = ()

-    def decode(self, encoding='utf8', errors='strict'):
+    def decode(self, encoding='ascii', errors='strict'):
        return self._decoded_counterpart(*(x.decode(encoding, errors) for x in self))


@@ -730,7 +730,7 @@ def quote_from_bytes(bs, safe='/'):
    ###
    if isinstance(safe, str):
        # Normalize 'safe' by converting to bytes and removing non-ASCII chars
-        safe = str(safe).encode('utf8', 'ignore')
+        safe = str(safe).encode('ascii', 'ignore')
    else:
        ### For Python-Future:
        safe = bytes(safe)

@@ -827,7 +827,7 @@ class ProxyHandler(BaseHandler):
        if user and password:
            user_pass = '%s:%s' % (unquote(user),
                                   unquote(password))
-            creds = base64.b64encode(user_pass.encode()).decode("utf8")
+            creds = base64.b64encode(user_pass.encode()).decode("ascii")
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)
@@ -977,7 +977,7 @@ class AbstractBasicAuthHandler(object):
            user, pw = self.passwd.find_user_password(realm, host)
            if pw is not None:
                raw = "%s:%s" % (user, pw)
-                auth = "Basic " + base64.b64encode(raw.encode()).decode("utf8")
+                auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii")
                if req.headers.get(self.auth_header, None) == auth:
                    return None
                req.add_unredirected_header(self.auth_header, auth)
@@ -1080,7 +1080,7 @@ class AbstractDigestAuthHandler(object):
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime())
-        b = s.encode("utf8") + _randombytes(8)
+        b = s.encode("ascii") + _randombytes(8)
        dig = hashlib.sha1(b).hexdigest()
        return dig[:16]

@@ -1147,9 +1147,9 @@ class AbstractDigestAuthHandler(object):
    def get_algorithm_impls(self, algorithm):
        # lambdas assume digest modules are imported at the top level
        if algorithm == 'MD5':
-            H = lambda x: hashlib.md5(x.encode("utf8")).hexdigest()
+            H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest()
        elif algorithm == 'SHA':
-            H = lambda x: hashlib.sha1(x.encode("utf8")).hexdigest()
+            H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest()
        # XXX MD5-sess
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD
@@ -1829,13 +1829,13 @@ class URLopener(object):

        if proxy_passwd:
            proxy_passwd = unquote(proxy_passwd)
-            proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('utf8')
+            proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii')
        else:
            proxy_auth = None

        if user_passwd:
            user_passwd = unquote(user_passwd)
-            auth = base64.b64encode(user_passwd.encode()).decode('utf8')
+            auth = base64.b64encode(user_passwd.encode()).decode('ascii')
        else:
            auth = None
        http_conn = connection_factory(host)
@@ -2040,7 +2040,7 @@ class URLopener(object):
        msg.append('Content-type: %s' % type)
        if encoding == 'base64':
            # XXX is this encoding/decoding ok?
-            data = base64.decodebytes(data.encode('utf8')).decode('latin-1')
+            data = base64.decodebytes(data.encode('ascii')).decode('latin-1')
        else:
            data = unquote(data)
        msg.append('Content-Length: %d' % len(data))
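The utf8-to-ascii sweep through these handlers is safe by construction: base64's output alphabet is pure ASCII, so decoding the encoded credentials as ASCII can never fail, and it matches what CPython's own urllib does. For instance:

    import base64

    token = base64.b64encode('user:pässword'.encode('utf-8'))
    # The base64 alphabet is A-Z, a-z, 0-9, +, / and =, all ASCII,
    # so decoding with 'ascii' always succeeds, whatever the input was:
    token.decode('ascii')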
@@ -2498,17 +2498,7 @@ def _proxy_bypass_macosx_sysconf(host, proxy_settings):


if sys.platform == 'darwin':
-    try:
-        from _scproxy import _get_proxy_settings, _get_proxies
-    except:
-        try:
-            # By default use environment variables
-            _get_proxy_settings = getproxies_environment
-            _get_proxies = proxy_bypass_environment
-            getproxies = getproxies_environment
-            proxy_bypass = proxy_bypass_environment
-        except:
-            pass
+    from _scproxy import _get_proxy_settings, _get_proxies

    def proxy_bypass_macosx_sysconf(host):
        proxy_settings = _get_proxy_settings()
@@ -1,5 +1,5 @@
from __future__ import absolute_import, division, unicode_literals
-from ...builtins import str
+from future.builtins import str
""" robotparser.py

    Copyright (C) 2000  Bastian Kleineidam
@@ -13,8 +13,8 @@ from ...builtins import str
"""

# Was: import urllib.parse, urllib.request
-from .. import urllib
-from . import parse as _parse, request as _request
+from future.backports import urllib
+from future.backports.urllib import parse as _parse, request as _request
urllib.parse = _parse
urllib.request = _request
@@ -2,6 +2,7 @@
``python-future``: pure Python implementation of Python 3 round().
"""

+from __future__ import division
from future.utils import PYPY, PY26, bind_method

# Use the decimal module for simplicity of implementation (and
@@ -29,8 +30,6 @@ def newround(number, ndigits=None):
    if hasattr(number, '__round__'):
        return number.__round__(ndigits)

-    if ndigits < 0:
-        raise NotImplementedError('negative ndigits not supported yet')
    exponent = Decimal('10') ** (-ndigits)

    if PYPY:
@@ -42,15 +41,19 @@ def newround(number, ndigits=None):
        d = number
    else:
        if not PY26:
-            d = Decimal.from_float(number).quantize(exponent,
-                                                    rounding=ROUND_HALF_EVEN)
+            d = Decimal.from_float(number)
        else:
-            d = from_float_26(number).quantize(exponent, rounding=ROUND_HALF_EVEN)
+            d = from_float_26(number)

+    if ndigits < 0:
+        result = newround(d / exponent) * exponent
+    else:
+        result = d.quantize(exponent, rounding=ROUND_HALF_EVEN)

    if return_int:
-        return int(d)
+        return int(result)
    else:
-        return float(d)
+        return float(result)


### From Python 2.7's decimal.py. Only needed to support Py2.6:
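The reworked newround() gains negative-ndigits support by scaling: divide by 10**(-ndigits), round to an integer, multiply back. Sketching the ndigits = -2 case with the same Decimal machinery the function uses:

    from decimal import Decimal, ROUND_HALF_EVEN

    exponent = Decimal('10') ** 2      # ndigits = -2, so -ndigits = 2
    d = Decimal.from_float(1250.0)

    scaled = (d / exponent).quantize(Decimal(1), rounding=ROUND_HALF_EVEN)
    scaled * exponent                  # -> Decimal('1200'): banker's rounding picks the even neighbour of 12.5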
@@ -10,3 +10,9 @@ else:
    except ImportError:
        raise ImportError('The FileDialog module is missing. Does your Py2 '
                          'installation include tkinter?')
+
+    try:
+        from tkFileDialog import *
+    except ImportError:
+        raise ImportError('The tkFileDialog module is missing. Does your Py2 '
+                          'installation include tkinter?')
@@ -450,63 +450,35 @@ def install_aliases():
    # if hasattr(install_aliases, 'run_already'):
    #     return
    for (newmodname, newobjname, oldmodname, oldobjname) in MOVES:
-        try:
-            __import__(newmodname)
-            # We look up the module in sys.modules because __import__ just returns the
-            # top-level package:
-            newmod = sys.modules[newmodname]
-            # newmod.__future_module__ = True
+        __import__(newmodname)
+        # We look up the module in sys.modules because __import__ just returns the
+        # top-level package:
+        newmod = sys.modules[newmodname]
+        # newmod.__future_module__ = True

-            __import__(oldmodname)
-            oldmod = sys.modules[oldmodname]
+        __import__(oldmodname)
+        oldmod = sys.modules[oldmodname]

-            obj = getattr(oldmod, oldobjname)
-            setattr(newmod, newobjname, obj)
-        except:
-            try:
-                flog.warning('*** FUTURE ERROR in module %s %s ' % (str(oldmod), str(oldobjname)))
-            except:
-                pass
+        obj = getattr(oldmod, oldobjname)
+        setattr(newmod, newobjname, obj)

    # Hack for urllib so it appears to have the same structure on Py2 as on Py3
-    try:
-        import urllib
-        from future.backports.urllib import response
-        urllib.response = response
-        sys.modules['urllib.response'] = response
-        from future.backports.urllib import parse
-        urllib.parse = parse
-        sys.modules['urllib.parse'] = parse
-        from future.backports.urllib import error
-        urllib.error = error
-        sys.modules['urllib.error'] = error
-    except ImportError:
-        try:
-            flog.warning('*** FUTURE ERROR importing URLLIB.response, parse, error')
-            urllib.response = urllib
-            sys.modules['urllib.response'] = urllib
-            urllib.parse = urllib
-            sys.modules['urllib.parse'] = urllib
-            urllib.error = urllib
-            sys.modules['urllib.error'] = urllib
-        except:
-            pass
-    try:
-        from future.backports.urllib import request
-        urllib.request = request
-        sys.modules['urllib.request'] = request
-        from future.backports.urllib import robotparser
-        urllib.robotparser = robotparser
-        sys.modules['urllib.robotparser'] = robotparser
-    except ImportError:
-        try:
-            flog.warning('*** FUTURE ERROR importing URLLIB.Request')
-            urllib.request = urllib
-            sys.modules['urllib.request'] = urllib
-            urllib.robotparser = urllib
-            sys.modules['urllib.robotparser'] = urllib
-        except:
-            pass
+    import urllib
+    from future.backports.urllib import request
+    from future.backports.urllib import response
+    from future.backports.urllib import parse
+    from future.backports.urllib import error
+    from future.backports.urllib import robotparser
+    urllib.request = request
+    urllib.response = response
+    urllib.parse = parse
+    urllib.error = error
+    urllib.robotparser = robotparser
+    sys.modules['urllib.request'] = request
+    sys.modules['urllib.response'] = response
+    sys.modules['urllib.parse'] = parse
+    sys.modules['urllib.error'] = error
+    sys.modules['urllib.robotparser'] = robotparser

    # Patch the test module so it appears to have the same structure on Py2 as on Py3
    try:
@@ -518,11 +490,8 @@ def install_aliases():
    except ImportError:
        pass
    else:
-        try:
-            test.support = support
-            sys.modules['test.support'] = support
-        except:
-            pass
+        test.support = support
+        sys.modules['test.support'] = support

    # Patch the dbm module so it appears to have the same structure on Py2 as on Py3
    try:
@@ -530,26 +499,23 @@ def install_aliases():
    except ImportError:
        pass
    else:
-        try:
-            from future.moves.dbm import dumb
-            dbm.dumb = dumb
-            sys.modules['dbm.dumb'] = dumb
-            try:
-                from future.moves.dbm import gnu
-            except ImportError:
-                pass
-            else:
-                dbm.gnu = gnu
-                sys.modules['dbm.gnu'] = gnu
-            try:
-                from future.moves.dbm import ndbm
-            except ImportError:
-                pass
-            else:
-                dbm.ndbm = ndbm
-                sys.modules['dbm.ndbm'] = ndbm
-        except:
-            flog.warning('*** FUTURE ERROR importing MOVES.dbm')
+        from future.moves.dbm import dumb
+        dbm.dumb = dumb
+        sys.modules['dbm.dumb'] = dumb
+        try:
+            from future.moves.dbm import gnu
+        except ImportError:
+            pass
+        else:
+            dbm.gnu = gnu
+            sys.modules['dbm.gnu'] = gnu
+        try:
+            from future.moves.dbm import ndbm
+        except ImportError:
+            pass
+        else:
+            dbm.ndbm = ndbm
+            sys.modules['dbm.ndbm'] = ndbm

    # install_aliases.run_already = True
@@ -527,9 +527,9 @@ def implements_iterator(cls):
    return cls

if PY3:
-    get_next = lambda x: x.next
+    get_next = lambda x: x.__next__
else:
-    get_next = lambda x: x.__next__
+    get_next = lambda x: x.next


def encode_filename(filename):
@@ -11,7 +11,10 @@ import re
import os
import sys
import urllib
-import urlparse
+try:
+    import urlparse
+except:
+    import urllib.parse as urlparse
import datetime
import time
import traceback
@@ -161,7 +164,8 @@ def update_title(item):
    item.channel = new_item.channel                                             # Restore the channel name, in case we had changed it
    if item.tmdb_stat == True:
        if new_item.contentSerieName:                                           # If it is a series...
-            if config.get_setting("filter_languages", item.channel) >= 0:
+            filter_languages = config.get_setting("filter_languages", item.channel)
+            if filter_languages and filter_languages >= 0:
                item.title_from_channel = new_item.contentSerieName             # Save the initial title for Filtertools
                item.contentSerieName = new_item.contentSerieName               # Save the initial title for Filtertools
            else:
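The rewritten condition in update_title is not a pure refactor: with 'filter_languages and filter_languages >= 0', a stored setting of 0 now short-circuits to False, whereas the old '>= 0' test accepted it. In plain Python:

    filter_languages = 0
    filter_languages >= 0                             # True: the old check passed
    bool(filter_languages and filter_languages >= 0)  # False: 0 is falsy and short-circuits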
@@ -9,7 +9,6 @@ has been tested with Python2.7 and Python 3.4.

from __future__ import print_function

import argparse
import os
import stat
import sys
File diff suppressed because it is too large
2285 lib/httplib2/py2/__init__.py Normal file
File diff suppressed because it is too large
@@ -238,7 +238,15 @@ class socksocket(socket.socket):
        headers -    Additional or modified headers for the proxy connect
                     request.
        """
-        self.__proxy = (proxytype, addr, port, rdns, username, password, headers)
+        self.__proxy = (
+            proxytype,
+            addr,
+            port,
+            rdns,
+            username.encode() if username else None,
+            password.encode() if password else None,
+            headers,
+        )

    def __negotiatesocks5(self, destaddr, destport):
        """__negotiatesocks5(self,destaddr,destport)
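Encoding the username and password to bytes at setproxy() time keeps the rest of the module consistent on Python 3: the SOCKS5 negotiation appends them to a bytearray, and the auth header concatenates them with b":". A minimal illustration of why plain str would break there:

    username = 'user'.encode()   # b'user'
    password = 'pass'.encode()   # b'pass'

    auth = username + b":" + password   # works: bytes + bytes
    # 'user' + b":" would raise TypeError on Python 3: cannot concat str to bytes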
2077 lib/httplib2/py3/__init__.py Normal file
File diff suppressed because it is too large
2197 lib/httplib2/py3/cacerts.txt Normal file
File diff suppressed because it is too large
42 lib/httplib2/py3/certs.py Normal file
@@ -0,0 +1,42 @@
"""Utilities for certificate management."""

import os

certifi_available = False
certifi_where = None
try:
    from certifi import where as certifi_where
    certifi_available = True
except ImportError:
    pass

custom_ca_locater_available = False
custom_ca_locater_where = None
try:
    from ca_certs_locater import get as custom_ca_locater_where
    custom_ca_locater_available = True
except ImportError:
    pass


BUILTIN_CA_CERTS = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "cacerts.txt"
)


def where():
    env = os.environ.get("HTTPLIB2_CA_CERTS")
    if env is not None:
        if os.path.isfile(env):
            return env
        else:
            raise RuntimeError("Environment variable HTTPLIB2_CA_CERTS not a valid file")
    if custom_ca_locater_available:
        return custom_ca_locater_where()
    if certifi_available:
        return certifi_where()
    return BUILTIN_CA_CERTS


if __name__ == "__main__":
    print(where())
124 lib/httplib2/py3/iri2uri.py Normal file
@@ -0,0 +1,124 @@
# -*- coding: utf-8 -*-
"""Converts an IRI to a URI."""

__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = []
__version__ = "1.0.0"
__license__ = "MIT"

import urllib.parse

# Convert an IRI to a URI following the rules in RFC 3987
#
# The characters we need to encode and escape are defined in the spec:
#
# iprivate =  %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
#         / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
#         / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
#         / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
#         / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
#         / %xD0000-DFFFD / %xE1000-EFFFD

escape_range = [
    (0xA0, 0xD7FF),
    (0xE000, 0xF8FF),
    (0xF900, 0xFDCF),
    (0xFDF0, 0xFFEF),
    (0x10000, 0x1FFFD),
    (0x20000, 0x2FFFD),
    (0x30000, 0x3FFFD),
    (0x40000, 0x4FFFD),
    (0x50000, 0x5FFFD),
    (0x60000, 0x6FFFD),
    (0x70000, 0x7FFFD),
    (0x80000, 0x8FFFD),
    (0x90000, 0x9FFFD),
    (0xA0000, 0xAFFFD),
    (0xB0000, 0xBFFFD),
    (0xC0000, 0xCFFFD),
    (0xD0000, 0xDFFFD),
    (0xE1000, 0xEFFFD),
    (0xF0000, 0xFFFFD),
    (0x100000, 0x10FFFD),
]


def encode(c):
    retval = c
    i = ord(c)
    for low, high in escape_range:
        if i < low:
            break
        if i >= low and i <= high:
            retval = "".join(["%%%2X" % o for o in c.encode("utf-8")])
            break
    return retval


def iri2uri(uri):
    """Convert an IRI to a URI. Note that IRIs must be
    passed in a unicode strings. That is, do not utf-8 encode
    the IRI before passing it into the function."""
    if isinstance(uri, str):
        (scheme, authority, path, query, fragment) = urllib.parse.urlsplit(uri)
        authority = authority.encode("idna").decode("utf-8")
        # For each character in 'ucschar' or 'iprivate'
        # 1. encode as utf-8
        # 2. then %-encode each octet of that utf-8
        uri = urllib.parse.urlunsplit((scheme, authority, path, query, fragment))
        uri = "".join([encode(c) for c in uri])
    return uri


if __name__ == "__main__":
    import unittest

    class Test(unittest.TestCase):
        def test_uris(self):
            """Test that URIs are invariant under the transformation."""
            invariant = [
                "ftp://ftp.is.co.za/rfc/rfc1808.txt",
                "http://www.ietf.org/rfc/rfc2396.txt",
                "ldap://[2001:db8::7]/c=GB?objectClass?one",
                "mailto:John.Doe@example.com",
                "news:comp.infosystems.www.servers.unix",
                "tel:+1-816-555-1212",
                "telnet://192.0.2.16:80/",
                "urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
            ]
            for uri in invariant:
                self.assertEqual(uri, iri2uri(uri))

        def test_iri(self):
            """Test that the right type of escaping is done for each part of the URI."""
            self.assertEqual(
                "http://xn--o3h.com/%E2%98%84",
                iri2uri("http://\N{COMET}.com/\N{COMET}"),
            )
            self.assertEqual(
                "http://bitworking.org/?fred=%E2%98%84",
                iri2uri("http://bitworking.org/?fred=\N{COMET}"),
            )
            self.assertEqual(
                "http://bitworking.org/#%E2%98%84",
                iri2uri("http://bitworking.org/#\N{COMET}"),
            )
            self.assertEqual("#%E2%98%84", iri2uri("#\N{COMET}"))
            self.assertEqual(
                "/fred?bar=%E2%98%9A#%E2%98%84",
                iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"),
            )
            self.assertEqual(
                "/fred?bar=%E2%98%9A#%E2%98%84",
                iri2uri(iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")),
            )
            self.assertNotEqual(
                "/fred?bar=%E2%98%9A#%E2%98%84",
                iri2uri(
                    "/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode("utf-8")
                ),
            )

    unittest.main()
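The module's self-test above doubles as documentation: iri2uri() percent-encodes non-ASCII characters, is idempotent, and leaves plain-ASCII URIs untouched. For example (the import path is assumed from this repo layout):

    from lib.httplib2.py3.iri2uri import iri2uri

    iri2uri('http://www.ietf.org/rfc/rfc2396.txt')    # unchanged
    iri2uri('http://bitworking.org/?fred=\N{COMET}')  # -> 'http://bitworking.org/?fred=%E2%98%84'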
518 lib/httplib2/py3/socks.py Normal file
@@ -0,0 +1,518 @@
|
||||
"""SocksiPy - Python SOCKS module.
|
||||
|
||||
Version 1.00
|
||||
|
||||
Copyright 2006 Dan-Haim. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
1. Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
3. Neither the name of Dan Haim nor the names of his contributors may be used
|
||||
to endorse or promote products derived from this software without specific
|
||||
prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
|
||||
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||
EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
|
||||
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
|
||||
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.
|
||||
|
||||
This module provides a standard socket-like interface for Python
|
||||
for tunneling connections through SOCKS proxies.
|
||||
|
||||
Minor modifications made by Christopher Gilbert (http://motomastyle.com/) for
|
||||
use in PyLoris (http://pyloris.sourceforge.net/).
|
||||
|
||||
Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
|
||||
mainly to merge bug fixes found in Sourceforge.
|
||||
"""
|
||||
|
||||
import base64
|
||||
import socket
|
||||
import struct
|
||||
import sys
|
||||
|
||||
if getattr(socket, "socket", None) is None:
|
||||
raise ImportError("socket.socket missing, proxy support unusable")
|
||||
|
||||
PROXY_TYPE_SOCKS4 = 1
|
||||
PROXY_TYPE_SOCKS5 = 2
|
||||
PROXY_TYPE_HTTP = 3
|
||||
PROXY_TYPE_HTTP_NO_TUNNEL = 4
|
||||
|
||||
_defaultproxy = None
|
||||
_orgsocket = socket.socket
|
||||
|
||||
|
||||
class ProxyError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class GeneralProxyError(ProxyError):
|
||||
pass
|
||||
|
||||
|
||||
class Socks5AuthError(ProxyError):
|
||||
pass
|
||||
|
||||
|
||||
class Socks5Error(ProxyError):
|
||||
pass
|
||||
|
||||
|
||||
class Socks4Error(ProxyError):
|
||||
pass
|
||||
|
||||
|
||||
class HTTPError(ProxyError):
|
||||
pass
|
||||
|
||||
|
||||
_generalerrors = (
|
||||
"success",
|
||||
"invalid data",
|
||||
"not connected",
|
||||
"not available",
|
||||
"bad proxy type",
|
||||
"bad input",
|
||||
)
|
||||
|
||||
_socks5errors = (
|
||||
"succeeded",
|
||||
"general SOCKS server failure",
|
||||
"connection not allowed by ruleset",
|
||||
"Network unreachable",
|
||||
"Host unreachable",
|
||||
"Connection refused",
|
||||
"TTL expired",
|
||||
"Command not supported",
|
||||
"Address type not supported",
|
||||
"Unknown error",
|
||||
)
|
||||
|
||||
_socks5autherrors = (
|
||||
"succeeded",
|
||||
"authentication is required",
|
||||
"all offered authentication methods were rejected",
|
||||
"unknown username or invalid password",
|
||||
"unknown error",
|
||||
)
|
||||
|
||||
_socks4errors = (
|
||||
"request granted",
|
||||
"request rejected or failed",
|
||||
"request rejected because SOCKS server cannot connect to identd on the client",
|
||||
"request rejected because the client program and identd report different "
|
||||
"user-ids",
|
||||
"unknown error",
|
||||
)
|
||||
|
||||
|
||||
def setdefaultproxy(
|
||||
proxytype=None, addr=None, port=None, rdns=True, username=None, password=None
|
||||
):
|
||||
"""setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
|
||||
Sets a default proxy which all further socksocket objects will use,
|
||||
unless explicitly changed.
|
||||
"""
|
||||
global _defaultproxy
|
||||
_defaultproxy = (proxytype, addr, port, rdns, username, password)
|
||||
|
||||
|
||||
def wrapmodule(module):
|
||||
"""wrapmodule(module)
|
||||
|
||||
Attempts to replace a module's socket library with a SOCKS socket. Must set
|
||||
a default proxy using setdefaultproxy(...) first.
|
||||
This will only work on modules that import socket directly into the
|
||||
namespace;
|
||||
most of the Python Standard Library falls into this category.
|
||||
"""
|
||||
if _defaultproxy != None:
|
||||
module.socket.socket = socksocket
|
||||
else:
|
||||
raise GeneralProxyError((4, "no proxy specified"))
|
||||
|
||||
|
||||
class socksocket(socket.socket):
|
||||
"""socksocket([family[, type[, proto]]]) -> socket object
|
||||
Open a SOCKS enabled socket. The parameters are the same as
|
||||
those of the standard socket init. In order for SOCKS to work,
|
||||
you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None
|
||||
):
|
||||
_orgsocket.__init__(self, family, type, proto, _sock)
|
||||
if _defaultproxy != None:
|
||||
self.__proxy = _defaultproxy
|
||||
else:
|
||||
self.__proxy = (None, None, None, None, None, None)
|
||||
self.__proxysockname = None
|
||||
self.__proxypeername = None
|
||||
self.__httptunnel = True
|
||||
|
||||
def __recvall(self, count):
|
||||
"""__recvall(count) -> data
|
||||
Receive EXACTLY the number of bytes requested from the socket.
|
||||
Blocks until the required number of bytes have been received.
|
||||
"""
|
||||
data = self.recv(count)
|
||||
while len(data) < count:
|
||||
d = self.recv(count - len(data))
|
||||
if not d:
|
||||
raise GeneralProxyError((0, "connection closed unexpectedly"))
|
||||
data = data + d
|
||||
return data
|
||||
|
||||
def sendall(self, content, *args):
|
||||
""" override socket.socket.sendall method to rewrite the header
|
||||
for non-tunneling proxies if needed
|
||||
"""
|
||||
if not self.__httptunnel:
|
||||
content = self.__rewriteproxy(content)
|
||||
return super(socksocket, self).sendall(content, *args)
|
||||
|
||||
def __rewriteproxy(self, header):
|
||||
""" rewrite HTTP request headers to support non-tunneling proxies
|
||||
(i.e. those which do not support the CONNECT method).
|
||||
This only works for HTTP (not HTTPS) since HTTPS requires tunneling.
|
||||
"""
|
||||
host, endpt = None, None
|
||||
hdrs = header.split("\r\n")
|
||||
for hdr in hdrs:
|
||||
if hdr.lower().startswith("host:"):
|
||||
host = hdr
|
||||
elif hdr.lower().startswith("get") or hdr.lower().startswith("post"):
|
||||
endpt = hdr
|
||||
if host and endpt:
|
||||
hdrs.remove(host)
|
||||
hdrs.remove(endpt)
|
||||
host = host.split(" ")[1]
|
||||
endpt = endpt.split(" ")
|
||||
if self.__proxy[4] != None and self.__proxy[5] != None:
|
||||
hdrs.insert(0, self.__getauthheader())
|
||||
hdrs.insert(0, "Host: %s" % host)
|
||||
hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2]))
|
||||
return "\r\n".join(hdrs)
|
||||
|
||||
def __getauthheader(self):
|
||||
auth = self.__proxy[4] + b":" + self.__proxy[5]
|
||||
return "Proxy-Authorization: Basic " + base64.b64encode(auth).decode()
|
||||
|
||||
    def setproxy(
        self,
        proxytype=None,
        addr=None,
        port=None,
        rdns=True,
        username=None,
        password=None,
        headers=None,
    ):
        """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])

        Sets the proxy to be used.
        proxytype - The type of the proxy to be used. Three types
                    are supported: PROXY_TYPE_SOCKS4 (including socks4a),
                    PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP.
        addr -      The address of the server (IP or DNS).
        port -      The port of the server. Defaults to 1080 for SOCKS
                    servers and 8080 for HTTP proxy servers.
        rdns -      Should DNS queries be performed on the remote side
                    (rather than the local side). The default is True.
                    Note: This has no effect with SOCKS4 servers.
        username -  Username to authenticate with to the server.
                    The default is no authentication.
        password -  Password to authenticate with to the server.
                    Only relevant when username is also provided.
        headers -   Additional or modified headers for the proxy connect
                    request.
        """
        self.__proxy = (
            proxytype,
            addr,
            port,
            rdns,
            username.encode() if username else None,
            password.encode() if password else None,
            headers,
        )

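A brief usage sketch (proxy address and credentials are hypothetical):

s = socksocket()
s.setproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 1080,
           username="user", password="secret")  # credentials are optional
s.connect(("example.com", 80))  # the handshake is negotiated through the proxy
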
    def __negotiatesocks5(self, destaddr, destport):
        """__negotiatesocks5(self, destaddr, destport)
        Negotiates a connection through a SOCKS5 server.
        """
        # First we'll send the authentication packages we support.
        if (self.__proxy[4] is not None) and (self.__proxy[5] is not None):
            # The username/password details were supplied to the
            # setproxy method so we support the USERNAME/PASSWORD
            # authentication (in addition to the standard none).
            self.sendall(struct.pack("BBBB", 0x05, 0x02, 0x00, 0x02))
        else:
            # No username/password were entered, therefore we
            # only support connections with no authentication.
            self.sendall(struct.pack("BBB", 0x05, 0x01, 0x00))
        # We'll receive the server's response to determine which
        # method was selected
        chosenauth = self.__recvall(2)
        if chosenauth[0:1] != chr(0x05).encode():
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        # Check the chosen authentication method
        if chosenauth[1:2] == chr(0x00).encode():
            # No authentication is required
            pass
        elif chosenauth[1:2] == chr(0x02).encode():
            # Okay, we need to perform a basic username/password
            # authentication.
            packet = bytearray()
            packet.append(0x01)
            packet.append(len(self.__proxy[4]))
            packet.extend(self.__proxy[4])
            packet.append(len(self.__proxy[5]))
            packet.extend(self.__proxy[5])
            self.sendall(packet)
            authstat = self.__recvall(2)
            if authstat[0:1] != chr(0x01).encode():
                # Bad response
                self.close()
                raise GeneralProxyError((1, _generalerrors[1]))
            if authstat[1:2] != chr(0x00).encode():
                # Authentication failed
                self.close()
                raise Socks5AuthError((3, _socks5autherrors[3]))
            # Authentication succeeded
        else:
            # Reaching here is always bad
            self.close()
            # Compare a one-byte slice against the raw byte: indexing a bytes
            # object yields an int on Python 3, and chr(0xFF).encode() is a
            # two-byte UTF-8 sequence, so the original comparison never matched
            if chosenauth[1:2] == b"\xff":
                raise Socks5AuthError((2, _socks5autherrors[2]))
            else:
                raise GeneralProxyError((1, _generalerrors[1]))
        # Now we can request the actual connection
        req = struct.pack("BBB", 0x05, 0x01, 0x00)
        # If the given destination address is an IP address, we'll
        # use the IPv4 address request even if remote resolving was specified.
        try:
            ipaddr = socket.inet_aton(destaddr)
            req = req + chr(0x01).encode() + ipaddr
        except socket.error:
            # Well it's not an IP number, so it's probably a DNS name.
            if self.__proxy[3]:
                # Resolve remotely
                ipaddr = None
                req = (
                    req
                    + chr(0x03).encode()
                    + chr(len(destaddr)).encode()
                    + destaddr.encode()
                )
            else:
                # Resolve locally
                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
                req = req + chr(0x01).encode() + ipaddr
        req = req + struct.pack(">H", destport)
        self.sendall(req)
        # Get the response
        resp = self.__recvall(4)
        if resp[0:1] != chr(0x05).encode():
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        elif resp[1:2] != chr(0x00).encode():
            # Connection failed
            self.close()
            if ord(resp[1:2]) <= 8:
                raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
            else:
                raise Socks5Error((9, _socks5errors[9]))
        # Get the bound address/port
        elif resp[3:4] == chr(0x01).encode():
            boundaddr = self.__recvall(4)
        elif resp[3:4] == chr(0x03).encode():
            resp = resp + self.recv(1)
            boundaddr = self.__recvall(ord(resp[4:5]))
        else:
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        boundport = struct.unpack(">H", self.__recvall(2))[0]
        self.__proxysockname = (boundaddr, boundport)
        if ipaddr is not None:
            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
        else:
            self.__proxypeername = (destaddr, destport)

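For reference, the method-selection greeting sent at the top of this handshake is only a few bytes (per RFC 1928); a quick sketch:

import struct
# VER=5, NMETHODS=2, METHODS = no-auth (0x00) and username/password (0x02)
print(struct.pack("BBBB", 0x05, 0x02, 0x00, 0x02))  # b'\x05\x02\x00\x02'
# VER=5, NMETHODS=1, METHODS = no-auth only
print(struct.pack("BBB", 0x05, 0x01, 0x00))         # b'\x05\x01\x00'
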
    def getproxysockname(self):
        """getproxysockname() -> address info
        Returns the bound IP address and port number at the proxy.
        """
        return self.__proxysockname

    def getproxypeername(self):
        """getproxypeername() -> address info
        Returns the IP and port number of the proxy.
        """
        return _orgsocket.getpeername(self)

    def getpeername(self):
        """getpeername() -> address info
        Returns the IP address and port number of the destination
        machine (note: getproxypeername returns the proxy).
        """
        return self.__proxypeername

    def __negotiatesocks4(self, destaddr, destport):
        """__negotiatesocks4(self, destaddr, destport)
        Negotiates a connection through a SOCKS4 server.
        """
        # Check if the destination address provided is an IP address
        rmtrslv = False
        try:
            ipaddr = socket.inet_aton(destaddr)
        except socket.error:
            # It's a DNS name. Check where it should be resolved.
            if self.__proxy[3]:
                ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01)
                rmtrslv = True
            else:
                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
        # Construct the request packet
        req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
        # The username parameter is considered userid for SOCKS4
        if self.__proxy[4] is not None:
            req = req + self.__proxy[4]
        req = req + chr(0x00).encode()
        # DNS name if remote resolving is required
        # NOTE: This is actually an extension to the SOCKS4 protocol
        # called SOCKS4A and may not be supported in all cases.
        if rmtrslv:
            # destaddr is a str, so encode it before appending to the bytes request
            req = req + destaddr.encode() + chr(0x00).encode()
        self.sendall(req)
        # Get the response from the server
        resp = self.__recvall(8)
        if resp[0:1] != chr(0x00).encode():
            # Bad data
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        if resp[1:2] != chr(0x5A).encode():
            # Server returned an error
            self.close()
            if ord(resp[1:2]) in (91, 92, 93):
                raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))
            else:
                raise Socks4Error((94, _socks4errors[4]))
        # Get the bound address/port
        self.__proxysockname = (
            socket.inet_ntoa(resp[4:]),
            struct.unpack(">H", resp[2:4])[0],
        )
        # rmtrslv is a bool and can never be None (the original "!= None" check
        # was always true); when the name was resolved remotely, ipaddr is only
        # the 0.0.0.1 placeholder, so report the host name instead
        if rmtrslv:
            self.__proxypeername = (destaddr, destport)
        else:
            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)

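The fixed-size portion of the SOCKS4 request built above can be inspected directly; a small sketch (the IP is an arbitrary example value):

import struct, socket
# VN=4, CD=1 (CONNECT), destination port 80, then the 4-byte IPv4 address
req = struct.pack(">BBH", 0x04, 0x01, 80) + socket.inet_aton("93.184.216.34")
print(repr(req))  # b'\x04\x01\x00P]\xb8\xd8"'
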
    def __negotiatehttp(self, destaddr, destport):
        """__negotiatehttp(self, destaddr, destport)
        Negotiates a connection through an HTTP server.
        """
        # If we need to resolve locally, we do this now
        if not self.__proxy[3]:
            addr = socket.gethostbyname(destaddr)
        else:
            addr = destaddr
        headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
        wrote_host_header = False
        wrote_auth_header = False
        if self.__proxy[6] is not None:
            # items() instead of the Python 2-only iteritems()
            for key, val in self.__proxy[6].items():
                headers += [key, ": ", val, "\r\n"]
                # Accumulate across all headers instead of only keeping
                # the result for the last key in the loop
                wrote_host_header = wrote_host_header or key.lower() == "host"
                wrote_auth_header = wrote_auth_header or key.lower() == "proxy-authorization"
        if not wrote_host_header:
            headers += ["Host: ", destaddr, "\r\n"]
        if not wrote_auth_header:
            if self.__proxy[4] is not None and self.__proxy[5] is not None:
                headers += [self.__getauthheader(), "\r\n"]
        headers.append("\r\n")
        self.sendall("".join(headers).encode())
        # We read the response until we get the string "\r\n\r\n"
        resp = self.recv(1)
        while resp.find("\r\n\r\n".encode()) == -1:
            resp = resp + self.recv(1)
        # We just need the first line to check if the connection
        # was successful
        statusline = resp.splitlines()[0].split(" ".encode(), 2)
        if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()):
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        try:
            statuscode = int(statusline[1])
        except ValueError:
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        if statuscode != 200:
            self.close()
            raise HTTPError((statuscode, statusline[2]))
        self.__proxysockname = ("0.0.0.0", 0)
        self.__proxypeername = (addr, destport)

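For a destination of example.com:443 with credentials set, the request assembled above works out to something like the following (all values hypothetical):

import base64
auth = base64.b64encode(b"user:secret").decode()
request = ("CONNECT example.com:443 HTTP/1.1\r\n"
           "Host: example.com\r\n"
           "Proxy-Authorization: Basic %s\r\n"
           "\r\n" % auth)
print(request)
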
    def connect(self, destpair):
        """connect(self, destpair)
        Connects to the specified destination through a proxy.
        destpair - A tuple of the IP/DNS address and the port number
        (identical to socket's connect).
        To select the proxy server use setproxy().
        """
        # Do a minimal input check first
        if (
            (not isinstance(destpair, (list, tuple)))
            or (len(destpair) < 2)
            or (not isinstance(destpair[0], (str, bytes)))
            or (not isinstance(destpair[1], int))
        ):
            raise GeneralProxyError((5, _generalerrors[5]))
        if self.__proxy[0] == PROXY_TYPE_SOCKS5:
            if self.__proxy[2] is not None:
                portnum = self.__proxy[2]
            else:
                portnum = 1080
            _orgsocket.connect(self, (self.__proxy[1], portnum))
            self.__negotiatesocks5(destpair[0], destpair[1])
        elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
            if self.__proxy[2] is not None:
                portnum = self.__proxy[2]
            else:
                portnum = 1080
            _orgsocket.connect(self, (self.__proxy[1], portnum))
            self.__negotiatesocks4(destpair[0], destpair[1])
        elif self.__proxy[0] == PROXY_TYPE_HTTP:
            if self.__proxy[2] is not None:
                portnum = self.__proxy[2]
            else:
                portnum = 8080
            _orgsocket.connect(self, (self.__proxy[1], portnum))
            self.__negotiatehttp(destpair[0], destpair[1])
        elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL:
            if self.__proxy[2] is not None:
                portnum = self.__proxy[2]
            else:
                portnum = 8080
            _orgsocket.connect(self, (self.__proxy[1], portnum))
            if destpair[1] == 443:
                # HTTPS still needs a CONNECT tunnel
                self.__negotiatehttp(destpair[0], destpair[1])
            else:
                # Plain HTTP: skip CONNECT and rewrite requests on sendall()
                self.__httptunnel = False
        elif self.__proxy[0] is None:
            _orgsocket.connect(self, (destpair[0], destpair[1]))
        else:
            raise GeneralProxyError((4, _generalerrors[4]))
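A sketch of the no-tunnel mode this dispatch enables (proxy address hypothetical; string handling as in Python 2, which this vendored module targets for the no-tunnel path):

s = socksocket()
s.setproxy(PROXY_TYPE_HTTP_NO_TUNNEL, "proxy.local", 8080)
s.connect(("example.com", 80))  # port != 443: no CONNECT is issued
s.sendall("GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")  # rewritten to absolute-form
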
9
lib/reprlib/__init__.py
Normal file
@@ -0,0 +1,9 @@
from __future__ import absolute_import
import sys

if sys.version_info[0] < 3:
    from repr import *
else:
    raise ImportError('This package should not be accessible on Python 3. '
                      'Either you are trying to run from the python-future src folder '
                      'or your installation of python-future is corrupted.')
@@ -3,6 +3,11 @@
# Configuration parameters (kodi)
# ------------------------------------------------------------

#from builtins import str
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

import os
import re

@@ -62,10 +67,12 @@ def get_platform(full_version=False):
    ret = {}
    codename = {"10": "dharma", "11": "eden", "12": "frodo",
                "13": "gotham", "14": "helix", "15": "isengard",
                "16": "jarvis", "17": "krypton", "18": "leia"}
                "16": "jarvis", "17": "krypton", "18": "leia",
                "19": "matrix"}
    code_db = {'10': 'MyVideos37.db', '11': 'MyVideos60.db', '12': 'MyVideos75.db',
               '13': 'MyVideos78.db', '14': 'MyVideos90.db', '15': 'MyVideos93.db',
               '16': 'MyVideos99.db', '17': 'MyVideos107.db', '18': 'MyVideos116.db'}
               '16': 'MyVideos99.db', '17': 'MyVideos107.db', '18': 'MyVideos116.db',
               '19': 'MyVideos116.db'}

    num_version = xbmc.getInfoLabel('System.BuildVersion')
    num_version = re.match("\d+\.\d+", num_version).group(0)
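A sketch of how these two tables get used (the lookup itself is outside this hunk, so keying on the major version is an assumption here):

import re
codename = {"18": "leia", "19": "matrix"}            # excerpt of the table above
build = "19.0-ALPHA1 Git:20200420-abcdef0"           # hypothetical System.BuildVersion
num_version = re.match(r"\d+\.\d+", build).group(0)  # -> "19.0"
print(codename[num_version.split(".")[0]])           # -> "matrix"
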
@@ -334,7 +341,7 @@ def set_setting(name, value, channel="", server=""):

        __settings__.setSetting(name, value)

    except Exception, ex:
    except Exception as ex:
        from platformcode import logger
        logger.error("Error al convertir '%s' no se guarda el valor \n%s" % (name, ex))
        return None
@@ -346,7 +353,18 @@ def get_localized_string(code):
    dev = __language__(code)

    try:
        dev = dev.encode("utf-8")
        # Unicode to utf8
        if isinstance(dev, unicode):
            dev = dev.encode("utf8")
            if PY3: dev = dev.decode("utf8")

        # All encodings to utf8
        elif not PY3 and isinstance(dev, str):
            dev = unicode(dev, "utf8", errors="replace").encode("utf8")

        # Bytes encodings to utf8
        elif PY3 and isinstance(dev, bytes):
            dev = dev.decode("utf8")
    except:
        pass

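The branches above normalize every input shape to a native str; a standalone sketch of the same idea (hypothetical helper, not part of the addon):

import sys
PY3 = sys.version_info[0] >= 3

def to_native_utf8(value):
    # Python 3: bytes -> str; Python 2: unicode -> utf-8 encoded str
    if PY3 and isinstance(value, bytes):
        return value.decode("utf8")
    if not PY3 and isinstance(value, unicode):
        return value.encode("utf8")
    return value
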
@@ -356,7 +374,7 @@ def get_localized_category(categ):
    categories = {'movie': get_localized_string(30122), 'tvshow': get_localized_string(30123),
                  'anime': get_localized_string(30124), 'documentary': get_localized_string(30125),
                  'vos': get_localized_string(30136), 'sub-ita': get_localized_string(70566), 'adult': get_localized_string(30126),
                  'direct': get_localized_string(30137), 'torrent': get_localized_string(70015)}
                  'direct': get_localized_string(30137), 'torrent': get_localized_string(70015), 'live': get_localized_string(30138)}
    return categories[categ] if categ in categories else categ
@@ -391,6 +409,14 @@ def get_data_path():
    return dev


def get_icon():
    return xbmc.translatePath(__settings__.getAddonInfo('icon'))


def get_fanart():
    return xbmc.translatePath(__settings__.getAddonInfo('fanart'))


def get_cookie_data():
    import os
    ficherocookies = os.path.join(get_data_path(), 'cookies.dat')
@@ -3,17 +3,23 @@
# Updater (kodi)
# --------------------------------------------------------------------------------

import json
import os
import traceback
#from builtins import str
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

import traceback
import xbmc
import xbmcaddon
import threading
import subprocess
import time

from core import filetools
from core import jsontools
from platformcode import config, logger, platformtools

from core import jsontools
from core import filetools

json_data_file_name = 'custom_code.json'
@@ -21,10 +27,9 @@ def init():
    logger.info()

    """
    All code added to the add-on is wiped with every update. This function restores it automatically on each update.
    This lets the user keep their own code, at their own risk, and have it restored to the add-on every time it updates.
    All code added to the add-on is wiped with every update. This function restores it automatically on each update. This lets the user keep their own code, at their own risk, and have it restored to the add-on every time it updates.

    The mechanism works by copying the contents of the folder tree ".\userdata\addon_data\plugin.video.alfa\custom_code\..." onto
    The mechanism works by copying the contents of the folder tree "./userdata/addon_data/plugin.video.alfa/custom_code/..." onto
    the add-on's code folders. It does not verify the contents, it just dumps (replaces) the contents of "custom_code".

    The user stores their updated code, ready to be copied at any time, in the subfolders of "custom_code".
@@ -37,7 +42,7 @@ def init():
    from platformcode import custom_code
    custom_code.init()

    2.- At Kodi startup, it checks whether the "custom_code" folder exists in ".\userdata\addon_data\plugin.video.alfa\".
    2.- At Kodi startup, it checks whether the "custom_code" folder exists in "./userdata/addon_data/plugin.video.alfa/".
    If it does not exist, it creates it and exits without further action, giving the user the chance to copy their code onto that structure,
    so that the function dumps it onto the add-on at the next Kodi startup.
@@ -55,31 +60,45 @@ def init():

    Timings: copying 7 test files, the process took a tenth of a second.
    """

    try:
        # Delete the Alfa install .zip from the Packages folder, in case it is corrupted, so it can be downloaded again
        version = 'plugin.video.alfa-%s.zip' % config.get_addon_version(with_fix=False)
        filetools.remove(filetools.join(xbmc.translatePath('special://home'), 'addons', 'packages', version), True)

        # Check whether Kodi still has Video database files from earlier versions, and delete them
        verify_Kodi_video_DB()

        # LIBTORRENT: the Libtorrent binary is downloaded every time Alfa is updated
        try:
            threading.Thread(target=update_libtorrent).start()  # Independent thread, lives until Kodi exits
            time.sleep(2)  # Let initialization finish...
        except:  # If threading gives trouble, bail out
            logger.error(traceback.format_exc())

        # QUASAR: ask whether modifications should be applied to Quasar
        if not filetools.exists(os.path.join(config.get_data_path(), "quasar.json")) and not config.get_setting('addon_quasar_update', default=False):
        if not filetools.exists(filetools.join(config.get_data_path(), "quasar.json")) \
                and not config.get_setting('addon_quasar_update', default=False):
            question_update_external_addon("quasar")

        # QUASAR: apply the modifications to Quasar, if allowed and if it is installed
        if config.get_setting('addon_quasar_update', default=False):
        if config.get_setting('addon_quasar_update', default=False) or \
                (filetools.exists(filetools.join(config.get_data_path(),
                "quasar.json")) and not xbmc.getCondVisibility('System.HasAddon("plugin.video.quasar")')):
            if not update_external_addon("quasar"):
                platformtools.dialog_notification("Actualización Quasar", "Ha fallado. Consulte el log")

        # Does the "custom_code" folder exist? If not, create it and exit
        custom_code_dir = os.path.join(config.get_data_path(), 'custom_code')
        if os.path.exists(custom_code_dir) == False:
        custom_code_dir = filetools.join(config.get_data_path(), 'custom_code')
        if not filetools.exists(custom_code_dir):
            create_folder_structure(custom_code_dir)
            return

        else:
            # Does "custom_code.json" exist? If not, create it
            custom_code_json_path = config.get_runtime_path()
            custom_code_json = os.path.join(custom_code_json_path, 'custom_code.json')
            if os.path.exists(custom_code_json) == False:
            custom_code_json = filetools.join(custom_code_json_path, 'custom_code.json')
            if not filetools.exists(custom_code_json):
                create_json(custom_code_json_path)

            # Check whether the .json version and the add-on version match. If so, exit; if not, copy "custom_code" onto the add-on
@@ -92,13 +111,13 @@ def create_folder_structure(custom_code_dir):
    logger.info()

    # Create all the folders. The important one is "custom_code"; the others merely serve as a guide to avoid naming mistakes...
    os.mkdir(custom_code_dir)
    os.mkdir(filetools.join(custom_code_dir, 'channels'))
    os.mkdir(filetools.join(custom_code_dir, 'core'))
    os.mkdir(filetools.join(custom_code_dir, 'lib'))
    os.mkdir(filetools.join(custom_code_dir, 'platformcode'))
    os.mkdir(filetools.join(custom_code_dir, 'resources'))
    os.mkdir(filetools.join(custom_code_dir, 'servers'))
    filetools.mkdir(custom_code_dir)
    filetools.mkdir(filetools.join(custom_code_dir, 'channels'))
    filetools.mkdir(filetools.join(custom_code_dir, 'core'))
    filetools.mkdir(filetools.join(custom_code_dir, 'lib'))
    filetools.mkdir(filetools.join(custom_code_dir, 'platformcode'))
    filetools.mkdir(filetools.join(custom_code_dir, 'resources'))
    filetools.mkdir(filetools.join(custom_code_dir, 'servers'))

    return
@@ -108,9 +127,9 @@ def create_json(custom_code_json_path, json_name=json_data_file_name):

    # Save the json with an empty Alfa version, to allow the first copy to happen
    json_data_file = filetools.join(custom_code_json_path, json_name)
    json_file = open(json_data_file, "a+")
    json_file.write(json.dumps({"addon_version": ""}))
    json_file.close()
    if filetools.exists(json_data_file):
        filetools.remove(json_data_file)
    result = filetools.write(json_data_file, jsontools.dump({"addon_version": ""}))

    return
@@ -122,15 +141,21 @@ def verify_copy_folders(custom_code_dir, custom_code_json_path):
    json_data_file = filetools.join(custom_code_json_path, json_data_file_name)
    json_data = jsontools.load(filetools.read(json_data_file))
    current_version = config.get_addon_version(with_fix=False)
    if current_version == json_data['addon_version']:
        return
    if not json_data or not 'addon_version' in json_data:
        create_json(custom_code_json_path)
        json_data = jsontools.load(filetools.read(json_data_file))
    try:
        if current_version == json_data['addon_version']:
            return
    except:
        logger.error(traceback.format_exc(1))

    # Now copy the files from the Userdata area, custom_code, onto the add-on folders
    for root, folders, files in os.walk(custom_code_dir):
    for root, folders, files in filetools.walk(custom_code_dir):
        for file in files:
            input_file = filetools.join(root, file)
            output_file = input_file.replace(custom_code_dir, custom_code_json_path)
            if filetools.copy(input_file, output_file, silent=True) == False:
            if not filetools.copy(input_file, output_file, silent=True):
                return

    # Save the json with the current Alfa version, so the copy is not repeated until the next version
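The copy loop above is a straight mirror; a standalone sketch of the same pattern using only the standard library (hypothetical paths, shutil in place of the addon's filetools):

import os, shutil

def mirror_tree(src_dir, dst_dir):
    # Replicate every file under src_dir onto the same relative path under dst_dir
    for root, folders, files in os.walk(src_dir):
        for name in files:
            src = os.path.join(root, name)
            dst = src.replace(src_dir, dst_dir)
            if not os.path.isdir(os.path.dirname(dst)):
                os.makedirs(os.path.dirname(dst))
            shutil.copy2(src, dst)  # overwrite, keeping timestamps
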
@@ -160,38 +185,163 @@ def question_update_external_addon(addon_name):
        create_json(config.get_data_path(), "%s.json" % addon_name)

    return stat


def update_external_addon(addon_name):
    logger.info(addon_name)

    # Check that the addon is installed
    if xbmc.getCondVisibility('System.HasAddon("plugin.video.%s")' % addon_name):
        # Path of Alfa updates
        alfa_addon_updates = filetools.join(config.get_runtime_path(), filetools.join("lib", addon_name))

        # Destination path in the external addon
        __settings__ = xbmcaddon.Addon(id="plugin.video." + addon_name)
        if addon_name.lower() in ['quasar', 'elementum']:
            addon_path = filetools.join(xbmc.translatePath(__settings__.getAddonInfo('Path')), filetools.join("resources", filetools.join("site-packages", addon_name)))
    try:
        # Check that the addon is installed
        if xbmc.getCondVisibility('System.HasAddon("plugin.video.%s")' % addon_name):
            # Path of Alfa updates
            alfa_addon_updates_mig = filetools.join(config.get_runtime_path(), "lib")
            alfa_addon_updates = filetools.join(alfa_addon_updates_mig, addon_name)

            # Destination path in the external addon
            __settings__ = xbmcaddon.Addon(id="plugin.video." + addon_name)
            if addon_name.lower() in ['quasar', 'elementum']:
                addon_path_mig = filetools.join(xbmc.translatePath(__settings__.getAddonInfo('Path')),
                                                filetools.join("resources", "site-packages"))
                addon_path = filetools.join(addon_path_mig, addon_name)
            else:
                addon_path_mig = ''
                addon_path = ''

            # Are there modifications in Alfa? Copy them to the addon, including the PY3 migration folders
            if filetools.exists(alfa_addon_updates) and filetools.exists(addon_path):
                for root, folders, files in filetools.walk(alfa_addon_updates_mig):
                    if ('future' in root or 'past' in root) and not 'concurrent' in root:
                        for file in files:
                            alfa_addon_updates_mig_folder = root.replace(alfa_addon_updates_mig, addon_path_mig)
                            if not filetools.exists(alfa_addon_updates_mig_folder):
                                filetools.mkdir(alfa_addon_updates_mig_folder)
                            if file.endswith('.pyo') or file.endswith('.pyd'):
                                continue
                            input_file = filetools.join(root, file)
                            output_file = input_file.replace(alfa_addon_updates_mig, addon_path_mig)
                            if not filetools.copy(input_file, output_file, silent=True):
                                logger.error('Error en la copia de MIGRACIÓN: Input: %s o Output: %s' % (input_file, output_file))
                                return False

                for root, folders, files in filetools.walk(alfa_addon_updates):
                    for file in files:
                        input_file = filetools.join(root, file)
                        output_file = input_file.replace(alfa_addon_updates, addon_path)
                        if not filetools.copy(input_file, output_file, silent=True):
                            logger.error('Error en la copia: Input: %s o Output: %s' % (input_file, output_file))
                            return False
                return True
            else:
                logger.error('Alguna carpeta no existe: Alfa: %s o %s: %s' % (alfa_addon_updates, addon_name, addon_path))
        # Quasar has been uninstalled: reset the option
        else:
            addon_path = ''

        # Are there modifications in Alfa? Copy them to the addon
        if filetools.exists(alfa_addon_updates) and filetools.exists(addon_path):
            for root, folders, files in os.walk(alfa_addon_updates):
                for file in files:
                    input_file = filetools.join(root, file)
                    output_file = input_file.replace(alfa_addon_updates, addon_path)
                    if filetools.copy(input_file, output_file, silent=True) == False:
                        logger.error('Error en la copia: Input: %s o Output: %s' % (input_file, output_file))
                        return False
            config.set_setting('addon_quasar_update', False)
            if filetools.exists(filetools.join(config.get_data_path(), "%s.json" % addon_name)):
                filetools.remove(filetools.join(config.get_data_path(), "%s.json" % addon_name))
            return True
        else:
            logger.error('Alguna carpeta no existe: Alfa: %s o %s: %s' % (alfa_addon_updates, addon_name, addon_path))
    except:
        logger.error(traceback.format_exc())

    return False

def update_libtorrent():
    logger.info()

    if not config.get_setting("mct_buffer", server="torrent", default=""):
        default = config.get_setting("torrent_client", server="torrent", default=0)
        config.set_setting("torrent_client", default, server="torrent")
        config.set_setting("mct_buffer", "50", server="torrent")
        if config.get_setting("mct_download_path", server="torrent", default=config.get_setting("downloadpath")):
            config.set_setting("mct_download_path", config.get_setting("downloadpath"), server="torrent")
        config.set_setting("mct_background_download", True, server="torrent")
        config.set_setting("mct_rar_unpack", True, server="torrent")
        config.set_setting("bt_buffer", "50", server="torrent")
        if config.get_setting("bt_download_path", server="torrent", default=config.get_setting("downloadpath")):
            config.set_setting("bt_download_path", config.get_setting("downloadpath"), server="torrent")
        config.set_setting("mct_download_limit", "", server="torrent")
        config.set_setting("magnet2torrent", False, server="torrent")

    if not filetools.exists(filetools.join(config.get_runtime_path(), "custom_code.json")) or not \
            config.get_setting("unrar_path", server="torrent", default=""):

        path = filetools.join(config.get_runtime_path(), 'lib', 'rarfiles')
        creationflags = ''
        sufix = ''
        unrar = ''
        for device in filetools.listdir(path):
            if xbmc.getCondVisibility("system.platform.android") and 'android' not in device: continue
            if xbmc.getCondVisibility("system.platform.windows") and 'windows' not in device: continue
            if not xbmc.getCondVisibility("system.platform.windows") and not xbmc.getCondVisibility("system.platform.android") \
                    and ('android' in device or 'windows' in device): continue
            if 'windows' in device:
                creationflags = 0x08000000
                sufix = '.exe'
            else:
                creationflags = ''
                sufix = ''
            unrar = filetools.join(path, device, 'unrar%s') % sufix
            if not filetools.exists(unrar): unrar = ''
            if unrar:
                if not xbmc.getCondVisibility("system.platform.windows"):
                    try:
                        if xbmc.getCondVisibility("system.platform.android"):
                            # On Android, copy the binary to the system partition
                            unrar_org = unrar
                            unrar = filetools.join(xbmc.translatePath('special://xbmc/'), 'files').replace('/cache/apk/assets', '')
                            if not filetools.exists(unrar):
                                filetools.mkdir(unrar)
                            unrar = filetools.join(unrar, 'unrar')
                            filetools.copy(unrar_org, unrar, silent=True)

                        command = ['chmod', '777', '%s' % unrar]
                        p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                        output_cmd, error_cmd = p.communicate()
                        command = ['ls', '-l', unrar]
                        p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                        output_cmd, error_cmd = p.communicate()
                        xbmc.log('######## UnRAR file: %s' % str(output_cmd), xbmc.LOGNOTICE)
                    except:
                        xbmc.log('######## UnRAR ERROR in path: %s' % str(unrar), xbmc.LOGNOTICE)
                        logger.error(traceback.format_exc(1))

                try:
                    if xbmc.getCondVisibility("system.platform.windows"):
                        p = subprocess.Popen(unrar, stdout=subprocess.PIPE, stderr=subprocess.PIPE, creationflags=creationflags)
                    else:
                        p = subprocess.Popen(unrar, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    output_cmd, error_cmd = p.communicate()
                    if p.returncode != 0 or error_cmd:
                        xbmc.log('######## UnRAR returncode in module %s: %s, %s in %s' %
                                 (device, str(p.returncode), str(error_cmd), unrar), xbmc.LOGNOTICE)
                        unrar = ''
                    else:
                        xbmc.log('######## UnRAR OK in %s: %s' % (device, unrar), xbmc.LOGNOTICE)
                        break
                except:
                    xbmc.log('######## UnRAR ERROR in module %s: %s' % (device, unrar), xbmc.LOGNOTICE)
                    logger.error(traceback.format_exc(1))
                    unrar = ''

        if unrar: config.set_setting("unrar_path", unrar, server="torrent")

    if filetools.exists(filetools.join(config.get_runtime_path(), "custom_code.json")) and \
            config.get_setting("libtorrent_path", server="torrent", default=""):
        return

    try:
        from lib.python_libtorrent.python_libtorrent import get_libtorrent
    except Exception as e:
        logger.error(traceback.format_exc(1))
        if not PY3:
            e = unicode(str(e), "utf8", errors="replace").encode("utf8")
        config.set_setting("libtorrent_path", "", server="torrent")
        if not config.get_setting("libtorrent_error", server="torrent", default=''):
            config.set_setting("libtorrent_error", str(e), server="torrent")

    return

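The probe above simply runs each candidate unrar binary and checks its exit status; a condensed sketch of that pattern (hypothetical binary path, stripped of the addon's platform branching):

import subprocess

def binary_works(path):
    # Run the binary with no arguments; a clean exit and empty stderr
    # are taken as "usable", mirroring the check above
    try:
        p = subprocess.Popen(path, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        return p.returncode == 0 and not err
    except OSError:
        return False
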
def verify_Kodi_video_DB():
    logger.info()
    import random
@@ -204,12 +354,12 @@ def verify_Kodi_video_DB():
    path = filetools.join(xbmc.translatePath("special://masterprofile/"), "Database")
    if filetools.exists(path):
        platform = config.get_platform(full_version=True)
        if platform:
        if platform and platform['num_version'] <= 19:
            db_files = filetools.walk(path)
            if filetools.exists(filetools.join(path, platform['video_db'])):
                for root, folders, files in db_files:
                    for file in files:
                        if file != platform['video_db']:
                        if platform['video_db'] not in file:
                            if file.startswith('MyVideos'):
                                randnum = str(random.randrange(1, 999999))
                                filetools.rename(filetools.join(path, file), 'OLD_' + randnum + '_' + file)

@@ -5,17 +5,25 @@
# Based on code from the Mega add-on (xbmchub.com)
# ---------------------------------------------------------------------------

from __future__ import division
from future import standard_library
standard_library.install_aliases()
#from builtins import str
from past.utils import old_div
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

import urllib.request, urllib.parse, urllib.error

import os
import re
import socket
import threading
import time
import urllib

import urllib2
import xbmc
import xbmcgui

from core import downloadtools
from platformcode import config, logger

@@ -43,7 +51,7 @@ def download_and_play(url, file_name, download_path):

    while not cancelled and download_thread.isAlive():
        dialog.update(download_thread.get_progress(), config.get_localized_string(60313),
                      "Velocidad: " + str(int(download_thread.get_speed() / 1024)) + " KB/s " + str(
                      "Velocidad: " + str(int(old_div(download_thread.get_speed(), 1024))) + " KB/s " + str(
                          download_thread.get_actual_size()) + "MB de " + str(
                          download_thread.get_total_size()) + "MB",
                      "Tiempo restante: " + str(downloadtools.sec_to_hms(download_thread.get_remaining_time())))
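old_div reproduces Python 2 division semantics under "from __future__ import division": floor division when both operands are ints, true division otherwise. A quick sketch:

from past.utils import old_div

print(old_div(7, 2))    # 3   (both ints -> floor division, as in Python 2)
print(old_div(7.0, 2))  # 3.5 (any float -> true division)
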
@@ -232,7 +240,7 @@ class DownloadThread(threading.Thread):
        for additional_header in additional_headers:
            logger.info("additional_header: " + additional_header)
            name = re.findall("(.*?)=.*?", additional_header)[0]
            value = urllib.unquote_plus(re.findall(".*?=(.*?)$", additional_header)[0])
            value = urllib.parse.unquote_plus(re.findall(".*?=(.*?)$", additional_header)[0])
            headers.append([name, value])

        self.url = self.url.split("|")[0]
@@ -242,18 +250,18 @@ class DownloadThread(threading.Thread):
        socket.setdefaulttimeout(60)

        # Create the request and add the headers
        h = urllib2.HTTPHandler(debuglevel=0)
        request = urllib2.Request(self.url)
        h = urllib.request.HTTPHandler(debuglevel=0)
        request = urllib.request.Request(self.url)
        for header in headers:
            logger.info("Header=" + header[0] + ": " + header[1])
            request.add_header(header[0], header[1])

        # Send the request
        opener = urllib2.build_opener(h)
        urllib2.install_opener(opener)
        opener = urllib.request.build_opener(h)
        urllib.request.install_opener(opener)
        try:
            connexion = opener.open(request)
        except urllib2.HTTPError, e:
        except urllib.error.HTTPError as e:
            logger.error("error %d (%s) al abrir la url %s" % (e.code, e.msg, self.url))
            # print e.code
            # print e.msg
@@ -315,10 +323,10 @@ class DownloadThread(threading.Thread):
            bloqueleido = connexion.read(blocksize)
            after = time.time()
            if (after - before) > 0:
                self.velocidad = len(bloqueleido) / ((after - before))
                self.velocidad = old_div(len(bloqueleido), ((after - before)))
                falta = totalfichero - grabado
                if self.velocidad > 0:
                    self.tiempofalta = falta / self.velocidad
                    self.tiempofalta = old_div(falta, self.velocidad)
                else:
                    self.tiempofalta = 0
            break

613
platformcode/envtal.py
Normal file
@@ -0,0 +1,613 @@
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# Locates the most common environment variables (kodi)
# ------------------------------------------------------------

from __future__ import division
# from builtins import str
from past.utils import old_div
import sys

PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

import xbmc
import xbmcaddon

import os
import subprocess
import re
import platform

try:
    import ctypes
except:
    pass
import traceback

from core import filetools, scrapertools
from platformcode import logger, config, platformtools

def get_environment():
    """
    Returns the most common OS, Kodi and Alfa environment variables,
    needed for diagnosing failures.
    """

    try:
        import base64
        import ast

        environment = config.get_platform(full_version=True)
        environment['num_version'] = str(environment['num_version'])
        environment['python_version'] = str(platform.python_version())

        environment['os_release'] = str(platform.release())
        if xbmc.getCondVisibility("system.platform.Windows"):
            try:
                if platform._syscmd_ver()[2]:
                    environment['os_release'] = str(platform._syscmd_ver()[2])
            except:
                pass
        environment['prod_model'] = ''
        if xbmc.getCondVisibility("system.platform.Android"):
            environment['os_name'] = 'Android'
            try:
                for label_a in subprocess.check_output('getprop').split('\n'):
                    if 'build.version.release' in label_a:
                        environment['os_release'] = str(scrapertools.find_single_match(label_a, ':\s*\[(.*?)\]$'))
                    if 'product.model' in label_a:
                        environment['prod_model'] = str(scrapertools.find_single_match(label_a, ':\s*\[(.*?)\]$'))
            except:
                try:
                    for label_a in filetools.read(os.environ['ANDROID_ROOT'] + '/build.prop').split():
                        if 'build.version.release' in label_a:
                            environment['os_release'] = str(scrapertools.find_single_match(label_a, '=(.*?)$'))
                        if 'product.model' in label_a:
                            environment['prod_model'] = str(scrapertools.find_single_match(label_a, '=(.*?)$'))
                except:
                    pass

        elif xbmc.getCondVisibility("system.platform.Linux.RaspberryPi"):
            environment['os_name'] = 'RaspberryPi'
        else:
            environment['os_name'] = str(platform.system())

        environment['machine'] = str(platform.machine())
        environment['architecture'] = str(sys.maxsize > 2 ** 32 and "64-bit" or "32-bit")
        environment['language'] = str(xbmc.getInfoLabel('System.Language'))

        environment['cpu_usage'] = str(xbmc.getInfoLabel('System.CpuUsage'))

        environment['mem_total'] = str(xbmc.getInfoLabel('System.Memory(total)')).replace('MB', '').replace('KB', '')
        environment['mem_free'] = str(xbmc.getInfoLabel('System.Memory(free)')).replace('MB', '').replace('KB', '')
        if not environment['mem_total'] or not environment['mem_free']:
            try:
                if environment['os_name'].lower() == 'windows':
                    kernel32 = ctypes.windll.kernel32
                    c_ulong = ctypes.c_ulong
                    c_ulonglong = ctypes.c_ulonglong

                    class MEMORYSTATUS(ctypes.Structure):
                        _fields_ = [
                            ('dwLength', c_ulong),
                            ('dwMemoryLoad', c_ulong),
                            ('dwTotalPhys', c_ulonglong),
                            ('dwAvailPhys', c_ulonglong),
                            ('dwTotalPageFile', c_ulonglong),
                            ('dwAvailPageFile', c_ulonglong),
                            ('dwTotalVirtual', c_ulonglong),
                            ('dwAvailVirtual', c_ulonglong),
                            ('availExtendedVirtual', c_ulonglong)
                        ]

                    memoryStatus = MEMORYSTATUS()
                    memoryStatus.dwLength = ctypes.sizeof(MEMORYSTATUS)
                    kernel32.GlobalMemoryStatus(ctypes.byref(memoryStatus))
                    environment['mem_total'] = str(old_div(int(memoryStatus.dwTotalPhys), (1024 ** 2)))
                    environment['mem_free'] = str(old_div(int(memoryStatus.dwAvailPhys), (1024 ** 2)))

                else:
                    with open('/proc/meminfo') as f:
                        meminfo = f.read()
                    environment['mem_total'] = str(
                        old_div(int(re.search(r'MemTotal:\s+(\d+)', meminfo).groups()[0]), 1024))
                    environment['mem_free'] = str(
                        old_div(int(re.search(r'MemAvailable:\s+(\d+)', meminfo).groups()[0]), 1024))
            except:
                environment['mem_total'] = ''
                environment['mem_free'] = ''

        try:
            environment['kodi_buffer'] = '20'
            environment['kodi_bmode'] = '0'
            environment['kodi_rfactor'] = '4.0'
            if filetools.exists(filetools.join(xbmc.translatePath("special://userdata"), "advancedsettings.xml")):
                advancedsettings = filetools.read(filetools.join(xbmc.translatePath("special://userdata"),
                                                                 "advancedsettings.xml")).split('\n')
                for label_a in advancedsettings:
                    if 'memorysize' in label_a:
                        environment['kodi_buffer'] = str(old_div(int(scrapertools.find_single_match
                                                                     (label_a, '>(\d+)<\/')), 1024 ** 2))
                    if 'buffermode' in label_a:
                        environment['kodi_bmode'] = str(scrapertools.find_single_match
                                                        (label_a, '>(\d+)<\/'))
                    if 'readfactor' in label_a:
                        environment['kodi_rfactor'] = str(scrapertools.find_single_match
                                                          (label_a, '>(.*?)<\/'))
        except:
            pass

        environment['userdata_path'] = str(xbmc.translatePath(config.get_data_path()))
        try:
            if environment['os_name'].lower() == 'windows':
                free_bytes = ctypes.c_ulonglong(0)
                ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(environment['userdata_path']),
                                                           None, None, ctypes.pointer(free_bytes))
                environment['userdata_free'] = str(round(float(free_bytes.value) / (1024 ** 3), 3))
            else:
                disk_space = os.statvfs(environment['userdata_path'])
                # statvfs results are read-only, so keep the f_bsize fallback
                # in a local variable (the original tried to assign to the struct)
                frsize = disk_space.f_frsize or disk_space.f_bsize
                environment['userdata_free'] = str(round((float(disk_space.f_bavail) /
                                                          (1024 ** 3)) * float(frsize), 3))
        except:
            environment['userdata_free'] = '?'

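The same free-space computation, isolated (hypothetical path; os.statvfs is Unix-only, and the fields are read-only, hence the local variable for the fallback):

import os

def free_gb(path):
    st = os.statvfs(path)
    frsize = st.f_frsize or st.f_bsize  # block-size fallback
    return round(st.f_bavail * frsize / float(1024 ** 3), 3)

print(free_gb("/tmp"))  # e.g. 12.345
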
        try:
            environment['videolab_series'] = '?'
            environment['videolab_episodios'] = '?'
            environment['videolab_pelis'] = '?'
            environment['videolab_path'] = str(xbmc.translatePath(config.get_videolibrary_path()))
            if filetools.exists(filetools.join(environment['videolab_path'],
                                               config.get_setting("folder_tvshows"))):
                environment['videolab_series'] = str(len(filetools.listdir(filetools.join(environment['videolab_path'],
                                                                                          config.get_setting("folder_tvshows")))))
                counter = 0
                for root, folders, files in filetools.walk(filetools.join(environment['videolab_path'],
                                                                          config.get_setting("folder_tvshows"))):
                    for file in files:
                        if file.endswith('.strm'): counter += 1
                environment['videolab_episodios'] = str(counter)
            if filetools.exists(filetools.join(environment['videolab_path'],
                                               config.get_setting("folder_movies"))):
                environment['videolab_pelis'] = str(len(filetools.listdir(filetools.join(environment['videolab_path'],
                                                                                         config.get_setting("folder_movies")))))
        except:
            pass
        try:
            video_updates = ['No', 'Inicio', 'Una vez', 'Inicio+Una vez']
            environment['videolab_update'] = str(video_updates[config.get_setting("update", "videolibrary")])
        except:
            environment['videolab_update'] = '?'
        try:
            if environment['os_name'].lower() == 'windows':
                free_bytes = ctypes.c_ulonglong(0)
                ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(environment['videolab_path']),
                                                           None, None, ctypes.pointer(free_bytes))
                environment['videolab_free'] = str(round(float(free_bytes.value) / (1024 ** 3), 3))
            else:
                disk_space = os.statvfs(environment['videolab_path'])
                frsize = disk_space.f_frsize or disk_space.f_bsize  # same read-only fix as above
                environment['videolab_free'] = str(round((float(disk_space.f_bavail) /
                                                          (1024 ** 3)) * float(frsize), 3))
        except:
            environment['videolab_free'] = '?'

        environment['torrent_list'] = []
        environment['torrentcli_option'] = ''
        environment['torrent_error'] = ''
        environment['torrentcli_rar'] = config.get_setting("mct_rar_unpack", server="torrent", default=True)
        environment['torrentcli_backgr'] = config.get_setting("mct_background_download", server="torrent", default=True)
        environment['torrentcli_lib_path'] = config.get_setting("libtorrent_path", server="torrent", default="")
        if environment['torrentcli_lib_path']:
            lib_path = 'Activo'
        else:
            lib_path = 'Inactivo'
        environment['torrentcli_unrar'] = config.get_setting("unrar_path", server="torrent", default="")
        if environment['torrentcli_unrar']:
            if xbmc.getCondVisibility("system.platform.Android"):
                unrar = 'Android'
            else:
                unrar, bin = filetools.split(environment['torrentcli_unrar'])
                unrar = unrar.replace('\\', '/')
                if not unrar.endswith('/'):
                    unrar = unrar + '/'
                unrar = scrapertools.find_single_match(unrar, '\/([^\/]+)\/$').capitalize()
        else:
            unrar = 'Inactivo'
        torrent_id = config.get_setting("torrent_client", server="torrent", default=0)
        environment['torrentcli_option'] = str(torrent_id)
        torrent_options = platformtools.torrent_client_installed()
        if lib_path == 'Activo':
            torrent_options = ['MCT'] + torrent_options
        torrent_options = ['BT'] + torrent_options
        environment['torrent_list'].append({'Torrent_opt': str(torrent_id), 'Libtorrent': lib_path,
                                            'RAR_Auto': str(environment['torrentcli_rar']),
                                            'RAR_backgr': str(environment['torrentcli_backgr']),
                                            'UnRAR': unrar})
        environment['torrent_error'] = config.get_setting("libtorrent_error", server="torrent", default="")
        if environment['torrent_error']:
            environment['torrent_list'].append({'Libtorrent_error': environment['torrent_error']})

        for torrent_option in torrent_options:
            cliente = dict()
            cliente['D_load_Path'] = ''
            cliente['Libre'] = '?'
            cliente['Plug_in'] = torrent_option.replace('Plugin externo: ', '')
            if cliente['Plug_in'] == 'BT':
                cliente['D_load_Path'] = str(config.get_setting("bt_download_path", server="torrent", default=''))
                if not cliente['D_load_Path']: continue
                cliente['Buffer'] = str(config.get_setting("bt_buffer", server="torrent", default=50))
            elif cliente['Plug_in'] == 'MCT':
                cliente['D_load_Path'] = str(config.get_setting("mct_download_path", server="torrent", default=''))
                if not cliente['D_load_Path']: continue
                cliente['Buffer'] = str(config.get_setting("mct_buffer", server="torrent", default=50))
            elif xbmc.getCondVisibility('System.HasAddon("plugin.video.%s")' % cliente['Plug_in']):
                __settings__ = xbmcaddon.Addon(id="plugin.video.%s" % cliente['Plug_in'])
                cliente['Plug_in'] = cliente['Plug_in'].capitalize()
                if cliente['Plug_in'] == 'Torrenter':
                    cliente['D_load_Path'] = str(xbmc.translatePath(__settings__.getSetting('storage')))
                    if not cliente['D_load_Path']:
                        cliente['D_load_Path'] = str(filetools.join(xbmc.translatePath("special://home/"),
                                                                    "cache", "xbmcup", "plugin.video.torrenter",
                                                                    "Torrenter"))
                    cliente['Buffer'] = str(__settings__.getSetting('pre_buffer_bytes'))
                else:
                    cliente['D_load_Path'] = str(xbmc.translatePath(__settings__.getSetting('download_path')))
                    cliente['Buffer'] = str(__settings__.getSetting('buffer_size'))
                    if __settings__.getSetting('download_storage') == '1' and __settings__.getSetting('memory_size'):
                        cliente['Memoria'] = str(__settings__.getSetting('memory_size'))

            if cliente['D_load_Path']:
                try:
                    if environment['os_name'].lower() == 'windows':
                        free_bytes = ctypes.c_ulonglong(0)
                        ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(cliente['D_load_Path']),
                                                                   None, None, ctypes.pointer(free_bytes))
                        cliente['Libre'] = str(round(float(free_bytes.value) /
                                                     (1024 ** 3), 3)).replace('.', ',')
                    else:
                        disk_space = os.statvfs(cliente['D_load_Path'])
                        frsize = disk_space.f_frsize or disk_space.f_bsize  # same read-only fix as above
                        cliente['Libre'] = str(round((float(disk_space.f_bavail) /
                                                      (1024 ** 3)) * float(frsize), 3)).replace('.', ',')
                except:
                    pass
            environment['torrent_list'].append(cliente)

        environment['proxy_active'] = ''
        try:
            proxy_channel_bloqued_str = base64.b64decode(config.get_setting('proxy_channel_bloqued')).decode('utf-8')
            proxy_channel_bloqued = ast.literal_eval(proxy_channel_bloqued_str)
            for channel_bloqued, proxy_active in list(proxy_channel_bloqued.items()):
                if proxy_active != 'OFF':
                    environment['proxy_active'] += channel_bloqued + ', '
        except:
            pass
        if not environment['proxy_active']: environment['proxy_active'] = 'OFF'
        environment['proxy_active'] = environment['proxy_active'].rstrip(', ')

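The setting decoded above is a base64-wrapped Python dict literal; a sketch of the round trip (the stored value here is hypothetical):

import ast, base64

raw = base64.b64encode(b"{'channelA': 'ON', 'channelB': 'OFF'}")
decoded = ast.literal_eval(base64.b64decode(raw).decode('utf-8'))
print([ch for ch, state in decoded.items() if state != 'OFF'])  # ['channelA']
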
        for root, folders, files in filetools.walk(xbmc.translatePath("special://logpath/")):
            for file in files:
                if file.lower() in ['kodi.log', 'jarvis.log', 'spmc.log', 'cemc.log',
                                    'mygica.log', 'wonderbox.log', 'leiapp.log',
                                    'leianmc.log', 'kodiapp.log', 'anmc.log',
                                    'latin-anmc.log']:
                    environment['log_path'] = str(filetools.join(root, file))
                    break
            else:
                environment['log_path'] = ''
            break

        if environment['log_path']:
            environment['log_size_bytes'] = str(filetools.getsize(environment['log_path']))
            environment['log_size'] = str(round(float(environment['log_size_bytes']) /
                                                (1024 * 1024), 3))
        else:
            environment['log_size_bytes'] = ''
            environment['log_size'] = ''

        environment['debug'] = str(config.get_setting('debug'))
        environment['addon_version'] = str(config.get_addon_version())

    except:
        logger.error(traceback.format_exc())
        # On any failure, return a skeleton with every key blanked out
        environment = dict.fromkeys([
            'log_size', 'cpu_usage', 'python_version', 'log_path', 'userdata_free',
            'mem_total', 'machine', 'platform', 'videolab_path', 'num_version',
            'os_name', 'video_db', 'userdata_path', 'log_size_bytes', 'name_version',
            'language', 'mem_free', 'prod_model', 'proxy_active', 'architecture',
            'os_release', 'videolab_free', 'kodi_buffer', 'kodi_bmode', 'kodi_rfactor',
            'videolab_series', 'videolab_episodios', 'videolab_pelis', 'videolab_update',
            'debug', 'addon_version', 'torrentcli_option', 'torrentcli_rar',
            'torrentcli_lib_path', 'torrentcli_unrar', 'torrent_error'], '')
        environment['torrent_list'] = []

    return environment

def list_env(environment={}):
    if not environment:
        environment = get_environment()

    if environment['debug'] == 'False':
        logger.log_enable(True)

    logger.info('----------------------------------------------')
    logger.info('Variables de entorno Alfa: ' + environment['addon_version'] +
                ' Debug: ' + environment['debug'])
    logger.info("----------------------------------------------")

    logger.info(environment['os_name'] + ' ' + environment['prod_model'] + ' ' +
                environment['os_release'] + ' ' + environment['machine'] + ' ' +
                environment['architecture'] + ' ' + environment['language'])

    logger.info('Kodi ' + environment['num_version'] + ', Vídeo: ' +
                environment['video_db'] + ', Python ' + environment['python_version'])

    if environment['cpu_usage']:
        logger.info('CPU: ' + environment['cpu_usage'])

    if environment['mem_total'] or environment['mem_free']:
        logger.info('Memoria: Total: ' + environment['mem_total'] + ' MB / Disp.: ' +
                    environment['mem_free'] + ' MB / Buffers: ' +
                    str(int(environment['kodi_buffer']) * 3) + ' MB / Buffermode: ' +
                    environment['kodi_bmode'] + ' / Readfactor: ' +
                    environment['kodi_rfactor'])

    logger.info('Userdata: ' + environment['userdata_path'] + ' - Libre: ' +
                environment['userdata_free'].replace('.', ',') + ' GB')

    logger.info('Videoteca: Series/Epis: ' + environment['videolab_series'] + '/' +
                environment['videolab_episodios'] + ' - Pelis: ' +
                environment['videolab_pelis'] + ' - Upd: ' +
                environment['videolab_update'] + ' - Path: ' +
                environment['videolab_path'] + ' - Libre: ' +
                environment['videolab_free'].replace('.', ',') + ' GB')

    if environment['torrent_list']:
        for x, cliente in enumerate(environment['torrent_list']):
            if x == 0:
                cliente_alt = cliente.copy()
                del cliente_alt['Torrent_opt']
                logger.info('Torrent: Opt: %s, %s' % (str(cliente['Torrent_opt']),
                            str(cliente_alt).replace('{', '').replace('}', '')
                            .replace("'", '').replace('_', ' ')))
            elif x == 1 and environment['torrent_error']:
                logger.info('- ' + str(cliente).replace('{', '').replace('}', '')
                            .replace("'", '').replace('_', ' '))
            else:
                cliente_alt = cliente.copy()
                del cliente_alt['Plug_in']
                cliente_alt['Libre'] = cliente_alt['Libre'].replace('.', ',') + ' GB'
                logger.info('- %s: %s' % (str(cliente['Plug_in']), str(cliente_alt)
                            .replace('{', '').replace('}', '').replace("'", '')
                            .replace('\\\\', '\\')))

    logger.info('Proxy: ' + environment['proxy_active'])

    logger.info('TAMAÑO del LOG: ' + environment['log_size'].replace('.', ',') + ' MB')
    logger.info("----------------------------------------------")

    if environment['debug'] == 'False':
        logger.log_enable(False)

    return environment

def paint_env(item, environment={}):
    from core.item import Item
    from channelselector import get_thumb

    if not environment:
        environment = get_environment()
    environment = list_env(environment)

    itemlist = []

    thumb = get_thumb("setting_0.png")

    cabecera = """\
Muestra las [COLOR yellow]variables[/COLOR] del ecosistema de Kodi que pueden ser relevantes para el diagnóstico de problemas en Alfa:
- Versión de Alfa con Fix
- Debug Alfa: True/False
"""
    plataform = """\
Muestra los datos específicos de la [COLOR yellow]plataforma[/COLOR] en la que está alojado Kodi:
- Sistema Operativo
- Modelo (opt)
- Versión SO
- Procesador
- Arquitectura
- Idioma de Kodi
"""
    kodi = """\
Muestra los datos específicos de la instalación de [COLOR yellow]Kodi[/COLOR]:
- Versión de Kodi
- Base de Datos de Vídeo
- Versión de Python
"""
    cpu = """\
Muestra los datos de consumo actual de [COLOR yellow]CPU(s)[/COLOR]
"""
    memoria = """\
Muestra los datos del uso de [COLOR yellow]Memoria[/COLOR] del sistema:
- Memoria total
- Memoria disponible
- en [COLOR yellow]Advancedsettings.xml[/COLOR]
  - Buffer de memoria configurado:
    para Kodi: 3 x valor de <memorysize>
  - Buffermode: cachea:
    * Internet (0, 2)
    * También local (1)
    * No Buffer (3)
  - Readfactor: readfactor * avg bitrate vídeo
"""
    userdata = """\
Muestra los datos del "path" de [COLOR yellow]Userdata[/COLOR]:
- Path
- Espacio disponible
"""
    videoteca = """\
Muestra los datos de la [COLOR yellow]Videoteca[/COLOR]:
- Nº de Series y Episodios
- Nº de Películas
- Tipo de actualización
- Path
- Espacio disponible
"""
    torrent = """\
Muestra los datos generales del estado de [COLOR yellow]Torrent[/COLOR]:
- ID del cliente seleccionado
- ¿Descompresión automática de archivos RAR?
- ¿Está activo Libtorrent?
- ¿Se descomprimen los RARs en background?
- ¿Está operativo el módulo UnRAR? ¿Qué plataforma?
"""
    torrent_error = """\
Muestra los datos del error de importación de [COLOR yellow]Libtorrent[/COLOR]
"""
    torrent_cliente = """\
Muestra los datos de los [COLOR yellow]Clientes Torrent[/COLOR]:
- Nombre del Cliente
- Tamaño de buffer inicial
- Path de descargas
- Tamaño de buffer en Memoria
  (opt, si no disco)
- Espacio disponible
"""
    proxy = """\
Muestra las direcciones de canales o servidores que necesitan [COLOR yellow]Proxy[/COLOR]
"""
    log = """\
Muestra el tamaño actual del [COLOR yellow]Log[/COLOR]
"""
    reporte = """\
Enlaza con la utilidad que permite el [COLOR yellow]envío del Log[/COLOR] de Kodi a través de un servicio Pastebin
"""

    itemlist.append(Item(channel=item.channel, title="[COLOR orange][B]Variables " +
                         "de entorno Alfa: %s Debug: %s[/B][/COLOR]" %
                         (environment['addon_version'], environment['debug']),
                         action="", plot=cabecera, thumbnail=thumb, folder=False))

    itemlist.append(Item(channel=item.channel, title='[COLOR yellow]%s[/COLOR]' %
                         environment['os_name'] + ' ' + environment['prod_model'] + ' ' +
                         environment['os_release'] + ' ' + environment['machine'] + ' ' +
                         environment['architecture'] + ' ' + environment['language'],
                         action="", plot=plataform, thumbnail=thumb, folder=False))

    itemlist.append(Item(channel=item.channel, title='[COLOR yellow]Kodi [/COLOR]' +
                         environment['num_version'] + ', Vídeo: ' + environment['video_db'] +
                         ', Python ' + environment['python_version'], action="",
                         plot=kodi, thumbnail=thumb, folder=False))

    if environment['cpu_usage']:
        itemlist.append(Item(channel=item.channel, title='[COLOR yellow]CPU: [/COLOR]' +
                             environment['cpu_usage'], action="", plot=cpu, thumbnail=thumb,
                             folder=False))

    if environment['mem_total'] or environment['mem_free']:
        itemlist.append(Item(channel=item.channel, title='[COLOR yellow]Memoria: [/COLOR]Total: ' +
                             environment['mem_total'] + ' MB / Disp.: ' +
                             environment['mem_free'] + ' MB / Buffers: ' +
                             str(int(environment['kodi_buffer']) * 3) + ' MB / Buffermode: ' +
                             environment['kodi_bmode'] + ' / Readfactor: ' +
                             environment['kodi_rfactor'],
                             action="", plot=memoria, thumbnail=thumb, folder=False))

    itemlist.append(Item(channel=item.channel, title='[COLOR yellow]Userdata: [/COLOR]' +
                         environment['userdata_path'] + ' - Free: ' +
                         environment['userdata_free'].replace('.', ',') +
                         ' GB', action="", plot=userdata, thumbnail=thumb, folder=False))

    itemlist.append(Item(channel=item.channel, title='[COLOR yellow]Videoteca: [/COLOR]Series/Epis: ' +
                         environment['videolab_series'] + '/' + environment['videolab_episodios'] +
                         ' - Pelis: ' + environment['videolab_pelis'] + ' - Upd: ' +
                         environment['videolab_update'] + ' - Path: ' +
                         environment['videolab_path'] + ' - Free: ' +
                         environment['videolab_free'].replace('.', ',') +
                         ' GB', action="", plot=videoteca, thumbnail=thumb, folder=False))

    if environment['torrent_list']:
        for x, cliente in enumerate(environment['torrent_list']):
            if x == 0:
                cliente_alt = cliente.copy()
                del cliente_alt['Torrent_opt']
                itemlist.append(Item(channel=item.channel,
                                     title='[COLOR yellow]Torrent: [/COLOR]Opt: %s, %s'
                                     % (str(cliente['Torrent_opt']),
                                        str(cliente_alt).replace('{', '').replace('}', '')
                                        .replace("'", '').replace('_', ' ')), action="",
                                     plot=torrent, thumbnail=thumb,
                                     folder=False))
            elif x == 1 and environment['torrent_error']:
                itemlist.append(Item(channel=item.channel,
                                     title='[COLOR magenta]- %s[/COLOR]' % str(cliente)
                                     .replace('{', '').replace('}', '')
                                     .replace("'", '').replace('_', ' '), action="",
                                     plot=torrent_error,
                                     thumbnail=thumb,
                                     folder=False))
            else:
                cliente_alt = cliente.copy()
                del cliente_alt['Plug_in']
                cliente_alt['Libre'] = cliente_alt['Libre'].replace('.', ',') + ' GB'
                itemlist.append(Item(channel=item.channel, title='[COLOR yellow]- %s: [/COLOR]%s' %
                                     (str(cliente['Plug_in']),
                                      str(cliente_alt).replace('{', '').replace('}', '')
                                      .replace("'", '').replace('\\\\', '\\')), action="",
                                     plot=torrent_cliente,
                                     thumbnail=thumb, folder=False))

    itemlist.append(Item(channel=item.channel, title='[COLOR yellow]Proxy: [/COLOR]' +
                         environment['proxy_active'], action="", plot=proxy,
                         thumbnail=thumb,
                         folder=False))

    itemlist.append(Item(channel=item.channel, title='[COLOR yellow]TAMAÑO del LOG: [/COLOR]' +
                         environment['log_size'].replace('.', ',') + ' MB', action="",
                         plot=log, thumbnail=thumb,
                         folder=False))

    itemlist.append(Item(title="[COLOR hotpink][B]==> Reportar un fallo[/B][/COLOR]",
                         channel="setting", action="report_menu", category='Configuración',
                         unify=False, plot=reporte, thumbnail=get_thumb("error.png")))

    return (itemlist, environment)

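The memoria help text above states two rules of thumb: Kodi's effective video cache is three times the <memorysize> value from advancedsettings.xml, and readfactor caps the cache fill rate as a multiple of the stream's average bitrate. A quick sanity check of both figures; the numbers are illustrative, not taken from any real device:

memorysize_bytes = 20971520          # a 20 MB <memorysize> in advancedsettings.xml
kodi_buffer_mb = memorysize_bytes * 3 / float(1024 * 1024)
print('Effective cache: %.0f MB' % kodi_buffer_mb)      # 60 MB, the "3 x" rule above

avg_bitrate_mbps = 8                 # hypothetical 8 Mbit/s stream
readfactor = 4
print('Max fill rate: %d Mbit/s' % (avg_bitrate_mbps * readfactor))
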
@@ -1,5 +1,10 @@
# -*- coding: utf-8 -*-

from builtins import map
#from builtins import str
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
from threading import Timer

import xbmc
@@ -113,7 +118,7 @@ class Main(xbmcgui.WindowXMLDialog):
        if config.get_platform(True)['num_version'] < 18:
            self.setCoordinateResolution(2)

        for menuentry in MAIN_MENU.keys():
        for menuentry in list(MAIN_MENU.keys()):
            item = xbmcgui.ListItem(MAIN_MENU[menuentry]["label"])
            item.setProperty("thumb", str(MAIN_MENU[menuentry]["icon"]))
            item.setProperty("identifier", str(menuentry))

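The one-line shim added at the top of these files aliases the PY2-only builtins so the same source runs under both interpreters, and the list(MAIN_MENU.keys()) change is its companion fix, since PY3 dict views cannot be mutated while iterated. A standalone illustration of both points:

import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

# isinstance checks written for PY2 keep working on PY3 thanks to the alias
print(isinstance(u'text', unicode))    # True on both interpreters

menu = {'a': 1, 'b': 2}
for key in list(menu.keys()):          # materialize first; then mutation is safe
    if key == 'a':
        del menu[key]
print(menu)                            # {'b': 2}
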
@@ -3,24 +3,33 @@
# XBMC Launcher (xbmc / kodi)
# ------------------------------------------------------------

#from future import standard_library
#standard_library.install_aliases()
#from builtins import str
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

if PY3:
    import urllib.error as urllib2  # Very slow on PY2; native on PY3
else:
    import urllib2  # Use the PY2 native module, which is faster

import os
import sys

import urllib2
import time

from core import channeltools
from core import scrapertools
from core import servertools
from core import trakt_tools
from core import videolibrarytools
from core import trakt_tools
from core.item import Item
from platformcode import config, logger
from platformcode import platformtools
from platformcode.logger import WebErrorException

def start():
    """ First function executed on entering the plugin.
    Inside this function should go all the calls to the
@@ -30,19 +39,15 @@ def start():
    #config.set_setting('show_once', True)
    # Test if all the required directories are created
    config.verify_directories_created()

    # check whether the user has a connection problem
    # if so: do not let them into the addon
    # if there are DNS problems: start anyway and let them in
    # if everything is OK: enter the addon
    from specials import resolverdns

    from specials.checkhost import test_conn
    import threading
    threading.Thread(target=test_conn, args=(True, not config.get_setting('resolver_dns'), True, [], [], True)).start()
    # check_adsl = test_conn(is_exit = True, check_dns = True, view_msg = True,
    #                        lst_urls = [], lst_site_check_dns = [], in_addon = True)

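start() fires test_conn on a background thread so the connectivity check never blocks Kodi's startup. The same fire-and-forget shape, with a hypothetical probe standing in for specials.checkhost.test_conn:

import socket
import threading

def check_connectivity(host='8.8.8.8', port=53, timeout=3):
    # Hypothetical stand-in for test_conn: one TCP probe, result only printed.
    try:
        socket.create_connection((host, port), timeout=timeout).close()
        print('network OK')
    except OSError:
        print('network problem detected')

threading.Thread(target=check_connectivity).start()
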
def run(item=None):
    logger.info()
    if not item:
@@ -78,9 +83,6 @@ def run(item=None):
        else:
            item = Item(channel="channelselector", action="getmainlist", viewmode="movie")
            if not config.get_setting('show_once'):
                if not config.dev_mode():
                    from platformcode import updater
                    updater.calcCurrHash()
                from platformcode import xbmc_videolibrary
                xbmc_videolibrary.ask_set_content(1, config.get_setting('videolibrary_kodi_force'))
                config.set_setting('show_once', True)
@@ -88,9 +90,12 @@ def run(item=None):
    logger.info(item.tostring())

    try:
        if not config.get_setting('tmdb_active'):
            config.set_setting('tmdb_active', True)

        # If the item has no action, stop here
        if item.action == "":
            logger.info("Item sin accion")
            logger.info("Item without action")
            return

        # Action for main menu in channelselector
@@ -145,8 +150,12 @@ def run(item=None):
            if xbmc.getCondVisibility('system.platform.linux') and xbmc.getCondVisibility('system.platform.android'):  # android
                xbmc.executebuiltin('StartAndroidActivity("", "android.intent.action.VIEW", "", "%s")' % (item.url))
            else:
                short = urllib2.urlopen(
                    'https://u.nu/api.php?action=shorturl&format=simple&url=' + item.url).read()
                try:
                    import urllib.request as urllib
                except ImportError:
                    import urllib
                short = urllib.urlopen(
                    'https://u.nu/api.php?action=shorturl&format=simple&url=' + item.url).read().decode('utf-8')
                platformtools.dialog_ok(config.get_localized_string(20000),
                                        config.get_localized_string(70740) % short)
        # Action in certain channel specified in "action" and "channel" parameters
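The shortener call above has no error handling, so a hiccup of the u.nu service would raise inside run(). A hypothetical wrapper, not part of the addon, that degrades to the original URL on any failure:

try:
    import urllib.request as urlrequest   # PY3
except ImportError:
    import urllib2 as urlrequest          # PY2

def shorten_or_fallback(url, timeout=5):
    # Best effort: return the short URL, or the original one on any failure.
    api = 'https://u.nu/api.php?action=shorturl&format=simple&url=' + url
    try:
        short = urlrequest.urlopen(api, timeout=timeout).read().decode('utf-8')
        return short if short.startswith('http') else url
    except Exception:
        return url
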
@@ -169,28 +178,28 @@ def run(item=None):

        # Checks if channel exists
        if os.path.isfile(os.path.join(config.get_runtime_path(), 'channels', item.channel + ".py")):
            CHANNELS = 'channels'
            CHANNELS = 'channels'
        elif os.path.isfile(os.path.join(config.get_runtime_path(), 'channels', 'porn', item.channel + ".py")):
            CHANNELS = 'channels.porn'
        else:
            CHANNELS ='specials'
            CHANNELS = 'specials'

        if CHANNELS != 'channels.porn':
            channel_file = os.path.join(config.get_runtime_path(), CHANNELS, item.channel + ".py")
        else:
            channel_file = os.path.join(config.get_runtime_path(), 'channels', 'porn', item.channel + ".py")
            channel_file = os.path.join(config.get_runtime_path(), 'channels', 'porn',
                                        item.channel + ".py")

        logger.info("channel_file= " + channel_file + ' - ' + CHANNELS +' - ' + item.channel)
        logger.info("channel_file= " + channel_file + ' - ' + CHANNELS + ' - ' + item.channel)

        channel = None

        if os.path.exists(channel_file):
            try:
                channel = __import__(CHANNELS + item.channel, None, None, [CHANNELS + item.channel])
                channel = __import__('%s.%s' % (CHANNELS, item.channel), None,
                                     None, ['%s.%s' % (CHANNELS, item.channel)])
            except ImportError:
                importer = "import " + CHANNELS + "." + item.channel + " as channel "
                exec(importer)
                exec("import " + CHANNELS + "." + item.channel + " as channel")

        logger.info("Running channel %s | %s" % (channel.__name__, channel.__file__))

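The old __import__ call was missing the dot between package and module (hence the exec fallback); the new one builds '%s.%s' explicitly. importlib expresses the same lookup more directly and returns the leaf module itself, with no fromlist trick needed; a sketch:

import importlib

def load_channel(package, name):
    # Same effect as the __import__('%s.%s' ...) call above, but importlib
    # hands back the channel module directly, so no exec() fallback is needed.
    return importlib.import_module('%s.%s' % (package, name))

print(load_channel('xml', 'sax').__name__)   # xml.sax
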
@@ -270,14 +279,22 @@ def run(item=None):
        # Special action for searching: first asks for the words, then calls the "search" function
        elif item.action == "search":
            logger.info("item.action=%s" % item.action.upper())
            if channeltools.get_channel_setting('last_search', 'search'):
                last_search = channeltools.get_channel_setting('Last_searched', 'search', '')
            else:
                last_search = ''

            # last_search = ""
            # last_search_active = config.get_setting("last_search", "search")
            # if last_search_active:
            #     try:
            #         current_saved_searches_list = list(config.get_setting("saved_searches_list", "search"))
            #         last_search = current_saved_searches_list[0]
            #     except:
            #         pass

            last_search = channeltools.get_channel_setting('Last_searched', 'search', '')

            tecleado = platformtools.dialog_input(last_search)

            if tecleado is not None:
                channeltools.set_channel_setting('Last_searched', tecleado, 'search')

                if 'search' in dir(channel):
                    itemlist = channel.search(item, tecleado)
                else:
@@ -308,26 +325,26 @@ def run(item=None):

        platformtools.render_items(itemlist, item)

    except urllib2.URLError, e:
    except urllib2.URLError as e:
        import traceback
        logger.error(traceback.format_exc())

        # Grab inner and third party errors
        if hasattr(e, 'reason'):
            logger.error("Razon del error, codigo: %s | Razon: %s" % (str(e.reason[0]), str(e.reason[1])))
            logger.error("Reason for the error, code: %s | Reason: %s" % (str(e.reason[0]), str(e.reason[1])))
            texto = config.get_localized_string(30050)  # "No se puede conectar con el sitio web"
            platformtools.dialog_ok(config.get_localized_string(20000), texto)

        # Grab server response errors
        elif hasattr(e, 'code'):
            logger.error("Codigo de error HTTP : %d" % e.code)
            logger.error("HTTP error code: %d" % e.code)
            # "El sitio web no funciona correctamente (error http %d)"
            platformtools.dialog_ok(config.get_localized_string(20000), config.get_localized_string(30051) % e.code)
    except WebErrorException, e:
    except WebErrorException as e:
        import traceback
        logger.error(traceback.format_exc())

        patron = 'File "' + os.path.join(config.get_runtime_path(), CHANNELS, "").replace("\\", "\\\\") + '([^.]+)\.py"'
        patron = 'File "' + os.path.join(config.get_runtime_path(), "channels", "").replace("\\", "\\\\") + '([^.]+)\.py"'
        canal = scrapertools.find_single_match(traceback.format_exc(), patron)

        platformtools.dialog_ok(
@@ -382,13 +399,19 @@ def reorder_itemlist(itemlist):
                 [config.get_localized_string(60336), '[D]']]

    for item in itemlist:
        old_title = unicode(item.title, "utf8").lower().encode("utf8")
        if not PY3:
            old_title = unicode(item.title, "utf8").lower().encode("utf8")
        else:
            old_title = item.title.lower()
        for before, after in to_change:
            if before in item.title:
                item.title = item.title.replace(before, after)
                break

        new_title = unicode(item.title, "utf8").lower().encode("utf8")
        if not PY3:
            new_title = unicode(item.title, "utf8").lower().encode("utf8")
        else:
            new_title = item.title.lower()
        if old_title != new_title:
            mod_list.append(item)
            modified += 1
@@ -401,7 +424,7 @@ def reorder_itemlist(itemlist):
    new_list.extend(mod_list)
    new_list.extend(not_mod_list)

    logger.info("Titulos modificados:%i | No modificados:%i" % (modified, not_modified))
    logger.info("Modified titles:%i | Unmodified:%i" % (modified, not_modified))

    if len(new_list) == 0:
        new_list = itemlist

@@ -6,9 +6,12 @@
import inspect

import xbmc

from platformcode import config

import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

loggeractive = (config.get_setting("debug") == True)

@@ -18,13 +21,19 @@ def log_enable(active):


def encode_log(message=""):

    # Unicode to utf8
    if type(message) == unicode:
    if isinstance(message, unicode):
        message = message.encode("utf8")
        if PY3: message = message.decode("utf8")

    # All encodings to utf8
    elif type(message) == str:
    elif not PY3 and isinstance(message, str):
        message = unicode(message, "utf8", errors="replace").encode("utf8")

    # Bytes encodings to utf8
    elif PY3 and isinstance(message, bytes):
        message = message.decode("utf8")

    # Objects to string
    else:
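encode_log funnels unicode, native str, bytes and arbitrary objects into one loggable text type per interpreter. A self-contained PY3-side sketch of the same normalization:

def to_log_text(message=""):
    # bytes are decoded, non-strings are stringified, text passes through:
    # the same branches encode_log distinguishes above, PY3 view only.
    if isinstance(message, bytes):
        return message.decode("utf8", errors="replace")
    if not isinstance(message, str):
        return str(message)
    return message

print(to_log_text(b'caf\xc3\xa9'))    # café
print(to_log_text({'code': 404}))     # {'code': 404}
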
@@ -34,6 +43,17 @@ def encode_log(message=""):


def get_caller(message=None):

    if message and isinstance(message, unicode):
        message = message.encode("utf8")
        if PY3: message = message.decode("utf8")
    elif message and PY3 and isinstance(message, bytes):
        message = message.decode("utf8")
    elif message and not PY3:
        message = unicode(message, "utf8", errors="replace").encode("utf8")
    elif message:
        message = str(message)

    module = inspect.getmodule(inspect.currentframe().f_back.f_back)

    if module == None:

File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,12 +1,13 @@
# -*- coding: utf-8 -*-

from builtins import range
import xbmcgui

from core import httptools
from core import scrapertools
from platformcode import config
from platformcode import platformtools

lang = 'it'

class Recaptcha(xbmcgui.WindowXMLDialog):
    def Start(self, key, referer):
@@ -14,9 +15,9 @@ class Recaptcha(xbmcgui.WindowXMLDialog):
        self.key = key
        self.headers = {'Referer': self.referer}

        api_js = httptools.downloadpage("http://www.google.com/recaptcha/api.js?hl=es").data
        version = scrapertools.find_single_match(api_js, 'po.src = \'(.*?)\';').split("/")[5]
        self.url = "http://www.google.com/recaptcha/api/fallback?k=%s&hl=es&v=%s&t=2&ff=true" % (self.key, version)
        api_js = httptools.downloadpage("https://www.google.com/recaptcha/api.js?hl=" + lang).data
        version = scrapertools.find_single_match(api_js, 'po.src\s*=\s*\'(.*?)\';').split("/")[5]
        self.url = "https://www.google.com/recaptcha/api/fallback?k=" + self.key + "&hl=" + lang + "&v=" + version + "&t=2&ff=true"
        self.doModal()
        # Reload
        if self.result == {}:
@@ -27,10 +28,10 @@ class Recaptcha(xbmcgui.WindowXMLDialog):
    def update_window(self):
        data = httptools.downloadpage(self.url, headers=self.headers).data
        self.message = scrapertools.find_single_match(data,
                       '<div class="rc-imageselect-desc-no-canonical">(.*?)(?:</label>|</div>)').replace(
                       '<div class="rc-imageselect-desc[a-z-]*">(.*?)(?:</label>|</div>)').replace(
                       "<strong>", "[B]").replace("</strong>", "[/B]")
        self.token = scrapertools.find_single_match(data, 'name="c" value="([^"]+)"')
        self.image = "http://www.google.com/recaptcha/api2/payload?k=%s&c=%s" % (self.key, self.token)
        self.image = "https://www.google.com/recaptcha/api2/payload?k=%s&c=%s" % (self.key, self.token)
        self.result = {}
        self.getControl(10020).setImage(self.image)
        self.getControl(10000).setText(self.message)
@@ -56,16 +57,18 @@ class Recaptcha(xbmcgui.WindowXMLDialog):
            self.close()

        elif control == 10002:
            self.result = [int(k) for k in range(9) if self.result.get(k, False) == True]
            post = "c=%s" % self.token
            self.result = [int(k) for k in range(9) if self.result.get(k, False)]
            post = {
                "c": self.token,
                "response": self.result
            }

            for r in self.result:
                post += "&response=%s" % r

            data = httptools.downloadpage(self.url, post, headers=self.headers).data
            data = httptools.downloadpage(self.url, post=post, headers=self.headers).data
            from platformcode import logger
            logger.info(data)
            self.result = scrapertools.find_single_match(data, '<div class="fbc-verification-token">.*?>([^<]+)<')
            if self.result:
                platformtools.dialog_notification("Captcha Correcto", "La verificación ha concluido")
                platformtools.dialog_notification("Captcha corretto", "Verifica conclusa")
                self.close()
            else:
                self.result = {}

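The onClick hunk replaces the hand-built 'c=...&response=N&response=N' string with a dict whose 'response' value is a list, presumably letting httptools urlencode the repeated fields itself (an assumption; downloadpage's handling of dict posts is not shown in this diff). The equivalence, demonstrated with the stdlib:

try:
    from urllib.parse import urlencode   # PY3
except ImportError:
    from urllib import urlencode         # PY2

token = 'abc123'        # hypothetical values, for illustration only
selected = [0, 4, 7]

old_style = 'c=%s' % token + ''.join('&response=%s' % r for r in selected)
new_style = urlencode({'c': token, 'response': selected}, doseq=True)

print(old_style)   # c=abc123&response=0&response=4&response=7
print(new_style)   # same fields; doseq=True expands the list into repeats
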
@@ -1,17 +1,34 @@
# -*- coding: utf-8 -*-

from __future__ import print_function
#from builtins import str
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

if PY3:
    #from future import standard_library
    #standard_library.install_aliases()
    import urllib.parse as urllib  # Very slow on PY2; native on PY3
else:
    import urllib  # Use the PY2 native module, which is faster

import os
import re
import string
import urllib

from unicodedata import normalize
from core import filetools
from core import httptools
from core import jsontools
from core import scrapertools

import xbmc
import xbmcgui

from platformcode import config, logger

allchars = string.maketrans('', '')
if PY3: allchars = str.maketrans('', '')
if not PY3: allchars = string.maketrans('', '')
deletechars = ',\\/:*"<>|?'

@@ -38,14 +55,14 @@ def regex_tvshow(compare, file, sub=""):
    for regex in regex_expressions:
        response_file = re.findall(regex, file)
        if len(response_file) > 0:
            print "Regex File Se: %s, Ep: %s," % (str(response_file[0][0]), str(response_file[0][1]),)
            print("Regex File Se: %s, Ep: %s," % (str(response_file[0][0]), str(response_file[0][1]),))
            tvshow = 1
            if not compare:
                title = re.split(regex, file)[0]
                for char in ['[', ']', '_', '(', ')', '.', '-']:
                    title = title.replace(char, ' ')
                if title.endswith(" "): title = title.strip()
                print "title: %s" % title
                print("title: %s" % title)
                return title, response_file[0][0], response_file[0][1]
            else:
                break
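regex_tvshow walks a regex_expressions list that this hunk does not show; with one common '1x02'-style pattern standing in for it, the season/episode split works like this (illustrative only):

import re

pattern = r'(\d{1,2})x(\d{1,3})'     # stand-in for one entry of regex_expressions
name = 'Some.Show.1x02.HDTV'

match = re.findall(pattern, name)
if match:
    title = re.split(pattern, name)[0]
    for char in ['[', ']', '_', '(', ')', '.', '-']:
        title = title.replace(char, ' ')
    print(title.strip(), match[0][0], match[0][1])   # Some Show 1 02
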
@@ -74,7 +91,7 @@ def set_Subtitle():
    logger.info()

    exts = [".srt", ".sub", ".txt", ".smi", ".ssa", ".ass"]
    subtitle_folder_path = os.path.join(config.get_data_path(), "subtitles")
    subtitle_folder_path = filetools.join(config.get_data_path(), "subtitles")

    subtitle_type = config.get_setting("subtitle_type")

@@ -90,9 +107,9 @@ def set_Subtitle():
        config.set_setting("subtitlepath_folder", subtitle_path)
    else:
        subtitle_path = config.get_setting("subtitlepath_keyboard")
        long = len(subtitle_path)
        if long > 0:
            if subtitle_path.startswith("http") or subtitle_path[long - 4, long] in exts:
        long_v = len(subtitle_path)
        if long_v > 0:
            if subtitle_path.startswith("http") or subtitle_path[long_v - 4:long_v] in exts:
                logger.info("Con subtitulo : " + subtitle_path)
                xbmc.Player().setSubtitles(subtitle_path)
                return
@@ -106,13 +123,13 @@ def set_Subtitle():
            tvshow_title, season, episode = regex_tvshow(False, subtitle_name)
            try:
                if episode != "":
                    Subnames = glob.glob(os.path.join(subtitle_path, "Tvshows", tvshow_title,
                    Subnames = glob.glob(filetools.join(subtitle_path, "Tvshows", tvshow_title,
                                         "%s %sx%s" % (tvshow_title, season, episode) + "*.??.???"))
                else:
                    Subnames = glob.glob(os.path.join(subtitle_path, "Movies", subtitle_name + "*.??.???"))
                    Subnames = glob.glob(filetools.join(subtitle_path, "Movies", subtitle_name + "*.??.???"))
                for Subname in Subnames:
                    if os.path.splitext(Subname)[1] in exts:
                        logger.info("Con subtitulo : " + os.path.split(Subname)[1])
                        logger.info("Con subtitulo : " + filetools.split(Subname)[1])
                        xbmc.Player().setSubtitles((Subname))
            except:
                logger.error("error al cargar subtitulos")
@@ -147,13 +164,13 @@ def searchSubtitle(item):
    if config.get_setting("subtitle_type") == 0:
        subtitlepath = config.get_setting("subtitlepath_folder")
        if subtitlepath == "":
            subtitlepath = os.path.join(config.get_data_path(), "subtitles")
            subtitlepath = filetools.join(config.get_data_path(), "subtitles")
            config.set_setting("subtitlepath_folder", subtitlepath)

    elif config.get_setting("subtitle_type") == 1:
        subtitlepath = config.get_setting("subtitlepath_keyboard")
        if subtitlepath == "":
            subtitlepath = os.path.join(config.get_data_path(), "subtitles")
            subtitlepath = filetools.join(config.get_data_path(), "subtitles")
            config.set_setting("subtitlepathkeyboard", subtitlepath)
    elif subtitlepath.startswith("http"):
        subtitlepath = config.get_setting("subtitlepath_folder")
@@ -161,27 +178,27 @@ def searchSubtitle(item):
    else:
        subtitlepath = config.get_setting("subtitlepath_folder")
        if subtitlepath == "":
            subtitlepath = os.path.join(config.get_data_path(), "subtitles")
            subtitlepath = filetools.join(config.get_data_path(), "subtitles")
            config.set_setting("subtitlepath_folder", subtitlepath)
    if not os.path.exists(subtitlepath):
    if not filetools.exists(subtitlepath):
        try:
            os.mkdir(subtitlepath)
            filetools.mkdir(subtitlepath)
        except:
            logger.error("error no se pudo crear path subtitulos")
            return

    path_movie_subt = xbmc.translatePath(os.path.join(subtitlepath, "Movies"))
    if not os.path.exists(path_movie_subt):
    path_movie_subt = xbmc.translatePath(filetools.join(subtitlepath, "Movies"))
    if not filetools.exists(path_movie_subt):
        try:
            os.mkdir(path_movie_subt)
            filetools.mkdir(path_movie_subt)
        except:
            logger.error("error no se pudo crear el path Movies")
            return
    full_path_tvshow = ""
    path_tvshow_subt = xbmc.translatePath(os.path.join(subtitlepath, "Tvshows"))
    if not os.path.exists(path_tvshow_subt):
    path_tvshow_subt = xbmc.translatePath(filetools.join(subtitlepath, "Tvshows"))
    if not filetools.exists(path_tvshow_subt):
        try:
            os.mkdir(path_tvshow_subt)
            filetools.mkdir(path_tvshow_subt)
        except:
            logger.error("error no pudo crear el path Tvshows")
            return
@@ -189,20 +206,20 @@ def searchSubtitle(item):
        title_new = title = urllib.unquote_plus(item.title)
    else:
        title_new = title = urllib.unquote_plus(item.show + " - " + item.title)
    path_video_temp = xbmc.translatePath(os.path.join(config.get_runtime_path(), "resources", "subtitle.mp4"))
    if not os.path.exists(path_video_temp):
    path_video_temp = xbmc.translatePath(filetools.join(config.get_runtime_path(), "resources", "subtitle.mp4"))
    if not filetools.exists(path_video_temp):
        logger.error("error : no existe el video temporal de subtitulos")
        return
    # path_video_temp = xbmc.translatePath(os.path.join( ,video_temp + ".mp4" ))
    # path_video_temp = xbmc.translatePath(filetools.join( ,video_temp + ".mp4" ))

    title_new = _normalize(title_new)
    tvshow_title, season, episode = regex_tvshow(False, title_new)
    if episode != "":
        full_path_tvshow = xbmc.translatePath(os.path.join(path_tvshow_subt, tvshow_title))
        if not os.path.exists(full_path_tvshow):
            os.mkdir(full_path_tvshow)  # title_new + ".mp4"
        full_path_tvshow = xbmc.translatePath(filetools.join(path_tvshow_subt, tvshow_title))
        if not filetools.exists(full_path_tvshow):
            filetools.mkdir(full_path_tvshow)  # title_new + ".mp4"
        full_path_video_new = xbmc.translatePath(
            os.path.join(full_path_tvshow, "%s %sx%s.mp4" % (tvshow_title, season, episode)))
            filetools.join(full_path_tvshow, "%s %sx%s.mp4" % (tvshow_title, season, episode)))
        logger.info(full_path_video_new)
        listitem = xbmcgui.ListItem(title_new, iconImage="DefaultVideo.png", thumbnailImage="")
        listitem.setInfo("video",
@@ -210,14 +227,14 @@ def searchSubtitle(item):
                         "tvshowtitle": tvshow_title})

    else:
        full_path_video_new = xbmc.translatePath(os.path.join(path_movie_subt, title_new + ".mp4"))
        full_path_video_new = xbmc.translatePath(filetools.join(path_movie_subt, title_new + ".mp4"))
        listitem = xbmcgui.ListItem(title, iconImage="DefaultVideo.png", thumbnailImage="")
        listitem.setInfo("video", {"Title": title_new, "Genre": "Movies"})

    import shutil, time
    import time

    try:
        shutil.copy(path_video_temp, full_path_video_new)
        filetools.copy(path_video_temp, full_path_video_new)
        copy = True
        logger.info("nuevo path =" + full_path_video_new)
        time.sleep(2)
@@ -242,10 +259,10 @@ def searchSubtitle(item):
            continue

        time.sleep(1)
    os.remove(full_path_video_new)
    filetools.remove(full_path_video_new)
    try:
        if full_path_tvshow != "":
            os.rmdir(full_path_tvshow)
            filetools.rmdir(full_path_tvshow)
    except OSError:
        pass

@@ -267,3 +284,70 @@ def saveSubtitleName(item):
    else:
        config.set_setting("subtitle_name", title)
    return


def get_from_subdivx(sub_url):

    """
    :param sub_url: download URL of the subtitle hosted on subdivx.com
                    For example: http://www.subdivx.com/bajar.php?id=573942&u=8

    :return: the path to the unpacked subtitle
    """

    logger.info()

    sub = ''
    sub_dir = os.path.join(config.get_data_path(), 'temp_subs')

    if os.path.exists(sub_dir):
        for sub_file in os.listdir(sub_dir):
            old_sub = os.path.join(sub_dir, sub_file)
            os.remove(old_sub)
    else:
        os.mkdir(sub_dir)

    sub_url = sub_url.replace("&amp;", "&")
    sub_data = httptools.downloadpage(sub_url, follow_redirects=False)
    if 'x-frame-options' not in sub_data.headers:
        sub_url = '%s' % sub_data.headers['location']
        ext = sub_url[-4::]
        file_id = "subtitle%s" % ext
        filename = os.path.join(sub_dir, file_id)
        try:
            data_dl = httptools.downloadpage(sub_url).data
            filetools.write(filename, data_dl)
            sub = extract_file_online(sub_dir, filename)
        except:
            logger.info('sub no valido')
    else:
        logger.info('sub no valido')
    return sub

def extract_file_online(path, filename):

    """
    :param path: directory containing the compressed file

    :param filename: name of the compressed file

    :return: the path to the unpacked subtitle
    """

    logger.info()

    url = "http://online.b1.org/rest/online/upload"

    data = httptools.downloadpage(url, file=filename).data

    result = jsontools.load(scrapertools.find_single_match(data, "result.listing = ([^;]+);"))
    compressed = result["name"]
    extracted = result["children"][0]["name"]

    dl_url = "http://online.b1.org/rest/online/download/%s/%s" % (compressed, extracted)
    extracted_path = os.path.join(path, extracted)
    data_dl = httptools.downloadpage(dl_url).data
    filetools.write(extracted_path, data_dl)

    return extracted_path

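extract_file_online round-trips the archive through the b1.org web service to unpack it. For plain .zip subtitles the stdlib can do the same job offline; a hypothetical alternative (RAR archives would still need an external tool, which is presumably why the online service is used):

import os
import zipfile

def extract_file_local(path, filename):
    # Offline counterpart of extract_file_online above, .zip only.
    with zipfile.ZipFile(filename) as archive:
        member = archive.namelist()[0]      # subtitle archives hold one file
        archive.extract(member, path)
    return os.path.join(path, member)
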
@@ -6,189 +6,167 @@
# data obtained from the pages
# ----------------------------------------------------------

import re
# from builtins import str
import sys

PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

import os
import unicodedata
import re

import config

from platformcode import config
from core.item import Item
from core import scrapertools
from platformcode import logger

thumb_dict = {
    "numbers": "http://icons.iconarchive.com/icons/custom-icon-design/pretty-office-10/256/Numbers-icon.png",
    "a": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-A-black-icon.png",
    "accion": "https://s14.postimg.cc/sqy3q2aht/action.png",
    "actors": "https://i.postimg.cc/tC2HMhVV/actors.png",
    "adolescente": "https://s10.postimg.cc/inq7u4p61/teens.png",
    "adultos": "https://s10.postimg.cc/s8raxc51l/adultos.png",
    "adults": "https://s10.postimg.cc/s8raxc51l/adultos.png",
    "alcinema": "http://icons.iconarchive.com/icons/chromatix/aerial/256/movie-icon.png",  #"http://icons.iconarchive.com/icons/itzikgur/my-seven/256/Movies-Films-icon.png",
    "all": "https://s10.postimg.cc/h1igpgw0p/todas.png",
    "alphabet": "https://s10.postimg.cc/4dy3ytmgp/a-z.png",
    "animacion": "https://s14.postimg.cc/vl193mupd/animation.png",
    "anime": "https://s10.postimg.cc/n9mc2ikzt/anime.png",
    "artes marciales": "https://s10.postimg.cc/4u1v51tzt/martial_arts.png",
    "asiaticas": "https://i.postimg.cc/Xq0HXD5d/asiaticas.png",
    "audio": "https://s10.postimg.cc/b34nern7d/audio.png",
    "aventura": "http://icons.iconarchive.com/icons/sirubico/movie-genre/256/Adventure-2-icon.png",  #"https://s14.postimg.cc/ky7fy5he9/adventure.png",
    "b": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-B-black-icon.png",
    "belico": "https://s14.postimg.cc/5e027lru9/war.png",
    "biografia": "https://s10.postimg.cc/jq0ecjxnt/biographic.png",
    "c": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-C-black-icon.png",
    "carreras": "https://s14.postimg.cc/yt5qgdr69/races.png",
    "cast": "https://i.postimg.cc/qvfP5Xvt/cast.png",
    "categories": "https://s10.postimg.cc/v0ako5lmh/categorias.png",
    "ciencia ficcion": "https://s14.postimg.cc/8kulr2jy9/scifi.png",
    "cine negro": "https://s10.postimg.cc/6ym862qgp/noir.png",
    "colections": "https://s10.postimg.cc/ywnwjvytl/colecciones.png",
    "comedia": "https://s14.postimg.cc/9ym8moog1/comedy.png",
    "cortometraje": "https://s10.postimg.cc/qggvlxndl/shortfilm.png",
    "country": "https://s10.postimg.cc/yz0h81j15/pais.png",
    "crimen": "https://s14.postimg.cc/duzkipjq9/crime.png",
    "d": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-D-black-icon.png",
    "de la tv": "https://s10.postimg.cc/94gj0iwh5/image.png",
    "deporte": "https://s14.postimg.cc/x1crlnnap/sports.png",
    "destacadas": "https://s10.postimg.cc/yu40x8q2x/destacadas.png",
    "documental": "https://s10.postimg.cc/68aygmmcp/documentales.png",
    "documentaries": "https://s10.postimg.cc/68aygmmcp/documentales.png",
    "doramas": "https://s10.postimg.cc/h4dyr4nfd/doramas.png",
    "drama": "https://s14.postimg.cc/fzjxjtnxt/drama.png",
    "e": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-E-black-icon.png",
    "erotica": "https://s10.postimg.cc/dcbb9bfx5/erotic.png",
    "espanolas": "https://s10.postimg.cc/x1y6zikx5/spanish.png",
    "estrenos": "https://s10.postimg.cc/sk8r9xdq1/estrenos.png",
    "extranjera": "https://s10.postimg.cc/f44a4eerd/foreign.png",
    "f": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-F-black-icon.png",
    "familiar": "https://s14.postimg.cc/jj5v9ndsx/family.png",
    "fantasia": "https://s14.postimg.cc/p7c60ksg1/fantasy.png",
    "fantastico": "https://s10.postimg.cc/tedufx5eh/fantastic.png",
    "favorites": "https://s10.postimg.cc/rtg147gih/favoritas.png",
    "g": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-G-black-icon.png",
    "genres": "https://s10.postimg.cc/6c4rx3x1l/generos.png",
    "h": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-H-black-icon.png",
    "historica": "https://s10.postimg.cc/p1faxj6yh/historic.png",
    "horror": "https://s10.postimg.cc/8exqo6yih/horror2.png",
    "hot": "https://s10.postimg.cc/yu40x8q2x/destacadas.png",
    "i": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-I-black-icon.png",
    "infantil": "https://s14.postimg.cc/4zyq842mp/childish.png",
    "intriga": "https://s14.postimg.cc/5qrgdimw1/intrigue.png",
    "j": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-J-black-icon.png",
    "k": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-K-black-icon.png",
    "l": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-L-black-icon.png",
    "language": "https://s10.postimg.cc/6wci189ft/idioma.png",
    "last": "https://s10.postimg.cc/i6ciuk0eh/ultimas.png",
    "lat": "https://i.postimg.cc/Gt8fMH0J/lat.png",
    "latino": "https://s10.postimg.cc/swip0b86h/latin.png",
    "m": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-M-black-icon.png",
    "mexicanas": "https://s10.postimg.cc/swip0b86h/latin.png",
    "misterio": "https://s14.postimg.cc/3m73cg8ep/mistery.png",
    "more voted": "https://s10.postimg.cc/lwns2d015/masvotadas.png",
    "more watched": "https://s10.postimg.cc/c6orr5neh/masvistas.png",
    "movies": "https://s10.postimg.cc/fxtqzdog9/peliculas.png",
    "musical": "https://s10.postimg.cc/hy7fhtecp/musical.png",
    "n": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-N-black-icon.png",
    "new episodes": "https://s10.postimg.cc/fu4iwpnqh/nuevoscapitulos.png",
    "newest": "http://icons.iconarchive.com/icons/laurent-baumann/creme/128/Location-News-icon.png",  #"http://icons.iconarchive.com/icons/uiconstock/ios8-setting/128/news-icon.png",
    "nextpage": "http://icons.iconarchive.com/icons/custom-icon-design/pretty-office-5/256/navigate-right-icon.png",  #"http://icons.iconarchive.com/icons/custom-icon-design/office/256/forward-icon.png", #"http://icons.iconarchive.com/icons/ahmadhania/spherical/128/forward-icon.png",
    "o": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-O-black-icon.png",
    "others": "http://icons.iconarchive.com/icons/limav/movie-genres-folder/128/Others-icon.png",
    "p": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-P-black-icon.png",
    "peleas": "https://s10.postimg.cc/7a3ojbjwp/Fight.png",
    "policial": "https://s10.postimg.cc/wsw0wbgbd/cops.png",
    "premieres": "https://s10.postimg.cc/sk8r9xdq1/estrenos.png",
    "q": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-Q-black-icon.png",
    "quality": "https://s10.postimg.cc/9bbojsbjd/calidad.png",
    "r": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-R-black-icon.png",
    "recents": "https://s10.postimg.cc/649u24kp5/recents.png",
    "recomendadas": "https://s10.postimg.cc/7xk1oqccp/recomendadas.png",
    "recomended": "https://s10.postimg.cc/7xk1oqccp/recomendadas.png",
    "religion": "https://s10.postimg.cc/44j2skquh/religion.png",
    "romance": "https://s10.postimg.cc/yn8vdll6x/romance.png",
    "romantica": "https://s14.postimg.cc/8xlzx7cht/romantic.png",
    "s": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-S-black-icon.png",
    "search": "http://icons.iconarchive.com/icons/jamespeng/movie/256/database-icon.png",
    "suspenso": "https://s10.postimg.cc/7peybxdfd/suspense.png",
    "t": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-T-black-icon.png",
    "telenovelas": "https://i.postimg.cc/QCXZkyDM/telenovelas.png",
    "terror": "https://s14.postimg.cc/thqtvl52p/horror.png",
    "thriller": "https://s14.postimg.cc/uwsekl8td/thriller.png",
    "tvshows": "https://s10.postimg.cc/kxvslawe1/series.png",
    "u": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-U-black-icon.png",
    "ultimiarrivi": "http://icons.iconarchive.com/icons/saki/snowish/128/Extras-internet-download-icon.png",
    "updated": "https://s10.postimg.cc/46m3h6h9l/updated.png",
    "v": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-V-black-icon.png",
    "vose": "https://i.postimg.cc/kgmnbd8h/vose.png",
    "w": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-W-black-icon.png",
    "western": "https://s10.postimg.cc/5wc1nokjt/western.png",
    "x": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-X-black-icon.png",
    "y": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-Y-black-icon.png",
    "year": "https://s10.postimg.cc/atzrqg921/a_o.png",
    "z": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-Z-black-icon.png"
    }
thumb_dict = {"movies": "https://s10.postimg.cc/fxtqzdog9/peliculas.png",
              "tvshows": "https://s10.postimg.cc/kxvslawe1/series.png",
              "on air": "https://i.postimg.cc/HLLJWMcr/en-emision.png",
              "all": "https://s10.postimg.cc/h1igpgw0p/todas.png",
              "genres": "https://s10.postimg.cc/6c4rx3x1l/generos.png",
              "search": "https://s10.postimg.cc/v985e2izd/buscar.png",
              "quality": "https://s10.postimg.cc/9bbojsbjd/calidad.png",
              "audio": "https://s10.postimg.cc/b34nern7d/audio.png",
              "newest": "https://s10.postimg.cc/g1s5tf1bt/novedades.png",
              "last": "https://s10.postimg.cc/i6ciuk0eh/ultimas.png",
              "hot": "https://s10.postimg.cc/yu40x8q2x/destacadas.png",
              "year": "https://s10.postimg.cc/atzrqg921/a_o.png",
              "alphabet": "https://s10.postimg.cc/4dy3ytmgp/a-z.png",
              "recomended": "https://s10.postimg.cc/7xk1oqccp/recomendadas.png",
              "more watched": "https://s10.postimg.cc/c6orr5neh/masvistas.png",
              "more voted": "https://s10.postimg.cc/lwns2d015/masvotadas.png",
              "favorites": "https://s10.postimg.cc/rtg147gih/favoritas.png",
              "colections": "https://s10.postimg.cc/ywnwjvytl/colecciones.png",
              "categories": "https://s10.postimg.cc/v0ako5lmh/categorias.png",
              "premieres": "https://s10.postimg.cc/sk8r9xdq1/estrenos.png",
              "documentaries": "https://s10.postimg.cc/68aygmmcp/documentales.png",
              "language": "https://s10.postimg.cc/6wci189ft/idioma.png",
              "new episodes": "https://s10.postimg.cc/fu4iwpnqh/nuevoscapitulos.png",
              "country": "https://s10.postimg.cc/yz0h81j15/pais.png",
              "adults": "https://s10.postimg.cc/s8raxc51l/adultos.png",
              "recents": "https://s10.postimg.cc/649u24kp5/recents.png",
              "updated": "https://s10.postimg.cc/46m3h6h9l/updated.png",
              "actors": "https://i.postimg.cc/tC2HMhVV/actors.png",
              "cast": "https://i.postimg.cc/qvfP5Xvt/cast.png",
              "lat": "https://i.postimg.cc/Gt8fMH0J/lat.png",
              "vose": "https://i.postimg.cc/kgmnbd8h/vose.png",
              "accion": "https://s14.postimg.cc/sqy3q2aht/action.png",
              "adolescente": "https://s10.postimg.cc/inq7u4p61/teens.png",
              "adultos": "https://s10.postimg.cc/s8raxc51l/adultos.png",
              "animacion": "https://s14.postimg.cc/vl193mupd/animation.png",
              "anime": "https://s10.postimg.cc/n9mc2ikzt/anime.png",
              "artes marciales": "https://s10.postimg.cc/4u1v51tzt/martial_arts.png",
              "asiaticas": "https://i.postimg.cc/Xq0HXD5d/asiaticas.png",
              "aventura": "https://s14.postimg.cc/ky7fy5he9/adventure.png",
              "belico": "https://s14.postimg.cc/5e027lru9/war.png",
              "biografia": "https://s10.postimg.cc/jq0ecjxnt/biographic.png",
              "carreras": "https://s14.postimg.cc/yt5qgdr69/races.png",
              "ciencia ficcion": "https://s14.postimg.cc/8kulr2jy9/scifi.png",
              "cine negro": "https://s10.postimg.cc/6ym862qgp/noir.png",
              "comedia": "https://s14.postimg.cc/9ym8moog1/comedy.png",
              "cortometraje": "https://s10.postimg.cc/qggvlxndl/shortfilm.png",
              "crimen": "https://s14.postimg.cc/duzkipjq9/crime.png",
              "de la tv": "https://s10.postimg.cc/94gj0iwh5/image.png",
              "deporte": "https://s14.postimg.cc/x1crlnnap/sports.png",
              "destacadas": "https://s10.postimg.cc/yu40x8q2x/destacadas.png",
              "documental": "https://s10.postimg.cc/68aygmmcp/documentales.png",
              "doramas": "https://s10.postimg.cc/h4dyr4nfd/doramas.png",
              "drama": "https://s14.postimg.cc/fzjxjtnxt/drama.png",
              "erotica": "https://s10.postimg.cc/dcbb9bfx5/erotic.png",
              "espanolas": "https://s10.postimg.cc/x1y6zikx5/spanish.png",
              "estrenos": "https://s10.postimg.cc/sk8r9xdq1/estrenos.png",
              "extranjera": "https://s10.postimg.cc/f44a4eerd/foreign.png",
              "familiar": "https://s14.postimg.cc/jj5v9ndsx/family.png",
              "fantasia": "https://s14.postimg.cc/p7c60ksg1/fantasy.png",
              "fantastico": "https://s10.postimg.cc/tedufx5eh/fantastic.png",
              "historica": "https://s10.postimg.cc/p1faxj6yh/historic.png",
              "horror": "https://s10.postimg.cc/8exqo6yih/horror2.png",
              "infantil": "https://s14.postimg.cc/4zyq842mp/childish.png",
              "intriga": "https://s14.postimg.cc/5qrgdimw1/intrigue.png",
              "latino": "https://s10.postimg.cc/swip0b86h/latin.png",
              "mexicanas": "https://s10.postimg.cc/swip0b86h/latin.png",
              "misterio": "https://s14.postimg.cc/3m73cg8ep/mistery.png",
              "musical": "https://s10.postimg.cc/hy7fhtecp/musical.png",
              "peleas": "https://s10.postimg.cc/7a3ojbjwp/Fight.png",
              "policial": "https://s10.postimg.cc/wsw0wbgbd/cops.png",
              "recomendadas": "https://s10.postimg.cc/7xk1oqccp/recomendadas.png",
              "religion": "https://s10.postimg.cc/44j2skquh/religion.png",
              "romance": "https://s10.postimg.cc/yn8vdll6x/romance.png",
              "romantica": "https://s14.postimg.cc/8xlzx7cht/romantic.png",
              "suspenso": "https://s10.postimg.cc/7peybxdfd/suspense.png",
              "telenovelas": "https://i.postimg.cc/QCXZkyDM/telenovelas.png",
              "terror": "https://s14.postimg.cc/thqtvl52p/horror.png",
              "thriller": "https://s14.postimg.cc/uwsekl8td/thriller.png",
              "western": "https://s10.postimg.cc/5wc1nokjt/western.png"
              }


def set_genre(string):
    #logger.info()
    # logger.info()

    genres_dict = {'accion':['azione'],
                   'adultos':['adulto','adulti'],
                   'animacion':['animazione'],
                   'adolescente':['adolescente', 'adolescenti'],
                   'aventura':['avventura'],
                   'belico':['guerra','guerriglia'],
                   'biografia':['biografia', 'biografie', 'biografico'],
                   'ciencia ficcion':['ciencia ficcion', 'cienciaficcion', 'sci fi', 'c ficcion'],
                   'cine negro':['film noir'],
                   'comedia':['commedia', 'commedie'],
                   'cortometraje':['cortometraggio', 'corto', 'corti'],
                   'de la tv':['della tv', 'televisione', 'tv'],
                   'deporte':['deporte', 'deportes'],
                   'destacadas':['destacada', 'destacadas'],
                   'documental':['documentario', 'documentari'],
                   'erotica':['erotica', 'erotica +', 'eroticas', 'eroticas +', 'erotico', 'erotico +'],
                   'estrenos':['estrenos', 'estrenos'],
                   'extranjera':['extrajera', 'extrajeras', 'foreign'],
                   'familiar':['familiare', 'famiglia'],
                   'fantastico':['fantastico', 'fantastica', 'fantastici'],
                   'historica':['storico', 'storia'],
                   'infantil':['bambini', 'infanzia'],
                   'musical':['musicale', 'musical', 'musica'],
                   'numbers': ['0','1','2','3','4','5','6','7','8','9'],
                   'policial':['politico', 'politici', 'politica'],
                   'recomendadas':['raccomandato', 'raccomandati'],
                   'religion':['religione', 'religioso', 'religiosa','religiosi'],
                   'romantica':['romantica', 'romantico', 'romantici'],
                   'suspenso':['suspenso', 'suspense'],
                   'thriller':['thriller', 'thrillers'],
                   'western':['western', 'westerns']
    genres_dict = {'accion': ['accion', 'action', 'accion y aventura', 'action & adventure'],
                   'adultos': ['adultos', 'adultos +', 'adulto'],
                   'animacion': ['animacion', 'animacion e infantil', 'dibujos animados'],
                   'adolescente': ['adolescente', 'adolescentes', 'adolescencia', 'adolecentes'],
                   'aventura': ['aventura', 'aventuras'],
                   'belico': ['belico', 'belica', 'belicas', 'guerra', 'belico guerra'],
                   'biografia': ['biografia', 'biografias', 'biografica', 'biograficas', 'biografico'],
                   'ciencia ficcion': ['ciencia ficcion', 'cienciaficcion', 'sci fi', 'c ficcion'],
                   'cine negro': ['film noir', 'negro'],
                   'comedia': ['comedia', 'comedias'],
                   'cortometraje': ['cortometraje', 'corto', 'cortos'],
                   'de la tv': ['de la tv', 'television', 'tv'],
                   'deporte': ['deporte', 'deportes'],
                   'destacadas': ['destacada', 'destacadas'],
                   'documental': ['documental', 'documentales'],
                   'erotica': ['erotica', 'erotica +', 'eroticas', 'eroticas +', 'erotico', 'erotico +'],
                   'estrenos': ['estrenos', 'estrenos'],
                   'extranjera': ['extrajera', 'extrajeras', 'foreign'],
                   'familiar': ['familiar', 'familia'],
                   'fantastico': ['fantastico', 'fantastica', 'fantasticas'],
                   'historica': ['historica', 'historicas', 'historico', 'historia'],
                   'infantil': ['infantil', 'kids'],
                   'musical': ['musical', 'musicales', 'musica'],
                   'policial': ['policial', 'policiaco', 'policiaca'],
                   'recomendadas': ['recomedada', 'recomendadas'],
                   'religion': ['religion', 'religiosa', 'religiosas'],
                   'romantica': ['romantica', 'romanticas', 'romantico'],
                   'suspenso': ['suspenso', 'suspense'],
                   'thriller': ['thriller', 'thrillers'],
                   'western': ['western', 'westerns', 'oeste western']
                   }
    string = re.sub(r'peliculas de |pelicula de la |peli |cine ','', string)
    for genre, variants in genres_dict.items():
    string = re.sub(r'peliculas de |pelicula de la |peli |cine ', '', string)
    for genre, variants in list(genres_dict.items()):
        if string in variants:
            string = genre

    return string


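set_genre strips a handful of Spanish prefixes and then maps any known variant onto its canonical genre key. Trimmed to one row of the table above, the flow is:

import re

genres_dict = {'comedia': ['comedia', 'comedias']}   # one row of the table above

def set_genre_demo(string):
    string = re.sub(r'peliculas de |pelicula de la |peli |cine ', '', string)
    for genre, variants in list(genres_dict.items()):
        if string in variants:
            string = genre
    return string

print(set_genre_demo('peliculas de comedias'))   # comedia
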
def remove_format(string):
    #logger.info()
    #logger.debug('entra en remove: %s' % string)
    # logger.info()
    # logger.debug('entra en remove: %s' % string)
    string = string.rstrip()
    string = re.sub(r'(\[|\[\/)(?:color|COLOR|b|B|i|I).*?\]|\[|\]|\(|\)|\:|\.', '', string)
    #logger.debug('sale de remove: %s' % string)
    # logger.debug('sale de remove: %s' % string)
    return string


def normalize(string):
    string = string.decode('utf-8')
    if not PY3 and isinstance(string, str):
        string = string.decode('utf-8')
    normal = ''.join((c for c in unicodedata.normalize('NFD', unicode(string)) if unicodedata.category(c) != 'Mn'))
    return normal


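normalize relies on NFD decomposition: each accented character splits into a base letter plus a combining mark (Unicode category 'Mn'), and dropping the marks leaves the plain-ASCII skeleton. On PY3, where every str is already unicode:

import unicodedata

def strip_accents(text):
    # Decompose, then drop the combining marks, exactly as normalize() does above.
    return ''.join(c for c in unicodedata.normalize('NFD', text)
                   if unicodedata.category(c) != 'Mn')

print(strip_accents('acción española'))   # accion espanola
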
def simplify(string):

    #logger.info()
    #logger.debug('entra en simplify: %s'%string)
    # logger.info()
    # logger.debug('entra en simplify: %s' % string)
    string = remove_format(string)
    string = string.replace('-',' ').replace('_',' ')
    string = re.sub(r'\d+','', string)
    string = string.replace('-', ' ').replace('_', ' ')
    string = re.sub(r'\d+', '', string)
    string = string.strip()

    notilde = normalize(string)
@@ -197,12 +175,13 @@ def simplify(string):
    except:
        pass
    string = string.lower()
    #logger.debug('sale de simplify: %s' % string)
    # logger.debug('sale de simplify: %s' % string)

    return string


def add_languages(title, languages):
    #logger.info()
    # logger.info()

    if isinstance(languages, list):
        for language in languages:
@@ -211,14 +190,55 @@ def add_languages(title, languages):
        title = '%s %s' % (title, set_color(languages, languages))
    return title


def add_info_plot(plot, languages, quality):
    # logger.info()
    last = '[/I][/B]\n'

    if languages:
        l_part = '[COLOR yellowgreen][B][I]Idiomas:[/COLOR] '
        mid = ''

        if isinstance(languages, list):
            for language in languages:
                mid += '%s ' % (set_color(language, language))
        else:
            mid = '%s ' % (set_color(languages, languages))

        p_lang = '%s%s%s' % (l_part, mid, last)

    if quality:
        q_part = '[COLOR yellowgreen][B][I]Calidad:[/COLOR] '
        p_quality = '%s%s%s' % (q_part, quality, last)

    if languages and quality:
        plot_ = '%s%s\n%s' % (p_lang, p_quality, plot)

    elif languages:
        plot_ = '%s\n%s' % (p_lang, plot)

    elif quality:
        plot_ = '%s\n%s' % (p_quality, plot)

    else:
        plot_ = plot

    return plot_


def set_color(title, category):
    #logger.info()
    # logger.info()
    from core import jsontools

    styles_path = os.path.join(config.get_runtime_path(), 'resources', 'color_styles.json')
    preset = config.get_setting("preset_style", default="Estilo 1")
    color_setting = jsontools.load((open(styles_path, "r").read()))[preset]

    color_scheme = {'otro': 'white', 'dual': 'white'}

    #logger.debug('category antes de remove: %s' % category)
    # logger.debug('category antes de remove: %s' % category)
    category = remove_format(category).lower()
    #logger.debug('category despues de remove: %s' % category)
    # logger.debug('category despues de remove: %s' % category)
    # List of possible elements in the title
    color_list = ['movie', 'tvshow', 'year', 'rating_1', 'rating_2', 'rating_3', 'quality', 'cast', 'lat', 'vose',
                  'vos', 'vo', 'server', 'library', 'update', 'no_update']
@@ -234,46 +254,45 @@ def set_color(title, category):
|
||||
if custom_colors:
|
||||
color_scheme[element] = remove_format(config.get_setting('%s_color' % element))
|
||||
else:
|
||||
color_scheme[element] = 'white'
|
||||
color_scheme[element] = remove_format(color_setting.get(element, 'white'))
|
||||
# color_scheme[element] = 'white'
|
||||
|
||||
if category in ['update', 'no_update']:
|
||||
#logger.debug('title antes de updates: %s' % title)
|
||||
title= re.sub(r'\[COLOR .*?\]','[COLOR %s]' % color_scheme[category],title)
|
||||
# logger.debug('title antes de updates: %s' % title)
|
||||
title = re.sub(r'\[COLOR .*?\]', '[COLOR %s]' % color_scheme[category], title)
|
||||
else:
|
||||
if category not in ['movie', 'tvshow', 'library', 'otro']:
|
||||
title = "[COLOR %s][%s][/COLOR]"%(color_scheme[category], title)
|
||||
title = "[COLOR %s][%s][/COLOR]" % (color_scheme[category], title)
|
||||
else:
|
||||
title = "[COLOR %s]%s[/COLOR]" % (color_scheme[category], title)
|
||||
return title
|
||||
|
||||
def set_lang(language):
|
||||
#logger.info()
|
||||
|
||||
cast =['castellano','espanol','cast','esp','espaol', 'es','zc', 'spa', 'spanish', 'vc']
|
||||
ita =['italiano','italian','ita','it']
|
||||
lat=['latino','lat','la', 'espanol latino', 'espaol latino', 'zl', 'mx', 'co', 'vl']
|
||||
vose=['subtitulado','subtitulada','sub','sub espanol','vose','espsub','su','subs castellano',
|
||||
'sub: español', 'vs', 'zs', 'vs', 'english-spanish subs', 'ingles sub espanol']
|
||||
sub_ita=['sottotitolato','sottotitolata','sub','sub ita','subs italiano',
|
||||
'sub: italiano', 'inglese sottotitolato']
|
||||
vos=['vos', 'sub ingles', 'engsub','ingles subtitulado', 'sub: ingles']
|
||||
vo=['ingles', 'en','vo', 'ovos', 'eng','v.o', 'english']
|
||||
dual=['dual']
|
||||
def set_lang(language):
|
||||
# logger.info()
|
||||
|
||||
cast = ['castellano', 'español', 'espanol', 'cast', 'esp', 'espaol', 'es', 'zc', 'spa', 'spanish', 'vc']
|
||||
ita = ['italiano', 'italian', 'ita', 'it']
|
||||
lat = ['latino', 'lat', 'la', 'español latino', 'espanol latino', 'espaol latino', 'zl', 'mx', 'co', 'vl']
|
||||
vose = ['subtitulado', 'subtitulada', 'sub', 'sub espanol', 'vose', 'espsub', 'su', 'subs castellano',
|
||||
'sub: español', 'vs', 'zs', 'english-spanish subs', 'ingles sub espanol', 'ingles sub español']
|
||||
vos = ['vos', 'sub ingles', 'engsub', 'vosi', 'ingles subtitulado', 'sub: ingles']
|
||||
vo = ['ingles', 'en', 'vo', 'ovos', 'eng', 'v.o', 'english']
|
||||
dual = ['dual']
|
||||
|
||||
language = scrapertools.decodeHtmlentities(language)
|
||||
old_lang = language
|
||||
|
||||
language = simplify(language)
|
||||
|
||||
#logger.debug('language before simplify: %s' % language)
|
||||
#logger.debug('old language: %s' % old_lang)
|
||||
# logger.debug('language before simplify: %s' % language)
|
||||
# logger.debug('old language: %s' % old_lang)
|
||||
if language in cast:
|
||||
language = 'cast'
|
||||
elif language in lat:
|
||||
language = 'lat'
|
||||
elif language in ita:
|
||||
language = 'ita'
|
||||
elif language in sub_ita:
|
||||
language = 'sub-ita'
|
||||
elif language in vose:
|
||||
language = 'vose'
|
||||
elif language in vos:
|
||||
@@ -285,67 +304,67 @@ def set_lang(language):
|
||||
else:
|
||||
language = 'otro'
|
||||
|
||||
#logger.debug('language after simplify: %s' % language)
|
||||
# logger.debug('language after simplify: %s' % language)
|
||||
|
||||
return language
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def title_format(item):
|
||||
#logger.info()
|
||||
# logger.info()
|
||||
|
||||
lang = False
|
||||
valid = True
|
||||
language_color = 'otro'
|
||||
simple_language = ''
|
||||
|
||||
#logger.debug('item.title antes de formatear: %s' % item.title.lower())
|
||||
# logger.debug('item.title antes de formatear: %s' % item.title.lower())
|
||||
|
||||
# TODO se deberia quitar cualquier elemento que no sea un enlace de la lista de findvideos para quitar esto
|
||||
|
||||
#Palabras "prohibidas" en los titulos (cualquier titulo que contengas estas no se procesara en unify)
|
||||
# Palabras "prohibidas" en los titulos (cualquier titulo que contengas estas no se procesara en unify)
|
||||
excluded_words = ['online', 'descarga', 'downloads', 'trailer', 'videoteca', 'gb', 'autoplay']
|
||||
|
||||
# Actions excluidos, (se define canal y action) los titulos que contengan ambos valores no se procesaran en unify
|
||||
excluded_actions = [('videolibrary','get_episodes')]
|
||||
excluded_actions = [('videolibrary', 'get_episodes')]
|
||||
|
||||
# Verifica si hay marca de visto de trakt
|
||||
|
||||
visto = False
|
||||
#logger.debug('titlo con visto? %s' % item.title)
|
||||
|
||||
if '[[I]v[/I]]' in item.title or '[COLOR limegreen][v][/COLOR]' in item.title:
|
||||
visto = True
|
||||
|
||||
# Se elimina cualquier formato previo en el titulo
|
||||
if item.action != '' and item.action !='mainlist':
|
||||
item.title = remove_format(item.title)
|
||||
|
||||
#logger.debug('visto? %s' % visto)
|
||||
|
||||
# Evita que aparezcan los idiomas en los mainlist de cada canal
|
||||
if item.action == 'mainlist':
|
||||
item.language =''
|
||||
|
||||
info = item.infoLabels
|
||||
#logger.debug('item antes de formatear: %s'%item)
|
||||
|
||||
if hasattr(item,'text_color'):
|
||||
item.text_color=''
|
||||
|
||||
#Verifica el item sea valido para ser formateado por unify
|
||||
# Verifica el item sea valido para ser formateado por unify
|
||||
|
||||
if item.channel == 'trailertools' or (item.channel.lower(), item.action.lower()) in excluded_actions or \
|
||||
item.action=='':
|
||||
item.action == '':
|
||||
valid = False
|
||||
else:
|
||||
for word in excluded_words:
|
||||
if word in item.title.lower():
|
||||
valid = False
|
||||
break
|
||||
if not valid:
|
||||
return item
|
||||
|
||||
if valid and item.unify!=False:
|
||||
# Verifica si hay marca de visto de trakt
|
||||
|
||||
visto = False
|
||||
# logger.debug('titulo con visto? %s' % item.title)
|
||||
|
||||
if '[[I]v[/I]]' in item.title or '[COLOR limegreen][v][/COLOR]' in item.title:
|
||||
visto = True
|
||||
|
||||
# Se elimina cualquier formato previo en el titulo
|
||||
if item.action != '' and item.action != 'mainlist' and item.unify:
|
||||
item.title = remove_format(item.title)
|
||||
|
||||
# logger.debug('visto? %s' % visto)
|
||||
|
||||
# Evita que aparezcan los idiomas en los mainlist de cada canal
|
||||
if item.action == 'mainlist':
|
||||
item.language = ''
|
||||
|
||||
info = item.infoLabels
|
||||
# logger.debug('item antes de formatear: %s'%item)
|
||||
|
||||
if hasattr(item, 'text_color'):
|
||||
item.text_color = ''
|
||||
|
||||
if valid and item.unify != False:
|
||||
|
||||
# Formamos el titulo para serie, se debe definir contentSerieName
|
||||
# o show en el item para que esto funcione.
|
||||
@@ -354,25 +373,26 @@ def title_format(item):
|
||||
# Si se tiene la informacion en infolabels se utiliza
|
||||
if item.contentType == 'episode' and info['episode'] != '':
|
||||
if info['title'] == '':
|
||||
info['title'] = '%s - Episodio %s'% (info['tvshowtitle'], info['episode'])
|
||||
info['title'] = '%s - Episodio %s' % (info['tvshowtitle'], info['episode'])
|
||||
elif 'Episode' in info['title']:
|
||||
episode = info['title'].lower().replace('episode', 'episodio')
|
||||
info['title'] = '%s - %s' % (info['tvshowtitle'], episode.capitalize())
|
||||
elif info['episodio_titulo']!='':
|
||||
#logger.debug('info[episode_titulo]: %s' % info['episodio_titulo'])
|
||||
elif info['episodio_titulo'] != '':
|
||||
# logger.debug('info[episode_titulo]: %s' % info['episodio_titulo'])
|
||||
if 'episode' in info['episodio_titulo'].lower():
|
||||
episode = info['episodio_titulo'].lower().replace('episode', 'episodio')
|
||||
item.title = '%sx%s - %s' % (info['season'],info['episode'], episode.capitalize())
|
||||
item.title = '%sx%s - %s' % (info['season'], info['episode'], episode.capitalize())
|
||||
else:
|
||||
item.title = '%sx%s - %s' % (info['season'], info['episode'], info['episodio_titulo'].capitalize())
|
||||
item.title = '%sx%s - %s' % (
|
||||
info['season'], info['episode'], info['episodio_titulo'].capitalize())
|
||||
else:
|
||||
item.title = '%sx%s - %s' % (info['season'],info['episode'], info['title'])
|
||||
item.title = '%sx%s - %s' % (info['season'], info['episode'], info['title'])
|
||||
item.title = set_color(item.title, 'tvshow')
|
||||
|
||||
else:
|
||||
|
||||
# En caso contrario se utiliza el titulo proporcionado por el canal
|
||||
#logger.debug ('color_scheme[tvshow]: %s' % color_scheme['tvshow'])
|
||||
# logger.debug ('color_scheme[tvshow]: %s' % color_scheme['tvshow'])
|
||||
item.title = '%s' % set_color(item.title, 'tvshow')
|
||||
|
||||
elif item.contentTitle:
|
||||
@@ -386,27 +406,27 @@ def title_format(item):
|
||||
item.title = '%s [V.Extend.]' % set_color(item.contentTitle, 'movie')
|
||||
else:
|
||||
item.title = '%s' % set_color(item.contentTitle, 'movie')
|
||||
if item.contentType=='movie':
|
||||
if item.contentType == 'movie':
|
||||
if item.context:
|
||||
if isinstance(item.context, list):
|
||||
item.context.append('Buscar esta pelicula en otros canales')
|
||||
|
||||
if 'Novedades' in item.category and item.from_channel=='news':
|
||||
#logger.debug('novedades')
|
||||
item.title = '%s [%s]'%(item.title, item.channel)
|
||||
if ('Novedades' in item.category and item.from_channel == 'news'):
|
||||
# logger.debug('novedades')
|
||||
item.title = '%s [%s]' % (item.title, item.channel)
|
||||
|
||||
# Verificamos si item.language es una lista, si lo es se toma
|
||||
# cada valor y se normaliza formado una nueva lista
|
||||
|
||||
if hasattr(item,'language') and item.language !='':
|
||||
#logger.debug('tiene language: %s'%item.language)
|
||||
if hasattr(item, 'language') and item.language != '':
|
||||
# logger.debug('tiene language: %s'%item.language)
|
||||
if isinstance(item.language, list):
|
||||
language_list =[]
|
||||
language_list = []
|
||||
for language in item.language:
|
||||
if language != '':
|
||||
lang = True
|
||||
language_list.append(set_lang(remove_format(language)).upper())
|
||||
#logger.debug('language_list: %s' % language_list)
|
||||
# logger.debug('language_list: %s' % language_list)
|
||||
simple_language = language_list
|
||||
else:
|
||||
# Si item.language es un string se normaliza
|
||||
@@ -416,19 +436,19 @@ def title_format(item):
|
||||
else:
|
||||
simple_language = ''
|
||||
|
||||
#item.language = simple_language
|
||||
# item.language = simple_language
|
||||
|
||||
# Damos formato al año si existiera y lo agregamos
|
||||
# al titulo excepto que sea un episodio
|
||||
if info and info.get("year", "") not in [""," "] and item.contentType != 'episode' and not info['season']:
|
||||
if info and info.get("year", "") not in ["", " "] and item.contentType != 'episode' and not info['season']:
|
||||
try:
|
||||
year = '%s' % set_color(info['year'], 'year')
|
||||
item.title = '%s %s' % (item.title, year)
|
||||
except:
|
||||
logger.debug('infoLabels: %s'%info)
|
||||
logger.debug('infoLabels: %s' % info)
|
||||
|
||||
# Damos formato al puntaje si existiera y lo agregamos al titulo
|
||||
if info and info['rating'] and info['rating']!='0.0' and not info['season']:
|
||||
if info and info['rating'] and info['rating'] != '0.0' and not info['season']:
|
||||
|
||||
# Se normaliza el puntaje del rating
|
||||
|
||||
@@ -454,13 +474,29 @@ def title_format(item):
|
||||
# Damos formato a la calidad si existiera y lo agregamos al titulo
|
||||
if item.quality and isinstance(item.quality, str):
|
||||
quality = item.quality.strip()
|
||||
item.title = '%s %s' % (item.title, set_color(quality, 'quality'))
|
||||
else:
|
||||
quality = ''
|
||||
|
||||
# Damos formato al idioma si existiera y lo agregamos al titulo
|
||||
if lang:
|
||||
item.title = add_languages(item.title, simple_language)
|
||||
# Damos formato al idioma-calidad si existieran y los agregamos al plot
|
||||
quality_ = set_color(quality, 'quality')
|
||||
|
||||
if (lang or quality) and item.action == "play":
|
||||
if hasattr(item, "clean_plot"):
|
||||
item.contentPlot = item.clean_plot
|
||||
|
||||
if lang: item.title = add_languages(item.title, simple_language)
|
||||
if quality: item.title = '%s %s' % (item.title, quality_)
|
||||
|
||||
elif (lang or quality) and item.action != "play":
|
||||
|
||||
if item.contentPlot:
|
||||
item.clean_plot = item.contentPlot
|
||||
plot_ = add_info_plot(item.contentPlot, simple_language, quality_)
|
||||
item.contentPlot = plot_
|
||||
else:
|
||||
item.clean_plot = None
|
||||
plot_ = add_info_plot('', simple_language, quality_)
|
||||
item.contentPlot = plot_
|
||||
|
||||
# Para las busquedas por canal
|
||||
if item.from_channel != '':
|
||||
@@ -469,17 +505,16 @@ def title_format(item):
|
||||
logger.debug(channel_parameters)
|
||||
item.title = '%s [%s]' % (item.title, channel_parameters['title'])
|
||||
|
||||
|
||||
# Formato para actualizaciones de series en la videoteca sobreescribe los colores anteriores
|
||||
|
||||
if item.channel=='videolibrary' and item.context!='':
|
||||
if item.action=='get_seasons':
|
||||
if item.channel == 'videolibrary' and item.context != '':
|
||||
if item.action == 'get_seasons':
|
||||
if 'Desactivar' in item.context[1]['title']:
|
||||
item.title= '%s' % (set_color(item.title, 'update'))
|
||||
item.title = '%s' % (set_color(item.title, 'update'))
|
||||
if 'Activar' in item.context[1]['title']:
|
||||
item.title= '%s' % (set_color(item.title, 'no_update'))
|
||||
item.title = '%s' % (set_color(item.title, 'no_update'))
|
||||
|
||||
#logger.debug('Despues del formato: %s' % item)
|
||||
# logger.debug('Despues del formato: %s' % item)
|
||||
# Damos formato al servidor si existiera
|
||||
if item.server:
|
||||
server = '%s' % set_color(item.server.strip().capitalize(), 'server')
|
||||
@@ -487,18 +522,28 @@ def title_format(item):
|
||||
# Comprueba si estamos en findvideos, y si hay server, si es asi no se muestra el
|
||||
# titulo sino el server, en caso contrario se muestra el titulo normalmente.
|
||||
|
||||
#logger.debug('item.title antes de server: %s'%item.title)
|
||||
# logger.debug('item.title antes de server: %s'%item.title)
|
||||
if item.action != 'play' and item.server:
|
||||
item.title ='%s %s'%(item.title, server.strip())
|
||||
item.title = '%s %s' % (item.title, server.strip())
|
||||
|
||||
elif item.action == 'play' and item.server:
|
||||
if hasattr(item, "clean_plot"):
|
||||
item.contentPlot = item.clean_plot
|
||||
|
||||
if item.quality == 'default':
|
||||
quality = ''
|
||||
#logger.debug('language_color: %s'%language_color)
|
||||
item.title = '%s %s' % (server, set_color(quality,'quality'))
|
||||
# logger.debug('language_color: %s'%language_color)
|
||||
item.title = '%s %s' % (server, set_color(quality, 'quality'))
|
||||
if lang:
|
||||
item.title = add_languages(item.title, simple_language)
|
||||
#logger.debug('item.title: %s' % item.title)
|
||||
# logger.debug('item.title: %s' % item.title)
|
||||
# Torrent_info
|
||||
if item.server == 'torrent' and item.torrent_info != '':
|
||||
item.title = '%s [%s]' % (item.title, item.torrent_info)
|
||||
|
||||
if item.channel == 'videolibrary':
|
||||
item.title += ' [%s]' % item.contentChannel
|
||||
|
||||
# si hay verificacion de enlaces
|
||||
if item.alive != '':
|
||||
if item.alive.lower() == 'no':
|
||||
@@ -507,29 +552,33 @@ def title_format(item):
|
||||
item.title = '[[COLOR yellow][B]?[/B][/COLOR]] %s' % item.title
|
||||
else:
|
||||
item.title = '%s' % item.title
|
||||
#logger.debug('item.title despues de server: %s' % item.title)
|
||||
|
||||
# logger.debug('item.title despues de server: %s' % item.title)
|
||||
elif 'library' in item.action:
|
||||
item.title = '%s' % set_color(item.title, 'library')
|
||||
elif item.action == '' and item.title !='':
|
||||
item.title='**- %s -**'%item.title
|
||||
else:
|
||||
elif item.action == '' and item.title != '':
|
||||
item.title = '**- %s -**' % item.title
|
||||
elif item.unify:
|
||||
item.title = '%s' % set_color(item.title, 'otro')
|
||||
#logger.debug('antes de salir %s' % item.title)
|
||||
# logger.debug('antes de salir %s' % item.title)
|
||||
if visto:
|
||||
try:
|
||||
check = u'\u221a'
|
||||
|
||||
title = '[B][COLOR limegreen][%s][/COLOR][/B] %s' % (check, item.title.decode('utf-8'))
|
||||
item.title = title.encode('utf-8')
|
||||
if PY3: item.title = item.title.decode('utf-8')
|
||||
except:
|
||||
check = 'v'
|
||||
title = '[B][COLOR limegreen][%s][/COLOR][/B] %s' % (check, item.title.decode('utf-8'))
|
||||
item.title = title.encode('utf-8')
|
||||
if PY3: item.title = item.title.decode('utf-8')
|
||||
|
||||
return item
|
||||
|
||||
|
||||
def thumbnail_type(item):
|
||||
#logger.info()
|
||||
# logger.info()
|
||||
# Se comprueba que tipo de thumbnail se utilizara en findvideos,
|
||||
# Poster o Logo del servidor
|
||||
|
||||
@@ -539,7 +588,7 @@ def thumbnail_type(item):
|
||||
item.contentThumbnail = item.thumbnail
|
||||
|
||||
if info:
|
||||
if info['thumbnail'] !='':
|
||||
if info['thumbnail'] != '':
|
||||
item.contentThumbnail = info['thumbnail']
|
||||
|
||||
if item.action == 'play':
|
||||
@@ -548,7 +597,7 @@ def thumbnail_type(item):
|
||||
item.thumbnail = info['thumbnail']
|
||||
elif thumb_type == 1:
|
||||
from core.servertools import get_server_parameters
|
||||
#logger.debug('item.server: %s'%item.server)
|
||||
# logger.debug('item.server: %s'%item.server)
|
||||
server_parameters = get_server_parameters(item.server.lower())
|
||||
item.thumbnail = server_parameters.get("thumbnail", item.contentThumbnail)
|
||||
|
||||
@@ -574,7 +623,7 @@ def check_rating(rating):
|
||||
try:
|
||||
# convertimos los decimales p.e. 7.1
|
||||
return "%.1f" % round(_rating, 1)
|
||||
except Exception, ex_dl:
|
||||
except Exception as ex_dl:
|
||||
template = "An exception of type %s occured. Arguments:\n%r"
|
||||
message = template % (type(ex_dl).__name__, ex_dl.args)
|
||||
logger.error(message)
|
||||
@@ -601,18 +650,18 @@ def check_rating(rating):
|
||||
def convert_float(_rating):
|
||||
try:
|
||||
return float(_rating)
|
||||
except ValueError, ex_ve:
|
||||
except ValueError as ex_ve:
|
||||
template = "An exception of type %s occured. Arguments:\n%r"
|
||||
message = template % (type(ex_ve).__name__, ex_ve.args)
|
||||
logger.error(message)
|
||||
return None
|
||||
|
||||
if type(rating) != float:
|
||||
if not isinstance(rating, float):
|
||||
# logger.debug("no soy float")
|
||||
if type(rating) == int:
|
||||
if isinstance(rating, int):
|
||||
# logger.debug("soy int")
|
||||
rating = convert_float(rating)
|
||||
elif type(rating) == str:
|
||||
elif isinstance(rating, str):
|
||||
# logger.debug("soy str")
|
||||
|
||||
rating = rating.replace("<", "")
|
||||
@@ -634,4 +683,4 @@ def check_rating(rating):
|
||||
rating = check_decimal_length(rating)
|
||||
rating = check_range(rating)
|
||||
|
||||
return rating
|
||||
return rating
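The type() comparisons above become isinstance() checks, which also accept subclasses (bool is a subclass of int, for instance). A minimal standalone sketch of the same conversion logic; the ','-to-'.' replacement is an assumption about locale decimals, not the addon's code:

def to_float(rating):
    # isinstance() also matches subclasses, unlike type() == comparisons
    if isinstance(rating, float):
        return rating
    if isinstance(rating, int):
        return float(rating)
    if isinstance(rating, str):
        try:
            return float(rating.replace('<', '').replace(',', '.'))
        except ValueError:
            return None
    return None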
|
||||
@@ -2,7 +2,7 @@
|
||||
import io
|
||||
import os
|
||||
import shutil
|
||||
from cStringIO import StringIO
|
||||
from lib.six import BytesIO
|
||||
|
||||
from core import filetools
|
||||
from platformcode import logger, platformtools
|
||||
@@ -15,7 +15,9 @@ try:
|
||||
import urllib.request as urllib
|
||||
except ImportError:
|
||||
import urllib
|
||||
|
||||
import sys
|
||||
PY3 = False
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
addon = xbmcaddon.Addon('plugin.video.kod')
|
||||
|
||||
_hdr_pat = re.compile("^@@ -(\d+),?(\d+)? \+(\d+),?(\d+)? @@.*")
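The import block above is the usual two-step Python 2/3 compatibility shim; in isolation it looks like this (a sketch, not the addon's exact module):

import sys

PY3 = sys.version_info[0] >= 3

try:
    import urllib.request as urllib   # Python 3 location
except ImportError:
    import urllib                     # Python 2 fallback

if PY3:
    unicode = str                     # keep legacy unicode() calls alive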
|
||||
@@ -33,7 +35,7 @@ def loadCommits(page=1):
|
||||
apiLink = 'https://api.github.com/repos/' + user + '/' + repo + '/commits?sha=' + branch + "&page=" + str(page)
|
||||
logger.info(apiLink)
|
||||
# riprova ogni secondo finché non riesce (ad esempio per mancanza di connessione)
|
||||
for n in xrange(10):
|
||||
for n in range(10):
|
||||
try:
|
||||
commitsLink = urllib.urlopen(apiLink).read()
|
||||
ret = json.loads(commitsLink)
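loadCommits swaps xrange for range and keeps retrying the GitHub API; a self-contained sketch of that idiom (api_link is a placeholder, and the one-second sleep between attempts is an assumption based on the comment above):

import json
import time
try:
    import urllib.request as urllib
except ImportError:
    import urllib

def load_json(api_link, attempts=10):
    for _ in range(attempts):         # range() exists on Py2 and Py3
        try:
            return json.loads(urllib.urlopen(api_link).read())
        except Exception:
            time.sleep(1)             # e.g. connection not up yet
    return None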
|
||||
@@ -112,24 +114,26 @@ def check(background=False):
|
||||
if 'patch' in file:
|
||||
text = ""
|
||||
try:
|
||||
localFile = open(addonDir + file["filename"], 'r+')
|
||||
localFile = io.open(addonDir + file["filename"], 'r+', encoding="utf8")
|
||||
text = localFile.read()
|
||||
if not PY3:
|
||||
text = text.decode('utf-8')
|
||||
except IOError: # nuovo file
|
||||
# crea le cartelle se non esistono
|
||||
dirname = os.path.dirname(addonDir + file["filename"])
|
||||
if not os.path.exists(dirname):
|
||||
os.makedirs(dirname)
|
||||
|
||||
localFile = open(addonDir + file["filename"], 'w')
|
||||
localFile = io.open(addonDir + file["filename"], 'w', encoding="utf8")
|
||||
|
||||
patched = apply_patch(text, (file['patch']+'\n').encode('utf-8'))
|
||||
if patched != text: # non eseguo se già applicata (es. scaricato zip da github)
|
||||
alreadyApplied = False
|
||||
if getShaStr(patched) == file['sha']:
|
||||
localFile.seek(0)
|
||||
localFile.truncate()
|
||||
localFile.writelines(patched)
|
||||
localFile.close()
|
||||
alreadyApplied = False
|
||||
else: # nel caso ci siano stati problemi
|
||||
logger.info('lo sha non corrisponde, scarico il file')
|
||||
localFile.close()
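The open() to io.open(..., encoding='utf8') change gives the same text semantics on both interpreters; sketched in isolation:

import io

def read_patch_target(path):
    # io.open returns unicode text for the given encoding on Py2 and Py3
    try:
        with io.open(path, 'r', encoding='utf8') as f:
            return f.read()
    except IOError:
        return u''   # brand-new file: nothing local to patch yet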
|
||||
@@ -250,7 +254,7 @@ def apply_patch(s,patch,revert=False):
|
||||
|
||||
def getSha(path):
|
||||
try:
|
||||
f = open(path, 'rb')
|
||||
f = io.open(path, 'rb', encoding="utf8")
|
||||
except:
|
||||
return ''
|
||||
size = len(f.read())
|
||||
@@ -259,7 +263,11 @@ def getSha(path):
|
||||
|
||||
|
||||
def getShaStr(str):
|
||||
return githash.blob_hash(StringIO(str), len(str)).hexdigest()
|
||||
if PY3:
|
||||
return githash.blob_hash(BytesIO(str.encode('utf-8')), len(str.encode('utf-8'))).hexdigest()
|
||||
else:
|
||||
return githash.blob_hash(BytesIO(str), len(str)).hexdigest()
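getShaStr now feeds githash a BytesIO; for context, a git blob hash is just SHA-1 over a 'blob <size>\0' header plus the content, so it can be sketched with hashlib alone:

import hashlib

def git_blob_sha(data):
    if isinstance(data, str):              # PY3 path: hash the UTF-8 bytes
        data = data.encode('utf-8')
    header = b'blob %d\x00' % len(data)    # git's blob framing
    return hashlib.sha1(header + data).hexdigest()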
|
||||
|
||||
|
||||
|
||||
def updateFromZip(message='Installazione in corso...'):
|
||||
@@ -267,7 +275,7 @@ def updateFromZip(message='Installazione in corso...'):
|
||||
dp.update(0)
|
||||
|
||||
remotefilename = 'https://github.com/' + user + "/" + repo + "/archive/" + branch + ".zip"
|
||||
localfilename = os.path.join(xbmc.translatePath("special://home/addons/"), "plugin.video.kod.update.zip").encode('utf-8')
|
||||
localfilename = filetools.join(xbmc.translatePath("special://home/addons/"), "plugin.video.kod.update.zip")
|
||||
destpathname = xbmc.translatePath("special://home/addons/")
|
||||
|
||||
logger.info("remotefilename=%s" % remotefilename)
|
||||
@@ -306,7 +314,7 @@ def updateFromZip(message='Installazione in corso...'):
|
||||
for member in zip.infolist():
|
||||
zip.extract(member, destpathname)
|
||||
cur_size += member.file_size
|
||||
dp.update(80 + cur_size * 19 / size)
|
||||
dp.update(int(90 + cur_size * 9 / size))
|
||||
|
||||
except Exception as e:
|
||||
logger.info('Non sono riuscito ad estrarre il file zip')
|
||||
@@ -417,13 +425,14 @@ def fOpen(file, mode = 'r'):
|
||||
logger.info('android, uso FileIO per leggere')
|
||||
return io.FileIO(file, mode)
|
||||
else:
|
||||
return open(file, mode)
|
||||
return io.open(file, mode)
|
||||
|
||||
|
||||
def _pbhook(numblocks, blocksize, filesize, url, dp):
|
||||
try:
|
||||
percent = min((numblocks*blocksize*90)/filesize, 100)
|
||||
dp.update(percent)
|
||||
except:
|
||||
dp.update(int(percent))
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
percent = 90
|
||||
dp.update(percent)
|
||||
dp.update(percent)
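_pbhook is a urlretrieve report hook; the stdlib calls it as (block_count, block_size, total_size) after each chunk. A minimal sketch with placeholder URL and filename (the addon additionally passes url and the dialog through):

try:
    from urllib.request import urlretrieve   # Python 3
except ImportError:
    from urllib import urlretrieve            # Python 2

def progress(numblocks, blocksize, filesize):
    if filesize > 0:
        percent = min(numblocks * blocksize * 100 // filesize, 100)
        print('downloaded %d%%' % percent)

# urlretrieve('https://example.org/file.zip', 'file.zip', progress)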
|
||||
|
||||
@@ -3,13 +3,21 @@
|
||||
# XBMC Config Menu
|
||||
# ------------------------------------------------------------
|
||||
|
||||
from __future__ import division
|
||||
#from builtins import str
|
||||
import sys
|
||||
PY3 = False
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
from builtins import range
|
||||
from past.utils import old_div
|
||||
|
||||
import inspect
|
||||
import os
|
||||
|
||||
import xbmcgui
|
||||
|
||||
from core import channeltools
|
||||
from core import servertools
|
||||
from core import servertools, scrapertools
|
||||
from platformcode import config, logger
|
||||
|
||||
|
||||
@@ -161,7 +169,7 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
self.callback = callback
|
||||
self.item = item
|
||||
|
||||
if type(custom_button) == dict:
|
||||
if isinstance(custom_button, dict):
|
||||
self.custom_button = {}
|
||||
self.custom_button["label"] = custom_button.get("label", "")
|
||||
self.custom_button["function"] = custom_button.get("function", "")
|
||||
@@ -245,12 +253,16 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
def evaluate(self, index, cond):
|
||||
import re
|
||||
|
||||
ok = False
|
||||
|
||||
# Si la condicion es True o False, no hay mas que evaluar, ese es el valor
|
||||
if type(cond) == bool:
|
||||
if isinstance(cond, bool):
|
||||
return cond
|
||||
|
||||
# Obtenemos las condiciones
|
||||
conditions = re.compile("(!?eq|!?gt|!?lt)?\(([^,]+),[\"|']?([^)|'|\"]*)['|\"]?\)[ ]*([+||])?").findall(cond)
|
||||
# conditions = re.compile("(!?eq|!?gt|!?lt)?\(([^,]+),[\"|']?([^)|'|\"]*)['|\"]?\)[ ]*([+||])?").findall(cond)
|
||||
conditions = re.compile(r'''(!?eq|!?gt|!?lt)?\s*\(\s*([^, ]+)\s*,\s*["']?([^"'\)]+)["']?\)([+|])?''').findall(cond)
|
||||
# conditions = scrapertools.find_multiple_matches(cond, r"(!?eq|!?gt|!?lt)?\(([^,]+),[\"|']?([^)|'|\"]*)['|\"]?\)[ ]*([+||])?")
|
||||
for operator, id, value, next in conditions:
|
||||
# El id tiene que ser un numero, sino, no es valido y devuelve False
|
||||
try:
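The rewritten condition regex is easier to see with a concrete input; each tuple is (operator, setting index, value, joiner):

import re

COND = re.compile(r'''(!?eq|!?gt|!?lt)?\s*\(\s*([^, ]+)\s*,\s*["']?([^"'\)]+)["']?\)([+|])?''')

print(COND.findall("eq(0,true)+!gt(2,5)"))
# [('eq', '0', 'true', '+'), ('!gt', '2', '5', '')]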
|
||||
@@ -276,7 +288,7 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
|
||||
if value.startswith('@') and unicode(value[1:]).isnumeric():
|
||||
value = config.get_localized_string(int(value[1:]))
|
||||
|
||||
|
||||
# Operaciones lt "menor que" y gt "mayor que", requieren que las comparaciones sean numeros, sino devuelve
|
||||
# False
|
||||
if operator in ["lt", "!lt", "gt", "!gt"]:
|
||||
@@ -294,9 +306,9 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
pass
|
||||
|
||||
# valor bool
|
||||
if value.lower() == "true":
|
||||
if not isinstance(value, int) and value.lower() == "true":
|
||||
value = True
|
||||
elif value.lower() == "false":
|
||||
elif not isinstance(value, int) and value.lower() == "false":
|
||||
value = False
|
||||
|
||||
# operacion "eq" "igual a"
|
||||
@@ -515,7 +527,7 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
continue
|
||||
if c["type"] == "list" and "lvalues" not in c:
|
||||
continue
|
||||
if c["type"] == "list" and not type(c["lvalues"]) == list:
|
||||
if c["type"] == "list" and not isinstance(c["lvalues"], list):
|
||||
continue
|
||||
if c["type"] == "list" and not len(c["lvalues"]) > 0:
|
||||
continue
|
||||
@@ -590,7 +602,7 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
self.check_ok(self.values)
|
||||
|
||||
def dispose_controls(self, index, focus=False, force=False):
|
||||
show_controls = self.controls_height / self.height_control - 1
|
||||
show_controls = old_div(self.controls_height, self.height_control) - 1
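old_div (from past.utils) pins down the legacy Python 2 division the original '/' relied on; behaviourally it is roughly:

def old_div(a, b):
    # Python 2 floored int/int; Py3 (and __future__ division) does not
    if isinstance(a, int) and isinstance(b, int):
        return a // b
    return a / b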
|
||||
|
||||
visible_count = 0
|
||||
|
||||
@@ -609,7 +621,7 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
if index < 0: index = 0
|
||||
new_index = index
|
||||
|
||||
if self.index <> new_index or force:
|
||||
if self.index != new_index or force:
|
||||
for x, c in enumerate(self.visible_controls):
|
||||
if x < new_index or visible_count > show_controls or not c["show"]:
|
||||
self.set_visible(c, False)
|
||||
@@ -693,7 +705,7 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
else:
|
||||
self.return_value = getattr(cb_channel, self.custom_button['function'])(self.item, self.values)
|
||||
if not self.custom_button["close"]:
|
||||
if isinstance(self.return_value, dict) and self.return_value.has_key("label"):
|
||||
if isinstance(self.return_value, dict) and "label" in self.return_value:
|
||||
self.getControl(10006).setLabel(self.return_value['label'])
|
||||
|
||||
for c in self.list_controls:
|
||||
@@ -757,23 +769,23 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
# Controles de ajustes, si se cambia el valor de un ajuste, cambiamos el valor guardado en el diccionario de
|
||||
# valores
|
||||
# Obtenemos el control sobre el que se ha echo click
|
||||
control = self.getControl(id)
|
||||
# control = self.getControl(id)
|
||||
|
||||
# Lo buscamos en el listado de controles
|
||||
for cont in self.list_controls:
|
||||
|
||||
# Si el control es un "downBtn" o "upBtn" son los botones del "list"
|
||||
# en este caso cambiamos el valor del list
|
||||
if cont["type"] == "list" and (cont["downBtn"] == control or cont["upBtn"] == control):
|
||||
if cont["type"] == "list" and (cont["downBtn"].getId() == id or cont["upBtn"].getId() == id):
|
||||
|
||||
# Para bajar una posicion
|
||||
if cont["downBtn"] == control:
|
||||
if cont["downBtn"].getId() == id:
|
||||
index = cont["lvalues"].index(cont["label"].getLabel())
|
||||
if index > 0:
|
||||
cont["label"].setLabel(cont["lvalues"][index - 1])
|
||||
|
||||
# Para subir una posicion
|
||||
elif cont["upBtn"] == control:
|
||||
elif cont["upBtn"].getId() == id:
|
||||
index = cont["lvalues"].index(cont["label"].getLabel())
|
||||
if index < len(cont["lvalues"]) - 1:
|
||||
cont["label"].setLabel(cont["lvalues"][index + 1])
|
||||
@@ -782,11 +794,11 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
self.values[cont["id"]] = cont["lvalues"].index(cont["label"].getLabel())
|
||||
|
||||
# Si esl control es un "bool", guardamos el nuevo valor True/False
|
||||
if cont["type"] == "bool" and cont["control"] == control:
|
||||
if cont["type"] == "bool" and cont["control"].getId() == id:
|
||||
self.values[cont["id"]] = bool(cont["control"].isSelected())
|
||||
|
||||
# Si esl control es un "text", guardamos el nuevo valor
|
||||
if cont["type"] == "text" and cont["control"] == control:
|
||||
if cont["type"] == "text" and cont["control"].getId() == id:
|
||||
# Versiones antiguas requieren abrir el teclado manualmente
|
||||
if xbmcgui.ControlEdit == ControlEdit:
|
||||
import xbmc
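Several hunks above stop comparing control objects and compare getId() instead; the assumption is that Kodi can hand back a fresh wrapper object for the same underlying control, so object equality is unreliable while the numeric ID stays stable. Sketch:

def find_clicked(list_controls, clicked_id):
    for cont in list_controls:
        if cont["control"].getId() == clicked_id:
            return cont
    return None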
|
||||
@@ -817,9 +829,9 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
if action == 1:
|
||||
# Si el foco no está en ninguno de los tres botones inferiores, y esta en un "list" cambiamos el valor
|
||||
if focus not in [10004, 10005, 10006]:
|
||||
control = self.getFocus()
|
||||
control = self.getFocus().getId()
|
||||
for cont in self.list_controls:
|
||||
if cont["type"] == "list" and cont["control"] == control:
|
||||
if cont["type"] == "list" and cont["control"].getId() == control:
|
||||
index = cont["lvalues"].index(cont["label"].getLabel())
|
||||
if index > 0:
|
||||
cont["label"].setLabel(cont["lvalues"][index - 1])
|
||||
@@ -843,9 +855,9 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
elif action == 2:
|
||||
# Si el foco no está en ninguno de los tres botones inferiores, y esta en un "list" cambiamos el valor
|
||||
if focus not in [10004, 10005, 10006]:
|
||||
control = self.getFocus()
|
||||
control = self.getFocus().getId()
|
||||
for cont in self.list_controls:
|
||||
if cont["type"] == "list" and cont["control"] == control:
|
||||
if cont["type"] == "list" and cont["control"].getId() == control:
|
||||
index = cont["lvalues"].index(cont["label"].getLabel())
|
||||
if index < len(cont["lvalues"]) - 1:
|
||||
cont["label"].setLabel(cont["lvalues"][index + 1])
|
||||
@@ -870,11 +882,9 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
# Si el foco no está en ninguno de los tres botones inferiores, bajamos el foco en los controles de ajustes
|
||||
if focus not in [10004, 10005, 10006]:
|
||||
try:
|
||||
focus_control = \
|
||||
[self.visible_controls.index(c) for c in self.visible_controls if
|
||||
c["control"] == self.getFocus()][
|
||||
0]
|
||||
focus_control = [self.visible_controls.index(c) for c in self.visible_controls if c["control"].getId() == self.getFocus().getId()][0]
|
||||
focus_control += 1
|
||||
|
||||
except:
|
||||
focus_control = 0
|
||||
|
||||
@@ -895,9 +905,7 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
if focus not in [10003, 10004, 10005, 10006]:
|
||||
try:
|
||||
focus_control = \
|
||||
[self.visible_controls.index(c) for c in self.visible_controls if
|
||||
c["control"] == self.getFocus()][
|
||||
0]
|
||||
[self.visible_controls.index(c) for c in self.visible_controls if c["control"].getId() == self.getFocus().getId()][0]
|
||||
focus_control -= 1
|
||||
|
||||
while not focus_control == -1 and (self.visible_controls[focus_control]["type"] == "label" or not
|
||||
@@ -936,11 +944,11 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
elif action == 504:
|
||||
|
||||
if self.xx > raw_action.getAmount2():
|
||||
if (self.xx - int(raw_action.getAmount2())) / self.height_control:
|
||||
if old_div((self.xx - int(raw_action.getAmount2())), self.height_control):
|
||||
self.xx -= self.height_control
|
||||
self.dispose_controls(self.index + 1)
|
||||
else:
|
||||
if (int(raw_action.getAmount2()) - self.xx) / self.height_control:
|
||||
if old_div((int(raw_action.getAmount2()) - self.xx), self.height_control):
|
||||
self.xx += self.height_control
|
||||
self.dispose_controls(self.index - 1)
|
||||
return
|
||||
@@ -981,7 +989,7 @@ class ControlEdit(xbmcgui.ControlButton):
|
||||
|
||||
def setWidth(self, w):
|
||||
xbmcgui.ControlButton.setWidth(self, w)
|
||||
self.textControl.setWidth(w / 2)
|
||||
self.textControl.setWidth(old_div(w, 2))
|
||||
|
||||
def setHeight(self, w):
|
||||
xbmcgui.ControlButton.setHeight(self, w)
|
||||
@@ -992,7 +1000,7 @@ class ControlEdit(xbmcgui.ControlButton):
|
||||
if xbmcgui.__version__ == "1.2":
|
||||
self.textControl.setPosition(x + self.getWidth(), y)
|
||||
else:
|
||||
self.textControl.setPosition(x + self.getWidth() / 2, y)
|
||||
self.textControl.setPosition(x + old_div(self.getWidth(), 2), y)
|
||||
|
||||
def setText(self, text):
|
||||
self.text = text
|
||||
|
||||
@@ -91,8 +91,7 @@ class InfoWindow(xbmcgui.WindowXMLDialog):
|
||||
En caso de peliculas:
|
||||
Coge el titulo de los siguientes campos (en este orden)
|
||||
1. contentTitle (este tiene prioridad 1)
|
||||
2. fulltitle (este tiene prioridad 2)
|
||||
3. title (este tiene prioridad 3)
|
||||
2. title (este tiene prioridad 2)
|
||||
El primero que contenga "algo" lo interpreta como el titulo (es importante asegurarse que el titulo este en
|
||||
su sitio)
|
||||
|
||||
|
||||
@@ -3,17 +3,24 @@
|
||||
# XBMC Library Tools
|
||||
# ------------------------------------------------------------
|
||||
|
||||
from future import standard_library
|
||||
standard_library.install_aliases()
|
||||
#from builtins import str
|
||||
import sys
|
||||
PY3 = False
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import re
|
||||
|
||||
import urllib2
|
||||
import xbmc
|
||||
|
||||
from core import filetools
|
||||
from core import jsontools
|
||||
from platformcode import config, logger
|
||||
from platformcode import platformtools
|
||||
from core import scrapertools
|
||||
|
||||
|
||||
def mark_auto_as_watched(item):
|
||||
@@ -83,7 +90,6 @@ def sync_trakt_addon(path_folder):
|
||||
"special://home/addons/script.trakt/"]
|
||||
|
||||
for path in paths:
|
||||
import sys
|
||||
sys.path.append(xbmc.translatePath(path))
|
||||
|
||||
# se obtiene las series vistas
|
||||
@@ -94,10 +100,9 @@ def sync_trakt_addon(path_folder):
|
||||
return
|
||||
|
||||
shows = traktapi.getShowsWatched({})
|
||||
shows = shows.items()
|
||||
shows = list(shows.items())
|
||||
|
||||
# obtenemos el id de la serie para comparar
|
||||
import re
|
||||
_id = re.findall("\[(.*?)\]", path_folder, flags=re.DOTALL)[0]
|
||||
logger.debug("el id es %s" % _id)
|
||||
|
||||
@@ -329,9 +334,7 @@ def mark_season_as_watched_on_kodi(item, value=1):
|
||||
def mark_content_as_watched_on_alfa(path):
|
||||
from specials import videolibrary
|
||||
from core import videolibrarytools
|
||||
from core import scrapertools
|
||||
from core import filetools
|
||||
import re
|
||||
|
||||
"""
|
||||
marca toda la serie o película como vista o no vista en la Videoteca de Alfa basado en su estado en la Videoteca de Kodi
|
||||
@type str: path
|
||||
@@ -361,6 +364,9 @@ def mark_content_as_watched_on_alfa(path):
|
||||
if "\\" in path:
|
||||
path = path.replace("/", "\\")
|
||||
head_nfo, item = videolibrarytools.read_nfo(path) #Leo el .nfo del contenido
|
||||
if not item:
|
||||
logger.error('.NFO no encontrado: ' + path)
|
||||
return
|
||||
|
||||
if FOLDER_TVSHOWS in path: #Compruebo si es CINE o SERIE
|
||||
contentType = "episode_view" #Marco la tabla de BBDD de Kodi Video
|
||||
@@ -379,7 +385,7 @@ def mark_content_as_watched_on_alfa(path):
|
||||
nfo_name = scrapertools.find_single_match(path2, '\]\/(.*?)$') #Construyo el nombre del .nfo
|
||||
path1 = path1.replace(nfo_name, '') #para la SQL solo necesito la carpeta
|
||||
path2 = path2.replace(nfo_name, '') #para la SQL solo necesito la carpeta
|
||||
path2 = filetools.remove_smb_credential(path2) #Si el archivo está en un servidor SMB, quiamos las credenciales
|
||||
path2 = filetools.remove_smb_credential(path2) #Si el archivo está en un servidor SMB, quitamos las credenciales
|
||||
|
||||
#Ejecutamos la sentencia SQL
|
||||
sql = 'select strFileName, playCount from %s where (strPath like "%s" or strPath like "%s")' % (contentType, path1, path2)
|
||||
@@ -399,7 +405,11 @@ def mark_content_as_watched_on_alfa(path):
|
||||
playCount_final = 0
|
||||
elif playCount >= 1:
|
||||
playCount_final = 1
|
||||
title_plain = title_plain.decode("utf-8").encode("utf-8") #Hacemos esto porque si no genera esto: u'title_plain'
|
||||
|
||||
elif not PY3 and isinstance(title_plain, (str, unicode)):
|
||||
title_plain = title_plain.decode("utf-8").encode("utf-8") #Hacemos esto porque si no genera esto: u'title_plain'
|
||||
elif PY3 and isinstance(title_plain, bytes):
|
||||
title_plain = title_plain.decode('utf-8')
|
||||
item.library_playcounts.update({title_plain: playCount_final}) #actualizamos el playCount del .nfo
|
||||
|
||||
if item.infoLabels['mediatype'] == "tvshow": #Actualizamos los playCounts de temporadas y Serie
|
||||
@@ -420,6 +430,7 @@ def get_data(payload):
|
||||
@param payload: data
|
||||
:return:
|
||||
"""
|
||||
import urllib.request, urllib.error
|
||||
logger.info("payload: %s" % payload)
|
||||
# Required header for XBMC JSON-RPC calls, otherwise you'll get a 415 HTTP response code - Unsupported media type
|
||||
headers = {'content-type': 'application/json'}
|
||||
@@ -433,14 +444,14 @@ def get_data(payload):
|
||||
|
||||
xbmc_json_rpc_url = "http://" + config.get_setting("xbmc_host", "videolibrary") + ":" + str(
|
||||
xbmc_port) + "/jsonrpc"
|
||||
req = urllib2.Request(xbmc_json_rpc_url, data=jsontools.dump(payload), headers=headers)
|
||||
f = urllib2.urlopen(req)
|
||||
req = urllib.request.Request(xbmc_json_rpc_url, data=jsontools.dump(payload), headers=headers)
|
||||
f = urllib.request.urlopen(req)
|
||||
response = f.read()
|
||||
f.close()
|
||||
|
||||
logger.info("get_data: response %s" % response)
|
||||
data = jsontools.load(response)
|
||||
except Exception, ex:
|
||||
except Exception as ex:
|
||||
template = "An exception of type %s occured. Arguments:\n%r"
|
||||
message = template % (type(ex).__name__, ex.args)
|
||||
logger.error("error en xbmc_json_rpc_url: %s" % message)
|
||||
@@ -448,7 +459,7 @@ def get_data(payload):
|
||||
else:
|
||||
try:
|
||||
data = jsontools.load(xbmc.executeJSONRPC(jsontools.dump(payload)))
|
||||
except Exception, ex:
|
||||
except Exception as ex:
|
||||
template = "An exception of type %s occured. Arguments:\n%r"
|
||||
message = template % (type(ex).__name__, ex.args)
|
||||
logger.error("error en xbmc.executeJSONRPC: %s" % message)
|
||||
@@ -477,6 +488,7 @@ def update(folder_content=config.get_setting("folder_tvshows"), folder=""):
|
||||
}
|
||||
|
||||
if folder:
|
||||
folder = str(folder)
|
||||
videolibrarypath = config.get_videolibrary_config_path()
|
||||
|
||||
if folder.endswith('/') or folder.endswith('\\'):
|
||||
@@ -489,9 +501,10 @@ def update(folder_content=config.get_setting("folder_tvshows"), folder=""):
|
||||
videolibrarypath = videolibrarypath[:-1]
|
||||
update_path = videolibrarypath + "/" + folder_content + "/" + folder + "/"
|
||||
else:
|
||||
update_path = filetools.join(videolibrarypath, folder_content, folder) + "/"
|
||||
#update_path = filetools.join(videolibrarypath, folder_content, folder) + "/" # Problemas de encode en "folder"
|
||||
update_path = filetools.join(videolibrarypath, folder_content, ' ').rstrip()
|
||||
|
||||
if not update_path.startswith("smb://"):
|
||||
if not scrapertools.find_single_match(update_path, '(^\w+:\/\/)'):
|
||||
payload["params"] = {"directory": update_path}
|
||||
|
||||
while xbmc.getCondVisibility('Library.IsScanningVideo()'):
|
||||
@@ -663,7 +676,7 @@ def set_content(content_type, silent=False):
|
||||
if sql_videolibrarypath.startswith("special://"):
|
||||
sql_videolibrarypath = sql_videolibrarypath.replace('/profile/', '/%/').replace('/home/userdata/', '/%/')
|
||||
sep = '/'
|
||||
elif sql_videolibrarypath.startswith("smb://"):
|
||||
elif scrapertools.find_single_match(sql_videolibrarypath, '(^\w+:\/\/)'):
|
||||
sep = '/'
|
||||
else:
|
||||
sep = os.sep
|
||||
@@ -881,7 +894,7 @@ def add_sources(path):
|
||||
# Nodo <name>
|
||||
nodo_name = xmldoc.createElement("name")
|
||||
sep = os.sep
|
||||
if path.startswith("special://") or path.startswith("smb://"):
|
||||
if path.startswith("special://") or scrapertools.find_single_match(path, '(^\w+:\/\/)'):
|
||||
sep = "/"
|
||||
name = path
|
||||
if path.endswith(sep):
|
||||
@@ -904,8 +917,13 @@ def add_sources(path):
|
||||
nodo_video.appendChild(nodo_source)
|
||||
|
||||
# Guardamos los cambios
|
||||
filetools.write(SOURCES_PATH,
|
||||
'\n'.join([x for x in xmldoc.toprettyxml().encode("utf-8").splitlines() if x.strip()]))
|
||||
if not PY3:
|
||||
filetools.write(SOURCES_PATH,
|
||||
'\n'.join([x for x in xmldoc.toprettyxml().encode("utf-8").splitlines() if x.strip()]))
|
||||
else:
|
||||
filetools.write(SOURCES_PATH,
|
||||
b'\n'.join([x for x in xmldoc.toprettyxml().encode("utf-8").splitlines() if x.strip()]),
|
||||
vfs=False)
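The PY3 branch exists because encode('utf-8') produces bytes, so splitlines() yields bytes items and the join separator must be bytes too; in miniature:

pretty = u'<video>\n  <source/>\n</video>'
data = pretty.encode('utf-8')                     # bytes from here on
cleaned = b'\n'.join(x for x in data.splitlines() if x.strip())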
|
||||
|
||||
|
||||
def ask_set_content(flag, silent=False):
|
||||
|
||||
@@ -245,6 +245,10 @@ msgctxt "#30137"
|
||||
msgid "Direct"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30138"
|
||||
msgid "Live"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30151"
|
||||
msgid "Watch the video"
|
||||
msgstr ""
|
||||
@@ -5734,7 +5738,19 @@ msgid "Playback"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#70754"
|
||||
msgid "Compact mode"
|
||||
msgid "Display mode"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#70755"
|
||||
msgid "Default"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#70756"
|
||||
msgid "Extended"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#70757"
|
||||
msgid "Compact"
|
||||
msgstr ""
|
||||
|
||||
# DNS start [ settings and declaration ]
|
||||
@@ -5805,3 +5821,71 @@ msgstr ""
|
||||
msgctxt "#707417"
|
||||
msgid "Favourite quality"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707418"
|
||||
msgid "Follow the steps below:"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707419"
|
||||
msgid "%s) click here to enable debug logging"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707420"
|
||||
msgid "%s) repeat what you did to cause the error"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707421"
|
||||
msgid "%s) click here to create the report"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707422"
|
||||
msgid "%s) click here to disable debug logging"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707423"
|
||||
msgid "Explain the issue and share this link:"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707424"
|
||||
msgid "Service not available. Try again later"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707425"
|
||||
msgid "Debug should be active"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707426"
|
||||
msgid "before generating the report"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707427"
|
||||
msgid "Unable to read kodi log"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707428"
|
||||
msgid "Failed to upload report"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707429"
|
||||
msgid "Report an issue"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707430"
|
||||
msgid "Debug logging"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707431"
|
||||
msgid "Enabled"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707432"
|
||||
msgid "Disabled"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707433"
|
||||
msgid "Delete"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#707434"
|
||||
msgid "reCaptcha verification failed"
|
||||
msgstr ""
|
||||
@@ -245,6 +245,10 @@ msgctxt "#30137"
|
||||
msgid "Direct"
|
||||
msgstr "Diretto"
|
||||
|
||||
msgctxt "#30138"
|
||||
msgid "Live"
|
||||
msgstr "Live"
|
||||
|
||||
msgctxt "#30151"
|
||||
msgid "Watch the video"
|
||||
msgstr "Guarda il video"
|
||||
@@ -5738,8 +5742,20 @@ msgid "Playback"
|
||||
msgstr "Riproduzione"
|
||||
|
||||
msgctxt "#70754"
|
||||
msgid "Compact mode"
|
||||
msgstr "Modalità compatta"
|
||||
msgid "Display mode"
|
||||
msgstr "Modalità visualizzazione"
|
||||
|
||||
msgctxt "#70755"
|
||||
msgid "Default"
|
||||
msgstr "Predefinita"
|
||||
|
||||
msgctxt "#70756"
|
||||
msgid "Extended"
|
||||
msgstr "Estesa"
|
||||
|
||||
msgctxt "#70757"
|
||||
msgid "Compact"
|
||||
msgstr "Compatta"
|
||||
|
||||
# DNS start [ settings and declaration ]
|
||||
msgctxt "#707401"
|
||||
@@ -5809,3 +5825,71 @@ msgstr "Ogni quanto vuoi che vengano controllati? (ore)"
|
||||
msgctxt "#707417"
|
||||
msgid "Favourite quality"
|
||||
msgstr "Qualità preferita"
|
||||
|
||||
msgctxt "#707418"
|
||||
msgid "Follow the steps below:"
|
||||
msgstr "Segui i seguenti passi:"
|
||||
|
||||
msgctxt "#707419"
|
||||
msgid "%s) click here to enable debug logging"
|
||||
msgstr "%s) clicca qui per attivare il logging di debug"
|
||||
|
||||
msgctxt "#707420"
|
||||
msgid "%s) repeat what you did to cause the error"
|
||||
msgstr "%s) ripeti ciò che hai fatto per causare l'errore"
|
||||
|
||||
msgctxt "#707421"
|
||||
msgid "%s) click here to create the report"
|
||||
msgstr "%s) clicca qui per creare il report"
|
||||
|
||||
msgctxt "#707422"
|
||||
msgid "%s) click here to disable debug logging"
|
||||
msgstr "%s) clicca qui per disattivare il logging di debug"
|
||||
|
||||
msgctxt "#707423"
|
||||
msgid "Explain the issue and share this link:"
|
||||
msgstr "Spiega il problema e condividi questo link:"
|
||||
|
||||
msgctxt "#707424"
|
||||
msgid "Service not available. Try again later"
|
||||
msgstr "Servizio non disponibile Riprova più tardi"
|
||||
|
||||
msgctxt "#707425"
|
||||
msgid "Debug should be active"
|
||||
msgstr "Il debug dovrebbe essere attivo"
|
||||
|
||||
msgctxt "#707426"
|
||||
msgid "before generating the report"
|
||||
msgstr "prima di generare il report"
|
||||
|
||||
msgctxt "#707427"
|
||||
msgid "Unable to read kodi log"
|
||||
msgstr "Impossibile leggere il log di kodi"
|
||||
|
||||
msgctxt "#707428"
|
||||
msgid "Failed to upload report"
|
||||
msgstr "Impossibile caricare il report"
|
||||
|
||||
msgctxt "#707429"
|
||||
msgid "Report an issue"
|
||||
msgstr "Segnala un problema"
|
||||
|
||||
msgctxt "#707430"
|
||||
msgid "Debug logging"
|
||||
msgstr "Logging di debug"
|
||||
|
||||
msgctxt "#707431"
|
||||
msgid "Enabled"
|
||||
msgstr "Attivato"
|
||||
|
||||
msgctxt "#707432"
|
||||
msgid "Disabed"
|
||||
msgstr "Disattivato"
|
||||
|
||||
msgctxt "#707433"
|
||||
msgid "Delete"
|
||||
msgstr "Cancella"
|
||||
|
||||
msgctxt "#707434"
|
||||
msgid "reCaptcha verification failed"
|
||||
msgstr "Verifica reCaptcha fallita"
|
||||
@@ -59,7 +59,8 @@
|
||||
<setting id="videolibrary_kodi" type="bool" label="70120" enable="lt(-1,2)+eq(0,false)" default="false"/>
|
||||
<setting id="videolibrary_max_quality" type="bool" label="70729" default="false" visible="true"/>
|
||||
<setting id="next_ep" type="enum" label="70746" lvalues="70752|70747|70748" default="0"/>
|
||||
<setting id="next_ep_type" type="bool" label="70754" default="false" visible="eq(-1,2)"/>
|
||||
<setting id="next_ep_type" type="select" label="70754" lvalues="70755|70756|70757" default="0" visible="eq(-1,2)"/>
|
||||
<!-- <setting id="next_ep_type" type="bool" label="70754" default="false" visible="eq(-1,2)"/> -->
|
||||
<setting id="next_ep_seconds" type="enum" values="20|30|40|50|60|70|80|90|100|110|120" label="70749" default="2" visible="!eq(-2,0)"/>
|
||||
</category>
|
||||
|
||||
|
||||
@@ -26,7 +26,7 @@
|
||||
<height>34</height>
|
||||
<width>725</width>
|
||||
<font>font12_title</font>
|
||||
<textcolor>0xFFFFFFFF</textcolor>
|
||||
<textcolor>FFFFFFFF</textcolor>
|
||||
<align>center</align>
|
||||
<aligny>center</aligny>
|
||||
<label>$ADDON[plugin.video.kod 70000]</label>
|
||||
@@ -45,6 +45,7 @@
|
||||
<width>200</width>
|
||||
<height>50</height>
|
||||
<textwidth>110</textwidth>
|
||||
<textcolor>FFFFFFFF</textcolor>
|
||||
<texturefocus>Controls/KeyboardKey.png</texturefocus>
|
||||
<texturenofocus>Controls/KeyboardKeyNF.png</texturenofocus>
|
||||
<align>center</align>
|
||||
@@ -57,6 +58,7 @@
|
||||
<width>200</width>
|
||||
<height>50</height>
|
||||
<textwidth>110</textwidth>
|
||||
<textcolor>FFFFFFFF</textcolor>
|
||||
<texturefocus>Controls/KeyboardKey.png</texturefocus>
|
||||
<texturenofocus>Controls/KeyboardKeyNF.png</texturenofocus>
|
||||
<align>center</align>
|
||||
@@ -69,6 +71,7 @@
|
||||
<width>200</width>
|
||||
<height>50</height>
|
||||
<textwidth>110</textwidth>
|
||||
<textcolor>FFFFFFFF</textcolor>
|
||||
<texturefocus>Controls/KeyboardKey.png</texturefocus>
|
||||
<texturenofocus>Controls/KeyboardKeyNF.png</texturenofocus>
|
||||
<align>center</align>
|
||||
@@ -86,7 +89,7 @@
|
||||
<width>745</width>
|
||||
<height>300</height>
|
||||
<font>font16</font>
|
||||
<textcolor>0xFFFFFFFF</textcolor>
|
||||
<textcolor>FFFFFFFF</textcolor>
|
||||
<align>center</align>
|
||||
<aligny>center</aligny>
|
||||
<label>$ADDON[plugin.video.kod 70004]</label>
|
||||
@@ -97,7 +100,7 @@
|
||||
<posx>780</posx>
|
||||
<width>10</width>
|
||||
<height>300</height>
|
||||
<textcolor>0xFFFFFFFF</textcolor>
|
||||
<textcolor>FFFFFFFF</textcolor>
|
||||
<texture>Controls/ScrollBack.png</texture>
|
||||
</control>
|
||||
<control type="image" id="10009">
|
||||
|
||||
@@ -42,7 +42,7 @@
|
||||
<itemgap>0</itemgap>
|
||||
<align>right</align>
|
||||
<control type="button" id="11">
|
||||
<label>$ADDON[plugin.video.kod 70750] $INFO[Player.TimeRemaining(mm:ss)]</label>
|
||||
<label>$INFO[Window.Property(title)] | $INFO[Window.Property(ep_title)] | $INFO[Player.TimeRemaining(secs),,]</label>
|
||||
<onclick>SendClick(3012)</onclick>
|
||||
<height>40</height>
|
||||
<width min="50">auto</width>
|
||||
|
||||
@@ -42,7 +42,7 @@
|
||||
<itemgap>0</itemgap>
|
||||
<align>right</align>
|
||||
<control type="button" id="11">
|
||||
<label>[B]$INFO[Player.TimeRemaining(mm:ss)][/B]</label>
|
||||
<label>[B]$INFO[Player.TimeRemaining(secs),,][/B]</label>
|
||||
<onclick>SendClick(3012)</onclick>
|
||||
<!-- <visible>!Integer.IsGreater(Player.TimeRemaining,59)</visible> -->
|
||||
<height>40</height>
|
||||
|
||||
125
resources/skins/Default/720p/NextDialogExtended.xml
Normal file
@@ -0,0 +1,125 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<window>
|
||||
<defaultcontrol always="true">20</defaultcontrol>
|
||||
<onload>Dialog.Close(fullscreeninfo,true)</onload>
|
||||
<onload>Dialog.Close(videoosd,true)</onload>
|
||||
<controls>
|
||||
<control type="group">
|
||||
<animation type="WindowOpen" reversible="false">
|
||||
<effect type="fade" start="0" end="100" time="600" />
|
||||
<effect type="slide" start="115,0" end="0,0" time="600" />
|
||||
</animation>
|
||||
<animation type="WindowClose" reversible="false">
|
||||
<effect type="fade" start="100" end="0" time="400" />
|
||||
<effect type="slide" start="0,0" end="115,0" time="400" />
|
||||
</animation>
|
||||
<control type="group">
|
||||
<right>30</right>
|
||||
<bottom>30</bottom>
|
||||
<height>220</height>
|
||||
<width>326</width>
|
||||
<!-- Background -->
|
||||
<control type="image">
|
||||
<top>0</top>
|
||||
<right>0</right>
|
||||
<width>326</width>
|
||||
<height>180</height>
|
||||
<texture>$INFO[Window.Property(next_img)]</texture>
|
||||
</control>
|
||||
<control type="group">
|
||||
<top>0</top>
|
||||
<right>0</right>
|
||||
<width>100%</width>
|
||||
<!-- buttons -->
|
||||
<control type="button" id="3012">
|
||||
<left>-1000</left>
|
||||
<top>-1000</top>
|
||||
<height>1</height>
|
||||
<width>1</width>
|
||||
</control>
|
||||
<control type="grouplist" id="20">
|
||||
<orientation>vertical</orientation>
|
||||
<height>220</height>
|
||||
<width>326</width>
|
||||
<itemgap>0</itemgap>
|
||||
<right>0</right>
|
||||
<control type="button" id="11">
|
||||
<label></label>
|
||||
<onclick>SendClick(3012)</onclick>
|
||||
<height>180</height>
|
||||
<width>326</width>
|
||||
<right>0</right>
|
||||
<font>font30_title</font>
|
||||
<textoffsety>20</textoffsety>
|
||||
<textcolor>FFFFFFFF</textcolor>
|
||||
<focusedcolor>FFFFFFFF</focusedcolor>
|
||||
<selectedcolor>FFFFFFFF</selectedcolor>
|
||||
<shadowcolor>22000000</shadowcolor>
|
||||
<aligny>top</aligny>
|
||||
<align>center</align>
|
||||
<texturefocus border="10">NextDialog/background-play.png</texturefocus>
|
||||
<texturenofocus border="10" colordiffuse="00232323">NextDialog/background-diffuse.png</texturenofocus>
|
||||
<pulseonselect>no</pulseonselect>
|
||||
</control>
|
||||
<control type="button" id="3013">
|
||||
<label>$ADDON[plugin.video.kod 60396]</label>
|
||||
<height>40</height>
|
||||
<width>326</width>
|
||||
<font>font30_title</font>
|
||||
<textoffsetx>20</textoffsetx>
|
||||
<textcolor>80FFFFFF</textcolor>
|
||||
<focusedcolor>FFFFFFFF</focusedcolor>
|
||||
<selectedcolor>80FFFFFF</selectedcolor>
|
||||
<shadowcolor>22000000</shadowcolor>
|
||||
<aligny>center</aligny>
|
||||
<align>center</align>
|
||||
<texturefocus border="10" colordiffuse="88232323">NextDialog/background-diffuse.png</texturefocus>
|
||||
<texturenofocus border="10" colordiffuse="88232323">NextDialog/background-diffuse.png</texturenofocus>
|
||||
<pulseonselect>no</pulseonselect>
|
||||
</control>
|
||||
</control>
|
||||
<control type="label">
|
||||
<bottom>60</bottom>
|
||||
<height>40</height>
|
||||
<aligny>center</aligny>
|
||||
<visible>true</visible>
|
||||
<align>center</align>
|
||||
<scroll>true</scroll>
|
||||
<scrollspeed>50</scrollspeed>
|
||||
<textcolor>FFFFFFFF</textcolor>
|
||||
<shadowcolor>ff000000</shadowcolor>
|
||||
<info>Window.Property(title)</info>
|
||||
<font>font30_title</font>
|
||||
</control>
|
||||
<control type="label">
|
||||
<bottom>40</bottom>
|
||||
<height>40</height>
|
||||
<aligny>center</aligny>
|
||||
<visible>true</visible>
|
||||
<align>center</align>
|
||||
<scroll>true</scroll>
|
||||
<scrollspeed>50</scrollspeed>
|
||||
<textcolor>FFFFFFFF</textcolor>
|
||||
<shadowcolor>ff000000</shadowcolor>
|
||||
<info>Window.Property(ep_title)</info>
|
||||
<font>font20_title</font>
|
||||
</control>
|
||||
<control type="label">
|
||||
<top>20</top>
|
||||
<right>25</right>
|
||||
<height>auto</height>
|
||||
<aligny>top</aligny>
|
||||
<visible>true</visible>
|
||||
<align>right</align>
|
||||
<scroll>true</scroll>
|
||||
<scrollspeed>50</scrollspeed>
|
||||
<textcolor>FFFFFFFF</textcolor>
|
||||
<shadowcolor>ff000000</shadowcolor>
|
||||
<info>Player.TimeRemaining(secs),,</info>
|
||||
<font>font30_title</font>
|
||||
</control>
|
||||
</control>
|
||||
</control>
|
||||
</control>
|
||||
</controls>
|
||||
</window>
|
||||
@@ -1,206 +1,221 @@
<?xml version="1.0" encoding="utf-8"?>
<window>
<allowoverlays>false</allowoverlays>
<animation type="WindowOpen" reversible="false">
<effect type="zoom" start="80" end="100" center="640,225" delay="160" tween="back" time="240" />
<effect type="fade" delay="160" end="100" time="240" />
</animation>
<animation type="WindowClose" reversible="false">
<effect type="zoom" start="100" end="80" center="640,225" easing="in" tween="back" time="240" />
<effect type="fade" start="100" end="0" time="240" />
</animation>
<controls>
<control type="group" id="10001">
<posx>250</posx>
<posy>60</posy>
<width>700</width>
<height>600</height>
<top>40</top>
<left>390</left>
<width>600</width>
<height>640</height>
<control type="image">
<width>700</width>
<height>600</height>
<texture>Windows/DialogBack.png</texture>
<width>510</width>
<height>640</height>
<left>45</left>
<texture>Shortcut/dialog-bg-solid.png</texture>
</control>
<control type="textbox" id="10000">
<posy>20</posy>
<posx>30</posx>
<top>30</top>
<left>20</left>
<height>60</height>
<width>630</width>
<textcolor>0xFFFFA500</textcolor>
<width>560</width>
<textcolor>FFFFFFFF</textcolor>
<wrapmultiline>true</wrapmultiline>
<align>center</align>
<label></label>
</control>
<control type="button" id="10002">
<posy>540</posy>
<posx>110</posx>
<width>140</width>
<height>30</height>
<textwidth>100</textwidth>
<top>565</top>
<left>75</left>
<width>150</width>
<height>50</height>
<textwidth>110</textwidth>
<textcolor>FFFFFFFF</textcolor>
<focusedcolor>FFFFFFFF</focusedcolor>
<texturefocus>Controls/KeyboardKey.png</texturefocus>
<texturenofocus>Controls/KeyboardKeyNF.png</texturenofocus>
<align>center</align>
<aligny>center</aligny>
<label>Aceptar</label>
<onup>10013</onup>
<ondown>10003</ondown>
<onleft>10013</onleft>
<label>$ADDON[plugin.video.kod 70007]</label>
<onup>10011</onup>
<ondown>10005</ondown>
<onleft>10004</onleft>
<onright>10003</onright>
</control>
</control>
<control type="button" id="10003">
<posy>540</posy>
<posx>280</posx>
<width>140</width>
<height>30</height>
<textwidth>100</textwidth>
<top>565</top>
<left>225</left>
<width>150</width>
<height>50</height>
<textwidth>110</textwidth>
<textcolor>FFFFFFFF</textcolor>
<focusedcolor>FFFFFFFF</focusedcolor>
<texturefocus>Controls/KeyboardKey.png</texturefocus>
<texturenofocus>Controls/KeyboardKeyNF.png</texturenofocus>
<align>center</align>
<aligny>center</aligny>
<label>Cancelar</label>
<onup>10002</onup>
<ondown>10004</ondown>
<label>$ADDON[plugin.video.kod 707433]</label>
<onup>10012</onup>
<ondown>10006</ondown>
<onleft>10002</onleft>
<onright>10004</onright>
</control>
<control type="button" id="10004">
<posy>540</posy>
<posx>450</posx>
<width>140</width>
<height>30</height>
<textwidth>100</textwidth>
<top>565</top>
<left>375</left>
<width>150</width>
<height>50</height>
<textwidth>110</textwidth>
<textcolor>FFFFFFFF</textcolor>
<focusedcolor>FFFFFFFF</focusedcolor>
<texturefocus>Controls/KeyboardKey.png</texturefocus>
<texturenofocus>Controls/KeyboardKeyNF.png</texturenofocus>
<align>center</align>
<aligny>center</aligny>
<label>Recargar</label>
<onup>10003</onup>
<ondown>10005</ondown>
<label>$ADDON[plugin.video.kod 70008]</label>
<onup>10013</onup>
<ondown>10007</ondown>
<onleft>10003</onleft>
<onright>10005</onright>
</control>
<onright>10002</onright>
</control>
<control type="image" id="10020">
<posy>80</posy>
<posx>120</posx>
<top>90</top>
<left>75</left>
<width>450</width>
<height>450</height>
</control>
<control type="togglebutton" id="10005">
<posy>80</posy>
<posx>120</posx>
<top>90</top>
<left>75</left>
<width>150</width>
<height>150</height>
<texturefocus border="50" colordiffuse="50D8D8D8">Controls/KeyboardKeyWhite.png</texturefocus>
<texturenofocus border="15" colordiffuse="FFA2B2E7">Controls/KeyboardKeyNF.png</texturenofocus>
<alttexturefocus>Controls/check_mark.png</alttexturefocus>
<alttexturenofocus>Controls/check_mark.png</alttexturenofocus>
<onup>10004</onup>
<ondown>10006</ondown>
<onleft>10004</onleft>
<texturefocus colordiffuse="AA232323">Controls/background-diffuse.png</texturefocus>
<texturenofocus colordiffuse="00232323">Controls/background-diffuse.png</texturenofocus>
<alttexturefocus colordiffuse="FF232323">Controls/check_mark.png</alttexturefocus>
<alttexturenofocus colordiffuse="FFFFFFFF">Controls/check_mark.png</alttexturenofocus>
<onup>10002</onup>
<ondown>10008</ondown>
<onleft>10007</onleft>
<onright>10006</onright>
</control>
<control type="togglebutton" id="10006">
<posy>80</posy>
<posx>270</posx>
<top>90</top>
<left>225</left>
<width>150</width>
<height>150</height>
<texturefocus border="50" colordiffuse="50D8D8D8">Controls/KeyboardKeyWhite.png</texturefocus>
<texturenofocus border="15" colordiffuse="FFA2B2E7">Controls/KeyboardKeyNF.png</texturenofocus>
<alttexturefocus>Controls/check_mark.png</alttexturefocus>
<alttexturenofocus>Controls/check_mark.png</alttexturenofocus>
<onup>10005</onup>
<ondown>10007</ondown>
<texturefocus colordiffuse="AA232323">Controls/background-diffuse.png</texturefocus>
<texturenofocus colordiffuse="00232323">Controls/background-diffuse.png</texturenofocus>
<alttexturefocus colordiffuse="FF232323">Controls/check_mark.png</alttexturefocus>
<alttexturenofocus colordiffuse="FFFFFFFF">Controls/check_mark.png</alttexturenofocus>
<onup>10003</onup>
<ondown>10009</ondown>
<onleft>10005</onleft>
<onright>10007</onright>
</control>
<control type="togglebutton" id="10007">
<posy>80</posy>
<posx>420</posx>
<top>90</top>
<left>375</left>
<width>150</width>
<height>150</height>
<texturefocus border="50" colordiffuse="50D8D8D8">Controls/KeyboardKeyWhite.png</texturefocus>
<texturenofocus border="15" colordiffuse="FFA2B2E7">Controls/KeyboardKeyNF.png</texturenofocus>
<alttexturefocus>Controls/check_mark.png</alttexturefocus>
<alttexturenofocus>Controls/check_mark.png</alttexturenofocus>
<onup>10006</onup>
<ondown>10008</ondown>
<texturefocus colordiffuse="AA232323">Controls/background-diffuse.png</texturefocus>
<texturenofocus colordiffuse="00232323">Controls/background-diffuse.png</texturenofocus>
<alttexturefocus colordiffuse="FF232323">Controls/check_mark.png</alttexturefocus>
<alttexturenofocus colordiffuse="FFFFFFFF">Controls/check_mark.png</alttexturenofocus>
<onup>10004</onup>
<ondown>10010</ondown>
<onleft>10006</onleft>
<onright>10008</onright>
<onright>10005</onright>
</control>
<control type="togglebutton" id="10008">
<posy>230</posy>
<posx>120</posx>
<top>240</top>
<left>75</left>
<width>150</width>
<height>150</height>
<texturefocus border="50" colordiffuse="50D8D8D8">Controls/KeyboardKeyWhite.png</texturefocus>
<texturenofocus border="15" colordiffuse="FFA2B2E7">Controls/KeyboardKeyNF.png</texturenofocus>
<alttexturefocus>Controls/check_mark.png</alttexturefocus>
<alttexturenofocus>Controls/check_mark.png</alttexturenofocus>
<onup>10007</onup>
<ondown>10009</ondown>
<onleft>10007</onleft>
<texturefocus colordiffuse="AA232323">Controls/background-diffuse.png</texturefocus>
<texturenofocus colordiffuse="00232323">Controls/background-diffuse.png</texturenofocus>
<alttexturefocus colordiffuse="FF232323">Controls/check_mark.png</alttexturefocus>
<alttexturenofocus colordiffuse="FFFFFFFF">Controls/check_mark.png</alttexturenofocus>
<onup>10005</onup>
<ondown>10011</ondown>
<onleft>10010</onleft>
<onright>10009</onright>
</control>
<control type="togglebutton" id="10009">
<posy>230</posy>
<posx>270</posx>
<top>240</top>
<left>225</left>
<width>150</width>
<height>150</height>
<texturefocus border="50" colordiffuse="50D8D8D8">Controls/KeyboardKeyWhite.png</texturefocus>
<texturenofocus border="15" colordiffuse="FFA2B2E7">Controls/KeyboardKeyNF.png</texturenofocus>
<alttexturefocus>Controls/check_mark.png</alttexturefocus>
<alttexturenofocus>Controls/check_mark.png</alttexturenofocus>
<onup>10008</onup>
<ondown>10010</ondown>
<texturefocus colordiffuse="AA232323">Controls/background-diffuse.png</texturefocus>
<texturenofocus colordiffuse="00232323">Controls/background-diffuse.png</texturenofocus>
<alttexturefocus colordiffuse="FF232323">Controls/check_mark.png</alttexturefocus>
<alttexturenofocus colordiffuse="FFFFFFFF">Controls/check_mark.png</alttexturenofocus>
<onup>10006</onup>
<ondown>10012</ondown>
<onleft>10008</onleft>
<onright>10010</onright>
</control>
<control type="togglebutton" id="10010">
<posy>230</posy>
<posx>420</posx>
<top>240</top>
<left>375</left>
<width>150</width>
<height>150</height>
<texturefocus border="50" colordiffuse="50D8D8D8">Controls/KeyboardKeyWhite.png</texturefocus>
<texturenofocus border="15" colordiffuse="FFA2B2E7">Controls/KeyboardKeyNF.png</texturenofocus>
<alttexturefocus>Controls/check_mark.png</alttexturefocus>
<alttexturenofocus>Controls/check_mark.png</alttexturenofocus>
<onup>10009</onup>
<ondown>10011</ondown>
<texturefocus colordiffuse="AA232323">Controls/background-diffuse.png</texturefocus>
<texturenofocus colordiffuse="00232323">Controls/background-diffuse.png</texturenofocus>
<alttexturefocus colordiffuse="FF232323">Controls/check_mark.png</alttexturefocus>
<alttexturenofocus colordiffuse="FFFFFFFF">Controls/check_mark.png</alttexturenofocus>
<onup>10007</onup>
<ondown>10013</ondown>
<onleft>10009</onleft>
<onright>10011</onright>
<onright>10008</onright>
</control>
<control type="togglebutton" id="10011">
<posy>380</posy>
<posx>120</posx>
<top>390</top>
<left>75</left>
<width>150</width>
<height>150</height>
<texturefocus border="50" colordiffuse="50D8D8D8">Controls/KeyboardKeyWhite.png</texturefocus>
<texturenofocus border="15" colordiffuse="FFA2B2E7">Controls/KeyboardKeyNF.png</texturenofocus>
<alttexturefocus>Controls/check_mark.png</alttexturefocus>
<alttexturenofocus>Controls/check_mark.png</alttexturenofocus>
<onup>10010</onup>
<ondown>10012</ondown>
<onleft>10010</onleft>
<texturefocus colordiffuse="AA232323">Controls/background-diffuse.png</texturefocus>
<texturenofocus colordiffuse="00232323">Controls/background-diffuse.png</texturenofocus>
<alttexturefocus colordiffuse="FF232323">Controls/check_mark.png</alttexturefocus>
<alttexturenofocus colordiffuse="FFFFFFFF">Controls/check_mark.png</alttexturenofocus>
<onup>10008</onup>
<ondown>10002</ondown>
<onleft>10013</onleft>
<onright>10012</onright>
</control>
<control type="togglebutton" id="10012">
<posy>380</posy>
<posx>270</posx>
<top>390</top>
<left>225</left>
<width>150</width>
<height>150</height>
<texturefocus border="50" colordiffuse="50D8D8D8">Controls/KeyboardKeyWhite.png</texturefocus>
<texturenofocus border="15" colordiffuse="FFA2B2E7">Controls/KeyboardKeyNF.png</texturenofocus>
<alttexturefocus>Controls/check_mark.png</alttexturefocus>
<alttexturenofocus>Controls/check_mark.png</alttexturenofocus>
<onup>10011</onup>
<ondown>10013</ondown>
<texturefocus colordiffuse="AA232323">Controls/background-diffuse.png</texturefocus>
<texturenofocus colordiffuse="00232323">Controls/background-diffuse.png</texturenofocus>
<alttexturefocus colordiffuse="FF232323">Controls/check_mark.png</alttexturefocus>
<alttexturenofocus colordiffuse="FFFFFFFF">Controls/check_mark.png</alttexturenofocus>
<onup>10009</onup>
<ondown>10003</ondown>
<onleft>10011</onleft>
<onright>10013</onright>
</control>
<control type="togglebutton" id="10013">
<posy>380</posy>
<posx>420</posx>
<top>390</top>
<left>375</left>
<width>150</width>
<height>150</height>
<texturefocus border="50" colordiffuse="50D8D8D8">Controls/KeyboardKeyWhite.png</texturefocus>
<texturenofocus border="15" colordiffuse="FFA2B2E7">Controls/KeyboardKeyNF.png</texturenofocus>
<alttexturefocus>Controls/check_mark.png</alttexturefocus>
<alttexturenofocus>Controls/check_mark.png</alttexturenofocus>
<onup>10012</onup>
<ondown>10002</ondown>
<texturefocus colordiffuse="AA232323">Controls/background-diffuse.png</texturefocus>
<texturenofocus colordiffuse="00232323">Controls/background-diffuse.png</texturenofocus>
<alttexturefocus colordiffuse="FF232323">Controls/check_mark.png</alttexturefocus>
<alttexturenofocus colordiffuse="FFFFFFFF">Controls/check_mark.png</alttexturenofocus>
<onup>10010</onup>
<ondown>10004</ondown>
<onleft>10012</onleft>
<onright>10002</onright>
<onright>10011</onright>
</control>
</control>
</controls>
@@ -69,10 +69,8 @@
<texture colordiffuse="60FFFFFF">Shortcut/black.png</texture>
</control>
<control type="image">
<left>35</left>
<top>35</top>
<width>150</width>
<height>150</height>
<width>220</width>
<height>220</height>
<texture>$INFO[ListItem.Property(thumb)]</texture>
<aspectratio>keep</aspectratio>
<align>center</align>
@@ -97,8 +95,9 @@
<animation effect="fade" start="100" end="0" time="0">Focus</animation>
</control>
<control type="image">
<top>1</top>
<width>220</width>
<height>220</height>
<height>218</height>
<texture colordiffuse="FF0082C2">Shortcut/button-fo.png</texture>
<animation effect="fade" start="100" end="0" time="0">Unfocus</animation>
</control>
@@ -118,20 +117,19 @@
<texture colordiffuse="60FFFFFF">Shortcut/black.png</texture>
</control>
<control type="image">
<left>35</left>
<top>35</top>
<width>150</width>
<height>150</height>
<width>220</width>
<height>220</height>
<texture>$INFO[ListItem.Property(thumb)]</texture>
<aspectratio>keep</aspectratio>
<align>center</align>
</control>
<control type="textbox">
<left>0</left>
<top>146</top>
<top>160</top>
<width>220</width>
<height>74</height>
<font>font12</font>
<textcolor>FFFFFFFF</textcolor>
<label>$INFO[ListItem.Label]</label>
<align>center</align>
<aligny>center</aligny>
@@ -151,8 +149,8 @@
<textureslidernibfocus>-</textureslidernibfocus>
<showonepage>false</showonepage>
<orientation>horizontal</orientation>
<onleft>32500</onleft>
<onright>32500</onright>
<!-- <onleft>32500</onleft>
<onright>32500</onright> -->
<ondown>32500</ondown>
<onup>32500</onup>
<animation effect="slide" end="120,0" time="0" condition="!Control.IsVisible(5)">Conditional</animation>
BIN resources/skins/Default/media/Controls/background-diffuse.png (new file, binary not shown; 1.6 KiB)
BIN resources/skins/Default/media/NextDialog/background-play.png (new file, binary not shown; 3.3 KiB)
@@ -8,17 +8,13 @@
"url": "\\1"
},
{
"pattern": "akvideo.stream\/video\/(?:embed-)?([a-zA-Z0-9]+)",
"url": "http://akvideo.stream\/video\/\\1"
},
{
"pattern": "akvideo.stream/(?:embed-)?([a-zA-Z0-9]+)",
"pattern": "akvideo.stream/(?:video/|video\\.php\\?file_code=)?(?:embed-)?([a-zA-Z0-9]+)",
"url": "http://akvideo.stream/video/\\1"
}
]
},
"free": true,
"id": "akstream",
"id": "akvideo",
"name": "Akvideo",
"settings": [
{
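The widened akvideo pattern above now normalizes plain /video/ links, video.php?file_code= links and embed- links to one canonical URL. A quick illustration of how such a pattern/url pair resolves, as standalone Python with the JSON escaping unfolded:

# Illustrative only: applying the pattern/url pair from the JSON above.
import re

pattern = r"akvideo.stream/(?:video/|video\.php\?file_code=)?(?:embed-)?([a-zA-Z0-9]+)"
template = r"http://akvideo.stream/video/\1"

for url in ("https://akvideo.stream/embed-abc123",
            "https://akvideo.stream/video.php?file_code=abc123",
            "https://akvideo.stream/video/abc123"):
    m = re.search(pattern, url)
    if m:
        print(m.expand(template))  # each prints http://akvideo.stream/video/abc123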
@@ -1,24 +1,34 @@
# -*- coding: utf-8 -*-
# by DrZ3r0

import urllib
import urllib, re

from core import httptools
from core import scrapertools
from platformcode import logger, config
from core.support import dbg


def test_video_exists(page_url):
    logger.info("(page_url='%s')" % page_url)
    # page_url = re.sub('akvideo.stream/(?:video/|video\\.php\\?file_code=)?(?:embed-)?([a-zA-Z0-9]+)','akvideo.stream/video/\\1',page_url)
    global data
    # dbg()
    page = httptools.downloadpage(page_url)
    logger.info(page.data)
    if 'embed_ak.php' in page_url:
        code = scrapertools.find_single_match(page.url, '/embed-([0-9a-z]+)\.html')
        if code:
        if not code:
            code = scrapertools.find_single_match(page.data, r"""input\D*id=(?:'|")[^'"]+(?:'|").*?value='([a-z0-9]+)""")
        if code:
            page = httptools.downloadpage('http://akvideo.stream/video/' + code)
        else:
            return False, config.get_localized_string(70449) % "Akvideo"
    data = page.data

    # ID, code = scrapertools.find_single_match(data, r"""input\D*id=(?:'|")([^'"]+)(?:'|").*?value='([a-z0-9]+)""")
    # post = urllib.urlencode({ID: code})
    # logger.info('PAGE DATA' + data)
    if "File Not Found" in data:
        return False, config.get_localized_string(70449) % "Akvideo"
    return True, ""
@@ -27,14 +37,20 @@ def test_video_exists(page_url):
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
    logger.info(" url=" + page_url)
    video_urls = []
    # dbg()

    global data
    logger.info('PAGE DATA' + data)
    vres = scrapertools.find_multiple_matches(data, 'nowrap[^>]+>([^,]+)')
    if not vres: vres = scrapertools.find_multiple_matches(data, '<td>(\d+x\d+)')

    data_pack = scrapertools.find_single_match(data, "</div>\n\s*<script[^>]+>(eval.function.p,a,c,k,e,.*?)\s*</script>")
    if data_pack != "":
        from lib import jsunpack
        data = jsunpack.unpack(data_pack)

    block = scrapertools.find_single_match(data, "sources:\s\[([^\]]+)\]")
    data = block if block else data
    # URL
    # logger.info(data)
    matches = scrapertools.find_multiple_matches(data, '(http.*?\.mp4)')
@@ -47,6 +63,4 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
        video_urls.append([vres[i] + " mp4 [Akvideo] ", media_url.replace('https://', 'http://') + '|' + _headers])
        i = i + 1

    for video_url in video_urls:
        logger.info(" %s - %s" % (video_url[0], video_url[1]))
    return sorted(video_urls, key=lambda x: x[0].split('x')[1])
    return sorted(video_urls, key=lambda x: int(x[0].split('x')[0]))
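The last pair of lines above is the real fix in this hunk: the old key, x[0].split('x')[1], compared height strings lexicographically (and raised IndexError on labels without an 'x'), while the new key sorts numerically on the width. A standalone demonstration:

# Why int() on the width matters; labels mimic the "WxH mp4 [Akvideo] " format.
video_urls = [["1920x1080 mp4 [Akvideo] ", "url_hd"],
              ["852x480 mp4 [Akvideo] ", "url_sd"]]

print(sorted(video_urls, key=lambda x: x[0].split('x')[1])[0][1])
# -> 'url_hd': "1080 mp4..." < "480 mp4..." as strings, so HD wrongly sorts first

print(sorted(video_urls, key=lambda x: int(x[0].split('x')[0]))[0][1])
# -> 'url_sd': 852 < 1920, qualities now ascend correctly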
@@ -5,14 +5,14 @@

from core import httptools
from core import scrapertools
from platformcode import logger
from platformcode import logger, config


def test_video_exists(page_url):
    logger.info("(page_url='%s')" % page_url)
    response = httptools.downloadpage(page_url)
    if not response.sucess or "Not Found" in response.data or "File was deleted" in response.data or "is no longer available" in response.data:
        return False, "[anonfile] El fichero no existe o ha sido borrado"
        return False, config.get_localized_string(70449) % "anonfile"
    return True, ""
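This hunk (and the near-identical ones that follow for filepup, Filevideo, nofile, Userscloud, Vidup and Watchvideo) swaps a hardcoded Spanish message for shared localized string 70449, with the server name interpolated via %. A sketch of what the call amounts to; the dict and template text are stand-ins, since the real strings live in the addon's language files:

# Hypothetical stand-in for platformcode.config.get_localized_string().
STRINGS = {70449: "The file on %s does not exist or has been deleted"}  # assumed text

def get_localized_string(string_id):
    # Fall back to the numeric id when no translation is present.
    return STRINGS.get(string_id, str(string_id))

print(get_localized_string(70449) % "anonfile")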
@@ -5,14 +5,14 @@

from core import httptools
from core import scrapertools
from platformcode import logger
from platformcode import logger, config


def test_video_exists(page_url):
    logger.info("(page_url='%s')" % page_url)
    response = httptools.downloadpage(page_url)
    if "File was deleted" in response.data or "is no longer available" in response.data:
        return False, "[filepup] El fichero no existe o ha sido borrado"
        return False, config.get_localized_string(70449) % "filepup"
    return True, ""
@@ -3,7 +3,7 @@
from core import httptools
from core import scrapertools
from lib import jsunpack
from platformcode import logger
from platformcode import logger, config


def test_video_exists(page_url):
@@ -12,7 +12,7 @@ def test_video_exists(page_url):
    data = httptools.downloadpage(page_url).data

    if "Not Found" in data or "File was deleted" in data:
        return False, "[Filevideo] El fichero no existe o ha sido borrado"
        return False, config.get_localized_string(70449) % "Filevideo"

    return True, ""
@@ -5,14 +5,14 @@

from core import httptools
from core import scrapertools
from platformcode import logger
from platformcode import logger, config


def test_video_exists(page_url):
    logger.info("(page_url='%s')" % page_url)
    response = httptools.downloadpage(page_url)
    if not response.sucess or "Not Found" in response.data or "File was deleted" in response.data or "is no longer available" in response.data:
        return False, "[nofile] El fichero no existe o ha sido borrado"
        return False, config.get_localized_string(70449) % "nofile"
    return True, ""
@@ -3,7 +3,7 @@
from core import httptools
from core import scrapertools
from lib import jsunpack
from platformcode import logger
from platformcode import logger, config


def test_video_exists(page_url):
@@ -12,7 +12,7 @@ def test_video_exists(page_url):
    response = httptools.downloadpage(page_url)

    if not response.sucess or "Not Found" in response.data or "File was deleted" in response.data or "is no longer available" in response.data:
        return False, "[Userscloud] El fichero no existe o ha sido borrado"
        return False, config.get_localized_string(70449) % "Userscloud"

    return True, ""
@@ -8,13 +8,13 @@
"url": "https://vcstream.to/embed/\\1/\\2"
},
{
"pattern": "vidcloud.co/(?:embed|f|v)/([a-z0-9A-Z]+)",
"url": "https://vidcloud.co\/v\/\\1"
"pattern": "vidcloud.ru/(?:embed|f|v)/([a-z0-9A-Z]+)",
"url": "https://vidcloud.ru\/v\/\\1"
}
]
},
"free": true,
"id": "vcstream",
"id": "vidcloud",
"name": "Vidcloud",
"settings": [
{
@@ -2,7 +2,10 @@
# Icarus pv7
# Fix dentaku65

import urlparse
try:
    import urlparse
except:
    import urllib.parse as urlparse

from core import httptools
from core import scrapertools
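The try/except import added above is the standard Python 2/3 shim: the urlparse module became urllib.parse in Python 3, and aliasing it keeps the rest of the module untouched. A self-contained example of the same pattern in use (example.com is a placeholder):

# Works unchanged on Python 2 and Python 3.
try:
    import urlparse                      # Python 2
except ImportError:
    import urllib.parse as urlparse      # Python 3

print(urlparse.urljoin("https://example.com/embed/", "../v/abc123"))
# -> https://example.com/v/abc123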
@@ -4,14 +4,14 @@ import urllib

from core import httptools
from core import scrapertools
from platformcode import logger
from platformcode import logger, config


def test_video_exists(page_url):
    logger.info("(page_url='%s')" % page_url)
    data = httptools.downloadpage(page_url).data
    if "Not Found" in data:
        return False, "[Vidup] El fichero no existe o ha sido borrado"
        return False, config.get_localized_string(70449) % "Vidup"
    return True, ""
@@ -3,14 +3,14 @@
from core import httptools
from core import scrapertools
from lib import jsunpack
from platformcode import logger
from platformcode import logger, config


def test_video_exists(page_url):
    logger.info("(page_url='%s')" % page_url)
    data = httptools.downloadpage(page_url).data
    if "Not Found" in data or "File was deleted" in data:
        return False, "[Watchvideo] El fichero no existe o ha sido borrado"
        return False, config.get_localized_string(70449) % "Watchvideo"
    return True, ""
@@ -9,34 +9,49 @@ except ImportError:
    import urllib

from core import httptools, scrapertools
from platformcode import logger, config
from platformcode import logger, config, platformtools

headers = [['User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0']]

def test_video_exists(page_url):
    def int_bckup_method():
        global data,headers
        page_url = scrapertools.find_single_match(data, r"""<center><a href='(https?:\/\/wstream[^']+)'\s*title='bkg'""")
        if page_url:
            data = httptools.downloadpage(page_url, headers=headers, follow_redirects=True, post={'g-recaptcha-response': captcha}).data

    logger.info("(page_url='%s')" % page_url)
    resp = httptools.downloadpage(page_url)
    global data
    data = resp.data

    sitekey = scrapertools.find_single_match(data, 'data-sitekey="([^"]+)')
    captcha = platformtools.show_recaptcha(sitekey, page_url) if sitekey else ''

    page_url = resp.url
    if '/streaming.php' in page_url:
        code = httptools.downloadpage(page_url, headers=headers, follow_redirects=False).headers['location'].split('/')[-1].replace('.html','')
        logger.info('WCODE='+code)
        code = httptools.downloadpage(page_url, headers=headers, follow_redirects=False).headers['location'].split('/')[-1].replace('.html', '')
        logger.info('WCODE=' + code)
        page_url = 'https://wstream.video/video.php?file_code=' + code
        data = httptools.downloadpage(page_url, headers=headers, follow_redirects=True).data

    possibleParam = scrapertools.find_multiple_matches(data, r"""<input.*?(?:name=["']([^'"]+).*?value=["']([^'"]*)['"]>|>)""")
    if possibleParam:
        post = urllib.urlencode({param[0]: param[1] for param in possibleParam if param[0]})
        data = httptools.downloadpage(page_url, headers=headers, post=post, follow_redirects=True).data
        post = {param[0]: param[1] for param in possibleParam if param[0]}
        if captcha: post['g-recaptcha-response'] = captcha
        if post:
            data = httptools.downloadpage(page_url, headers=headers, post=post, follow_redirects=True).data
        elif captcha:
            int_bckup_method()
    elif captcha:
        int_bckup_method()
    else:
        page_url = scrapertools.find_single_match(data, r"""<center><a href='(https?:\/\/wstream[^']+)'\s*title='bkg'""")
        if page_url:
            data = httptools.downloadpage(page_url, headers=headers, follow_redirects=True).data
        return False, config.get_localized_string(707434)

    if "Not Found" in data or "File was deleted" in data:
        return False, config.get_localized_string(70449) % 'Wstream'
    return True, ""
    else:
        return True, ""


# Returns an array of possible video url's from the page_url
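Taken together, the new Wstream flow above is: scrape the reCaptcha data-sitekey from the page, have the user solve the challenge in the new platformtools.show_recaptcha window, then replay the form POST with g-recaptcha-response attached. A condensed sketch of that round-trip, reusing the same helpers as the diff but with the fallback paths and error handling omitted:

# Condensed sketch of the captcha round-trip (simplified; int_bckup_method
# and the bkg fallback are left out).
from core import httptools, scrapertools
from platformcode import platformtools

def fetch_with_recaptcha(page_url, headers):
    data = httptools.downloadpage(page_url).data
    sitekey = scrapertools.find_single_match(data, 'data-sitekey="([^"]+)')
    post = {}
    if sitekey:
        # Opens the addon's reCaptcha window and returns the solved token.
        post['g-recaptcha-response'] = platformtools.show_recaptcha(sitekey, page_url)
    if post:
        data = httptools.downloadpage(page_url, headers=headers, post=post,
                                      follow_redirects=True).data
    return data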
Some files were not shown because too many files have changed in this diff.