From 2020d20adbdeca7c2159923dcce84424d7d9799f Mon Sep 17 00:00:00 2001
From: Intel1 <25161862+Intel11@users.noreply.github.com>
Date: Wed, 6 Jun 2018 12:21:08 -0500
Subject: [PATCH] anitoonstv: fix

---
 plugin.video.alfa/channels/anitoonstv.py | 65 +++++++++++++++---------
 1 file changed, 40 insertions(+), 25 deletions(-)

diff --git a/plugin.video.alfa/channels/anitoonstv.py b/plugin.video.alfa/channels/anitoonstv.py
index 5fcc9900..02720e32 100644
--- a/plugin.video.alfa/channels/anitoonstv.py
+++ b/plugin.video.alfa/channels/anitoonstv.py
@@ -33,13 +33,12 @@ def mainlist(item):
     itemlist = list()
 
     itemlist.append(Item(channel=item.channel, action="lista", title="Series", url=host+"/lista-de-anime.php",
-                         thumbnail=thumb_series))
-    #itemlist.append(Item(channel=item.channel, action="lista", title="Series Animadas", url=host,
-    #                     thumbnail=thumb_series))
-    #itemlist.append(Item(channel=item.channel, action="lista", title="Novedades", url=host,
-    #                     thumbnail=thumb_series))
-    #itemlist.append(Item(channel=item.channel, action="lista", title="Pokemon", url=host,
-    #                     thumbnail=thumb_series))
+                         thumbnail=thumb_series, range=[0,19]))
+    itemlist.append(Item(channel=item.channel, action="lista", title="Películas", url=host+"/catalogo.php?g=&t=peliculas&o=0",
+                         thumbnail=thumb_series, range=[0,19] ))
+    itemlist.append(Item(channel=item.channel, action="lista", title="Especiales", url=host+"/catalogo.php?g=&t=especiales&o=0",
+                         thumbnail=thumb_series, range=[0,19]))
+
     itemlist = renumbertools.show_option(item.channel, itemlist)
     autoplay.show_option(item.channel, itemlist)
     return itemlist
@@ -60,8 +59,9 @@ def lista(item):
     patron +=".+?(.+?)<\/span>" #scrapedplot
     matches = scrapertools.find_multiple_matches(data, patron)
 
-    for scrapedurl, scrapedthumbnail,scrapedtitle,scrapedplot in matches:
-        if ":" in scrapedtitle:
+    next_page = [item.range[0]+19, item.range[1]+20]
+    for scrapedurl, scrapedthumbnail,scrapedtitle,scrapedplot in matches[item.range[0] : item.range[1]]:
+        if ":" in scrapedtitle:
             cad = scrapedtitle.split(":")
             show = cad[0]
         else:
@@ -81,9 +81,15 @@ def lista(item):
         context2 = autoplay.context
         context.extend(context2)
         scrapedurl=host+scrapedurl
-        itemlist.append(item.clone(title=scrapedtitle, url=scrapedurl, plot=scrapedplot,
-                                   thumbnail=scrapedthumbnail, action="episodios", show=show, context=context))
-    #tmdb.set_infoLabels(itemlist)
+        if item.title!="Series":
+            itemlist.append(item.clone(title=scrapedtitle, contentTitle=show,url=scrapedurl,
+                                       thumbnail=scrapedthumbnail, action="findvideos", context=context))
+        else:
+            itemlist.append(item.clone(title=scrapedtitle, contentSerieName=show,url=scrapedurl, plot=scrapedplot,
+                                       thumbnail=scrapedthumbnail, action="episodios", context=context))
+    tmdb.set_infoLabels(itemlist, seekTmdb=True)
+    itemlist.append(Item(channel=item.channel, url=item.url, range=next_page, title='Pagina Siguente >>>', action='lista'))
+
     return itemlist
 
 
@@ -92,16 +98,16 @@ def episodios(item):
     itemlist = []
     data = httptools.downloadpage(item.url).data
     data = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data)
-    patron = '(.+?)<\/div>'
     data = scrapertools.find_single_match(data, patron)
     patron_caps = "  • Cap(?:i|í)tulo: (.+?) - (.+?)<\/a>"
     matches = scrapertools.find_multiple_matches(data, patron_caps)
-    show = scrapertools.find_single_match(data, 'Titulo.+?<\/span>(.+?)  ')
+    #show = scrapertools.find_single_match(data, 'Titulo.+?<\/span>(.+?)  ')
    scrapedthumbnail = scrapertools.find_single_match(data, "")
    scrapedplot = scrapertools.find_single_match(data, 'Descripcion.+?<\/span>(.+?)  ')

    i = 0
    temp = 0
+    infoLabels = item.infoLabels
    for link, cap, name in matches:
        if int(cap) == 1:
            temp = temp + 1
@@ -109,19 +115,25 @@ def episodios(item):
        if len(cap) == 1:
            cap = "0" + cap
        season = temp
        episode = int(cap)
+
        season, episode = renumbertools.numbered_for_tratk(
            item.channel, item.show, season, episode)
+
+        infoLabels['season'] = season
+        infoLabels['episode'] = episode
        date = name
        title = "%sx%s %s (%s)" % (season, str(episode).zfill(2), "Episodio %s" % episode, date)
        # title = str(temp)+"x"+cap+" "+name
        url = host + "/" + link
        if "NO DISPONIBLE" not in name:
            itemlist.append(Item(channel=item.channel, action="findvideos", title=title, thumbnail=scrapedthumbnail,
-                                 plot=scrapedplot, url=url, show=show))
+                                 plot=scrapedplot, url=url, contentSeasonNumber=season, contentEpisodeNumber=episode,
+                                 contentSerieName=item.contentSerieName, infoLabels=infoLabels))

    if config.get_videolibrary_support() and len(itemlist) > 0:
        itemlist.append(Item(channel=item.channel, title="[COLOR yellow]Añadir esta serie a la videoteca[/COLOR]", url=item.url,
-                             action="add_serie_to_library", extra="episodios", show=item.title))
+                             action="add_serie_to_library", extra="episodios", contentSerieName=item.contentSerieName))
+
    return itemlist


@@ -149,15 +161,15 @@ def findvideos(item):
    scrapedthumbnail = scrapertools.find_single_match(data, '  ')
    itemla = scrapertools.find_multiple_matches(data_vid, '"(.+?)"')
    for url in itemla:
-            url=url.replace('\/', '/')
-            server1=url.split('/')
-            server=server1[2]
-            if "." in server:
-                server1=server.split('.')
-                if len(server1)==3:
-                    server=server1[1]
-                else:
-                    server=server1[0]
+        url=url.replace('\/', '/')
+        server1=url.split('/')
+        server=server1[2]
+        if "." in server:
+            server1=server.split('.')
+            if len(server1)==3:
+                server=server1[1]
+            else:
+                server=server1[0]
        if "goo" in url:
            url = googl(url)
            server='netutv'
@@ -168,6 +180,9 @@ def findvideos(item):
        itemlist.append(item.clone(url=url, action="play", thumbnail=scrapedthumbnail, server=server, plot=scrapedplot,
                                   title="Enlace encontrado en: %s [%s]" % (server.capitalize(), quality)))
+    if item.contentTitle!="" and config.get_videolibrary_support() and len(itemlist) > 0:
+        itemlist.append(Item(channel=item.channel, title="[COLOR yellow]Añadir esta película a la videoteca[/COLOR]", url=item.url,
+                             action="add_pelicula_to_library", extra="episodios", show=item.contentTitle))
    autoplay.start(itemlist, item)

    return itemlist
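Reviewer note (not part of the patch): a minimal stand-alone sketch of the paging scheme this change introduces in lista(). The names below (results, window, pages) are illustrative stand-ins; only the range=[0,19] seeding, the slice, and the next_page arithmetic mirror the patch.

    # Sketch, assuming Item.range behaves as in the patch: mainlist() seeds each
    # section with range=[0, 19]; lista() slices the scraped matches with it and
    # emits a "Pagina Siguente >>>" item carrying the advanced window.
    results = ["title %d" % i for i in range(60)]    # stand-in for the scraped matches

    window = [0, 19]                                 # seeded in mainlist()
    first_page = results[window[0]:window[1]]        # entries listed on the first call

    next_page = [window[0] + 19, window[1] + 20]     # same arithmetic as next_page in lista()
    second_page = results[next_page[0]:next_page[1]]

    print(len(first_page), len(second_page))         # 19 20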
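Likewise, a condensed sketch of the season/episode bookkeeping that episodios() feeds into the new infoLabels and contentSeasonNumber/contentEpisodeNumber fields, before renumbertools adjusts the numbering. The sample (cap, date) tuples are invented; only the counter and the title format mirror the channel code.

    # Sketch of the title/season counting in episodios() (sample data is invented).
    scraped = [("1", "12/01/2018"), ("2", "19/01/2018"), ("1", "05/06/2018")]  # (cap, name)

    temp = 0                                   # season counter, as in the channel
    for cap, name in scraped:
        if int(cap) == 1:                      # every "Capitulo 1" starts a new season
            temp = temp + 1
        season = temp
        episode = int(cap)
        date = name
        title = "%sx%s %s (%s)" % (season, str(episode).zfill(2), "Episodio %s" % episode, date)
        print(title)                           # 1x01 ..., 1x02 ..., 2x01 ...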