Merge pull request #233 from danielr460/master

Channel adjustments
Alfa
2018-04-05 15:29:25 -05:00
committed by GitHub
3 changed files with 28 additions and 16 deletions

View File

@@ -204,14 +204,16 @@ def episodios(item):
matches = scrapertools.find_multiple_matches(bloque, '<li><a href="([^"]+)" title="([^"]+)"')
for url, title in matches:
url = host + url
-epi = scrapertools.find_single_match(title, '(?i)%s.*? (\d+) (?:Sub|Audio|Español)' % item.contentSerieName)
+epi = scrapertools.find_single_match(title, '.+?(\d+) (?:Sub|Audio|Español)')
+#epi = scrapertools.find_single_match(title, '(?i)%s.*? (\d+) (?:Sub|Audio|Español)' % item.contentSerieName)
new_item = item.clone(action="findvideos", url=url, title=title, extra="")
if epi:
if "Especial" in title:
epi=0
season, episode = renumbertools.numbered_for_tratk(
-item.channel, show, 1, int(epi))
+item.channel, item.contentSerieName, 1, int(epi))
new_item.infoLabels["episode"] = episode
new_item.infoLabels["season"] = season
new_item.title = "%sx%s %s" % (season, episode, title)
itemlist.append(new_item)
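
The new pattern above no longer anchors on item.contentSerieName; it simply captures the digits that precede "Sub", "Audio" or "Español" in the episode title. A minimal sketch of that behaviour with the standard re module (the sample titles are invented, and scrapertools.find_single_match is assumed to behave roughly like re.search returning the first capture group or an empty string):

    import re

    pattern = r'.+?(\d+) (?:Sub|Audio|Español)'

    for title in ["Boruto: Naruto Next Generations 45 Sub Español",
                  "One Piece 830 Audio Latino",
                  "Pelicula Especial Sub Español"]:
        m = re.search(pattern, title)
        epi = m.group(1) if m else ""   # empty string when no episode number is present
        print(title, "->", epi or "no match")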

View File

@@ -117,12 +117,12 @@ def episodios(item):
itemlist = []
data = httptools.downloadpage(item.url).data
-data = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data)
data = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data)
data_lista = scrapertools.find_single_match(data,
'<ul class="episodios">(.+?)<\/ul><\/div><\/div><\/div>')
show = item.title
-patron_caps = '<img src="([^"]+)"><\/a><\/div><div class=".+?">([^"]+)<\/div>.+?<a .+? href="([^"]+)">([^"]+)<\/a>'
+patron_caps = '<img alt=".+?" title=".+?" src="([^"]+)">'
+patron_caps += '<\/a><\/div><div class=".+?">([^"]+)<\/div>.+?<a .+? href="([^"]+)">([^"]+)<\/a>'
#scrapedthumbnail,#scrapedtempepi, #scrapedurl, #scrapedtitle
matches = scrapertools.find_multiple_matches(data_lista, patron_caps)
for scrapedthumbnail, scrapedtempepi, scrapedurl, scrapedtitle in matches:
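
The rewritten patron_caps expects the <img> tag to carry alt and title attributes before src, and it captures thumbnail, season/episode label, URL and title in one pass. A small self-contained check of that pattern against invented markup (the class names and paths below are illustrative only, not taken from the real site):

    import re

    html = ('<img alt="Serie" title="Serie" src="/thumbs/ep1.jpg"></a></div>'
            '<div class="numerando">1 - 1</div><div class="episodiotitle">'
            '<a class="link" href="/serie/episodio-1">Episodio 1</a></div>')

    patron_caps = r'<img alt=".+?" title=".+?" src="([^"]+)">'
    patron_caps += r'<\/a><\/div><div class=".+?">([^"]+)<\/div>.+?<a .+? href="([^"]+)">([^"]+)<\/a>'

    # Same field order as the comment in the diff:
    # scrapedthumbnail, scrapedtempepi, scrapedurl, scrapedtitle
    for thumb, temp_epi, url, title in re.findall(patron_caps, html):
        print(thumb, temp_epi, url, title)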
@@ -148,14 +148,24 @@ def findvideos(item):
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data)
-data = scrapertools.find_single_match(data,
+data1 = scrapertools.find_single_match(data,
'<div id="playex" .+?>(.+?)<\/nav><\/div><\/div>')
patron='src="(.+?)"'
logger.info("assfxxv "+data)
-itemla = scrapertools.find_multiple_matches(data,patron)
+itemla = scrapertools.find_multiple_matches(data1,patron)
+if "favicons?domain" in itemla[1]:
+    method = 1
+    data2=scrapertools.find_single_match(data, "var \$user_hashs = {(.+?)}")
+    patron='".+?":"(.+?)"'
+    itemla = scrapertools.find_multiple_matches(data2,patron)
+else:
+    method = 0
for i in range(len(itemla)):
#for url in itemla:
-url=itemla[i]
+if method==0:
+    url=itemla[i]
+else:
+    import base64
+    b=base64.b64decode(itemla[i])
+    url=b.decode('utf8')
#verificar existencia del video (testing)
codigo=verificar_video(itemla[i])
if codigo==200:
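
The added branch seems to cover a page variant where the visible player sources are only favicon images and the real links are stored base64-encoded in a $user_hashs object, hence the method flag and the base64 decode inside the loop. A standalone sketch of just the decode step (the encoded value is made up for illustration):

    import base64

    # Made-up stand-in for one entry extracted from $user_hashs.
    encoded = base64.b64encode(b"https://example.com/embed/abc123").decode("ascii")

    # Mirrors the method == 1 path above: decode the hash back into a playable URL.
    url = base64.b64decode(encoded).decode("utf8")
    print(url)  # -> https://example.com/embed/abc123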
@@ -199,5 +209,5 @@ def verificar_video(url):
else:
codigo1=200
else:
-codigo1=200
+codigo1=200
return codigo1
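
verificar_video is only partially visible here; it apparently returns an HTTP-style code, with 200 meaning the source is treated as available. As a rough idea of that kind of probe (not the channel's actual implementation, which presumably goes through httptools and per-hoster rules), a stdlib sketch:

    try:
        from urllib.request import urlopen   # Python 3
    except ImportError:
        from urllib2 import urlopen          # Python 2, the Kodi runtime of that era

    def probe(url, timeout=10):
        # Return the HTTP status of the URL, or a 404-style failure on any error.
        try:
            return urlopen(url, timeout=timeout).getcode()
        except Exception:
            return 404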

View File

@@ -71,11 +71,11 @@ def lista(item):
context2 = autoplay.context
context.extend(context2)
-itemlist.append(item.clone(title=title, url=url, action="episodios", thumbnail=scrapedthumbnail, show=title,
+itemlist.append(item.clone(title=title, url=url, action="episodios", thumbnail=scrapedthumbnail, show=title,contentSerieName=title,
context=context))
if b<29:
a=a+1
url="https://serieslan.com/pag-"+str(a)
url=host+"/pag-"+str(a)
if b>10:
itemlist.append(
Item(channel=item.channel, title="[COLOR cyan]Página Siguiente >>[/COLOR]", url=url, action="lista", page=0))
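
The pagination URL is now built from the channel's host variable instead of a hard-coded domain, so a future domain change only needs updating in one place. A tiny sketch of the resulting URL (host is set here to the value the removed hard-coded string used; the counter follows the code above):

    host = "https://serieslan.com"   # value previously hard-coded in the removed line

    a = 1                            # page counter, as in the code above
    a = a + 1
    url = host + "/pag-" + str(a)
    print(url)                       # -> https://serieslan.com/pag-2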
@@ -116,14 +116,14 @@ def episodios(item):
for pos in name.split(pat):
i = i + 1
total_episode += 1
-season, episode = renumbertools.numbered_for_tratk(item.channel, item.show, 1, total_episode)
+season, episode = renumbertools.numbered_for_tratk(item.channel, item.contentSerieName, 1, total_episode)
if len(name.split(pat)) == i:
title += "%sx%s " % (season, str(episode).zfill(2))
else:
title += "%sx%s_" % (season, str(episode).zfill(2))
else:
total_episode += 1
-season, episode = renumbertools.numbered_for_tratk(item.channel, item.show, 1, total_episode)
+season, episode = renumbertools.numbered_for_tratk(item.channel,item.contentSerieName, 1, total_episode)
title += "%sx%s " % (season, str(episode).zfill(2))