Pagination fixes and improvements
@@ -3,7 +3,7 @@
# Canale per AnimeForce
# ------------------------------------------------------------

from core import support
from core import scrapertools, support
from platformcode import logger

host = support.config.get_channel_url()
@@ -106,22 +106,39 @@ def check(item):
else:
return episodios(item)


@support.scrape
def episodios(item):
numerationEnabled = True
data = item.data
@support.scrape
def _episodes(item):
actLike = 'episodios'
disableAll = True
data = item.data

if '<h6>Streaming</h6>' in data:
patron = r'<td style[^>]+>\s*.*?(?:<span[^>]+)?<strong>(?P<episode>[^<]+)<\/strong>.*?<td style[^>]+>\s*<a href="(?P<url>[^"]+)"[^>]+>'
else:
patron = r'<a\s*href="(?P<url>[^"]+)"[^>]+>(?P<episode>\d+)[<-](?P<episode2>\d+)?'

def itemHook(item):
if item.url.startswith('//'): item.url= 'https:' + item.url
elif item.url.startswith('/'): item.url= 'https:/' + item.url
return item
action = 'findvideos'
return locals()

itemlist = support.itemlistdb() if item.itemlist else []
groups = support.match(item.data, patron=[r'"tabpanel">.*?</div', r'Special-tab">.*?</div']).matches
for group in groups:
item.data = group
if 'Special' in group:
item.contentSeason = 0
itemlist.extend(_episodes(item))

from platformcode.autorenumber import start
start(itemlist, item)
itemlist = support.season_pagination(itemlist, item, function_level='episodios')
return itemlist


if '<h6>Streaming</h6>' in data:
patron = r'<td style[^>]+>\s*.*?(?:<span[^>]+)?<strong>(?P<title>[^<]+)<\/strong>.*?<td style[^>]+>\s*<a href="(?P<url>[^"]+)"[^>]+>'
else:
patron = r'<a\s*href="(?P<url>[^"]+)"\s*title="(?P<title>[^"]+)"\s*class="btn btn-dark mb-1">'
def itemHook(item):
if item.url.startswith('//'): item.url= 'https:' + item.url
elif item.url.startswith('/'): item.url= 'https:/' + item.url
return item
action = 'findvideos'
return locals()


def findvideos(item):
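The reworked episodios() above collects the page's season tab panels plus the "Special" tab (forced to contentSeason 0), scrapes each panel through the inner @support.scrape helper, and only then runs autorenumber and season_pagination on the merged list. A rough standalone sketch of that flow, using invented HTML and plain re instead of support.match (not the channel's real markup):

import re

# Two fake "tab" panels: regular episodes plus a Special tab (not real AnimeForce markup).
FAKE_PAGE = ('<div id="tabpanel"><a href="//host/ep/1">1</a><a href="//host/ep/2">2</a></div'
             '<div id="Special-tab"><a href="//host/sp/1">1</a></div')

groups = re.findall(r'(?:"tabpanel"|"Special-tab")>.*?</div', FAKE_PAGE)

episodes = []
for group in groups:
    season = 0 if 'Special' in group else None   # Special tab -> season 0; others renumbered later
    for url, number in re.findall(r'href="([^"]+)">(\d+)<', group):
        if url.startswith('//'):                 # same protocol fix-up as itemHook above
            url = 'https:' + url
        episodes.append({'season': season, 'episode': int(number), 'url': url})

print(episodes)   # the merged list would then go to autorenumber and season_pagination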

@@ -21,12 +21,13 @@ def get_cookie(data):


def get_data(item):
# support.dbg()
url = httptools.downloadpage(item.url, headers=headers, follow_redirects=True, only_headers=True).url
data = support.match(url, headers=headers, follow_redirects=True).data
if 'AWCookieVerify' in data:
get_cookie(data)
data = get_data(item)
data = ''
if item.url:
url = httptools.downloadpage(item.url, headers=headers, follow_redirects=True, only_headers=True).url
data = support.match(url, headers=headers, follow_redirects=True).data
if 'AWCookieVerify' in data:
get_cookie(data)
data = get_data(item)
return data


@@ -2,7 +2,7 @@
# ------------------------------------------------------------
# Canale per cineblog01
# ------------------------------------------------------------
import datetime

import re

from core import scrapertools, httptools, servertools, support
@@ -142,7 +142,7 @@ def episodios(item):
disableAll = True

patronBlock = r'(?P<block>sp-head[^>]+>\s*(?:STAGION[EI]\s*(?:(?:DA)?\s*[0-9]+\s*A)?\s*[0-9]+|MINISSERIE)(?::\s*PARTE\s*[0-9]+)? - (?P<lang>[^-<]+)(?:- (?P<quality>[^-<]+))?.*?<\/div>.*?)spdiv[^>]*>'
patron = r'(?:/>|<p>|<strong>)(?P<other>.*?(?P<episode>[0-9]+(?:×|ÃÂ)[0-9]+)\s*(?P<title2>.*?)?(?:\s*–|\s*-|\s*<).*?)(?:<\/p>|<br)'
patron = r'(?:/>|<p>|<strong>)(?P<data>.*?(?P<episode>[0-9]+(?:×|ÃÂ)[0-9]+)\s*(?P<title2>.*?)?(?:\s*–|\s*-|\s*<).*?)(?:<\/p>|<br)'

return locals()

@@ -156,20 +156,21 @@ def episodios(item):

folderUrl = scrapertools.find_single_match(data, r'TUTT[EA] L[EA] \w+\s+(?:–|-)\s+<a href="?([^" ]+)')
data = httptools.downloadpage(folderUrl, disable_directIP=True).data
patron = r'<td>(?P<title>[^<]+)<td><a [^>]+href="(?P<url>[^"]+)[^>]+>'
patron = r'<td>(?P<title>[^<]+)<td><a [^>]+href="(?P<folderdata>[^"]+)[^>]+>'

return locals()


data = support.match(item.url, headers=headers).data
# itemlist = listed(item, data)
itemlist = []
if not item.itemlist:
itemlist.extend(folder(item, data) if 'TUTTE LE' in data or 'TUTTA LA' in data else [])

itemlist = listed(item, data)
itemlist.extend(folder(item, data) if 'TUTTE LE' in data or 'TUTTA LA' in data else [])

itemDict = {'ITA':{}, 'Sub-ITA':{}}
seasons = []
# support.dbg()

for it in itemlist:
it.contentType = 'episode'
if it.contentSeason and it.contentSeason not in seasons:
seasons.append(it.contentSeason)
itemDict['ITA'][it.contentSeason] = []
@@ -185,13 +186,15 @@ def episodios(item):
itlist.extend(sorted(itemDict['Sub-ITA'].get(season, []), key=lambda it: (it.contentSeason, it.contentEpisodeNumber)))
itemlist = itlist


for i in itemlist: logger.debug(i.title, i.contentType)
import inspect
if inspect.stack()[1][3] not in ['add_tvshow', 'get_episodes', 'update', 'find_episodes']:
if len(seasons) > 1:
itemlist = support.season_pagination(itemlist, item, [], 'episodios')
else:
itemlist = support.pagination(itemlist, item, 'episodios')
if config.get_setting('episode_info'):
support.tmdb.set_infoLabels_itemlist(itemlist, seekTmdb=True)
support.videolibrary(itemlist, item)
support.download(itemlist, item)

@@ -199,56 +202,51 @@ def episodios(item):


def findvideos(item):
if item.serieFolder:
return support.server(item, data=item.url)
if item.contentType == "episode":
return findvid_serie(item)

def load_links(itemlist, re_txt, desc_txt, quality=""):
streaming = scrapertools.find_single_match(data, re_txt).replace('"', '')
logger.debug('STREAMING', streaming)
logger.debug('STREAMING=', streaming)
matches = support.match(streaming, patron = r'<td><a.*?href=([^ ]+) [^>]+>([^<]+)<').matches
for scrapedurl, scrapedtitle in matches:
logger.debug("##### findvideos %s ## %s ## %s ##" % (desc_txt, scrapedurl, scrapedtitle))
itemlist.append(item.clone(action="play", title=scrapedtitle, url=scrapedurl, server=scrapedtitle, quality=quality))

logger.debug()
if item.folderdata:
return support.server(item, data=item.folderdata)
elif item.data:
return support.server(item, data=re.sub(r'((?:<p>|<strong>)?[^\d]*\d*(?:×|Ã)[0-9]+[^<]+)', '', item.data))
else:

itemlist = []
def load_links(itemlist, re_txt, desc_txt, quality=""):
streaming = scrapertools.find_single_match(data, re_txt).replace('"', '')
logger.debug('STREAMING', streaming)
logger.debug('STREAMING=', streaming)
matches = support.match(streaming, patron = r'<td><a.*?href=([^ ]+) [^>]+>([^<]+)<').matches
for scrapedurl, scrapedtitle in matches:
logger.debug("##### findvideos %s ## %s ## %s ##" % (desc_txt, scrapedurl, scrapedtitle))
itemlist.append(item.clone(action="play", title=scrapedtitle, url=scrapedurl, server=scrapedtitle, quality=quality))

# Carica la pagina
data = httptools.downloadpage(item.url).data
data = re.sub('\n|\t', '', data)
logger.debug()

# Estrae i contenuti - Streaming
load_links(itemlist, '<strong>Streamin?g:</strong>(.*?)cbtable', "Streaming", "SD")
itemlist = []

# Estrae i contenuti - Streaming HD
load_links(itemlist, '<strong>Streamin?g HD[^<]+</strong>(.*?)cbtable', "Streaming HD", "HD")
# Carica la pagina
data = httptools.downloadpage(item.url).data
data = re.sub('\n|\t', '', data)

# Estrae i contenuti - Streaming 3D
load_links(itemlist, '<strong>Streamin?g 3D[^<]+</strong>(.*?)cbtable', "Streaming 3D")
# Estrae i contenuti - Streaming
load_links(itemlist, '<strong>Streamin?g:</strong>(.*?)cbtable', "Streaming", "SD")

itemlist = support.server(item, itemlist=itemlist)
# Extract the quality format
patronvideos = r'([\w.]+)</strong></div></td>'
support.addQualityTag(item, itemlist, data, patronvideos)
# Estrae i contenuti - Streaming HD
load_links(itemlist, '<strong>Streamin?g HD[^<]+</strong>(.*?)cbtable', "Streaming HD", "HD")

return itemlist
# Estrae i contenuti - Streaming 3D
load_links(itemlist, '<strong>Streamin?g 3D[^<]+</strong>(.*?)cbtable', "Streaming 3D")

# Estrae i contenuti - Download
# load_links(itemlist, '<strong>Download:</strong>(.*?)<tableclass=cbtable height=30>', "aqua", "Download")
itemlist = support.server(item, itemlist=itemlist)
# Extract the quality format
patronvideos = r'([\w.]+)</strong></div></td>'
support.addQualityTag(item, itemlist, data, patronvideos)

# Estrae i contenuti - Download HD
# load_links(itemlist, '<strong>Download HD[^<]+</strong>(.*?)<tableclass=cbtable width=100% height=20>', "azure", "Download HD")
return itemlist

# Estrae i contenuti - Download
# load_links(itemlist, '<strong>Download:</strong>(.*?)<tableclass=cbtable height=30>', "aqua", "Download")

def findvid_serie(item):
logger.debug()
data = re.sub(r'((?:<p>|<strong>)?[^\d]*\d*(?:×|Ã)[0-9]+[^<]+)', '', item.other)

return support.server(item, data=data)
# Estrae i contenuti - Download HD
# load_links(itemlist, '<strong>Download HD[^<]+</strong>(.*?)<tableclass=cbtable width=100% height=20>', "azure", "Download HD")


def play(item):

@@ -138,6 +138,7 @@ class scrape:
self.downloadEnabled = self.args.get('downloadEnabled', True)

if self.args.get('disableAll', False):
self.tmdbEnabled = False
self.videlibraryEnabled = False
self.downloadEnabled = False
self.seasonPagination = False
@@ -235,7 +236,7 @@ class scrape:
autorenumber.start(self.itemlist, item)

for i in self.itemlist:
if i.contentSeason and i.contentSeason not in self.seasons:
if type(i.contentSeason) == int and i.contentSeason not in self.seasons:
self.seasons.append(i.contentSeason)

else: autorenumber.start(self.itemlist)
@@ -244,10 +245,14 @@ class scrape:
if inspect.stack()[1][3] not in ['add_tvshow', 'get_episodes', 'update', 'find_episodes']:
if len(self.seasons) > 1 and self.seasonPagination:
self.itemlist = season_pagination(self.itemlist, item, self.seasons, self.function)
elif self.pagination:
elif self.pagination or (self.function in ['episodios'] and self.seasonPagination):
self.itemlist = pagination(self.itemlist, item, self.function)

if self.action != 'play' and 'patronMenu' not in self.args and 'patronGenreMenu' not in self.args and self.tmdbEnabled and inspect.stack()[1][3] not in ['add_tvshow'] and self.function not in ['episodios', 'mainlist'] or (self.function in ['episodios'] and config.get_setting('episode_info')): # and function != 'episodios' and item.contentType in ['movie', 'tvshow', 'episode', 'undefined']
if self.tmdbEnabled and (
self.action != 'play' and 'patronMenu' not in self.args and 'patronGenreMenu' not in self.args
and inspect.stack()[1][3] not in ['add_tvshow'] and (self.function not in ['episodios', 'mainlist']
or (self.function in ['episodios'] and config.get_setting('episode_info')))):

tmdb.set_infoLabels_itemlist(self.itemlist, seekTmdb=True)

if inspect.stack()[1][3] not in ['find_episodes', 'add_tvshow']:
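The rewritten TMDB guard above keeps the same checks but makes the precedence explicit: TMDB must be enabled, play actions and menu patterns are skipped, add_tvshow callers are skipped, and for episodios the lookup now runs only when the episode_info setting is on. Restated as a hedged standalone predicate (parameter names are ad hoc stand-ins, not the scraper's attributes):

def should_query_tmdb(tmdb_enabled, action, args, caller, function, episode_info):
    # Mirrors the restructured condition in scrape: TMDB off short-circuits everything.
    if not tmdb_enabled:
        return False
    if action == 'play' or 'patronMenu' in args or 'patronGenreMenu' in args:
        return False
    if caller == 'add_tvshow':
        return False
    if function not in ['episodios', 'mainlist']:
        return True
    # episodios only when the user enabled episode_info; mainlist never.
    return function == 'episodios' and episode_info

print(should_query_tmdb(True, 'findvideos', {}, 'run', 'peliculas', False))   # True
print(should_query_tmdb(True, 'findvideos', {}, 'run', 'episodios', False))   # False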

@@ -332,7 +337,7 @@ class scrape:
self.matches.extend(matches)

def set_infolabels(self, item):
if item.infoLabels["title"] == self.itemParams.title:
if item.infoLabels["title"]:
infolabels = item.infoLabels
else:
if self.function == 'episodios':
@@ -360,6 +365,7 @@ class scrape:
infolabels['rating'] = scrapertools.decodeHtmlentities(self.itemParams.rating)

self.itemParams.infoLabels = infolabels
logger.debug

def set_sceneTitle(self):
from lib.guessit import guessit
@@ -447,20 +453,18 @@ class scrape:
break
else: AC = self.action
if (not self.itemParams.title or self.itemParams.title not in self.blacklist) and (self.search.lower() in self.itemParams.title.lower()):

it = item.clone(title=self.itemParams.title,
fulltitle=self.itemParams.title,
show=self.itemParams.title,
infoLabels=self.itemParams.infoLabels,
contentSeason= self.itemParams.infoLabels.get('season', ''),
contentEpisodeNumber= self.itemParams.infoLabels.get('episode', ''),
grouped = self.group,
episode2 = self.itemParams.second_episode,
extraInfo = self.itemParams.extraInfo,
disable_videolibrary = not self.args.get('addVideolibrary', True),
size = self.itemParams.size,
seed = self.itemParams.seed)

if self.itemParams.infoLabels.get('season'): it.contentSeason = self.itemParams.infoLabels.get('season')
if self.itemParams.infoLabels.get('episode'): it.contentEpisodeNumber = self.itemParams.infoLabels.get('episode')
if self.itemParams.url: it.url = self.itemParams.url
if self.function == 'episodios': it.fulltitle = it.show = self.itemParams.title
if self.itemParams.quality: it.quality = self.itemParams.quality
@@ -470,8 +474,6 @@ class scrape:
it.contentType = 'episode' if self.function == 'episodios' else CT if CT else item.contentType
if it.contentType not in ['movie'] and self.function != 'episodios' or it.contentType in ['undefined']: it.contentSerieName = self.itemParams.title
if self.function == 'peliculas': it.contentTitle= self.itemParams.title
it.contentSeason= self.itemParams.infoLabels.get('season', ''),
it.contentEpisodeNumber= self.itemParams.infoLabels.get('episode', ''),
if self.itemParams.title2: it.title2 = self.itemParams.title2

if self.itemParams.episode and self.group and not item.grouped:
@@ -494,7 +496,7 @@ class scrape:
except:
raise logger.ChannelScraperException

if it.contentSeason and it.contentSeason not in self.seasons:
if type(it.contentSeason) == int and it.contentSeason not in self.seasons:
self.seasons.append(it.contentSeason)

return it
@@ -903,7 +905,8 @@ def nextPage(itemlist, item, function_or_level=1, **kwargs):
total_pages = total_pages,
page=page if page else item.page + 1 if item.page else 2,
prevthumb = item.thumbnail,
thumbnail=thumb()))
thumbnail=thumb(),
folder = False))
return itemlist


@@ -923,20 +926,44 @@ def pagination(itemlist, item, function_level=1):
if len(itemlist) >= item.page * perpage:
itemlistdb(itemlist)
itlist.append(
item.clone(channel=item.channel,
action=action,
contentType=item.contentType,
title=typo(config.get_localized_string(30992), 'color kod bold'),
page=item.page + 1,
total_pages=round(len(itemlist)/perpage),
nextPage = True,
itemlist = True,
prevthumb = item.thumbnail,
thumbnail=thumb()))
Item(channel=item.channel,
contentType=item.contentType,
action=action,
title=typo(config.get_localized_string(90006), 'color kod bold'),
page=item.page + 1,
total_pages=round(len(itemlist)/perpage),
nextPage = True,
itemlist = True,
prevthumb = item.thumbnail,
thumbnail=thumb()))
itlist.append(
Item(channel=item.channel,
contentType=item.contentType,
action='gotopage',
real_action=action,
title=typo(config.get_localized_string(90007), 'color kod bold'),
page=item.page + 1,
total_pages=round(len(itemlist)/perpage),
nextPage = True,
itemlist = True,
prevthumb = item.thumbnail,
thumbnail=thumb(),
folder = False))
# itlist.append(
# item.clone(channel=item.channel,
# action=action,
# contentType=item.contentType,
# title=typo(config.get_localized_string(90006), 'color kod bold'),
# page=item.page + 1,
# total_pages=round(len(itemlist)/perpage),
# nextPage = True,
# itemlist = True,
# prevthumb = item.thumbnail,
# thumbnail=thumb()))
return itlist
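Alongside the usual next-page entry, pagination() now appends a separate "go to page" Item whose action is 'gotopage' while the real target action is parked in real_action for the launcher to restore. A minimal sketch of that round trip, with SimpleNamespace and the helper names below standing in for KOD's Item and launcher (assumed behaviour, simplified):

from types import SimpleNamespace

def make_goto_page_entry(channel, target_action, next_page, total_pages):
    # Mirrors the entry appended above: 'gotopage' is a placeholder action and the
    # real list-building action is kept in real_action.
    return SimpleNamespace(channel=channel, action='gotopage', real_action=target_action,
                           page=next_page, total_pages=total_pages, folder=False)

def resolve_goto_page(entry, requested_page):
    # Simplified version of what the launcher is expected to do for 'gotopage':
    # restore the real action and jump to the page the user asked for.
    entry.action = entry.real_action
    entry.page = requested_page
    return entry

entry = make_goto_page_entry('cineblog01', 'episodios', next_page=2, total_pages=7)
print(resolve_goto_page(entry, requested_page=5).action, entry.page)  # episodios 5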

def season_pagination(itemlist, item, seasons, function_level=1):
def season_pagination(itemlist, item, seasons=[], function_level=1):
if 'channel_search' in [s[3] for s in inspect.stack()]:
return itemlist

@@ -945,21 +972,23 @@ def season_pagination(itemlist, item, seasons, function_level=1):
if itemlist and not seasons:
seasons = []
for it in itemlist:
if it.contentSeason and it.contentSeason not in seasons:
if type(it.contentSeason) == int and it.contentSeason not in seasons:
seasons.append(it.contentSeason)

if seasons:
if len(seasons) > 1:
itemlistdb(itemlist)
seasons.sort()
if not item.nextSeason:
item.nextSeason = 0
try:

current = seasons[item.nextSeason]

for it in itemlist:
if it.contentSeason and it.contentSeason == current:
for it in sorted(itemlist, key=lambda it: (it.contentSeason, it.contentEpisodeNumber)):
logger.debug('SEASON',it.contentSeason)
if type(it.contentSeason) == int and it.contentSeason == current:
itlist.append(it)
elif it.contentSeason and it.contentSeason > current:
elif type(it.contentSeason) == int and it.contentSeason > current:
break

if item.nextSeason + 1 < len(seasons):
@@ -981,10 +1010,14 @@ def season_pagination(itemlist, item, seasons, function_level=1):
nextSeason = item.nextSeason + 1,
itemlist = True,
prevthumb = item.thumbnail,
thumbnail=thumb()))
thumbnail=thumb(),
folder = False))
return itlist
except:
return itemlist
else:
return pagination(itemlist, item, function_level)


# Find servers
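season_pagination() now defaults seasons to an empty list, rebuilds it from the itemlist counting only integer contentSeason values (so the Special season 0 is no longer dropped by a truthiness check), sorts the episodes, and emits only the season selected by item.nextSeason, falling back to plain pagination when a single season is found. A simplified, hedged sketch of that selection step with stand-in items:

from types import SimpleNamespace as Episode

def pick_current_season(itemlist, next_season=0):
    # Only integer seasons count, so season 0 (specials) is no longer skipped
    # by the old "if it.contentSeason" truthiness test.
    seasons = sorted({it.contentSeason for it in itemlist
                      if type(it.contentSeason) == int})
    if len(seasons) < 2:
        return itemlist, None        # single season: plain pagination would apply instead
    current = seasons[next_season]
    ordered = sorted(itemlist, key=lambda it: (it.contentSeason, it.contentEpisodeNumber))
    return [it for it in ordered if it.contentSeason == current], current

episodes = [Episode(contentSeason=s, contentEpisodeNumber=e)
            for s in (2, 0, 1) for e in (2, 1)]
page, season = pick_current_season(episodes, next_season=0)
print(season, [(it.contentSeason, it.contentEpisodeNumber) for it in page])
# 0 [(0, 1), (0, 2)] -> the Special season is now the first "page"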

@@ -4,11 +4,10 @@
# --------------------------------------------------------------------------------


from core import support
import xbmc, xbmcgui, re, base64, inspect, sys
from core import jsontools, tmdb, scrapertools, filetools
import xbmc, xbmcgui, re, base64, sys
from core import jsontools, tmdb, filetools
from core.item import Item
from core.support import typo, match, dbg, Item
from core.support import typo, dbg, Item
from platformcode import config, platformtools, logger
PY3 = True if sys.version_info[0] >= 3 else False
if PY3:
@@ -72,7 +71,7 @@ def b64(json, mode = 'encode'):
def find_episodes(item):
logger.debug()
ch = __import__('channels.' + item.channel, fromlist=["channels.{}".format(item.channel)])
itemlist = ch.episodios(item)
itemlist = getattr(ch, item.action)(item)
return itemlist

def busy(state):
@@ -180,7 +179,7 @@ class autorenumber():

def renumber(self):
def sub_thread(item):
if not item.contentSeason:
if type(item.contentSeason) != int:
number = str(item.contentEpisodeNumber)
if number:
if not number in self.episodes: self.makelist()
@@ -188,7 +187,7 @@ class autorenumber():
item.contentSeason = int(self.episodes[number].split('x')[0])
item.contentEpisodeNumber = int(self.episodes[number].split('x')[1])

# support.dbg()
# dbg()
# for i in self.itemlist:
# sub_thread(i)


@@ -167,8 +167,8 @@ def run(item=None):
item.page = page
import re
item.url = re.sub('([=/])[0-9]+(/?)$', '\g<1>{}\g<2>'.format(page), item.url)
run(item)
# xbmc.executebuiltin("Container.Update(%s?%s)" % (sys.argv[0], item.tourl()))
xbmc.executebuiltin("Container.Update(%s?%s)" % (sys.argv[0], item.tourl()))

elif item.action == "gotoseason":
head = 'Seleziona la stagione'
seasons = [str(s) for s in item.allSeasons]
@@ -176,10 +176,8 @@ def run(item=None):
if int(season) > -1:
import xbmc
item.action = item.real_action
item.nextSeason = item.allSeasons.index(season + 1)
run(item)
# logger.debug(item)
# xbmc.executebuiltin("Container.Update(%s?%s)" % (sys.argv[0], new_item.tourl()))
item.nextSeason = season
xbmc.executebuiltin("Container.Update(%s?%s)" % (sys.argv[0], item.tourl()))
else:
# Checks if channel exists
if os.path.isfile(os.path.join(config.get_runtime_path(), 'channels', item.channel + ".py")):