diff --git a/.github/workflows/updateDomains.yml b/.github/workflows/updateDomains.yml
new file mode 100644
index 00000000..6117241b
--- /dev/null
+++ b/.github/workflows/updateDomains.yml
@@ -0,0 +1,28 @@
+name: Update channel domains
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '30 17 * * *'
+
+jobs:
+  update:
+    runs-on: ubuntu-latest
+    steps:
+      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
+      - uses: actions/checkout@v2
+
+      - name: Set up Python 2.7
+        uses: actions/setup-python@v1
+        with:
+          python-version: 2.7
+
+      - name: Update domains
+        run: |
+          python tools/updateDomains.py
+
+      - name: Commit & Push changes
+        uses: actions-js/push@master
+        with:
+          message: "Aggiornamento domini"
+          branch: "master"
+          github_token: ${{ secrets.API_TOKEN_GITHUB }}
\ No newline at end of file
diff --git a/addon.xml b/addon.xml
index 39a5c1ff..309038d7 100644
--- a/addon.xml
+++ b/addon.xml
@@ -1,4 +1,4 @@
-
+
@@ -26,8 +26,10 @@
     resources/media/themes/ss/2.png
     resources/media/themes/ss/3.png
- - ridisegnata la finestra della scelta film/serietv quando si aggiunge in videoteca
-- modifiche minori, qualche fix ai canali/server ed alla ricerca alternativa
+ -Nuova Ricerca Globale
+-Nuova Rinumerazione
+-Messaggi di Errore più chiari
+-Fix var
     Naviga velocemente sul web e guarda i contenuti presenti
     [COLOR red]The owners and submitters to this addon do not host or distribute any of the content displayed by these addons nor do they have any affiliation with the content providers.[/COLOR]
     [COLOR yellow]Kodi © is a registered trademark of the XBMC Foundation. We are not connected to or in any other way affiliated with Kodi, Team Kodi, or the XBMC Foundation. Furthermore, any software, addons, or products offered by us will receive no support in official Kodi channels, including the Kodi forums and various social networks.[/COLOR]
diff --git a/channels.json b/channels.json
index 5fd0a393..7940575a 100644
--- a/channels.json
+++ b/channels.json
@@ -1,57 +1,57 @@
 {
-  "findhost": {
-    "altadefinizione01": "https://altadefinizione01-nuovo.info",
-    "altadefinizioneclick": "https://altadefinizione-nuovo.me",
-    "animealtadefinizione": "https://www.animealtadefinizione.it",
-    "cineblog01": "https://cb01.uno",
-    "eurostreaming": "https://eurostreaming.link",
-    "film4k": "https://film4k-nuovo.link",
-    "filmpertutti": "https://filmpertutti.nuovo.live",
-    "ilcorsaronero": "https://lagazzettadelcorsaro.com",
-    "seriehd": "https://nuovoindirizzo.info/seriehd/",
-    "serietvonline": "https://serietvonline.online",
-    "tantifilm": "https://www.tantifilm.wiki"
-  },
   "direct": {
-    "altadefinizione01_link": "https://altadefinizione01.energy",
-    "animealtadefinizione": "https://www.animealtadefinizione.it",
-    "animeforce": "https://ww1.animeforce.org",
-    "animeleggendari": "https://animeora.com",
-    "animesaturn": "https://www.animesaturn.it",
-    "animestream": "https://www.animeworld.tv",
-    "animesubita": "http://www.animesubita.org",
-    "animetubeita": "http://www.animetubeita.com",
-    "animeunity": "https://www.animeunity.it",
-    "animeuniverse": "https://www.animeuniverse.it/",
-    "animeworld": "https://www.animeworld.tv",
-    "casacinema": "https://www.casacinema.page",
-    "cb01anime": "https://www.cineblog01.red",
-    "cinemalibero": "https://cinemalibero.xyz",
-    "cinetecadibologna": "http://cinestore.cinetecadibologna.it",
-    "dreamsub": "https://dreamsub.stream",
-    "dsda": "https://www.dsda.press",
-    "fastsubita": "https://fastsubita.uno",
-    "filmgratis":
"https://www.filmaltadefinizione.co", - "filmigratis": "https://filmigratis.org", - "filmsenzalimiticc": "https://www.filmsenzalimiti01.online", - "filmstreaming01": "https://filmstreaming01.com", - "guardaserie_stream": "https://guardaserie.host", - "guardaseriecam": "https://guardaserie.cam", - "guardaserieclick": "https://www.guardaserie.clinic", - "guardaserieicu": "https://guardaserie.us", - "hd4me": "https://hd4me.net", - "ilgeniodellostreaming": "https://ilgeniodellostreaming.pet", - "ilgeniodellostreaming_cam": "https://ilgeniodellostreaming.gold", - "italiaserie": "https://italiaserie.eu", - "mondoserietv": "https://mondoserietv.fun", - "piratestreaming": "https://www.piratestreaming.deals", - "polpotv": "https://polpotv.life", - "raiplay": "https://www.raiplay.it", - "serietvsubita": "http://serietvsubita.xyz", - "serietvu": "https://www.serietvu.link", - "streamingcommunity": "https://streamingcommunity.net", - "streamtime": "https://t.me/s/StreamTime", - "toonitalia": "https://toonitalia.org", + "altadefinizione01_link": "https://altadefinizione01.tips", + "animealtadefinizione": "https://www.animealtadefinizione.it", + "animeforce": "https://ww1.animeforce.org", + "animeleggendari": "https://animeora.com", + "animesaturn": "https://www.animesaturn.it", + "animestream": "https://www.animeworld.tv", + "animesubita": "http://www.animesubita.org", + "animetubeita": "http://www.animetubeita.com", + "animeunity": "https://www.animeunity.it", + "animeuniverse": "https://www.animeuniverse.it/", + "animeworld": "https://www.animeworld.tv", + "casacinema": "https://www.casacinema.page", + "cb01anime": "https://www.cineblog01.red", + "cinemalibero": "https://cinemalibero.life", + "cinetecadibologna": "http://cinestore.cinetecadibologna.it", + "dreamsub": "https://dreamsub.stream", + "dsda": "https://www.dsda.press", + "eurostreaming": "https://eurostreaming.bid", + "fastsubita": "https://fastsubita.xyz", + "filmgratis": "https://www.filmaltadefinizione.me", + "filmigratis": "https://filmigratis.org", + "filmsenzalimiticc": "https://www.filmsenzalimiti01.online", + "filmstreaming01": "https://filmstreaming01.com", + "guardaserie_stream": "https://guardaserie.host", + "guardaseriecam": "https://guardaserie.cam", + "guardaserieclick": "https://www.guardaserie.deals", + "guardaserieicu": "https://guardaserie.rocks", + "hd4me": "https://hd4me.net", + "ilgeniodellostreaming": "https://ilgeniodellostreaming.pet", + "ilgeniodellostreaming_cam": "https://ilgeniodellostreaming.gold", + "italiaserie": "https://italiaserie.run", + "mondoserietv": "https://mondoserietv.fun", + "piratestreaming": "https://www.piratestreaming.date", + "polpotv": "https://roma.polpo.tv", + "raiplay": "https://www.raiplay.it", + "serietvonline": "https://serietvonline.cam", + "serietvsubita": "http://serietvsubita.xyz", + "serietvu": "https://www.serietvu.link", + "streamingcommunity": "https://streamingcommunity.net", + "streamtime": "https://t.me/s/StreamTime", + "toonitalia": "https://toonitalia.org", "vvvvid": "https://www.vvvvid.it" + }, + "findhost": { + "altadefinizione01": "https://altadefinizione01-nuovo.info", + "altadefinizioneclick": "https://altadefinizione-nuovo.me", + "animealtadefinizione": "https://www.animealtadefinizione.it", + "cineblog01": "https://cb01.uno", + "film4k": "https://film4k-nuovo.link", + "filmpertutti": "https://filmpertutti.nuovo.live", + "ilcorsaronero": "https://lagazzettadelcorsaro.com", + "seriehd": "https://nuovoindirizzo.info/seriehd/", + "tantifilm": "https://www.tantifilm.wiki" 
} } \ No newline at end of file diff --git a/channels/altadefinizione01.py b/channels/altadefinizione01.py index 2d5c351c..171f0e28 100644 --- a/channels/altadefinizione01.py +++ b/channels/altadefinizione01.py @@ -51,7 +51,7 @@ def peliculas(item): ## deflang = 'ITA' action="findvideos" - patron = r'
\s*(?P[^<]+).*?src="(?P<thumb>[^"]+).*?<div class="trdublaj">(?P<quality>[^<]+).*?<span class="ml-label">(?P<year>[0-9]+).*?<span class="ml-label">(?P<duration>[^<]+).*?<p>(?P<plot>[^<]+)' + patron = r'<div class="cover boxcaption"> +<h2>\s*<a href="(?P<url>[^"]+)">(?P<title>[^<]+).*?src="(?P<thumb>[^"]+).*?<div class="trdublaj">(?P<quality>[^<]+).*?<span class="ml-label">(?P<year>[0-9]+).*?<span class="ml-label">(?P<duration>[^<]+).*?<p>(?P<plot>[^<]+)' if item.args == "search": patronBlock = r'</script> <div class="boxgrid caption">(?P<block>.*)<div id="right_bar">' diff --git a/channels/altadefinizione01_link.py b/channels/altadefinizione01_link.py index 17f2a409..e76ca1e0 100644 --- a/channels/altadefinizione01_link.py +++ b/channels/altadefinizione01_link.py @@ -73,7 +73,7 @@ def search(item, text): except: import sys for line in sys.exc_info(): - logger.info("%s mainlist search log: %s" % (__channel__, line)) + logger.error("%s" % line) return [] # =========== def per le novità nel menu principale ============= diff --git a/channels/altadefinizioneclick.py b/channels/altadefinizioneclick.py index 66281a78..c03eac47 100644 --- a/channels/altadefinizioneclick.py +++ b/channels/altadefinizioneclick.py @@ -46,9 +46,8 @@ def mainlist(item): @support.scrape def peliculas(item): - # debug = True - patron = r'<div class="wrapperImage">[ ]?(?:<span class="hd">(?P<quality>[^<>]+))?.+?href="(?P<url>[^"]+)".+?src="(?P<thumb>[^"]+)".+?<h2 class="titleFilm">[^>]+>'\ - r'(?P<title>.+?)[ ]?(?:|\[(?P<lang>[^\]]+)\])?(?:\((?P<year>\d{4})\))?</a>.*?(?:IMDB\:</strong>[ ](?P<rating>.+?)<|</h2> )' + # debug=True + patron = r'<div class="wrapperImage">\s*(?:<span class="year">(?P<year>[^<]+)[^>]+>)?(?:<span class="hd">(?P<quality>[^<>]+))?.+?href="(?P<url>[^"]+)".+?src="(?P<thumb>[^"]+)".+?<h2 class="titleFilm">[^>]+>(?P<title>.+?)[ ]?(?:|\[(?P<lang>[^\]]+)\])?</a>.*?(?:IMDB\:</strong>[ ](?P<rating>.+?)<|</h2> )' patronBlock = r'h1>(?P<block>.*?)<div class="row ismobile">' if item.args == 'az': @@ -60,8 +59,8 @@ def peliculas(item): patron = r'<div class="wrapperImage">[ ]?(?:<span class="hd">(?P<quality>[^<>]+))?.+?href="(?P<url>[^"]+)".+?src="(?P<thumb>[^"]+)"'\ r'.+?<h2 class="titleFilm(?:Mobile)?">[^>]+>(?P<title>.+?)[ ]?(?:|\[(?P<lang>[^\]]+)\])?(?:\((?P<year>\d{4})\))?</a>.*?(IMDB\:[ ](?P<rating>.+?))<' elif item.args == 'search': - patronBlock = r'<section id="lastUpdate">(?P<block>.*?)<div class="row ismobile">' - patron = r'<a href="(?P<url>[^"]+)">\s*<div class="wrapperImage">(?:<span class="year">(?P<year>[^<]+)<\/span>)?(?:<span class="hd">(?P<quality>[^<]+)<\/span>)?<img[^s]+src="(?P<thumb>[^"]+)"[^>]+>[^>]+>[^>]+>(?P<title>[^<]+)' + patronBlock = r'<section id="lastUpdate">(?P<block>.*?)(?:<div class="row ismobile">|<section)' + patron = r'<a href="(?P<url>[^"]+)">\s*<div class="wrapperImage">(?:\s*<span class="year">(?P<year>[^<]+)<\/span>)?(?:\s*<span class="hd">(?P<quality>[^<]+)<\/span>)?[^>]+>\s*<img[^s]+src="(?P<thumb>[^"]+)"[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>\s*(?P<rating>[^<]+)[^>]+>[^>]+>[^>]+>[^>]+>(?P<title>[^<]+)' if not item.args: patronBlock = r'ULTIMI INSERITI(?P<block>.*?)<div class="sliderLastUpdate ismobile ">' @@ -99,7 +98,7 @@ def search(item, texto): support.info("search ", texto) item.args = 'search' - item.url = host + "/search/" + texto + item.url = host + "?s=" + texto try: return peliculas(item) # Continua la ricerca in caso di errore diff --git a/channels/animealtadefinizione.py b/channels/animealtadefinizione.py index f78a9abe..740c3f7c 100644 --- 
a/channels/animealtadefinizione.py +++ b/channels/animealtadefinizione.py @@ -26,7 +26,7 @@ def mainlist(item): @support.scrape def menu(item): action = 'peliculas' - data = support.match(item, patron= r'<a href="' + host + r'/category/' + item.args.lower() + r'/">' + item.args + r'</a><ul class="sub-menu">(.*?)</ul>').match + patronBlock= r'<a href="' + host + r'/category/' + item.args.lower() + r'/">' + item.args + r'</a>\s*<ul class="sub-menu">(?P<block>.*?)</ul>' patronMenu = r'<a href="(?P<url>[^"]+)">(?P<title>[^<]+)<' return locals() @@ -85,7 +85,7 @@ def peliculas(item): typeContentDict = {'movie':['movie']} typeActionDict = {'findvideos':['movie']} - def ItemItemlistHook(item, itemlist): + def itemlistHook(itemlist): if item.search: itemlist = [ it for it in itemlist if ' Episodio ' not in it.title ] if len(itemlist) == int(perpage): @@ -97,6 +97,7 @@ def peliculas(item): @support.scrape def episodios(item): + anime = True pagination = int(perpage) patron = epPatron return locals() @@ -107,18 +108,19 @@ def findvideos(item): if item.contentType == 'movie': matches = support.match(item, patron=epPatron).matches for title, url in matches: - get_video_list(url, title, itemlist) + # support.dbg() + get_video_list(item, url, title, itemlist) else: - get_video_list(item.url, support.config.get_localized_string(30137), itemlist) + get_video_list(item, item.url, support.config.get_localized_string(30137), itemlist) return support.server(item, itemlist=itemlist) -def get_video_list(url, title, itemlist): +def get_video_list(item, url, title, itemlist): from requests import get if not url.startswith('http'): url = host + url url = support.match(get(url).url, string=True, patron=r'file=([^$]+)').match if 'http' not in url: url = 'http://' + url - itemlist.append(support.Item(title=title, url=url, server='directo', action='play')) + itemlist.append(item.clone(title=title, url=url, server='directo', action='play')) return itemlist \ No newline at end of file diff --git a/channels/animeforce.py b/channels/animeforce.py index b1373656..6390ca90 100644 --- a/channels/animeforce.py +++ b/channels/animeforce.py @@ -27,7 +27,7 @@ def mainlist(item): def submenu(item): action = 'peliculas' patronBlock = r'data-taxonomy="' + item.args + r'"(?P<block>.*?)</select' - patronMenu = r'<option class="level-\d+ (?P<u>[^"]+)"[^>]+>(?P<t>[^&]+)[^\(]+\((?P<num>\d+)' + patronMenu = r'<option class="level-\d+ (?P<u>[^"]+)"[^>]+>(?P<t>[^(]+)[^\(]+\((?P<num>\d+)' def itemHook(item): item.url += host + '/anime/' + item.args + '/' + item.u item.title = support.typo(item.t, 'bold') @@ -54,10 +54,10 @@ def newest(categoria): return itemlist -def search(item, texto): - support.info(texto) - item.args = 'noorder' - item.url = host + '/?s=' + texto + '&cat=6010' +def search(item, text): + support.info('search',text) + item.search = text + item.url = host + '/lista-anime/' item.contentType = 'tvshow' try: return peliculas(item) @@ -71,6 +71,7 @@ def search(item, texto): @support.scrape def peliculas(item): + search = item.search anime = True if 'movie' in item.url: action = 'findvideos' diff --git a/channels/animeleggendari.py b/channels/animeleggendari.py index 4715ae78..10d032b9 100644 --- a/channels/animeleggendari.py +++ b/channels/animeleggendari.py @@ -59,7 +59,7 @@ def peliculas(item): anime = True blacklist = ['top 10 anime da vedere'] if item.url != host: patronBlock = r'<div id="main-content(?P<block>.*?)<aside' - patron = r'<figure class="(?:mh-carousel-thumb|mh-posts-grid-thumb)"> <a (?:class="[^"]+" 
)?href="(?P<url>[^"]+)" title="(?P<title>.*?)(?: \((?P<year>\d+)\))? (?:(?P<lang>SUB ITA|ITA))(?: (?P<title2>[Mm][Oo][Vv][Ii][Ee]))?[^"]*"><img (?:class="[^"]+"|width="[^"]+" height="[^"]+") src="(?P<thumb>[^"]+)"[^>]+' + patron = r'<figure class="(?:mh-carousel-thumb|mh-posts-grid-thumb)">\s*<a (?:class="[^"]+" )?href="(?P<url>[^"]+)" title="(?P<title>.*?)(?: \((?P<year>\d+)\))? (?:(?P<lang>SUB ITA|ITA))(?: (?P<title2>[Mm][Oo][Vv][Ii][Ee]))?[^"]*"><img (?:class="[^"]+"|width="[^"]+" height="[^"]+") src="(?P<thumb>[^"]+)"[^>]+' def itemHook(item): if 'movie' in item.title.lower(): item.title = support.re.sub(' - [Mm][Oo][Vv][Ii][Ee]|[Mm][Oo][Vv][Ii][Ee]','',item.title) diff --git a/channels/animesaturn.py b/channels/animesaturn.py index 14ad73a7..ee6d8f50 100644 --- a/channels/animesaturn.py +++ b/channels/animesaturn.py @@ -60,7 +60,7 @@ def newest(categoria): def submenu(item): data = support.match(item.url + item.args).data action = 'filter' - patronMenu = r'<h5 class="[^"]+">(?P<title>[^<]+)[^>]+>[^>]+><select id="(?P<parameter>[^"]+)"[^>]+>(?P<url>.*?)</select>' + patronMenu = r'<h5 class="[^"]+">(?P<title>[^<]+)[^>]+>[^>]+>\s*<select id="(?P<parameter>[^"]+)"[^>]+>(?P<url>.*?)</select>' def itemlistHook(itemlist): itemlist.insert(0, item.clone(title=support.typo('Tutti','bold'), url=item.url + item.args, action='peliculas')) return itemlist[:-1] @@ -104,7 +104,7 @@ def peliculas(item): data = support.match(item, post=post, headers=headers).data if item.args == 'updated': page = support.match(data, patron=r'data-page="(\d+)" title="Next">').match - patron = r'<a href="(?P<url>[^"]+)" title="(?P<title>[^"(]+)(?:\s*\((?P<year>\d+)\))?(?:\s*\((?P<lang>[A-Za-z-]+)\))?"><img src="(?P<thumb>[^"]+)"[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>\s\s*(?P<type>[^\s]+)\s*(?P<episode>\d+)' + patron = r'<a href="(?P<url>[^"]+)" title="(?P<title>[^"(]+)(?:\s*\((?P<year>\d+)\))?(?:\s*\((?P<lang>[A-Za-z-]+)\))?">\s*<img src="(?P<thumb>[^"]+)"[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>\s\s*(?P<type>[^\s]+)\s*(?P<episode>\d+)' typeContentDict = {'Movie':'movie', 'Episodio':'episode'} #item.contentType='episode' action = 'findvideos' def itemlistHook(itemlist): @@ -113,7 +113,7 @@ def peliculas(item): return itemlist elif 'filter' in item.args: page = support.match(data, patron=r'totalPages:\s*(\d+)').match - patron = r'<a href="(?P<url>[^"]+)" title="(?P<title>[^"(]+)(?:\s*\((?P<year>\d+)\))?(?:\s*\((?P<lang>[A-Za-z-]+)\))?"><img src="(?P<thumb>[^"]+)"' + patron = r'<a href="(?P<url>[^"]+)" title="(?P<title>[^"(]+)(?:\s*\((?P<year>\d+)\))?(?:\s*\((?P<lang>[A-Za-z-]+)\))?">\s*<img src="(?P<thumb>[^"]+)"' def itemlistHook(itemlist): if item.nextpage: item.nextpage += 1 else: item.nextpage = 2 @@ -127,7 +127,7 @@ def peliculas(item): patron = r'<a href="(?P<url>[^"]+)"[^>]+>(?P<title>[^<(]+)(?:\s*\((?P<year>\d+)\))?(?:\s*\((?P<lang>[A-za-z-]+)\))?</a>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>\s*<img width="[^"]+" height="[^"]+" src="(?P<thumb>[^"]+)"[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>(?P<plot>[^<]+)<' else: # debug=True - patron = r'<img src="(?P<thumb>[^"]+)" alt="(?P<title>[^"\(]+)(?:\((?P<lang>[Ii][Tt][Aa])\))?(?:\s*\((?P<year>\d+)\))?[^"]*"[^>]+>[^>]+>[^>]+>[^>]+>[^>]+><a class="[^"]+" href="(?P<url>[^"]+)">[^>]+>[^>]+>[^>]+><p[^>]+>(?:(?P<plot>[^<]+))?<' + patron = r'<img src="(?P<thumb>[^"]+)" 
alt="(?P<title>[^"\(]+)(?:\((?P<lang>[Ii][Tt][Aa])\))?(?:\s*\((?P<year>\d+)\))?[^"]*"[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>\s*<a class="[^"]+" href="(?P<url>[^"]+)">[^>]+>[^>]+>[^>]+>\s*<p[^>]+>(?:(?P<plot>[^<]+))?<' return locals() diff --git a/channels/animeunity.py b/channels/animeunity.py index 064d834d..a434acea 100644 --- a/channels/animeunity.py +++ b/channels/animeunity.py @@ -187,7 +187,7 @@ def peliculas(item): itemlist.append(itm) - autorenumber.renumber(itemlist) + autorenumber.start(itemlist) if len(itemlist) >= 30: itemlist.append(item.clone(title=support.typo(support.config.get_localized_string(30992), 'color kod bold'), thumbnail=support.thumb(), page=page + 1)) @@ -212,7 +212,7 @@ def episodios(item): contentType='episode', url=it['link'])) - autorenumber.renumber(itemlist, item, 'bold') + autorenumber.start(itemlist, item) support.videolibrary(itemlist, item) support.download(itemlist, item) return itemlist diff --git a/channels/animeuniverse.py b/channels/animeuniverse.py index 40c34dcf..e872ec01 100644 --- a/channels/animeuniverse.py +++ b/channels/animeuniverse.py @@ -84,12 +84,12 @@ def peliculas(item): if not item.pag: item.pag = 1 anime=True - blacklist=['Altri Hentai'] + # blacklist=['Altri Hentai'] data = support.match(host + '/wp-content/themes/animeuniverse/functions/ajax.php', post='sorter=recent&location=&loop=main+loop&action=sort&numarticles='+perpage+'&paginated='+str(item.pag)+'¤tquery%5B'+query+'%5D='+searchtext+'&thumbnail=1').data.replace('\\','') patron=r'<a href="(?P<url>[^"]+)"><img width="[^"]+" height="[^"]+" src="(?P<thumb>[^"]+)" class="[^"]+" alt="" title="(?P<title>.*?)\s*(?P<lang>Sub ITA|ITA)?(?:"| \[)' - def ItemItemlistHook(item, itemlist): - if len(itemlist) == int(perpage) - len(blacklist): + def itemlistHook(itemlist): + if len(itemlist) == int(perpage): item.pag += 1 itemlist.append(item.clone(title=support.typo(support.config.get_localized_string(30992), 'color kod bold'), action='peliculas')) return itemlist diff --git a/channels/animeworld.py b/channels/animeworld.py index 4897b96d..19ba5d17 100644 --- a/channels/animeworld.py +++ b/channels/animeworld.py @@ -88,7 +88,7 @@ def menu(item): action = 'submenu' # data = get_data(item) patronMenu=r'<button[^>]+>\s*(?P<title>[A-Za-z0-9]+)\s*<span.[^>]+>(?P<other>.*?)</ul>' - def ItemItemlistHook(item, itemlist): + def itemlistHook(itemlist): itemlist.insert(0, item.clone(title=support.typo('Tutti','bold'), action='peliculas')) itemlist.append(item.clone(title=support.typo('Cerca...','bold'), action='search', search=True, thumbnail=support.thumb('search.png'))) return itemlist @@ -174,7 +174,7 @@ def peliculas(item): @support.scrape def episodios(item): anime=True - pagination = 50 + pagination = 25 # data = get_data(item) patronBlock= r'<div class="server\s*active\s*"(?P<block>.*?)(?:<div class="server|<link)' patron = r'<li[^>]*>\s*<a.*?href="(?P<url>[^"]+)"[^>]*>(?P<episode>[^<]+)<' diff --git a/channels/casacinema.py b/channels/casacinema.py index bf27fed8..df59d713 100644 --- a/channels/casacinema.py +++ b/channels/casacinema.py @@ -105,9 +105,9 @@ def peliculas(item): action = 'select' if item.args == 'newest': - patron = r'<li><a href="(?P<url>[^"]+)"[^=]+="(?P<thumb>[^"]+)"><div> <div[^>]+>(?P<title>[^\(\[<]+)(?:\[(?P<quality1>HD)\])?[ ]?(?:\(|\[)?(?P<lang>Sub-ITA)?(?:\)|\])?[ ]?(?:\[(?P<quality>.+?)\])?[ ]?(?:\((?P<year>\d+)\))?<(?:[^>]+>.+?(?:title="Nuovi episodi">(?P<episode>\d+x\d+)[ ]?(?P<lang2>Sub-Ita)?|title="IMDb">(?P<rating>[^<]+)))?' 
+ patron = r'<li><a href="(?P<url>[^"]+)"[^=]+="(?P<thumb>[^"]+)"><div>\s+<div[^>]+>(?P<title>[^\(\[<]+)(?:\[(?P<quality1>HD)\])?[ ]?(?:\(|\[)?(?P<lang>Sub-ITA)?(?:\)|\])?[ ]?(?:\[(?P<quality>.+?)\])?[ ]?(?:\((?P<year>\d+)\))?<(?:[^>]+>.+?(?:title="Nuovi episodi">(?P<episode>\d+x\d+)[ ]?(?P<lang2>Sub-Ita)?|title="IMDb">(?P<rating>[^<]+)))?' else: - patron = r'<li><a href="(?P<url>[^"]+)"[^=]+="(?P<thumb>[^"]+)"><div> <div[^>]+>(?P<title>[^\(\[<]+)(?:\[(?P<quality1>HD)\])?[ ]?(?:\(|\[)?(?P<lang>Sub-ITA)?(?:\)|\])?[ ]?(?:\[(?P<quality>.+?)\])?[ ]?(?:\((?P<year>\d+)\))?' + patron = r'<li><a href="(?P<url>[^"]+)"[^=]+="(?P<thumb>[^"]+)"><div>\s+<div[^>]+>(?P<title>[^\(\[<]+)(?:\[(?P<quality1>HD)\])?[ ]?(?:\(|\[)?(?P<lang>Sub-ITA)?(?:\)|\])?[ ]?(?:\[(?P<quality>.+?)\])?[ ]?(?:\((?P<year>\d+)\))?' patronNext = r'<a href="([^"]+)" >Pagina' # debug = True diff --git a/channels/cb01anime.py b/channels/cb01anime.py index deccdc5d..d870e34e 100644 --- a/channels/cb01anime.py +++ b/channels/cb01anime.py @@ -64,7 +64,7 @@ def peliculas(item): if item.args == 'newest': patron = r'<div id="blockvids">\s*<ul>\s*<li>\s*<a href="(?P<url>[^"]+)"[^>]+><img[^>]+src="(?P<thumb>[^"]+)"[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>(?P<title>[^\[]+)\[(?P<lang>[^\]]+)\]' else: - patron = r'<div class="span4">\s*<a href="(?P<url>[^"]+)"><img src="(?P<thumb>[^"]+)"[^>]+><\/a>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+> <h1>(?P<title>[^<\[]+)(?:\[(?P<lang>[^\]]+)\])?</h1></a>.*?-->(?:.*?<br />)?\s*(?P<plot>[^<]+)' + patron = r'<div class="span4">\s*<a href="(?P<url>[^"]+)"><img src="(?P<thumb>[^"]+)"[^>]+><\/a>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+> +<h1>(?P<title>[^<\[]+)(?:\[(?P<lang>[^\]]+)\])?</h1></a>.*?-->(?:.*?<br />)?\s*(?P<plot>[^<]+)' patronNext = r'<link rel="next" href="([^"]+)"' action = 'check' return locals() @@ -90,6 +90,7 @@ def episodios(item): s = 1 e = 0 sp = 0 + for match in item.url: if 'stagione' in match.lower(): find_season = support.match(match, patron=r'Stagione\s*(\d+)').match @@ -111,17 +112,17 @@ def episodios(item): s += 1 e = ep - 1 title = str(season) + 'x' + str(ep-e).zfill(2) + ' - ' + title - data += title + '|' + match + '\n' + data += title + '|' + match + '\|' else: title += ' #movie' - data += title + '|' + match + '\n' + data += title + '|' + match + '\|' def itemHook(item): if '#movie' in item.title: item.contentType='movie' item.title = item.title.replace(' #movie','') return item - patron = r'(?P<title>[^\|]+)\|(?P<url>[^\n]+)\n' + patron = r'(?P<title>[^\|]+)\|(?P<url>[^\|]+)\|' action = 'findvideos' return locals() diff --git a/channels/cineblog01.py b/channels/cineblog01.py index b38ca1b6..3137da60 100644 --- a/channels/cineblog01.py +++ b/channels/cineblog01.py @@ -76,7 +76,7 @@ def newest(categoria): def search(item, text): - logger.info(item, "search", text) + logger.info("search", text) if item.contentType == 'tvshow': item.url = host + '/serietv/' else: item.url = host try: @@ -127,40 +127,57 @@ def peliculas(item): @support.scrape def episodios(item): - # support.dbg() - data = support.match(item.url, headers=headers).data - support.info(data) - if 'TUTTA LA ' in data: - folderUrl = scrapertools.find_single_match(data, r'TUTTA LA \w+\s+(?:–|-)\s+<a href="?([^" ]+)') + @support.scrape + def folder(item, data): + """ + Quando c'è un link ad una cartelle di vcrypt contenente più stagioni + """ + actLike = 'episodios' + addVideolibrary = False + downloadEnabled = False + + folderUrl = scrapertools.find_single_match(data, r'TUTTA LA \w+\s+(?:–|-)\s+<a href="?([^" ]+)').replace( + 
'.net/', '.pw/') # vcrypt.pw non ha CF data = httptools.downloadpage(folderUrl).data - patron = r'<a href="(?P<url>[^"]+)[^>]+>(?P<title>[^<]+)' + patron = r'><a href="(?P<url>[^"]+)[^>]+>(?P<title>[^<]+)' sceneTitle = True + def itemHook(item): item.serieFolder = True return item - else: - patronBlock = r'(?P<block>sp-head[^>]+>\s*(?:STAGION[EI]\s*(?:DA\s*[0-9]+\s*A)?\s*[0-9]+|MINISERIE) - (?P<lang>[^-<]+)(?:- (?P<quality>[^-<]+))?.*?<\/div>.*?)spdiv[^>]*>' - patron = r'(?:/>|<p>|<strong>)(?P<url>.*?(?P<episode>[0-9]+(?:×|×)[0-9]+)\s*(?P<title2>.*?)?(?:\s*–|\s*-|\s*<).*?)(?:<\/p>|<br)' - def itemlistHook(itemlist): - title_dict = {} - itlist = [] - for item in itemlist: - item.title = re.sub(r'\.(\D)',' \\1', item.title) - match = support.match(item.title, patron=r'(\d+.\d+)').match.replace('x','') - item.order = match - if match not in title_dict: - title_dict[match] = item - elif match in title_dict and item.contentLanguage == title_dict[match].contentLanguage \ - or item.contentLanguage == 'ITA' and not title_dict[match].contentLanguage \ - or title_dict[match].contentLanguage == 'ITA' and not item.contentLanguage: - title_dict[match].url = item.url - else: - title_dict[match + '1'] = item + return locals() - for key, value in title_dict.items(): - itlist.append(value) + # debug=True + data = support.match(item.url, headers=headers).data + folderItemlist = folder(item, data) if 'TUTTA LA ' in data else [] - return sorted(itlist, key=lambda it: (it.contentLanguage, int(it.order))) + patronBlock = r'(?P<block>sp-head[^>]+>\s*(?:STAGION[EI]\s*(?:DA\s*[0-9]+\s*A)?\s*[0-9]+|MINISERIE) - (?P<lang>[^-<]+)(?:- (?P<quality>[^-<]+))?.*?<\/div>.*?)spdiv[^>]*>' + patron = r'(?:/>|<p>|<strong>)(?P<other>.*?(?P<episode>[0-9]+(?:×|×)[0-9]+)\s*(?P<title2>.*?)?(?:\s*–|\s*-|\s*<).*?)(?:<\/p>|<br)' + def itemlistHook(itemlist): + title_dict = {} + itlist = [] + for i in itemlist: + i.url = item.url + i.title = re.sub(r'\.(\D)',' \\1', i.title) + match = support.match(i.title, patron=r'(\d+.\d+)').match.replace('x','') + i.order = match + if match not in title_dict: + title_dict[match] = i + elif match in title_dict and i.contentLanguage == title_dict[match].contentLanguage \ + or i.contentLanguage == 'ITA' and not title_dict[match].contentLanguage \ + or title_dict[match].contentLanguage == 'ITA' and not i.contentLanguage: + title_dict[match].url = i.url + else: + title_dict[match + '1'] = i + + for key, value in title_dict.items(): + itlist.append(value) + + itlist = sorted(itlist, key=lambda it: (it.contentLanguage, int(it.order))) + + itlist.extend(folderItemlist) + + return itlist return locals() @@ -172,14 +189,14 @@ def findvideos(item): def load_links(itemlist, re_txt, desc_txt, quality=""): streaming = scrapertools.find_single_match(data, re_txt).replace('"', '') - support.info('STREAMING', streaming) - support.info('STREAMING=', streaming) + logger.debug('STREAMING', streaming) + logger.debug('STREAMING=', streaming) matches = support.match(streaming, patron = r'<td><a.*?href=([^ ]+) [^>]+>([^<]+)<').matches for scrapedurl, scrapedtitle in matches: logger.debug("##### findvideos %s ## %s ## %s ##" % (desc_txt, scrapedurl, scrapedtitle)) itemlist.append(item.clone(action="play", title=scrapedtitle, url=scrapedurl, server=scrapedtitle, quality=quality)) - support.info() + logger.debug() itemlist = [] @@ -211,34 +228,12 @@ def findvideos(item): def findvid_serie(item): - def load_vid_series(html, item, itemlist, blktxt): - support.info('HTML',html) - # Estrae i contenuti - matches = 
support.match(html, patron=r'<a href=(?:")?([^ "]+)[^>]+>(?!<!--)(.*?)(?:</a>|<img)').matches - for url, server in matches: - item = item.clone(action="play", title=server, url=url, server=server, quality=blktxt) - if 'swzz' in item.url: item.url = support.swzz_get_url(item) - itemlist.append(item) + logger.debug() + data = re.sub(r'((?:<p>|<strong>)?[^\d]*\d*(?:×|×)[0-9]+[^<]+)', '', item.other) - support.info() - - itemlist = [] - - data = re.sub(r'((?:<p>|<strong>)?[^\d]*\d*(?:×|×)[0-9]+[^<]+)', '' ,item.url) - - # Blocks with split - blk = re.split(r"(?:>\s*)?([A-Za-z\s0-9]*):\s*<", data, re.S) - blktxt = "" - for b in blk: - if b[0:3] == "a h" or b[0:4] == "<a h": - load_vid_series("<%s>" % b, item, itemlist, blktxt) - blktxt = "" - elif len(b.strip()) > 1: - blktxt = b.strip() - - return support.server(item, itemlist=itemlist) + return support.server(item, data=data) def play(item): - support.info() + logger.debug() return servertools.find_video_items(item, data=item.url) diff --git a/channels/cinemalibero.py b/channels/cinemalibero.py index a020b465..285106cc 100644 --- a/channels/cinemalibero.py +++ b/channels/cinemalibero.py @@ -59,13 +59,12 @@ def peliculas(item): patron = r'<a href="(?P<url>(?:https:\/\/.+?\/(?P<title>[^\/]+[a-zA-Z0-9\-]+)(?P<year>\d{4})))/".+?url\((?P<thumb>[^\)]+)\)">' elif item.contentType == 'tvshow': if item.args == 'update': - patron = r'<a href="(?P<url>[^"]+)"[^<]+?url\((?P<thumb>.+?)\)">\s<div class="titolo">(?P<title>.+?)(?: – Serie TV)?(?:\([sSuUbBiItTaA\-]+\))?[ ]?(?P<year>\d{4})?</div>[ ](?:<div class="genere">)?(?:[\w]+?\.?\s?[\s|S]?[\dx\-S]+?\s\(?(?P<lang>[iItTaA]+|[sSuUbBiItTaA\-]+)\)?\s?(?P<quality>[HD]+)?|.+?\(?(?P<lang2>[sSuUbBiItTaA\-]+)?\)?</div>)' + patron = r'<a href="(?P<url>[^"]+)"[^<]+?url\((?P<thumb>.+?)\)">\s+<div class="titolo">(?P<title>.+?)(?: – Serie TV)?(?:\([sSuUbBiItTaA\-]+\))?[ ]?(?P<year>\d{4})?</div>[ ](?:<div class="genere">)?(?:[\w]+?\.?\s?[\s|S]?[\dx\-S]+?\s\(?(?P<lang>[iItTaA]+|[sSuUbBiItTaA\-]+)\)?\s?(?P<quality>[HD]+)?|.+?\(?(?P<lang2>[sSuUbBiItTaA\-]+)?\)?</div>)' pagination = 25 else: patron = r'<a href="(?P<url>[^"]+)"\s*title="(?P<title>[^"\(]+)(?:"|\()(?:(?P<year>\d+)[^"]+)?.*?url\((?P<thumb>[^\)]+)\)(?:.*?<div class="voto">[^>]+>[^>]+>\s*(?P<rating>[^<]+))?.*?<div class="titolo">[^>]+>(?:<div class="genere">[^ ]*(?:\s\d+)?\s*(?:\()?(?P<lang>[^\)< ]+))?' else: - #search - patron = r'<div class="col-lg-3">[^>]+>[^>]+>\s<a href="(?P<url>[^"]+)".+?url\((?P<thumb>[^\)]+)\)">[^>]+>[^>]+>[^>]+>(?:[^>]+>)?\s?(?P<rating>[\d\.]+)?[^>]+>(?P<title>.+?)(?:[ ]\((?P<year>\d{4})\))?<[^>]+>[^>]+>(.?[\d\-x]+\s\(?(?P<lang>[sSuUbBiItTaA\-]+)?\)?\s?(?P<quality>[\w]+)?[|]?\s?(?:[fFiInNeE]+)?\s?\(?(?P<lang2>[sSuUbBiItTaA\-]+)?\)?)?' + patron = r'<div class="col-lg-3">[^>]+>[^>]+>\s*<a href="(?P<url>[^"]+)".+?url\((?P<thumb>[^\)]+)\)">[^>]+>[^>]+>[^>]+>(?:[^>]+>)?\s?(?P<rating>[\d\.]+)?[^>]+>(?P<title>.+?)(?:[ ]\((?P<year>\d{4})\))?<[^>]+>[^>]+>(.?[\d\-x]+\s\(?(?P<lang>[sSuUbBiItTaA\-]+)?\)?\s?(?P<quality>[\w]+)?[|]?\s?(?:[fFiInNeE]+)?\s?\(?(?P<lang2>[sSuUbBiItTaA\-]+)?\)?)?' 
     def itemHook(item):
         if 'sub' in item.contentLanguage.lower() and not 'ita' in item.contentLanguage.lower():
@@ -152,6 +151,7 @@ def newest(categoria):
     return itemlist
 
+
 def check(item):
     support.info()
     data = support.match(item.url, headers=headers).data
@@ -171,10 +171,11 @@ def check(item):
     else:
         item.contentType = 'movie'
-    item.url = data
+    item.data = data
     return findvideos(item)
+
 def findvideos(item):
     support.info()
-    item.url = item.url.replace('http://rapidcrypt.net/verys/', '').replace('http://rapidcrypt.net/open/', '') #blocca la ricerca
-    return support.server(item, data= item.url)
+    item.data = item.data.replace('http://rapidcrypt.net/verys/', '').replace('http://rapidcrypt.net/open/', '') #blocca la ricerca
+    return support.server(item, data=item.data)
diff --git a/channels/dreamsub.py b/channels/dreamsub.py
index ef6cf422..e0986595 100644
--- a/channels/dreamsub.py
+++ b/channels/dreamsub.py
@@ -89,9 +89,14 @@ def peliculas(item):
         patronBlock = r'<div id="%s"[^>]+>(?P<block>.*?)<div class="vistaDettagliata"' % item.args[1]
         patron = r'<li>\s*<a href="(?P<url>[^"]+)" title="(?P<title>[^"]+)" class="thumb">[^>]+>[^>]+>[^>]+>\s*[EePp]+\s*(?P<episode>\d+)[^>]+>\s<img src="(?P<thumb>[^"]+)"'
     else:
-        patron = r'<div class="showStreaming"> <b>(?P<title>[^<]+)[^>]+>[^>]+>\s*<span>Lingua:\s*(?P<lang>[^>]+)?>[<>br\s]+a href="(?P<url>[^"]+)"[^>]+>.*?--image-url:url\(/*(?P<thumb>[^\)]+).*?Anno di inizio</b>:\s*(?P<year>[0-9]{4})'
+        patron = r'<div class="showStreaming"> +<b>(?P<title>[^<]+)[^>]+>[^>]+>\s*<span>Lingua:\s*(?P<lang>[^>]+)?>[<>br\s]+a href="(?P<url>[^"]+)"[^>]+>.*?--image-url:url\(/*(?P<thumb>[^\)]+).*?Anno di inizio</b>:\s*(?P<year>[0-9]{4})'
         patronNext = '<li class="currentPage">[^>]+><li[^<]+<a href="([^"]+)">'
 
+    def itemHook(item):
+        if item.thumbnail and not item.thumbnail.startswith('http'):
+            item.thumbnail = 'http://' + item.thumbnail
+        return item
+
     return locals()
diff --git a/channels/dsda.py b/channels/dsda.py
index 476e3143..71fe4ea5 100644
--- a/channels/dsda.py
+++ b/channels/dsda.py
@@ -75,7 +75,7 @@ def peliculas(item):
         else:
             patron = r'<div class="cover-racolta">\s*<a href="(?P<url>[^"]+)"[^>]+>\s*<img width="[^"]+" height="[^"]+" src="(?P<thumb>[^"]+)".*?<p class="title[^>]+>(?P<title>[^<]+)<'
     else:
-        patron = r'<article[^>]+>[^>]+>[^>]+>(?:<img width="[^"]+" height="[^"]+" src="(?P<thumb>[^"]+)"[^>]+>)?.*?<a href="(?P<url>[^"]+)">\s*(?P<title>[^<]+)<[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>\s*<p>(?P<plot>[^<]+)<'
+        patron = r'<article[^>]+>[^>]+>[^>]+>(?:<img width="[^"]+" height="[^"]+" src="(?P<thumb>[^"]+)"[^>]+>)?.*?<a href="(?P<url>[^"]+)"[^>]*>\s*(?P<title>[^<]+)<[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>\s*<p>(?P<plot>[^<]+)<'
     patronNext = r'<a class="page-numbers next" href="([^"]+)">'
 
     # select category
@@ -121,14 +121,14 @@ def episodios(item):
     else:
         patron = r'class="title-episodio">(?P<title>[^<]+)<(?P<url>.*?)<p'
 
-    def itemlistHook(itemlist):
-        counter = 0
-        for item in itemlist:
-            episode = support.match(item.title, patron=r'\d+').match
-            if episode == '1':
-                counter += 1
-            item.title = support.typo(str(counter) + 'x' + episode.zfill(2) + support.re.sub(r'\[[^\]]+\](?:\d+)?','',item.title),'bold')
-        return itemlist
+    # def itemlistHook(itemlist):
+    #     counter = 0
+    #     for item in itemlist:
+    #         episode = support.match(item.title, patron=r'\d+').match
+    #         if episode == '1':
+    #             counter += 1
+    #         item.title = support.typo(str(counter) + 'x' + episode.zfill(2) + support.re.sub(r'\[[^\]]+\](?:\d+)?','',item.title),'bold')
+    #     return
itemlist return locals() diff --git a/channels/eurostreaming.py b/channels/eurostreaming.py index 78ce8f87..19b72cc2 100644 --- a/channels/eurostreaming.py +++ b/channels/eurostreaming.py @@ -12,7 +12,7 @@ def findhost(url): host = 'https://'+permUrl['location'].replace('https://www.google.it/search?q=site:', '') return host -host = support.config.get_channel_url(findhost) +host = support.config.get_channel_url() headers = [['Referer', host]] @support.menu diff --git a/channels/fastsubita.py b/channels/fastsubita.py index 088c3a43..c3cfe75f 100644 --- a/channels/fastsubita.py +++ b/channels/fastsubita.py @@ -25,14 +25,12 @@ host = config.get_channel_url() headers = [['Referer', host]] - - @support.menu def mainlist(item): Tvshow = [ - ('Aggiornamenti', ['', 'peliculas', '', 'update']), - ('Cerca... {bold}{TV}', ['','search']) + ('Aggiornamenti', ['', 'peliculas', 'update']), + ('Cerca... {bold}{TV}', ['', 'search']) ] # search = '' @@ -46,6 +44,16 @@ def peliculas(item): # support.dbg() deflang = 'Sub-ITA' + # è una singola pagina con tutti gli episodi + if item.grouped and not support.scrapertools.find_single_match(item.url, '-[0-9]+x[0-9]+-'): + item.grouped = False + return episodios_args(item) + + # ogni puntata è un articolo a se + if item.fulltitle: + item.url = host + '?s=' + item.fulltitle + actLike = 'episodios' + action = 'findvideos' blacklist = [''] if item.args == 'genres': @@ -53,32 +61,45 @@ def peliculas(item): patron = r'[^>]+>[^>]+>.+?href="(?P<url>[^"]+)[^>]>(?P<title>[^<]+)\s<' action = 'episodios' elif item.args == 'search': - patronBlock = r'</h1> </header>(?P<block>.*?)</main>' - patronMenu = r'(?:<img src="(?P<thumb>[^"]+)"[^>]+>)?[^>]+>[^>]+>[^>]+>[^>]+>[^>]+><a href="(?P<url>[^"]+)"[^>]+>(?:(?P<title>.+?)[ ](?P<episode>[\d&#;\d]+\d+|\d+..\d+)(?: \([a-zA-Z\s]+\) )(?:s\d+e\d+)?[ ]?(?:[&#\d;|.{3}]+)(?P<title2>[^&#\d;|^.{3}]+)(?:|.+?))<' + group = True + patronBlock = r'</header>(?P<block>.*?)</main>' + patron = '(?:<img[^>]+src="(?P<thumb>[^"]+)".*?)?<a href="(?P<url>[^"]+)"[^>]+>(?P<title>[^<]+?)(?:(?P<episode>\d+×\d+|\d+×\d+)|\[[sS](?P<season>[0-9]+)[^]]+\])\s?(?:(?P<lang>\([a-zA-Z\s]+\)) (?:[Ss]\d+[Ee]\d+)?\s?(?:[&#\d;|.{3}]+)(?P<title2>[^”[<]+)(?:&#\d)?)?' else: - patron = r'<div class="featured-thumb"> <a href="(?P<url>[^"]+)" title="(?:(?P<title>.+?)[ ]?(?P<episode>\d+×\d+).+?“(?P<title2>.+?)”).+?">' + # è una singola pagina con tutti gli episodi + if item.args != 'update' and not support.scrapertools.find_single_match(item.url, '-[0-9]+x[0-9]+-'): + return episodios_args(item) + patron = r'<div class="featured-thumb"> +<a href="(?P<url>[^"]+)" title="(?P<title>[^[]+)\[(?P<episode>\d+×\d+)?' patronBlock = r'<main id="main"[^>]+>(?P<block>.*?)<div id="secondary' + # def itemlistHook(itemlist): + # from core import scraper + # return scraper.sort_episode_list(itemlist) + patronNext = '<a class="next page-numbers" href="(.*?)">Successivi' - #debug = True + # debug = True + return locals() + + +def episodios_args(item): + actLike = 'episodios' + # support.dbg() + + deflang = 'Sub-ITA' + action = 'findvideos' + patron = '(?P<episode>\d+×\d+|\d+[×.]+\d+)(?:\s?\((?P<lang>[a-zA-Z ]+)\))?(?:\s[Ss]\d+[Ee]+\d+)? 
+(?:“|“)(?P<title2>.*?)(?:”|”).*?(?P<other>.*?)(?:/>|<p)' + patronBlock = r'<main id="main" class="site-main" role="main">(?P<block>.*?)</main>' + patronNext = '<a class="next page-numbers" href="(.*?)">Successivi' + + # debug = True return locals() @support.scrape def episodios(item): support.info(item) - #support.dbg() + return episodios_args(item) - deflang = 'Sub-ITA' - action = 'findvideos' - blacklist = [''] - patron = r'<div class="featured-thumb"> <a href="(?P<url>[^"]+)" title="(?:(?P<title>.+?)[ ]?(?P<episode>\d+×\d+|\d+[×.]+\d+).+?“(?P<title2>.+?)”).+?">' - patronBlock = r'<main id="main" class="site-main" role="main">(?P<block>.*?)</main>' - patronNext = '<a class="next page-numbers" href="(.*?)">Successivi' - - #debug = True - return locals() @support.scrape def genres(item): @@ -140,38 +161,54 @@ def newest(categoria): def findvideos(item): support.info('findvideos ->', item) - itemlist = [] - patronBlock = '<div class="entry-content">(?P<block>.*)<footer class="entry-footer">' patron = r'<a href="([^"]+)">' - html = support.match(item, patron=patron, patronBlock=patronBlock, headers=headers) - matches = html.matches - data= html.data - if item.args != 'episodios': - item.infoLabels['mediatype'] = 'episode' - for scrapedurl in matches: - if 'is.gd' in scrapedurl: - resp = httptools.downloadpage(scrapedurl, follow_redirects=False) - data += resp.headers.get("location", "") + '\n' + itemlist = [] + if item.other.startswith('http'): + resp = httptools.downloadpage(item.url, follow_redirects=False) + data = resp.headers.get("location", "") + '\n' + elif item.other: + html = support.match(item.other, patron=patron, headers=headers) + matches = html.matches + data = html.data + for scrapedurl in matches: + if 'is.gd' in scrapedurl: + resp = httptools.downloadpage(scrapedurl, follow_redirects=False) + data += resp.headers.get("location", "") + '\n' + elif not support.scrapertools.find_single_match(item.url, '-[0-9]+x[0-9]+-'): + return episodios(item) + else: + patronBlock = '<div class="entry-content">(?P<block>.*)<footer class="entry-footer">' + html = support.match(item, patron=patron, patronBlock=patronBlock, headers=headers) + matches = html.matches + data= html.data + + if item.args != 'episodios': + item.infoLabels['mediatype'] = 'episode' + for scrapedurl in matches: + if 'is.gd' in scrapedurl: + resp = httptools.downloadpage(scrapedurl, follow_redirects=False) + data += resp.headers.get("location", "") + '\n' itemlist += support.server(item, data) - data = support.match(item.url).data - patron = r'>Posted in <a href="https?://fastsubita.com/serietv/([^/]+)/(?:[^"]+)?"' - series = scrapertools.find_single_match(data, patron) - titles = support.typo(series.upper().replace('-', ' '), 'bold color kod') - goseries = support.typo("Vai alla Serie:", ' bold color kod') - itemlist.append( - item.clone(channel=item.channel, - title=goseries + titles, - fulltitle=titles, - show=series, - contentType='tvshow', - contentSerieName=series, - url=host+"/serietv/"+series, - action='episodios', - contentTitle=titles, - plot = "Vai alla Serie " + titles + " con tutte le puntate", - )) + # data = support.match(item.url).data + # patron = r'>Posted in <a href="https?://fastsubita.com/serietv/([^/]+)/(?:[^"]+)?"' + # series = scrapertools.find_single_match(data, patron) + # titles = support.typo(series.upper().replace('-', ' '), 'bold color kod') + # goseries = support.typo("Vai alla Serie:", ' bold color kod') + # itemlist.append( + # item.clone(channel=item.channel, + # # title=goseries + 
titles, + # title=titles, + # fulltitle=titles, + # show=series, + # contentType='tvshow', + # contentSerieName=series, + # url=host+"/serietv/"+series, + # action='episodios', + # contentTitle=titles, + # plot = "Vai alla Serie " + titles + " con tutte le puntate", + # )) return itemlist diff --git a/channels/film4k.json b/channels/film4k.json index fc92ab9d..2875fcfa 100644 --- a/channels/film4k.json +++ b/channels/film4k.json @@ -2,7 +2,7 @@ "id": "film4k", "name": "Film4k", "language": ["ita"], - "active": true, + "active": false, "thumbnail": "film4k.png", "banner": "film4k.png", "categories": ["tvshow", "movie", "anime"], diff --git a/channels/film4k.py b/channels/film4k.py index cc605718..3e82f3b3 100644 --- a/channels/film4k.py +++ b/channels/film4k.py @@ -29,7 +29,7 @@ def mainlist(item): def search(item, text): - logger.info() + logger.info('search', text) item.url = item.url + "/?s=" + text try: return support.dooplay_search(item) diff --git a/channels/filmpertutti.py b/channels/filmpertutti.py index eaeb8f3e..0f5d2016 100644 --- a/channels/filmpertutti.py +++ b/channels/filmpertutti.py @@ -99,7 +99,7 @@ def genres(item): def select(item): support.info() - patron=r'class="taxonomy category" ><span property="name">([^>]+)</span></a><meta property="position" content="2">' + patron=r'class="taxonomy category"\s*><span property="name">([^>]+)</span></a><meta property="position" content="2">' block = support.match(item.url, patron=patron,headers=headers).match if block.lower() != 'film': support.info('select = ### è una serie ###') diff --git a/channels/guardaseriecam.py b/channels/guardaseriecam.py index 1dc671de..cdab2da3 100644 --- a/channels/guardaseriecam.py +++ b/channels/guardaseriecam.py @@ -51,7 +51,7 @@ def episodios(item): def search(item, text): - support.info('search', item) + support.info('search', text) item.contentType = 'tvshow' itemlist = [] text = text.replace(' ', '+') @@ -66,5 +66,5 @@ def search(item, text): def findvideos(item): - logger.info("[guardaserie_live] findvideos") + logger.debug() return support.server(item, item.url) \ No newline at end of file diff --git a/channels/guardaserieicu.py b/channels/guardaserieicu.py index c92a0a42..4d60f951 100644 --- a/channels/guardaserieicu.py +++ b/channels/guardaserieicu.py @@ -28,7 +28,7 @@ def mainlist(item): @support.scrape def peliculas(item): patronBlock = r'movies-list movies-list-full(?P<block>.*?)footer>' - patron = r'<div data-movie-id[^>]+> <a href="(?P<url>[^"]+).*?<img data-original="(?P<thumbnail>[^"]+)[^>]+>[^>]+>[^>]+>(?P<title>[^<]+).*?jt-info[^>]+>[^:]+:\s*(?P<rating>[^<]+).*?rel="tag">(?P<year>\d+).*?jt-info">(?P<duration>\d+)' + patron = r'<div data-movie-id[^>]+>\s*<a href="(?P<url>[^"]+)"[^>]+>[^>]+>[^>]+><img src="(?P<thumbnail>[^"]+)[^>]+>[^>]+>[^>]+>[^>]+>(?P<title>[^<]+).*?jt-info[^>]+>[^:]+:\s*(?P<rating>[^<]+)[^>]+>[^>]+>[^>]+>(?P<year>\d*)[^>]+>[^>]+>[^>]+>(?P<duration>\d*)' patronNext = '<li class=.active.>.*?href=.(.*?).>' action = 'episodios' return locals() diff --git a/channels/ilgeniodellostreaming.py b/channels/ilgeniodellostreaming.py index d8d867c2..5f0cf03a 100644 --- a/channels/ilgeniodellostreaming.py +++ b/channels/ilgeniodellostreaming.py @@ -128,7 +128,7 @@ def search(item, text): info(text) itemlist = [] text = text.replace(' ', '+') - item.url = host + "/wp-json/wp/v2/search?search=" + text + item.url = host + "/wp-json/wp/v2/search?per_page=100&search=" + text results = support.httptools.downloadpage(item.url).json for r in results: title = r['title'] diff --git 
a/channels/italiaserie.py b/channels/italiaserie.py index b56d643b..e7a75716 100644 --- a/channels/italiaserie.py +++ b/channels/italiaserie.py @@ -2,22 +2,11 @@ # ------------------------------------------------------------ # Canale per italiaserie # ------------------------------------------------------------ -""" - - Problemi noti che non superano il test del canale: - Avvisi: - - - Ulteriori info: - -""" - -import re from core import support, httptools, scrapertools from core.item import Item -from platformcode import config +from platformcode import config, logger host = config.get_channel_url() headers = [['Referer', host]] @@ -25,11 +14,11 @@ headers = [['Referer', host]] @support.menu def mainlist(item): - support.info() - - tvshow = ['/category/serie-tv/', - ('Aggiornamenti', ['/ultimi-episodi/', 'peliculas', 'update']), - ('Generi', ['', 'category', 'Serie-Tv per Genere']) + tvshow = ['', + ('Aggiornamenti', ['/aggiornamento-episodi/', 'peliculas', 'update']), + ('Top 10', ['/top-10', 'peliculas', 'top']), + ('Netflix {tv submenu}', ['/genere/netflix', 'peliculas']), + ('A-Z', ['/lista-completa', 'peliculas', 'a-z']) ] return locals() @@ -37,44 +26,46 @@ def mainlist(item): @support.scrape def peliculas(item): - support.info() - action = 'episodios' - patron = r'<div class="post-thumb">\s*<a href="(?P<url>[^"]+)" '\ - 'title="(?P<title>[^"]+)">\s*<img src="(?P<thumb>[^"]+)"[^>]+>' + patron = r'<div class="post-thumb">\s*<a href="(?P<url>[^"]+)" title="(?P<title>[^"\[]+)[^>]+>\s*<img src="(?P<thumb>[^"]+)"[^>]+>' if item.args == 'update': - patron += r'.*?aj-eps">(?P<episode>.+?)[ ]?(?P<lang>Sub-Ita|Ita)</span>' + pagination = '' + patron = r'br />(?P<title>[^–]+)[^<]+<a href="(?P<url>[^"]+)">(?P<episode>[^ ]+)\s*(?P<title2>[^\(<]+)(?:\((?P<lang>[^\)]+))??' 
action = 'findvideos' + if item.args == 'top': + patron = r'<a href="(?P<url>[^"]+)">(?P<title>[^<]+)</a>[^>]+>[^>]+>[^>]+><img.*?src="(?P<thumb>[^"]+)"[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>[^>]+>:\s*(?P<rating>[^/]+)' + if item.args =='a-z': + pagination = '' + patron = r'<li ><a href="(?P<url>[^"]+)" title="(?P<title>[^"]+)"' patronNext = r'<a class="next page-numbers" href="(.*?)">' -## debug = True + def itemHook(item): + item.title = support.re.sub(r'<[^>]+>','', item.title) + return item + return locals() @support.scrape def episodios(item): - support.info() + res = support.match(item, patron=r'<a href="([^"]+)">>') + if res.match: data = support.match(res.match).data + else: data = res.data - patronBlock = r'</i> Stagione (?P<block>(?P<season>\d+)</div> '\ - '<div class="su-spoiler-content".*?)<div class="clearfix">' - patron = r'(?:(?P<season>\d+)?</div> <div class="su-spoiler-content"(:?.+?)?> )?'\ - '<div class="su-link-ep">\s+<a.*?href="(?P<url>[^"]+)".*?strong>[ ]'\ - '(?P<title>.+?)[ ](?P<episode>\d+-\d+|\d+)[ ](?:-\s+(?P<title2>.+?))?'\ - '[ ]?(?:(?P<lang>Sub-ITA))?[ ]?</strong>' + patronBlock = r'(?:Stagione|STAGIONE)\s*(?P<lang>[^<]+)?(?:</p>)?(?P<block>.*?)</p>' + patron = r'(?:p>|/>)(?P<title>[^–]+)–(?P<data>.*?)(?:<br|$)' - - #debug = True + def itemHook(item): + item.title = support.re.sub('<[^>]+>','', item.title) + return item return locals() @support.scrape def category(item): - support.info() - action = 'peliculas' patron = r'<li class="cat-item.*?href="(?P<url>[^"]+)".*?>(?P<title>.*?)</a>' - return locals() @@ -111,41 +102,28 @@ def newest(categoria): except: import sys for line in sys.exc_info(): - support.info("{0}".format(line)) + logger.error("{0}".format(line)) return [] return itemlist def findvideos(item): - support.info() + logger.debug() if item.args == 'update': itemlist = [] item.infoLabels['mediatype'] = 'episode' - data = httptools.downloadpage(item.url, headers=headers).data - data = re.sub('\n|\t', ' ', data) - data = re.sub(r'>\s+<', '> <', data) - url_video = scrapertools.find_single_match(data, r'<a rel="[^"]+" target="[^"]+" act="[^"]+"\s+href="([^"]+)" class="[^"]+-link".+?\d+.+?</strong> </a>', -1) - url_serie = scrapertools.find_single_match(data, r'<link rel="canonical" href="([^"]+)" />') + data = support.match(item.url, headers=headers).data + url_video = support.match(data, patron=r'<a rel="[^"]+" target="[^"]+" act="[^"]+"\s+href="([^"]+)" class="[^"]+-link".+?\d+.+?</strong> </a>').matches + url_serie = support.match(data, patron=r'<link rel="canonical" href="([^"]+)" />').matches goseries = support.typo("Vai alla Serie:", ' bold') series = support.typo(item.contentSerieName, ' bold color kod') itemlist = support.server(item, data=url_video) - itemlist.append( - Item(channel=item.channel, - title=goseries + series, - fulltitle=item.fulltitle, - show=item.show, - contentType='tvshow', - contentSerieName=item.contentSerieName, - url=url_serie, - action='episodios', - contentTitle=item.contentSerieName, - plot = goseries + series + "con tutte le puntate", - )) + itemlist.append(item.clone(title=goseries + series, contentType='tvshow', url=url_serie, action='episodios', plot = goseries + series + "con tutte le puntate")) return itemlist else: - return support.server(item, data=item.url) + return support.server(item, data=item.data) diff --git a/channels/mediasetplay.py b/channels/mediasetplay.py index cf205ddd..11e38eb1 100644 --- a/channels/mediasetplay.py +++ b/channels/mediasetplay.py @@ -87,24 +87,27 @@ def live(item): for key in 
it['tuningInstruction']['urn:theplatform:tv:location:any']: urls += key['publicUrls'] plot = support.typo(guide['currentListing']['mediasetlisting$epgTitle'],'bold') + '\n' + guide['currentListing']['mediasetlisting$shortDescription'] + '\n' + guide['currentListing']['description'] + '\n\n' + support.typo('A Seguire:' + guide['nextListing']['mediasetlisting$epgTitle'], 'bold') + itemlist.append(item.clone(title=support.typo(it['title'], 'bold'), - fulltitle=it['title'], - show=it['title'], - contentTitle=it['title'], - thumbnail=it['thumbnails']['channel_logo-100x100']['url'], - forcethumb = True, - urls=urls, - plot=plot, - action='play')) + fulltitle=it['title'], + show=it['title'], + contentTitle=it['title'], + thumbnail=it['thumbnails']['channel_logo-100x100']['url'], + forcethumb = True, + urls=urls, + plot=plot, + action='play')) return support.thumb(itemlist, live=True) def peliculas(item): support.info() itemlist = [] + titlelist = [] contentType = '' json = get_programs(item) for it in json: - if item.search.lower() in it['title'].lower(): + if item.search.lower() in it['title'].lower() and it['title'] not in titlelist: + titlelist.append(it['title']) if item.contentType == 'movie': action = 'findvideos' urls = [] @@ -184,8 +187,10 @@ def episodios(item): plot=it['longDescription'] if 'longDescription' in it else it['description'], urls=urls, url=it['mediasetprogram$pageUrl'])) - if episode: support.videolibrary(itemlist, item) - return sorted(itemlist, key=lambda it: it.title) + if episode: + itemlist = sorted(itemlist, key=lambda it: it.title) + support.videolibrary(itemlist, item) + return itemlist def findvideos(item): support.info() diff --git a/channels/metalvideo.py b/channels/metalvideo.py index 7db2eb3a..441b4922 100644 --- a/channels/metalvideo.py +++ b/channels/metalvideo.py @@ -13,7 +13,7 @@ headers = {'X-Requested-With': 'XMLHttpRequest'} def mainlist(item): item.url = host action = 'peliculas' - patronBlock = r'<ul class="dropdown-menu(?P<block>.*?)</ul> </div' + patronBlock = r'<ul class="dropdown-menu(?P<block>.*?)</ul>\s*</div' patron = r'<a href="(?P<url>[^"]+)"(?: class="")?>(?P<title>[^<]+)<' def itemHook(item): item.thumbnail = support.thumb('music') @@ -24,7 +24,7 @@ def mainlist(item): itemlist.append( support.Item( channel=item.channel, - title=support.typo('Cerca...', 'bold color kod'), + title=support.typo('Cerca...', 'bold'), contentType='music', url=item.url, action='search', diff --git a/channels/mondoserietv.py b/channels/mondoserietv.py index f3ae02b2..a3a9c194 100644 --- a/channels/mondoserietv.py +++ b/channels/mondoserietv.py @@ -111,7 +111,7 @@ def episodios(item): anime = True pagination = 50 patronBlock = r'<table>(?P<block>.*?)</table>' - patron = r'<tr><td><b>(?P<title>(?:\d+)?.*?)\s*(?:(?P<episode>(?:\d+x\d+|\d+)))\s*(?P<title2>[^<]+)(?P<url>.*?)<tr>' + patron = r'<tr><td><b>(?P<title>(?:\d+)?.*?)\s*(?:(?P<episode>(?:\d+x\d+|\d+)))\s*(?P<title2>[^<]+)(?P<data>.*?)<tr>' def itemHook(item): clear = support.re.sub(r'\[[^\]]+\]', '', item.title) if clear.isdigit(): @@ -121,4 +121,4 @@ def episodios(item): def findvideos(item): if item.contentType == 'movie': return support.server(item) - else: return support.server(item, item.url) + else: return support.server(item, item.data) diff --git a/channels/netfreex.py b/channels/netfreex.py index c40dd8cc..442145e7 100644 --- a/channels/netfreex.py +++ b/channels/netfreex.py @@ -29,7 +29,7 @@ def mainlist(item): def search(item, text): - logger.info() + logger.info('search', text) item.url = item.url + 
"/?s=" + text try: return support.dooplay_search(item) diff --git a/channels/paramount.py b/channels/paramount.py index 6a91cf5d..024618cf 100644 --- a/channels/paramount.py +++ b/channels/paramount.py @@ -73,27 +73,28 @@ def peliculas(item): pagination = pagination_values[support.config.get_setting('pagination','paramount')] item.url = host + '/api/search?activeTab=' + Type + '&searchFilter=site&pageNumber=0&rowsPerPage=10000' data = jsontools.load(support.match(item).data)['response']['items'] - + titles = [] for it in data: title = it['meta']['header']['title'] - support.info(title, it) - d = it['meta']['date'].split('/') if it['meta']['date'] else ['0000','00','00'] - date = int(d[2] + d[1] + d[0]) - if item.search.lower() in title.lower() \ - and 'stagione' not in it['url'] \ - and 'season' not in it['url'] \ - and title not in ['Serie TV']: - itemlist.append( - item.clone(title=support.typo(title,'bold'), - action=action, - fulltitle=title, - show=title, - contentTitle=title if it['type'] == 'movie' else '', - contentSerieName=title if it['type'] != 'movie' else '', - plot= it['meta']['description'] if 'description' in it['meta'] else '', - url=host + it['url'], - date=date, - thumbnail='https:' + it['media']['image']['url'] if 'url' in it['media']['image'] else item.thumbnail)) + if title not in titles: + titles.append(title) + d = it['meta']['date'].split('/') if it['meta']['date'] else ['0000','00','00'] + date = int(d[2] + d[1] + d[0]) + if item.search.lower() in title.lower() \ + and 'stagione' not in it['url'] \ + and 'season' not in it['url'] \ + and title not in ['Serie TV']: + itemlist.append( + item.clone(title=support.typo(title,'bold'), + action=action, + fulltitle=title, + show=title, + contentTitle=title if it['type'] == 'movie' else '', + contentSerieName=title if it['type'] != 'movie' else '', + plot= it['meta']['description'] if 'description' in it['meta'] else '', + url=host + it['url'], + date=date, + thumbnail='https:' + it['media']['image']['url'] if 'url' in it['media']['image'] else item.thumbnail)) itemlist.sort(key=lambda item: item.fulltitle) if not item.search: itlist = [] diff --git a/channels/raiplay.py b/channels/raiplay.py index cce4a064..66a0ae2a 100644 --- a/channels/raiplay.py +++ b/channels/raiplay.py @@ -3,9 +3,9 @@ # Canale per Rai Play # ------------------------------------------------------------ -import requests +import requests, sys from core import support -import sys +from platformcode import autorenumber if sys.version_info[0] >= 3: from concurrent import futures else: @@ -118,7 +118,6 @@ def replay(item): return itemlist def search(item, text): - # support.dbg() support.info() itemlist =[] try: @@ -224,13 +223,23 @@ def peliculas(item): def select(item): support.info() itemlist = [] - json = current_session.get(item.url).json()['blocks'] - for key in json: - itemlist.append(item.clone(title = support.typo(key['name'],'bold'), url = key['sets'], action = 'episodios')) - if len(itemlist) == 1: - return episodios(itemlist[0]) + if type(item.url) in [list, dict]: + json = item.url else: - return itemlist + json = current_session.get(item.url).json() + if 'blocks' in json: + json = json['blocks'] + season = '' + for key in json: + if item.fulltitle in key['name']: season = key['name'].replace(item.fulltitle, '').strip() + if not season.isdigit(): season = '' + itemlist.append(item.clone(title = support.typo(key['name'],'bold'), season = season, url = key['sets'], action = 'select')) + if len(itemlist) == 1: + return episodios(itemlist[0]) + 
else: + for key in item.url: + itemlist.append(item.clone(title = support.typo(key['name'], 'bold'), url = getUrl(key['path_id']), contentType = 'tvshow', action = 'episodios')) + return itemlist def episodios(item): @@ -241,6 +250,8 @@ def episodios(item): itemlist.append(item.clone(title = support.typo(key['name'], 'bold'), url = getUrl(key['path_id']), contentType = 'tvshow', action = 'episodios')) elif type(item.url) in [list, dict]: + for key in item.url: + load_episodes(key, item) with futures.ThreadPoolExecutor() as executor: itlist = [executor.submit(load_episodes, key, item) for key in item.url] for res in futures.as_completed(itlist): @@ -254,6 +265,7 @@ def episodios(item): itemlist = sorted(itemlist, key=lambda it: it.title) else: + date = '' if type(item.url) in [list, dict]: item.url = getUrl(item.url[0]['path_id']) json = current_session.get(item.url).json()['items'] for key in json: @@ -263,15 +275,30 @@ def episodios(item): episode = ep[1].zfill(2) title = support.re.sub(r'(?:St\s*\d+)?\s*Ep\s*\d+','',key['subtitle']) title = season + 'x' + episode + (' - ' + title if not title.startswith(' ') else title if title else '') + elif item.season and support.match(item.title.lower(), patron =r'(puntate)').match: + title = key['subtitle'].strip() + if not title: title = key['name'] + date = support.match(title, patron=r'(\d+/\d+/\d+)').match + if date: + date = title.split('/') + date = date[2][-2] + '/' + date[1] + '/' + date[0] + else: title = key['subtitle'].strip() - # title = key['subtitle'].strip() if not title: title = key['name'] itemlist.append(item.clone(title = support.typo(title, 'bold'), action = 'findvideos', VL=True if ep else False, plot = key['description'], - fanart = getUrl(key['images']['landscape']), url = key['video_url'], contentType = 'episode')) + fanart = getUrl(key['images']['landscape']), url = key['video_url'], contentType = 'episode', date=date)) + + if item.season and support.match(item.title.lower(), patron =r'(puntate)').match: + itemlist = sorted(itemlist, key=lambda it: it.date) + for i, it in enumerate(itemlist): + episode = str(i + 1) + it.title = support.typo(item.season + 'x' + episode, 'bold') + (' - ' + it.title) if itemlist and itemlist[0].VL: support.videolibrary(itemlist, item) + if itemlist and not support.match(itemlist[0].title, patron=r'[Ss]?(\d+)(?:x|_|\.|\s+)[Ee]?[Pp]?(\d+)').match: + autorenumber.start(itemlist, item) return itemlist diff --git a/channels/seriehd.py b/channels/seriehd.py index 9d75c8b5..55b3c64b 100644 --- a/channels/seriehd.py +++ b/channels/seriehd.py @@ -107,10 +107,8 @@ def episodios(item): # for i, season in enumerate(seasons.matches): # data += get_season(seasons.data if i == 0 else '', season[0], season[1]) import sys - if sys.version_info[0] >= 3: - from concurrent import futures - else: - from concurrent_py2 import futures + if sys.version_info[0] >= 3: from concurrent import futures + else: from concurrent_py2 import futures with futures.ThreadPoolExecutor() as executor: thL = [] for i, season in enumerate(seasons.matches): @@ -118,12 +116,12 @@ def episodios(item): for res in futures.as_completed(thL): if res.result(): data += res.result() - - patron = r'(?P<title>[^\|]+)\|(?P<url>[^\n]+)\n' + # debug = True + patron = r'(?P<season>\d+)x(?P<episode>\d+)\s*-\s*(?P<title>[^\|]+)\|(?P<url>[^ ]+)' action = 'findvideos' def itemlistHook(itemlist): - itemlist.sort(key=lambda item: int(support.re.sub(r'\[[^\]]+\]','',item.title).split('x')[0])) + itemlist.sort(key=lambda item: (item.infoLabels['season'], 
item.infoLabels['episode'])) return itemlist return locals() diff --git a/channels/serietvonline.py b/channels/serietvonline.py index 6b454a1b..69d34d9f 100644 --- a/channels/serietvonline.py +++ b/channels/serietvonline.py @@ -25,7 +25,7 @@ def findhost(url): host = support.match(url, patron=r'href="([^"]+)">\s*cliccando qui').matches[-1] return host -host = config.get_channel_url(findhost) +host = config.get_channel_url() headers = [['Referer', host]] diff --git a/channels/serietvsubita.py b/channels/serietvsubita.py index ce3f5cab..dd6fe970 100644 --- a/channels/serietvsubita.py +++ b/channels/serietvsubita.py @@ -337,7 +337,8 @@ def list_az(item): item.clone(action="lista_serie", data='\n\n'.join(alphabet[letter]), title=letter, - fulltitle=letter)) + fulltitle=letter, + args='')) return itemlist diff --git a/channels/toonitalia.py b/channels/toonitalia.py index e6362337..27e72b3a 100644 --- a/channels/toonitalia.py +++ b/channels/toonitalia.py @@ -4,6 +4,9 @@ # ------------------------------------------------------------ from core import support +import sys +if sys.version_info[0] >= 3: from concurrent import futures +else: from concurrent_py2 import futures host = support.config.get_channel_url() @@ -26,13 +29,23 @@ def mainlist(item): return locals() -def search(item, texto): - support.info(texto) - item.args='search' - item.contentType='tvshow' - item.url = host + '/wp-json/wp/v2/search?search=' + texto +def search(item, text): + support.info(text) + # item.args='search' + item.text = text + itemlist = [] + try: - return peliculas(item) + # item.url = host + '/lista-serie-tv/' + # item.contentType = 'tvshow' + # itemlist += peliculas(item) + with futures.ThreadPoolExecutor() as executor: + for par in [['/lista-serie-tv/', 'tvshow', ''],['/lista-anime-2/', 'tvshow', ''], ['/lista-anime-sub-ita/', 'tvshow', 'sub'], ['/lista-film-animazione/', 'movie', '']]: + item.url = host + par[0] + item.contentType = par[1] + item.args = par[2] + itemlist += executor.submit(peliculas, item).result() + return itemlist # Continua la ricerca in caso di errore except: import sys @@ -59,6 +72,7 @@ def newest(categoria): @support.scrape def peliculas(item): + search = item.text pagination = '' anime = True action = 'findvideos' if item.contentType == 'movie' else 'episodios' diff --git a/channels/vedohd.py b/channels/vedohd.py index 619135de..ffb81a21 100644 --- a/channels/vedohd.py +++ b/channels/vedohd.py @@ -30,7 +30,7 @@ def mainlist(item): def search(item, text): - logger.info("[vedohd.py] " + item.url + " search " + text) + logger.info("search",text) item.url = item.url + "/?s=" + text return support.dooplay_search(item, blacklist) @@ -44,7 +44,6 @@ def findvideos(item): itemlist = [] for link in support.dooplay_get_links(item, host): if link['title'] != 'Trailer': - logger.info(link['title']) server, quality = scrapertools.find_single_match(link['title'], '([^ ]+) ?(HD|3D)?') if quality: title = server + " [COLOR blue][" + quality + "][/COLOR]" @@ -63,7 +62,7 @@ def menu(item): def play(item): - logger.info("[vedohd.py] play") + logger.debug() data = support.swzz_get_url(item) diff --git a/channels/vvvvid.py b/channels/vvvvid.py index d5da84aa..20b0248a 100644 --- a/channels/vvvvid.py +++ b/channels/vvvvid.py @@ -4,19 +4,21 @@ # ---------------------------------------------------------- import requests, sys from core import support, tmdb -from platformcode import autorenumber +from platformcode import autorenumber, logger host = support.config.get_channel_url() # Creating persistent session 
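For reference, the reworked toonitalia search() above fans the same query out over the channel's four listing pages and merges the partial results. A minimal, self-contained sketch of that pattern (scrape_list is a hypothetical stand-in for the call to peliculas(); the real channel mutates item and submits inside the loop):

import sys
if sys.version_info[0] >= 3: from concurrent import futures
else: from concurrent_py2 import futures

def search_all_lists(host, text, scrape_list):
    # (path, contentType, args) mirror the four lists queried by toonitalia
    sections = [('/lista-serie-tv/', 'tvshow', ''),
                ('/lista-anime-2/', 'tvshow', ''),
                ('/lista-anime-sub-ita/', 'tvshow', 'sub'),
                ('/lista-film-animazione/', 'movie', '')]
    itemlist = []
    with futures.ThreadPoolExecutor() as executor:
        jobs = [executor.submit(scrape_list, host + path, ctype, args, text)
                for path, ctype, args in sections]
        for job in futures.as_completed(jobs):
            itemlist.extend(job.result())
    return itemlist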
current_session = requests.Session() -headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:62.0) Gecko/20100101 Firefox/62.0'} +headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36'} # Getting conn_id token from vvvvid and creating payload login_page = host + '/user/login' try: - conn_id = current_session.get(login_page, headers=headers).json()['data']['conn_id'] + res = current_session.get(login_page, headers=headers) + conn_id = res.json()['data']['conn_id'] payload = {'conn_id': conn_id} + headers = {'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.8.1.14) Gecko/20080404 Firefox/2.0.0.14', 'Cookie': res.headers['set-cookie']} except: conn_id = '' @@ -126,7 +128,7 @@ def peliculas(item): if 'category' in item.args: support.thumb(itemlist,genre=True) elif not 'filter' in item.args: - if item.contentType != 'movie': autorenumber.renumber(itemlist) + if item.contentType != 'movie': autorenumber.start(itemlist) tmdb.set_infoLabels_itemlist(itemlist, seekTmdb=True) return itemlist @@ -165,10 +167,10 @@ def episodios(item): if type(title) == tuple: title = title[0] itemlist.append( item.clone(title = title, - url= host + show_id + '/season/' + str(key['season_id']) + '/', + url= host + show_id + '/season/' + str(key['season_id']), action= 'findvideos', video_id= key['video_id'])) - autorenumber.renumber(itemlist, item, 'bold') + autorenumber.start(itemlist, item) if autorenumber.check(item) == True \ or support.match(itemlist[0].title, patron=r"(\d+x\d+)").match: support.videolibrary(itemlist,item) @@ -181,11 +183,12 @@ def findvideos(item): json_file = current_session.get(item.url, headers=headers, params=payload).json() item.url = host + str(json_file['data'][0]['show_id']) + '/season/' + str(json_file['data'][0]['episodes'][0]['season_id']) + '/' item.video_id = json_file['data'][0]['episodes'][0]['video_id'] - + logger.info('url=',item.url) json_file = current_session.get(item.url, headers=headers, params=payload).json() for episode in json_file['data']: + logger.info(episode) if episode['video_id'] == item.video_id: - url = vvvvid_decoder.dec_ei(episode['embed_info'] or episode['embed_info']) + url = vvvvid_decoder.dec_ei(episode['embed_info'] or episode['embed_info_sd']) if 'youtube' in url: item.url = url item.url = url.replace('manifest.f4m','master.m3u8').replace('http://','https://').replace('/z/','/i/') if 'https' not in item.url: diff --git a/channels/youtube_channel.py b/channels/youtube_channel.py index 66409814..80d57a85 100644 --- a/channels/youtube_channel.py +++ b/channels/youtube_channel.py @@ -14,15 +14,15 @@ YOUTUBE_V3_API_KEY = "AIzaSyCjsmBT0JZy1RT-PLwB-Zkfba87sa2inyI" def youtube_api_call(method, parameters): - logger.info("method=" + method + ", parameters=" + repr(parameters)) + logger.debug("method=" + method + ", parameters=" + repr(parameters)) encoded_parameters = urllib.urlencode(parameters) url = "https://www.googleapis.com/youtube/v3/" + method + "?" 
+ encoded_parameters + "&key=" + YOUTUBE_V3_API_KEY; - logger.info("url=" + url) + logger.debug("url=" + url) data = httptools.downloadpage(url).data - logger.info("data=" + data) + logger.debug("data=" + data) json_object = jsontools.load(data) @@ -51,13 +51,13 @@ def youtube_get_playlist_items(playlist_id, pageToken=""): # Show all YouTube playlists for the selected channel def playlists(item, channel_id, pageToken=""): - logger.info() + logger.debug() itemlist = [] json_object = youtube_get_user_playlists(channel_id, pageToken) for entry in json_object["items"]: - logger.info("entry=" + repr(entry)) + logger.debug("entry=" + repr(entry)) title = entry["snippet"]["title"] plot = entry["snippet"]["description"] @@ -85,13 +85,13 @@ def latest_videos(item, channel_id): # Show all YouTube videos for the selected playlist def videos(item, pageToken=""): - logger.info() + logger.debug() itemlist = [] json_object = youtube_get_playlist_items(item.url, pageToken) for entry in json_object["items"]: - logger.info("entry=" + repr(entry)) + logger.debug("entry=" + repr(entry)) title = entry["snippet"]["title"] plot = entry["snippet"]["description"] diff --git a/channelselector.py b/channelselector.py index 9f810eff..649772c5 100644 --- a/channelselector.py +++ b/channelselector.py @@ -9,7 +9,7 @@ downloadenabled = addon.getSetting('downloadenabled') def getmainlist(view="thumb_"): - logger.info() + logger.debug() itemlist = list() if config.dev_mode(): @@ -62,14 +62,14 @@ def getmainlist(view="thumb_"): def getchanneltypes(view="thumb_"): - logger.info() + logger.debug() # Category List channel_types = ["movie", "tvshow", "anime", "documentary", "vos", "live", "torrent", "music"] #, "direct" # Channel Language channel_language = auto_filter() - logger.info("channel_language=%s" % channel_language) + logger.debug("channel_language=%s" % channel_language) # Build Itemlist itemlist = list() @@ -92,7 +92,7 @@ def getchanneltypes(view="thumb_"): def filterchannels(category, view="thumb_"): from core import channeltools - logger.info('Filter Channels ' + category) + logger.debug('Filter Channels ' + category) channelslist = [] @@ -103,14 +103,14 @@ def filterchannels(category, view="thumb_"): appenddisabledchannels = True channel_path = os.path.join(config.get_runtime_path(), 'channels', '*.json') - logger.info("channel_path = %s" % channel_path) + logger.debug("channel_path = %s" % channel_path) channel_files = glob.glob(channel_path) - logger.info("channel_files found %s" % (len(channel_files))) + logger.debug("channel_files found %s" % (len(channel_files))) # Channel Language channel_language = auto_filter() - logger.info("channel_language=%s" % channel_language) + logger.debug("channel_language=%s" % channel_language) for channel_path in channel_files: logger.debug("channel in for = %s" % channel_path) @@ -221,7 +221,7 @@ def get_thumb(thumb_name, view="thumb_"): def set_channel_info(parameters): - logger.info() + logger.debug() info = '' language = '' diff --git a/core/autoplay.py b/core/autoplay.py index f6707ffb..a8d141d6 100644 --- a/core/autoplay.py +++ b/core/autoplay.py @@ -29,7 +29,7 @@ def start(itemlist, item): if item.global_search: return itemlist - logger.info() + logger.debug() global PLAYED PLAYED = False @@ -274,7 +274,7 @@ def start(itemlist, item): def play_multi_channel(item, itemlist): - logger.info() + logger.debug() start(itemlist, item) diff --git a/core/channeltools.py b/core/channeltools.py index 90a2dd74..ff5a0442 100644 --- a/core/channeltools.py +++ b/core/channeltools.py 
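This hunk and the following ones in channelselector.py and core/ demote routine trace messages from logger.info to logger.debug, so the default log only carries user-relevant events, while genuine failures move to logger.error. A minimal sketch of the intended convention, using the repo's httptools and logger (fetch_page itself is illustrative):

from core import httptools
from platformcode import logger

def fetch_page(url):
    logger.debug('url=' + url)                        # routine tracing: debug
    data = httptools.downloadpage(url).data
    if not data:
        logger.error('empty response from ' + url)    # failures: error
    return data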
@@ -15,7 +15,7 @@ default_file = dict() remote_path = 'https://raw.githubusercontent.com/kodiondemand/media/master/' def is_enabled(channel_name): - logger.info("channel_name=" + channel_name) + logger.debug("channel_name=" + channel_name) return get_channel_parameters(channel_name)["active"] and get_channel_setting("enabled", channel=channel_name, default=True) @@ -27,7 +27,7 @@ def get_channel_parameters(channel_name): if channel_name not in dict_channels_parameters: try: channel_parameters = get_channel_json(channel_name) - # logger.debug(channel_parameters) + logger.debug(channel_parameters) if channel_parameters: # name and default changes channel_parameters["title"] = channel_parameters.pop("name") + (' [DEPRECATED]' if 'deprecated' in channel_parameters and channel_parameters['deprecated'] else '') @@ -87,7 +87,7 @@ def get_channel_parameters(channel_name): def get_channel_json(channel_name): - # logger.info("channel_name=" + channel_name) + logger.debug("channel_name=" + channel_name) from core import filetools channel_json = None try: @@ -101,9 +101,9 @@ def get_channel_json(channel_name): channel_name + ".json") if filetools.isfile(channel_path): - # logger.info("channel_data=" + channel_path) + logger.debug("channel_data=" + channel_path) channel_json = jsontools.load(filetools.read(channel_path)) - # logger.info("channel_json= %s" % channel_json) + logger.debug("channel_json= %s" % channel_json) except Exception as ex: template = "An exception of type %s occured. Arguments:\n%r" @@ -114,7 +114,7 @@ def get_channel_json(channel_name): def get_channel_controls_settings(channel_name): - # logger.info("channel_name=" + channel_name) + logger.debug("channel_name=" + channel_name) dict_settings = {} # import web_pdb; web_pdb.set_trace() # list_controls = get_channel_json(channel_name).get('settings', list()) @@ -137,7 +137,7 @@ def get_lang(channel_name): if hasattr(channel, 'list_language'): for language in channel.list_language: list_language.append(language) - logger.info(list_language) + logger.debug(list_language) else: sub = False langs = [] diff --git a/core/downloader.py b/core/downloader.py index b714b474..919f9469 100644 --- a/core/downloader.py +++ b/core/downloader.py @@ -253,7 +253,7 @@ class Downloader(object): self.file.seek(2 ** 31, 0) except OverflowError: self._seekable = False - logger.info("Cannot do seek() or tell() in files larger than 2GB") + logger.error("Cannot do seek() or tell() in files larger than 2GB") self.__get_download_info__() diff --git a/core/filetools.py b/core/filetools.py index 554a2671..d99662bc 100644 --- a/core/filetools.py +++ b/core/filetools.py @@ -8,7 +8,6 @@ from __future__ import division # from builtins import str import io -from future.builtins import range from past.utils import old_div import sys PY3 = False @@ -814,7 +813,7 @@ def remove_tags(title): @rtype: str @return: string without tags """ - logger.info() + logger.debug() title_without_tags = scrapertools.find_single_match(title, r'\[color .+?\](.+)\[\/color\]') @@ -832,7 +831,7 @@ def remove_smb_credential(path): @return: chain without credentials @rtype: str """ - logger.info() + logger.debug() if not scrapertools.find_single_match(path, r'(^\w+:\/\/)'): return path diff --git a/core/filtertools.py b/core/filtertools.py index c03ffe28..09f016db 100644 --- a/core/filtertools.py +++ b/core/filtertools.py @@ -234,7 +234,7 @@ def get_link(list_item, item, list_language, list_quality=None, global_filter_la @return: Item list @rtype: list[Item] """ - logger.info() + logger.debug() 
# if the required fields are None we leave if list_item is None or item is None: @@ -274,7 +274,7 @@ def get_links(list_item, item, list_language, list_quality=None, global_filter_l @return: lista de Item @rtype: list[Item] """ - logger.info() + logger.debug() # if the required fields are None we leave @@ -362,7 +362,7 @@ def no_filter(item): @return: lista de enlaces @rtype: list[Item] """ - logger.info() + logger.debug() itemlist = [] for i in item.list_item_all: @@ -384,7 +384,7 @@ def mainlist(channel, list_language, list_quality): @return: Item list @rtype: list[Item] """ - logger.info() + logger.debug() itemlist = [] dict_series = jsontools.get_node_from_file(channel, TAG_TVSHOW_FILTER) @@ -425,8 +425,8 @@ def config_item(item): @param item: item @type item: Item """ - logger.info() - logger.info("item %s" % item.tostring()) + logger.debug() + logger.debug("item %s" % item.tostring()) # WE GET THE JSON DATA dict_series = jsontools.get_node_from_file(item.from_channel, TAG_TVSHOW_FILTER) @@ -448,8 +448,8 @@ def config_item(item): else: lang_selected = dict_series.get(tvshow, {}).get(TAG_LANGUAGE, default_lang) list_quality = dict_series.get(tvshow, {}).get(TAG_QUALITY_ALLOWED, [x.lower() for x in item.list_quality]) - # logger.info("lang selected {}".format(lang_selected)) - # logger.info("list quality {}".format(list_quality)) + # logger.debug("lang selected {}".format(lang_selected)) + # logger.debug("list quality {}".format(list_quality)) active = True custom_button = {'visible': False} @@ -516,7 +516,7 @@ def config_item(item): def delete(item, dict_values): - logger.info() + logger.debug() if item: dict_series = jsontools.get_node_from_file(item.from_channel, TAG_TVSHOW_FILTER) @@ -554,7 +554,7 @@ def save(item, dict_data_saved): @param dict_data_saved: dictionary with saved data @type dict_data_saved: dict """ - logger.info() + logger.debug() if item and dict_data_saved: logger.debug('item: %s\ndatos: %s' % (item.tostring(), dict_data_saved)) @@ -564,7 +564,7 @@ def save(item, dict_data_saved): dict_series = jsontools.get_node_from_file(item.from_channel, TAG_TVSHOW_FILTER) tvshow = item.show.strip().lower() - logger.info("Data is updated") + logger.debug("Data is updated") list_quality = [] for _id, value in list(dict_data_saved.items()): @@ -599,7 +599,7 @@ def save_from_context(item): @param item: item @type item: item """ - logger.info() + logger.debug() dict_series = jsontools.get_node_from_file(item.from_channel, TAG_TVSHOW_FILTER) tvshow = item.show.strip().lower() @@ -630,7 +630,7 @@ def delete_from_context(item): @param item: item @type item: item """ - logger.info() + logger.debug() # We come from get_links and no result has been obtained, in context menu and we delete if item.to_channel != "": diff --git a/core/httptools.py b/core/httptools.py index a06eb9ac..ae7de722 100755 --- a/core/httptools.py +++ b/core/httptools.py @@ -449,7 +449,7 @@ def downloadpage(url, **opt): if not 'api.themoviedb' in url and not opt.get('alfa_s', False): show_infobox(info_dict) - + if not config.get_setting("debug"): logger.info('Page URL:',url) return type('HTTPResponse', (), response) def fill_fields_pre(url, opt, proxy_data, file_name): diff --git a/core/jsontools.py b/core/jsontools.py index b2fed3a6..6bfe960f 100644 --- a/core/jsontools.py +++ b/core/jsontools.py @@ -11,22 +11,22 @@ from inspect import stack try: import json except: - logger.info("json included in the interpreter **NOT** available") + logger.error("json included in the interpreter **NOT** available") try: import 
simplejson as json except: - logger.info("simplejson included in the interpreter **NOT** available") + logger.error("simplejson included in the interpreter **NOT** available") try: from lib import simplejson as json except: - logger.info("simplejson in lib directory **NOT** available") + logger.error("simplejson in lib directory **NOT** available") logger.error("A valid JSON parser was not found") json = None else: logger.info("Using simplejson in the lib directory") else: - logger.info("Using simplejson included in the interpreter") + logger.error("Using simplejson included in the interpreter") # ~ else: # ~ logger.info("Usando json incluido en el interprete") diff --git a/core/scraper.py b/core/scraper.py index 2515a77d..d5aa9ac2 100644 --- a/core/scraper.py +++ b/core/scraper.py @@ -21,6 +21,7 @@ def find_and_set_infoLabels(item): :param item: :return: Boolean indicating if the 'code' could be found """ + # from core.support import dbg;dbg() global scraper scraper = None # logger.debug("item:\n" + item.tostring('\n')) @@ -32,7 +33,7 @@ def find_and_set_infoLabels(item): # Get the default Scraper of the configuration according to the content type if item.contentType == "movie": scraper_actual = ['tmdb'][config.get_setting("scraper_movies", "videolibrary")] - tipo_contenido = config.get_localized_string(70283) + tipo_contenido = "movie" title = item.contentTitle # Complete list of options for this type of content list_opciones_cuadro.append(scrapers_disponibles['tmdb']) @@ -61,7 +62,7 @@ def find_and_set_infoLabels(item): # Check if there is a 'code' if scraper_result and item.infoLabels['code']: # correct code - logger.info("Identificador encontrado: %s" % item.infoLabels['code']) + logger.debug("Identificador encontrado: %s" % item.infoLabels['code']) scraper.completar_codigos(item) return True elif scraper_result: @@ -71,57 +72,18 @@ def find_and_set_infoLabels(item): # Content not found msg = config.get_localized_string(60228) % title - logger.info(msg) + logger.debug(msg) # Show box with other options: - if scrapers_disponibles[scraper_actual] in list_opciones_cuadro: - list_opciones_cuadro.remove(scrapers_disponibles[scraper_actual]) - index = platformtools.dialog_select(msg, list_opciones_cuadro) - - if index < 0: + item = platformtools.dialog_info(item, scraper_actual) + if item.exit: logger.debug("You have clicked 'cancel' in the window '%s'" % msg) return False - elif index == 0: - # Ask the title - title = platformtools.dialog_input(title, config.get_localized_string(60229) % tipo_contenido) - if title: - if item.contentType == "movie": - item.contentTitle = title - else: - item.contentSerieName = title - else: - logger.debug("I clicked 'cancel' in the window 'Enter the correct name'") - return False - - elif index == 1: - # You have to create a dialog box to enter the data - logger.info("Complete information") - if cuadro_completar(item): - # correct code - logger.info("Identifier found: %s" % str(item.infoLabels['code'])) - return True - # raise - - elif list_opciones_cuadro[index] in list(scrapers_disponibles.values()): - # Get the name of the scraper module - for k, v in list(scrapers_disponibles.items()): - if list_opciones_cuadro[index] == v: - if scrapers_disponibles[scraper_actual] not in list_opciones_cuadro: - list_opciones_cuadro.append(scrapers_disponibles[scraper_actual]) - # We import the scraper k - scraper_actual = k - try: - scraper = None - scraper = __import__('core.%s' % scraper_actual, fromlist=["core.%s" % scraper_actual]) - except ImportError: - 
exec("import core." + scraper_actual + " as scraper_module") - break - logger.error("Error importing the scraper module %s" % scraper_actual) def cuadro_completar(item): - logger.info() + logger.debug() global dict_default dict_default = {} @@ -234,7 +196,7 @@ def get_nfo(item): @rtype: str @return: """ - logger.info() + logger.debug() if "infoLabels" in item and "noscrap_id" in item.infoLabels: # Create the xml file with the data obtained from the item since there is no active scraper info_nfo = '<?xml version="1.0" encoding="UTF-8" standalone="yes" ?>' diff --git a/core/scrapertools.py b/core/scrapertools.py index 3df0dab8..aa926c8b 100644 --- a/core/scrapertools.py +++ b/core/scrapertools.py @@ -34,7 +34,7 @@ from platformcode import logger def printMatches(matches): i = 0 for match in matches: - logger.info("%d %s" % (i, match)) + logger.debug("%d %s" % (i, match)) i = i + 1 @@ -447,7 +447,7 @@ def get_season_and_episode(title): except: pass - logger.info("'" + title + "' -> '" + filename + "'") + logger.debug("'" + title + "' -> '" + filename + "'") return filename diff --git a/core/servertools.py b/core/servertools.py index 36e0ef74..bd5877c0 100644 --- a/core/servertools.py +++ b/core/servertools.py @@ -47,7 +47,7 @@ def find_video_items(item=None, data=None): @return: returns the itemlist with the results @rtype: list """ - logger.info() + logger.debug() itemlist = [] # Download the page @@ -97,7 +97,7 @@ def get_servers_itemlist(itemlist, fnc=None, sort=False): # Walk the patterns for pattern in server_parameters.get("find_videos", {}).get("patterns", []): - logger.info(pattern["pattern"]) + logger.debug(pattern["pattern"]) # Scroll through the results for match in re.compile(pattern["pattern"], re.DOTALL).finditer( "\n".join([item.url.split('|')[0] for item in itemlist if not item.server])): @@ -144,7 +144,7 @@ def findvideos(data, skip=False): return some link. It can also be an integer greater than 1, which would represent the maximum number of links to search. 
:return: """ - logger.info() + logger.debug() devuelve = [] skip = int(skip) servers_list = list(get_servers_list().keys()) @@ -181,7 +181,7 @@ def findvideosbyserver(data, serverid): value = translate_server_name(server_parameters["name"]) , url, serverid, server_parameters.get("thumbnail", "") if value not in devuelve and url not in server_parameters["find_videos"].get("ignore_urls", []): devuelve.append(value) - logger.info(msg) + logger.debug(msg) return devuelve @@ -193,7 +193,7 @@ def guess_server_thumbnail(serverid): def get_server_from_url(url): - logger.info() + logger.debug() servers_list = list(get_servers_list().keys()) # Run findvideos on each active server @@ -211,7 +211,7 @@ def get_server_from_url(url): for n, pattern in enumerate(server_parameters["find_videos"].get("patterns", [])): msg = "%s\npattern: %s" % (serverid, pattern["pattern"]) if not "pattern_compiled" in pattern: - # logger.info('compiled ' + serverid) + # logger.debug('compiled ' + serverid) pattern["pattern_compiled"] = re.compile(pattern["pattern"]) dict_servers_parameters[serverid]["find_videos"]["patterns"][n]["pattern_compiled"] = pattern["pattern_compiled"] # Scroll through the results @@ -224,7 +224,7 @@ def get_server_from_url(url): msg += "\nurl encontrada: %s" % url value = translate_server_name(server_parameters["name"]), url, serverid, server_parameters.get("thumbnail", "") if url not in server_parameters["find_videos"].get("ignore_urls", []): - logger.info(msg) + logger.debug(msg) return value return None @@ -353,7 +353,7 @@ def resolve_video_urls_for_playing(server, url, video_password="", muestra_dialo video_urls.extend(response) except: logger.error("Error getting url in free mode") - error_messages.append(config.get_localized_string(60006) % server_name) + error_messages.append(config.get_localized_string(60014)) import traceback logger.error(traceback.format_exc()) @@ -370,10 +370,10 @@ def resolve_video_urls_for_playing(server, url, video_password="", muestra_dialo elif response and response[0][0]: error_messages.append(response[0][0]) else: - error_messages.append(config.get_localized_string(60006) % server_name) + error_messages.append(config.get_localized_string(60014)) except: logger.error("Server errorr: %s" % opcion) - error_messages.append(config.get_localized_string(60006) % server_name) + error_messages.append(config.get_localized_string(60014)) import traceback logger.error(traceback.format_exc()) @@ -394,7 +394,7 @@ def resolve_video_urls_for_playing(server, url, video_password="", muestra_dialo # If we do not have urls or error messages, we put a generic one elif not video_urls and not error_messages: - error_messages.append(config.get_localized_string(60006) % get_server_parameters(server)["name"]) + error_messages.append(config.get_localized_string(60014)) return video_urls, len(video_urls) > 0, "<br/>".join(error_messages) @@ -478,6 +478,7 @@ def get_server_parameters(server): if server not in dict_servers_parameters: try: + path = '' # Servers if filetools.isfile(filetools.join(config.get_runtime_path(), "servers", server + ".json")): path = filetools.join(config.get_runtime_path(), "servers", server + ".json") @@ -489,6 +490,8 @@ def get_server_parameters(server): # When the server is not well defined in the channel (there is no connector), it shows an error because there is no "path" and the channel has to be checked dict_server = jsontools.load(filetools.read(path)) + dict_server["name"] = translate_server_name(dict_server["name"]) + # Images: url and local files are 
allowed inside "resources / images" if dict_server.get("thumbnail") and "://" not in dict_server["thumbnail"]: dict_server["thumbnail"] = filetools.join(config.get_runtime_path(), "resources", "media", @@ -614,7 +617,7 @@ def get_server_setting(name, server, default=None): dict_file['settings'] = dict_settings # We create the file ../settings/channel_data.json if not filetools.write(file_settings, jsontools.dump(dict_file)): - logger.info("ERROR saving file: %s" % file_settings) + logger.error("ERROR saving file: %s" % file_settings) # We return the value of the local parameter 'name' if it exists, if default is not returned return dict_settings.get(name, default) @@ -636,7 +639,7 @@ def set_server_setting(name, value, server): dict_file = jsontools.load(filetools.read(file_settings)) dict_settings = dict_file.get('settings', {}) except EnvironmentError: - logger.info("ERROR when reading the file: %s" % file_settings) + logger.error("ERROR when reading the file: %s" % file_settings) dict_settings[name] = value @@ -648,7 +651,7 @@ def set_server_setting(name, value, server): # We create the file ../settings/channel_data.json if not filetools.write(file_settings, jsontools.dump(dict_file)): - logger.info("ERROR saving file: %s" % file_settings) + logger.error("ERROR saving file: %s" % file_settings) return None return value @@ -750,7 +753,7 @@ def check_video_link(item, timeout=3): server_module = __import__('servers.%s' % server, None, None, ["servers.%s" % server]) except: server_module = None - logger.info("[check_video_link] Cannot import server! %s" % server) + logger.error("[check_video_link] Cannot import server! %s" % server) return item, NK if hasattr(server_module, 'test_video_exists'): @@ -760,20 +763,20 @@ def check_video_link(item, timeout=3): try: video_exists, message = server_module.test_video_exists(page_url=url) if not video_exists: - logger.info("[check_video_link] Does not exist! %s %s %s" % (message, server, url)) + logger.error("[check_video_link] Does not exist! %s %s %s" % (message, server, url)) resultado = KO else: - logger.info("[check_video_link] check ok %s %s" % (server, url)) + logger.debug("[check_video_link] check ok %s %s" % (server, url)) resultado = OK except: - logger.info("[check_video_link] Can't check now! %s %s" % (server, url)) + logger.error("[check_video_link] Can't check now! 
%s %s" % (server, url)) resultado = NK finally: httptools.HTTPTOOLS_DEFAULT_DOWNLOAD_TIMEOUT = ant_timeout # Restore download time return item, resultado - logger.info("[check_video_link] There is no test_video_exists for server: %s" % server) + logger.debug("[check_video_link] There is no test_video_exists for server: %s" % server) return item, NK def translate_server_name(name): diff --git a/core/support.py b/core/support.py index 43bcd74a..fa579b43 100755 --- a/core/support.py +++ b/core/support.py @@ -17,7 +17,7 @@ else: from urllib import urlencode from time import time -from core import httptools, scrapertools, servertools, tmdb, channeltools, autoplay +from core import httptools, scrapertools, servertools, tmdb, channeltools, autoplay, scraper from core.item import Item from lib import unshortenit from platformcode import config @@ -33,7 +33,7 @@ def hdpass_get_servers(item): for mir_url, srv in scrapertools.find_multiple_matches(mir, patron_option): mir_url = scrapertools.decodeHtmlentities(mir_url) - info(mir_url) + logger.debug(mir_url) it = item.clone(action="play", quality=quality, title=srv, server=srv, url= mir_url) if not servertools.get_server_parameters(srv.lower()): it = hdpass_get_url(it)[0] # do not exists or it's empty ret.append(it) @@ -143,12 +143,16 @@ def scrapeLang(scraped, lang, longtitle): if language: longtitle += typo(language, '_ [] color kod') return language, longtitle + def cleantitle(title): - if type(title) != str: title.decode('UTF-8') - title = scrapertools.decodeHtmlentities(title) - cleantitle = title.replace('"', "'").replace('×', 'x').replace('–', '-').strip() + cleantitle = '' + if title: + if type(title) != str: title.decode('UTF-8') + title = scrapertools.decodeHtmlentities(title) + cleantitle = title.replace('"', "'").replace('×', 'x').replace('–', '-').strip() return cleantitle + def unifyEp(ep): # ep = re.sub(r'\s-\s|-|–|×|×', 'x', scraped['episode']) ep = ep.replace('-', 'x') @@ -157,7 +161,8 @@ def unifyEp(ep): ep = ep.replace('×', 'x') return ep -def scrapeBlock(item, args, block, patron, headers, action, pagination, debug, typeContentDict, typeActionDict, blacklist, search, pag, function, lang, sceneTitle): + +def scrapeBlock(item, args, block, patron, headers, action, pagination, debug, typeContentDict, typeActionDict, blacklist, search, pag, function, lang, sceneTitle, group): itemlist = [] if debug: regexDbg(item, patron, headers, block) @@ -184,6 +189,8 @@ def scrapeBlock(item, args, block, patron, headers, action, pagination, debug, t # AVVERTENZE: Se il titolo è trovato nella ricerca TMDB/TVDB/Altro allora le locandine e altre info non saranno quelle recuperate nel sito.!!!! 
stagione = '' # per quei siti che hanno la stagione nel blocco ma non nelle puntate + contents = [] + for i, match in enumerate(matches): if pagination and (pag - 1) * pagination > i and not search: continue # pagination if pagination and i >= pag * pagination and not search: break # pagination @@ -207,45 +214,52 @@ def scrapeBlock(item, args, block, patron, headers, action, pagination, debug, t val = domain + val scraped[kk] = val.strip() if type(val) == str else val - if scraped['season']: - stagione = scraped['season'] - ep = unifyEp(scraped['episode']) - if 'x' in ep: - episode = ep.split('x')[0].strip() - second_episode = ep.split('x')[1].strip() + episode = '' + if not group or item.grouped: + if scraped['season'] and scraped['episode']: + stagione = scraped['season'] + ep = unifyEp(scraped['episode']) + if 'x' in ep: + episode = ep.split('x')[0].strip() + second_episode = ep.split('x')[1].strip() + else: + episode = ep + second_episode = '' + item.infoLabels['season'] = int(scraped['season']) + item.infoLabels['episode'] = int(episode) + episode = str(int(scraped['season'])) +'x'+ str(int(episode)).zfill(2) + ('x' + str(int(second_episode)).zfill(2) if second_episode else '') + elif item.season: + item.infoLabels['season'] = int(item.season) + item.infoLabels['episode'] = int(scrapertools.find_single_match(scraped['episode'], r'(\d+)')) + episode = item.season +'x'+ scraped['episode'] + elif item.contentType == 'tvshow' and (scraped['episode'] == '' and scraped['season'] == '' and stagione == ''): + item.news = 'season_completed' + episode = '' else: - episode = ep - second_episode = '' - item.infoLabels['season'] = int(scraped['season']) - item.infoLabels['episode'] = int(episode) - episode = str(int(scraped['season'])) +'x'+ str(int(episode)).zfill(2) + ('x' + str(int(second_episode)).zfill(2) if second_episode else '') - elif item.season: - item.infoLabels['season'] = int(item.season) - item.infoLabels['episode'] = int(scrapertools.find_single_match(scraped['episode'], r'(\d+)')) - episode = item.season +'x'+ scraped['episode'] - elif item.contentType == 'tvshow' and (scraped['episode'] == '' and scraped['season'] == '' and stagione == ''): - item.news = 'season_completed' - episode = '' - else: - episode = unifyEp(scraped['episode']) if scraped['episode'] else '' - try: - if 'x' in episode: - ep = episode.split('x') - episode = str(int(ep[0])).zfill(1) + 'x' + str(int(ep[1])).zfill(2) - item.infoLabels['season'] = int(ep[0]) - item.infoLabels['episode'] = int(ep[1]) - second_episode = scrapertools.find_single_match(episode, r'x\d+x(\d+)') - if second_episode: episode = re.sub(r'(\d+x\d+)x\d+',r'\1-', episode) + second_episode.zfill(2) - except: - logger.debug('invalid episode: ' + episode) - pass + episode = unifyEp(scraped['episode']) if scraped['episode'] else '' + try: + if 'x' in episode: + ep = episode.split('x') + episode = str(int(ep[0])).zfill(1) + 'x' + str(int(ep[1])).zfill(2) + item.infoLabels['season'] = int(ep[0]) + item.infoLabels['episode'] = int(ep[1]) + second_episode = scrapertools.find_single_match(episode, r'x\d+x(\d+)') + if second_episode: episode = re.sub(r'(\d+x\d+)x\d+',r'\1-', episode) + second_episode.zfill(2) + except: + logger.debug('invalid episode: ' + episode) + pass #episode = re.sub(r'\s-\s|-|x|–|×', 'x', scraped['episode']) if scraped['episode'] else '' - title = cleantitle(scraped['title']) if scraped['title'] else '' - title2 = cleantitle(scraped['title2']) if scraped['title2'] else '' - quality = scraped['quality'].strip() if 
scraped['quality'] else '' - Type = scraped['type'] if scraped['type'] else '' - plot = cleantitle(scraped["plot"]) if scraped["plot"] else '' + title = cleantitle(scraped.get('title', '')) + if group and scraped.get('title', '') in contents and not item.grouped: # same title and grouping enabled + continue + if item.grouped and scraped.get('title', '') != item.fulltitle: # inside a group different tvshow should not be included + continue + contents.append(title) + title2 = cleantitle(scraped.get('title2', '')) if not group or item.grouped else '' + quality = scraped.get('quality', '') + # Type = scraped['type'] if scraped['type'] else '' + plot = cleantitle(scraped.get("plot", '')) # if title is set, probably this is a list of episodes or video sources # necessaria l'aggiunta di == scraped["title"] altrimenti non prende i gruppi dopo le categorie @@ -339,7 +353,8 @@ def scrapeBlock(item, args, block, patron, headers, action, pagination, debug, t AC = name break else: AC = action - if (scraped["title"] not in blacklist) and (search.lower() in longtitle.lower()): + + if (not scraped['title'] or scraped["title"] not in blacklist) and (search.lower() in longtitle.lower()): contentType = 'episode' if function == 'episodios' else CT if CT else item.contentType it = Item( channel=item.channel, @@ -349,17 +364,20 @@ def scrapeBlock(item, args, block, patron, headers, action, pagination, debug, t fulltitle=item.fulltitle if function == 'episodios' else title, show=item.show if function == 'episodios' else title, quality=quality, - url=scraped["url"], + url=scraped["url"] if scraped["url"] else item.url, infoLabels=infolabels, - thumbnail=item.thumbnail if not scraped["thumb"] else scraped["thumb"], + thumbnail=item.prevthumb if item.prevthumb else item.thumbnail if not scraped["thumb"] else scraped["thumb"], args=item.args, contentSerieName= title if 'movie' not in [contentType] and function != 'episodios' else item.contentSerieName, contentTitle= title if 'movie' in [contentType] and function == 'peliculas' else item.contentTitle, contentLanguage = lang1, contentEpisodeNumber=episode if episode else '', news= item.news if item.news else '', - other = scraped['other'] if scraped['other'] else '' + other = scraped['other'] if scraped['other'] else '', + grouped=group ) + if scraped['episode'] and group and not item.grouped: # some adjustment for grouping feature + it.action = function # for lg in list(set(listGroups).difference(known_keys)): # it.__setattr__(lg, match[listGroups.index(lg)]) @@ -367,38 +385,24 @@ def scrapeBlock(item, args, block, patron, headers, action, pagination, debug, t it.__setattr__(lg, match[lg]) if 'itemHook' in args: - it = args['itemHook'](it) + try: + it = args['itemHook'](it) + except: + raise logger.ChannelScraperException itemlist.append(it) return itemlist, matches -def scrape(func): - # args is a dict containing the foolowing keys: - # patron: the patron to use for scraping page, all capturing group must match with listGroups - # listGroups: a list containing the scraping info obtained by your patron, in order - # accepted values are: url, title, thumb, quality, year, plot, duration, genre, rating, episode, lang +def html_uniform(data): + """ + replace all ' with " and eliminate newline, so we don't need to worry about + """ + return re.sub("='([^']+)'", '="\\1"', data.replace('\n', ' ').replace('\t', ' ').replace(' ', ' ')) - # headers: values to pass to request header - # blacklist: titles that you want to exclude(service articles for example) - # data: if you 
want to pass data manually, maybe because you need some custom replacement - # patronBlock: patron to get parts of the page (to scrape with patron attribute), - # if you need a "block inside another block" you can create a list, please note that all matches - # will be packed as string - # patronNext: patron for scraping next page link - # action: if you want results perform an action different from "findvideos", useful when scraping film by genres - # addVideolibrary: if "add to videolibrary" should appear - # example usage: - # import support - # itemlist = [] - # patron = 'blablabla' - # headers = [['Referer', host]] - # blacklist = 'Request a TV serie!' - # return support.scrape(item, itemlist, patron, ['thumb', 'quality', 'url', 'title', 'year', 'plot', 'episode', 'lang'], - # headers=headers, blacklist=blacklist) - # 'type' is a check for typologies of content e.g. Film or TV Series - # 'episode' is a key to grab episode numbers if it is separated from the title - # IMPORTANT 'type' is a special key, to work need typeContentDict={} and typeActionDict={} + +def scrape(func): + """https://github.com/kodiondemand/addon/wiki/decoratori#scrape""" def wrapper(*args): itemlist = [] @@ -406,33 +410,34 @@ def scrape(func): args = func(*args) function = func.__name__ if not 'actLike' in args else args['actLike'] # info('STACK= ',inspect.stack()[1][3]) - item = args['item'] - action = args['action'] if 'action' in args else 'findvideos' - anime = args['anime'] if 'anime' in args else '' - addVideolibrary = args['addVideolibrary'] if 'addVideolibrary' in args else True - search = args['search'] if 'search' in args else '' - blacklist = args['blacklist'] if 'blacklist' in args else [] - data = args['data'] if 'data' in args else '' - patron = args['patron'] if 'patron' in args else args['patronMenu'] if 'patronMenu' in args else '' + action = args.get('action', 'findvideos') + anime = args.get('anime', '') + addVideolibrary = args.get('addVideolibrary', True) + search = args.get('search', '') + blacklist = args.get('blacklist', []) + data = args.get('data', '') + patron = args.get('patron', args.get('patronMenu', '')) if 'headers' in args: headers = args['headers'] elif 'headers' in func.__globals__: headers = func.__globals__['headers'] else: headers = '' - patronNext = args['patronNext'] if 'patronNext' in args else '' - patronBlock = args['patronBlock'] if 'patronBlock' in args else '' - typeActionDict = args['typeActionDict'] if 'typeActionDict' in args else {} - typeContentDict = args['typeContentDict'] if 'typeContentDict' in args else {} - debug = args['debug'] if 'debug' in args else False - debugBlock = args['debugBlock'] if 'debugBlock' in args else False - disabletmdb = args['disabletmdb'] if 'disabletmdb' in args else False + patronNext = args.get('patronNext', '') + patronBlock = args.get('patronBlock', '') + typeActionDict = args.get('typeActionDict', {}) + typeContentDict = args.get('typeContentDict', {}) + debug = args.get('debug', False) + debugBlock = args.get('debugBlock', False) + disabletmdb = args.get('disabletmdb', False) if 'pagination' in args and inspect.stack()[1][3] not in ['add_tvshow', 'get_episodes', 'update', 'find_episodes']: pagination = args['pagination'] if args['pagination'] else 20 else: pagination = '' - lang = args['deflang'] if 'deflang' in args else '' + lang = args.get('deflang', '') sceneTitle = args.get('sceneTitle') + group = args.get('group', False) + downloadEnabled = args.get('downloadEnabled', True) pag = item.page if item.page else 1 # pagination 
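pag and pagination implement a simple window over the regex matches: rows before (pag - 1) * pagination are skipped, scraping stops at pag * pagination, and a synthetic next-page item re-enters scrape() with page = pag + 1 while more matches remain. In isolation (hedged sketch with plain lists instead of Items):

def paginate(matches, pag, per_page):
    if not per_page:                          # pagination disabled
        return matches, False
    start = (pag - 1) * per_page
    window = matches[start:start + per_page]
    has_more = len(matches) > pag * per_page
    return window, has_more

# paginate(list(range(45)), pag=2, per_page=20) -> items 20..39, has_more True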
matches = [] @@ -440,24 +445,19 @@ def scrape(func): logger.debug('PATRON= ', patron) if not data: page = httptools.downloadpage(item.url, headers=headers, ignore_response_code=True) - data = re.sub("='([^']+)'", '="\\1"', page.data) - data = data.replace('\n', ' ') - data = data.replace('\t', ' ') - data = data.replace(' ', ' ') - data = re.sub(r'>\s{2,}<', '> <', data) - # replace all ' with " and eliminate newline, so we don't need to worry about + data = page.data + data = html_uniform(data) scrapingTime = time() if patronBlock: if debugBlock: regexDbg(item, patronBlock, headers, data) blocks = scrapertools.find_multiple_matches_groups(data, patronBlock) - block = "" for bl in blocks: # info(len(blocks),bl) if 'season' in bl and bl['season']: item.season = bl['season'] blockItemlist, blockMatches = scrapeBlock(item, args, bl['block'], patron, headers, action, pagination, debug, - typeContentDict, typeActionDict, blacklist, search, pag, function, lang, sceneTitle) + typeContentDict, typeActionDict, blacklist, search, pag, function, lang, sceneTitle, group) for it in blockItemlist: if 'lang' in bl: it.contentLanguage, it.title = scrapeLang(bl, it.contentLanguage, it.title) @@ -468,36 +468,64 @@ def scrape(func): matches.extend(blockMatches) elif patron: itemlist, matches = scrapeBlock(item, args, data, patron, headers, action, pagination, debug, typeContentDict, - typeActionDict, blacklist, search, pag, function, lang, sceneTitle) + typeActionDict, blacklist, search, pag, function, lang, sceneTitle, group) if 'itemlistHook' in args: - itemlist = args['itemlistHook'](itemlist) - - if 'ItemItemlistHook' in args: - itemlist = args['ItemItemlistHook'](item, itemlist) + try: + itemlist = args['itemlistHook'](itemlist) + except: + raise logger.ChannelScraperException # if url may be changed and channel has findhost to update if 'findhost' in func.__globals__ and not itemlist: info('running findhost ' + func.__module__) ch = func.__module__.split('.')[-1] - host = config.get_channel_url(func.__globals__['findhost'], ch, True) + try: + host = config.get_channel_url(func.__globals__['findhost'], ch, True) - parse = list(urlparse.urlparse(item.url)) - parse[1] = scrapertools.get_domain_from_url(host) - item.url = urlparse.urlunparse(parse) + parse = list(urlparse.urlparse(item.url)) + parse[1] = scrapertools.get_domain_from_url(host) + item.url = urlparse.urlunparse(parse) + except: + raise logger.ChannelScraperException data = None itemlist = [] matches = [] else: break + if not data: + from platformcode.logger import WebErrorException + raise WebErrorException(urlparse.urlparse(item.url)[1], item.channel) + + if group and item.grouped or args.get('groupExplode'): + import copy + nextArgs = copy.copy(args) + @scrape + def newFunc(): + return nextArgs + nextArgs['item'] = nextPage(itemlist, item, data, patronNext, function) + nextArgs['group'] = False + if nextArgs['item']: + nextArgs['groupExplode'] = True + itemlist.pop() # remove next page just added + itemlist.extend(newFunc()) + else: + nextArgs['groupExplode'] = False + nextArgs['item'] = item + itemlist = newFunc() + itemlist = [i for i in itemlist if i.action not in ['add_pelicula_to_library', 'add_serie_to_library']] + if action != 'play' and function != 'episodios' and 'patronMenu' not in args and item.contentType in ['movie', 'tvshow', 'episode', 'undefined'] and not disabletmdb: tmdb.set_infoLabels_itemlist(itemlist, seekTmdb=True) - if (pagination and len(matches) <= pag * pagination) or not pagination: # next page with pagination - 
if patronNext and inspect.stack()[1][3] not in ['newest']: + if not group and not args.get('groupExplode') and ((pagination and len(matches) <= pag * pagination) or not pagination): # next page with pagination + if patronNext and inspect.stack()[1][3] not in ['newest'] and inspect.stack()[2][3] not in ['get_channel_results']: nextPage(itemlist, item, data, patronNext, function) + # if function == 'episodios': + # scraper.sort_episode_list(itemlist) + # next page for pagination if pagination and len(matches) > pag * pagination and not search: if inspect.stack()[1][3] not in ['newest','get_newest']: @@ -511,25 +539,31 @@ def scrape(func): url=item.url, args=item.args, page=pag + 1, - thumbnail=thumb())) + thumbnail=thumb(), + prevthumb=item.prevthumb if item.prevthumb else item.thumbnail)) - if anime: + if anime and inspect.stack()[1][3] not in ['find_episodes']: from platformcode import autorenumber - if function == 'episodios' or item.action == 'episodios': autorenumber.renumber(itemlist, item, 'bold') - else: autorenumber.renumber(itemlist) + if (function == 'episodios' or item.action == 'episodios'): autorenumber.start(itemlist, item) + else: autorenumber.start(itemlist) # if anime and autorenumber.check(item) == False and len(itemlist)>0 and not scrapertools.find_single_match(itemlist[0].title, r'(\d+.\d+)'): # pass # else: - if addVideolibrary and (item.infoLabels["title"] or item.fulltitle): - # item.fulltitle = item.infoLabels["title"] - videolibrary(itemlist, item, function=function) - if function == 'episodios' or function == 'findvideos': - download(itemlist, item, function=function) + if inspect.stack()[1][3] not in ['find_episodes']: + if addVideolibrary and (item.infoLabels["title"] or item.fulltitle): + # item.fulltitle = item.infoLabels["title"] + videolibrary(itemlist, item, function=function) + if downloadEnabled and function == 'episodios' or function == 'findvideos': + download(itemlist, item, function=function) + if 'patronMenu' in args and itemlist: itemlist = thumb(itemlist, genre=True) if 'fullItemlistHook' in args: - itemlist = args['fullItemlistHook'](itemlist) + try: + itemlist = args['fullItemlistHook'](itemlist) + except: + raise logger.ChannelScraperException # itemlist = filterLang(item, itemlist) # causa problemi a newest @@ -634,63 +668,6 @@ def dooplay_menu(item, type): return locals() -def swzz_get_url(item): - headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:59.0) Gecko/20100101 Firefox/59.0'} - # dbg() - if "/link/" in item.url: - data = httptools.downloadpage(item.url, headers=headers).data - if "link =" in data: - data = scrapertools.find_single_match(data, 'link = "([^"]+)"') - if 'http' not in data: - data = 'https:' + data - elif 'linkId = ' in data: - id = scrapertools.find_single_match(data, 'linkId = "([^"]+)"') - data = stayonline(id) - else: - match = scrapertools.find_single_match(data, r'<meta name="og:url" content="([^"]+)"') - match = scrapertools.find_single_match(data, r'URL=([^"]+)">') if not match else match - - if not match: - from lib import jsunpack - - try: - data = scrapertools.find_single_match(data.replace('\n', ''), r"(eval\s?\(function\(p,a,c,k,e,d.*?)</script>") - data = jsunpack.unpack(data) - - logger.debug("##### play /link/ unpack ##\n%s\n##" % data) - except: - logger.debug("##### The content is yet unpacked ##\n%s\n##" % data) - - data = scrapertools.find_single_match(data, r'var link(?:\s)?=(?:\s)?"([^"]+)";') - data, c = unshortenit.unwrap_30x_only(data) - else: - data = match - if data.startswith('/'): 
- data = urlparse.urljoin("http://swzz.xyz", data) - if not "vcrypt" in data: - data = httptools.downloadpage(data).data - logger.debug("##### play /link/ data ##\n%s\n##" % data) - - elif 'stayonline.pro' in item.url: - id = item.url.split('/')[-2] - data = stayonline(id) - else: - data = item.url - - return data.replace('\\','') - -def stayonline(id): - reqUrl = 'https://stayonline.pro/ajax/linkView.php' - p = urlencode({"id": id}) - data = httptools.downloadpage(reqUrl, post=p).data - try: - import json - data = json.loads(data)['data']['value'] - except: - data = scrapertools.find_single_match(data, r'"value"\s*:\s*"([^"]+)"') - return data - - def menuItem(itemlist, filename, title='', action='', url='', contentType='undefined', args=[], style=True): # Function to simplify menu creation @@ -708,11 +685,13 @@ def menuItem(itemlist, filename, title='', action='', url='', contentType='undef url = url, extra = extra, args = args, - contentType = contentType + contentType = contentType, )) def menu(func): + """https://github.com/kodiondemand/addon/wiki/decoratori#menu""" + def wrapper(*args): args = func(*args) @@ -732,7 +711,7 @@ def menu(func): itemlist = [] for name in listUrls: - dictUrl[name] = args[name] if name in args else None + dictUrl[name] = args.get(name, None) logger.debug(dictUrl[name]) if name == 'film': title = 'Film' if name == 'tvshow': title = 'Serie TV' @@ -782,7 +761,7 @@ def menu(func): if name not in listUrls and name != 'item': listUrls_extra.append(name) for name in listUrls_extra: - dictUrl[name] = args[name] if name in args else None + dictUrl[name] = args.get(name, None) for sub, var in dictUrl[name]: menuItem(itemlist, filename, title = sub + ' ', @@ -827,7 +806,7 @@ def typo(string, typography=''): typography = string.split('{')[1].strip(' }').lower() string = string.replace('{' + typography + '}','').strip() else: - string = string.strip() + string = string typography.lower() @@ -924,12 +903,7 @@ def match(item_url_string, **args): data = httptools.downloadpage(url, **args).data # format page data - data = re.sub("='([^']+)'", '="\\1"', data) - data = data.replace('\n', ' ') - data = data.replace('\t', ' ') - data = data.replace(' ', ' ') - data = re.sub(r'>\s+<', '><', data) - data = re.sub(r'([a-zA-Z])"([a-zA-Z])', "\1'\2", data) + data = html_uniform(data) # collect blocks of a page if patronBlock: @@ -1064,7 +1038,7 @@ def videolibrary(itemlist, item, typography='', function_level=1, function=''): # Simply add this function to add video library support # Function_level is useful if the function is called by another function. # If the call is direct, leave it blank - info() + logger.debug() if item.contentType == 'movie': action = 'add_pelicula_to_library' @@ -1094,7 +1068,7 @@ def videolibrary(itemlist, item, typography='', function_level=1, function=''): or (function == 'episodios' and contentType != 'movie'): if config.get_videolibrary_support() and len(itemlist) > 0: itemlist.append( - Item(channel=item.channel, + item.clone(channel=item.channel, title=title, fulltitle=item.fulltitle, show=item.fulltitle, @@ -1111,10 +1085,11 @@ def videolibrary(itemlist, item, typography='', function_level=1, function=''): return itemlist + def nextPage(itemlist, item, data='', patron='', function_or_level=1, next_page='', resub=[]): # Function_level is useful if the function is called by another function. 
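In the nextPage() rewrite below, the next-page entry is built with item.clone() (so contentType, args and infoLabels are preserved) and the function now returns just that entry instead of the whole itemlist; the group/groupExplode branch in scrape() relies on that return value as the item for its recursive pass. A reduced sketch of the new contract (next_url stands in for the patronNext match, the title is illustrative):

def next_page_entry(itemlist, item, next_url):
    entry = item.clone(title='Next >>', url=next_url, nextPage=True)
    itemlist.append(entry)    # the caller's list still receives the entry...
    return entry              # ...but only the new entry is returned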
# If the call is direct, leave it blank - info() + logger.debug() action = inspect.stack()[function_or_level][3] if type(function_or_level) == int else function_or_level if next_page == '': next_page = scrapertools.find_single_match(data, patron) @@ -1124,9 +1099,9 @@ def nextPage(itemlist, item, data='', patron='', function_or_level=1, next_page= if 'http' not in next_page: next_page = scrapertools.find_single_match(item.url, 'https?://[a-z0-9.-]+') + (next_page if next_page.startswith('/') else '/' + next_page) next_page = next_page.replace('&', '&') - info('NEXT= ', next_page) + logger.debug('NEXT= ', next_page) itemlist.append( - Item(channel=item.channel, + item.clone(channel=item.channel, action = action, contentType=item.contentType, title=typo(config.get_localized_string(30992), 'color kod bold'), @@ -1134,8 +1109,7 @@ def nextPage(itemlist, item, data='', patron='', function_or_level=1, next_page= args=item.args, nextPage=True, thumbnail=thumb())) - - return itemlist + return itemlist[-1] def pagination(itemlist, item, page, perpage, function_level=1): if len(itemlist) >= page * perpage: @@ -1152,9 +1126,7 @@ def pagination(itemlist, item, page, perpage, function_level=1): def server(item, data='', itemlist=[], headers='', AutoPlay=True, CheckLinks=True, Download=True, patronTag=None, Videolibrary=True): - info() - blacklisted_servers = config.get_setting("black_list", server='servers') - if not blacklisted_servers: blacklisted_servers = [] + logger.debug() if not data and not itemlist: data = httptools.downloadpage(item.url, headers=headers, ignore_response_code=True).data if data: @@ -1191,6 +1163,7 @@ def server(item, data='', itemlist=[], headers='', AutoPlay=True, CheckLinks=Tru videoitem.contentType = item.contentType videoitem.infoLabels = item.infoLabels videoitem.quality = quality + videoitem.referer = item.url videoitem.action = "play" # videoitem.nfo = item.nfo # videoitem.strm_path = item.strm_path @@ -1199,7 +1172,7 @@ def server(item, data='', itemlist=[], headers='', AutoPlay=True, CheckLinks=Tru with futures.ThreadPoolExecutor() as executor: thL = [executor.submit(getItem, videoitem) for videoitem in itemlist if videoitem.url] for it in futures.as_completed(thL): - if it.result() and it.result().server.lower() not in blacklisted_servers: + if it.result() and not config.get_setting("black_list", server=it.result().server.lower()): verifiedItemlist.append(it.result()) try: verifiedItemlist.sort(key=lambda it: int(re.sub(r'\D','',it.quality))) @@ -1322,6 +1295,7 @@ def addQualityTag(item, itemlist, data, patron): itemlist.insert(0,Item(channel=item.channel, action="", title=typo(qualityStr, '[] color kod bold'), + fulltitle=qualityStr, plot=descr, folder=False, thumbnail=thumb('info'))) @@ -1417,7 +1391,7 @@ def thumb(item_itemlist_string=None, genre=False, live=False): '_tvshow':['serie','tv', 'fiction']} def autoselect_thumb(item, genre): - info('SPLIT',re.split(r'\.|\{|\}|\[|\]|\(|\)|/| ',item.title.lower())) + # logger.debug('SPLIT',re.split(r'\.|\{|\}|\[|\]|\(|\)|/| ',item.title.lower())) if genre == False: for thumb, titles in icon_dict.items(): if any(word in re.split(r'\.|\{|\}|\[|\]|\(|\)|/| ',item.title.lower()) for word in search): diff --git a/core/tmdb.py b/core/tmdb.py index 8216e519..0b6c709e 100644 --- a/core/tmdb.py +++ b/core/tmdb.py @@ -19,7 +19,7 @@ import ast, copy, re, sqlite3, time, xbmcaddon from core import filetools, httptools, jsontools, scrapertools from core.item import InfoLabels -from platformcode import config, logger +from platformcode 
import config, logger, platformtools info_language = ["de", "en", "es", "fr", "it", "pt"] # from videolibrary.json def_lang = info_language[config.get_setting("info_language", "videolibrary")] @@ -87,7 +87,7 @@ create_bd() # The function name is the name of the decorator and receives the function that decorates. def cache_response(fn): - logger.info() + logger.debug() # import time # start_time = time.time() @@ -495,7 +495,7 @@ def set_infoLabels_item(item, seekTmdb=True, idioma_busqueda=def_lang, lock=None def find_and_set_infoLabels(item): - logger.info() + logger.debug() global otmdb_global tmdb_result = None @@ -524,9 +524,7 @@ def find_and_set_infoLabels(item): otmdb_global = Tmdb(id_Tmdb=item.infoLabels['tmdb_id'], tipo=tipo_busqueda, idioma_busqueda=def_lang) results = otmdb_global.get_list_resultados() - if len(results) > 1: - from platformcode import platformtools tmdb_result = platformtools.show_video_info(results, item=item, caption= tipo_contenido % title) elif len(results) > 0: tmdb_result = results[0] @@ -904,7 +902,7 @@ class Tmdb(object): cls.dic_generos[idioma][tipo] = {} url = ('http://api.themoviedb.org/3/genre/%s/list?api_key=a1ab8b8669da03637a4b98fa39c39228&language=%s' % (tipo, idioma)) try: - logger.info("[Tmdb.py] Filling in dictionary of genres") + logger.debug("[Tmdb.py] Filling in dictionary of genres") resultado = cls.get_json(url) if not isinstance(resultado, dict): @@ -936,7 +934,7 @@ class Tmdb(object): '&language=%s' % (self.busqueda_id, source, self.busqueda_idioma)) buscando = "%s: %s" % (source.capitalize(), self.busqueda_id) - logger.info("[Tmdb.py] Searching %s:\n%s" % (buscando, url)) + logger.debug("[Tmdb.py] Searching %s:\n%s" % (buscando, url)) resultado = self.get_json(url) if not isinstance(resultado, dict): resultado = ast.literal_eval(resultado.decode('utf-8')) @@ -983,7 +981,7 @@ class Tmdb(object): url += '&year=%s' % self.busqueda_year buscando = self.busqueda_texto.capitalize() - logger.info("[Tmdb.py] Searching %s on page %s:\n%s" % (buscando, page, url)) + logger.debug("[Tmdb.py] Searching %s on page %s:\n%s" % (buscando, page, url)) resultado = self.get_json(url) if not isinstance(resultado, dict): resultado = ast.literal_eval(resultado.decode('utf-8')) @@ -1044,7 +1042,7 @@ class Tmdb(object): url = ('http://api.themoviedb.org/3/%s?api_key=a1ab8b8669da03637a4b98fa39c39228&%s' % (type_search, "&".join(params))) - logger.info("[Tmdb.py] Searcing %s:\n%s" % (type_search, url)) + logger.debug("[Tmdb.py] Searcing %s:\n%s" % (type_search, url)) resultado = self.get_json(url, cache=False) if not isinstance(resultado, dict): resultado = ast.literal_eval(resultado.decode('utf-8')) @@ -1109,7 +1107,7 @@ class Tmdb(object): return True def get_list_resultados(self, num_result=20): - # logger.info("self %s" % str(self)) + # logger.debug("self %s" % str(self)) res = [] if num_result <= 0: @@ -1329,7 +1327,7 @@ class Tmdb(object): "&append_to_response=credits" % (self.result["id"], numtemporada, self.busqueda_idioma) buscando = "id_Tmdb: " + str(self.result["id"]) + " season: " + str(numtemporada) + "\nURL: " + url - logger.info("[Tmdb.py] Searcing " + buscando) + logger.debug("[Tmdb.py] Searcing " + buscando) try: self.temporada[numtemporada] = self.get_json(url) if not isinstance(self.temporada[numtemporada], dict): @@ -1518,7 +1516,7 @@ class Tmdb(object): items.extend(list(self.get_episodio(ret_infoLabels['season'], episodio).items())) - # logger.info("ret_infoLabels" % ret_infoLabels) + # logger.debug("ret_infoLabels" % ret_infoLabels) for k, v 
in items: if not v: diff --git a/core/trakt_tools.py b/core/trakt_tools.py index e5d23e51..639c8803 100644 --- a/core/trakt_tools.py +++ b/core/trakt_tools.py @@ -128,7 +128,7 @@ def token_trakt(item): def set_trakt_info(item): - logger.info() + logger.debug() import xbmcgui # Envia los datos a trakt try: @@ -139,7 +139,7 @@ def set_trakt_info(item): pass def get_trakt_watched(id_type, mediatype, update=False): - logger.info() + logger.debug() id_list = [] id_dict = dict() @@ -229,7 +229,7 @@ def trakt_check(itemlist): def get_sync_from_file(): - logger.info() + logger.debug() sync_path = os.path.join(config.get_data_path(), 'settings_channels', 'trakt_data.json') trakt_node = {} if os.path.exists(sync_path): @@ -241,7 +241,7 @@ def get_sync_from_file(): def update_trakt_data(mediatype, trakt_data): - logger.info() + logger.debug() sync_path = os.path.join(config.get_data_path(), 'settings_channels', 'trakt_data.json') if os.path.exists(sync_path): @@ -251,7 +251,7 @@ def update_trakt_data(mediatype, trakt_data): def ask_install_script(): - logger.info() + logger.debug() from platformcode import platformtools @@ -265,7 +265,7 @@ def ask_install_script(): def wait_for_update_trakt(): - logger.info() + logger.debug() t = Thread(update_all) t.setDaemon(True) t.start() @@ -274,7 +274,7 @@ def wait_for_update_trakt(): def update_all(): # from core.support import dbg;dbg() from time import sleep - logger.info() + logger.debug() sleep(20) while xbmc.Player().isPlaying(): sleep(20) diff --git a/core/tvdb.py b/core/tvdb.py index 674125a8..eb635d36 100644 --- a/core/tvdb.py +++ b/core/tvdb.py @@ -77,8 +77,9 @@ otvdb_global = None def find_and_set_infoLabels(item): - logger.info() - # logger.info("item es %s" % item) + logger.debug() + # from core.support import dbg;dbg() + # logger.debug("item es %s" % item) p_dialog = None if not item.contentSeason: @@ -89,16 +90,18 @@ def find_and_set_infoLabels(item): title = item.contentSerieName # If the title includes the (year) we will remove it - year = scrapertools.find_single_match(title, "^.+?\s*(\(\d{4}\))$") + year = scrapertools.find_single_match(title, r"^.+?\s*(\(\d{4}\))$") if year: title = title.replace(year, "").strip() item.infoLabels['year'] = year[1:-1] - if not item.infoLabels.get("tvdb_id"): - if not item.infoLabels.get("imdb_id"): + if item.infoLabels.get("tvdb_id", '') in ['', 'None']: + if item.infoLabels['year']: otvdb_global = Tvdb(search=title, year=item.infoLabels['year']) - else: + elif item.infoLabels.get("imdb_id"): otvdb_global = Tvdb(imdb_id=item.infoLabels.get("imdb_id")) + else: + otvdb_global = Tvdb(search=title) elif not otvdb_global or otvdb_global.get_id() != item.infoLabels['tvdb_id']: otvdb_global = Tvdb(tvdb_id=item.infoLabels['tvdb_id']) @@ -114,9 +117,16 @@ def find_and_set_infoLabels(item): if len(results) > 1: tvdb_result = platformtools.show_video_info(results, item=item, scraper=Tvdb, caption=config.get_localized_string(60298) % title) + # if not tvdb_result: + # res = platformtools.dialog_info(item, 'tvdb') + # if not res.exit: return find_and_set_infoLabels(res) elif len(results) > 0: tvdb_result = results[0] + # else: + # res = platformtools.dialog_info(item, 'tvdb') + # if not res.exit: return find_and_set_infoLabels(res) + # todo revisar if isinstance(item.infoLabels, InfoLabels): logger.debug("is an instance of infoLabels") @@ -160,7 +170,7 @@ def set_infoLabels_item(item): if 'infoLabels' in item and 'fanart' in item.infoLabels['fanart']: item.fanart = item.infoLabels['fanart'] - if 'infoLabels' in item and 
'season' in item.infoLabels: + if 'infoLabels' in item and 'season' in item.infoLabels and item.contentType != 'tvshow': try: int_season = int(item.infoLabels['season']) except ValueError: @@ -372,7 +382,7 @@ class Tvdb(object): @classmethod def __check_token(cls): - # logger.info() + # logger.debug() if TOKEN == "": cls.__login() else: @@ -387,7 +397,7 @@ class Tvdb(object): @staticmethod def __login(): - # logger.info() + # logger.debug() global TOKEN apikey = "106B699FDC04301C" @@ -398,19 +408,13 @@ class Tvdb(object): else: params = jsontools.dump(params) try: - req = urllib.request.Request(url, data=params, headers=DEFAULT_HEADERS) - response = urllib.request.urlopen(req) - html = response.read() - response.close() + dict_html = requests.post(url, data=params, headers=DEFAULT_HEADERS).json() except Exception as ex: message = "An exception of type %s occured. Arguments:\n%s" % (type(ex).__name__, repr(ex.args)) logger.error("error: %s" % message) else: - dict_html = jsontools.load(html) - - # logger.debug("dict_html %s" % dict_html) - - if "token" in dict_html: token = dict_html["token"] DEFAULT_HEADERS["Authorization"] = "Bearer " + token @@ -419,22 +423,19 @@ class Tvdb(object): @classmethod def __refresh_token(cls): - # logger.info() + # logger.debug() global TOKEN is_success = False url = HOST + "/refresh_token" - try: - req = urllib.request.Request(url, headers=DEFAULT_HEADERS) - response = urllib.request.urlopen(req) - html = response.read() - response.close() + try: + req = requests.get(url, headers=DEFAULT_HEADERS) - except urllib.error.HTTPError as err: - logger.error("err.code %s" % err.code) + except requests.HTTPError as err: + logger.error("err.code %s" % err.response.status_code) # if there is error 401 it is that the token has passed the time and we have to call login again - if err.code == 401: + if err.response.status_code == 401: cls.__login() else: raise @@ -444,13 +445,15 @@ class Tvdb(object): logger.error("error: %s" % message) else: - dict_html = jsontools.load(html) + dict_html = req.json() # logger.error("tokencito %s" % dict_html) if "token" in dict_html: token = dict_html["token"] DEFAULT_HEADERS["Authorization"] = "Bearer " + token TOKEN = config.set_setting("tvdb_token", token) is_success = True + else: + cls.__login() return is_success @@ -518,7 +521,7 @@ class Tvdb(object): ] } """ - logger.info() + logger.debug() if id_episode and self.episodes.get(id_episode): return self.episodes.get(id_episode) @@ -531,18 +534,16 @@ class Tvdb(object): DEFAULT_HEADERS["Accept-Language"] = lang logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS)) - req = urllib.request.Request(url, headers=DEFAULT_HEADERS) - response = urllib.request.urlopen(req) - html = response.read() - response.close() + req = requests.get(url, headers=DEFAULT_HEADERS) except Exception as ex: message = "An exception of type %s occured. 
Arguments:\n%s" % (type(ex).__name__, repr(ex.args)) logger.error("error: %s" % message) else: - dict_html = jsontools.load(html) - + dict_html = req.json() + if 'Error' in dict_html: + logger.debug("code %s " % dict_html['Error']) if "data" in dict_html and "id" in dict_html["data"][0]: self.get_episode_by_id(dict_html["data"][0]["id"], lang) return dict_html["data"] @@ -588,27 +589,14 @@ class Tvdb(object): } } """ - logger.info() + logger.debug() - try: - url = HOST + "/series/%s/episodes?page=%s" % (_id, page) - logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS)) - req = urllib.request.Request(url, headers=DEFAULT_HEADERS) - response = urllib.request.urlopen(req) - html = response.read() - response.close() - - except Exception as ex: - message = "An exception of type %s occured. Arguments:\n%s" % (type(ex).__name__, repr(ex.args)) - logger.error("error: %s" % message) - - else: - self.list_episodes[page] = jsontools.load(html) - - # logger.info("dict_html %s" % self.list_episodes) - - return self.list_episodes[page] + url = HOST + "/series/%s/episodes?page=%s" % (_id, page) + logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS)) + js = requests.get(url, headers=DEFAULT_HEADERS).json() + self.list_episodes[page] = js if 'Error' not in js else {} + return self.list_episodes[page] def get_episode_by_id(self, _id, lang=DEFAULT_LANG, semaforo=None): """ @@ -674,31 +662,27 @@ class Tvdb(object): """ if semaforo: semaforo.acquire() - logger.info() + logger.debug() url = HOST + "/episodes/%s" % _id + # from core.support import dbg;dbg() + try: DEFAULT_HEADERS["Accept-Language"] = lang logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS)) - req = urllib.request.Request(url, headers=DEFAULT_HEADERS) - response = urllib.request.urlopen(req) - html = response.read() - response.close() + req = requests.get(url, headers=DEFAULT_HEADERS) except Exception as ex: # if isinstance(ex, urllib).HTTPError: - logger.debug("code %s " % ex.code) - + logger.debug("code %s " % ex) message = "An exception of type %s occured. 
Arguments:\n%s" % (type(ex).__name__, repr(ex.args)) logger.error("error en: %s" % message) else: - dict_html = jsontools.load(html) - dict_html = dict_html.pop("data") - - logger.info("dict_html %s" % dict_html) - self.episodes[_id] = dict_html + dict_html = req.json() + # logger.debug("dict_html %s" % dict_html) + self.episodes[_id] = dict_html.pop("data") if 'Error' not in dict_html else {} if semaforo: semaforo.release() @@ -728,39 +712,30 @@ class Tvdb(object): "status": "string" } """ - logger.info() + logger.debug() - try: + params = {} + if name: + params["name"] = name + elif imdb_id: + params["imdbId"] = imdb_id + elif zap2it_id: + params["zap2itId"] = zap2it_id - params = {} - if name: - params["name"] = name - elif imdb_id: - params["imdbId"] = imdb_id - elif zap2it_id: - params["zap2itId"] = zap2it_id + params = urllib.parse.urlencode(params) - params = urllib.parse.urlencode(params) + DEFAULT_HEADERS["Accept-Language"] = lang + url = HOST + "/search/series?%s" % params + logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS)) - DEFAULT_HEADERS["Accept-Language"] = lang - url = HOST + "/search/series?%s" % params - logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS)) + dict_html = requests.get(url, headers=DEFAULT_HEADERS).json() - req = urllib.request.Request(url, headers=DEFAULT_HEADERS) - response = urllib.request.urlopen(req) - html = response.read() - logger.info(html) - response.close() - except Exception as ex: + if 'Error' in dict_html: # if isinstance(ex, urllib.parse).HTTPError: - logger.debug("code %s " % ex.code) - - message = "An exception of type %s occured. Arguments:\n%s" % (type(ex).__name__, repr(ex.args)) - logger.error("error: %s" % message) + logger.debug("code %s " % dict_html['Error']) else: - dict_html = jsontools.load(html) if "errors" in dict_html and "invalidLanguage" in dict_html["errors"]: # no hay información en idioma por defecto @@ -827,20 +802,16 @@ class Tvdb(object): } } """ - logger.info() + logger.debug() resultado = {} url = HOST + "/series/%s" % _id try: DEFAULT_HEADERS["Accept-Language"] = lang - req = urllib.request.Request(url, headers=DEFAULT_HEADERS) + req = requests.get(url, headers=DEFAULT_HEADERS) logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS)) - response = urllib.request.urlopen(req) - html = response.read() - response.close() - except Exception as ex: # if isinstance(ex, urllib).HTTPError: logger.debug("code %s " % ex) @@ -849,26 +820,24 @@ class Tvdb(object): logger.error("error: %s" % message) else: - dict_html = jsontools.load(html) - - if "errors" in dict_html and "invalidLanguage" in dict_html["errors"]: + dict_html = req.json() + if "Error" in dict_html and "invalidLanguage" in dict_html["Error"]: return {} - else: - resultado1 = dict_html["data"] - if not resultado1 and from_get_list: - return self.__get_by_id(_id, "en") + resultado1 = dict_html["data"] + if not resultado1 and from_get_list: + return self.__get_by_id(_id, "en") - logger.debug("Result %s" % dict_html) - resultado2 = {"image_poster": [{'keyType': 'poster', 'fileName': 'posters/%s-1.jpg' % _id}]} - resultado3 = {"image_fanart": [{'keyType': 'fanart', 'fileName': 'fanart/original/%s-1.jpg' % _id}]} + logger.debug("Result %s" % dict_html) + resultado2 = {"image_poster": [{'keyType': 'poster', 'fileName': 'posters/%s-1.jpg' % _id}]} + resultado3 = {"image_fanart": [{'keyType': 'fanart', 'fileName': 'fanart/original/%s-1.jpg' % _id}]} - resultado = resultado1.copy() - resultado.update(resultado2) - resultado.update(resultado3) 
+ resultado = resultado1.copy() + resultado.update(resultado2) + resultado.update(resultado3) - logger.debug("total result %s" % resultado) - self.list_results = [resultado] - self.result = resultado + logger.debug("total result %s" % resultado) + self.list_results = [resultado] + self.result = resultado return resultado @@ -886,7 +855,7 @@ class Tvdb(object): @rtype: dict """ - logger.info() + logger.debug() if self.result.get('image_season_%s' % season): return self.result['image_season_%s' % season] @@ -909,24 +878,26 @@ class Tvdb(object): url = HOST + "/series/%s/images/query?%s" % (_id, params) logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS)) - req = urllib.request.Request(url, headers=DEFAULT_HEADERS) - response = urllib.request.urlopen(req) - html = response.read() - response.close() + res = requests.get(url, headers=DEFAULT_HEADERS) except Exception as ex: + # if isinstance(ex, urllib).HTTPError: + logger.debug("code %s " % ex) + message = "An exception of type %s occured. Arguments:\n%s" % (type(ex).__name__, repr(ex.args)) logger.error("error: %s" % message) - return {} else: - dict_html = jsontools.load(html) + dict_html = res.json() + if 'Error' in dict_html: + # if isinstance(ex, urllib.parse).HTTPError: + logger.debug("code %s " % dict_html['Error']) + else: + dict_html["image_" + image] = dict_html.pop("data") + self.result.update(dict_html) - dict_html["image_" + image] = dict_html.pop("data") - self.result.update(dict_html) - - return dict_html + return dict_html def get_tvshow_cast(self, _id, lang=DEFAULT_LANG): """ @@ -938,20 +909,23 @@ class Tvdb(object): @return: dictionary with actors @rtype: dict """ - logger.info() + logger.debug() url = HOST + "/series/%s/actors" % _id DEFAULT_HEADERS["Accept-Language"] = lang logger.debug("url: %s, \nheaders: %s" % (url, DEFAULT_HEADERS)) - - req = urllib.request.Request(url, headers=DEFAULT_HEADERS) - response = urllib.request.urlopen(req) - html = response.read() - response.close() - - dict_html = jsontools.load(html) - - dict_html["cast"] = dict_html.pop("data") + try: + req = requests.get(url, headers=DEFAULT_HEADERS) + except Exception as ex: + logger.debug("code %s " % ex) + message = "An exception of type %s occured. 
Arguments:\n%s" % (type(ex).__name__, repr(ex.args)) + logger.error("error en: %s" % message) + else: + dict_html = req.json() + if 'Error' in dict_html: + logger.debug("code %s " % dict_html['Error']) + else: + dict_html["cast"] = dict_html.pop("data") self.result.update(dict_html) def get_id(self): @@ -968,7 +942,7 @@ class Tvdb(object): @rtype: list @return: list of results """ - logger.info() + logger.debug() list_results = [] # if we have a result and it has seriesName, we already have the info of the series, it is not necessary to search again @@ -1034,12 +1008,12 @@ class Tvdb(object): if 'data' in thumbs: ret_infoLabels['thumbnail'] = HOST_IMAGE + thumbs['data'][0]['fileName'] elif 'poster' in origen and origen['poster']: - ret_infoLabels['thumbnail'] = origen['poster'] + ret_infoLabels['thumbnail'] = HOST_IMAGE + origen['poster'] fanarts = requests.get(HOST + '/series/' + str(origen['id']) + '/images/query?keyType=fanart').json() if 'data' in fanarts: ret_infoLabels['fanart'] = HOST_IMAGE + fanarts['data'][0]['fileName'] elif 'fanart' in origen and origen['fanart']: - ret_infoLabels['thumbnail'] = origen['fanart'] + ret_infoLabels['fanart'] = HOST_IMAGE + origen['fanart'] if 'overview' in origen and origen['overview']: ret_infoLabels['plot'] = origen['overview'] if 'duration' in origen and origen['duration']: diff --git a/core/videolibrarytools.py b/core/videolibrarytools.py index 8535cd74..4603c198 100644 --- a/core/videolibrarytools.py +++ b/core/videolibrarytools.py @@ -78,7 +78,7 @@ def save_movie(item, silent=False): @rtype fallidos: int @return: the number of failed items or -1 if all failed """ - logger.info() + logger.debug() # logger.debug(item.tostring('\n')) insertados = 0 sobreescritos = 0 @@ -144,7 +144,7 @@ def save_movie(item, silent=False): if not path: # Create folder path = filetools.join(MOVIES_PATH, ("%s [%s]" % (base_name, _id)).strip()) - logger.info("Creating movie directory:" + path) + logger.debug("Creating movie directory:" + path) if not filetools.mkdir(path): logger.debug("Could not create directory") return 0, 0, -1, path @@ -159,7 +159,7 @@ def save_movie(item, silent=False): if not nfo_exists: # We create .nfo if it doesn't exist - logger.info("Creating .nfo: " + nfo_path) + logger.debug("Creating .nfo: " + nfo_path) head_nfo = scraper.get_nfo(item) item_nfo = Item(title=item.contentTitle, channel="videolibrary", action='findvideos', @@ -182,7 +182,7 @@ def save_movie(item, silent=False): if item_nfo and strm_exists: if json_exists: - logger.info("The file exists. Is overwritten") + logger.debug("The file exists. 
Is overwritten") sobreescritos += 1 else: insertados += 1 @@ -209,7 +209,7 @@ def save_movie(item, silent=False): item_nfo.library_urls[item.channel] = item.url if filetools.write(nfo_path, head_nfo + item_nfo.tojson()): - #logger.info("FOLDER_MOVIES : %s" % FOLDER_MOVIES) + #logger.debug("FOLDER_MOVIES : %s" % FOLDER_MOVIES) # We update the Kodi video library with the movie if config.is_xbmc() and config.get_setting("videolibrary_kodi") and not silent: from platformcode import xbmc_videolibrary @@ -238,7 +238,7 @@ def update_renumber_options(item, head_nfo, path): json = json_file['TVSHOW_AUTORENUMBER'] if item.fulltitle in json: item.channel_prefs[channel]['TVSHOW_AUTORENUMBER'] = json[item.fulltitle] - logger.info('UPDATED=\n' + str(item.channel_prefs)) + logger.debug('UPDATED=\n' + str(item.channel_prefs)) filetools.write(tvshow_path, head_nfo + item.tojson()) def add_renumber_options(item, head_nfo, path): @@ -426,7 +426,7 @@ def save_tvshow(item, episodelist, silent=False): @rtype path: str @return: serial directory """ - logger.info() + logger.debug() # logger.debug(item.tostring('\n')) path = "" @@ -486,7 +486,7 @@ def save_tvshow(item, episodelist, silent=False): if not path: path = filetools.join(TVSHOWS_PATH, ("%s [%s]" % (base_name, _id)).strip()) - logger.info("Creating series directory: " + path) + logger.debug("Creating series directory: " + path) try: filetools.mkdir(path) except OSError as exception: @@ -496,7 +496,7 @@ def save_tvshow(item, episodelist, silent=False): tvshow_path = filetools.join(path, "tvshow.nfo") if not filetools.exists(tvshow_path): # We create tvshow.nfo, if it does not exist, with the head_nfo, series info and watched episode marks - logger.info("Creating tvshow.nfo: " + tvshow_path) + logger.debug("Creating tvshow.nfo: " + tvshow_path) head_nfo = scraper.get_nfo(item) item.infoLabels['mediatype'] = "tvshow" item.infoLabels['title'] = item.contentSerieName @@ -570,11 +570,11 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True): @rtype fallidos: int @return: the number of failed episodes """ - logger.info() + logger.debug() episodelist = filter_list(episodelist, serie.action, path) # No episode list, nothing to save if not len(episodelist): - logger.info("There is no episode list, we go out without creating strm") + logger.debug("There is no episode list, we go out without creating strm") return 0, 0, 0 # process local episodes @@ -589,7 +589,7 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True): elif config.get_setting("local_episodes", "videolibrary"): done, local_episodes_path = config_local_episodes_path(path, serie) if done < 0: - logger.info("An issue has occurred while configuring local episodes, going out without creating strm") + logger.debug("An issue has occurred while configuring local episodes, going out without creating strm") return 0, 0, done item_nfo.local_episodes_path = local_episodes_path filetools.write(nfo_path, head_nfo + item_nfo.tojson()) @@ -601,7 +601,7 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True): local_episodelist += get_local_content(local_episodes_path) clean_list = [] for f in filetools.listdir(path): - match = scrapertools.find_single_match(f, r'[Ss]?(\d+)(?:x|_|\.|\s+)?[Ee]?[Pp]?(\d+)') + match = scrapertools.find_single_match(f, r'[Ss]?(\d+)(?:x|_|\s+)?[Ee]?[Pp]?(\d+)') if match: ep = '%dx%02d' % (int(match[0]), int(match[1])) if ep in local_episodelist: @@ -713,7 +713,7 @@ def save_episodes(path, episodelist, serie, silent=False, 
overwrite=True): # No episode list, nothing to save if not len(new_episodelist): - logger.info("There is no episode list, we go out without creating strm") + logger.debug("There is no episode list, we go out without creating strm") return 0, 0, 0 local_episodelist += get_local_content(path) @@ -745,12 +745,12 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True): json_path = filetools.join(path, ("%s [%s].json" % (season_episode, e.channel)).lower()) if season_episode in local_episodelist: - logger.info('Skipped: Serie ' + serie.contentSerieName + ' ' + season_episode + ' available as local content') + logger.debug('Skipped: Serie ' + serie.contentSerieName + ' ' + season_episode + ' available as local content') continue # check if the episode has been downloaded if filetools.join(path, "%s [downloads].json" % season_episode) in ficheros: - logger.info('INFO: "%s" episode %s has been downloaded, skipping it' % (serie.contentSerieName, season_episode)) + logger.debug('INFO: "%s" episode %s has been downloaded, skipping it' % (serie.contentSerieName, season_episode)) continue strm_exists = strm_path in ficheros @@ -806,7 +806,7 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True): if filetools.write(json_path, e.tojson()): if not json_exists: - logger.info("Inserted: %s" % json_path) + logger.debug("Inserted: %s" % json_path) insertados += 1 # We mark episode as unseen news_in_playcounts[season_episode] = 0 @@ -817,14 +817,14 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True): news_in_playcounts[serie.contentSerieName] = 0 else: - logger.info("Overwritten: %s" % json_path) + logger.debug("Overwritten: %s" % json_path) sobreescritos += 1 else: - logger.info("Failed: %s" % json_path) + logger.debug("Failed: %s" % json_path) fallidos += 1 else: - logger.info("Failed: %s" % json_path) + logger.debug("Failed: %s" % json_path) fallidos += 1 if not silent and p_dialog.iscanceled(): @@ -894,7 +894,7 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True): def config_local_episodes_path(path, item, silent=False): - logger.info(item) + logger.debug(item) from platformcode.xbmc_videolibrary import search_local_path local_episodes_path=search_local_path(item) if not local_episodes_path: @@ -906,11 +906,11 @@ def config_local_episodes_path(path, item, silent=False): platformtools.dialog_ok(config.get_localized_string(30131), config.get_localized_string(80043)) local_episodes_path = platformtools.dialog_browse(0, config.get_localized_string(80046)) if local_episodes_path == '': - logger.info("User has canceled the dialog") + logger.debug("User has canceled the dialog") return -2, local_episodes_path elif path in local_episodes_path: platformtools.dialog_ok(config.get_localized_string(30131), config.get_localized_string(80045)) - logger.info("Selected folder is the same of the TV show one") + logger.debug("Selected folder is the same of the TV show one") return -2, local_episodes_path if local_episodes_path: @@ -925,7 +925,7 @@ def config_local_episodes_path(path, item, silent=False): def process_local_episodes(local_episodes_path, path): - logger.info() + logger.debug() sub_extensions = ['.srt', '.sub', '.sbv', '.ass', '.idx', '.ssa', '.smi'] artwork_extensions = ['.jpg', '.jpeg', '.png'] @@ -964,7 +964,7 @@ def process_local_episodes(local_episodes_path, path): def get_local_content(path): - logger.info() + logger.debug() local_episodelist = [] for root, folders, files in filetools.walk(path): @@ -993,7 +993,7 
@@ def add_movie(item): @type item: item @param item: item to be saved. """ - logger.info() + logger.debug() from platformcode.launcher import set_search_temp; set_search_temp(item) # To disambiguate titles, TMDB is caused to ask for the really desired title @@ -1006,17 +1006,17 @@ def add_movie(item): item = generictools.update_title(item) # We call the method that updates the title with tmdb.find_and_set_infoLabels #if item.tmdb_stat: # del item.tmdb_stat # We clean the status so that it is not recorded in the Video Library - if item: - new_item = item.clone(action="findvideos") - insertados, sobreescritos, fallidos, path = save_movie(new_item) + # if item: + new_item = item.clone(action="findvideos") + insertados, sobreescritos, fallidos, path = save_movie(new_item) - if fallidos == 0: - platformtools.dialog_ok(config.get_localized_string(30131), - config.get_localized_string(30135) % new_item.contentTitle) # 'has been added to the video library' - else: - filetools.rmdirtree(path) - platformtools.dialog_ok(config.get_localized_string(30131), - config.get_localized_string(60066) % new_item.contentTitle) # "ERROR, the movie has NOT been added to the video library") + if fallidos == 0: + platformtools.dialog_ok(config.get_localized_string(30131), + config.get_localized_string(30135) % new_item.contentTitle) # 'has been added to the video library' + else: + filetools.rmdirtree(path) + platformtools.dialog_ok(config.get_localized_string(30131), + config.get_localized_string(60066) % new_item.contentTitle) # "ERROR, the movie has NOT been added to the video library") def add_tvshow(item, channel=None): @@ -1040,7 +1040,7 @@ def add_tvshow(item, channel=None): @param channel: channel from which the series will be saved. By default, item.from_channel or item.channel will be imported. """ - logger.info("show=#" + item.show + "#") + logger.debug("show=#" + item.show + "#") from platformcode.launcher import set_search_temp; set_search_temp(item) if item.channel == "downloads": @@ -1073,21 +1073,26 @@ def add_tvshow(item, channel=None): # If the second screen is canceled, the variable "scraper_return" will be False. 
The user does not want to continue item = generictools.update_title(item) # We call the method that updates the title with tmdb.find_and_set_infoLabels + if not item: return #if item.tmdb_stat: # del item.tmdb_stat # We clean the status so that it is not recorded in the Video Library # Get the episode list # from core.support import dbg;dbg() itemlist = getattr(channel, item.action)(item) - if itemlist and not scrapertools.find_single_match(itemlist[0].title, r'(\d+x\d+)'): - from platformcode.autorenumber import select_type, renumber, check + if itemlist and not scrapertools.find_single_match(itemlist[0].title, r'[Ss]?(\d+)(?:x|_|\s+)[Ee]?[Pp]?(\d+)'): + from platformcode.autorenumber import start, check if not check(item): action = item.action - select_type(item) + item.renumber = True + start(item) + item.renumber = False item.action = action - return add_tvshow(item, channel) + if not item.exit: + return add_tvshow(item, channel) + itemlist = getattr(channel, item.action)(item) else: - itemlist = renumber(itemlist) + itemlist = getattr(channel, item.action)(item) global magnet_caching magnet_caching = False @@ -1112,7 +1117,7 @@ def add_tvshow(item, channel=None): else: platformtools.dialog_ok(config.get_localized_string(30131), config.get_localized_string(60070) % item.show) - logger.info("%s episodes of series %s have been added to the video library" % (insertados, item.show)) + logger.debug("%s episodes of series %s have been added to the video library" % (insertados, item.show)) if config.is_xbmc(): if config.get_setting("sync_trakt_new_tvshow", "videolibrary"): import xbmc @@ -1128,7 +1133,7 @@ def add_tvshow(item, channel=None): def emergency_urls(item, channel=None, path=None, headers={}): - logger.info() + logger.debug() import re from servers import torrent try: diff --git a/core/ziptools.py b/core/ziptools.py index 138c4c1b..f7f73db6 100644 --- a/core/ziptools.py +++ b/core/ziptools.py @@ -17,8 +17,8 @@ from core import filetools class ziptools(object): def extract(self, file, dir, folder_to_extract="", overwrite_question=False, backup=False): - logger.info("file= %s" % file) - logger.info("dir= %s" % dir) + logger.debug("file= %s" % file) + logger.debug("dir= %s" % dir) if not dir.endswith(':') and not filetools.exists(dir): filetools.mkdir(dir) @@ -30,13 +30,13 @@ class ziptools(object): for nameo in zf.namelist(): name = nameo.replace(':', '_').replace('<', '_').replace('>', '_').replace('|', '_').replace('"', '_').replace('?', '_').replace('*', '_') - logger.info("name=%s" % nameo) + logger.debug("name=%s" % nameo) if not name.endswith('/'): - logger.info("it's not a directory") + logger.debug("it's not a directory") try: (path, filename) = filetools.split(filetools.join(dir, name)) - logger.info("path=%s" % path) - logger.info("name=%s" % name) + logger.debug("path=%s" % path) + logger.debug("name=%s" % name) if folder_to_extract: if path != filetools.join(dir, folder_to_extract): break @@ -49,7 +49,7 @@ class ziptools(object): else: outfilename = filetools.join(dir, name) - logger.info("outfilename=%s" % outfilename) + logger.debug("outfilename=%s" % outfilename) try: if filetools.exists(outfilename) and overwrite_question: from platformcode import platformtools @@ -74,7 +74,7 @@ class ziptools(object): try: zf.close() except: - logger.info("Error closing .zip " + file) + logger.error("Error closing .zip " + file) def _createstructure(self, file, dir): self._makedirs(self._listdirs(file), dir) diff --git a/default.py b/default.py index 006074e4..a8ccc0bb 100644 --- 
a/default.py +++ b/default.py @@ -8,10 +8,12 @@ import sys import xbmc -# on kodi 18 its xbmc.translatePath, on 19 xbmcvfs.translatePath +# functions that on kodi 19 moved to xbmcvfs try: import xbmcvfs xbmc.translatePath = xbmcvfs.translatePath + xbmc.validatePath = xbmcvfs.validatePath + xbmc.makeLegalFilename = xbmcvfs.makeLegalFilename except: pass from platformcode import config, logger diff --git a/lib/arm_chromeos.py b/lib/arm_chromeos.py index d4808402..b33d1dfd 100644 --- a/lib/arm_chromeos.py +++ b/lib/arm_chromeos.py @@ -27,7 +27,7 @@ class ChromeOSImage: """ def __init__(self, imgpath): - logger.info('Image Path: ' + imgpath) + logger.debug('Image Path: ' + imgpath) """Prepares the image""" self.imgpath = imgpath self.bstream = self.get_bstream(imgpath) @@ -59,7 +59,7 @@ class ChromeOSImage: self.seek_stream(entries_start * lba_size) if not calcsize(part_format) == entry_size: - logger.info('Partition table entries are not 128 bytes long') + logger.debug('Partition table entries are not 128 bytes long') return 0 for index in range(1, entries_num + 1): # pylint: disable=unused-variable @@ -71,7 +71,7 @@ class ChromeOSImage: break if not offset: - logger.info('Failed to calculate losetup offset.') + logger.debug('Failed to calculate losetup offset.') return 0 return offset @@ -93,7 +93,7 @@ class ChromeOSImage: while True: chunk2 = self.read_stream(chunksize) if not chunk2: - logger.info('File %s not found in the ChromeOS image' % filename) + logger.debug('File %s not found in the ChromeOS image' % filename) return False chunk = chunk1 + chunk2 diff --git a/lib/doh.py b/lib/doh.py index 5375caa5..14ce62a6 100644 --- a/lib/doh.py +++ b/lib/doh.py @@ -47,7 +47,7 @@ def query(name, type='A', server=DOH_SERVER, path="/dns-query", fallback=True): else: retval = [] except Exception as ex: - logger.info("Exception occurred: '%s'" % ex) + logger.error("Exception occurred: '%s'" % ex) if retval is None and fallback: if type == 'A': diff --git a/lib/generictools.py b/lib/generictools.py index ffe77898..44789305 100644 --- a/lib/generictools.py +++ b/lib/generictools.py @@ -25,7 +25,7 @@ intervenido_sucuri = 'Access Denied - Sucuri Website Firewall' def update_title(item): - logger.info() + logger.debug() from core import scraper,support @@ -41,7 +41,7 @@ def update_title(item): The channel must add a method to be able to receive the call from Kodi / Alfa, and be able to call this method: def actualizar_titulos(item): - logger.info() + logger.debug() itemlist = [] from lib import generictools from platformcode import launcher @@ -129,7 +129,8 @@ def update_title(item): scraper_return = scraper.find_and_set_infoLabels(item) if not scraper_return: # If the user has canceled, we restore the data to the initial situation and leave - item = new_item.clone() + return + # item = new_item.clone() else: # If the user has changed the data in "Complete Information" you must see the final title in TMDB if not item.infoLabels['tmdb_id']: @@ -205,7 +206,7 @@ def update_title(item): def refresh_screen(item): - logger.info() + logger.debug() """ #### Kodi 18 compatibility #### @@ -239,7 +240,7 @@ def refresh_screen(item): def post_tmdb_listado(item, itemlist): - logger.info() + logger.debug() itemlist_fo = [] """ @@ -484,7 +485,7 @@ def post_tmdb_listado(item, itemlist): def post_tmdb_seasons(item, itemlist): - logger.info() + logger.debug() """ @@ -644,7 +645,7 @@ def post_tmdb_seasons(item, itemlist): def post_tmdb_episodios(item, itemlist): - logger.info() + logger.debug() itemlist_fo = [] """ @@ -995,7 
+996,7 @@ def post_tmdb_episodios(item, itemlist): def post_tmdb_findvideos(item, itemlist): - logger.info() + logger.debug() """ @@ -1215,7 +1216,7 @@ def post_tmdb_findvideos(item, itemlist): def get_field_from_kodi_DB(item, from_fields='*', files='file'): - logger.info() + logger.debug() """ Call to read from the Kodi DB the input fields received (from_fields, by default "*") of the video indicated in Item @@ -1293,7 +1294,7 @@ def get_field_from_kodi_DB(item, from_fields='*', files='file'): def fail_over_newpct1(item, patron, patron2=None, timeout=None): - logger.info() + logger.debug() import ast """ @@ -1494,7 +1495,7 @@ def fail_over_newpct1(item, patron, patron2=None, timeout=None): def web_intervenida(item, data, desactivar=True): - logger.info() + logger.debug() """ @@ -1577,7 +1578,7 @@ def web_intervenida(item, data, desactivar=True): def regenerate_clones(): - logger.info() + logger.debug() import json from core import videolibrarytools @@ -1591,7 +1592,7 @@ def regenerate_clones(): # Find the paths where to leave the control .json file, and the Video Library json_path = filetools.exists(filetools.join(config.get_runtime_path(), 'verify_cached_torrents.json')) if json_path: - logger.info('Previously repaired video library: WE ARE GOING') + logger.debug('Previously repaired video library: WE ARE GOING') return False json_path = filetools.join(config.get_runtime_path(), 'verify_cached_torrents.json') filetools.write(json_path, json.dumps({"CINE_verify": True})) # Prevents another simultaneous process from being launched @@ -1631,7 +1632,7 @@ def regenerate_clones(): # Delete the Tvshow.nfo files and check if the .nfo has more than one channel and one is clone Newpct1 for file in files: - # logger.info('file - nfos: ' + file) + # logger.debug('file - nfos: ' + file) if 'tvshow.nfo' in file: file_path = filetools.join(root, 'tvshow.nfo') filetools.remove(file_path) @@ -1697,7 +1698,7 @@ def regenerate_clones(): for file in files: file_path = filetools.join(root, file) if '.json' in file: - logger.info('** file: ' + file) + logger.debug('** file: ' + file) canal_json = scrapertools.find_single_match(file, r'\[(\w+)\].json') if canal_json not in nfo.library_urls: filetools.remove(file_path) # we delete the .json is a zombie @@ -1740,7 +1741,7 @@ def regenerate_clones(): def dejuice(data): - logger.info() + logger.debug() # Method to unobtrusive JuicyCodes data import base64 diff --git a/lib/guessit/rules/properties/website.py b/lib/guessit/rules/properties/website.py index b01e86c8..cbf677a3 100644 --- a/lib/guessit/rules/properties/website.py +++ b/lib/guessit/rules/properties/website.py @@ -29,7 +29,7 @@ def website(config): rebulk = rebulk.regex_defaults(flags=re.IGNORECASE).string_defaults(ignore_case=True) rebulk.defaults(name="website") - with open(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'tlds-alpha-by-domain.txt')) as tld_file: + with open(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'tlds-alpha-by-domain.txt'), 'rb') as tld_file: tlds = [ tld.strip().decode('utf-8') for tld in tld_file.readlines() diff --git a/lib/unshortenit.py b/lib/unshortenit.py index 96cf2eb6..833d5715 100644 --- a/lib/unshortenit.py +++ b/lib/unshortenit.py @@ -492,7 +492,6 @@ class UnshortenIt(object): except Exception as e: return uri, str(e) - def _unshorten_vcrypt(self, uri): uri = uri.replace('.net', '.pw') try: @@ -508,15 +507,15 @@ class UnshortenIt(object): from Crypto.Cipher import AES str = str.replace("_ppl_", 
"+").replace("_eqq_", "=").replace("_sll_", "/") - iv = "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" - key = "naphajU2usWUswec" + iv = b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" + key = b"naphajU2usWUswec" decoded = b64decode(str) - decoded = decoded + '\0' * (len(decoded) % 16) + decoded = decoded + b'\0' * (len(decoded) % 16) crypt_object = AES.new(key, AES.MODE_CBC, iv) - decrypted = '' + decrypted = b'' for p in range(0, len(decoded), 16): - decrypted += crypt_object.decrypt(decoded[p:p + 16]).replace('\0', '') - return decrypted + decrypted += crypt_object.decrypt(decoded[p:p + 16]).replace(b'\0', b'') + return decrypted.decode('ascii') if 'shield' in uri.split('/')[-2]: uri = decrypt(uri.split('/')[-1]) else: @@ -537,7 +536,7 @@ class UnshortenIt(object): r = httptools.downloadpage(uri, timeout=self._timeout, headers=headers, follow_redirects=False) if 'Wait 1 hour' in r.data: uri = '' - logger.info('IP bannato da vcrypt, aspetta un ora') + logger.error('IP bannato da vcrypt, aspetta un ora') else: prev_uri = uri uri = r.headers['location'] @@ -549,7 +548,11 @@ class UnshortenIt(object): if 'out_generator' in uri: uri = re.findall('url=(.*)$', uri)[0] elif '/decode/' in uri: - uri = httptools.downloadpage(uri, follow_redirects=True).url + scheme, netloc, path, query, fragment = urlsplit(uri) + splitted = path.split('/') + splitted[1] = 'outlink' + uri = httptools.downloadpage(scheme + '://' + netloc + "/".join(splitted) + query + fragment, follow_redirects=False, + post={'url': splitted[2]}).headers['location'] # uri = decrypt(uri.split('/')[-1]) return uri, r.code if r else 200 @@ -557,7 +560,6 @@ class UnshortenIt(object): logger.error(e) return uri, 0 - def _unshorten_linkup(self, uri): try: r = None diff --git a/lib/vvvvid_decoder.py b/lib/vvvvid_decoder.py index dd49128a..88703f04 100644 --- a/lib/vvvvid_decoder.py +++ b/lib/vvvvid_decoder.py @@ -1,5 +1,6 @@ import sys -import xbmc +if sys.version_info[0] > 2: from urllib.parse import unquote +else: from urllib2 import unquote def dec_ei(h): g = 'MNOPIJKL89+/4567UVWXQRSTEFGHABCDcdefYZabstuvopqr0123wxyzklmnghij' @@ -7,23 +8,14 @@ def dec_ei(h): for e in range(0,len(h)): c.append(g.find(h[e])) for e in range(len(c)*2-1,-1,-1): - #print 'e=' + str(e) a = c[e % len(c)] ^ c[(e+1)%len(c)] - #print 'a='+str(a) c[e%len(c)] = a - #print 'c['+str(e % len(c))+']='+ str(c[e % len(c)]) c = f(c) d = '' for e in range(0,len(c)): d += '%'+ (('0'+ (str(format(c[e],'x'))))[-2:]) - # if python 3 - if sys.version_info[0] > 2: - import urllib - return urllib.parse.unquote(d) - else: - import urllib2 - return urllib2.unquote(d) + return unquote(d) def f(m): l = list() diff --git a/platformcode/autorenumber.py b/platformcode/autorenumber.py index 119aa531..66883346 100644 --- a/platformcode/autorenumber.py +++ b/platformcode/autorenumber.py @@ -1,559 +1,49 @@ # -*- coding: utf-8 -*- # -------------------------------------------------------------------------------- -# autorenumber - Rinomina Automaticamente gli Episodi +# autorenumber - Rinumera Automaticamente gli Episodi # -------------------------------------------------------------------------------- -''' -USO: -1) utilizzare autorenumber.renumber(itemlist) nelle le funzioni peliculas e similari per aggiungere il menu contestuale -2) utilizzare autorenumber.renumber(itemlist, item, typography) nella funzione episodios -3) Aggiungere le seguinti stringhe nel json del canale (per attivare la configurazione di autonumerazione del canale) -{ - "id": "autorenumber", - "type": "bool", - "label": "@70712", - "default": 
false, - "enabled": true, - "visible": true -}, -{ - "id": "autorenumber_mode", - "type": "bool", - "label": "@70688", - "default": false, - "enabled": true, - "visible": "eq(-1,true)" -} -''' - -try: - import xbmcgui -except: - xbmcgui = None -import re, base64, json, inspect +import xbmc, xbmcgui, re, base64, inspect, sys from core import jsontools, tvdb, scrapertools, filetools -from core.support import typo +from core.item import Item +from core.support import typo, match, dbg, Item from platformcode import config, platformtools, logger - -TAG_TVSHOW_RENUMERATE = "TVSHOW_AUTORENUMBER" -TAG_ID = "ID" -TAG_SEASON = "Season" -TAG_EPISODE = "Episode" -TAG_SPECIAL = "Special" -TAG_MODE = "Mode" -TAG_EPLIST = "EpList" -TAG_CHECK = "ReCheck" -TAG_SPLIST = "SpList" -TAG_TYPE = "Type" - - -def renumber(itemlist, item='', typography=''): - logger.info() - dict_series = load(itemlist[0]) if len(itemlist) > 0 else {} - - if item: - item.channel = item.from_channel if item.from_channel else item.channel - title = item.fulltitle.rstrip() - try: - already_renumbered = scrapertools.find_single_match(itemlist[0].title, r'(\d+\D\d+)') - except: - return - if already_renumbered : - return itemlist - elif item.channel in item.channel_prefs and TAG_TVSHOW_RENUMERATE in item.channel_prefs[item.channel] and title not in dict_series: - from core.videolibrarytools import check_renumber_options - from specials.videolibrary import update_videolibrary - check_renumber_options(item) - update_videolibrary(item) - - elif inspect.stack()[2][3] == 'find_episodes': - return itemlist - - elif title in dict_series and TAG_ID in dict_series[title]: - ID = dict_series[title][TAG_ID] - Episode = dict_series[title][TAG_EPISODE] - Season = dict_series[title][TAG_SEASON] if TAG_SEASON in dict_series[title] else '' - Mode = dict_series[title][TAG_MODE] if TAG_MODE in dict_series[title] else False - Type = dict_series[title][TAG_TYPE] if TAG_TYPE in dict_series[title] else 'auto' - - renumeration(itemlist, item, typography, dict_series, ID, Season, Episode, Mode, title, Type) - - else: - if config.get_setting('autorenumber', item.channel): - config_item(item, itemlist, typography, True) - else: - return itemlist - - else: - for item in itemlist: - title = item.fulltitle.rstrip() - if title in dict_series and TAG_ID in dict_series[title]: - ID = dict_series[title][TAG_ID] - exist = True - else: - exist = False - - if item.contentType != 'movie': - if item.context: - context2 = item.context - item.show = item.fulltitle = title - item.context = context(exist) + context2 - else: - item.show = item.fulltitle = title - item.context = context(exist) - - -def config_item(item, itemlist=[], typography='', active=False): - logger.info() - # Configurazione Automatica, Tenta la numerazione Automatica degli episodi - title = item.fulltitle.rstrip() - - dict_series = load(item) - ID = dict_series[title][TAG_ID] if title in dict_series and TAG_ID in dict_series[title] else '' - - # Pulizia del Titolo - if any( word in title.lower() for word in ['specials', 'speciali']): - title = re.sub(r'\sspecials|\sspeciali', '', title.lower()) - tvdb.find_and_set_infoLabels(item) - elif not item.infoLabels['tvdb_id']: - item.contentSerieName= title.rstrip('123456789 ') - tvdb.find_and_set_infoLabels(item) - - if not ID and active: - if item.infoLabels['tvdb_id']: - ID = item.infoLabels['tvdb_id'] - dict_renumerate = {TAG_ID: ID} - dict_series[title] = dict_renumerate - # Trova La Stagione - if any(word in title.lower() for word in ['specials', 
'speciali']): - dict_renumerate[TAG_SEASON] = '0' - elif RepresentsInt(title.split()[-1]): - dict_renumerate[TAG_SEASON] = title.split()[-1] - else: dict_renumerate[TAG_SEASON] = '1' - dict_renumerate[TAG_EPISODE] = '' - write(item, dict_series) - return renumber(itemlist, item, typography) - else: - return itemlist - - else: - return renumber(itemlist, item, typography) - - -def semiautomatic_config_item(item): - logger.info() - # Configurazione Semi Automatica, utile in caso la numerazione automatica fallisca - tvdb.find_and_set_infoLabels(item) - item.channel = item.from_channel if item.from_channel else item.channel - dict_series = load(item) - title = item.fulltitle.rstrip() - - # Trova l'ID della serie - while not item.infoLabels['tvdb_id']: - try: - item.show = platformtools.dialog_input(default=item.show, heading=config.get_localized_string(30112)) # <- Enter title to search - tvdb.find_and_set_infoLabels(item) - except: - heading = config.get_localized_string(70704) # <- TMDB ID (0 to cancel) - info = platformtools.dialog_numeric(0, heading) - item.infoLabels['tvdb_id'] = '0' if info == '' else info - - - if item.infoLabels['tvdb_id']: - ID = item.infoLabels['tvdb_id'] - dict_renumerate = {TAG_ID: ID} - dict_series[title] = dict_renumerate - - # Trova la Stagione - if any( word in title.lower() for word in ['specials', 'speciali'] ): - heading = config.get_localized_string(70686) # <- Enter the number of the starting season (for specials) - season = platformtools.dialog_numeric(0, heading, '0') - dict_renumerate[TAG_SEASON] = season - elif RepresentsInt(title.split()[-1]): - heading = config.get_localized_string(70686) # <- Enter the number of the starting season (for season > 1) - season = platformtools.dialog_numeric(0, heading, title.split()[-1]) - dict_renumerate[TAG_SEASON] = season - else: - heading = config.get_localized_string(70686) # <- Enter the number of the starting season (for season 1) - season = platformtools.dialog_numeric(0, heading, '1') - dict_renumerate[TAG_SEASON] = season - - mode = platformtools.dialog_yesno(config.get_localized_string(70687), config.get_localized_string(70688), nolabel=config.get_localized_string(30023), yeslabel=config.get_localized_string(30022)) - if mode == True: - dict_renumerate[TAG_MODE] = False - - if TAG_SPECIAL in dict_series[title]: - specials = dict_renumerate[TAG_SPECIAL] - else: - specials = [] - - write(item, dict_series) - _list = [] - - itemlist = find_episodes(item) - for item in itemlist: - Title = re.sub(r'\d+x\d+ - ', '', item.title) - if item.action == 'findvideos': - _list.append(Title) - - selected = platformtools.dialog_multiselect(config.get_localized_string(70734), _list) - # if len(selected) > 0: - for select in selected: - specials.append(int(scrapertools.find_single_match(_list[select], r'(\d+)'))) - dict_renumerate[TAG_SPECIAL] = specials - - dict_renumerate[TAG_MODE] = False - - dict_renumerate[TAG_TYPE] = 'auto' - dict_renumerate[TAG_EPISODE] = '' - write(item, dict_series) - # xbmc.executebuiltin("Container.Refresh") - - else: - message = config.get_localized_string(60444) - heading = item.fulltitle.strip() - platformtools.dialog_notification(heading, message) - - -def renumeration (itemlist, item, typography, dict_series, ID, Season, Episode, Mode, Title, Type): - - # Se ID è 0 salta la rinumerazione - if ID == '0': - return itemlist - - # Numerazione per gli Speciali - elif Season == '0': - EpisodeDict = {} - for item in itemlist: - if config.get_localized_string(30992) not in item.title: - number = 
scrapertools.find_single_match(item.title, r'\d+') - item.title = typo('0x' + number + ' - ', typography) + item.title - - - # Usa la lista degli Episodi se esiste nel Json - - elif Episode: - EpisodeDict = json.loads(base64.b64decode(Episode)) - - # Controlla che la lista egli Episodi sia della stessa lunghezza di Itemlist - if EpisodeDict == 'none': - return error(itemlist) - if Type == 'manual' and len(EpisodeDict) < len(itemlist): - EpisodeDict = manual_renumeration(item, True) - if len(EpisodeDict) >= len(itemlist) and scrapertools.find_single_match(itemlist[0].title, r'\d+') in EpisodeDict: - for item in itemlist: - if config.get_localized_string(30992) not in item.title: - number = scrapertools.find_single_match(item.title, r'\d+') - number = int(number) # if number !='0': number.lstrip('0') - item.title = typo(EpisodeDict[str(number)] + ' - ', typography) + item.title - else: - make_list(itemlist, item, typography, dict_series, ID, Season, Episode, Mode, Title) - - else: - make_list(itemlist, item, typography, dict_series, ID, Season, Episode, Mode, Title) - - -def manual_renumeration(item, modify=False): - logger.info() - _list = [] - if item.from_channel: item.channel = item.from_channel - title = item.fulltitle.rstrip() - - dict_series = load(item) - - if title not in dict_series: dict_series[title] = {} - - if TAG_EPISODE in dict_series[title] and dict_series[title][TAG_EPISODE]: - EpisodeDict = json.loads(base64.b64decode(dict_series[title][TAG_EPISODE])) - del dict_series[title][TAG_EPISODE] - else: EpisodeDict = {} - - if TAG_EPLIST in dict_series[title]: del dict_series[title][TAG_EPLIST] - if TAG_MODE in dict_series[title]: del dict_series[title][TAG_MODE] - if TAG_CHECK in dict_series[title]: del dict_series[title][TAG_CHECK] - if TAG_SEASON in dict_series[title]: del dict_series[title][TAG_SEASON] - if TAG_SPECIAL in dict_series[title]: del dict_series[title][TAG_SPECIAL] - dict_series[title][TAG_TYPE] = 'manual' - write(item, dict_series) - - if TAG_ID not in dict_series[title] or (TAG_ID in dict_series[title] and not dict_series[title][TAG_ID]): - tvdb.find_and_set_infoLabels(item) - - # Trova l'ID della serie - while not item.infoLabels['tvdb_id']: - try: - item.show = platformtools.dialog_input(default=item.show, heading=config.get_localized_string(30112)) # <- Enter title to search - tvdb.find_and_set_infoLabels(item) - except: - heading = config.get_localized_string(70704) # <- TMDB ID (0 to cancel) - info = platformtools.dialog_numeric(0, heading) - item.infoLabels['tvdb_id'] = '0' if info == '' else info - - if item.infoLabels['tvdb_id']: - ID = item.infoLabels['tvdb_id'] - dict_renumerate = {TAG_ID: ID} - dict_series[title] = dict_renumerate - - itemlist = find_episodes(item) - for it in itemlist: - Title = re.sub(r'\d+x\d+ - ', '', it.title) - if modify == True: - ep = int(scrapertools.find_single_match(Title, r'(\d+)')) - if it.action == 'findvideos' and str(ep) not in EpisodeDict: - _list.append(Title) - else: - if it.action == 'findvideos': - _list.append(Title) - - count = 1 - preselect = platformtools.dialog_select(config.get_localized_string(70732),[typo(config.get_localized_string(70518),'bold'),typo(config.get_localized_string(70519),'bold')]) - selection = [] - if preselect == 0: - for i in _list: - selection.append(_list.index(i)) - while len(_list) > 0: - selected = platformtools.dialog_multiselect(config.get_localized_string(70734), _list, preselect=selection) - if selected == None: break - season = '' - while not season: - season = 
platformtools.dialog_numeric(0, config.get_localized_string(70733)) - count = int(platformtools.dialog_numeric(0, config.get_localized_string(70733).replace('Season','Episode').replace('della Stagione',"dell'Episodio"))) - - for select in selected: - ep = int(scrapertools.find_single_match(_list[select], r'(\d+)')) - if season == '0': - episode = '' - while not episode: - episode = platformtools.dialog_numeric(0, config.get_localized_string(70735) % _list[select] ) - EpisodeDict[str(ep)] = '%sx%s' %(season, episode.zfill(2)) - else: - EpisodeDict[str(ep)] = '%sx%s' %(season, str(count).zfill(2)) - count += 1 - - for select in reversed(selected): - del _list[select] - - - dict_series[title][TAG_TYPE] = 'manual' - EpisodeDict = base64.b64encode(json.dumps(EpisodeDict).encode()) - dict_series[title][TAG_EPISODE] = EpisodeDict.decode() - write(item, dict_series) - # xbmc.executebuiltin("Container.Refresh") - if modify == True: - return json.loads(base64.b64decode(EpisodeDict)) - - -def delete_renumeration(item): - logger.info() - if item.from_channel: item.channel = item.from_channel - title = item.fulltitle.rstrip() - - dict_series = load(item) - if title in dict_series: del dict_series[title] - write(item, dict_series) - - -def make_list(itemlist, item, typography, dict_series, ID, Season, Episode, Mode, title): - logger.info() - exist = True - item.infoLabels['tvdb_id'] = ID - tvdb.set_infoLabels_item(item) - FirstOfSeason= 0 - - EpisodeDict = json.loads(base64.b64decode(Episode)) if Episode else {} - Special = dict_series[title][TAG_SPECIAL] if TAG_SPECIAL in dict_series[title] else [] - EpList = json.loads(base64.b64decode(dict_series[title][TAG_EPLIST])) if TAG_EPLIST in dict_series[title] else [] - Pages = dict_series[title][TAG_CHECK] if TAG_CHECK in dict_series[title] else [1] - - # Ricava Informazioni da TVDB - checkpages = [] - check = True - Page = Pages[-1] - - while exist: - if check: - for page in Pages: - data = tvdb.otvdb_global.get_list_episodes(ID,page) - logger.info('DATA',data) - for episodes in data['data']: - if episodes['firstAired'] and [episodes['firstAired'], episodes['airedSeason'], episodes['airedEpisodeNumber']] not in EpList: - EpList.append([episodes['firstAired'], episodes['airedSeason'], episodes['airedEpisodeNumber']]) - else: - if page not in checkpages: - checkpages.append(page) - check = False - - data = tvdb.otvdb_global.get_list_episodes(ID,Page) - if data: - Page = Page + 1 - for episodes in data['data']: - if episodes['firstAired'] and [episodes['firstAired'], episodes['airedSeason'], episodes['airedEpisodeNumber']] not in EpList: - EpList.append([episodes['firstAired'], episodes['airedSeason'], episodes['airedEpisodeNumber']]) - else: - if page not in checkpages: - checkpages.append(Page -1) - exist = False - - EpList.sort() - - dict_series[title][TAG_CHECK] = checkpages - EpList = base64.b64encode(json.dumps(EpList).encode()) - dict_series[title][TAG_EPLIST] = EpList.decode() - write(item, dict_series) - - # Crea Dizionari per la numerazione - if EpList: - EpList = json.loads(base64.b64decode(dict_series[title][TAG_EPLIST])) - specials = [] - regular = {} - complete = {} - allep = 1 - ep = 1 - specialep = 0 - for episode in EpList: - complete[allep] = [str(episode[1]) + 'x' + str(episode[2]), episode[0]] - if episode[1] == 0: - specials.append(allep) - specialep = specialep + 1 - else: - regular[ep] = [str(episode[1]) + 'x' + str(episode[2]), str(episode[0]), allep - 1] - ep = ep + 1 - allep = allep + 1 - - # seleziona l'Episodio di partenza - if 
int(Season) > 1: - for numbers, data in regular.items(): - if data[0] == Season + 'x1': - FirstOfSeason = numbers - 1 - - if Mode == True: Special = specials - - addiction = 0 - for item in itemlist: - # Otiene Numerazione Episodi - scraped_ep = scrapertools.find_single_match(re.sub(r'\[[^\]]+\]','',item.title), r'\d+') - if scraped_ep: - episode = int(scraped_ep) - number = episode + FirstOfSeason - addiction - count = number + addiction - # Crea Dizionario Episodi - - if episode == 0: - EpisodeDict[str(episode)] = str(complete[regular[FirstOfSeason+1][2]][0]) - elif addiction < len(Special): - if episode in Special: - try: - season = complete[regular[count][2]][0] - EpisodeDict[str(episode)] = str(complete[regular[count][2]][0]) if season.startswith( '0' ) else '0x' + platformtools.dialog_numeric(0, item.title + '?', '') - - except: - EpisodeDict[str(episode)] = '0x' + platformtools.dialog_numeric(0, item.title + '?', '') - addiction = addiction + 1 - elif number <= len(regular): - EpisodeDict[str(episode)] = str(regular[number][0]) - else: - try: EpisodeDict[str(episode)] = str(complete[regular[number+2][2]][0]) - except: EpisodeDict[str(episode)] = '0x0' - elif number <= len(regular) and number in regular: - EpisodeDict[str(episode)] = str(regular[number][0]) - else: - try: EpisodeDict[str(episode)] = str(complete[regular[number+2][2]][0]) - except: EpisodeDict[str(episode)] = '0x0' - - # Aggiunge numerazione agli Episodi - - item.title = typo(EpisodeDict[str(episode)] + ' - ', typography) + item.title - - # Scrive Dizionario Episodi sul json - EpisodeDict = base64.b64encode(json.dumps(EpisodeDict).encode()) - dict_series[title][TAG_EPISODE] = EpisodeDict.decode() - write(item, dict_series) - - else: - heading = config.get_localized_string(70704) - ID = platformtools.dialog_numeric(0, heading) - dict_series[title][TAG_ID] = ID - write(item, dict_series) - if ID == '0': - return itemlist - else: - return make_list(itemlist, item, typography, dict_series, ID, Season, Episode, Mode, title) - - +PY3 = True if sys.version_info[0] >= 3 else False + +# Json Var +TVSHOW_RENUMERATE = "TVSHOW_AUTORENUMBER" +ID = "ID" +SEASON = "Season" +EPISODE = "Episode" +SPECIAL = "Special" +MODE = "Mode" +EPLIST = "EpList" +CHECK = "ReCheck" +SPLIST = "SpList" +TYPE = "Type" + +# helper Functions def check(item): - logger.info() + logger.debug() dict_series = load(item) title = item.fulltitle.rstrip() if title in dict_series: title = dict_series[title] - return True if TAG_ID in title and TAG_EPISODE in title else False - - -def error(itemlist): - message = config.get_localized_string(70713) - heading = itemlist[0].fulltitle.strip() - platformtools.dialog_notification(heading, message) - return itemlist - - -def find_episodes(item): - logger.info() - ch = __import__('channels.' 
+ item.channel, fromlist=["channels.%s" % item.channel]) - itemlist = ch.episodios(item) - return itemlist - - -def RepresentsInt(s): - # Controllo Numro Stagione - logger.info() - try: - int(s) - return True - except ValueError: - return False - - -def access(): - allow = False - - if config.is_xbmc(): - allow = True - - return allow - - -def context(exist): - if access(): - modify = config.get_localized_string(70714) if exist else '' - _context = [{"title": typo(modify + config.get_localized_string(70585), 'bold'), - "action": "select_type", - "channel": "autorenumber",}] - - return _context - - -def select_type(item): - select = platformtools.dialog_select(config.get_localized_string(70730),[typo(config.get_localized_string(70731),'bold'), typo(config.get_localized_string(70732),'bold'), typo(config.get_localized_string(707433),'bold')]) - if select == 0: semiautomatic_config_item(item) - elif select == 1: manual_renumeration(item) - elif select == 2: return delete_renumeration(item) - else: return - + return True if ID in title and EPISODE in title else False def filename(item): - logger.info() + logger.debug() name_file = item.channel + "_data.json" path = filetools.join(config.get_data_path(), "settings_channels") fname = filetools.join(path, name_file) - return fname def load(item): - logger.info() + logger.debug() try: json_file = open(filename(item), "r").read() - json = jsontools.load(json_file)[TAG_TVSHOW_RENUMERATE] + json = jsontools.load(json_file)[TVSHOW_RENUMERATE] except: json = {} @@ -562,10 +52,687 @@ def load(item): def write(item, json): - logger.info() + logger.debug() json_file = open(filename(item), "r").read() js = jsontools.load(json_file) - js[TAG_TVSHOW_RENUMERATE] = json + js[TVSHOW_RENUMERATE] = json with open(filename(item), "w") as file: file.write(jsontools.dump(js)) file.close() + +def b64(json, mode = 'encode'): + if PY3: json = bytes(json, 'ascii') + if mode == 'encode': + ret = base64.b64encode(json) + if PY3: ret = ret.decode() + else: + ret = jsontools.load(base64.b64decode(json)) + return ret + +def RepresentsInt(s): + # Controllo Numro Stagione + logger.debug() + try: + int(s) + return True + except ValueError: + return False + +def find_episodes(item): + logger.debug() + ch = __import__('channels.' 
+ item.channel, fromlist=["channels.%s" % item.channel]) + itemlist = ch.episodios(item) + return itemlist + +def busy(state): + if state: xbmc.executebuiltin('ActivateWindow(busydialognocancel)') + else: xbmc.executebuiltin('Dialog.Close(busydialognocancel)') + +# Main +def start(itemlist, item=None): + if type(itemlist) == Item: + item = itemlist + if item.channel in ['autorenumber']: + item.channel = item.from_channel + item.action = item.from_action + item.renumber = True + busy(True) + itemlist = find_episodes(item) + busy(False) + return autorenumber(itemlist, item) + +class autorenumber(): + def __init__(self, itemlist, item=None): + self.item = item + self.itemlist = itemlist + self.auto = False + self.dictSeries = load(self.itemlist[0]) if self.itemlist else load(item) if item else {} + self.Episodes = {} + self.sp = False + if self.item: + self.auto = config.get_setting('autorenumber', item.channel) + self.title = self.item.fulltitle.strip() + if match(self.itemlist[0].title, patron=r'[Ss]?(\d+)(?:x|_|\s+)[Ee]?[Pp]?(\d+)').match: + item.exit = True + return + elif self.item.channel in self.item.channel_prefs and TVSHOW_RENUMERATE in self.item.channel_prefs[item.channel] and self.title not in self.dictSeries: + from core.videolibrarytools import check_renumber_options + from specials.videolibrary import update_videolibrary + check_renumber_options(self.item) + update_videolibrary(self.item) + if self.title in self.dictSeries and ID in self.dictSeries[self.title] and self.dictSeries[self.title][ID] != '0': + self.id = self.dictSeries[self.title][ID] + self.Episodes = b64(self.dictSeries[self.title][EPISODE], 'decode') if EPISODE in self.dictSeries[self.title] else {} + self.Season = self.dictSeries[self.title][SEASON] + self.Mode = self.dictSeries[self.title].get(MODE, False) + self.Type = self.dictSeries[self.title].get(TYPE, False) + if self.item.renumber: + self.config() + else: + self.renumber() + elif self.auto or self.item.renumber: + self.Episodes = {} + self.config() + + else: + for item in self.itemlist: + item.context = [{"title": typo(config.get_localized_string(70585), 'bold'), + "action": "start", + "channel": "autorenumber", + "from_channel": item.channel, + "from_action": item.action}] + + def config(self): + self.id = '' + if self.title in self.dictSeries: + self.id = self.dictSeries[self.title].get(ID,'') + + # Pulizia del Titolo + if any( word in self.title.lower() for word in ['specials', 'speciali']): + self.title = re.sub(r'\s*specials|\s*speciali', '', self.title.lower()) + elif not self.item.infoLabels['tvdb_id']: + self.item.contentSerieName = self.title.rstrip('123456789 ') + + while not self.item.exit: + tvdb.find_and_set_infoLabels(self.item) + if self.item.infoLabels['tvdb_id']: self.item.exit = True + else: self.item = platformtools.dialog_info(self.item, 'tvdb') + + # Rinumerazione Automatica + if (not self.id and self.auto) or self.item.renumber: + self.id = self.item.infoLabels['tvdb_id'] if 'tvdb_id' in self.item.infoLabels else '' + if self.id: + self.dictRenumber = {ID: self.id} + self.dictSeries[self.title] = self.dictRenumber + if any(word in self.title.lower() for word in ['specials', 'speciali']): season = '0' + elif RepresentsInt(self.title.split()[-1]): season = self.title.split()[-1] + else: season = '1' + self.Season = self.dictRenumber[SEASON] = season + self.renumber() + + def renumber(self): + if not self.item.renumber and self.itemlist: + if '|' in self.Season: + season = int(self.Season.split('|')[0]) + addNumber = 
int(self.Season.split('|')[-1]) - 1 + else: + season = int(self.Season) + addNumber = 0 + for item in self.itemlist: + if not match(item.title, patron=r'[Ss]?(\d+)(?:x|_|\s+)[Ee]?[Pp]?(\d+)').match: + number = match(item.title, patron=r'(\d+)').match.lstrip('0') + if number: + if number in self.Episodes: + if season > 0: item.title = typo(self.Episodes[number] + ' - ', 'bold') + item.title + else: item.title = typo('0x%s - ' % str(int(number) + addNumber), 'bold') + item.title + else: + self.makelist() + if season > 0: item.title = typo(self.Episodes[number] + ' - ', 'bold') + item.title + else: item.title = typo('0x%s - ' % str(int(number) + addNumber), 'bold') + item.title + else: + self.makelist() + + + def makelist(self): + FirstOfSeason= 0 + self.EpList = b64(self.dictSeries[self.title][EPLIST], 'decode') if EPLIST in self.dictSeries[self.title] else [] + self.Pages = self.dictSeries[self.title].get(CHECK, [1]) + self.Mode = self.dictSeries[self.title].get(MODE, False) + self.Type = self.dictSeries[self.title].get(TYPE, False) + Specials = {} + Seasons = {} + + if '|' in self.Season: + ep = int(self.Season.split('|')[-1]) + season = int(self.Season.split('|')[0]) + else: + season = int(self.Season) + ep = 1 + + busy(True) + itemlist = find_episodes(self.item) + busy(False) + + if self.item.renumber: + self.s = season + self.e = 1 + Season, Episode, self.Mode, Specials, Seasons, Exit = SelectreNumeration(self, itemlist) + if Exit: return + if ep != 1: self.Season = '%s|%s' % (Season, Episode) + else: self.Season = str(Season) + + elif self.Episodes and not self.Mode: + self.s = season + self.e = ep + self.sp = True + Season, Episode, self.Mode, Specials, Seasons, Exit = SelectreNumeration(self, itemlist) + + if self.Mode: + if not Seasons: + self.s = 1 + self.e = 1 + Season, Episode, self.Mode, Specials, Seasons, Exit = SelectreNumeration(self, itemlist, True) + self.Episodes = Seasons + + else: + # Ricava Informazioni da TVDB + checkpages = [] + exist = True + Page = self.Pages[-1] + Episode = ep + + while exist: + data = tvdb.Tvdb(tvdb_id=self.id).get_list_episodes(self.id, Page) + if data: + for episode in data['data']: + if episode['firstAired'] and [episode['firstAired'], episode['airedSeason'], episode['airedEpisodeNumber']] not in self.EpList: + self.EpList.append([episode['firstAired'], episode['airedSeason'], episode['airedEpisodeNumber']]) + Page += 1 + else: + if Page not in checkpages: + checkpages.append(Page -1) + exist = False + self.Pages = [checkpages[-1]] + self.EpList.sort() + + # Crea Dizionari per la Rinumerazione + if self.EpList: + self.specials = [] + self.regular = {} + self.complete = {} + allep = 1 + specialep = 0 + + for episode in self.EpList: + self.complete[allep] = [str(episode[1]) + 'x' + str(episode[2]), episode[0]] + if episode[1] == 0: + self.specials.append(allep) + specialep = specialep + 1 + else: + self.regular[ep] = [str(episode[1]) + 'x' + str(episode[2]), str(episode[0]), allep - 1] + ep = ep + 1 + allep = allep + 1 + + if season > 1: + for numbers, data in self.regular.items(): + if data[0] == str(season) + 'x1': + FirstOfSeason = numbers - 1 + else: FirstOfSeason = Episode - 1 + + addiction = 0 + for item in itemlist: + if not match(re.sub(r'\[[^\]]+\]','',item.title), patron=r'[Ss]?(\d+)(?:x|_|\s+)[Ee]?[Pp]?(\d+)').match: + # Otiene Numerazione Episodi + scraped_ep = match(re.sub(r'\[[^\]]+\]','',item.title), patron=r'(\d+)').match + if scraped_ep: + episode = int(scraped_ep) + number = episode + FirstOfSeason - addiction + if episode == 0: 
+ self.Episodes[str(episode)] = str(self.complete[self.regular[FirstOfSeason+1][2]][0]) + elif episode in Specials: + self.Episodes[str(episode)] = Specials[episode] + addiction += 1 + elif number <= len(self.regular) and number in self.regular: + self.Episodes[str(episode)] = str(self.regular[number][0]) + else: + try: self.Episodes[str(episode)] = str(self.complete[self.regular[number+2][2]][0]) + except: self.Episodes[str(episode)] = '0x0' + + if self.Episodes: self.dictSeries[self.title][EPISODE] = b64(jsontools.dump(self.Episodes)) + self.dictSeries[self.title][EPLIST] = b64(jsontools.dump(self.EpList)) + self.dictSeries[self.title][MODE] = self.Mode + self.dictSeries[self.title][SEASON] = self.Season + self.dictSeries[self.title][CHECK] = self.Pages + write(self.item, self.dictSeries) + + if self.auto: self.renumber() + + +def SelectreNumeration(opt, itemlist, manual=False): + opt.itemlist = itemlist + opt.manual = manual + return SelectreNumerationWindow('Renumber.xml', path).start(opt) + +# Select Season +SELECT = 100 +S = 101 +E = 102 +O = 103 +SS = 104 +M = 105 +D = 106 +C = 107 + +# Main +MAIN = 10000 +INFO = 10001 +OK=10002 +CLOSE = 10003 + +# Select Specials +SPECIALS = 200 +POSTER= 201 +LIST = 202 +SELECTED = 203 +BACKGROUND = 208 + +SPECIALCOMMANDS = 204 +SU = 205 +SD = 206 +SR = 207 + +# Select Manual +MANUAL = 300 +MPOSTER= 301 +MLIST = 302 +MSEASONS = 303 +MSEP = 304 +MBACKGROUND = 310 + +MANUALEP = 305 +MS = 306 +ME = 307 +MSS = 308 +MC = 309 + +# Actions +LEFT = 1 +RIGHT = 2 +UP = 3 +DOWN = 4 +EXIT = 10 +BACKSPACE = 92 + +path = config.get_runtime_path() + +class SelectreNumerationWindow(xbmcgui.WindowXMLDialog): + def start(self, opt): + self.episodes = opt.Episodes if opt.Episodes else {} + self.dictSeries = opt.dictSeries + self.item = opt.item + self.title = opt.title + self.season = opt.s + self.episode = opt.e + self.mode = opt.Mode + self.sp = opt.sp + self.manual = opt.manual + self.offset = 0 + self.Exit = False + + self.itemlist = opt.itemlist + self.count = 1 + self.specials = {} + self.items = [] + self.selected = [] + self.seasons = {} + + self.doModal() + return self.season, self.episode, self.mode, self.specials, self.seasons, self.Exit + + def onInit(self): + # Compatibility with Kodi 18 + if config.get_platform(True)['num_version'] < 18: self.setCoordinateResolution(2) + fanart = self.item.fanart + thumb = self.item.thumbnail + self.getControl(SELECT).setVisible(False) + self.getControl(SPECIALS).setVisible(False) + self.getControl(MANUAL).setVisible(False) + # MANUAL + if self.manual: + self.getControl(MANUAL).setVisible(True) + self.getControl(MPOSTER).setImage(thumb) + if fanart: self.getControl(MBACKGROUND).setImage(fanart) + self.getControl(INFO).setLabel(typo(config.get_localized_string(70822) + self.title,'bold')) + + self.mode = True + + se = '1' + ep = '1' + position = 0 + for i, item in enumerate(self.itemlist): + title = match(item.title, patron=r'(\d+)').match.lstrip('0') + it = xbmcgui.ListItem(title) + if int(title) <= len(self.episodes): + se, ep = self.episodes[title].split('x') + else: + if position == 0: position = i + ep = str(int(ep) + 1) + it.setProperties({'season':se, "episode":ep}) + self.items.append(it) + self.makerenumber() + self.addseasons() + season = self.getControl(MSEASONS).getSelectedItem().getLabel() + self.getControl(MSEP).reset() + self.getControl(MSEP).addItems(self.episodes[season]) + self.getControl(MLIST).addItems(self.items) + self.setFocusId(MLIST) + self.getControl(MLIST).selectItem(position) + # MAIN / SPECIALS + 
else: + for item in self.itemlist: + if not match(item.title, patron=r'[Ss]?(\d+)(?:x|_|\s+)[Ee]?[Pp]?(\d+)').match: + title = match(item.title, patron=r'(\d+)').match.lstrip('0') + it = xbmcgui.ListItem(title) + self.items.append(it) + + self.getControl(POSTER).setImage(thumb) + self.getControl(MPOSTER).setImage(thumb) + if fanart: + self.getControl(BACKGROUND).setImage(fanart) + self.getControl(MBACKGROUND).setImage(fanart) + self.getControl(INFO).setLabel(typo(config.get_localized_string(70824) + self.title, 'bold')) + self.getControl(LIST).addItems(self.items) + + if self.sp: + self.getControl(SPECIALS).setVisible(True) + self.setFocusId(OK) + else: + self.getControl(SELECT).setVisible(True) + + self.getControl(S).setLabel(str(self.season)) + self.getControl(E).setLabel(str(self.episode)) + + self.setFocusId(O) + + def onFocus(self, focus): + if focus in [S]: self.getControl(108).setLabel(typo(config.get_localized_string(70825), 'bold')) + elif focus in [E]: self.getControl(108).setLabel(typo(config.get_localized_string(70826), 'bold')) + elif focus in [O]: self.getControl(108).setLabel(typo(config.get_localized_string(70001), 'bold')) + elif focus in [SS]: self.getControl(108).setLabel(typo(config.get_localized_string(70827), 'bold')) + elif focus in [M]: self.getControl(108).setLabel(typo(config.get_localized_string(70828), 'bold')) + elif focus in [D]: self.getControl(108).setLabel(typo(config.get_localized_string(70829) + self.title, 'bold')) + elif focus in [C]: self.getControl(108).setLabel(typo(config.get_localized_string(70002), 'bold')) + + + def onAction(self, action): + action = action.getId() + focus = self.getFocusId() + # SEASON SELECT + if 100 < focus < 200: + s = int(self.getControl(S).getLabel()) + e = int(self.getControl(E).getLabel()) + if action in [RIGHT]: + if focus in [C]: self.setFocusId(S) + else: self.setFocusId(focus + 1) + elif action in [LEFT]: + if focus in [S]: self.setFocusId(C) + else: self.setFocusId(focus - 1) + elif action in [UP]: + if focus in [S]: + s += 1 + self.getControl(S).setLabel(str(s)) + elif focus in [E]: + e += 1 + self.getControl(E).setLabel(str(e)) + elif action in [DOWN]: + if focus in [S]: + if s > 0: s -= 1 + self.getControl(S).setLabel(str(s)) + elif focus in [E]: + if e > 0: e -= 1 + self.getControl(E).setLabel(str(e)) + # MANUAL + if focus in [MS, ME]: + s = int(self.getControl(MLIST).getSelectedItem().getProperty('season')) + e = int(self.getControl(MLIST).getSelectedItem().getProperty('episode')) + pos = self.getControl(MLIST).getSelectedPosition() + # Set Season + if focus in [MS] and action in [UP]: s += 1 + elif focus in [MS] and action in [DOWN] and s > 0: s -= 1 + # Set Episode + if focus in [ME] and action in [UP]: e += 1 + elif focus in [ME] and action in [DOWN] and e > 0: e -= 1 + if action in [UP, DOWN]: + if s != self.season: e = 1 + self.season = s + self.episode = e + self.makerenumber(pos) + self.addseasons() + season = self.getControl(MSEASONS).getSelectedItem().getLabel() + self.getControl(MSEP).reset() + self.getControl(MSEP).addItems(self.episodes[season]) + self.getControl(MLIST).reset() + self.getControl(MLIST).addItems(self.items) + self.getControl(MLIST).selectItem(pos) + if focus in [MSEASONS]: + season = self.getControl(MSEASONS).getSelectedItem().getLabel() + self.getControl(MSEP).reset() + self.getControl(MSEP).addItems(self.episodes[season]) + + # EXIT + if action in [EXIT, BACKSPACE]: + self.Exit = True + self.close() + + def onClick(self, control_id): + ## FIRST SECTION + if control_id in [S]: + 
selected = platformtools.dialog_numeric(0, config.get_localized_string(70825), self.getControl(S).getLabel()) + if selected: s = self.getControl(S).setLabel(selected) + elif control_id in [E]: + selected = platformtools.dialog_numeric(0, config.get_localized_string(70826), self.getControl(E).getLabel()) + if selected: e = self.getControl(E).setLabel(selected) + # OPEN SPECIALS OR OK + if control_id in [O, SS]: + s = self.getControl(S).getLabel() + e = self.getControl(E).getLabel() + self.season = int(s) + self.episode = int(e) + if control_id in [O]: + self.close() + elif control_id in [SS]: + self.getControl(SELECT).setVisible(False) + self.getControl(SPECIALS).setVisible(True) + self.setFocusId(OK) + # OPEN MANUAL + elif control_id in [M]: + self.getControl(INFO).setLabel(typo(config.get_localized_string(70823) + self.title, 'bold')) + self.mode = True + if self.episodes: + items = [] + se = '1' + ep = '1' + for item in self.items: + if int(item.getLabel()) <= len(self.episodes) - 1: + se, ep = self.episodes[item.getLabel()].split('x') + else: + ep = str(int(ep) + 1) + item.setProperties({'season':se, "episode":ep}) + items.append(item) + self.seasons[item.getLabel()] = '%sx%s' %(se, ep) + self.items = items + else: + self.makerenumber() + self.addseasons() + season = self.getControl(MSEASONS).getSelectedItem().getLabel() + self.getControl(MSEP).reset() + self.getControl(MSEP).addItems(self.episodes[season]) + self.getControl(MLIST).addItems(self.items) + self.getControl(SELECT).setVisible(False) + self.getControl(MANUAL).setVisible(True) + self.setFocusId(OK) + # CLOSE + elif control_id in [C]: + self.Exit = True + self.close() + # DELETE + if control_id in [D]: + self.Exit = True + self.dictSeries.pop(self.title) + write(self.item, self.dictSeries) + self.close() + + ## SPECIAL SECTION + # ADD TO SPECIALS + p1 = self.getControl(SELECTED).getSelectedPosition() + if control_id in [LIST]: + item = self.getControl(LIST).getSelectedItem() + it = xbmcgui.ListItem(str(len(self.selected) + 1)) + it.setProperty('title', item.getLabel()) + self.selected.append(it) + index = self.getControl(SELECTED).getSelectedPosition() + self.getControl(SELECTED).reset() + self.getControl(SELECTED).addItems(self.selected) + self.getControl(SELECTED).selectItem(index) + + index = self.getControl(LIST).getSelectedPosition() + self.items.pop(index) + self.getControl(LIST).reset() + self.getControl(LIST).addItems(self.items) + if index == len(self.items): index -= 1 + self.getControl(LIST).selectItem(index) + # MOVE SPECIALS + elif control_id in [SU]: + p2 = p1 - 1 + if p2 > -1: + self.selected[p1], self.selected[p2] = self.selected[p2], self.selected[p1] + for i, it in enumerate(self.selected): + it.setLabel(str(i+1)) + break + self.getControl(SELECTED).reset() + self.getControl(SELECTED).addItems(self.selected) + self.getControl(SELECTED).selectItem(p2) + + elif control_id in [SD]: + p2 = p1 + 1 + if p2 < len(self.selected): + self.selected[p1], self.selected[p2] = self.selected[p2], self.selected[p1] + for i, it in enumerate(self.selected): + it.setLabel(str(i+1)) + break + self.getControl(SELECTED).reset() + self.getControl(SELECTED).addItems(self.selected) + self.getControl(SELECTED).selectItem(p2) + # REMOVE FROM SPECIALS + elif control_id in [SR]: + item = self.getControl(SELECTED).getSelectedItem() + it = xbmcgui.ListItem(item.getProperty('title')) + if int(item.getProperty('title')) < int(self.items[-1].getLabel()): + for i, itm in enumerate(self.items): + if int(itm.getLabel()) > 
int(item.getProperty('title')): + self.items.insert(i, it) + break + else: + self.items.append(it) + self.getControl(LIST).reset() + self.getControl(LIST).addItems(self.items) + index = self.getControl(SELECTED).getSelectedPosition() + self.selected.pop(index) + self.getControl(SELECTED).reset() + self.getControl(SELECTED).addItems(self.selected) + + if index == len(self.selected): index -= 1 + self.getControl(SELECTED).selectItem(index) + # RELOAD SPECIALS + if control_id in [SELECTED]: + epnumber = platformtools.dialog_numeric(0, config.get_localized_string(60386)) + it = self.getControl(SELECTED).getSelectedItem() + it.setLabel(str(epnumber)) + self.selected.sort(key=lambda it: int(it.getLabel())) + for i, it in enumerate(self.selected): + if it.getLabel() == epnumber: pos = i + self.selected.sort(key=lambda it: int(it.getLabel())) + self.getControl(SELECTED).reset() + self.getControl(SELECTED).addItems(self.selected) + self.getControl(SELECTED).selectItem(pos) + break + if len(self.selected) > 0: self.getControl(SPECIALCOMMANDS).setVisible(True) + else: self.getControl(SPECIALCOMMANDS).setVisible(False) + + ## MANUAL SECTION + # SELECT SEASON EPISODE (MANUAL) + if control_id in [MS, ME]: + s = int(self.getControl(MLIST).getSelectedItem().getProperty('season')) + e = int(self.getControl(MLIST).getSelectedItem().getProperty('episode')) + pos = self.getControl(MLIST).getSelectedPosition() + if control_id in [MS]: + selected = platformtools.dialog_numeric(0, config.get_localized_string(70825), str(s)) + if selected: s = int(selected) + elif control_id in [ME]: + selected = platformtools.dialog_numeric(0, config.get_localized_string(70826), str(e)) + if selected: e = int(selected) + if s != self.season or e != self.episode: + self.season = s + self.episode = 1 if s != self.season else e + self.makerenumber(pos) + self.addseasons() + season = self.getControl(MSEASONS).getSelectedItem().getLabel() + self.getControl(MSEP).reset() + self.getControl(MSEP).addItems(self.episodes[season]) + self.getControl(MLIST).reset() + self.getControl(MLIST).addItems(self.items) + self.getControl(MLIST).selectItem(pos) + # OK + if control_id in [OK]: + for it in self.selected: + self.specials[int(it.getProperty('title'))] = '0x' + it.getLabel() + self.close() + # CLOSE + elif control_id in [CLOSE]: + self.Exit = True + self.close() + + + def makerenumber(self, pos = 0): + items = [] + currentSeason = self.items[pos].getProperty('season') + previousSeason = self.items[pos - 1 if pos > 0 else 0].getProperty('season') + prevEpisode = self.items[pos - 1 if pos > 0 else 0].getProperty('episode') + if currentSeason != str(self.season): + if str(self.season) == previousSeason: + prevEpisode = int(prevEpisode) + 1 + else: + prevEpisode = 1 + else: prevEpisode = self.episode + + for i, item in enumerate(self.items): + if (i >= pos and item.getProperty('season') == currentSeason) or not item.getProperty('season'): + if i > pos: prevEpisode += 1 + item.setProperties({'season':self.season, 'episode':prevEpisode}) + items.append(item) + self.seasons[item.getLabel()] = '%sx%s' % (item.getProperty('season'), item.getProperty('episode')) + self.items = items + logger.debug('SELF',self.seasons) + + def addseasons(self): + seasonlist = [] + seasons = [] + self.episodes = {} + for ep, value in self.seasons.items(): + season = value.split('x')[0] + if season not in seasonlist: + item = xbmcgui.ListItem(season) + seasonlist.append(season) + seasons.append(item) + if season in seasonlist: + if season not in self.episodes: + 
self.episodes[season] = [] + item = xbmcgui.ListItem('%s - Ep. %s' % (value, ep)) + item.setProperty('episode', ep) + self.episodes[season].append(item) + logger.log('EPISODES',self.episodes[season]) + self.episodes[season].sort(key=lambda it: int(it.getProperty('episode'))) + + seasons.sort(key=lambda it: int(it.getLabel())) + self.getControl(MSEASONS).reset() + self.getControl(MSEASONS).addItems(seasons) \ No newline at end of file diff --git a/platformcode/checkhost.py b/platformcode/checkhost.py index 60df57fd..213ee23f 100644 --- a/platformcode/checkhost.py +++ b/platformcode/checkhost.py @@ -2,7 +2,6 @@ import xbmc, xbmcgui import xbmcaddon -import json from platformcode import config, logger import requests import sys @@ -236,63 +235,3 @@ def test_conn(is_exit, check_dns, view_msg, # else: # return False -# def for creating the channels.json file -def check_channels(inutile=''): - """ - I read the channel hosts from the channels.json file, I check them, - I write the channels-test.json file with the error code and the new url in case of redirect - - urls MUST have http (s) - - During the urls check the ip, asdl and dns checks are carried out. - This is because it can happen that at any time the connection may have problems. If it does, check it - relative writing of the file is interrupted with a warning message - """ - logger.info() - - folderJson = xbmc.translatePath(xbmcaddon.Addon().getAddonInfo('path')).decode('utf-8') - fileJson = 'channels.json' - - with open(folderJson+'/'+fileJson) as f: - data = json.load(f) - - risultato = {} - - for chann, host in sorted(data['direct'].items()): - - ris = [] - # to get an idea of the timing - # useful only if you control all channels - # for channels with error 522 about 40 seconds are lost ... - logger.info("check #### INIZIO #### channel - host :%s - %s " % (chann, host)) - - rslt = Kdicc(lst_urls = [host]).http_Resp() - - # all right - if rslt['code'] == 200: - risultato[chann] = host - # redirect - elif str(rslt['code']).startswith('3'): - # risultato[chann] = str(rslt['code']) +' - '+ rslt['redirect'][:-1] - if rslt['redirect'].endswith('/'): - rslt['redirect'] = rslt['redirect'][:-1] - risultato[chann] = rslt['redirect'] - # non-existent site - elif rslt['code'] == -2: - risultato[chann] = 'Host Sconosciuto - '+ str(rslt['code']) +' - '+ host - # site not reachable - probable dns not set - elif rslt['code'] == 111: - risultato[chann] = ['Host non raggiungibile - '+ str(rslt['code']) +' - '+ host] - else: - # other types of errors - # risultato[chann] = 'Errore Sconosciuto - '+str(rslt['code']) +' - '+ host - risultato[chann] = host - - logger.info("check #### FINE #### rslt :%s " % (rslt)) - - risultato = {'findhost': data['findhost'], 'direct': risultato} - fileJson_test = 'channels-test.json' - # I write the updated file - with open(folderJson+'/'+fileJson_test, 'w') as f: - data = json.dump(risultato, f, sort_keys=True, indent=4) - logger.info(data) diff --git a/platformcode/globalsearch.py b/platformcode/globalsearch.py new file mode 100644 index 00000000..f18826c1 --- /dev/null +++ b/platformcode/globalsearch.py @@ -0,0 +1,604 @@ +# -*- coding: utf-8 -*- + +import xbmc, xbmcgui, sys, channelselector, time +from core.support import dbg, typo, tmdb +from core.item import Item +from core import channeltools, servertools, scrapertools +from platformcode import platformtools, config, logger +from platformcode.launcher import run +from threading import Thread + +if sys.version_info[0] >= 3: from concurrent import futures +else: from 
concurrent_py2 import futures + +info_language = ["de", "en", "es", "fr", "it", "pt"] # from videolibrary.json +def_lang = info_language[config.get_setting("info_language", "videolibrary")] + + +def busy(state): + if state: xbmc.executebuiltin('ActivateWindow(busydialognocancel)') + else: xbmc.executebuiltin('Dialog.Close(busydialognocancel)') + +def set_workers(): + workers = config.get_setting('thread_number') if config.get_setting('thread_number') > 0 else None + return workers + +def Search(item): + xbmc.executebuiltin('Dialog.Close(all,true)') + SearchWindow('GlobalSearch.xml', config.get_runtime_path()).start(item) + xbmc.sleep(600) + +# Actions +LEFT = 1 +RIGHT = 2 +UP = 3 +DOWN = 4 +EXIT = 10 +BACKSPACE = 92 +SWIPEUP = 531 +CONTEXT = 117 + +# Container +SEARCH = 1 +EPISODES = 2 +SERVERS = 3 +NORESULTS = 4 +LOADING = 5 + +# Search +MAINTITLE = 100 +CHANNELS = 101 +RESULTS = 102 + +PROGRESS = 500 +COUNT = 501 +MENU = 502 +BACK = 503 +CLOSE = 504 +QUALITYTAG = 505 + +# Servers +EPISODESLIST = 200 +SERVERLIST = 300 + +class SearchWindow(xbmcgui.WindowXML): + def start(self, item): + logger.debug() + self.exit = False + self.item = item + self.lastSearch() + if not self.item.text: return + self.type = self.item.mode + self.channels = [] + self.find = [] + self.persons = [] + self.episodes = [] + self.servers = [] + self.results = {} + self.channelsList = self.get_channels() + self.focus = SEARCH + self.process = True + self.page = 1 + self.moduleDict = {} + self.searchActions = [] + self.thread = None + self.doModal() + + def lastSearch(self): + logger.debug() + if not self.item.text: + if config.get_setting('last_search'): last_search = channeltools.get_channel_setting('Last_searched', 'search', '') + else: last_search = '' + if not self.item.text: self.item.text = platformtools.dialog_input(default=last_search, heading='') + if self.item.text: channeltools.set_channel_setting('Last_searched', self.item.text, 'search') + + def select(self): + logger.debug() + self.PROGRESS.setVisible(False) + items = [] + if self.persons: + tmdb_info = tmdb.discovery(self.item, dict_=self.item.discovery) + results = tmdb_info.results.get('cast',[]) + else: + tmdb_info = tmdb.Tmdb(texto_buscado=self.item.text, tipo=self.item.mode.replace('show', '')) + results = tmdb_info.results + + for result in results: + logger.info(result) + result = tmdb_info.get_infoLabels(result, origen=result) + movie = result.get('title','') + tvshow = result.get('name','') + title = tvshow if tvshow else movie + result['mode'] = 'tvshow' if tvshow else 'movie' + self.find.append(result) + thumb = 'Infoplus/' + result['mode'].replace('show','') + '.png' + it = xbmcgui.ListItem(title) + it.setProperty('thumb', result.get('thumbnail', thumb)) + it.setProperty('fanart', result.get('fanart','')) + it.setProperty('plot', result.get('overview', '')) + it.setProperty('search','search') + year = result.get('release_date','') + if year: it.setProperty('year','[' + year.split('/')[-1] + ']') + else: + year = result.get('first_air_date','') + if year: it.setProperty('year','[' + year.split('-')[0] + ']') + items.append(it) + + if items: + self.RESULTS.reset() + self.RESULTS.addItems(items) + self.setFocusId(RESULTS) + else: + self.NORESULTS.setVisible(True) + + def actors(self): + logger.debug() + self.PROGRESS.setVisible(False) + items = [] + + dict_ = {'url': 'search/person', 'language': def_lang, 'query': self.item.text, 'page':self.page} + prof = {'Acting': 'Actor', 'Directing': 'Director', 'Production': 'Productor'} + plot = '' + 
self.item.search_type = 'person' + tmdb_inf = tmdb.discovery(self.item, dict_=dict_) + results = tmdb_inf.results + + for elem in results: + name = elem.get('name', '') + if not name: continue + rol = elem.get('known_for_department', '') + rol = prof.get(rol, rol) + know_for = elem.get('known_for', '') + cast_id = elem.get('id', '') + if know_for: + t_k = know_for[0].get('title', '') + if t_k: plot = '%s in %s' % (rol, t_k) + + t = elem.get('profile_path', '') + if t: thumb = 'https://image.tmdb.org/t/p/original' + t + else : thumb = 'Infoplus/no_photo.png' + + discovery = {'url': 'person/%s/combined_credits' % cast_id, 'page': '1', 'sort_by': 'primary_release_date.desc', 'language': def_lang} + self.persons.append(discovery) + it = xbmcgui.ListItem(name) + it.setProperty('thumb', thumb) + it.setProperty('plot', plot) + it.setProperty('search','persons') + items.append(it) + if len(results) > 19: + it = xbmcgui.ListItem(config.get_localized_string(70006)) + it.setProperty('thumb', 'Infoplus/next_focus.png') + it.setProperty('search','next') + items.append(it) + if self.page > 1: + it = xbmcgui.ListItem(config.get_localized_string(70005)) + it.setProperty('thumb', 'Infoplus/previous_focus.png') + it.setProperty('search','previous') + items.insert(0, it) + + if items: + self.RESULTS.reset() + self.RESULTS.addItems(items) + self.setFocusId(RESULTS) + else: + self.NORESULTS.setVisible(True) + + def get_channels(self): + logger.debug() + channels_list = [] + all_channels = channelselector.filterchannels('all') + + for ch in all_channels: + channel = ch.channel + ch_param = channeltools.get_channel_parameters(channel) + if not ch_param.get("active", False): + continue + list_cat = ch_param.get("categories", []) + + if not ch_param.get("include_in_global_search", False): + continue + + if 'anime' in list_cat: + n = list_cat.index('anime') + list_cat[n] = 'tvshow' + + if self.item.mode == 'all' or self.item.type in list_cat: + if config.get_setting("include_in_global_search", channel) and ch_param.get("active", False): + channels_list.append(channel) + + logger.debug('search in channels:',channels_list) + + return channels_list + + def getModule(self, channel): + logger.debug() + try: + module = __import__('channels.%s' % channel, fromlist=["channels.%s" % channel]) + mainlist = getattr(module, 'mainlist')(Item(channel=channel, global_search=True)) + action = [elem for elem in mainlist if elem.action == "search" and (self.item.mode == 'all' or elem.contentType in [self.item.mode, 'undefined'])] + return module, action + except: + import traceback + logger.error('error importing/getting search items of ' + channel) + logger.error(traceback.format_exc()) + return None, None + + def search(self): + count = 0 + self.count = 0 + self.LOADING.setVisible(True) + with futures.ThreadPoolExecutor() as executor: + for channel in self.channelsList: + if self.exit: break + module, action = executor.submit(self.getModule, channel).result() + if module and action: + self.moduleDict[channel] = module + self.searchActions += action + count += 1 + percent = (float(count) / len(self.channelsList)) * 100 + self.PROGRESS.setPercent(percent) + self.COUNT.setText('%s/%s' % (count, len(self.channelsList))) + + with futures.ThreadPoolExecutor(max_workers=set_workers()) as executor: + for searchAction in self.searchActions: + if self.exit: break + executor.submit(self.get_channel_results, self.item, self.moduleDict, searchAction) + + def get_channel_results(self, item, module_dict, search_action): + logger.debug() + 
channel = search_action.channel + results = [] + valid = [] + other = [] + module = module_dict[channel] + searched_id = item.infoLabels['tmdb_id'] + + try: + results.extend(module.search(search_action, item.text)) + if len(results) == 1: + if not results[0].action or config.get_localized_string(70006).lower() in results[0].title.lower(): + results = [] + + if self.item.mode != 'all': + for elem in results: + if not elem.infoLabels.get('year', ""): + elem.infoLabels['year'] = '-' + tmdb.set_infoLabels_item(elem) + if elem.infoLabels['tmdb_id'] == searched_id: + elem.from_channel = channel + elem.verified ='ok.png' + valid.append(elem) + else: + other.append(elem) + except: + pass + + self.count += 1 + if self.item.mode == 'all': self.update(channel, results) + else: self.update(channel, valid + other) + + def makeItem(self, item): + logger.debug() + thumb = item.thumbnail if item.thumbnail else 'Infoplus/' + item.contentType.replace('show','') + '.png' + logger.info('THUMB', thumb) + it = xbmcgui.ListItem(item.title) + it.setProperty('year', '[' + str(item.year if item.year else item.infoLabels.get('year','')) + ']') + it.setProperty('thumb', thumb) + it.setProperty('fanart', item.fanart) + it.setProperty('plot', item.plot) + it.setProperty('verified', item.verified) + if item.server: + color = scrapertools.find_single_match(item.alive, r'(FF[^\]]+)') + it.setProperty('channel', channeltools.get_channel_parameters(item.channel).get('title','')) + it.setProperty('thumb', "https://raw.githubusercontent.com/kodiondemand/media/master/resources/servers/%s.png" % item.server.lower()) + it.setProperty('servername', servertools.get_server_parameters(item.server.lower()).get('name',item.server)) + it.setProperty('color', color if color else 'FF0082C2') + return it + + def update(self, channel, results): + logger.debug('Search on channel', channel) + if results: + channelParams = channeltools.get_channel_parameters(channel) + name = channelParams['title'] + if name not in self.results: + self.results[name] = [results, len(self.channels)] + item = xbmcgui.ListItem(name) + item.setProperty('thumb', channelParams['thumbnail']) + item.setProperty('position', '0') + item.setProperty('results', str(len(results))) + item.setProperty('verified', results[0].verified) + self.channels.append(item) + else: + self.results[name].append([results, len(self.channels)]) + self.channels[int(self.results[name][1])].setProperty('results', str(len(results))) + pos = self.CHANNELS.getSelectedPosition() + self.CHANNELS.reset() + self.CHANNELS.addItems(self.channels) + self.CHANNELS.selectItem(pos) + if len(self.channels) == 1: + self.setFocusId(CHANNELS) + items = [] + for result in self.results[name][0]: + items.append(self.makeItem(result)) + self.RESULTS.reset() + self.RESULTS.addItems(items) + percent = (float(self.count) / len(self.searchActions)) * 100 + self.LOADING.setVisible(False) + self.PROGRESS.setPercent(percent) + self.COUNT.setText('%s/%s [%s"]' % (self.count, len(self.searchActions), int(time.time() - self.time) )) + if percent == 100 and not self.results: + self.PROGRESS.setVisible(False) + self.NORESULTS.setVisible(True) + + def onInit(self): + self.time = time.time() + + # collect controls + self.CHANNELS = self.getControl(CHANNELS) + self.RESULTS = self.getControl(RESULTS) + self.PROGRESS = self.getControl(PROGRESS) + self.COUNT = self.getControl(COUNT) + self.MAINTITLE = self.getControl(MAINTITLE) + self.MAINTITLE.setText(typo(config.get_localized_string(30993).replace('...','') % '"%s"' % 
self.item.text, 'bold')) + self.SEARCH = self.getControl(SEARCH) + self.EPISODES = self.getControl(EPISODES) + self.EPISODESLIST = self.getControl(EPISODESLIST) + self.SERVERS = self.getControl(SERVERS) + self.SERVERLIST = self.getControl(SERVERLIST) + self.NORESULTS = self.getControl(NORESULTS) + self.NORESULTS.setVisible(False) + self.LOADING = self.getControl(LOADING) + self.LOADING.setVisible(False) + + self.Focus(self.focus) + + if self.type: + self.type = None + if self.item.mode in ['all', 'search']: + if self.item.type: self.item.mode = self.item.type + self.thread = Thread(target=self.search) + self.thread.start() + elif self.item.mode in ['movie', 'tvshow']: + self.select() + elif self.item.mode in ['person']: + self.actors() + + def Focus(self, focusid): + if focusid in [SEARCH]: + self.focus = CHANNELS + self.SEARCH.setVisible(True) + self.EPISODES.setVisible(False) + self.SERVERS.setVisible(False) + if focusid in [EPISODES]: + self.focus = focusid + self.SEARCH.setVisible(False) + self.EPISODES.setVisible(True) + self.SERVERS.setVisible(False) + if focusid in [SERVERS]: + self.focus = SERVERLIST + self.SEARCH.setVisible(False) + self.EPISODES.setVisible(False) + self.SERVERS.setVisible(True) + + def onAction(self, action): + action = action.getId() + focus = self.getFocusId() + if action in [CONTEXT] and focus in [RESULTS, EPISODESLIST, SERVERLIST]: + self.context() + + elif action in [SWIPEUP] and self.CHANNELS.isVisible() : + self.setFocusId(CHANNELS) + pos = self.CHANNELS.getSelectedPosition() + self.CHANNELS.selectItem(pos) + + elif action in [LEFT, RIGHT] and focus in [CHANNELS] and self.CHANNELS.isVisible(): + items = [] + name = self.CHANNELS.getSelectedItem().getLabel() + subpos = int(self.CHANNELS.getSelectedItem().getProperty('position')) + for result in self.results[name][0]: + items.append(self.makeItem(result)) + self.RESULTS.reset() + self.RESULTS.addItems(items) + self.RESULTS.selectItem(subpos) + + elif action in [DOWN] and focus in [BACK, CLOSE, MENU]: + if self.SERVERS.isVisible(): self.setFocusId(SERVERLIST) + elif self.EPISODES.isVisible(): self.setFocusId(EPISODESLIST) + else: self.setFocusId(RESULTS) + + elif focus in [RESULTS] and self.item.mode == 'all': + pos = self.RESULTS.getSelectedPosition() + self.CHANNELS.getSelectedItem().setProperty('position', str(pos)) + + if action in [BACKSPACE]: + self.Back() + + elif action in [EXIT]: + self.Close() + + def onClick(self, control_id): + if self.RESULTS.getSelectedItem(): search = self.RESULTS.getSelectedItem().getProperty('search') + else: search = None + if control_id in [CHANNELS]: + items = [] + name = self.CHANNELS.getSelectedItem().getLabel() + subpos = int(self.CHANNELS.getSelectedItem().getProperty('position')) + for result in self.results[name][0]: + items.append(self.makeItem(result)) + self.RESULTS.reset() + self.RESULTS.addItems(items) + self.RESULTS.selectItem(subpos) + self.CHANNELS.getSelectedItem().setProperty('position', str(subpos)) + # self.setFocusId(RESULTS) + + elif control_id in [BACK]: + self.Back() + + elif control_id in [CLOSE]: + self.Close() + + elif control_id in [MENU]: + self.context() + + elif search: + pos = self.RESULTS.getSelectedPosition() + if search == 'next': + self.page += 1 + self.actors() + elif search == 'previous': + self.page -= 1 + self.actors() + elif search == 'persons': + self.item.discovery = self.persons[pos] + self.select() + else: + result = self.find[pos] + name = self.RESULTS.getSelectedItem().getLabel() + item = Item(mode='search', type=result['mode'], 
contentType=result['mode'], infoLabels=result, selected = True, text=name) + if self.item.mode == 'movie': item.contentTitle = self.RESULTS.getSelectedItem().getLabel() + else: item.contentSerieName = self.RESULTS.getSelectedItem().getLabel() + return Search(item) + + elif control_id in [RESULTS, EPISODESLIST]: + busy(True) + if control_id in [RESULTS]: + name = self.CHANNELS.getSelectedItem().getLabel() + self.pos = self.RESULTS.getSelectedPosition() + item = self.results[name][0][self.pos] + else: + self.pos = self.EPISODESLIST.getSelectedPosition() + item = self.episodes[self.pos] + try: + self.channel = __import__('channels.%s' % item.channel, fromlist=["channels.%s" % item.channel]) + self.itemsResult = getattr(self.channel, item.action)(item) + except: + import traceback + logger.error('error importing/getting search items of ' + item.channel) + logger.error(traceback.format_exc()) + self.itemsResult = [] + + if self.itemsResult and self.itemsResult[0].action in ['play', '']: + + if config.get_setting('checklinks') and not config.get_setting('autoplay'): + self.itemsResult = servertools.check_list_links(self.itemsResult, config.get_setting('checklinks_number')) + self.servers = self.itemsResult + self.itemsResult = [] + uhd = [] + fhd = [] + hd = [] + sd = [] + unknown = [] + for i, item in enumerate(self.servers): + if item.server: + it = self.makeItem(item) + it.setProperty('index', str(i)) + if item.quality in ['4k', '2160p', '2160', '4k2160p', '4k2160', '4k 2160p', '4k 2160', '2k']: + it.setProperty('quality', 'uhd.png') + uhd.append(it) + elif item.quality in ['fullhd', 'fullhd 1080', 'fullhd 1080p', 'full hd', 'full hd 1080', 'full hd 1080p', 'hd1080', 'hd1080p', 'hd 1080', 'hd 1080p', '1080', '1080p']: + it.setProperty('quality', 'Fhd.png') + fhd.append(it) + elif item.quality in ['hd', 'hd720', 'hd720p', 'hd 720', 'hd 720p', '720', '720p', 'hdtv']: + it.setProperty('quality', 'hd.png') + hd.append(it) + elif item.quality in ['sd', '480p', '480', '360p', '360', '240p', '240']: + it.setProperty('quality', 'sd.png') + sd.append(it) + else: + it.setProperty('quality', '') + unknown.append(it) + elif not item.action: + self.getControl(QUALITYTAG).setText(item.fulltitle) + + + uhd.sort(key=lambda it: it.getProperty('index')) + fhd.sort(key=lambda it: it.getProperty('index')) + hd.sort(key=lambda it: it.getProperty('index')) + sd.sort(key=lambda it: it.getProperty('index')) + unknown.sort(key=lambda it: it.getProperty('index')) + + serverlist = uhd + fhd + hd + sd + unknown + + self.Focus(SERVERS) + self.SERVERLIST.reset() + self.SERVERLIST.addItems(serverlist) + self.setFocusId(SERVERLIST) + + else: + self.episodes = self.itemsResult + self.itemsResult = [] + episodes = [] + for item in self.episodes: + if item.action == 'findvideos': + it = xbmcgui.ListItem(item.title) + episodes.append(it) + + self.Focus(EPISODES) + self.EPISODESLIST.reset() + self.EPISODESLIST.addItems(episodes) + self.setFocusId(EPISODESLIST) + + busy(False) + + elif control_id in [SERVERLIST]: + index = int(self.getControl(control_id).getSelectedItem().getProperty('index')) + server = self.servers[index] + server.player_mode = 0 + run(server) + + def Back(self): + self.getControl(QUALITYTAG).setText('') + if self.SERVERS.isVisible(): + if self.episodes: + self.Focus(EPISODES) + self.setFocusId(EPISODESLIST) + else: + self.Focus(SEARCH) + self.setFocusId(RESULTS) + self.RESULTS.selectItem(self.pos) + elif self.EPISODES.isVisible(): + self.episodes = [] + self.Focus(SEARCH) + self.setFocusId(RESULTS) + 
self.RESULTS.selectItem(self.pos) + elif self.item.mode in ['person'] and self.find: + self.find = [] + self.actors() + else: + self.Close() + + def Close(self): + self.exit = True + if self.thread: + busy(True) + while self.thread.is_alive(): xbmc.sleep(200) + busy(False) + self.close() + + def context(self): + pos = self.RESULTS.getSelectedPosition() + name = self.CHANNELS.getSelectedItem().getLabel() + item = self.results[name][0][pos] + context = [config.get_localized_string(70739), config.get_localized_string(70557), config.get_localized_string(60359)] + context_commands = ["RunPlugin(%s?%s)" % (sys.argv[0], 'action=open_browser&url=' + item.url), + "RunPlugin(%s?%s&%s)" % (sys.argv[0], item.tourl(), 'channel=kodfavorites&action=addFavourite&from_channel=' + item.channel + '&from_action=' + item.action), + "RunPlugin(%s?%s)" % (sys.argv[0], 'channel=trailertools&action=buscartrailer&contextual=True&search_title=' + (item.contentTitle if item.contentTitle else item.fulltitle))] + if item.contentType == 'movie': + context += [config.get_localized_string(60353), config.get_localized_string(60354)] + context_commands += ["RunPlugin(%s?%s&%s)" % (sys.argv[0], item.tourl(), 'action=add_pelicula_to_library&from_action=' + item.action), + "RunPlugin(%s?%s&%s)" % (sys.argv[0], item.tourl(), 'channel=downloads&action=save_download&from_channel=' + item.channel + '&from_action=' +item.action)] + + else: + context += [config.get_localized_string(60352), config.get_localized_string(60355), config.get_localized_string(60357)] + context_commands += ["RunPlugin(%s?%s&%s)" % (sys.argv[0], item.tourl(), 'action=add_serie_to_library&from_action=' + item.action), + "RunPlugin(%s?%s&%s)" % (sys.argv[0], item.tourl(), 'channel=downloads&action=save_download&from_channel=' + item.channel + '&from_action=' + item.action), + "RunPlugin(%s?%s&%s)" % (sys.argv[0], item.tourl(), 'channel=downloads&action=save_download&download=season&from_channel=' + item.channel +'&from_action=' + item.action)] + + if self.EPISODES.isVisible() or self.SERVERS.isVisible(): + pos = self.EPISODESLIST.getSelectedPosition() + item = self.episodes[pos] + context += [config.get_localized_string(60356)] + context_commands += ["RunPlugin(%s?%s&%s)" % (sys.argv[0], item.tourl(), 'channel=downloads&action=save_download&from_channel=' + item.channel + '&from_action=' +item.action)] + + index = xbmcgui.Dialog().contextmenu(context) + if index >= 0: xbmc.executebuiltin(context_commands[index]) \ No newline at end of file diff --git a/platformcode/infoplus.py b/platformcode/infoplus.py index faec9e0a..a1894697 100644 --- a/platformcode/infoplus.py +++ b/platformcode/infoplus.py @@ -191,7 +191,8 @@ class SearchWindow(xbmcgui.WindowXMLDialog): self.getControl(NUMBER).setText(support.typo(config.get_localized_string(70362),'uppercase bold')) else: it = xbmcgui.ListItem(item.infoLabels['title']) - it.setProperty('channel', channeltools.get_channel_parameters(item.channel).get('title','')) + it.setProperty('channelname', channeltools.get_channel_parameters(item.channel).get('title','')) + it.setProperty('channel', item.channel) it.setProperty('action', item.action) it.setProperty('server', servertools.get_server_parameters(item.server.lower()).get('name',item.server)) it.setProperty('url', item.url) @@ -213,7 +214,6 @@ class SearchWindow(xbmcgui.WindowXMLDialog): self.commands.append(itemlist[0].clone(channel='downloads', action='save_download', from_channel=itemlist[0].channel, from_action=itemlist[0].action, thumbnail=support.thumb('downloads'))) 
else: self.commands.append(Info.clone(channel='downloads', action='save_download', from_channel=Info.channel, from_action=Info.action, thumbnail=support.thumb('downloads'))) - if self.commands: commands = [] for command in self.commands: @@ -248,7 +248,8 @@ class SearchWindow(xbmcgui.WindowXMLDialog): if action == 'play': item.server = self.getControl(RECOMANDED).getSelectedItem().getProperty('server') self.close() - platformtools.play_video(item) + from platformcode.launcher import run + run(item) xbmc.sleep(500) while xbmc.Player().isPlaying(): xbmc.sleep(500) diff --git a/platformcode/launcher.py b/platformcode/launcher.py index 4d950d4b..efe5cc19 100644 --- a/platformcode/launcher.py +++ b/platformcode/launcher.py @@ -19,7 +19,7 @@ def start(): Within this function all calls should go to functions that we want to execute as soon as we open the plugin. """ - logger.info() + logger.debug() # config.set_setting('show_once', True) # Test if all the required directories are created config.verify_directories_created() @@ -37,7 +37,7 @@ def start(): updater.showSavedChangelog() def run(item=None): - logger.info() + logger.debug() if not item: # Extract item from sys.argv if sys.argv[2]: @@ -94,7 +94,7 @@ def run(item=None): # If item has no action, stops here if item.action == "": - logger.info("Item without action") + logger.debug("Item without action") return # Action for main menu in channelselector @@ -135,6 +135,12 @@ def run(item=None): from platformcode import infoplus return infoplus.Main(item) + elif config.get_setting('new_search') and item.channel == "search" and item.action == 'new_search': + from platformcode.globalsearch import Search + item.contextual = True + Search(item) + return + elif item.channel == "backup": from platformcode import backup return getattr(backup, item.action)(item) @@ -187,7 +193,7 @@ def run(item=None): channel_file = os.path.join(config.get_runtime_path(), CHANNELS, item.channel + ".py") - logger.info("channel_file= " + channel_file + ' - ' + CHANNELS + ' - ' + item.channel) + logger.debug("channel_file= " + channel_file + ' - ' + CHANNELS + ' - ' + item.channel) channel = None @@ -207,12 +213,12 @@ def run(item=None): trakt_tools.set_trakt_info(item) except: pass - logger.info("item.action=%s" % item.action.upper()) + logger.debug("item.action=%s" % item.action.upper()) # logger.debug("item_toPlay: " + "\n" + item.tostring('\n')) # First checks if channel has a "play" function if hasattr(channel, 'play'): - logger.info("Executing channel 'play' method") + logger.debug("Executing channel 'play' method") itemlist = channel.play(item) b_favourite = item.isFavourite # Play should return a list of playable URLS @@ -233,7 +239,7 @@ def run(item=None): # If player don't have a "play" function, not uses the standard play from platformtools else: - logger.info("Executing core 'play' method") + logger.debug("Executing core 'play' method") platformtools.play_video(item) # Special action for findvideos, where the plugin looks for known urls @@ -246,8 +252,7 @@ def run(item=None): # If not, uses the generic findvideos function else: - logger.info("No channel 'findvideos' method, " - "executing core method") + logger.debug("No channel 'findvideos' method, " "executing core method") itemlist = servertools.find_video_items(item) if config.get_setting("max_links", "videolibrary") != 0: @@ -291,7 +296,7 @@ def run(item=None): else: filetools.remove(temp_search_file) - logger.info("item.action=%s" % item.action.upper()) + logger.debug("item.action=%s" % 
item.action.upper()) from core import channeltools if config.get_setting('last_search'): @@ -312,7 +317,7 @@ def run(item=None): # For all other actions else: # import web_pdb; web_pdb.set_trace() - logger.info("Executing channel '%s' method" % item.action) + logger.debug("Executing channel '%s' method" % item.action) itemlist = getattr(channel, item.action)(item) if config.get_setting('trakt_sync'): from core import trakt_tools @@ -336,13 +341,10 @@ def run(item=None): logger.error(traceback.format_exc()) - patron = 'File "' + os.path.join(config.get_runtime_path(), "channels", "").replace("\\", "\\\\") + r'([^.]+)\.py"' - Channel = scrapertools.find_single_match(traceback.format_exc(), patron) - platformtools.dialog_ok( - config.get_localized_string(59985) + Channel, - config.get_localized_string(60013) %(e)) - except: + config.get_localized_string(59985) % e.channel, + config.get_localized_string(60013) % e.url) + except Exception as e: import traceback from core import scrapertools @@ -351,26 +353,15 @@ def run(item=None): patron = 'File "' + os.path.join(config.get_runtime_path(), "channels", "").replace("\\", "\\\\") + r'([^.]+)\.py"' Channel = scrapertools.find_single_match(traceback.format_exc(), patron) - try: - import xbmc - if config.get_platform(True)['num_version'] < 14: - log_name = "xbmc.log" - else: - log_name = "kodi.log" - log_message = config.get_localized_string(50004) + xbmc.translatePath("special://logpath") + log_name - except: - log_message = "" - - if Channel: + if Channel or e.__class__ == logger.ChannelScraperException: if item.url: - if platformtools.dialog_yesno(config.get_localized_string(60087) % Channel, config.get_localized_string(60014) + '\n' + log_message, nolabel='ok', yeslabel=config.get_localized_string(70739)): + if platformtools.dialog_yesno(config.get_localized_string(60087) % Channel, config.get_localized_string(60014), nolabel='ok', yeslabel=config.get_localized_string(70739)): run(Item(action="open_browser", url=item.url)) else: - platformtools.dialog_ok(config.get_localized_string(60087) % Channel, config.get_localized_string(60014) + '\n' + log_message) + platformtools.dialog_ok(config.get_localized_string(60087) % Channel, config.get_localized_string(60014)) else: - platformtools.dialog_ok( - config.get_localized_string(60038), - config.get_localized_string(60015) + '\n' + log_message) + if platformtools.dialog_yesno(config.get_localized_string(60038), config.get_localized_string(60015)): + run(Item(channel="setting", action="report_menu")) def new_search(item, channel=None): @@ -393,7 +384,7 @@ def set_search_temp(item): filetools.write(temp_search_file, f) def reorder_itemlist(itemlist): - logger.info() + logger.debug() # logger.debug("Inlet itemlist size: %i" % len(itemlist)) new_list = [] @@ -431,7 +422,7 @@ def reorder_itemlist(itemlist): new_list.extend(mod_list) new_list.extend(not_mod_list) - logger.info("Modified Titles:%i |Unmodified:%i" % (modified, not_modified)) + logger.debug("Modified Titles:%i |Unmodified:%i" % (modified, not_modified)) if len(new_list) == 0: new_list = itemlist @@ -441,7 +432,7 @@ def reorder_itemlist(itemlist): def limit_itemlist(itemlist): - logger.info() + logger.debug() # logger.debug("Inlet itemlist size: %i" % len(itemlist)) try: @@ -474,7 +465,7 @@ def play_from_library(item): itemlist=[] item.fromLibrary = True - logger.info() + logger.debug() # logger.debug("item: \n" + item.tostring('\n')) # Try to reproduce an image (this does nothing and also does not give an error) diff --git 
a/platformcode/logger.py b/platformcode/logger.py index e312617a..263bee7a 100644 --- a/platformcode/logger.py +++ b/platformcode/logger.py @@ -3,7 +3,7 @@ # Logger (kodi) # -------------------------------------------------------------------------------- from __future__ import unicode_literals -import inspect, os, xbmc, sys +import inspect,os, xbmc, sys from platformcode import config # for test suite @@ -53,5 +53,11 @@ def log(*args, **kwargs): class WebErrorException(Exception): - def __init__(self, *args, **kwargs): + def __init__(self, url, channel, *args, **kwargs): + self.url = url + self.channel = channel Exception.__init__(self, *args, **kwargs) + + +class ChannelScraperException(Exception): + pass \ No newline at end of file diff --git a/platformcode/platformtools.py b/platformcode/platformtools.py index fa820aa7..42a3aec4 100644 --- a/platformcode/platformtools.py +++ b/platformcode/platformtools.py @@ -178,6 +178,60 @@ def dialog_register(heading, user=False, email=False, password=False, user_defau dialog = Register('Register.xml', config.get_runtime_path()).Start(heading, user, email, password, user_default, email_default, password_default, captcha_img) return dialog +def dialog_info(item, scraper): + class TitleOrIDWindow(xbmcgui.WindowXMLDialog): + def Start(self, item, scraper): + self.item = item + self.item.exit = False + self.title = item.show if item.show else item.fulltitle + self.id = item.infoLabels.get('tmdb_id', '') if scraper == 'tmdb' else item.infoLabels.get('tvdb_id', '') + self.scraper = scraper + self.idtitle = 'TMDB ID' if scraper == 'tmdb' else 'TVDB ID' + self.doModal() + return self.item + + def onInit(self): + #### Kodi 18 compatibility #### + if config.get_platform(True)['num_version'] < 18: + self.setCoordinateResolution(2) + self.HEADER = self.getControl(100) + self.TITLE = self.getControl(101) + self.ID = self.getControl(102) + self.EXIT = self.getControl(103) + self.EXIT2 = self.getControl(104) + + self.HEADER.setText(config.get_localized_string(60228) % self.title) + self.TITLE.setLabel('[UPPERCASE]' + config.get_localized_string(60230).replace(':','') + '[/UPPERCASE]') + self.ID.setLabel(self.idtitle) + self.setFocusId(101) + + def onClick(self, control): + if control in [101]: + result = dialog_input(self.title) + if result: + if self.item.contentType == 'movie': self.item.contentTitle = result + else: self.item.contentSerieName = result + self.close() + elif control in [102]: + result = dialog_numeric(0, self.idtitle, self.id) + if result: + if self.scraper == 'tmdb': self.item.infoLabels['tmdb_id'] = result + elif self.scraper == 'tvdb': self.item.infoLabels['tvdb_id'] = result + self.close() + + elif control in [103, 104]: + self.item.exit = True + self.close() + + def onAction(self, action): + action = action.getId() + if action in [92, 10]: + self.item.exit = True + self.close() + + dialog = TitleOrIDWindow('TitleOrIDWindow.xml', config.get_runtime_path()).Start(item, scraper) + return dialog + def itemlist_refresh(): # pos = Item().fromurl(xbmc.getInfoLabel('ListItem.FileNameAndPath')).itemlistPosition @@ -202,7 +256,7 @@ def render_items(itemlist, parent_item): """ Function used to render itemlist on kodi """ - logger.info('START render_items') + logger.debug('START render_items') thumb_type = config.get_setting('video_thumbnail_type') from platformcode import shortcuts # from core import httptools @@ -287,7 +341,7 @@ def render_items(itemlist, parent_item): set_view_mode(itemlist[0], parent_item) xbmcplugin.endOfDirectory(_handle) - 
logger.info('END render_items') + logger.debug('END render_items') def getCurrentView(item=None, parent_item=None): @@ -344,11 +398,11 @@ def set_view_mode(item, parent_item): if content: mode = int(config.get_setting('view_mode_%s' % content).split(',')[-1]) if mode == 0: - logger.info('default mode') + logger.debug('default mode') mode = 55 xbmcplugin.setContent(handle=int(sys.argv[1]), content=Type) xbmc.executebuiltin('Container.SetViewMode(%s)' % mode) - logger.info('TYPE: ' + Type + ' - ' + 'CONTENT: ' + content) + logger.debug('TYPE: ' + Type + ' - ' + 'CONTENT: ' + content) def set_infolabels(listitem, item, player=False): @@ -568,10 +622,10 @@ def is_playing(): def play_video(item, strm=False, force_direct=False, autoplay=False): - logger.info() + logger.debug() logger.debug(item.tostring('\n')) if item.channel == 'downloads': - logger.info("Play local video: %s [%s]" % (item.title, item.url)) + logger.debug("Play local video: %s [%s]" % (item.title, item.url)) xlistitem = xbmcgui.ListItem(path=item.url) xlistitem.setArt({"thumb": item.thumbnail}) set_infolabels(xlistitem, item, True) @@ -579,18 +633,22 @@ def play_video(item, strm=False, force_direct=False, autoplay=False): return default_action = config.get_setting("default_action") - logger.info("default_action=%s" % default_action) + logger.debug("default_action=%s" % default_action) + + # pass referer + from core import httptools + httptools.default_headers['Referer'] = item.referer # Open the selection dialog to see the available options opciones, video_urls, seleccion, salir = get_dialogo_opciones(item, default_action, strm, autoplay) - if salir: return + if salir: exit() # get default option of addon configuration seleccion = get_seleccion(default_action, opciones, seleccion, video_urls) - if seleccion < 0: return # Canceled box + if seleccion < 0: exit() # Canceled box - logger.info("selection=%d" % seleccion) - logger.info("selection=%s" % opciones[seleccion]) + logger.debug("selection=%d" % seleccion) + logger.debug("selection=%s" % opciones[seleccion]) # run the available option, jdwonloader, download, favorites, add to the video library ... 
IF IT IS NOT PLAY salir = set_opcion(item, seleccion, opciones, video_urls) @@ -751,7 +809,7 @@ def alert_unsopported_server(): def handle_wait(time_to_wait, title, text): - logger.info("handle_wait(time_to_wait=%d)" % time_to_wait) + logger.debug("handle_wait(time_to_wait=%d)" % time_to_wait) espera = dialog_progress(' ' + title, "") secs = 0 @@ -770,15 +828,15 @@ def handle_wait(time_to_wait, title, text): break if cancelled: - logger.info('Wait canceled') + logger.debug('Wait canceled') return False else: - logger.info('Wait finished') + logger.debug('Wait finished') return True def get_dialogo_opciones(item, default_action, strm, autoplay): - logger.info() + logger.debug() # logger.debug(item.tostring('\n')) from core import servertools @@ -827,10 +885,6 @@ def get_dialogo_opciones(item, default_action, strm, autoplay): # "Add to Favorites" opciones.append(config.get_localized_string(30155)) - if not strm and item.contentType == 'movie' and item.channel != 'videolibrary': - # "Add to video library" - opciones.append(config.get_localized_string(30161)) - if default_action == 3: seleccion = len(opciones) - 1 @@ -844,13 +898,14 @@ def get_dialogo_opciones(item, default_action, strm, autoplay): if not autoplay: if item.server != "": if "<br/>" in motivo: - ret = dialog_yesno(config.get_localized_string(60362), motivo.split("<br/>")[0] + '\n' + motivo.split("<br/>")[1] + '\n' + item.url, nolabel='ok', yeslabel=config.get_localized_string(70739)) + ret = dialog_yesno(config.get_localized_string(60362) % item.server, motivo.split("<br/>")[0] + '\n' + motivo.split("<br/>")[1], nolabel='ok', yeslabel=config.get_localized_string(70739)) else: - ret = dialog_yesno(config.get_localized_string(60362), motivo + '\n' + item.url, nolabel='ok', yeslabel=config.get_localized_string(70739)) + ret = dialog_yesno(config.get_localized_string(60362) % item.server, motivo, nolabel='ok', yeslabel=config.get_localized_string(70739)) else: - ret = dialog_yesno(config.get_localized_string(60362), config.get_localized_string(60363) + '\n' + config.get_localized_string(60364) + '\n' + item.url, nolabel='ok', yeslabel=config.get_localized_string(70739)) + ret = dialog_yesno(config.get_localized_string(60362) % item.server, config.get_localized_string(60363) + '\n' + config.get_localized_string(60364), nolabel='ok', yeslabel=config.get_localized_string(70739)) if ret: - xbmc.executebuiltin("Container.Update (%s?%s)" % (sys.argv[0], Item(action="open_browser", url=item.url).tourl())) + xbmc.executebuiltin("Container.Update (%s?%s)" % + (sys.argv[0], Item(action="open_browser", url=item.url).tourl())) if item.channel == "favorites": # "Remove from favorites" opciones.append(config.get_localized_string(30154)) @@ -862,7 +917,7 @@ def get_dialogo_opciones(item, default_action, strm, autoplay): def set_opcion(item, seleccion, opciones, video_urls): - logger.info() + logger.debug() # logger.debug(item.tostring('\n')) salir = False # You have not chosen anything, most likely because you have given the ESC @@ -912,7 +967,7 @@ def set_opcion(item, seleccion, opciones, video_urls): def get_video_seleccionado(item, seleccion, video_urls): - logger.info() + logger.debug() mediaurl = "" view = False wait_time = 0 @@ -938,7 +993,7 @@ def get_video_seleccionado(item, seleccion, video_urls): mpd = True # If there is no mediaurl it is because the video is not there :) - logger.info("mediaurl=" + mediaurl) + logger.debug("mediaurl=" + mediaurl) if mediaurl == "": if item.server == "unknown": alert_unsopported_server() @@ -955,7 
+1010,7 @@ def get_video_seleccionado(item, seleccion, video_urls): def set_player(item, xlistitem, mediaurl, view, strm, nfo_path=None, head_nfo=None, item_nfo=None): - logger.info() + logger.debug() # logger.debug("item:\n" + item.tostring('\n')) # Moved del conector "torrent" here if item.server == "torrent": @@ -969,7 +1024,10 @@ def set_player(item, xlistitem, mediaurl, view, strm, nfo_path=None, head_nfo=No xbmc_player.setSubtitles(item.subtitle) else: - player_mode = config.get_setting("player_mode") + if type(item.player_mode) == int: + player_mode = item.player_mode + else: + player_mode = config.get_setting("player_mode") if (player_mode == 3 and mediaurl.startswith("rtmp")) or item.play_from == 'window' or item.nfo: player_mode = 0 elif "megacrypter.com" in mediaurl: player_mode = 3 logger.info("mediaurl=" + mediaurl) @@ -989,9 +1047,10 @@ def set_player(item, xlistitem, mediaurl, view, strm, nfo_path=None, head_nfo=No elif player_mode == 1: logger.info('Player Mode: setResolvedUrl') - xlistitem.setPath(mediaurl) - xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, xlistitem) - xbmc.sleep(2500) + # xlistitem.setPath(mediaurl) + par = int(sys.argv[1]) + xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, xbmcgui.ListItem(path=mediaurl)) + # xbmc.sleep(2500) elif player_mode == 2: logger.info('Player Mode: Built-In') @@ -1038,7 +1097,7 @@ def torrent_client_installed(show_tuple=False): def play_torrent(item, xlistitem, mediaurl): - logger.info() + logger.debug() import time from servers import torrent @@ -1074,7 +1133,7 @@ def play_torrent(item, xlistitem, mediaurl): torrent.mark_auto_as_watched(item) - while is_playing() and not xbmc.abortRequested: + while is_playing() and not xbmc.Monitor().abortRequested(): time.sleep(3) diff --git a/platformcode/recaptcha.py b/platformcode/recaptcha.py index c4422143..d7265d43 100644 --- a/platformcode/recaptcha.py +++ b/platformcode/recaptcha.py @@ -65,7 +65,7 @@ class Recaptcha(xbmcgui.WindowXMLDialog): data = httptools.downloadpage(self.url, post=post, headers=self.headers).data from platformcode import logger - logger.info(data) + logger.debug(data) self.result = scrapertools.find_single_match(data, '<div class="fbc-verification-token">.*?>([^<]+)<') if self.result: platformtools.dialog_notification("Captcha corretto", "Verifica conclusa") diff --git a/platformcode/shortcuts.py b/platformcode/shortcuts.py index d5effcf2..e2b6eb80 100644 --- a/platformcode/shortcuts.py +++ b/platformcode/shortcuts.py @@ -126,7 +126,7 @@ def SettingOnPosition(item): xbmc.executebuiltin('Addon.OpenSettings(plugin.video.kod)') category = item.category if item.category else 0 setting = item.setting if item.setting else 0 - logger.info('SETTING= ' + str(setting)) + logger.debug('SETTING= ' + str(setting)) xbmc.executebuiltin('SetFocus(%i)' % (category - 100)) xbmc.executebuiltin('SetFocus(%i)' % (setting - 80)) diff --git a/platformcode/side_menu.py b/platformcode/side_menu.py index bef22c2a..fe46c936 100644 --- a/platformcode/side_menu.py +++ b/platformcode/side_menu.py @@ -43,7 +43,7 @@ def set_menu_settings(item): jsontools.update_node(menu_node, 'menu_settings_data.json', "menu") def check_user_home(item): - logger.info() + logger.debug() if os.path.exists(menu_settings_path): menu_node = jsontools.get_node_from_file('menu_settings_data.json', 'menu') if 'user_home' in menu_node: @@ -55,7 +55,7 @@ def check_user_home(item): return item def set_custom_start(item): - logger.info() + logger.debug() if os.path.exists(menu_settings_path): menu_node = 
jsontools.get_node_from_file('menu_settings_data.json', 'menu') else: @@ -69,7 +69,7 @@ def set_custom_start(item): jsontools.update_node(menu_node, 'menu_settings_data.json', "menu") def get_start_page(): - logger.info() + logger.debug() dictCategory = { config.get_localized_string(70137): 'peliculas', @@ -355,7 +355,7 @@ class Main(xbmcgui.WindowXMLDialog): self.focus -= 1 def run_action(self, item): - logger.info() + logger.debug() if item.menu != True: self.close() xbmc.executebuiltin("Container.update(%s)"%launcher.run(item)) diff --git a/platformcode/subtitletools.py b/platformcode/subtitletools.py index 9a90bd1f..2442eca1 100644 --- a/platformcode/subtitletools.py +++ b/platformcode/subtitletools.py @@ -84,7 +84,7 @@ def regex_tvshow(compare, file, sub=""): def set_Subtitle(): - logger.info() + logger.debug() exts = [".srt", ".sub", ".txt", ".smi", ".ssa", ".ass"] subtitle_folder_path = filetools.join(config.get_data_path(), "subtitles") @@ -216,7 +216,7 @@ def searchSubtitle(item): filetools.mkdir(full_path_tvshow) # title_new + ".mp4" full_path_video_new = xbmc.translatePath( filetools.join(full_path_tvshow, "%s %sx%s.mp4" % (tvshow_title, season, episode))) - logger.info(full_path_video_new) + logger.debug(full_path_video_new) listitem = xbmcgui.ListItem(title_new, iconImage="DefaultVideo.png", thumbnailImage="") listitem.setInfo("video", {"Title": title_new, "Genre": "Tv shows", "episode": int(episode), "season": int(season), "tvshowtitle": tvshow_title}) @@ -230,7 +230,7 @@ def searchSubtitle(item): try: filetools.copy(path_video_temp, full_path_video_new) copy = True - logger.info("nuevo path =" + full_path_video_new) + logger.debug("nuevo path =" + full_path_video_new) time.sleep(2) playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO) playlist.clear() @@ -288,7 +288,7 @@ def get_from_subdivx(sub_url): :return: The path to the unzipped subtitle """ - logger.info() + logger.debug() sub = '' sub_dir = os.path.join(config.get_data_path(), 'temp_subs') @@ -312,9 +312,9 @@ def get_from_subdivx(sub_url): filetools.write(filename, data_dl) sub = extract_file_online(sub_dir, filename) except: - logger.info('sub invalid') + logger.debug('sub invalid') else: - logger.info('sub invalid') + logger.debug('sub invalid') return sub @@ -328,7 +328,7 @@ def extract_file_online(path, filename): :return: """ - logger.info() + logger.debug() url = "http://online.b1.org/rest/online/upload" diff --git a/platformcode/xbmc_info_window.py b/platformcode/xbmc_info_window.py index a4452cd3..ac588928 100644 --- a/platformcode/xbmc_info_window.py +++ b/platformcode/xbmc_info_window.py @@ -1,14 +1,21 @@ # -*- coding: utf-8 -*- -import xbmcgui +import xbmcgui, sys from core.tmdb import Tmdb from platformcode import config, logger from core import filetools +if sys.version_info[0] >= 3: + from concurrent import futures +else: + from concurrent_py2 import futures BACKGROUND = 30000 LOADING = 30001 SELECT = 30002 +CLOSE = 30003 +EXIT = 10 +BACKSPACE = 92 def imagepath(image): if len(image.split('.')) == 1: image += '.png' @@ -25,23 +32,30 @@ class InfoWindow(xbmcgui.WindowXMLDialog): self.scraper = scraper self.doModal() - logger.info('RESPONSE',self.response) + logger.debug('RESPONSE',self.response) return self.response + def make_items(self, i, result): + infoLabels = self.scraper().get_infoLabels(origen=result) + it = xbmcgui.ListItem(infoLabels['title']) + it.setProperty('fanart', infoLabels.get('fanart', '')) + it.setProperty('thumbnail', infoLabels.get('thumbnail', imagepath('movie' if infoLabels['mediatype'] 
== 'movie' else 'tv'))) + it.setProperty('genre', infoLabels.get('genre', 'N/A')) + it.setProperty('rating', str(infoLabels.get('rating', 'N/A'))) + it.setProperty('plot', str(infoLabels.get('plot', ''))) + it.setProperty('year', str(infoLabels.get('year', ''))) + it.setProperty('position', str(i)) + return it + def onInit(self): if config.get_platform(True)['num_version'] < 18: self.setCoordinateResolution(2) - - for result in self.results: - infoLabels = self.scraper().get_infoLabels(origen=result) - it = xbmcgui.ListItem(infoLabels['title']) - it.setProperty('fanart', infoLabels.get('fanart', '')) - it.setProperty('thumbnail', infoLabels.get('thumbnail', imagepath('movie' if infoLabels['mediatype'] == 'movie' else 'tv'))) - it.setProperty('genre', infoLabels.get('genre', 'N/A')) - it.setProperty('rating', str(infoLabels.get('rating', 'N/A'))) - it.setProperty('plot', str(infoLabels.get('plot', ''))) - it.setProperty('year', str(infoLabels.get('year', ''))) - self.items.append(it) + with futures.ThreadPoolExecutor() as executor: + for i, result in enumerate(self.results): + logger.debug(result) + if ('seriesName' in result and result['seriesName']) or ('name' in result and result['name']) or ('title' in result and result['title']): + self.items += [executor.submit(self.make_items, i, result).result()] + self.items.sort(key=lambda it: int(it.getProperty('position'))) self.getControl(SELECT).addItems(self.items) self.getControl(BACKGROUND).setImage(self.items[0].getProperty('fanart')) @@ -51,5 +65,16 @@ class InfoWindow(xbmcgui.WindowXMLDialog): def onClick(self, control_id): if control_id == SELECT: self.response = self.results[self.getControl(SELECT).getSelectedPosition()] - self.close() + self.close() + elif control_id == CLOSE: + self.close() + + def onAction(self, action): + if self.getFocusId() in [SELECT]: + fanart = self.getControl(self.getFocusId()).getSelectedItem().getProperty('fanart') + self.getControl(BACKGROUND).setImage(fanart) + if action in [BACKSPACE]: + self.close() + elif action in [EXIT]: + self.close() diff --git a/platformcode/xbmc_videolibrary.py b/platformcode/xbmc_videolibrary.py index ea0c7745..96d9c6da 100644 --- a/platformcode/xbmc_videolibrary.py +++ b/platformcode/xbmc_videolibrary.py @@ -22,7 +22,7 @@ from xml.dom import minidom def mark_auto_as_watched(item, nfo_path=None, head_nfo=None, item_nfo=None): def mark_as_watched_subThread(item, nfo_path, head_nfo, item_nfo): - logger.info() + logger.debug() # logger.debug("item:\n" + item.tostring('\n')) time_limit = time.time() + 30 @@ -53,7 +53,7 @@ def mark_auto_as_watched(item, nfo_path=None, head_nfo=None, item_nfo=None): # Mark as Watched if actual_time > mark_time and not marked: - logger.debug("Marked as Watched") + logger.info("Marked as Watched") item.playcount = 1 marked = True show_server = False @@ -104,7 +104,7 @@ def sync_trakt_addon(path_folder): """ Updates the values ​​of episodes seen if """ - logger.info() + logger.debug() # if the addon exists we do the search if xbmc.getCondVisibility('System.HasAddon("script.trakt")'): # we import dependencies @@ -230,7 +230,7 @@ def sync_trakt_kodi(silent=True): notificacion = False xbmc.executebuiltin('RunScript(script.trakt,action=sync,silent=%s)' % silent) - logger.info("Synchronization with Trakt started") + logger.debug("Synchronization with Trakt started") if notificacion: platformtools.dialog_notification(config.get_localized_string(20000), config.get_localized_string(60045), sound=False, time=2000) @@ -244,7 +244,7 @@ def 
mark_content_as_watched_on_kodi(item, value=1): @type value: int @param value: > 0 for seen, 0 for not seen """ - logger.info() + logger.debug() # logger.debug("item:\n" + item.tostring('\n')) payload_f = '' @@ -316,7 +316,7 @@ def mark_season_as_watched_on_kodi(item, value=1): @type value: int @param value: > 0 for seen, 0 for not seen """ - logger.info() + logger.debug() # logger.debug("item:\n" + item.tostring('\n')) # We can only mark the season as seen in the Kodi database if the database is local, in case of sharing database this functionality will not work @@ -350,7 +350,7 @@ def mark_content_as_watched_on_kod(path): @type str: path @param path: content folder to mark """ - logger.info() + logger.debug() #logger.debug("path: " + path) FOLDER_MOVIES = config.get_setting("folder_movies") @@ -443,7 +443,7 @@ def get_data(payload): import urllib.request as urllib except ImportError: import urllib - logger.info("payload: %s" % payload) + logger.debug("payload: %s" % payload) # Required header for XBMC JSON-RPC calls, otherwise you'll get a 415 HTTP response code - Unsupported media type headers = {'content-type': 'application/json'} @@ -460,7 +460,7 @@ def get_data(payload): response = f.read() f.close() - logger.info("get_data: response %s" % response) + logger.debug("get_data: response %s" % response) data = jsontools.load(response) except Exception as ex: template = "An exception of type %s occured. Arguments:\n%r" @@ -476,7 +476,7 @@ def get_data(payload): logger.error("error en xbmc.executeJSONRPC: %s" % message) data = ["error"] - logger.info("data: %s" % data) + logger.debug("data: %s" % data) return data @@ -490,7 +490,7 @@ def update(folder_content=config.get_setting("folder_tvshows"), folder=""): @type folder: str @param folder: name of the folder to scan. 
""" - logger.info(folder) + logger.debug(folder) payload = { "jsonrpc": "2.0", @@ -554,7 +554,7 @@ def set_content(content_type, silent=False, custom=False): @type content_type: str ('movie' o 'tvshow') @param content_type: content type to configure, series or movies """ - logger.info() + logger.debug() continuar = True msg_text = "" videolibrarypath = config.get_setting("videolibrarypath") @@ -580,7 +580,7 @@ def set_content(content_type, silent=False, custom=False): try: # Install metadata.themoviedb.org xbmc.executebuiltin('InstallAddon(metadata.themoviedb.org)', True) - logger.info("Instalado el Scraper de películas de TheMovieDB") + logger.debug("Instalado el Scraper de películas de TheMovieDB") except: pass @@ -634,7 +634,7 @@ def set_content(content_type, silent=False, custom=False): try: # Install metadata.tvdb.com xbmc.executebuiltin('InstallAddon(metadata.tvdb.com)', True) - logger.info("The TVDB series Scraper installed ") + logger.debug("The TVDB series Scraper installed ") except: pass @@ -729,7 +729,7 @@ def set_content(content_type, silent=False, custom=False): strScraper = 'metadata.universal' path_settings = xbmc.translatePath("special://profile/addon_data/metadata.universal/settings.xml") if not os.path.exists(path_settings): - logger.info("%s: %s" % (content_type, path_settings + " doesn't exist")) + logger.debug("%s: %s" % (content_type, path_settings + " doesn't exist")) return continuar settings_data = filetools.read(path_settings) strSettings = ' '.join(settings_data.split()).replace("> <", "><") @@ -748,7 +748,7 @@ def set_content(content_type, silent=False, custom=False): strScraper = 'metadata.tvshows.themoviedb.org' path_settings = xbmc.translatePath("special://profile/addon_data/metadata.tvshows.themoviedb.org/settings.xml") if not os.path.exists(path_settings): - logger.info("%s: %s" % (content_type, path_settings + " doesn't exist")) + logger.debug("%s: %s" % (content_type, path_settings + " doesn't exist")) return continuar settings_data = filetools.read(path_settings) strSettings = ' '.join(settings_data.split()).replace("> <", "><") @@ -758,7 +758,7 @@ def set_content(content_type, silent=False, custom=False): videolibrarypath += sep strPath = videolibrarypath + config.get_setting("folder_tvshows") + sep - logger.info("%s: %s" % (content_type, strPath)) + logger.debug("%s: %s" % (content_type, strPath)) # We check if strPath already exists in the DB to avoid duplicates sql = 'SELECT idPath FROM path where strPath="%s"' % strPath nun_records, records = execute_sql_kodi(sql) @@ -800,15 +800,15 @@ def set_content(content_type, silent=False, custom=False): heading = config.get_localized_string(70103) % content_type msg_text = config.get_localized_string(70104) - logger.info("%s: %s" % (heading, msg_text)) + logger.debug("%s: %s" % (heading, msg_text)) return continuar def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvshows_folder, new_tvshows_folder, progress): def path_replace(path, old, new): - logger.info() - logger.info('path: ' + path + ', old: ' + old + ', new: ' + new) + logger.debug() + logger.debug('path: ' + path + ', old: ' + old + ', new: ' + new) if new.startswith("special://") or '://' in new: sep = '/' else: sep = os.sep @@ -819,7 +819,7 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh return path - logger.info() + logger.debug() sql_old_path = old_path if sql_old_path.startswith("special://"): @@ -831,10 +831,10 @@ def update_db(old_path, new_path, old_movies_folder, 
new_movies_folder, old_tvsh if not sql_old_path.endswith(sep): sql_old_path += sep - logger.info('sql_old_path: ' + sql_old_path) + logger.debug('sql_old_path: ' + sql_old_path) # search MAIN path in the DB sql = 'SELECT idPath, strPath FROM path where strPath LIKE "%s"' % sql_old_path - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) # change main path @@ -842,7 +842,7 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh idPath = records[0][0] strPath = path_replace(records[0][1], old_path, new_path) sql = 'UPDATE path SET strPath="%s" WHERE idPath=%s' % (strPath, idPath) - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) else: progress.update(100) @@ -859,7 +859,7 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh # Search Main Sub Folder sql = 'SELECT idPath, strPath FROM path where strPath LIKE "%s"' % sql_old_folder - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) # Change Main Sub Folder @@ -868,13 +868,13 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh idPath = record[0] strPath = path_replace(record[1], filetools.join(old_path, OldFolder), filetools.join(new_path, NewFolder)) sql = 'UPDATE path SET strPath="%s" WHERE idPath=%s' % (strPath, idPath) - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) # Search if Sub Folder exixt in all paths sql_old_folder += '%' sql = 'SELECT idPath, strPath FROM path where strPath LIKE "%s"' % sql_old_folder - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) #Change Sub Folder in all paths @@ -883,7 +883,7 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh idPath = record[0] strPath = path_replace(record[1], filetools.join(old_path, OldFolder), filetools.join(new_path, NewFolder)) sql = 'UPDATE path SET strPath="%s" WHERE idPath=%s' % (strPath, idPath) - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) @@ -891,27 +891,27 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh # if is Movie Folder # search and modify in "movie" sql = 'SELECT idMovie, c22 FROM movie where c22 LIKE "%s"' % sql_old_folder - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) if records: for record in records: idMovie = record[0] strPath = path_replace(record[1], filetools.join(old_path, OldFolder), filetools.join(new_path, NewFolder)) sql = 'UPDATE movie SET c22="%s" WHERE idMovie=%s' % (strPath, idMovie) - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) else: # if is TV Show Folder # search and modify in "episode" sql = 'SELECT idEpisode, c18 FROM episode where c18 LIKE "%s"' % sql_old_folder - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) if records: for record in records: idEpisode = record[0] strPath = path_replace(record[1], filetools.join(old_path, OldFolder), filetools.join(new_path, NewFolder)) sql = 'UPDATE episode SET c18="%s" WHERE idEpisode=%s' % (strPath, idEpisode) - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) p += 5 progress.update(p, config.get_localized_string(20000) 
+ '\n' + config.get_localized_string(80013)) @@ -936,26 +936,26 @@ def clean(path_list=[]): return path, sep - logger.info() + logger.debug() progress = platformtools.dialog_progress_bg(config.get_localized_string(20000), config.get_localized_string(80025)) progress.update(0) # if the path list is empty, clean the entire video library if not path_list: - logger.info('the path list is empty, clean the entire video library') + logger.debug('the path list is empty, clean the entire video library') if not config.get_setting("videolibrary_kodi"): sql_path, sep = sql_format(config.get_setting("videolibrarypath")) if not sql_path.endswith(sep): sql_path += sep sql = 'SELECT idPath FROM path where strPath LIKE "%s"' % sql_path - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) idPath = records[0][0] sql = 'DELETE from path WHERE idPath=%s' % idPath - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) sql = 'DELETE from path WHERE idParentPath=%s' % idPath - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) from core import videolibrarytools @@ -969,7 +969,7 @@ def clean(path_list=[]): if filetools.exists(tvshow_nfo): path_list.append(filetools.join(config.get_setting("videolibrarypath"), videolibrarytools.FOLDER_TVSHOWS, folder)) - logger.info('path_list: ' + str(path_list)) + logger.debug('path_list: ' + str(path_list)) if path_list: t = float(100) / len(path_list) for i, path in enumerate(path_list): progress.update(int(math.ceil((i + 1) * t))) @@ -979,13 +979,13 @@ def clean(path_list=[]): sql_path, sep = sql_format(path) if filetools.isdir(path) and not sql_path.endswith(sep): sql_path += sep - logger.info('path: ' + path) - logger.info('sql_path: ' + sql_path) + logger.debug('path: ' + path) + logger.debug('sql_path: ' + sql_path) if filetools.isdir(path): # search movie in the DB sql = 'SELECT idMovie FROM movie where c22 LIKE "%s"' % (sql_path + '%') - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) # delete movie if records: @@ -994,7 +994,7 @@ def clean(path_list=[]): continue # search TV show in the DB sql = 'SELECT idShow FROM tvshow_view where strPath LIKE "%s"' % sql_path - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) # delete TV show if records: @@ -1003,7 +1003,7 @@ def clean(path_list=[]): elif config.get_setting("folder_movies") in sql_path: # search movie in the DB sql = 'SELECT idMovie FROM movie where c22 LIKE "%s"' % sql_path - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) # delete movie if records: @@ -1012,7 +1012,7 @@ def clean(path_list=[]): else: # search episode in the DB sql = 'SELECT idEpisode FROM episode where c18 LIKE "%s"' % sql_path - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) # delete episode if records: @@ -1031,7 +1031,7 @@ def check_db(path): ret = False sql_path = '%' + sep + path.split(sep)[-1] + sep + '%' sql = 'SELECT idShow FROM tvshow_view where strPath LIKE "%s"' % sql_path - logger.info('sql: ' + sql) + logger.debug('sql: ' + sql) nun_records, records = execute_sql_kodi(sql) if records: ret = True @@ -1048,7 +1048,7 @@ def execute_sql_kodi(sql): @return: list with the query result @rtype records: list of tuples """ - logger.info() + logger.debug() file_db = "" nun_records = 0 
records = None @@ -1069,14 +1069,14 @@ def execute_sql_kodi(sql): break if file_db: - logger.info("DB file: %s" % file_db) + logger.debug("DB file: %s" % file_db) conn = None try: import sqlite3 conn = sqlite3.connect(file_db) cursor = conn.cursor() - logger.info("Running sql: %s" % sql) + logger.debug("Running sql: %s" % sql) cursor.execute(sql) conn.commit() @@ -1090,7 +1090,7 @@ def execute_sql_kodi(sql): nun_records = conn.total_changes conn.close() - logger.info("Query executed. Records: %s" % nun_records) + logger.debug("Query executed. Records: %s" % nun_records) except: logger.error("Error executing sql query") @@ -1110,7 +1110,7 @@ def check_sources(new_movies_path='', new_tvshows_path=''): if not path.endswith(sep): path += sep return path - logger.info() + logger.debug() new_movies_path = format_path(new_movies_path) new_tvshows_path = format_path(new_tvshows_path) @@ -1140,7 +1140,7 @@ def check_sources(new_movies_path='', new_tvshows_path=''): def update_sources(new='', old=''): - logger.info() + logger.debug() if new == old: return SOURCES_PATH = xbmc.translatePath("special://userdata/sources.xml") @@ -1182,9 +1182,9 @@ def update_sources(new='', old=''): # create new path list_path = [p.firstChild.data for p in paths_node] if new in list_path: - logger.info("The path %s already exists in sources.xml" % new) + logger.debug("The path %s already exists in sources.xml" % new) return - logger.info("The path %s does not exist in sources.xml" % new) + logger.debug("The path %s does not exist in sources.xml" % new) # if the path does not exist we create one source_node = xmldoc.createElement("source") @@ -1223,7 +1223,7 @@ def update_sources(new='', old=''): def ask_set_content(silent=False): - logger.info() + logger.debug() logger.debug("videolibrary_kodi %s" % config.get_setting("videolibrary_kodi")) def do_config(custom=False): @@ -1280,7 +1280,7 @@ def ask_set_content(silent=False): def next_ep(item): from core.item import Item - logger.info() + logger.debug() item.next_ep = False # check if next file exist @@ -1296,7 +1296,7 @@ def next_ep(item): nextIndex = fileList.index(current_filename) + 1 if nextIndex == 0 or nextIndex == len(fileList): next_file = None else: next_file = fileList[nextIndex] - logger.info('Next File:' + str(next_file)) + logger.debug('Next File:' + str(next_file)) # start next episode window afther x time if next_file: diff --git a/resources/language/resource.language.en_gb/strings.po b/resources/language/resource.language.en_gb/strings.po index 02cea72d..351de1e5 100644 --- a/resources/language/resource.language.en_gb/strings.po +++ b/resources/language/resource.language.en_gb/strings.po @@ -589,7 +589,7 @@ msgid "%.2f %s of %.2f %s a %.2f %s/s (%d/%d)" msgstr "" msgctxt "#59985" -msgid "Error in the channel " +msgid "Channel %s unreachable" msgstr "" msgctxt "#59986" @@ -669,7 +669,7 @@ msgid "" msgstr "" msgctxt "#60006" -msgid "An error has occurred in %s" +msgid "[B]An error has occurred in %s:[/B]" msgstr "" msgctxt "#60007" @@ -697,15 +697,15 @@ msgid "No video to play" msgstr "" msgctxt "#60013" -msgid "This website seems to be unavailable, try later, if the problem persists, check with a browser: %s. If the web page is working correctly, please report the error on : https://t.me/kodiondemand" +msgid "This website [B]%s[/B] seems to be unavailable, try later. 
If the web page is working correctly, please report the error on: https://github.com/kodiondemand/addon/issues" msgstr "" msgctxt "#60014" -msgid "It may be due to a connection problem, the web page of the channel has changed its structure, or an internal error of KoD. To have more details, see the log file." +msgid "It may be due to a connection problem, the web page of the channel has changed its structure, or an internal error of KoD. If it works in a browser, report the issue using [B]Help->Report an issue[/B]." msgstr "" msgctxt "#60015" -msgid "Check the log for more details on the error." +msgid "Do you want to report the issue?\n(Be sure you follow all steps and give a clear and comprehensive explanation of what happened)" msgstr "" msgctxt "#60016" @@ -800,8 +800,8 @@ msgctxt "#60038" msgid "An error has occurred in KoD" msgstr "" -msgctxt "#60039" -msgid "Error on channel %s" +msgctxt "#60039" +msgid "Channel %s unreachable" msgstr "" msgctxt "#60040" @@ -1633,7 +1633,7 @@ msgid "Super favourites menu" msgstr "" msgctxt "#60362" -msgid "You can't watch this video because..." +msgid "Unexpected error on server %s" msgstr "" msgctxt "#60363" @@ -6086,6 +6086,44 @@ msgctxt "#70821" msgid "Search results" msgstr "" +# RENUMBER +msgctxt "#70822" +msgid "Renumber new episodes of: " +msgstr "" + +msgctxt "#70823" +msgid "Renumber episodes of: " +msgstr "" + +msgctxt "#70824" +msgid "Select the specials of: " +msgstr "" + +msgctxt "#70825" +msgid "Select Season" +msgstr "" + +msgctxt "#70826" +msgid "Select Episode" +msgstr "" + +msgctxt "#70827" +msgid "Select Specials" +msgstr "" + +msgctxt "#70828" +msgid "Manual renumbering" +msgstr "" + +msgctxt "#70829" +msgid "Delete Numbering for: " +msgstr "" + +msgctxt "#70830" +msgid "The series / episode number should only be changed if the series has relative numbering." +msgstr "" + + # DNS start [ settings and declaration ] msgctxt "#707401" msgid "Enable DNS check alert" msgstr "" diff --git a/resources/language/resource.language.it_it/strings.po b/resources/language/resource.language.it_it/strings.po index d0a6603e..b8c72df9 100644 --- a/resources/language/resource.language.it_it/strings.po +++ b/resources/language/resource.language.it_it/strings.po @@ -588,8 +588,8 @@ msgid "%.2f %s of %.2f %s a %.2f %s/s (%d/%d)" msgstr "%.2f %s di %.2f %s a %.2f %s/s (%d/%d)" msgctxt "#59985" -msgid "Error in the channel " -msgstr "Errore nel canale " +msgid "Channel %s unreachable" +msgstr "Canale %s irraggiungibile" msgctxt "#59986" msgid "Error loading the server: %s\n" @@ -668,8 +668,8 @@ msgid "" msgstr "" msgctxt "#60006" -msgid "An error has occurred in %s" -msgstr "Si è verificato un errore in %s" +msgid "[B]An error has occurred in %s:[/B]" +msgstr "[B]Si è verificato un errore in %s:[/B]" msgctxt "#60007" msgid "An error has occurred on %s" @@ -696,16 +696,16 @@ msgid "No video to play" msgstr "Nessun video da riprodurre" msgctxt "#60013" -msgid "This website seems to be unavailable, try later, if the problem persists, check with a browser: %s. If the web page is working correctly, please report the error on : https://t.me/kodiondemand" -msgstr "Questo sito non sembra essere disponibile, riprova più tardi, se il problema persiste verifica mediante un browser: %s. Se la pagina web funziona correttamente segnala l'errore su : https://t.me/kodiondemand" +msgid "This website [B]%s[/B] seems to be unavailable, try later. 
If the web page is working correctly, please report the error on: https://github.com/kodiondemand/addon/issues" +msgstr "Il sito [B]%s[/B] non sembra essere disponibile, riprova più tardi. Se la pagina web funziona correttamente segnala l'errore qui: https://github.com/kodiondemand/addon/issues" msgctxt "#60014" -msgid "It may be due to a connection problem, the web page of the channel has changed its structure, or an internal error of KoD. To have more details, see the log file." -msgstr "Potrebbe essere dovuto a un problema di connessione, la pagina web del canale ha cambiato la sua struttura, oppure un errore interno di KoD. Per avere maggiori dettagli, consulta il file di log." +msgid "It may be due to a connection problem, the web page of the channel has changed its structure, or an internal error of KoD. If it works in a browser, report the issue using [B]Help->Report an issue[/B]." +msgstr "Potrebbe essere dovuto a un problema di connessione, la pagina web del canale ha cambiato la sua struttura, oppure un errore interno di KoD. Se sul browser funziona, segnala il problema andando in [B]Aiuto->Segnala un problema[/B]." msgctxt "#60015" -msgid "Check the log for more details on the error." -msgstr "Controlla il log per avere maggiori dettagli sull'errore." +msgid "Do you want to report the issue?\n(Be sure you follow all steps and give a clear and comprehensive explanation of what happened)" +msgstr "Vuoi fare una segnalazione agli sviluppatori?\n(Assicurati di seguire bene tutti i punti e dai una spiegazione chiara ed esaustiva di quanto accaduto)" msgctxt "#60016" msgid "Segna film come non visto" @@ -1632,8 +1632,8 @@ msgid "Super favourites menu" msgstr "Menu super favoriti" msgctxt "#60362" -msgid "You can't watch this video because..." -msgstr "Non è possibile vedere questo video perchè..." +msgid "Unexpected error on server %s" +msgstr "Errore inaspettato sul server %s" msgctxt "#60363" msgid "The server on which it is hosted" @@ -6087,6 +6087,43 @@ msgctxt "#70821" msgid "Search results" msgstr "Risultati della ricerca" +# RENUMBER +msgctxt "#70822" +msgid "Renumber new episodes of: " +msgstr "Rinumera i nuovi episodi di: " + +msgctxt "#70823" +msgid "Renumber episodes of: " +msgstr "Rinumera gli episodi di: " + +msgctxt "#70824" +msgid "Select the specials of: " +msgstr "Seleziona gli speciali di: " + +msgctxt "#70825" +msgid "Select Season" +msgstr "Seleziona Stagione" + +msgctxt "#70826" +msgid "Select Episode" +msgstr "Seleziona Episodio" + +msgctxt "#70827" +msgid "Select Specials" +msgstr "Seleziona Speciali" + +msgctxt "#70828" +msgid "Manual renumbering" +msgstr "Rinumerazione Manuale" + +msgctxt "#70829" +msgid "Delete Numbering for: " +msgstr "Elimina Numerazione per: " + +msgctxt "#70830" +msgid "The series / episode number should only be changed if the series has relative numbering." +msgstr "Il numero della serie / episodio deve essere modificato solo se la serie ha una numerazione relativa." 
+ # DNS start [ settings and declaration ] msgctxt "#707401" msgid "Enable DNS check alert" diff --git a/resources/settings.xml b/resources/settings.xml index 6edd64e5..49a1f180 100644 --- a/resources/settings.xml +++ b/resources/settings.xml @@ -81,6 +81,7 @@ <!-- Search --> <category label="60423"> <setting label="60422" type="lsep"/> + <setting id="new_search" type="bool" label="Usa la nuova ricerca globale dove disponibile" default="true" visible="true"/> <setting id="last_search" type="bool" label="60678" default="true" visible="true"/> <setting id="saved_searches_limit" type="slider" option="int" range="10,10,40" label="60677" default="10" visible="eq(-1,0)" subsetting="true"/> <setting id="result_mode" type="select" label="60657" lvalues="60675|60676" default="0"/> diff --git a/resources/skins/Default/720p/GlobalSearch.xml b/resources/skins/Default/720p/GlobalSearch.xml new file mode 100644 index 00000000..70a52c37 --- /dev/null +++ b/resources/skins/Default/720p/GlobalSearch.xml @@ -0,0 +1,627 @@ +<?xml version="1.0" encoding="utf-8" standalone="yes"?> +<window> + <depth>0.52</depth> + <coordinates> + <left>0</left> + <top>0</top> + </coordinates> + <controls> + + <!-- control groups --> + <control type='group' id="1"/> + <control type='group' id="2"/> + <control type='group' id="3"/> + <control type='group' id="4"/> + <control type='group' id="5"/> + + <control type="image"> <!-- BACKGROUND --> + <description>Window Background</description> + <width>100%</width> + <height>100%</height> + <texture colordiffuse="FF232323">white.png</texture> + </control> + + <control type="group"> <!-- CONTROLS GROUP --> + <description>Main Group</description> + <left>0</left> + <top>0</top> + <animation type="WindowOpen" reversible="false"> + <effect type="fade" delay="160" end="100" time="300" /> + </animation> + <animation type="WindowClose" reversible="false"> + <effect type="fade" delay="160" start="100" end="0" time="300" /> + </animation> + + <control type="image"> + <description>Fanart</description> + <width>100%</width> + <height>100%</height> + <aspectratio>scale</aspectratio> + <texture colordiffuse="FF555555">$INFO[Container(102).ListItem.Property(fanart)]</texture> + </control> + + + <control type="group"> <!-- SEARCH GROUP--> + <description>Search Group</description> + <visible>Control.IsVisible(1)</visible> + + <control type="textbox" id='100'> + <description>Title</description> + <left>30</left> + <top>30</top> + <width>1000</width> + <height>30</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <visible>Integer.IsGreater(Container(101).NumItems, 0)</visible> + <align>left</align> + <aligny>center</aligny> + </control> + + <control type="group"> <!-- Search Result Group --> + <description>Search Result Group</description> + <visible>Integer.IsGreater(Container(102).NumItems, 0)</visible> + <animation effect="fade" time="200">Visible</animation> + + <control type="textbox"> + <description>Item Title</description> + <left>400</left> + <top>90</top> + <width>840</width> + <height>30</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <label>$INFO[Container(102).ListItem.Label] [B][COLOR FFAAAAAA]$INFO[Container(102).ListItem.Property(year)][/COLOR][/B]</label> + <align>left</align> + <aligny>center</aligny> + </control> + + <control type="textbox"> + <description>Item Title</description> + <right>40</right> + <top>90</top> + <width>200</width> + 
<height>30</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <label>[B][COLOR FFAAAAAA]$INFO[Container(102).CurrentItem]/$INFO[Container(102).NumItems][/COLOR][/B]</label> + <align>right</align> + <aligny>center</aligny> + </control> + + <control type="textbox"> + <description>Item Plot</description> + <left>400</left> + <top>150</top> + <width>840</width> + <height>170</height> + <!-- <font>font13</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <label>$INFO[Container(102).ListItem.Property(plot)]</label> + <align>left</align> + </control> + + <control type="wraplist" id="102"> + <description>Search Results list</description> + <bottom>70</bottom> + <left>0</left> + <width>100%</width> + <height>570</height> + <ondown>101</ondown> + <onup>503</onup> + <orientation>horizontal</orientation> + <scrolltime tween="cubic" easing="out">300</scrolltime> + <itemlayout height="570" width="180"> + <control type="image"> + <description>Item Poster</description> + <top>300</top> + <left>0</left> + <width>180</width> + <height>270</height> + <texture>$INFO[ListItem.Property(thumb)]</texture> + <bordersize>10</bordersize> + <aspectratio>scale</aspectratio> + </control> + <control type="image"> + <description>Item Verified</description> + <top>315</top> + <left>145</left> + <width>20</width> + <height>20</height> + <texture colordiffuse="FF0082C2">$INFO[ListItem.Property(verified)]</texture> + <aspectratio>scale</aspectratio> + </control> + </itemlayout> + <focusedlayout height="570" width="380"> + <control type="image"> + <description>Item Poster</description> + <top>0</top> + <left>0</left> + <width>380</width> + <height>570</height> + <texture>$INFO[ListItem.Property(thumb)]</texture> + <bordersize>10</bordersize> + <aspectratio>scale</aspectratio> + </control> + <control type="image"> + <description>Item Verified</description> + <top>15</top> + <left>330</left> + <width>40</width> + <height>40</height> + <texture colordiffuse="FF0082C2">$INFO[ListItem.Property(verified)]</texture> + <aspectratio>scale</aspectratio> + </control> + </focusedlayout> + </control> + </control> <!-- END Search Result Group --> + + <control type="group"> <!-- Chennels Group--> + <description>Chennels Group</description> + <bottom>-100</bottom> + <width>100%</width> + <height>150</height> + <animation effect="slide" start="0,150" time="500" condition="Integer.IsGreater(Container(101).NumItems, 0)">Conditional</animation> + <animation effect="slide" start="0,-100" time="200" condition="!Control.HasFocus(101)">Conditional</animation> + <visible>Integer.IsGreater(Container(101).NumItems, 0)</visible> + + <control type="image"> + <description>Chennels Bar Background Opacity on hover</description> + <width>100%</width> + <height>100%</height> + <animation effect="fade" start='100' end='0' time="200" condition="!Control.HasFocus(101)">Conditional</animation> + <texture colordiffuse="FF232323">white.png</texture> + </control> + + <control type="image"> + <description>Chennels Bar Background</description> + <width>100%</width> + <height>100%</height> + <texture colordiffuse="88232323">white.png</texture> + </control> + + <control type="list" id="101"> + <description>Channels list</description> + <width>100%</width> + <height>150</height> + <onup>102</onup> + <orientation>horizontal</orientation> + <scrolltime tween="cubic" easing="out">300</scrolltime> + + <itemlayout height="150" width="150"> + <control type="image"> + 
<description>Channel Icon</description> + <top>0</top> + <left>0</left> + <width>150</width> + <height>150</height> + <texture colordiffuse="55FFFFFF">$INFO[ListItem.Property(thumb)]</texture> + <aspectratio>scale</aspectratio> + </control> + <control type="image"> + <description>Results Count Background</description> + <top>0</top> + <left>110</left> + <width>40</width> + <height>40</height> + <texture colordiffuse="20232323">white.png</texture> + <aspectratio>scale</aspectratio> + </control> + <control type="textbox"> + <description>Results Count</description> + <right>5</right> + <top>5</top> + <width>30</width> + <height>30</height> + <!-- <font>font30_title</font> --> + <textcolor>22FFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <label>[B]$INFO[ListItem.Property(results)][/B]</label> + <align>center</align> + <aligny>center</aligny> + </control> + <control type="image"> + <description>Verified</description> + <top>5</top> + <left>5</left> + <width>30</width> + <height>30</height> + <texture colordiffuse="880082C2">$INFO[ListItem.Property(verified)]</texture> + <aspectratio>scale</aspectratio> + </control> + </itemlayout> + <focusedlayout height="150" width="150"> + <control type="image"> + <description>Channel Icon</description> + <top>0</top> + <left>0</left> + <width>150</width> + <height>150</height> + <texture>$INFO[ListItem.Property(thumb)]</texture> + <aspectratio>scale</aspectratio> + </control> + <control type="image"> + <description>Results Count Background</description> + <top>0</top> + <left>110</left> + <width>40</width> + <height>40</height> + <texture colordiffuse="880082C2">white.png</texture> + <aspectratio>scale</aspectratio> + </control> + <control type="image"> + <description>Results Count</description> + <top>5</top> + <left>5</left> + <width>30</width> + <height>30</height> + <texture colordiffuse="FF0082C2">$INFO[ListItem.Property(verified)]</texture> + <aspectratio>scale</aspectratio> + </control> + <control type="textbox"> + <description>Verified</description> + <right>5</right> + <top>5</top> + <width>30</width> + <height>30</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <label>[B]$INFO[ListItem.Property(results)][/B]</label> + <align>center</align> + <aligny>center</aligny> + </control> + </focusedlayout> + </control> + </control> <!-- END Chennels Group--> + + </control> <!-- END SEARCH GROUP--> + + + <control type="group"> <!-- EPISODES GROUP--> + <description>Episodes Group</description> + <visible>Control.IsVisible(2)</visible> + + <control type="image"> + <description>Poster</description> + <top>0</top> + <left>0</left> + <width>480</width> + <height>720</height> + <texture>$INFO[Container(102).ListItem.Property(thumb)]</texture> + </control> + + <control type="textbox"> + <description>Main Title</description> + <left>520</left> + <top>40</top> + <width>1150</width> + <height>30</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <align>left</align> + <aligny>center</aligny> + <label>$INFO[Container(102).ListItem.Label()]</label> + </control> + + <control type="list" id="200"> <!-- Episodes List --> + <description>Episodes List</description> + <bottom>40</bottom> + <left>520</left> + <width>700</width> + <height>570</height> + <onleft>503</onleft> + <onright>503</onright> + <scrolltime tween="cubic" easing="out">300</scrolltime> + <itemlayout height="60" width="700"> + <control type="textbox"> + 
<description>Episode Title</description> + <left>20</left> + <width>660</width> + <height>60</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <align>left</align> + <aligny>center</aligny> + <label>[COLOR FFAAAAAA]$INFO[ListItem.Label()][/COLOR]</label> + </control> + </itemlayout> + <focusedlayout height="60" width="700"> + <control type="image"> + <description>Selected Background</description> + <width>700</width> + <height>60</height> + <texture colordiffuse="88000000">white.png</texture> + <aspectratio>scale</aspectratio> + </control> + <control type="textbox"> + <description>Episode Title</description> + <left>20</left> + <width>660</width> + <height>60</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <align>left</align> + <aligny>center</aligny> + <label>$INFO[ListItem.Label]</label> + </control> + </focusedlayout> + </control> <!-- END Episodes List --> + </control> <!-- END EPISODES GROUP --> + + <control type="group"> <!-- SERVERS GROUP--> + <description>Servers Group</description> + <visible>Control.IsVisible(3)</visible> + + <control type="image"> + <description>Poster</description> + <top>0</top> + <left>0</left> + <width>480</width> + <height>720</height> + <texture>$INFO[Container(102).ListItem.Property(thumb)]</texture> + </control> + + <control type="textbox"> + <description>Main Title</description> + <left>520</left> + <top>40</top> + <width>1150</width> + <height>30</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <align>left</align> + <aligny>center</aligny> + <label>$INFO[Container(102).ListItem.Label]</label> + </control> + + <control type="list" id="300"> <!-- Servers List --> + <description>Servers List</description> + <bottom>40</bottom> + <left>520</left> + <width>700</width> + <height>570</height> + <onleft>503</onleft> + <onright>503</onright> + <scrolltime tween="cubic" easing="out">300</scrolltime> + <itemlayout height="140" width="700"> + <control type="image"> + <description>Servers Icon</description> + <top>5</top> + <left>5</left> + <width>120</width> + <height>120</height> + <texture>$INFO[ListItem.Property(thumb)]</texture> + <aspectratio>scale</aspectratio> + </control> + <control type="group"> + <visible>ListItem.Property(quality)</visible> + <control type="image"> + <description>Servers Quality</description> + <top>35</top> + <left>150</left> + <width>60</width> + <height>60</height> + <texture>$INFO[ListItem.Property(quality)]</texture> + <aspectratio>scale</aspectratio> + </control> + <control type="textbox"> + <description>Server Title</description> + <left>220</left> + <top>35</top> + <width>450</width> + <height>60</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <align>left</align> + <aligny>center</aligny> + <label>[B][COLOR FFAAAAAA]$INFO[ListItem.Property(servername)][/COLOR][/B]</label> + </control> + </control> + <control type="textbox"> + <visible>!ListItem.Property(quality)</visible> + <description>Server Title</description> + <left>150</left> + <top>35</top> + <width>450</width> + <height>60</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <align>left</align> + <aligny>center</aligny> + <label>[B][COLOR FFAAAAAA]$INFO[ListItem.Property(servername)][/COLOR][/B]</label> + </control> + 
</itemlayout> + <focusedlayout height="140" width="700"> + <control type="image"> + <description>Selection Background</description> + <width>700</width> + <height>130</height> + <texture colordiffuse="88000000">white.png</texture> + <aspectratio>scale</aspectratio> + </control> + <control type="image"> + <description>Servers Color</description> + <top>0</top> + <left>0</left> + <width>130</width> + <height>130</height> + <visible>Control.HasFocus(300)</visible> + <texture colordiffuse="$INFO[ListItem.Property(color)]">white.png</texture> + <aspectratio>scale</aspectratio> + </control> + <control type="image"> + <description>Servers Icon</description> + <top>5</top> + <left>5</left> + <width>120</width> + <height>120</height> + <texture>$INFO[ListItem.Property(thumb)]</texture> + <aspectratio>scale</aspectratio> + </control> + <control type="group"> + <visible>ListItem.Property(quality)</visible> + <control type="image"> + <description>Servers Quality</description> + <top>35</top> + <left>150</left> + <width>60</width> + <height>60</height> + <texture>$INFO[ListItem.Property(quality)]</texture> + <aspectratio>scale</aspectratio> + </control> + <control type="textbox"> + <description>Server Title</description> + <left>220</left> + <top>35</top> + <width>450</width> + <height>60</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <align>left</align> + <aligny>center</aligny> + <label>[B][COLOR FFAAAAAA]$INFO[ListItem.Property(servername)][/COLOR][/B]</label> + </control> + </control> + <control type="textbox"> + <visible>!ListItem.Property(quality)</visible> + <description>Server Title</description> + <left>150</left> + <top>35</top> + <width>450</width> + <height>60</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <align>left</align> + <aligny>center</aligny> + <label>[B][COLOR FFAAAAAA]$INFO[ListItem.Property(servername)][/COLOR][/B]</label> + </control> + </focusedlayout> + </control> <!-- END Servers List --> + </control> <!-- END SERVERS GROUP --> + </control> <!-- CONTROLS GROUP --> + + <control type="progress" id="500"> + <description>Progress Bar</description> + <top>350</top> + <left>240</left> + <width>800</width> + <height>20</height> + <reveal>true</reveal> + <lefttexture colordiffuse="FF232323">white.png</lefttexture> + <righttexture colordiffuse="FF0082C2">white.png</righttexture> + <texturebg colordiffuse="22ffffff">progress.png</texturebg> + <midtexture colordiffuse="FF0082C2">progress.png</midtexture> + <animation effect="zoom" center="auto" end="163,40" time="600" condition="Integer.IsGreater(Container(102).NumItems, 0)">Conditional</animation> + <animation effect="slide" tween="linear" center="auto" end="0,-890" time="600" condition="Integer.IsGreater(Container(102).NumItems, 0)">Conditional</animation> + </control> + + <control type="textbox" id="501"> + <description>Progress Count</description> + <top>340</top> + <right>130</right> + <width>200</width> + <height>40</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <align>right</align> + <aligny>center</aligny> + <animation effect="slide" tween="linear" center="auto" end="-40,-311" time="200" condition="Integer.IsGreater(Container(101).NumItems, 0)">Conditional</animation> + <visible>Control.IsVisible(500)</visible> + </control> + + <control type="textbox"> + <description>No Results</description> + <top>340</top> + 
<left>0</left> + <width>100%</width> + <height>40</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <align>center</align> + <aligny>center</aligny> + <label>[UPPERCASE]$ADDON[plugin.video.kod 60473][/UPPERCASE]</label> + <visible>Control.IsVisible(4)</visible> + </control> + + <control type="textbox"> + <description>Load Channels</description> + <top>300</top> + <left>0</left> + <width>100%</width> + <height>40</height> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <align>center</align> + <aligny>center</aligny> + <label>[UPPERCASE]$ADDON[plugin.video.kod 60519][/UPPERCASE]</label> + <visible>Control.IsVisible(5)</visible> + </control> + + <control type="button" id="502"> + <description>Menu</description> + <top>30</top> + <right>110</right> + <height>40</height> + <width>40</width> + <onleft>504</onleft> + <onright>503</onright> + <texturefocus colordiffuse="FFFFFFFF">menu.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">menu.png</texturenofocus> + <visible>Integer.IsGreater(Container(101).NumItems, 0)</visible> + </control> + + <control type="button" id="503"> + <description>Back</description> + <top>30</top> + <right>70</right> + <height>40</height> + <width>40</width> + <onleft>502</onleft> + <onright>504</onright> + <texturefocus colordiffuse="FFFFFFFF">left.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">left.png</texturenofocus> + </control> + + <control type="button" id="504"> + <description>Close</description> + <top>30</top> + <right>30</right> + <height>40</height> + <width>40</width> + <onleft>503</onleft> + <onright>502</onright> + <texturefocus colordiffuse="FFFFFFFF">close.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">close.png</texturenofocus> + </control> + + <control type="textbox" id="505"> + <description>Load Channels</description> + <top>670</top> + <right>20</right> + <width>200</width> + <height>40</height> + <textcolor>FFFFFFFF</textcolor> + <shadowcolor>00000000</shadowcolor> + <align>right</align> + <aligny>center</aligny> + <visible>Control.IsVisible(3)</visible> + </control> + + </controls> +</window> diff --git a/resources/skins/Default/720p/InfoPlus.xml b/resources/skins/Default/720p/InfoPlus.xml index 89de92d5..6f75b392 100644 --- a/resources/skins/Default/720p/InfoPlus.xml +++ b/resources/skins/Default/720p/InfoPlus.xml @@ -67,7 +67,7 @@ <itemlayout height="570" width="180"> <!-- Poster --> <control type="image"> - <bottom>0</bottom> + <top>300</top> <left>0</left> <width>180</width> <height>270</height> @@ -83,7 +83,7 @@ <top>10</top> <width>840</width> <height>30</height> - <font>font30_title</font> + <!-- <font>font30_title</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>[B]$INFO[ListItem.Label] [COLOR FFAAAAAA] $INFO[ListItem.Property(year)][/COLOR][/B] </label> @@ -96,7 +96,7 @@ <top>70</top> <width>840</width> <height>190</height> - <font>font13</font> + <!-- <font>font13</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>$INFO[ListItem.Property(plot)]</label> @@ -105,7 +105,7 @@ </control> <!-- Poster --> <control type="image"> - <bottom>0</bottom> + <top>0</top> <left>0</left> <width>380</width> <height>570</height> @@ -125,7 +125,7 @@ <itemlayout height="570" width="180"> <!-- Poster --> <control type="image"> - <bottom>0</bottom> + <top>300</top> <left>0</left> <width>180</width> <height>270</height> @@ 
-141,7 +141,7 @@ <top>10</top> <width>840</width> <height>30</height> - <font>font30_title</font> + <!-- <font>font30_title</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>[B]$INFO[ListItem.Property(title)][/B] </label> @@ -154,7 +154,7 @@ <top>50</top> <width>840</width> <height>30</height> - <font>font30</font> + <!-- <font>font30</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>[B]$INFO[ListItem.Property(job)][/B]</label> @@ -166,7 +166,7 @@ <top>90</top> <width>830</width> <height>180</height> - <font>font13</font> + <!-- <font>font13</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>$INFO[ListItem.Property(bio)]</label> @@ -175,7 +175,7 @@ </control> <!-- Poster --> <control type="image"> - <bottom>0</bottom> + <top>0</top> <left>0</left> <width>380</width> <height>570</height> @@ -195,7 +195,7 @@ <itemlayout height="570" width="180"> <!-- Background --> <control type="image"> - <bottom>0</bottom> + <top>300</top> <left>0</left> <width>180</width> <height>270</height> @@ -205,7 +205,7 @@ </control> <!-- Poster --> <control type="image"> - <bottom>0</bottom> + <top>300</top> <left>0</left> <width>180</width> <height>270</height> @@ -216,11 +216,11 @@ <!-- DEPARTMENT --> <control type="textbox"> <visible>String.IsEmpty(ListItem.Property(thumbnail))</visible> - <left>0</left> - <bottom>0</bottom> - <width>180</width> + <left>10</left> + <top>300</top> + <width>160</width> <height>270</height> - <font>font13</font> + <!-- <font>font13</font> --> <textcolor>FFFFFFFF</textcolor> <label>[B]$INFO[ListItem.Property(department)][/B]</label> <autoscroll delay="3000" time="2000" repeat="3000"></autoscroll> @@ -235,7 +235,7 @@ <top>10</top> <width>840</width> <height>30</height> - <font>font30_title</font> + <!-- <font>font30_title</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>[B]$INFO[ListItem.Property(title)][/B] </label> @@ -248,7 +248,7 @@ <top>50</top> <width>840</width> <height>30</height> - <font>font30</font> + <!-- <font>font30</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>[B]$INFO[ListItem.Property(job)][/B]</label> @@ -260,7 +260,7 @@ <top>90</top> <width>830</width> <height>180</height> - <font>font13</font> + <!-- <font>font13</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>$INFO[ListItem.Property(bio)]</label> @@ -269,7 +269,7 @@ </control> <!-- Background --> <control type="image"> - <bottom>0</bottom> + <top>0</top> <left>0</left> <width>380</width> <height>570</height> @@ -279,7 +279,7 @@ </control> <!-- Poster --> <control type="image"> - <bottom>0</bottom> + <top>0</top> <left>0</left> <width>380</width> <height>570</height> @@ -294,7 +294,7 @@ <top>0</top> <width>380</width> <height>570</height> - <font>font30_title</font> + <!-- <font>font30_title</font> --> <textcolor>FFFFFFFF</textcolor> <label>$INFO[ListItem.Property(department)]</label> <autoscroll delay="3000" time="2000" repeat="3000"></autoscroll> @@ -322,7 +322,7 @@ <height>50</height> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> - <font>font30_title</font> + <!-- <font>font30_title</font> --> <label></label> <align>left</align> <aligny>center</aligny> diff --git a/resources/skins/Default/720p/InfoWindow.xml b/resources/skins/Default/720p/InfoWindow.xml index 6c3d099d..f1fcc1e3 100644 --- a/resources/skins/Default/720p/InfoWindow.xml +++ 
b/resources/skins/Default/720p/InfoWindow.xml @@ -6,18 +6,6 @@ <top>0</top> </coordinates> <controls> - <!-- CLOSE BUTTON / BACKGROUND --> - <control type="button"> - <left>0</left> - <top>0</top> - <width>100%</width> - <height>100%</height> - <texturefocus colordiffuse="FF232323">white.png</texturefocus> - <texturenofocus colordiffuse="FF232323">white.png</texturenofocus> - <animation effect="fade" time="200">WindowOpen</animation> - <animation effect="fade" time="200">WindowClose</animation> - <onclick>Action(close)</onclick> - </control> <!-- GROUP CONTROLS --> <control type="group"> <left>0</left> @@ -32,6 +20,13 @@ </animation> <!-- BACKGROUND --> + <control type="image"> + <width>1280</width> + <height>720</height> + <texture colordiffuse="FF232323">white.png</texture> + </control> + + <!-- FANART --> <control type="image" id='30000'> <width>1280</width> <height>720</height> @@ -48,6 +43,19 @@ <animation effect="zoom" pulse ="true" center="auto" start="0,100" end="100,100" time="1000" condition="Control.IsVisible(30001)">Conditional</animation> </control> + <control type="button" id="30003"> + <top>40</top> + <right>40</right> + <height>50</height> + <width>50</width> + <textcolor>FFFFFFFF</textcolor> + <aligny>center</aligny> + <align>center</align> + <texturefocus colordiffuse="FFFFFFFF">close.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">close.png</texturenofocus> + <ondown>30002</ondown> + </control> + <!-- SELECTION --> <control type="fixedlist" id="30002"> <top>40</top> @@ -56,10 +64,11 @@ <viewtype>wrap</viewtype> <orientation>horizontal</orientation> <scrolltime tween="cubic" easing="out">300</scrolltime> - <itemlayout height="640" width="180"> + <onup>30003</onup> + <itemlayout width="180"> <!-- Poster --> <control type="image"> - <bottom>0</bottom> + <top>370</top> <left>0</left> <width>180</width> <height>270</height> @@ -68,14 +77,14 @@ <bordersize>10</bordersize> </control> </itemlayout> - <focusedlayout height="640" width="480"> + <focusedlayout width="427"> <!-- Title --> <control type="textbox"> - <left>500</left> + <left>447</left> <top>10</top> - <width>730</width> + <width>783</width> <height>30</height> - <font>font30_title</font> + <!-- <font>font30_title</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>[B]$INFO[ListItem.Label] [COLOR FFAAAAAA] $INFO[ListItem.Property(year)][/COLOR][/B] </label> @@ -84,11 +93,11 @@ </control> <!-- info --> <control type="textbox"> - <left>500</left> + <left>447</left> <top>50</top> - <width>730</width> + <width>783</width> <height>30</height> - <font>font13</font> + <!-- <font>font13</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>$ADDON[plugin.video.kod 60382] $INFO[ListItem.Property(genre)] | $ADDON[plugin.video.kod 60380] [B]$INFO[ListItem.Property(rating)][/B]</label> @@ -96,11 +105,11 @@ </control> <!-- Plot --> <control type="textbox"> - <left>500</left> + <left>447</left> <top>90</top> - <width>730</width> + <width>783</width> <height>250</height> - <font>font13</font> + <!-- <font>font13</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>$INFO[ListItem.Property(plot)]</label> @@ -111,7 +120,7 @@ <control type="image"> <bottom>0</bottom> <left>0</left> - <width>480</width> + <width>427</width> <height>640</height> <texture>$INFO[ListItem.Property(thumbnail)]</texture> <aspectratio>scale</aspectratio> diff --git a/resources/skins/Default/720p/Renumber.xml b/resources/skins/Default/720p/Renumber.xml 
new file mode 100644 index 00000000..440a0937 --- /dev/null +++ b/resources/skins/Default/720p/Renumber.xml @@ -0,0 +1,631 @@ +<?xml version="1.0" encoding="utf-8"?> +<window> + <allowoverlays>false</allowoverlays> + <animation type="WindowOpen" reversible="false"> + <effect type="fade" start="0" end="100" time="200" /> + </animation> + <animation type="WindowClose" reversible="false"> + <effect type="fade" start="100" end="0" time="200" /> + </animation> + <controls> + <!-- MAIN SELECTION --> + <control type='group' id='100'> + <height>100%</height> + <width>100%</width> + <!-- Background --> + <control type="image"> + <height>100%</height> + <width>100%</width> + <texture colordiffuse="CC232323">white.png</texture> + </control> + <control type="textbox"> + <top>640</top> + <left>40</left> + <height>40</height> + <width>1200</width> + <align>center</align> + <aligny>center</aligny> + <textcolor>80FFFFFF</textcolor> + <label>$ADDON[plugin.video.kod 70830]</label> + </control> + <!-- main selection window --> + <control type="group"> + <top>288.5</top> + <left>370</left> + <height>140</height> + <width>540</width> + <!-- Background --> + <control type="image"> + <height>100%</height> + <width>100%</width> + <texture colordiffuse="FF232323">white.png</texture> + </control> + <control type="button" id="101"> + <top>30</top> + <left>20</left> + <height>60</height> + <width>100</width> + <align>center</align> + <aligny>center</aligny> + <textcolor>FFFFFFFF</textcolor> + <focusedcolor>FFFFFFFF</focusedcolor> + <texturefocus colordiffuse="FFFFFFFF" border="-20,0,-20,0">updn.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF" border="-20,0,-20,0">updn.png</texturenofocus> + </control> + <!-- divider --> + <control type="textbox"> + <top>30</top> + <left>120</left> + <height>60</height> + <width>20</width> + <textcolor>FFFFFFFF</textcolor> + <align>center</align> + <aligny>center</aligny> + <label>[B]X[/B]</label> + </control> + <control type="button" id="102"> + <top>30</top> + <left>140</left> + <height>60</height> + <width>100</width> + <align>center</align> + <aligny>center</aligny> + <textcolor>FFFFFFFF</textcolor> + <focusedcolor>FFFFFFFF</focusedcolor> + <texturefocus colordiffuse="FFFFFFFF" border="-20,0,-20,0">updn.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF" border="-20,0,-20,0">updn.png</texturenofocus> + </control> + <!-- ok --> + <control type="button" id="103"> + <top>35</top> + <left>260</left> + <height>50</height> + <width>50</width> + <textcolor>FFFFFFFF</textcolor> + <aligny>center</aligny> + <align>center</align> + <texturefocus colordiffuse="FFFFFFFF">ok.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">ok.png</texturenofocus> + </control> + <!-- Select Specials --> + <control type="button" id="104"> + <top>35</top> + <left>310</left> + <height>50</height> + <width>50</width> + <textcolor>FFFFFFFF</textcolor> + <aligny>center</aligny> + <align>center</align> + <texturefocus colordiffuse="FFFFFFFF">specials.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">specials.png</texturenofocus> + </control> + <!-- Manual renumbering --> + <control type="button" id="105"> + <top>35</top> + <left>360</left> + <height>50</height> + <width>50</width> + <textcolor>FFFFFFFF</textcolor> + <aligny>center</aligny> + <align>center</align> + <texturefocus colordiffuse="FFFFFFFF">manual.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">manual.png</texturenofocus> + </control> + <!-- delete --> + <control type="button" id="106"> +
<top>35</top> + <left>410</left> + <height>50</height> + <width>50</width> + <textcolor>FFFFFFFF</textcolor> + <aligny>center</aligny> + <align>center</align> + <texturefocus colordiffuse="FFFFFFFF">delete.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">delete.png</texturenofocus> + </control> + <!-- annulla --> + <control type="button" id="107"> + <top>35</top> + <left>460</left> + <height>50</height> + <width>50</width> + <textcolor>FFFFFFFF</textcolor> + <aligny>center</aligny> + <align>center</align> + <texturefocus colordiffuse="FFFFFFFF">close.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">close.png</texturenofocus> + </control> + <control type="label" id="108"> + <bottom>5</bottom> + <width>100%</width> + <height>30</height> + <textcolor>FFFFFFFF</textcolor> + <align>center</align> + <aligny>center</aligny> + </control> + </control> + </control> + <!-- END MAIN SELECTION --> + + <!-- SPECIALS --> + <control type='group' id='200'> + <height>100%</height> + <width>100%</width> + + <!-- BACKGROUND --> + <control type="image" id="208"> + <top>0</top> + <left>0</left> + <height>100%</height> + <width>100%</width> + <texture colordiffuse="FF232323">white.png</texture> + </control> + + <!-- POSTER --> + <control type="image" id="201"> + <top>0</top> + <left>0</left> + <height>720</height> + <width>480</width> + <texture/> + </control> + + <!-- EPISODES LIST --> + <control type="list" id="202"> + <top>140</top> + <left>520</left> + <height>540</height> + <width>340</width> + <onleft>10002</onleft> + <onright>203</onright> + <itemlayout width="340" height="60"> + <control type="label"> + <height>100%</height> + <width>300</width> + <left>20</left> + <textcolor>FFFFFFFF</textcolor> + <label>[B]Episodio $INFO[ListItem.Label()][/B]</label> + <aligny>center</aligny> + </control> + </itemlayout> + <focusedlayout width="340" height="60"> + <control type="image"> + <height>100%</height> + <width>100%</width> + <texture colordiffuse="22FFFFFF">white.png</texture> + <visible allowhiddenfocus="true">Control.HasFocus(202)</visible> + </control> + <control type="image"> + <top>10</top> + <left>290</left> + <height>40</height> + <width>40</width> + <texture colordiffuse="FFFFFFFF">add.png</texture> + <visible allowhiddenfocus="true">Control.HasFocus(202)</visible> + </control> + <control type="label"> + <height>100%</height> + <width>300</width> + <left>20</left> + <textcolor>FFFFFFFF</textcolor> + <label>[B]Episodio $INFO[ListItem.Label()][/B]</label> + <aligny>center</aligny> + </control> + </focusedlayout> + </control> + + <!-- SPECIALS LIST --> + <control type='group'> + <top>140</top> + <left>900</left> + <height>540</height> + <width>340</width> + <control type="list" id="203"> + <height>540</height> + <width>340</width> + <onleft>202</onleft> + <onright>204</onright> + <itemlayout width="340" height="60"> + <!-- EP NUMBER --> + <control type="label"> + <left>20</left> + <height>60</height> + <width>140</width> + <textcolor>80FFFFFF</textcolor> + <aligny>center</aligny> + <label>[B]0x$INFO[ListItem.Label()] - Ep. 
$INFO[ListItem.Property(title)][/B]</label> + </control> + </itemlayout> + <focusedlayout width="340" height="60"> + <control type="image"> + <height>100%</height> + <width>100%</width> + <texture colordiffuse="22FFFFFF">white.png</texture> + <visible allowhiddenfocus="true">!Control.HasFocus(202)</visible> + </control> + <!-- EP NUMBER --> + <control type="label"> + <left>20</left> + <height>60</height> + <width>140</width> + <textcolor>FFFFFFFF</textcolor> + <aligny>center</aligny> + <label>[B]0x$INFO[ListItem.Label()] - Ep. $INFO[ListItem.Property(title)][/B]</label> + </control> + </focusedlayout> + </control> + + <!-- ITEM ACTIONS --> + <control type="group" id='204'> + <visible allowhiddenfocus="true">Integer.IsGreater(Container(203).Position,-1)</visible> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(203).Position,0)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(203).Position,1)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(203).Position,2)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(203).Position,3)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(203).Position,4)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(203).Position,5)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(203).Position,6)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(203).Position,7)">Conditional</animation> + <!-- move up --> + <control type="button" id="205"> + <top>10</top> + <right>90</right> + <height>40</height> + <width>40</width> + <onleft>203</onleft> + <onright>206</onright> + <onup>Control.Move(203,-1)</onup> + <ondown>Control.Move(203,1)</ondown> + <texturefocus colordiffuse="FFFFFFFF">up.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">up.png</texturenofocus> + </control> + <!-- move down --> + <control type="button" id="206"> + <top>10</top> + <right>50</right> + <height>40</height> + <width>40</width> + <onleft>205</onleft> + <onright>207</onright> + <onup>Control.Move(203,-1)</onup> + <ondown>Control.Move(203,1)</ondown> + <texturefocus colordiffuse="FFFFFFFF">down.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">down.png</texturenofocus> + </control> + <!-- remove --> + <control type="button" id="207"> + <top>10</top> + <right>10</right> + <height>40</height> + <width>40</width> + <onleft>206</onleft> + <onright>10002</onright> + <onup>Control.Move(203,-1)</onup> + <ondown>Control.Move(203,1)</ondown> + <texturefocus colordiffuse="FFFFFFFF">delete.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">delete.png</texturenofocus> + </control> + </control> + </control> + </control> + <!-- END SPECIALS --> + + <!-- MANUAL --> + <control type='group' id='300'> + <height>100%</height> + <width>100%</width> + + <!-- BACKGROUND --> + <control type="image" id="310"> + <top>0</top> + <left>0</left> + <height>100%</height> + <width>100%</width> + <texture colordiffuse="FF232323">white.png</texture> + </control> + + <!-- POSTER --> + <control type="image" id="301"> + <top>0</top> + <left>0</left> + <height>720</height> + <width>480</width> + </control> + + <!-- EPISODES LIST --> + <control type='group'> + <top>140</top> + <left>520</left> + 
<height>540</height> + <width>340</width> + <onleft>10002</onleft> + <onright>306</onright> + <control type="list" id="302"> + <height>100%</height> + <width>100%</width> + <onleft>10002</onleft> + <onright>306</onright> + <itemlayout width="340" height="60"> + <control type="label"> + <height>100%</height> + <width>120</width> + <left>20</left> + <textcolor>FFFFFFFF</textcolor> + <label>[B]Episodio $INFO[ListItem.Label()][/B]</label> + <aligny>center</aligny> + </control> + <!-- first season number --> + <control type="textbox"> + <right>100</right> + <height>60</height> + <width>60</width> + <onleft>302</onleft> + <onright>307</onright> + <align>center</align> + <aligny>center</aligny> + <textcolor>80FFFFFF</textcolor> + <label>[B]$INFO[ListItem.Property(season)][/B]</label> + </control> + <!-- divider --> + <control type="textbox"> + <right>80</right> + <height>60</height> + <width>20</width> + <textcolor>80FFFFFF</textcolor> + <align>center</align> + <aligny>center</aligny> + <label>[B]X[/B]</label> + </control> + <!-- first episode number --> + <control type="textbox"> + <right>20</right> + <height>60</height> + <width>60</width> + <onleft>306</onleft> + <onright>308</onright> + <align>center</align> + <aligny>center</aligny> + <textcolor>80FFFFFF</textcolor> + <label>[B]$INFO[ListItem.Property(episode)][/B]</label> + </control> + </itemlayout> + <focusedlayout width="340" height="60"> + <control type="image"> + <height>100%</height> + <width>100%</width> + <texture colordiffuse="22FFFFFF">white.png</texture> + <visible allowhiddenfocus="true">Control.HasFocus(302)</visible> + </control> + <control type="label"> + <height>100%</height> + <width>120</width> + <left>20</left> + <textcolor>FFFFFFFF</textcolor> + <label>[B]Episodio $INFO[ListItem.Label()][/B]</label> + <aligny>center</aligny> + </control> + <!-- first season number --> + <control type="textbox"> + <right>100</right> + <height>60</height> + <width>60</width> + <onleft>302</onleft> + <onright>307</onright> + <align>center</align> + <aligny>center</aligny> + <textcolor>FFFFFFFF</textcolor> + <label>[B]$INFO[ListItem.Property(season)][/B]</label> + </control> + <!-- divider --> + <control type="textbox"> + <right>80</right> + <height>60</height> + <width>20</width> + <textcolor>FFFFFFFF</textcolor> + <align>center</align> + <aligny>center</aligny> + <label>[B]X[/B]</label> + </control> + <!-- first episode number --> + <control type="textbox"> + <right>20</right> + <height>60</height> + <width>60</width> + <onleft>306</onleft> + <onright>308</onright> + <align>center</align> + <aligny>center</aligny> + <textcolor>FFFFFFFF</textcolor> + <label>[B]$INFO[ListItem.Property(episode)][/B]</label> + </control> + </focusedlayout> + </control> + + <!-- MANUAL EPISODE CONTROL --> + <control type='group' id='305'> + <visible allowhiddenfocus="true">Integer.IsGreater(Container(302).Position,-1)</visible> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(302).Position,0)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(302).Position,1)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(302).Position,2)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(302).Position,3)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(302).Position,4)">Conditional</animation> + <animation effect="slide" end="0,60" 
condition="Integer.IsGreater(Container(302).Position,5)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(302).Position,6)">Conditional</animation> + <animation effect="slide" end="0,60" condition="Integer.IsGreater(Container(302).Position,7)">Conditional</animation> + <!-- first season number --> + <control type="button" id="306"> + <right>100</right> + <height>60</height> + <width>60</width> + <onleft>302</onleft> + <onright>307</onright> + <align>center</align> + <aligny>center</aligny> + <textcolor>FFFFFFFF</textcolor> + <texturefocus colordiffuse="FFFFFFFF">updn.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">updn.png</texturenofocus> + </control> + <!-- first episode number --> + <control type="button" id="307"> + <right>20</right> + <height>60</height> + <width>60</width> + <onleft>306</onleft> + <onright>303</onright> + <align>center</align> + <aligny>center</aligny> + <textcolor>FFFFFFFF</textcolor> + <texturefocus colordiffuse="FFFFFFFF">updn.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">updn.png</texturenofocus> + </control> + </control> + </control> + + <!-- SEASONS LIST --> + <control type='list' id='303'> + <top>140</top> + <left>880</left> + <height>540</height> + <width>80</width> + <onleft>302</onleft> + <onright>304</onright> + <itemlayout width="80" height="60"> + <control type="label"> + <height>100%</height> + <width>100%</width> + <textcolor>80FFFFFF</textcolor> + <label>[B]$INFO[ListItem.Label()][/B]</label> + <align>center</align> + <aligny>center</aligny> + </control> + </itemlayout> + <focusedlayout width="80" height="60"> + <control type="image"> + <height>100%</height> + <width>100%</width> + <texture colordiffuse="22FFFFFF">white.png</texture> + <visible allowhiddenfocus="true">Control.HasFocus(303)</visible> + </control> + <control type="label"> + <height>100%</height> + <width>100%</width> + <textcolor>FFFFFFFF</textcolor> + <label>[B]$INFO[ListItem.Label()][/B]</label> + <align>center</align> + <aligny>center</aligny> + <visible allowhiddenfocus="true">Control.HasFocus(303)</visible> + </control> + <control type="image"> + <height>100%</height> + <width>100%</width> + <texture colordiffuse="11FFFFFF">white.png</texture> + <visible allowhiddenfocus="true">!Control.HasFocus(303)</visible> + </control> + <control type="label"> + <height>100%</height> + <width>100%</width> + <textcolor>80FFFFFF</textcolor> + <label>[B]$INFO[ListItem.Label()][/B]</label> + <align>center</align> + <aligny>center</aligny> + <visible allowhiddenfocus="true">!Control.HasFocus(303)</visible> + </control> + </focusedlayout> + </control> + + <control type="image"> + <top>140</top> + <left>960</left> + <height>540</height> + <height>100%</height> + <width>290</width> + <texture colordiffuse="11FFFFFF">white.png</texture> + </control> + + <!-- EPISODES LIST --> + <control type='list' id='304'> + <top>140</top> + <left>970</left> + <height>540</height> + <width>270</width> + <onleft>303</onleft> + <onright>10002</onright> + <itemlayout width="270" height="60"> + <control type="label"> + <height>100%</height> + <width>200</width> + <left>40</left> + <textcolor>FFFFFFFF</textcolor> + <label>[B]$INFO[ListItem.Label()][/B]</label> + <aligny>center</aligny> + </control> + </itemlayout> + <focusedlayout width="270" height="60"> + <control type="image"> + <height>100%</height> + <width>100%</width> + <texture colordiffuse="22FFFFFF">white.png</texture> + <visible 
allowhiddenfocus="true">Control.HasFocus(304)</visible> + </control> + <control type="label"> + <height>100%</height> + <width>200</width> + <left>40</left> + <textcolor>FFFFFFFF</textcolor> + <label>[B]$INFO[ListItem.Label()][/B]</label> + <aligny>center</aligny> + </control> + </focusedlayout> + </control> + </control> + <!-- END MANUAL --> + + <!-- MAIN ACTIONS --> + <control type='group' id='10000'> + <visible allowhiddenfocus="true">Control.IsVisible(200) | Control.IsVisible(300)</visible> + <!-- info --> + <control type="label" id="10001"> + <top>40</top> + <left>540</left> + <height>50</height> + <width>560</width> + <textcolor>FFFFFFFF</textcolor> + <aligny>center</aligny> + </control> + <!-- ok --> + <control type="button" id="10002"> + <top>40</top> + <right>90</right> + <height>50</height> + <width>50</width> + <textcolor>FFFFFFFF</textcolor> + <aligny>center</aligny> + <align>center</align> + <texturefocus colordiffuse="FFFFFFFF">ok.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">ok.png</texturenofocus> + <ondown condition="Control.IsVisible(200)">202</ondown> + <ondown condition="Control.IsVisible(300)">302</ondown> + <onleft>10003</onleft> + <onright>10003</onright> + </control> + <!-- annulla --> + <control type="button" id="10003"> + <top>40</top> + <right>40</right> + <height>50</height> + <width>50</width> + <textcolor>FFFFFFFF</textcolor> + <aligny>center</aligny> + <align>center</align> + <texturefocus colordiffuse="FFFFFFFF">close.png</texturefocus> + <texturenofocus colordiffuse="80FFFFFF">close.png</texturenofocus> + <ondown condition="Control.IsVisible(200)">202</ondown> + <ondown condition="Control.IsVisible(300)">302</ondown> + <onleft>10002</onleft> + <onright>10002</onright> + </control> + </control> + <!-- END MAIN ACTIONS --> + + </controls> +</window> diff --git a/resources/skins/Default/720p/SearchWindow.xml b/resources/skins/Default/720p/SearchWindow.xml index 5e33ff22..64610e7e 100644 --- a/resources/skins/Default/720p/SearchWindow.xml +++ b/resources/skins/Default/720p/SearchWindow.xml @@ -52,7 +52,7 @@ <top>50</top> <width>1000</width> <height>30</height> - <font>font30_title</font> + <!-- <font>font30_title</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>[UPPERCASE]$ADDON[plugin.video.kod 70821][/UPPERCASE]</label> @@ -71,7 +71,7 @@ <itemlayout height="570" width="180"> <!-- Poster --> <control type="image"> - <bottom>0</bottom> + <top>300</top> <left>0</left> <width>180</width> <height>270</height> @@ -87,7 +87,7 @@ <top>10</top> <width>800</width> <height>30</height> - <font>font30_title</font> + <!-- <font>font30_title</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>[B]$INFO[ListItem.Label] [COLOR FFAAAAAA]$INFO[ListItem.Property(year)][/COLOR][/B] </label> @@ -100,7 +100,7 @@ <top>50</top> <width>800</width> <height>30</height> - <font>font30</font> + <!-- <font>font30</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>[B]$INFO[ListItem.Property(channel)][/B]</label> @@ -112,7 +112,7 @@ <top>90</top> <width>800</width> <height>170</height> - <font>font13</font> + <!-- <font>font13</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>$INFO[ListItem.Property(plot)]</label> @@ -120,7 +120,7 @@ </control> <!-- Poster --> <control type="image"> - <bottom>0</bottom> + <top>0</top> <left>0</left> <width>380</width> <height>570</height> diff --git a/resources/skins/Default/720p/ServersWindow.xml 
b/resources/skins/Default/720p/ServersWindow.xml index ecac85eb..9767e65d 100644 --- a/resources/skins/Default/720p/ServersWindow.xml +++ b/resources/skins/Default/720p/ServersWindow.xml @@ -52,7 +52,7 @@ <top>50</top> <width>1000</width> <height>30</height> - <font>font30_title</font> + <!-- <font>font30_title</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>[UPPERCASE]$ADDON[plugin.video.kod 70145][/UPPERCASE]</label> @@ -71,7 +71,7 @@ <itemlayout height="400" width="200"> <!-- Poster --> <control type="image"> - <bottom>0</bottom> + <top>200</top> <left>0</left> <width>200</width> <height>200</height> @@ -87,7 +87,7 @@ <top>0</top> <width>800</width> <height>30</height> - <font>font30_title</font> + <!-- <font>font30_title</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>[B]$INFO[ListItem.Property(server)] [COLOR FFAAAAAA]$INFO[ListItem.Property(quality)][/COLOR][/B] </label> @@ -100,7 +100,7 @@ <top>40</top> <width>800</width> <height>120</height> - <font>font13</font> + <!-- <font>font13</font> --> <textcolor>FFFFFFFF</textcolor> <shadowcolor>00000000</shadowcolor> <label>$INFO[ListItem.Property(plot)]</label> diff --git a/resources/skins/Default/720p/TitleOrIDWindow.xml b/resources/skins/Default/720p/TitleOrIDWindow.xml new file mode 100644 index 00000000..4f9061ce --- /dev/null +++ b/resources/skins/Default/720p/TitleOrIDWindow.xml @@ -0,0 +1,105 @@ +<?xml version="1.0" encoding="utf-8"?> +<window> + <allowoverlays>false</allowoverlays> + <animation type="WindowOpen" reversible="false"> + <effect type="fade" start="0" end="100" time="300" /> + </animation> + <animation type="WindowClose" reversible="false"> + <effect type="fade" start="100" end="0" time="300" /> + </animation> + <controls> + <control type="button" id="104"> + <description>CLOSE</description> + <top>0</top> + <left>0</left> + <height>100%</height> + <width>100%</width> + <texturefocus colordiffuse="80232323">white.png</texturefocus> + <texturenofocus colordiffuse="80232323">white.png</texturenofocus> + </control> + <control type="textbox" id="100"> + <description>Not Found</description> + <top>150</top> + <left>40</left> + <height>40</height> + <width>1200</width> + <!-- <font>font30_title</font> --> + <textcolor>CCFFFFFF</textcolor> + <aligny>center</aligny> + <align>center</align> + <label/> + </control> + <control type="group"> + <top>260</top> + <left>400</left> + <height>200</height> + <width>480</width> + <control type="image"> + <description>Background</description> + <height>100%</height> + <width>100%</width> + <texture colordiffuse="FF232323">white.png</texture> + </control> + <control type="textbox"> + <description>Modify</description> + <top>20</top> + <left>40</left> + <height>40</height> + <width>100</width> + <!-- <font>font30_title</font> --> + <textcolor>FFFFFFFF</textcolor> + <label>$ADDON[plugin.video.kod 70714]</label> + <aligny>center</aligny> + <align>left</align> + </control> + <control type="image"> + <description>Separator</description> + <top>80</top> + <left>0</left> + <height>1</height> + <width>480</width> + <texture colordiffuse="FFFFFFFF">white.png</texture> + </control> + <control type="button" id="103"> + <description>CLOSE</description> + <top>20</top> + <right>20</right> + <height>40</height> + <width>40</width> + <texturefocus colordiffuse="FFFFFFFF">close.png</texturefocus> + <texturenofocus colordiffuse="88FFFFFF">close.png</texturenofocus> + <ondown>101</ondown> + </control> + <control
type="grouplist"> + <top>120</top> + <left>40</left> + <height>40</height> + <width>400</width> + <orientation>horizontal</orientation> + <itemgap>40</itemgap> + <onup>103</onup> + <control type="button" id="101"> + <description>Title</description> + <height>40</height> + <width>180</width> + <textcolor>FFFFFFFF</textcolor> + <aligny>center</aligny> + <align>center</align> + <texturefocus border="10" colordiffuse="22FFFFFF">white.png</texturefocus> + <texturenofocus border="10"></texturenofocus> + </control> + <control type="button" id="102"> + <description>ID</description> + <height>40</height> + <width>180</width> + <textcolor>FFFFFFFF</textcolor> + <aligny>center</aligny> + <align>center</align> + <texturefocus border="10" colordiffuse="22FFFFFF">white.png</texturefocus> + <texturenofocus border="10"></texturenofocus> + </control> + </control> + </control> + </controls> +</window> diff --git a/resources/skins/Default/media/add.png b/resources/skins/Default/media/add.png new file mode 100644 index 00000000..36e84450 Binary files /dev/null and b/resources/skins/Default/media/add.png differ diff --git a/resources/skins/Default/media/close.png b/resources/skins/Default/media/close.png index 0eb9e133..b647ddca 100644 Binary files a/resources/skins/Default/media/close.png and b/resources/skins/Default/media/close.png differ diff --git a/resources/skins/Default/media/delete.png b/resources/skins/Default/media/delete.png new file mode 100644 index 00000000..f8105582 Binary files /dev/null and b/resources/skins/Default/media/delete.png differ diff --git a/resources/skins/Default/media/down.png b/resources/skins/Default/media/down.png new file mode 100644 index 00000000..c43ed16b Binary files /dev/null and b/resources/skins/Default/media/down.png differ diff --git a/resources/skins/Default/media/exit.png b/resources/skins/Default/media/exit.png new file mode 100644 index 00000000..b2dc396c Binary files /dev/null and b/resources/skins/Default/media/exit.png differ diff --git a/resources/skins/Default/media/fhd.png b/resources/skins/Default/media/fhd.png new file mode 100644 index 00000000..c3b4d3f5 Binary files /dev/null and b/resources/skins/Default/media/fhd.png differ diff --git a/resources/skins/Default/media/hd.png b/resources/skins/Default/media/hd.png new file mode 100644 index 00000000..b14a6eb2 Binary files /dev/null and b/resources/skins/Default/media/hd.png differ diff --git a/resources/skins/Default/media/left.png b/resources/skins/Default/media/left.png new file mode 100644 index 00000000..fc853099 Binary files /dev/null and b/resources/skins/Default/media/left.png differ diff --git a/resources/skins/Default/media/manual.png b/resources/skins/Default/media/manual.png new file mode 100644 index 00000000..df30ed0c Binary files /dev/null and b/resources/skins/Default/media/manual.png differ diff --git a/resources/skins/Default/media/menu.png b/resources/skins/Default/media/menu.png new file mode 100644 index 00000000..eac13daf Binary files /dev/null and b/resources/skins/Default/media/menu.png differ diff --git a/resources/skins/Default/media/ok.png b/resources/skins/Default/media/ok.png new file mode 100644 index 00000000..d9292366 Binary files /dev/null and b/resources/skins/Default/media/ok.png differ diff --git a/resources/skins/Default/media/pause.png b/resources/skins/Default/media/pause.png new file mode 100644 index 00000000..c6510af6 Binary files /dev/null and b/resources/skins/Default/media/pause.png differ diff --git a/resources/skins/Default/media/play.png 
b/resources/skins/Default/media/play.png new file mode 100644 index 00000000..94a92ade Binary files /dev/null and b/resources/skins/Default/media/play.png differ diff --git a/resources/skins/Default/media/progress.png b/resources/skins/Default/media/progress.png new file mode 100644 index 00000000..433a70ad Binary files /dev/null and b/resources/skins/Default/media/progress.png differ diff --git a/resources/skins/Default/media/right.png b/resources/skins/Default/media/right.png new file mode 100644 index 00000000..4e5c97fe Binary files /dev/null and b/resources/skins/Default/media/right.png differ diff --git a/resources/skins/Default/media/sd.png b/resources/skins/Default/media/sd.png new file mode 100644 index 00000000..d381dfbe Binary files /dev/null and b/resources/skins/Default/media/sd.png differ diff --git a/resources/skins/Default/media/specials.png b/resources/skins/Default/media/specials.png new file mode 100644 index 00000000..17c187e1 Binary files /dev/null and b/resources/skins/Default/media/specials.png differ diff --git a/resources/skins/Default/media/stop.png b/resources/skins/Default/media/stop.png new file mode 100644 index 00000000..07b7d8d3 Binary files /dev/null and b/resources/skins/Default/media/stop.png differ diff --git a/resources/skins/Default/media/uhd.png b/resources/skins/Default/media/uhd.png new file mode 100644 index 00000000..399ae8e8 Binary files /dev/null and b/resources/skins/Default/media/uhd.png differ diff --git a/resources/skins/Default/media/up.png b/resources/skins/Default/media/up.png new file mode 100644 index 00000000..e2de1263 Binary files /dev/null and b/resources/skins/Default/media/up.png differ diff --git a/resources/skins/Default/media/updn.png b/resources/skins/Default/media/updn.png new file mode 100644 index 00000000..884b049e Binary files /dev/null and b/resources/skins/Default/media/updn.png differ diff --git a/resources/skins/Default/media/white.png b/resources/skins/Default/media/white.png index 528c66f6..fe67a33a 100644 Binary files a/resources/skins/Default/media/white.png and b/resources/skins/Default/media/white.png differ diff --git a/servers/akvideo.py b/servers/akvideo.py index 346b3ff3..cabb9b02 100644 --- a/servers/akvideo.py +++ b/servers/akvideo.py @@ -13,7 +13,7 @@ headers = [['User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20 def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) # page_url = re.sub('akvideo.stream/(?:video/|video\\.php\\?file_code=)?(?:embed-)?([a-zA-Z0-9]+)','akvideo.stream/video/\\1',page_url) global data page = httptools.downloadpage(page_url, headers=headers) @@ -32,18 +32,18 @@ def test_video_exists(page_url): # ID, code = scrapertools.find_single_match(data, r"""input\D*id=(?:'|")([^'"]+)(?:'|").*?value='([a-z0-9]+)""") # post = urllib.urlencode({ID: code}) - # logger.info('PAGE DATA' + data) + # logger.debug('PAGE DATA' + data) if "File Not Found" in data: return False, config.get_localized_string(70449) % "Akvideo" return True, "" def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info(" url=" + page_url) + logger.debug(" url=" + page_url) video_urls = [] global data - # logger.info('PAGE DATA' + data) + # logger.debug('PAGE DATA' + data) # sitekey = scrapertools.find_single_match(data, 'data-sitekey="([^"]+)') # captcha = platformtools.show_recaptcha(sitekey, page_url) if sitekey else '' # diff --git a/servers/anavids.py b/servers/anavids.py index c4169620..dd506dd3 
100644 --- a/servers/anavids.py +++ b/servers/anavids.py @@ -6,7 +6,7 @@ from platformcode import config, logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url, cookies=False).data if 'File you are looking for is not found.' in data: diff --git a/servers/animeid.py b/servers/animeid.py index 3cec4070..1eb8414a 100644 --- a/servers/animeid.py +++ b/servers/animeid.py @@ -6,7 +6,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "no longer exists" in data or "to copyright issues" in data: return False, config.get_localized_string(70449) % "animeid" @@ -16,7 +16,7 @@ def test_video_exists(page_url): def get_video_url(page_url, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data video_urls = [] label, videourl = scrapertools.find_single_match(data, 'label":"([^"]+)".*?file":"([^"]+)') diff --git a/servers/anonfile.py b/servers/anonfile.py index 4191caff..9d48396b 100644 --- a/servers/anonfile.py +++ b/servers/anonfile.py @@ -9,7 +9,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) response = httptools.downloadpage(page_url) if not response.success or "Not Found" in response.data or "File was deleted" in response.data or "is no longer available" in response.data: return False, config.get_localized_string(70449) % "anonfile" @@ -17,7 +17,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] data = httptools.downloadpage(page_url).data patron = 'download-url.*?href="([^"]+)"' diff --git a/servers/archiveorg.py b/servers/archiveorg.py index 61677b02..df81e381 100644 --- a/servers/archiveorg.py +++ b/servers/archiveorg.py @@ -9,7 +9,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url) if data.code == 404: return False, config.get_localized_string(70449) % "ArchiveOrg" @@ -17,7 +17,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] data = httptools.downloadpage(page_url).data patron = '<meta property="og:video" content="([^"]+)">' diff --git a/servers/backin.py b/servers/backin.py index c070d5d6..8214a944 100644 --- a/servers/backin.py +++ b/servers/backin.py @@ -9,7 +9,7 @@ except ImportError: from urllib import urlencode def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) if 'http://' in page_url: # fastids page_url = httptools.downloadpage(page_url, follow_redirects=False, only_headers=True).headers['location'] @@ -24,7 +24,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("page_url=" + page_url) + logger.debug("page_url=" + page_url) video_urls = [] @@ -36,18 
+36,18 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= if data_pack: from lib import jsunpack data = jsunpack.unpack(data_pack) - logger.info("page_url=" + data) + logger.debug("page_url=" + data) # URL url = scrapertools.find_single_match(data, r'"src"value="([^"]+)"') if not url: url = scrapertools.find_single_match(data, r'file\s*:\s*"([^"]+)"') - logger.info("URL=" + str(url)) + logger.debug("URL=" + str(url)) # URL del vídeo video_urls.append([".mp4" + " [backin]", url]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], httptools.get_url_headers(video_url[1]))) + logger.debug("%s - %s" % (video_url[0], httptools.get_url_headers(video_url[1]))) return video_urls diff --git a/servers/badshare.py b/servers/badshare.py index 78c62986..7e713a9a 100644 --- a/servers/badshare.py +++ b/servers/badshare.py @@ -11,7 +11,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global page page = httptools.downloadpage(page_url) if not page.success: @@ -20,7 +20,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] ext = '.mp4' diff --git a/servers/bdupload.py b/servers/bdupload.py index 6321dd3a..cd72b590 100644 --- a/servers/bdupload.py +++ b/servers/bdupload.py @@ -10,7 +10,7 @@ headers = {'User-Agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "Archive no Encontrado" in data: return False, config.get_localized_string(70449) % "bdupload" @@ -19,7 +19,7 @@ def test_video_exists(page_url): def get_video_url(page_url, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data post = "" patron = '(?s)type="hidden" name="([^"]+)".*?value="([^"]*)"' diff --git a/servers/cinemaupload.py b/servers/cinemaupload.py index c3f05da8..a4d75d56 100644 --- a/servers/cinemaupload.py +++ b/servers/cinemaupload.py @@ -11,7 +11,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url) if data.code == 404: return False, config.get_localized_string(70449) % "CinemaUpload" @@ -19,7 +19,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] data = httptools.downloadpage(page_url).data data = re.sub(r'\n|\r|\t| |<br>|\s{2,}', "", data) diff --git a/servers/clicknupload.py b/servers/clicknupload.py index f54c5d5c..a2f95930 100755 --- a/servers/clicknupload.py +++ b/servers/clicknupload.py @@ -22,7 +22,7 @@ excption = False def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = get_data(page_url.replace(".org", ".me")) if "File Not Found" in data: return False, config.get_localized_string(70449) % "Clicknupload" @@ -31,7 +31,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - 
logger.info("url=" + page_url) + logger.debug("url=" + page_url) data = get_data(page_url.replace(".org", ".me")) @@ -51,7 +51,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= media_url = media.rsplit('/', 1)[0] + "/" + url_strip video_urls.append([scrapertools.get_filename_from_url(media_url)[-4:] + " [clicknupload]", media_url]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/clipwatching.py b/servers/clipwatching.py index 93aa4804..6ac3ec78 100644 --- a/servers/clipwatching.py +++ b/servers/clipwatching.py @@ -6,7 +6,7 @@ from lib import jsunpack from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url).data if "File Not Found" in data or "File was deleted" in data: @@ -15,7 +15,7 @@ def test_video_exists(page_url): def get_video_url(page_url, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] try: diff --git a/servers/cloudvideo.py b/servers/cloudvideo.py index b7885afe..36e299fc 100644 --- a/servers/cloudvideo.py +++ b/servers/cloudvideo.py @@ -8,7 +8,7 @@ from lib import jsunpack def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) html = httptools.downloadpage(page_url) global data data = html.data @@ -18,7 +18,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] global data # data = httptools.downloadpage(page_url).data diff --git a/servers/crunchyroll.py b/servers/crunchyroll.py index 2d252b3a..31384209 100755 --- a/servers/crunchyroll.py +++ b/servers/crunchyroll.py @@ -30,7 +30,7 @@ proxy = "https://www.usa-proxy.org/" def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url, headers=GLOBAL_HEADER).data if "Este es un clip de muestra" in data: @@ -44,7 +44,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): #page_url='https://www.crunchyroll.com/es-es/one-piece/episode-891-climbing-up-a-waterfall-a-great-journey-through-the-land-of-wanos-sea-zone-786643' - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] if "crunchyroll.com" in page_url: media_id = page_url.rsplit("-", 1)[1] @@ -94,7 +94,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= file_sub = "" video_urls.append(["%s %sp [crunchyroll]" % (filename, quality), media_url, 0, file_sub]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/dailymotion.py b/servers/dailymotion.py index 7de4b4fd..b6d3ddbc 100644 --- a/servers/dailymotion.py +++ b/servers/dailymotion.py @@ -6,7 +6,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global response response = httptools.downloadpage(page_url, cookies=False) @@ -18,7 +18,7 @@ 
def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] cookie = {'Cookie': response.headers["set-cookie"]} data = response.data.replace("\\", "") @@ -40,5 +40,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= stream_url = stream_url_http video_urls.append(["%sp .%s [dailymotion]" % (calidad, stream_type), stream_url, 0, subtitle]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls \ No newline at end of file diff --git a/servers/debriders/alldebrid.py b/servers/debriders/alldebrid.py index 6f0bd18a..3818fffd 100644 --- a/servers/debriders/alldebrid.py +++ b/servers/debriders/alldebrid.py @@ -7,7 +7,7 @@ from platformcode import logger # Returns an array of possible video url's from the page_url def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info() + logger.debug() page_url = correct_url(page_url) dd1 = httptools.downloadpage("https://api.alldebrid.com/user/login?agent=mySoft&username=%s&password=%s" %(user, password)).data token = scrapertools.find_single_match(dd1, 'token":"([^"]+)') diff --git a/servers/debriders/realdebrid.py b/servers/debriders/realdebrid.py index 7d8b855d..d8e6d4f5 100755 --- a/servers/debriders/realdebrid.py +++ b/servers/debriders/realdebrid.py @@ -22,7 +22,7 @@ headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:65.0) Gecko/20 # Returns an array of possible video url's from the page_url def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s' , video_password=%s)" % (page_url, video_password)) + logger.debug("(page_url='%s' , video_password=%s)" % (page_url, video_password)) page_url = page_url.replace(".nz/embed", ".nz/") # Se comprueba si existe un token guardado y sino se ejecuta el proceso de autentificación token_auth = config.get_setting("token", server="realdebrid") @@ -99,7 +99,7 @@ def get_enlaces(data): def authentication(): - logger.info() + logger.debug() try: client_id = "YTWNFBIJEEBP6" diff --git a/servers/decrypters/adfly.py b/servers/decrypters/adfly.py index 20b34d18..8acac50a 100755 --- a/servers/decrypters/adfly.py +++ b/servers/decrypters/adfly.py @@ -8,7 +8,7 @@ from platformcode import logger def get_long_url(short_url): - logger.info("short_url = '%s'" % short_url) + logger.debug("short_url = '%s'" % short_url) data = httptools.downloadpage(short_url).data ysmm = scrapertools.find_single_match(data, "var ysmm = '([^']+)';") diff --git a/servers/decrypters/linkbucks.py b/servers/decrypters/linkbucks.py index 5b15ec00..e1a7b521 100755 --- a/servers/decrypters/linkbucks.py +++ b/servers/decrypters/linkbucks.py @@ -17,7 +17,7 @@ from platformcode import logger # Obtiene la URL que hay detrás de un enlace a linkbucks def get_long_url(short_url): - logger.info("(short_url='%s')" % short_url) + logger.debug("(short_url='%s')" % short_url) request_headers = [] request_headers.append(["User-Agent", @@ -33,17 +33,17 @@ def get_long_url(short_url): while True: for name, value in response_headers: if name == "set-cookie": - logger.info("Set-Cookie: " + value) + logger.debug("Set-Cookie: " + value) cookie_name = scrapertools.scrapertools.find_single_match(value, '(.*?)\=.*?\;') cookie_value = 
scrapertools.scrapertools.find_single_match(value, '.*?\=(.*?)\;') request_headers.append(["Cookie", cookie_name + "=" + cookie_value]) body, response_headers = scrapertools.read_body_and_headers(url, headers=request_headers) - logger.info("body=" + body) + logger.debug("body=" + body) try: location = scrapertools.scrapertools.find_single_match(body, '<textarea.*?class="caja_des">([^<]+)</textarea>') - logger.info("location=" + location) + logger.debug("location=" + location) break except: n = n + 1 diff --git a/servers/decrypters/longurl.py b/servers/decrypters/longurl.py index 4c6cb012..b434f95b 100755 --- a/servers/decrypters/longurl.py +++ b/servers/decrypters/longurl.py @@ -38,15 +38,15 @@ servers = get_server_list() def get_long_urls(data): - logger.info() + logger.debug() patron = '<a href="http://([^"]+)"' matches = re.compile(patron, re.DOTALL).findall(data) for short_url in matches: if short_url.startswith(tuple(servers)): - logger.info(": " + short_url) + logger.debug(": " + short_url) longurl_data = httptools.downloadpage( "http://api.longurl.org/v2/expand?url=" + urllib.quote_plus(short_url)).data - logger.info(longurl_data) + logger.debug(longurl_data) try: long_url = scrapertools.scrapertools.find_single_match(longurl_data, '<long-url><!\[CDATA\[(.*?)\]\]></long-url>') except: diff --git a/servers/decrypters/safelinking.py b/servers/decrypters/safelinking.py index 912177e0..e63271e3 100644 --- a/servers/decrypters/safelinking.py +++ b/servers/decrypters/safelinking.py @@ -5,9 +5,9 @@ from platformcode import logger def get_long_url(short_url): - logger.info("(short_url='%s')" % short_url) + logger.debug("(short_url='%s')" % short_url) location = scrapertools.get_header_from_response(short_url, header_to_get="location") - logger.info("location=" + location) + logger.debug("location=" + location) return location diff --git a/servers/deltabit.py b/servers/deltabit.py index 957dca72..859c777c 100644 --- a/servers/deltabit.py +++ b/servers/deltabit.py @@ -1,36 +1,31 @@ # -*- coding: utf-8 -*- import time, sys -if sys.version_info[0] >= 3: - import urllib.parse as urllib -else: - import urllib -from core import httptools, scrapertools +from core import httptools, scrapertools, support from lib import jsunpack from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) - global data - data = httptools.downloadpage(page_url).data.replace('"', "'") + logger.debug("(page_url='%s')" % page_url) + global data, real_url + page = httptools.downloadpage(page_url) + data = page.data.replace('"', "'") + real_url = page.url + if "Not Found" in data or "File Does not Exist" in data: return False, config.get_localized_string(70449) % "DeltaBit" return True, "" + def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(deltabit page_url='%s')" % page_url) - video_urls = [] - global data - - post = urllib.urlencode({k: v for k, v in scrapertools.find_multiple_matches(data, "name='([^']+)' value='([^']*)'")}) + logger.debug("(deltabit page_url='%s')" % page_url) + global data, real_url + post = {k: v for k, v in scrapertools.find_multiple_matches(data, "name='([^']+)' value='([^']*)'")} time.sleep(2.5) - data = httptools.downloadpage(page_url, post=post).data + data = httptools.downloadpage(real_url, post=post).data - videos_packed = scrapertools.find_single_match(data, r"</div>\s*<script type='text/javascript'>(eval.function.p,a,c,k,e,.*?)\s*</script>") - - video_unpacked = 
jsunpack.unpack(videos_packed) - videos = scrapertools.find_single_match(video_unpacked, r'sources:\["([^"]+)"\]') - video_urls.append([videos.split('.')[-1] + ' [DeltaBit]', videos.replace('https:','http:')]) - return video_urls + # videos_packed = scrapertools.find_single_match(data, r"<script type='text/javascript'>(eval.function.p,a,c,k,e,.*?)\s*</script>") + # video_unpacked = jsunpack.unpack(videos_packed) + return support.get_jwplayer_mediaurl(data, 'DeltaBit', True) diff --git a/servers/directo.py b/servers/directo.py index 1efbe7fc..2ad2e56b 100644 --- a/servers/directo.py +++ b/servers/directo.py @@ -8,7 +8,7 @@ def test_video_exists(page_url): # Returns an array of possible video url's from the page_url def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [["%s %s" % (page_url[-4:], config.get_localized_string(30137)), page_url]] diff --git a/servers/doodstream.py b/servers/doodstream.py index 84f0c0e7..42f2cc01 100644 --- a/servers/doodstream.py +++ b/servers/doodstream.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- -import re, time +import re, time from lib import js2py from core import httptools, scrapertools from platformcode import logger, config def test_video_exists(page_url): global data - logger.info('page url=', page_url) + logger.debug('page url=', page_url) response = httptools.downloadpage(page_url) if response.code == 404 or 'File you are looking for is not found' in response.data: @@ -18,29 +18,20 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): + # from core.support import dbg;dbg() global data - logger.info("URL", page_url) - - video_urls = list() - host = "https://dood.watch" + logger.debug("URL", page_url) + # from core.support import dbg;dbg() + video_urls = [] + host = scrapertools.find_single_match(page_url, r'http[s]?://[^/]+') new_url = scrapertools.find_single_match(data, r'<iframe src="([^"]+)"') - if new_url: - data = httptools.downloadpage(host + new_url).data - logger.info('DATA', data) + if new_url: data = httptools.downloadpage(host + new_url).data label = scrapertools.find_single_match(data, r'type:\s*"video/([^"]+)"') - js_code = scrapertools.find_single_match(data, ('(function makePlay.*?;})')) - js_code = re.sub(r"\+Date.now\(\)", '', js_code) - js = js2py.eval_js(js_code) - makeplay = js() + str(int(time.time()*1000)) - - base_url = scrapertools.find_single_match(data, r"\$.get\('([^']+)'") - data = httptools.downloadpage("%s%s" % (host, base_url), headers={"referer": page_url}).data - data = re.sub(r'\s+', '', data) - - url = data + makeplay + "|Referer=%s" % page_url + base_url, token = scrapertools.find_single_match(data, r'''dsplayer\.hotkeys[^']+'([^']+).+?function\s*makePlay.+?return[^?]+([^"]+)''') + url = '{}{}{}|Referer={}'.format(httptools.downloadpage(host + base_url, headers={"Referer": page_url}).data, token, str(int(time.time() * 1000)), page_url) video_urls.append([ label + ' [DooD Stream]', url]) return video_urls \ No newline at end of file diff --git a/servers/dostream.py b/servers/dostream.py index 1fdc4883..258509f4 100644 --- a/servers/dostream.py +++ b/servers/dostream.py @@ -9,7 +9,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url) if data.code == 404: return False, 
config.get_localized_string(70449) % "Dostream" @@ -17,7 +17,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] data = httptools.downloadpage(page_url, headers={"Referer":page_url}).data patron = '"label":"([^"]+)".*?' diff --git a/servers/downace.py b/servers/downace.py index a238f2de..9fec2d44 100644 --- a/servers/downace.py +++ b/servers/downace.py @@ -6,7 +6,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "no longer exists" in data or "to copyright issues" in data: return False, "[Downace] El video ha sido borrado" @@ -18,7 +18,7 @@ def test_video_exists(page_url): def get_video_url(page_url, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data video_urls = [] videourl = scrapertools.find_single_match(data, 'controls preload.*?src="([^"]+)') diff --git a/servers/facebook.py b/servers/facebook.py index 15721d9a..3b540c62 100755 --- a/servers/facebook.py +++ b/servers/facebook.py @@ -19,10 +19,10 @@ from platformcode import logger def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) page_url = page_url.replace("amp;", "") data = httptools.downloadpage(page_url).data - logger.info("data=" + data) + logger.debug("data=" + data) video_urls = [] patron = "video_src.*?(http.*?)%22%2C%22video_timestamp" matches = re.compile(patron, re.DOTALL).findall(data) @@ -33,5 +33,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= videourl = urllib.unquote(videourl) video_urls.append(["[facebook]", videourl]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/fastplay.py b/servers/fastplay.py index 74468000..2c39e1d9 100644 --- a/servers/fastplay.py +++ b/servers/fastplay.py @@ -7,7 +7,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url) if "Object not found" in data.data or "longer exists on our servers" in data.data: @@ -18,7 +18,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "p,a,c,k,e,d" in data: @@ -38,6 +38,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= except: pass for video_url in video_urls: - logger.info(" %s - %s" % (video_url[0], video_url[1])) + logger.debug(" %s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/fembed.py b/servers/fembed.py index f11a5d88..ea96190c 100644 --- a/servers/fembed.py +++ b/servers/fembed.py @@ -6,7 +6,7 @@ from core import jsontools from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data page_url = re.sub('://[^/]+/', 
'://feurl.com/', page_url) @@ -16,14 +16,14 @@ def test_video_exists(page_url): page_url = page_url.replace("/f/","/v/") page_url = page_url.replace("/v/","/api/source/") data = httptools.downloadpage(page_url, post={}).json - logger.info(data) + logger.debug(data) if "Video not found or" in data or "We are encoding this video" in data: return False, config.get_localized_string(70449) % "Fembed" return True, "" def get_video_url(page_url, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] for file in data['data']: media_url = file['file'] diff --git a/servers/fex.py b/servers/fex.py index d64e6434..ed4cbb90 100644 --- a/servers/fex.py +++ b/servers/fex.py @@ -8,7 +8,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url, follow_redirects=False) @@ -18,7 +18,7 @@ def test_video_exists(page_url): return True, "" def get_video_url(page_url, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] data = httptools.downloadpage(page_url, follow_redirects=False, only_headers=True) logger.debug(data.headers) diff --git a/servers/filefactory.py b/servers/filefactory.py index 75e75a53..d7e04be2 100644 --- a/servers/filefactory.py +++ b/servers/filefactory.py @@ -4,12 +4,12 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) return True, "" def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] return video_urls diff --git a/servers/filepup.py b/servers/filepup.py index 0636c120..2f69930d 100644 --- a/servers/filepup.py +++ b/servers/filepup.py @@ -9,7 +9,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) response = httptools.downloadpage(page_url) if "File was deleted" in response.data or "is no longer available" in response.data: return False, config.get_localized_string(70449) % "filepup" @@ -17,7 +17,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] page_url = page_url.replace("https","http") + "?wmode=transparent" data = httptools.downloadpage(page_url).data @@ -36,5 +36,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.sort(key=lambda x: x[2]) for video_url in video_urls: video_url[2] = 0 - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/filescdn.py b/servers/filescdn.py index 3a260b47..42cb6b3f 100644 --- a/servers/filescdn.py +++ b/servers/filescdn.py @@ -6,7 +6,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "File was deleted" in data: @@ -16,7 +16,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", 
password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] data = httptools.downloadpage(page_url).data url = scrapertools.find_single_match(data, '(?i)link:\s*"(https://.*?filescdn\.com.*?mp4)"') diff --git a/servers/filesmonster.py b/servers/filesmonster.py index 7a1cb05d..d4ea7d2a 100644 --- a/servers/filesmonster.py +++ b/servers/filesmonster.py @@ -6,7 +6,7 @@ from platformcode import logger def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("( page_url='%s')") + logger.debug("( page_url='%s')") video_urls = [] itemlist = [] data1 = '' diff --git a/servers/flashx.py b/servers/flashx.py index 8ce5d966..1197c4ec 100644 --- a/servers/flashx.py +++ b/servers/flashx.py @@ -23,7 +23,7 @@ flashx_hash_f = "" flashx_post = "" def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global flashx_data try: flashx_data = httptools.downloadpage(page_url, cookies="xfsts=pfp5dj3e6go1l2o1").data @@ -53,7 +53,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) pfxfx = "" data = flashx_data data = data.replace("\n", "") @@ -70,8 +70,8 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= matches = scrapertools.find_multiple_matches(mfxfx, '(\w+):(\w+)') for f, v in matches: pfxfx += f + "=" + v + "&" - logger.info("mfxfxfx1= %s" % js_fxfx) - logger.info("mfxfxfx2= %s" % pfxfx) + logger.debug("mfxfxfx1= %s" % js_fxfx) + logger.debug("mfxfxfx2= %s" % pfxfx) if pfxfx == "": pfxfx = "f=fail&fxfx=6" coding_url = 'https://www.flashx.co/flashx.php?%s' % pfxfx @@ -119,14 +119,14 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= filetools.write(subtitle, data) except: import traceback - logger.info("Error al descargar el subtítulo: " + traceback.format_exc()) + logger.debug("Error al descargar el subtítulo: " + traceback.format_exc()) for media_url, label in media_urls: if not media_url.endswith("png") and not media_url.endswith(".srt"): video_urls.append(["." 
+ media_url.rsplit('.', 1)[1] + " [flashx]", media_url, 0, subtitle]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) except: pass diff --git a/servers/fourshared.py b/servers/fourshared.py index 802fa1d3..9d6927b2 100644 --- a/servers/fourshared.py +++ b/servers/fourshared.py @@ -8,7 +8,7 @@ from platformcode import logger # Returns an array of possible video url's from the page_url def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] @@ -17,7 +17,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= page_url = scrapertools.get_header_from_response(page_url, header_to_get="location") # http://www.4shared.com/flash/player.swf?file=http://dc237.4shared.com/img/392975628/ff297d3f/dlink__2Fdownload_2Flj9Qu-tF_3Ftsid_3D20101030-200423-87e3ba9b/preview.flv&d - logger.info("redirect a '%s'" % page_url) + logger.debug("redirect a '%s'" % page_url) patron = "file\=([^\&]+)\&" matches = re.compile(patron, re.DOTALL).findall(page_url) @@ -29,6 +29,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.append(["[fourshared]", page_url]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/gamovideo.py b/servers/gamovideo.py index 7a4e6f20..f9078151 100755 --- a/servers/gamovideo.py +++ b/servers/gamovideo.py @@ -22,7 +22,7 @@ headers = {"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:%s.0) Geck DATA = '' def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = alfaresolver.get_data(page_url, False) @@ -46,7 +46,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = DATA @@ -87,7 +87,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.append([scrapertools.get_filename_from_url(mediaurl)[-4:] + " [gamovideo]", mediaurl]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/gigasize.py b/servers/gigasize.py index 85faa514..25b2c1a2 100644 --- a/servers/gigasize.py +++ b/servers/gigasize.py @@ -5,7 +5,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if '<h2 class="error">Download error</h2>' in data: return False, "El enlace no es válido<br/>o ha sido borrado de gigasize" @@ -13,7 +13,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] return video_urls diff --git a/servers/googlevideo.py b/servers/googlevideo.py index 3a411e80..8f857fde 100755 --- a/servers/googlevideo.py +++ b/servers/googlevideo.py @@ -7,14 +7,14 @@ from platformcode import logger def get_video_url(page_url, premium=False, user="", password="", video_password=""): - 
logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] # Lo extrae a partir de flashvideodownloader.org if page_url.startswith("http://"): url = 'http://www.flashvideodownloader.org/download.php?u=' + page_url else: url = 'http://www.flashvideodownloader.org/download.php?u=http://video.google.com/videoplay?docid=' + page_url - logger.info("url=" + url) + logger.debug("url=" + url) data = httptools.downloadpage(url).data # Extrae el vídeo @@ -24,6 +24,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.append(["[googlevideo]", newmatches[0]]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/gounlimited.py b/servers/gounlimited.py index 8bba60b4..53642c2f 100644 --- a/servers/gounlimited.py +++ b/servers/gounlimited.py @@ -20,14 +20,14 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] global data data = re.sub(r'"|\n|\r|\t| |<br>|\s{2,}', "", data) - # logger.info('GOUN DATA= '+data) + # logger.debug('GOUN DATA= '+data) packed_data = scrapertools.find_single_match(data, "javascript'>(eval.*?)</script>") unpacked = jsunpack.unpack(packed_data) - # logger.info('GOUN DATA= '+unpacked) + # logger.debug('GOUN DATA= '+unpacked) patron = r"sources..([^\]]+)" matches = re.compile(patron, re.DOTALL).findall(unpacked) if not matches: diff --git a/servers/gvideo.py b/servers/gvideo.py index aa9712c4..edaaf8ac 100644 --- a/servers/gvideo.py +++ b/servers/gvideo.py @@ -42,7 +42,7 @@ def test_video_exists(page_url): def get_video_url(page_url, user="", password="", video_password=""): - logger.info() + logger.debug() video_urls = [] urls = [] streams =[] diff --git a/servers/hdload.py b/servers/hdload.py index 30240022..35392d6a 100644 --- a/servers/hdload.py +++ b/servers/hdload.py @@ -7,7 +7,7 @@ from platformcode import config, logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url, cookies=False).data if 'Not found id' in data: @@ -17,12 +17,12 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info() + logger.debug() itemlist = [] - logger.info(page_url) + logger.debug(page_url) data = httptools.downloadpage(page_url, post='').data - logger.info(data) + logger.debug(data) url = base64.b64decode(data) itemlist.append([".mp4 [HDLoad]", url]) diff --git a/servers/hdmario.py b/servers/hdmario.py index bf50a3cb..1eea0d75 100644 --- a/servers/hdmario.py +++ b/servers/hdmario.py @@ -8,12 +8,12 @@ from lib.fakeMail import Gmailnator baseUrl = 'https://hdmario.live' def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global page, data page = httptools.downloadpage(page_url) data = page.data - logger.info(page.url) + logger.debug(page.url) if "the page you are looking for could not be found" in data: return False, config.get_localized_string(70449) % "HDmario" @@ -54,12 +54,12 @@ def registerOrLogin(page_url): else: import random import string - logger.info('Registrazione automatica in corso') + logger.debug('Registrazione automatica in corso') mailbox = Gmailnator() randPsw = 
''.join(random.choice(string.ascii_letters + string.digits) for i in range(10)) captcha = httptools.downloadpage(baseUrl + '/captchaInfo').json - logger.info('email: ' + mailbox.address) - logger.info('pass: ' + randPsw) + logger.debug('email: ' + mailbox.address) + logger.debug('pass: ' + randPsw) reg = platformtools.dialog_register(baseUrl + '/register/', email=True, password=True, email_default=mailbox.address, password_default=randPsw, captcha_img=captcha['captchaUrl']) if not reg: return False @@ -90,7 +90,7 @@ def registerOrLogin(page_url): else: platformtools.dialog_ok('HDmario', 'Hai modificato la mail quindi KoD non sarà in grado di effettuare la verifica in autonomia, apri la casella ' + reg['email'] + ' e clicca sul link. Premi ok quando fatto') - logger.info('Registrazione completata') + logger.debug('Registrazione completata') return True @@ -98,7 +98,7 @@ def registerOrLogin(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): global page, data page_url = page_url.replace('?', '') - logger.info("url=" + page_url) + logger.debug("url=" + page_url) if 'unconfirmed' in page.url: id = page_url.split('/')[-1] @@ -109,7 +109,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= } httptools.downloadpage(page.url, post=postData) mail = mailbox.waitForMail() - logger.info(mail) + logger.debug(mail) if mail: code = mail.subject.split(' - ')[0] page = httptools.downloadpage(page_url + '?code=' + code) @@ -122,12 +122,12 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= page = httptools.downloadpage(page_url) data = page.data - logger.info(data) + logger.debug(data) from lib import jsunpack_js2py unpacked = jsunpack_js2py.unpack(scrapertools.find_single_match(data, '<script type="text/javascript">\n*\s*\n*(eval.*)')) # p,a,c,k,e,d data -> xhr.setRequestHeader secureProof = scrapertools.find_single_match(unpacked, """X-Secure-Proof['"]\s*,\s*['"]([^"']+)""") - logger.info('X-Secure-Proof=' + secureProof) + logger.debug('X-Secure-Proof=' + secureProof) data = httptools.downloadpage(baseUrl + '/pl/' + page_url.split('/')[-1].replace('?', '') + '.m3u8', headers=[['X-Secure-Proof', secureProof]]).data filetools.write(xbmc.translatePath('special://temp/hdmario.m3u8'), data, 'w') diff --git a/servers/hugefiles.py b/servers/hugefiles.py index 8bc848be..f0efc272 100755 --- a/servers/hugefiles.py +++ b/servers/hugefiles.py @@ -20,7 +20,7 @@ from platformcode import logger def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data post = {} r = re.findall(r'type="hidden" name="(.+?)"\s* value="?(.+?)">', data) @@ -45,6 +45,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.append([scrapertools.get_filename_from_url(mediaurl)[-4:] + " [hugefiles]", mediaurl]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/idtbox.py b/servers/idtbox.py index b4b8f0e2..7840c22a 100644 --- a/servers/idtbox.py +++ b/servers/idtbox.py @@ -10,7 +10,7 @@ from platformcode import logger data = "" def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url) @@ -22,7 +22,7 
@@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) logger.error(data) video_urls = [] patron = 'source src="([^"]+)" type="([^"]+)" res=(\d+)' diff --git a/servers/jawcloud.py b/servers/jawcloud.py index 8fde2947..60e633dc 100644 --- a/servers/jawcloud.py +++ b/servers/jawcloud.py @@ -6,7 +6,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "The file you were looking for could not be found" in data: return False, config.get_localized_string(70449) % "jawcloud" @@ -14,7 +14,7 @@ def test_video_exists(page_url): def get_video_url(page_url, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data video_urls = [] videourl = scrapertools.find_single_match(data, 'source src="([^"]+)') diff --git a/servers/jetload.py b/servers/jetload.py index bd3df40d..0a6465a2 100644 --- a/servers/jetload.py +++ b/servers/jetload.py @@ -9,7 +9,7 @@ from platformcode import logger video_urls = [] def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) subtitles = "" response = httptools.downloadpage(page_url) @@ -21,7 +21,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] media_url = scrapertools.find_single_match(data, '<video src="([^"]+)"') if media_url: diff --git a/servers/mailru.py b/servers/mailru.py index 8383103d..3cfdf8cd 100644 --- a/servers/mailru.py +++ b/servers/mailru.py @@ -7,7 +7,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) page_url = page_url.replace("embed/", "").replace(".html", ".json") data = httptools.downloadpage(page_url).data if '"error":"video_not_found"' in data or '"error":"Can\'t find VideoInstance"' in data: @@ -17,7 +17,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % (page_url)) + logger.debug("(page_url='%s')" % (page_url)) video_urls = [] # Carga la página para coger las cookies @@ -45,6 +45,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= pass for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/mediafire.py b/servers/mediafire.py index 5b452878..fb36ad9a 100755 --- a/servers/mediafire.py +++ b/servers/mediafire.py @@ -6,7 +6,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "Invalid or Deleted File" in data or "Well, looks like we" in data: return False, config.get_localized_string(70449) % "Mediafire" @@ -16,7 +16,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + 
logger.debug("(page_url='%s')" % page_url) video_urls = [] data = httptools.downloadpage(page_url).data patron = "DownloadButtonAd-startDownload gbtnSecondary.*?href='([^']+)'" @@ -27,5 +27,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= if len(matches) > 0: video_urls.append([matches[0][-4:] + " [mediafire]", matches[0]]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/mega.py b/servers/mega.py index 83d20c24..e621ea6e 100755 --- a/servers/mega.py +++ b/servers/mega.py @@ -79,7 +79,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): page_url = page_url.replace('/embed#', '/#') - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] # si hay mas de 5 archivos crea un playlist con todos diff --git a/servers/mixdrop.py b/servers/mixdrop.py index 43b6eb26..03ef9cf8 100644 --- a/servers/mixdrop.py +++ b/servers/mixdrop.py @@ -10,7 +10,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url).data @@ -25,7 +25,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] ext = '.mp4' diff --git a/servers/mp4upload.py b/servers/mp4upload.py index 769d8699..5b9d91d6 100644 --- a/servers/mp4upload.py +++ b/servers/mp4upload.py @@ -15,7 +15,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = re.sub(r"\n|\r|\t|\s{2}", "", httptools.downloadpage(page_url).data) match = scrapertools.find_single_match(data, "<script type='text/javascript'>(.*?)</script>") data = jsunpack.unpack(match) @@ -23,9 +23,9 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= media_url = scrapertools.find_single_match(data, '{type:"video/mp4",src:"([^"]+)"}') if not media_url: media_url = scrapertools.find_single_match(data, '"file":"([^"]+)') - logger.info("media_url=" + media_url) + logger.debug("media_url=" + media_url) video_urls = list() video_urls.append([scrapertools.get_filename_from_url(media_url)[-4:] + " [mp4upload]", media_url]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/mydaddy.py b/servers/mydaddy.py index 1f6ec8e4..d09dc5da 100644 --- a/servers/mydaddy.py +++ b/servers/mydaddy.py @@ -21,7 +21,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info() + logger.debug() video_urls = [] data = httptools.downloadpage(page_url).data data = scrapertools.find_single_match(data, 'var srca = \[(.*?)\]') diff --git a/servers/mystream.py b/servers/mystream.py index 5a7bc97a..4da94a91 100644 --- a/servers/mystream.py +++ b/servers/mystream.py @@ -10,7 +10,7 @@ import re def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url) 
global page_data page_data = data.data @@ -21,7 +21,7 @@ def test_video_exists(page_url): return True, "" def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] global page_data video_url = scrapertools.find_single_match(decode(page_data), r"'src',\s*'([^']+)") diff --git a/servers/myupload.py b/servers/myupload.py index 04eb0724..6b7eaf18 100644 --- a/servers/myupload.py +++ b/servers/myupload.py @@ -23,7 +23,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info() + logger.debug() video_urls = [] data = httptools.downloadpage(page_url).data matches = scrapertools.find_multiple_matches(data, 'tracker: "([^"]+)"') diff --git a/servers/netutv.py b/servers/netutv.py index 8e391f6a..1ae2f344 100755 --- a/servers/netutv.py +++ b/servers/netutv.py @@ -25,7 +25,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) #Deshabilitamos el server hasta nueva orden return False, "[netutv] Servidor deshabilitado" # http://netu.tv/watch_video.php=XX solo contiene una redireccion, ir directamente a http://hqq.tv/player/embed_player.php?vid=XX @@ -37,7 +37,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] if "hash=" in page_url: diff --git a/servers/nowvideo.py b/servers/nowvideo.py index 95271e63..4623f7aa 100644 --- a/servers/nowvideo.py +++ b/servers/nowvideo.py @@ -12,7 +12,7 @@ from platformcode import logger, config headers = [['User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0']] def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "Not Found" in data or "File was deleted" in data or "The file is being converted" in data or "Please try again later" in data: return False, config.get_localized_string(70293) % "NowVideo" @@ -23,7 +23,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): host = 'http://nowvideo.club' - logger.info("(nowvideo page_url='%s')" % page_url) + logger.debug("(nowvideo page_url='%s')" % page_url) video_urls = [] data = httptools.downloadpage(page_url).data page_url_post = scrapertools.find_single_match(data, '<Form id="[^"]+" method="POST" action="([^"]+)">') @@ -31,7 +31,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= imhuman = '&imhuman=' + scrapertools.find_single_match(data, 'name="imhuman" value="([^"]+)"').replace(" ", "+") post = urllib.urlencode({k: v for k, v in scrapertools.find_multiple_matches(data, 'name="([^"]+)" value="([^"]*)"')}) + imhuman data = httptools.downloadpage(host + page_url_post, post=post).data - logger.info("nowvideo data page_url2 ='%s'" % data) + logger.debug("nowvideo data page_url2 ='%s'" % data) headers.append(['Referer', page_url]) post_data = scrapertools.find_single_match(data,"</div>\s*<script>(eval.function.p,a,c,k,e,.*?)\s*</script>") @@ -46,11 +46,11 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= _headers = urllib.urlencode(dict(headers)) for media_url in media_urls: - 
#logger.info("nowvideo data page_url2 ='%s'" % media_url) + #logger.debug("nowvideo data page_url2 ='%s'" % media_url) video_urls.append([" mp4 [nowvideo] ", media_url + '|' + _headers]) for video_url in media_urls: - logger.info("[nowvideo.py] %s - %s" % (video_url[0], video_url[1])) + logger.debug("[nowvideo.py] %s - %s" % (video_url[0], video_url[1])) return video_urls @@ -60,7 +60,7 @@ def find_videos(data): devuelve = [] patronvideos = r"nowvideo.club/(?:play|videos)?([a-z0-9A-Z]+)" - logger.info("[nowvideo.py] find_videos #" + patronvideos + "#") + logger.debug("[nowvideo.py] find_videos #" + patronvideos + "#") matches = re.compile(patronvideos, re.DOTALL).findall(data) for match in matches: @@ -68,10 +68,10 @@ def find_videos(data): url = 'http://nowvideo.club/%s' % match if url not in encontrados: - logger.info(" url=" + url) + logger.debug(" url=" + url) devuelve.append([titulo, url, 'nowvideo']) encontrados.add(url) else: - logger.info(" url duplicada=" + url) + logger.debug(" url duplicada=" + url) return devuelve diff --git a/servers/okru.py b/servers/okru.py index f8fb8f83..c18c422c 100644 --- a/servers/okru.py +++ b/servers/okru.py @@ -8,7 +8,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "copyrightsRestricted" in data or "COPYRIGHTS_RESTRICTED" in data: @@ -20,7 +20,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] data = httptools.downloadpage(page_url).data diff --git a/servers/onefichier.py b/servers/onefichier.py index 971d954e..54c678e6 100644 --- a/servers/onefichier.py +++ b/servers/onefichier.py @@ -21,25 +21,25 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) if config.get_setting("premium", server="onefichier"): user = config.get_setting("user", server="onefichier") password = config.get_setting("password", server="onefichier") url = "https://1fichier.com/login.pl" - logger.info("url=" + url) + logger.debug("url=" + url) post_parameters = {"mail": user, "pass": password, "lt": "on", "purge": "on", "valider": "Send"} post = urllib.urlencode(post_parameters) - logger.info("post=" + post) + logger.debug("post=" + post) data = httptools.downloadpage(url, post=post).data - # logger.info("data="+data) + # logger.debug("data="+data) cookies = config.get_cookie_data() - logger.info("cookies=" + cookies) + logger.debug("cookies=" + cookies) # 1fichier.com TRUE / FALSE 1443553315 SID imC3q8MQ7cARw5tkXeWvKyrH493rR=1yvrjhxDAA0T0iEmqRfNF9GXwjrwPHssAQ sid_cookie_value = scrapertools.find_single_match(cookies, "1fichier.com.*?SID\s+([A-Za-z0-9\+\=]+)") - logger.info("sid_cookie_value=" + sid_cookie_value) + logger.debug("sid_cookie_value=" + sid_cookie_value) # .1fichier.com TRUE / FALSE 1443553315 SID imC3q8MQ7cARw5tkXeWvKyrH493rR=1yvrjhxDAA0T0iEmqRfNF9GXwjrwPHssAQ cookie = urllib.urlencode({"SID": sid_cookie_value}) @@ -50,16 +50,16 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; es-ES; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12']) headers.append(['Cookie', cookie]) filename = 
scrapertools.get_header_from_response(page_url, header_to_get="Content-Disposition") - logger.info("filename=" + filename) + logger.debug("filename=" + filename) # Construye la URL final para Kodi location = page_url + "|Cookie=" + cookie - logger.info("location=" + location) + logger.debug("location=" + location) video_urls = [] video_urls.append([filename[-4:] + " (Premium) [1fichier]", location]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/onlystream.py b/servers/onlystream.py index 2deef1a7..214f4e12 100644 --- a/servers/onlystream.py +++ b/servers/onlystream.py @@ -6,7 +6,7 @@ from platformcode import config, logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url).data @@ -17,8 +17,8 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) global data - # logger.info(data) + # logger.debug(data) video_urls = support.get_jwplayer_mediaurl(data, 'Onlystream') return video_urls diff --git a/servers/rapidgator.py b/servers/rapidgator.py index ed25c540..fdc262e3 100644 --- a/servers/rapidgator.py +++ b/servers/rapidgator.py @@ -8,6 +8,6 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] return video_urls diff --git a/servers/rcdnme.py b/servers/rcdnme.py index d7fc0b2c..fbb09582 100644 --- a/servers/rcdnme.py +++ b/servers/rcdnme.py @@ -10,7 +10,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url) if "Object not found" in data.data or "longer exists on our servers" in data.data: @@ -21,7 +21,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "p,a,c,k,e,d" in data: @@ -44,6 +44,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= except: pass for video_url in video_urls: - logger.info(" %s - %s" % (video_url[0], video_url[1])) + logger.debug(" %s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/rutube.py b/servers/rutube.py index e6414be7..aa153868 100644 --- a/servers/rutube.py +++ b/servers/rutube.py @@ -24,13 +24,13 @@ from core import jsontools def get_source(url): - logger.info() + logger.debug() data = httptools.downloadpage(url).data data = re.sub(r'\n|\r|\t| |<br>|\s{2,}', "", data) return data def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = get_source(page_url) if "File was deleted" in data or "File Not Found" in data: @@ -40,7 +40,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] referer = '' diff --git a/servers/samaup.py b/servers/samaup.py index cc6285c0..5d168a9c 100644 --- 
a/servers/samaup.py +++ b/servers/samaup.py @@ -10,7 +10,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url).data if "Not Found" in data or "File was deleted" in data: @@ -19,7 +19,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] ext = 'mp4' diff --git a/servers/sendvid.py b/servers/sendvid.py index ed11f426..7cdd94b4 100755 --- a/servers/sendvid.py +++ b/servers/sendvid.py @@ -9,7 +9,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] data = scrapertools.httptools.downloadpage(page_url).data media_url = scrapertools.find_single_match(data, 'var\s+video_source\s+\=\s+"([^"]+)"') @@ -24,5 +24,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= else: video_urls.append([scrapertools.get_filename_from_url(media_url)[-4:] + " [sendvid]", media_url]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/speedvideo.py b/servers/speedvideo.py index c9a2e42e..4f4d50a2 100644 --- a/servers/speedvideo.py +++ b/servers/speedvideo.py @@ -5,7 +5,7 @@ from core import httptools, scrapertools from platformcode import config, logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data @@ -15,22 +15,22 @@ def test_video_exists(page_url): return True, "" def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] quality ={'MOBILE':1, 'NORMAL':2, 'HD':3} data = httptools.downloadpage(page_url).data - logger.info('SPEEDVIDEO DATA '+ data) + logger.debug('SPEEDVIDEO DATA '+ data) media_urls = scrapertools.find_multiple_matches(data, r"file:[^']'([^']+)',\s*label:[^\"]\"([^\"]+)\"") - logger.info("speed video - media urls: %s " % media_urls) + logger.debug("speed video - media urls: %s " % media_urls) for media_url, label in media_urls: media_url = httptools.downloadpage(media_url, only_headers=True, follow_redirects=False).headers.get("location", "") if media_url: video_urls.append([media_url.split('.')[-1] + ' - ' + label + ' - ' + ' [Speedvideo]', media_url]) - logger.info("speed video - media urls: %s " % video_urls) + logger.debug("speed video - media urls: %s " % video_urls) return sorted(video_urls, key=lambda x: quality[x[0].split(' - ')[1]]) diff --git a/servers/streamtape.py b/servers/streamtape.py index a2b4b4a2..05fbb07b 100644 --- a/servers/streamtape.py +++ b/servers/streamtape.py @@ -11,7 +11,7 @@ if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data referer = {"Referer": page_url} @@ -25,7 +25,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) 
video_urls = [] possible_url = scrapertools.find_single_match(data, 'innerHTML = "([^"]+)') diff --git a/servers/streamz.py b/servers/streamz.py index 5540558d..ac0fb568 100644 --- a/servers/streamz.py +++ b/servers/streamz.py @@ -8,7 +8,7 @@ from lib import jsunpack def test_video_exists(page_url): global data - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "<font color=\"red\"><b>File not found, sorry!" in data: @@ -17,7 +17,7 @@ def test_video_exists(page_url): def get_video_url(page_url, video_password): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] from core.support import match matches = match(data, patron=r'(eval\(function\(p,a,c,k,e,d\).*?)\s+</script>').matches diff --git a/servers/supervideo.py b/servers/supervideo.py index 1665152d..2ab9ae1a 100644 --- a/servers/supervideo.py +++ b/servers/supervideo.py @@ -9,7 +9,7 @@ from platformcode import config, logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url, cookies=False).data if 'File is no longer available as it expired or has been deleted' in data: @@ -19,7 +19,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] # data = httptools.downloadpage(page_url).data global data diff --git a/servers/thevid.py b/servers/thevid.py index e061856b..13ee9582 100644 --- a/servers/thevid.py +++ b/servers/thevid.py @@ -8,7 +8,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "Video not found..." 
in data or "Video removed due to copyright" in data: return False, config.get_localized_string(70292) % "Thevid" @@ -31,5 +31,5 @@ def get_video_url(page_url, user="", password="", video_password=""): continue video = "https:" + video video_urls.append(["mp4 [Thevid]", video]) - logger.info("Url: %s" % videos) + logger.debug("Url: %s" % videos) return video_urls diff --git a/servers/thevideobee.py b/servers/thevideobee.py index f319dcfe..001d3471 100644 --- a/servers/thevideobee.py +++ b/servers/thevideobee.py @@ -9,7 +9,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "no longer exists" in data or "to copyright issues" in data: return False, config.get_localized_string(70449) % "thevideobee" @@ -17,7 +17,7 @@ def test_video_exists(page_url): def get_video_url(page_url, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data video_urls = [] videourl = scrapertools.find_single_match(data, 'src: "([^"]+)') diff --git a/servers/turbobit.py b/servers/turbobit.py index d370f03f..9f254dc9 100644 --- a/servers/turbobit.py +++ b/servers/turbobit.py @@ -4,6 +4,6 @@ from platformcode import logger def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] return video_urls diff --git a/servers/turbovid.py b/servers/turbovid.py index 4bb6b993..d1f1e19c 100644 --- a/servers/turbovid.py +++ b/servers/turbovid.py @@ -12,7 +12,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "Not Found" in data or "File Does not Exist" in data: return False, config.get_localized_string(70449) % "Turbovid" @@ -21,7 +21,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password="", server='Turbovid'): - logger.info("(turbovid page_url='%s')" % page_url) + logger.debug("(turbovid page_url='%s')" % page_url) video_urls = [] data = httptools.downloadpage(page_url).data data = data.replace('"', "'") @@ -31,6 +31,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= time.sleep(6) data = httptools.downloadpage(page_url_post, post=post).data - logger.info("(data page_url='%s')" % data) + logger.debug("(data page_url='%s')" % data) video_urls = support.get_jwplayer_mediaurl(data, 'Turbovid') return video_urls diff --git a/servers/tusfiles.py b/servers/tusfiles.py index 18cec1ee..6390c208 100644 --- a/servers/tusfiles.py +++ b/servers/tusfiles.py @@ -9,7 +9,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "no longer exists" in data or "to copyright issues" in data: return False, config.get_localized_string(70449) % "tusfiles" @@ -17,7 +17,7 @@ def test_video_exists(page_url): def get_video_url(page_url, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data video_urls = [] videourl = 
scrapertools.find_single_match(data, 'source src="([^"]+)') diff --git a/servers/uploadedto.py b/servers/uploadedto.py index 72b83510..37529ab7 100755 --- a/servers/uploadedto.py +++ b/servers/uploadedto.py @@ -6,7 +6,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) real_url = page_url.replace("uploaded.to", "uploaded.net") code = httptools.downloadpage(real_url, only_headers=True).code @@ -16,36 +16,36 @@ def test_video_exists(page_url): return True, "" def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] if premium: #Si no hay almacenada una cookie activa, hacemos login if check_cookie("uploaded.net", "login") != True: # Login para conseguir la cookie - logger.info("-------------------------------------------") - logger.info("login") - logger.info("-------------------------------------------") + logger.debug("-------------------------------------------") + logger.debug("login") + logger.debug("-------------------------------------------") login_url = "http://uploaded.net/io/login" post = "id=" + user + "&pw=" + password setcookie = httptools.downloadpage(login_url, post=post, follow_redirects=False, only_headers=True).headers.get("set-cookie", "") - logger.info("-------------------------------------------") - logger.info("obtiene la url") - logger.info("-------------------------------------------") + logger.debug("-------------------------------------------") + logger.debug("obtiene la url") + logger.debug("-------------------------------------------") location = httptools.downloadpage(page_url, follow_redirects=False, only_headers=True).headers.get("location", "") - logger.info("location=" + location) + logger.debug("location=" + location) #fix descarga no directa if location == "": data = httptools.downloadpage(page_url).data - #logger.info("data: %s" % data) + #logger.debug("data: %s" % data) if "<h1>Premium Download</h1>" in data: location = scrapertools.find_single_match(data, '<form method="post" action="([^"]+)"') - #logger.info("location: %s" % location) + #logger.debug("location: %s" % location) elif "Hybrid-Traffic is completely exhausted" in data: logger.error("Trafico agotado") @@ -53,13 +53,13 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= logger.error("Cuenta Free") else: logger.error("Error Desconocido") - logger.info("-------------------------------------------") - logger.info("obtiene el nombre del fichero") - logger.info("-------------------------------------------") + logger.debug("-------------------------------------------") + logger.debug("obtiene el nombre del fichero") + logger.debug("-------------------------------------------") try: content_disposition = httptools.downloadpage(location, post="", follow_redirects=False, only_headers=True).headers.get("content-disposition", "") - logger.info("content_disposition=" + content_disposition) + logger.debug("content_disposition=" + content_disposition) if content_disposition != "": filename = scrapertools.find_single_match(content_disposition, 'filename="([^"]+)"') extension = filename[-4:] @@ -74,7 +74,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.append([extension + " (Premium) [uploaded.to]", location]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + 
logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/uppom.py b/servers/uppom.py index 12b19d30..2ddeb5a4 100644 --- a/servers/uppom.py +++ b/servers/uppom.py @@ -9,7 +9,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = get_source(page_url) if "File was deleted" in data or "File Not Found" in data: @@ -19,7 +19,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info() + logger.debug() video_urls = [] data = httptools.downloadpage(page_url).data fid = scrapertools.find_single_match(data, ' name="id" value="([^"]+)"') diff --git a/servers/upstream.py b/servers/upstream.py index 8c4be435..801d2831 100644 --- a/servers/upstream.py +++ b/servers/upstream.py @@ -9,7 +9,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url).data if "<h2>WE ARE SORRY</h2>" in data or '<title>404 Not Found' in data: diff --git a/servers/uptobox.py b/servers/uptobox.py index 52cc08aa..cda39d75 100755 --- a/servers/uptobox.py +++ b/servers/uptobox.py @@ -19,7 +19,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data @@ -37,7 +37,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) # Si el enlace es directo de upstream if "uptobox" not in page_url: data = httptools.downloadpage(page_url).data @@ -60,7 +60,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls = uptobox(page_url, data) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/upvid.py b/servers/upvid.py index 4523d5dc..e1de4583 100644 --- a/servers/upvid.py +++ b/servers/upvid.py @@ -13,7 +13,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url) if data.code == 404: return False, config.get_localized_string(70449) % "upvid" @@ -23,7 +23,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium = False, user = "", password = "", video_password = ""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] headers = {'referer': page_url} for i in range(0, 3): diff --git a/servers/uqload.py b/servers/uqload.py index 3c254960..46ec18ce 100644 --- a/servers/uqload.py +++ b/servers/uqload.py @@ -10,7 +10,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url) @@ -21,7 +21,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] data = httptools.downloadpage(page_url).data diff --git a/servers/userscloud.py 
b/servers/userscloud.py index 8988cdb9..d1a85478 100644 --- a/servers/userscloud.py +++ b/servers/userscloud.py @@ -7,7 +7,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) response = httptools.downloadpage(page_url) @@ -18,7 +18,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] unpacked = "" data = httptools.downloadpage(page_url).data @@ -37,6 +37,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.append(["%s [userscloud]" % ext, media_url]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/vevio.py b/servers/vevio.py index f0c1a6c0..e244e0a0 100644 --- a/servers/vevio.py +++ b/servers/vevio.py @@ -18,7 +18,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "File was deleted" in data or "Page Cannot Be Found" in data or "Video not found" in data: return False, config.get_localized_string(70449) % "vevio" @@ -26,7 +26,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] post = {} post = urllib.urlencode(post) diff --git a/servers/vidcloud.py b/servers/vidcloud.py index 4a28eb22..137e2950 100644 --- a/servers/vidcloud.py +++ b/servers/vidcloud.py @@ -13,7 +13,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "We're Sorry" in data: return False, config.get_localized_string(70292) % "Vidcloud" @@ -22,7 +22,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] @@ -56,6 +56,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.append(["%s [Vidcloud" % ext, media_url]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/videobin.py b/servers/videobin.py index c2d0125a..fc07160a 100644 --- a/servers/videobin.py +++ b/servers/videobin.py @@ -11,7 +11,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "borrado" in data or "Deleted" in data: return False, config.get_localized_string(70449) % "videobin" @@ -19,7 +19,7 @@ def test_video_exists(page_url): return True, "" def get_video_url(page_url, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] data = httptools.downloadpage(page_url).data bloque = scrapertools.find_single_match(data, 'sources:.\[.*?]') diff --git a/servers/videomega.py b/servers/videomega.py index 
de473b1d..c78b1bc1 100644 --- a/servers/videomega.py +++ b/servers/videomega.py @@ -5,7 +5,7 @@ from platformcode import logger def get_video_url(page_url, video_password): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] data = httptools.downloadpage(page_url).data m= scrapertools.find_single_match(data, '<link href="(Br74.*?==.css)"') diff --git a/servers/vidfast.py b/servers/vidfast.py index 382cb0bb..9ffb07b6 100644 --- a/servers/vidfast.py +++ b/servers/vidfast.py @@ -9,7 +9,7 @@ from platformcode import logger video_urls = [] def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) subtitles = "" response = httptools.downloadpage(page_url) @@ -21,9 +21,9 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] - logger.info("Intel11 %s" %data) + logger.debug("Intel11 %s" %data) media_url = scrapertools.find_single_match(data, 'file:"([^"]+)') if media_url: ext = media_url[-4:] diff --git a/servers/vidlox.py b/servers/vidlox.py index 7cbb684e..7c5336af 100644 --- a/servers/vidlox.py +++ b/servers/vidlox.py @@ -11,7 +11,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url).data if "borrado" in data or "Deleted" in data: @@ -21,7 +21,7 @@ def test_video_exists(page_url): def get_video_url(page_url, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] bloque = scrapertools.find_single_match(data, 'sources:.\[.*?]') diff --git a/servers/vidmoly.py b/servers/vidmoly.py index 6252edff..7332b2cd 100644 --- a/servers/vidmoly.py +++ b/servers/vidmoly.py @@ -5,7 +5,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data resp = httptools.downloadpage(page_url) data = resp.data @@ -15,7 +15,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) global data video_urls = support.get_jwplayer_mediaurl(data, 'Vidmoly') diff --git a/servers/vidoza.py b/servers/vidoza.py index c7a39de0..04743d0b 100644 --- a/servers/vidoza.py +++ b/servers/vidoza.py @@ -8,7 +8,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url).data if "Page not found" in data or "File was deleted" in data: @@ -20,7 +20,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data video_urls = [] diff --git a/servers/vidtodo.py b/servers/vidtodo.py index ba08b6b9..f438dbe5 100755 --- a/servers/vidtodo.py +++ b/servers/vidtodo.py @@ -8,7 +8,7 @@ from platformcode import logger id_server = "vidtodo" response = "" def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % 
page_url) global response response = httptools.downloadpage(page_url) if not response.success or "Not Found" in response.data: @@ -19,7 +19,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] data = response.data packed_data = scrapertools.find_single_match(data, "javascript'>(eval.*?)</script>") @@ -41,5 +41,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.append([".mp4 [%s] %s" % (id_server, inf), mp4 % h]) video_urls.append(["RTMP [%s] %s" % (id_server, inf), "%s playpath=%s" % (rtmp, playpath)]) for video_url in video_urls: - logger.info("video_url: %s - %s" % (video_url[0], video_url[1])) + logger.debug("video_url: %s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/vidtome.py b/servers/vidtome.py index 9f1eeafc..bf5db104 100644 --- a/servers/vidtome.py +++ b/servers/vidtome.py @@ -6,7 +6,7 @@ from lib import jsunpack def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url).data if "Not Found" in data or "File Does not Exist" in data: @@ -15,7 +15,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) global data video_urls = [] code = scrapertools.find_single_match(data, 'name="code" value="([^"]+)') diff --git a/servers/vidup.py b/servers/vidup.py index c300d778..b3ff4939 100755 --- a/servers/vidup.py +++ b/servers/vidup.py @@ -20,7 +20,7 @@ from platformcode import logger def test_video_exists(page_url): return False, "[Vidup] Servidor Deshabilitado" - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) page = httptools.downloadpage(page_url) url = page.url if "Not Found" in page.data or "/404" in url: @@ -29,7 +29,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] post= {} post = urllib.urlencode(post) diff --git a/servers/vimeo.py b/servers/vimeo.py index 5308551f..b3f3077f 100644 --- a/servers/vimeo.py +++ b/servers/vimeo.py @@ -5,7 +5,7 @@ from core import scrapertools from platformcode import logger, config headers = [['User-Agent', 'Mozilla/5.0']] def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data if "|" in page_url: @@ -23,7 +23,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] global data @@ -38,6 +38,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.sort(key=lambda x: x[2]) for video_url in video_urls: video_url[2] = 0 - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/vimpleru.py b/servers/vimpleru.py index dc21c4aa..f06848df 100644 --- a/servers/vimpleru.py +++ b/servers/vimpleru.py @@ -7,7 +7,7 @@ from platformcode import config, logger def test_video_exists(page_url): - 
logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if '"title":"Video Not Found"' in data: return False, config.get_localized_string(70449) % "Vimple" @@ -16,7 +16,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url=%s)" % page_url) + logger.debug("(page_url=%s)" % page_url) data = httptools.downloadpage(page_url).data @@ -34,6 +34,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.append([scrapertools.get_filename_from_url(media_url)[-4:] + " [vimple.ru]", media_url]) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/vivo.py b/servers/vivo.py index 4c7b73a3..115faaf1 100644 --- a/servers/vivo.py +++ b/servers/vivo.py @@ -9,7 +9,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url) if data.code == 404: return False, config.get_localized_string(70449) % "Vivo" @@ -17,7 +17,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] data = httptools.downloadpage(page_url).data enc_data = scrapertools.find_single_match(data, 'data-stream="([^"]+)') diff --git a/servers/vk.py b/servers/vk.py index 0468cf8a..5fdbee13 100755 --- a/servers/vk.py +++ b/servers/vk.py @@ -17,7 +17,7 @@ from platformcode import config, logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) if not login(): return False, "Falta Ingresar/Actualizar las credenciales en el servidor vk. Configuracion - Preferencias - Ajustes de servidores - Configuración del servidor vk" data = httptools.downloadpage(page_url).data @@ -28,7 +28,7 @@ def test_video_exists(page_url): # Returns an array of possible video url's from the page_url def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] data = httptools.downloadpage(page_url).data matches = scrapertools.find_multiple_matches(data, '<source src="([^"]+)" type="video/(\w+)') @@ -37,7 +37,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.append([calidad + "p ." + ext + " [vk]", media_url]) video_urls.sort(key=lambda it: int(it[0].split("p ", 1)[0])) for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/vshare.py b/servers/vshare.py index 20f1a000..a07425ba 100644 --- a/servers/vshare.py +++ b/servers/vshare.py @@ -9,7 +9,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) response = httptools.downloadpage(page_url) if response.code != 200 or "No longer available!" 
in response.data: return False, config.get_localized_string(70449) % "vshare" @@ -18,7 +18,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url = " + page_url) + logger.debug("url = " + page_url) headers = {"Referer":page_url} data = httptools.downloadpage(page_url, headers=headers).data flowplayer = re.search("url: [\"']([^\"']+)", data) diff --git a/servers/vudeo.py b/servers/vudeo.py index 4768e8ea..c8ad45f3 100644 --- a/servers/vudeo.py +++ b/servers/vudeo.py @@ -6,7 +6,7 @@ data = "" def test_video_exists(page_url): global data - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) response = httptools.downloadpage(page_url) if response.code == 404: @@ -18,5 +18,5 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): global data - logger.info("url=" + page_url) + logger.debug("url=" + page_url) return support.get_jwplayer_mediaurl(data, 'Vudeo') diff --git a/servers/vupplayer.py b/servers/vupplayer.py index 4b228eb6..b1eaf15e 100644 --- a/servers/vupplayer.py +++ b/servers/vupplayer.py @@ -5,7 +5,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) page = httptools.downloadpage(page_url) global data data = page.data @@ -15,7 +15,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] global data patron = r'sources:\s*\[\{src:\s*"([^"]+)"' @@ -33,5 +33,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= url = url.split(',') video_url = url[0] quality = url[1].replace('label:','') - video_urls.append(['VUP Player [%s]' % quality, video_url]) + video_urls.append(['VUP Player [%s]' % quality, video_url.replace(',','')]) return video_urls diff --git a/servers/vvvvid.py b/servers/vvvvid.py index 53f0568c..b24af9a7 100644 --- a/servers/vvvvid.py +++ b/servers/vvvvid.py @@ -10,16 +10,22 @@ from platformcode import logger, config # Creating persistent session current_session = requests.Session() -headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:62.0) Gecko/20100101 Firefox/62.0'} +headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36'} # Getting conn_id token from vvvvid and creating payload login_page = 'https://www.vvvvid.it/user/login' -conn_id = current_session.get(login_page, headers=headers).json()['data']['conn_id'] -payload = {'conn_id': conn_id} +try: + res = current_session.get(login_page, headers=headers) + conn_id = res.json()['data']['conn_id'] + payload = {'conn_id': conn_id} + headers = {'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.8.1.14) Gecko/20080404 Firefox/2.0.0.14', 'Cookie': res.headers['set-cookie']} +except: + conn_id = '' + def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "Not Found" in data or "File was deleted" in data: return False, config.get_localized_string(70449) % "VVVVID" @@ -45,7 +51,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= # Getting info from Site json_url = "https://www.vvvvid.it/vvvvid/ondemand/" + 
show_id + '/season/' +season_id + '/' json_file = current_session.get(json_url, headers=headers, params=payload).json() - logger.info(json_file['data']) + logger.debug(json_file['data']) # Search for the correct episode for episode in json_file['data']: diff --git a/servers/watchanimestream.py b/servers/watchanimestream.py index 3df92201..54093ff3 100644 --- a/servers/watchanimestream.py +++ b/servers/watchanimestream.py @@ -5,7 +5,7 @@ from platformcode import logger def get_video_url(page_url, video_password): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] url = page_url.replace("/v/", "/api/source/") post = "r=&d=watchanimestream.net" diff --git a/servers/watchvideo.py b/servers/watchvideo.py index 81ce9abe..b7db7671 100644 --- a/servers/watchvideo.py +++ b/servers/watchvideo.py @@ -7,7 +7,7 @@ from platformcode import logger, config def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url).data if "Not Found" in data or "File was deleted" in data: @@ -16,7 +16,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("url=" + page_url) + logger.debug("url=" + page_url) video_urls = [] media_urls = scrapertools.find_multiple_matches(data, 'file:"([^"]+)"') if not media_urls: @@ -34,5 +34,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls.append(["%s [watchvideo]" % (ext), media_url]) video_urls.reverse() for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/wstream.json b/servers/wstream.json index 5f7cd897..f9668a6a 100644 --- a/servers/wstream.json +++ b/servers/wstream.json @@ -31,7 +31,7 @@ "visible": true }, { - "default": "100", + "default": 0, "enabled": true, "id": "favorites_servers_list", "label": "$ADDON[plugin.video.kod 60655]", diff --git a/servers/wstream.py b/servers/wstream.py index 57643a51..dbaca6db 100644 --- a/servers/wstream.py +++ b/servers/wstream.py @@ -12,6 +12,9 @@ from core import httptools, scrapertools from platformcode import logger, config, platformtools # real_host = 'wstream.video' +errorsStr = ['Sorry this file is not longer available', 'Sorry this video is unavailable', 'Video is processing' + 'File was deleted', 'Not Found'] + def test_video_exists(page_url): global headers @@ -19,7 +22,7 @@ def test_video_exists(page_url): headers = [['User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0'], ['Host', scrapertools.get_domain_from_url(page_url)]] - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) if 'wstream' in page_url: resp = httptools.downloadpage(page_url.replace(headers[1][1], real_host), headers=headers, verify=False) else: @@ -31,7 +34,7 @@ def test_video_exists(page_url): page_url = resp.url.replace(headers[1][1], real_host) if '/streaming.php' in page_url in page_url: code = httptools.downloadpage(page_url, headers=headers, follow_redirects=False, only_headers=True, verify=False).headers['location'].split('/')[-1].replace('.html', '') - # logger.info('WCODE=' + code) + # logger.debug('WCODE=' + code) page_url = 'https://' + real_host + '/video.php?file_code=' + code data = httptools.downloadpage(page_url, headers=headers, follow_redirects=True, 
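In the vvvvid.py hunk, the new module-level except branch only defines conn_id = ''; payload is left undefined, so the current_session.get(json_url, headers=headers, params=payload) call in get_video_url would raise a NameError precisely in the failure case the try/except was added to absorb. A minimal sketch of a fallback that keeps both names defined (headers are simplified here, and whether the endpoint tolerates an empty payload is an assumption):

    import requests

    current_session = requests.Session()
    login_page = 'https://www.vvvvid.it/user/login'
    headers = {'User-Agent': 'Mozilla/5.0'}

    try:
        res = current_session.get(login_page, headers=headers)
        conn_id = res.json()['data']['conn_id']
        payload = {'conn_id': conn_id}
    except Exception:
        # keep later requests from crashing with NameError when the login call fails
        conn_id = ''
        payload = {}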
verify=False).data @@ -45,15 +48,15 @@ def test_video_exists(page_url): page_url = 'https://' + real_host + '/video.php?file_code=' + scrapertools.find_single_match(dec, "src='([^']+)").split('/')[-1].replace('.html','') headers = [['User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0'],['Host', 'wstream.video']] new_data = httptools.downloadpage(page_url, headers=headers, follow_redirects=True, verify=False).data - logger.info('NEW DATA: \n' + new_data) + logger.debug('NEW DATA: \n' + new_data) if new_data: data = new_data real_url = page_url - if "Not Found" in data or "File was deleted" in data or 'Video is processing' in data or 'Sorry this video is unavailable' in data: - return False, config.get_localized_string(70449) % 'Wstream' - else: - return True, "" + for e in errorsStr: + if e in data: + return False, config.get_localized_string(70449) % 'Wstream' + return True, "" # Returns an array of possible video url's from the page_url @@ -93,7 +96,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= except: pass - logger.info("[Wstream] url=" + page_url) + logger.debug("[Wstream] url=" + page_url) video_urls = [] global data, real_url, headers @@ -102,7 +105,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= captcha = platformtools.show_recaptcha(sitekey, page_url.replace('116.202.226.34', headers[1][1]).replace('nored.icu', headers[1][1])) if sitekey else '' possibleParam = scrapertools.find_multiple_matches(data,r"""<input.*?(?:name=["']([^'"]+).*?value=["']([^'"]*)['"]>|>)""") - if possibleParam[0][0]: + if possibleParam and possibleParam[0][0]: post = {param[0]: param[1] for param in possibleParam if param[0]} if captcha: post['g-recaptcha-response'] = captcha if post: diff --git a/servers/youdbox.py b/servers/youdbox.py index a58912b4..a371e7d3 100644 --- a/servers/youdbox.py +++ b/servers/youdbox.py @@ -5,7 +5,7 @@ from platformcode import logger def get_video_url(page_url, video_password): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] data = httptools.downloadpage(page_url).data url = scrapertools.find_single_match(data, '<source src="([^"]+)"') diff --git a/servers/yourupload.py b/servers/yourupload.py index 7625c0a3..6959c681 100755 --- a/servers/yourupload.py +++ b/servers/yourupload.py @@ -6,7 +6,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) global data data = httptools.downloadpage(page_url).data @@ -17,7 +17,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] referer = {'Referer': page_url} diff --git a/servers/youtube.py b/servers/youtube.py index f142696b..be8c2934 100644 --- a/servers/youtube.py +++ b/servers/youtube.py @@ -6,7 +6,7 @@ from platformcode import config, logger, platformtools name = 'plugin.video.youtube' def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data @@ -18,12 +18,12 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): import xbmc from xbmcaddon import Addon - logger.info("(page_url='%s')" % page_url) + 
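As written in the wstream.py hunk, errorsStr is missing a comma after 'Video is processing', so Python's implicit string concatenation turns it and 'File was deleted' into the single literal 'Video is processingFile was deleted'; the standalone 'File was deleted' check that test_video_exists previously performed is silently lost. A corrected sketch (looks_unavailable is only an illustrative helper, not part of the addon):

    errorsStr = [
        'Sorry this file is not longer available',
        'Sorry this video is unavailable',
        'Video is processing',   # comma restored: without it the next literal is concatenated
        'File was deleted',
        'Not Found',
    ]

    def looks_unavailable(data):
        # True if any known error marker appears in the downloaded page
        return any(e in data for e in errorsStr)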
logger.debug("(page_url='%s')" % page_url) video_urls = [] if not page_url.startswith("http"): page_url = "http://www.youtube.com/watch?v=%s" % page_url - logger.info(" page_url->'%s'" % page_url) + logger.debug(" page_url->'%s'" % page_url) video_id = scrapertools.find_single_match(page_url, '(?:v=|embed/)([A-z0-9_-]{11})') inputstream = platformtools.install_inputstream() @@ -34,7 +34,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= else: __settings__.setSetting('kodion.video.quality.mpd', 'false') video_urls = [['con YouTube', 'plugin://plugin.video.youtube/play/?video_id=' + video_id ]] except: - if filetools.exists(xbmc.translatePath('special://profile/addon_data/' + name)): + if filetools.exists(xbmc.translatePath('special://profile/addons/' + name)): if platformtools.dialog_yesno(config.get_localized_string(70784), config.get_localized_string(70818)): xbmc.executeJSONRPC('{"jsonrpc": "2.0", "id":1, "method": "Addons.SetAddonEnabled", "params": { "addonid": "' + name + '", "enabled": true }}') else: return [['','']] diff --git a/servers/youwatch.py b/servers/youwatch.py index 82f461c3..5bc49f55 100644 --- a/servers/youwatch.py +++ b/servers/youwatch.py @@ -6,7 +6,7 @@ from platformcode import logger def test_video_exists(page_url): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data if "File Not Found" in data: return False, config.get_localized_string(70449) % "Youwatch" @@ -20,7 +20,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) data = httptools.downloadpage(page_url).data url_redirect = scrapertools.find_single_match(data, '<iframe src="([^"]+)"') @@ -31,6 +31,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= video_urls = [[scrapertools.get_filename_from_url(url)[-4:] + " [youwatch]", video_url]] for video_url in video_urls: - logger.info("%s - %s" % (video_url[0], video_url[1])) + logger.debug("%s - %s" % (video_url[0], video_url[1])) return video_urls diff --git a/servers/zippyshare.py b/servers/zippyshare.py index 65c45a70..aec47843 100755 --- a/servers/zippyshare.py +++ b/servers/zippyshare.py @@ -29,7 +29,7 @@ def test_video_exists(page_url): def get_video_url(page_url, premium=False, user="", password="", video_password=""): - logger.info("(page_url='%s')" % page_url) + logger.debug("(page_url='%s')" % page_url) video_urls = [] data = httptools.downloadpage(page_url).data @@ -43,5 +43,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password= mediaurl = '%s%s' % (domain, url) extension = "." 
+ mediaurl.split('.')[-1] video_urls.append([extension + " [zippyshare]", mediaurl]) - logger.info("url=%s" %video_urls) + logger.debug("url=%s" %video_urls) return video_urls diff --git a/service.py b/service.py index ad4182fe..5ab5e931 100644 --- a/service.py +++ b/service.py @@ -25,7 +25,7 @@ from servers import torrent def update(path, p_dialog, i, t, serie, overwrite): - logger.info("Updating " + path) + logger.debug("Updating " + path) insertados_total = 0 nfo_file = xbmc.translatePath(filetools.join(path, 'tvshow.nfo')) @@ -57,8 +57,7 @@ def update(path, p_dialog, i, t, serie, overwrite): p_dialog.update(int(math.ceil((i + 1) * t)), heading, config.get_localized_string(60389) % (serie.contentSerieName, serie.channel.capitalize())) try: pathchannels = filetools.join(config.get_runtime_path(), "channels", serie.channel + '.py') - logger.info("loading channel: " + pathchannels + " " + - serie.channel) + logger.debug("loading channel: " + pathchannels + " " + serie.channel) if serie.library_filter_show: serie.show = serie.library_filter_show.get(serie.channel, serie.contentSerieName) @@ -110,7 +109,7 @@ def update(path, p_dialog, i, t, serie, overwrite): def check_for_update(overwrite=True): - logger.info("Update Series...") + logger.debug("Update Series...") p_dialog = None serie_actualizada = False update_when_finished = False @@ -142,7 +141,7 @@ def check_for_update(overwrite=True): filetools.write(tvshow_file, head_nfo + serie.tojson()) path = filetools.dirname(tvshow_file) - logger.info("serie=" + serie.contentSerieName) + logger.debug("serie=" + serie.contentSerieName) p_dialog.update(int(math.ceil((i + 1) * t)), heading, serie.contentSerieName) #Verificamos el estado del serie.library_playcounts de la Serie por si está incompleto @@ -254,7 +253,7 @@ def check_for_update(overwrite=True): p_dialog.close() else: - logger.info("Not update the video library, it is disabled") + logger.debug("Not update the video library, it is disabled") except Exception as ex: logger.error("An error occurred while updating the series") @@ -284,7 +283,7 @@ def viewmodeMonitor(): if content: defaultMode = int(config.get_setting('view_mode_%s' % content).split(',')[-1]) if currentMode != defaultMode: - logger.info('viewmode changed: ' + currentModeName + '-' + str(currentMode) + ' - content: ' + content) + logger.debug('viewmode changed: ' + currentModeName + '-' + str(currentMode) + ' - content: ' + content) config.set_setting('view_mode_%s' % content, currentModeName + ', ' + str(currentMode)) except: logger.error(traceback.print_exc()) @@ -329,6 +328,8 @@ class AddonMonitor(xbmc.Monitor): if settings_post: # backup settings filetools.copy(os.path.join(config.get_data_path(), "settings.xml"), os.path.join(config.get_data_path(), "settings.bak"), True) + logger.debug({k: self.settings_pre[k] for k in self.settings_pre + if k in settings_post and self.settings_pre[k] != settings_post[k]}) from platformcode import xbmc_videolibrary if self.settings_pre.get('downloadpath', None) != settings_post.get('downloadpath', None): @@ -368,11 +369,11 @@ class AddonMonitor(xbmc.Monitor): self.settings_pre = settings_post def onScreensaverActivated(self): - logger.info('screensaver activated, un-scheduling screen-on jobs') + logger.debug('screensaver activated, un-scheduling screen-on jobs') schedule.clear('screenOn') def onScreensaverDeactivated(self): - logger.info('screensaver deactivated, re-scheduling screen-on jobs') + logger.debug('screensaver deactivated, re-scheduling screen-on jobs') 
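The dict comprehension added to AddonMonitor in service.py logs only the previous value of each setting that differs between settings_pre and settings_post. If both sides of the change are wanted in the log, a small helper along these lines would do it (changed_settings is hypothetical, not part of the addon):

    def changed_settings(pre, post):
        # map each changed key to its (old, new) pair
        return {k: (pre[k], post[k]) for k in pre if k in post and pre[k] != post[k]}

    # e.g. changed_settings({'downloadpath': '/a', 'lang': 'it'},
    #                       {'downloadpath': '/b', 'lang': 'it'})
    # -> {'downloadpath': ('/a', '/b')}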
self.scheduleScreenOnJobs() def scheduleUpdater(self): @@ -380,7 +381,7 @@ class AddonMonitor(xbmc.Monitor): updaterCheck() self.updaterPeriod = config.get_setting('addon_update_timer') schedule.every(self.updaterPeriod).hours.do(updaterCheck).tag('updater') - logger.info('scheduled updater every ' + str(self.updaterPeriod) + ' hours') + logger.debug('scheduled updater every ' + str(self.updaterPeriod) + ' hours') def scheduleVideolibrary(self): self.update_setting = config.get_setting("update", "videolibrary") @@ -388,18 +389,18 @@ class AddonMonitor(xbmc.Monitor): if self.update_setting == 2 or self.update_setting == 3: self.update_hour = config.get_setting("everyday_delay", "videolibrary") * 4 schedule.every().day.at(str(self.update_hour).zfill(2) + ':00').do(run_threaded, check_for_update, (False,)).tag('videolibrary') - logger.info('scheduled videolibrary at ' + str(self.update_hour).zfill(2) + ':00') + logger.debug('scheduled videolibrary at ' + str(self.update_hour).zfill(2) + ':00') def scheduleScreenOnJobs(self): schedule.every().second.do(viewmodeMonitor).tag('screenOn') schedule.every().second.do(torrent.elementum_monitor).tag('screenOn') def onDPMSActivated(self): - logger.info('DPMS activated, un-scheduling screen-on jobs') + logger.debug('DPMS activated, un-scheduling screen-on jobs') schedule.clear('screenOn') def onDPMSDeactivated(self): - logger.info('DPMS deactivated, re-scheduling screen-on jobs') + logger.debug('DPMS deactivated, re-scheduling screen-on jobs') self.scheduleScreenOnJobs() diff --git a/specials/community.py b/specials/community.py index 0c587ba9..1dca0fb1 100644 --- a/specials/community.py +++ b/specials/community.py @@ -6,7 +6,7 @@ import re, inspect, xbmcgui from core import httptools, jsontools, tmdb, support, filetools from core.item import Item -from platformcode import config, platformtools +from platformcode import config, platformtools, logger from channelselector import get_thumb from collections import OrderedDict @@ -25,7 +25,7 @@ list_quality = ['SD', '720', '1080', '4k'] tmdb_api = 'a1ab8b8669da03637a4b98fa39c39228' def mainlist(item): - support.info() + logger.debug() path = filetools.join(config.get_data_path(), 'community_channels.json') if not filetools.exists(path): @@ -37,7 +37,7 @@ def mainlist(item): def show_channels(item): - support.info() + logger.debug() itemlist = [] # add context menu @@ -77,7 +77,7 @@ def show_channels(item): def show_menu(item): - support.info() + logger.debug() itemlist = [] @@ -116,12 +116,12 @@ def show_menu(item): if 'channel_name' in json and not 'disable_search' in json and 'search' not in json: itemlist += get_search_menu(item, json, channel_name=json['channel_name']) - support.info('PAGINATION:', disable_pagination) + logger.debug('PAGINATION:', disable_pagination) return itemlist def search(item, text): - support.info(text) + logger.info('search',text) itemlist = [] if item.custom_search: @@ -170,7 +170,7 @@ def global_search(item, text): def peliculas(item, json='', key='', itemlist=[]): item.plot = item.thumb = item.fanart ='' - support.info('PAGINATION:', item.disable_pagination) + logger.debug('PAGINATION:', item.disable_pagination) if not json: key = item.key json = load_json(item)[key] @@ -241,7 +241,7 @@ def peliculas(item, json='', key='', itemlist=[]): def get_seasons(item): - support.info() + logger.debug() itemlist = [] infoLabels = item.infoLabels json = item.url if type(item.url) == dict else item.url @@ -281,7 +281,7 @@ def get_seasons(item): def episodios(item, json ='', key='', 
itemlist =[]): - support.info() + logger.debug() infoLabels = item.infoLabels itm=item @@ -325,8 +325,10 @@ def episodios(item, json ='', key='', itemlist =[]): season_number = int(match[0]) else: season_number = option['season'] if 'season' in option else season if season else 1 - episode_number = option['number'] if 'number' in option else '' - if not episode_number.isdigit(): + episode_number = option['number'] if 'number' in option else option['episode'] if 'episode' else '' + if type(episode_number) == int: + pass + elif not episode_number.isdigit(): episode_number = support.match(option['title'], patron=r'(?P<episode>\d+)').match ep = int(episode_number) if episode_number else ep if not episode_number: @@ -392,7 +394,7 @@ def episodios(item, json ='', key='', itemlist =[]): # Find Servers def findvideos(item): - support.info() + logger.debug() itemlist = [] if 'links' in item.url: json = item.url['links'] @@ -412,7 +414,7 @@ def findvideos(item): ################################ Menu ################################ def get_menu(item, json, key, itemlist=[]): - support.info() + logger.debug() json = json[key] for option in json: title = option['title'] if 'title' in option else json[option] if 'search' not in option else '' @@ -447,7 +449,7 @@ def get_menu(item, json, key, itemlist=[]): def get_sub_menu(item, json, key, itemlist=[]): - support.info() + logger.debug() json = json[key] search = False if item.menu: @@ -486,7 +488,7 @@ def get_sub_menu(item, json, key, itemlist=[]): def get_search_menu(item, json='', itemlist=[], channel_name=''): - support.info() + logger.debug() if 'title' in json: title = json['title'] elif channel_name: @@ -512,7 +514,7 @@ def get_search_menu(item, json='', itemlist=[], channel_name=''): def submenu(item, json, key, itemlist = [], filter_list = []): - support.info(item) + logger.debug(item) import sys if sys.version_info[0] >= 3: from concurrent import futures @@ -583,7 +585,6 @@ def filter_thread(filter, key, item, description): if id: thumbnail = 'https://image.tmdb.org/t/p/original' + results['profile_path'] if results['profile_path'] else item.thumbnail json_file = httptools.downloadpage('http://api.themoviedb.org/3/person/'+ str(id) + '?api_key=' + tmdb_api + '&language=en', use_requests=True).data - support.info(json_file) plot += jsontools.load(json_file)['biography'] if description: @@ -618,7 +619,7 @@ def filter_thread(filter, key, item, description): # for load json from item or url def load_json(item, no_order=False): - support.info() + logger.debug() if type(item) == Item: url = item.url filterkey = item.filterkey @@ -643,7 +644,7 @@ def load_json(item, no_order=False): # Load Channels json and check that the paths and channel titles are correct def load_and_check(item): - support.info() + logger.debug() path = filetools.join(config.get_data_path(), 'community_channels.json') file = open(path, "r") json = jsontools.load(file.read()) @@ -665,7 +666,7 @@ def load_and_check(item): # set extra values def set_extra_values(item, json, path): - support.info() + logger.debug() ret = Item() for key in json: if key == 'quality': @@ -711,7 +712,7 @@ def set_extra_values(item, json, path): # format titles def set_title(title, language='', quality=''): - support.info() + logger.debug() t = support.match(title, patron=r'\{([^\}]+)\}').match if 'bold' not in t: t += ' bold' @@ -732,7 +733,7 @@ def set_title(title, language='', quality=''): # for relative path def relative(key, json, path): - support.info() + logger.debug() ret = '' if key in json: if 
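In the community.py episodios() hunk, the fallback "option['episode'] if 'episode' else ''" tests the literal string 'episode', which is always truthy, so the final '' branch is unreachable and option['episode'] raises a KeyError whenever an option carries neither 'number' nor 'episode'. The intended membership test, written as a standalone sketch (episode_number_from is illustrative only):

    def episode_number_from(option):
        # replicate the lookup with a real membership test
        if 'number' in option:
            return option['number']
        if 'episode' in option:   # 'episode' alone is always truthy, so test it against the dict
            return option['episode']
        return ''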
key in ['thumbnail', 'poster']: @@ -744,7 +745,7 @@ def relative(key, json, path): def pagination(item, itemlist = []): - support.info() + logger.debug() itlist = [] if not itemlist: @@ -784,7 +785,7 @@ def pagination(item, itemlist = []): return itlist def add_channel(item): - support.info() + logger.debug() channel_to_add = {} json_file = '' result = platformtools.dialog_select(config.get_localized_string(70676), [config.get_localized_string(70678), config.get_localized_string(70679)]) @@ -838,7 +839,7 @@ def add_channel(item): return def remove_channel(item): - support.info() + logger.debug() path = filetools.join(config.get_data_path(), 'community_channels.json') diff --git a/specials/downloads.py b/specials/downloads.py index c10ba045..45dd43a4 100644 --- a/specials/downloads.py +++ b/specials/downloads.py @@ -311,7 +311,7 @@ def menu(item): # Show Dialog seleccion = platformtools.dialog_select(config.get_localized_string(30163), opciones) - logger.info('SELECTION: '+ op[seleccion]) + logger.debug('SELECTION: '+ op[seleccion]) # -1 is cancel if seleccion == -1: return diff --git a/specials/favorites.py b/specials/favorites.py index d7c885bc..60634bb5 100644 --- a/specials/favorites.py +++ b/specials/favorites.py @@ -24,7 +24,7 @@ except: def mainlist(item): - logger.info() + logger.debug() itemlist = [] for name, thumb, data in read_favourites(): @@ -81,7 +81,7 @@ def save_favourites(favourites_list): def addFavourite(item): - logger.info() + logger.debug() # logger.debug(item.tostring('\n')) # If you get here through the context menu, you must retrieve the action and channel parameters @@ -100,7 +100,7 @@ def addFavourite(item): def delFavourite(item): - logger.info() + logger.debug() # logger.debug(item.tostring('\n')) if item.from_title: @@ -118,7 +118,7 @@ def delFavourite(item): def renameFavourite(item): - logger.info() + logger.debug() # logger.debug(item.tostring('\n')) # Find the item we want to rename in favorites.xml @@ -137,7 +137,7 @@ def renameFavourite(item): ################################################## # Features to migrate old favorites (.txt) def readbookmark(filepath): - logger.info() + logger.debug() try: import urllib.parse as urllib except ImportError: @@ -233,6 +233,6 @@ try: if config.get_setting("bookmarkpath") != "": check_bookmark(config.get_setting("bookmarkpath")) else: - logger.info("No path to old version favorites") + logger.debug("No path to old version favorites") except: pass diff --git a/specials/filmontv.py b/specials/filmontv.py index 5def299e..0a44c78c 100644 --- a/specials/filmontv.py +++ b/specials/filmontv.py @@ -18,7 +18,7 @@ TIMEOUT_TOTAL = 60 def mainlist(item): - logger.info(" mainlist") + logger.debug(" mainlist") itemlist = [#Item(channel="search", action='discover_list', title=config.get_localized_string(70309), #search_type='list', list_type='movie/now_playing', # thumbnail=get_thumb("now_playing.png")), @@ -91,7 +91,7 @@ def server_config(item): return platformtools.show_channel_settings(channelpath=filetools.join(config.get_runtime_path(), "specials", item.config)) def now_on_misc_film(item): - logger.info("filmontv tvoggi") + logger.debug("filmontv tvoggi") itemlist = [] # Carica la pagina @@ -126,7 +126,7 @@ def now_on_misc_film(item): return itemlist def now_on_misc(item): - logger.info("filmontv tvoggi") + logger.debug("filmontv tvoggi") itemlist = [] # Carica la pagina @@ -161,7 +161,7 @@ def now_on_misc(item): return itemlist def now_on_tv(item): - logger.info("filmontv tvoggi") + logger.debug("filmontv tvoggi") itemlist 
= [] # Carica la pagina @@ -195,7 +195,7 @@ def now_on_tv(item): return itemlist def primafila(item): - logger.info("filmontv tvoggi") + logger.debug("filmontv tvoggi") itemlist = [] # Carica la pagina diff --git a/specials/help.py b/specials/help.py index 53d9cac8..f90cfe96 100644 --- a/specials/help.py +++ b/specials/help.py @@ -37,7 +37,7 @@ if config.is_xbmc(): def mainlist(item): - logger.info() + logger.debug() itemlist = [] if config.is_xbmc(): diff --git a/specials/kodfavorites.py b/specials/kodfavorites.py index 97274fcb..af732369 100644 --- a/specials/kodfavorites.py +++ b/specials/kodfavorites.py @@ -126,7 +126,7 @@ class KodfavouritesData(object): # ============================ def addFavourite(item): - logger.info() + logger.debug() alfav = KodfavouritesData() # If you get here through the context menu, you must retrieve the action and channel parameters @@ -179,7 +179,7 @@ def addFavourite(item): # ==================== def mainlist(item): - logger.info() + logger.debug() alfav = KodfavouritesData() item.category = get_name_from_filename(os.path.basename(alfav.user_favorites_file)) @@ -216,7 +216,7 @@ def mainlist(item): def mostrar_perfil(item): - logger.info() + logger.debug() alfav = KodfavouritesData() itemlist = [] @@ -289,7 +289,7 @@ def _crea_perfil(alfav): # Profile and link management def crear_perfil(item): - logger.info() + logger.debug() alfav = KodfavouritesData() if not _crea_perfil(alfav): return False @@ -299,7 +299,7 @@ def crear_perfil(item): def editar_perfil_titulo(item): - logger.info() + logger.debug() alfav = KodfavouritesData() if not alfav.user_favorites[item.i_perfil]: return False @@ -316,7 +316,7 @@ def editar_perfil_titulo(item): def eliminar_perfil(item): - logger.info() + logger.debug() alfav = KodfavouritesData() if not alfav.user_favorites[item.i_perfil]: return False @@ -332,7 +332,7 @@ def eliminar_perfil(item): def acciones_enlace(item): - logger.info() + logger.debug() acciones = [config.get_localized_string(70620), config.get_localized_string(70621), config.get_localized_string(70622), config.get_localized_string(70623), config.get_localized_string(70624), config.get_localized_string(70548), config.get_localized_string(70625), @@ -364,7 +364,7 @@ def acciones_enlace(item): def editar_enlace_titulo(item): - logger.info() + logger.debug() alfav = KodfavouritesData() if not alfav.user_favorites[item.i_perfil]: return False @@ -386,7 +386,7 @@ def editar_enlace_titulo(item): def editar_enlace_color(item): - logger.info() + logger.debug() alfav = KodfavouritesData() if not alfav.user_favorites[item.i_perfil]: return False @@ -410,7 +410,7 @@ def editar_enlace_color(item): def editar_enlace_thumbnail(item): - logger.info() + logger.debug() alfav = KodfavouritesData() if not alfav.user_favorites[item.i_perfil]: return False @@ -470,7 +470,7 @@ def editar_enlace_thumbnail(item): def editar_enlace_carpeta(item): - logger.info() + logger.debug() alfav = KodfavouritesData() if not alfav.user_favorites[item.i_perfil]: return False @@ -489,7 +489,7 @@ def editar_enlace_carpeta(item): def editar_enlace_lista(item): - logger.info() + logger.debug() alfav = KodfavouritesData() if not alfav.user_favorites[item.i_perfil]: return False @@ -527,7 +527,7 @@ def editar_enlace_lista(item): def eliminar_enlace(item): - logger.info() + logger.debug() alfav = KodfavouritesData() if not alfav.user_favorites[item.i_perfil]: return False @@ -542,7 +542,7 @@ def eliminar_enlace(item): # Move profiles and links (up, down, top, bottom) def mover_perfil(item): - 
logger.info() + logger.debug() alfav = KodfavouritesData() alfav.user_favorites = _mover_item(alfav.user_favorites, item.i_perfil, item.direccion) @@ -552,7 +552,7 @@ def mover_perfil(item): return True def mover_enlace(item): - logger.info() + logger.debug() alfav = KodfavouritesData() if not alfav.user_favorites[item.i_perfil]: return False @@ -597,7 +597,7 @@ def _mover_item(lista, i_selected, direccion): # ------------------------------------------ def mainlist_listas(item): - logger.info() + logger.debug() itemlist = [] item.category = 'Listas' @@ -619,7 +619,7 @@ def mainlist_listas(item): def acciones_lista(item): - logger.info() + logger.debug() acciones = [config.get_localized_string(70604), config.get_localized_string(70629), config.get_localized_string(70605), config.get_localized_string(70606), config.get_localized_string(70607)] @@ -641,7 +641,7 @@ def acciones_lista(item): def activar_lista(item): - logger.info() + logger.debug() fullfilename = os.path.join(config.get_data_path(), item.lista) if not os.path.exists(fullfilename): @@ -659,7 +659,7 @@ def activar_lista(item): def renombrar_lista(item): - logger.info() + logger.debug() fullfilename_current = os.path.join(config.get_data_path(), item.lista) if not os.path.exists(fullfilename_current): @@ -695,7 +695,7 @@ def renombrar_lista(item): def eliminar_lista(item): - logger.info() + logger.debug() fullfilename = os.path.join(config.get_data_path(), item.lista) if not os.path.exists(fullfilename): @@ -714,7 +714,7 @@ def eliminar_lista(item): def informacion_lista(item): - logger.info() + logger.debug() fullfilename = os.path.join(config.get_data_path(), item.lista) if not os.path.exists(fullfilename): @@ -741,7 +741,7 @@ def informacion_lista(item): def compartir_lista(item): - logger.info() + logger.debug() fullfilename = os.path.join(config.get_data_path(), item.lista) if not os.path.exists(fullfilename): @@ -796,7 +796,7 @@ def compartir_lista(item): def acciones_nueva_lista(item): - logger.info() + logger.debug() acciones = [config.get_localized_string(70651), config.get_localized_string(70652), @@ -830,7 +830,7 @@ def acciones_nueva_lista(item): def crear_lista(item): - logger.info() + logger.debug() titulo = platformtools.dialog_input(default='', heading=config.get_localized_string(70612)) if titulo is None or titulo == '': @@ -853,7 +853,7 @@ def crear_lista(item): def descargar_lista(item, url): - logger.info() + logger.debug() from core import httptools, scrapertools if 'tinyupload.com/' in url: diff --git a/specials/news.py b/specials/news.py index eecbf8f3..17e2289e 100644 --- a/specials/news.py +++ b/specials/news.py @@ -42,7 +42,7 @@ menu_settings_path = os.path.join(config.get_data_path(), "settings_channels", ' def mainlist(item): - logger.info() + logger.debug() itemlist = [] # list_canales, any_active = get_channels_list() @@ -128,7 +128,7 @@ def set_category_context(item): def get_channels_list(): - logger.info() + logger.debug() ## import web_pdb; web_pdb.set_trace() ## list_canales = {'peliculas': [], '4k': [], 'terror': [], 'infantiles': [], 'series': [], 'anime': [], ## 'castellano': [], 'latino':[], 'italiano':[], 'torrent':[], 'documentales': []} @@ -166,14 +166,14 @@ def get_channels_list(): return list_canales, any_active def set_cache(item): - logger.info() + logger.debug() item.mode = 'set_cache' t = Thread(target=novedades, args=[item]) t.start() #t.join() def get_from_cache(item): - logger.info() + logger.debug() itemlist=[] cache_node = jsontools.get_node_from_file('menu_cache_data.json', 
'cached') first=item.last @@ -198,7 +198,7 @@ def get_from_cache(item): return itemlist def add_menu_items(item, itemlist): - logger.info() + logger.debug() menu_icon = get_thumb('menu.png') menu = Item(channel="channelselector", action="getmainlist", viewmode="movie", thumbnail=menu_icon, title='Menu') @@ -215,7 +215,7 @@ def add_menu_items(item, itemlist): return itemlist def novedades(item): - logger.info() + logger.debug() global list_newest threads = [] @@ -231,7 +231,7 @@ def novedades(item): return get_from_cache(item) multithread = config.get_setting("multithread", "news") - logger.info("multithread= " + str(multithread)) + logger.debug("multithread= " + str(multithread)) if not multithread: if platformtools.dialog_yesno(config.get_localized_string(60515), @@ -270,7 +270,7 @@ def novedades(item): # if progreso.iscanceled(): # progreso.close() - # logger.info("Búsqueda cancelada") + # logger.debug("Búsqueda cancelada") # return itemlist # Modo Multi Thread @@ -284,7 +284,7 @@ def novedades(item): # Modo single Thread else: if mode == 'normal': - logger.info("Obteniendo novedades de channel_id=" + channel_id) + logger.debug("Obteniendo novedades de channel_id=" + channel_id) progreso.update(percentage, "", config.get_localized_string(60520) % channel_title) get_newest(channel_id, item.extra) @@ -304,7 +304,7 @@ def novedades(item): logger.debug(mensaje) if progreso.iscanceled(): - logger.info("Busqueda de novedades cancelada") + logger.debug("Busqueda de novedades cancelada") break time.sleep(0.5) @@ -312,7 +312,7 @@ def novedades(item): if mode == 'normal': mensaje = config.get_localized_string(60522) % (len(list_newest), time.time() - start_time) progreso.update(100, mensaje) - logger.info(mensaje) + logger.debug(mensaje) start_time = time.time() # logger.debug(start_time) @@ -345,7 +345,7 @@ def novedades(item): def get_newest(channel_id, categoria): - logger.info("channel_id=" + channel_id + ", categoria=" + categoria) + logger.debug("channel_id=" + channel_id + ", categoria=" + categoria) global list_newest global list_newest_tourl @@ -366,9 +366,9 @@ def get_newest(channel_id, categoria): if not puede: return - logger.info("running channel " + modulo.__name__ + " " + modulo.__file__) + logger.debug("running channel " + modulo.__name__ + " " + modulo.__file__) list_result = modulo.newest(categoria) - logger.info("canal= %s %d resultados" % (channel_id, len(list_result))) + logger.debug("canal= %s %d resultados" % (channel_id, len(list_result))) exist=False if os.path.exists(menu_cache_path): cache_node = jsontools.get_node_from_file('menu_cache_data.json', 'cached') @@ -377,7 +377,7 @@ def get_newest(channel_id, categoria): cache_node = {} # logger.debug('cache node: %s' % cache_node) for item in list_result: - # logger.info("item="+item.tostring()) + # logger.debug("item="+item.tostring()) item.channel = channel_id list_newest.append(item) list_newest_tourl.append(item.tourl()) @@ -532,7 +532,7 @@ def group_by_content(list_result_canal): def show_channels(item): - logger.info() + logger.debug() global channels_id_name channels_id_name = item.extra itemlist = [] diff --git a/specials/search.py b/specials/search.py index 5ea500e2..1968dc9b 100644 --- a/specials/search.py +++ b/specials/search.py @@ -18,12 +18,17 @@ if PY3: else: from concurrent_py2 import futures from core.item import Item -from core import tmdb, scrapertools, channeltools, filetools, jsontools +from core import tmdb, scrapertools, channeltools, filetools, jsontools, servertools from channelselector import 
get_thumb from platformcode import logger, config, platformtools, unify from core.support import typo +import xbmcgui import gc + +import xbmc +from threading import Thread +from core.support import dbg gc.disable() info_language = ["de", "en", "es", "fr", "it", "pt"] # from videolibrary.json @@ -31,7 +36,7 @@ def_lang = info_language[config.get_setting("info_language", "videolibrary")] def mainlist(item): - logger.info() + logger.debug() itemlist = [Item(channel=item.channel, title=config.get_localized_string(70276), action='new_search', mode='all', thumbnail=get_thumb("search.png")), Item(channel=item.channel, title=config.get_localized_string(70741) % config.get_localized_string(30122), action='new_search', mode='movie', thumbnail=get_thumb("search_movie.png")), @@ -48,7 +53,7 @@ def mainlist(item): def sub_menu(item): - logger.info() + logger.debug() itemlist = [Item(channel=item.channel, action='genres_menu', title=config.get_localized_string(70306), mode='movie', thumbnail=get_thumb("movie_genre.png")), Item(channel=item.channel, action='years_menu', title=config.get_localized_string(70742), mode='movie', thumbnail=get_thumb("movie_year.png")), @@ -66,7 +71,7 @@ def sub_menu(item): def saved_search(item): - logger.info() + logger.debug() itemlist = list() saved_searches_list = get_saved_searches() @@ -93,7 +98,7 @@ def saved_search(item): def new_search(item): - logger.info() + logger.debug() temp_search_file = config.get_temp_file('temp-search') if filetools.isfile(temp_search_file): @@ -199,10 +204,8 @@ def channel_search(item): searching += channel_list searching_titles += channel_titles cnt = 0 - progress = platformtools.dialog_progress(config.get_localized_string(30993) % item.title, config.get_localized_string(70744) % len(channel_list) + '\n' + ', '.join(searching_titles)) config.set_setting('tmdb_active', False) - search_action_list = [] module_dict = {} for ch in channel_list: @@ -391,14 +394,14 @@ def get_servers(item, module_dict): def get_info(itemlist): - logger.info() + logger.debug() tmdb.set_infoLabels_itemlist(itemlist, True, forced=True) return itemlist def get_channels(item): - logger.info() + logger.debug() channels_list = list() title_list = list() @@ -720,7 +723,7 @@ def discover_list(item): def from_context(item): - logger.info() + logger.debug() select = setting_channel_new(item) @@ -741,7 +744,7 @@ def from_context(item): def set_context(itemlist): - logger.info() + logger.debug() for elem in itemlist: elem.context = [{"title": config.get_localized_string(60412), @@ -758,7 +761,7 @@ def set_context(itemlist): def get_from_temp(item): - logger.info() + logger.debug() n = 30 nTotal = len(item.itemlist) @@ -810,4 +813,4 @@ def get_saved_searches(): else: saved_searches_list = list(current_saved_searches_list) - return saved_searches_list \ No newline at end of file + return saved_searches_list diff --git a/specials/setting.py b/specials/setting.py index cf7b9fa0..2de2882e 100644 --- a/specials/setting.py +++ b/specials/setting.py @@ -20,7 +20,7 @@ CHANNELNAME = "setting" def menu_channels(item): - logger.info() + logger.debug() itemlist = list() itemlist.append(Item(channel=CHANNELNAME, title=config.get_localized_string(60545), action="conf_tools", folder=False, @@ -55,7 +55,7 @@ def channel_config(item): # def setting_torrent(item): -# logger.info() +# logger.debug() # LIBTORRENT_PATH = config.get_setting("libtorrent_path", server="torrent", default="") # LIBTORRENT_ERROR = config.get_setting("libtorrent_error", server="torrent", default="") @@ -192,7 +192,7 @@ 
def channel_config(item): # config.set_setting("magnet2torrent", dict_data_saved["magnet2torrent"], server="torrent") def menu_servers(item): - logger.info() + logger.debug() itemlist = list() itemlist.append(Item(channel=CHANNELNAME, title=config.get_localized_string(60550), action="servers_blacklist", folder=False, @@ -221,7 +221,7 @@ def menu_servers(item): for server in sorted(server_list): server_parameters = servertools.get_server_parameters(server) - logger.info(server_parameters) + logger.debug(server_parameters) if server_parameters["has_settings"] and [x for x in server_parameters["settings"] if x["id"] not in ["black_list", "white_list"]]: itemlist.append( Item(channel=CHANNELNAME, title=". " + config.get_localized_string(60553) % server_parameters["name"], @@ -269,11 +269,9 @@ def cb_servers_blacklist(dict_values): n = len(dict_values) i = 1 for k, v in list(dict_values.items()): - config.set_setting("black_list", v, server=k) if v: # If the server is blacklisted it cannot be in the favorites list config.set_setting("favorites_servers_list", 0, server=k) blaklisted.append(k) - f = True progreso.update(old_div((i * 100), n), config.get_localized_string(60559) % k) i += 1 config.set_setting("black_list", blaklisted, server='servers') @@ -355,7 +353,7 @@ def cb_servers_favorites(server_names, dict_values): i += 1 c = 1 - logger.info(dict_favorites) + logger.debug(dict_favorites) favorites_servers_list = [] while c in dict_favorites: favorites_servers_list.append(dict_favorites[c]) @@ -373,7 +371,7 @@ def settings(item): def submenu_tools(item): - logger.info() + logger.debug() itemlist = list() # Custom tools @@ -414,7 +412,7 @@ def submenu_tools(item): def check_quickfixes(item): - logger.info() + logger.debug() if not config.dev_mode(): from platformcode import updater @@ -425,7 +423,7 @@ def check_quickfixes(item): # def update_quasar(item): -# logger.info() +# logger.debug() # from platformcode import custom_code, platformtools # stat = False @@ -437,7 +435,7 @@ def check_quickfixes(item): def conf_tools(item): - logger.info() + logger.debug() # Enable or disable channels if item.extra == "channels_onoff": @@ -539,14 +537,14 @@ def conf_tools(item): action="", folder=False, thumbnail=channel.thumbnail)) continue - # logger.info(channel.channel + " SALTADO!") + # logger.debug(channel.channel + " SALTADO!") # The json file settings of the channel are loaded file_settings = os.path.join(config.get_data_path(), "settings_channels", channel.channel + "_data.json") dict_settings = {} dict_file = {} if filetools.exists(file_settings): - # logger.info(channel.channel + " Has _data.json file") + # logger.debug(channel.channel + " Has _data.json file") channeljson_exists = True # We get saved settings from ../settings/channel_data.json try: @@ -556,7 +554,7 @@ def conf_tools(item): except EnvironmentError: logger.error("ERROR when reading the file: %s" % file_settings) else: - # logger.info(channel.channel + " No _data.json file") + # logger.debug(channel.channel + " No _data.json file") channeljson_exists = False if channeljson_exists: @@ -576,7 +574,7 @@ def conf_tools(item): # Default settings are loaded list_controls, default_settings = channeltools.get_channel_controls_settings( channel.channel) - # logger.info(channel.title + " | Default: %s" % default_settings) + # logger.debug(channel.title + " | Default: %s" % default_settings) except: import traceback logger.error(channel.title + config.get_localized_string(60570) % traceback.format_exc()) @@ -598,7 +596,7 @@ def 
conf_tools(item): list_status = config.get_localized_string(60571) else: - # logger.info(channel.channel + " - NO correction needed!") + # logger.debug(channel.channel + " - NO correction needed!") needsfix = False # If the channel status has been set it is added to the list @@ -828,7 +826,7 @@ def restore_tools(item): def report_menu(item): - logger.info('URL: ' + item.url) + logger.debug('URL: ' + item.url) from channelselector import get_thumb diff --git a/specials/trailertools.py b/specials/trailertools.py index cb23706b..e2230931 100644 --- a/specials/trailertools.py +++ b/specials/trailertools.py @@ -43,7 +43,7 @@ else: def buscartrailer(item, trailers=[]): - logger.info() + logger.debug() # List of actions if run from context menu if item.action == "manual_search" and item.contextual: @@ -78,8 +78,8 @@ def buscartrailer(item, trailers=[]): item.year = item.infoLabels['year'] - logger.info("Search: %s" % item.contentTitle) - logger.info("Year: %s" % item.year) + logger.debug("Search: %s" % item.contentTitle) + logger.debug("Year: %s" % item.year) if item.infoLabels['trailer'] and not trailers: url = item.infoLabels['trailer'] if "youtube" in url: @@ -122,7 +122,7 @@ def buscartrailer(item, trailers=[]): def manual_search(item): - logger.info() + logger.debug() texto = platformtools.dialog_input(default=item.contentTitle, heading=config.get_localized_string(30112)) if texto is not None: if item.extra == "mymovies": @@ -134,7 +134,7 @@ def manual_search(item): def tmdb_trailers(item, tipo="movie"): - logger.info() + logger.debug() from core.tmdb import Tmdb itemlist = [] @@ -153,7 +153,7 @@ def tmdb_trailers(item, tipo="movie"): def youtube_search(item): - logger.info() + logger.debug() itemlist = [] title = item.contentTitle if item.extra != "youtube": @@ -192,7 +192,7 @@ def youtube_search(item): def mymovies_search(item): - logger.info() + logger.debug() import json title = item.contentTitle @@ -216,7 +216,7 @@ def mymovies_search(item): def search_links_mymovies(item): - logger.info() + logger.debug() trailer_url = match(item, patron=r'<li class="bottone_playlist"[^>]+><a href="([^"]+)"').match itemlist = [] data = httptools.downloadpage(item.url).data @@ -236,7 +236,7 @@ def search_links_mymovies(item): def filmaffinity_search(item): - logger.info() + logger.debug() if item.filmaffinity: item.url = item.filmaffinity @@ -284,7 +284,7 @@ def filmaffinity_search(item): def search_links_filmaff(item): - logger.info() + logger.debug() itemlist = [] data = httptools.downloadpage(item.url).data diff --git a/specials/tvmoviedb.py b/specials/tvmoviedb.py index a181f044..6041fe42 100644 --- a/specials/tvmoviedb.py +++ b/specials/tvmoviedb.py @@ -28,7 +28,7 @@ default_fan = filetools.join(config.get_runtime_path(), "fanart.jpg") def mainlist(item): - logger.info() + logger.debug() itemlist = [ # TMDB # item.clone(title=typo(config.get_localized_string(70021), 'bold'), action=""), @@ -55,7 +55,7 @@ def configuracion(item): return ret def search_star(item): - logger.info() + logger.debug() itemlist = [] item.type='movie' @@ -97,7 +97,7 @@ def search_(item): def searcing(item): - logger.info() + logger.debug() new_item = Item(title=item.contentTitle, text=item.contentTitle.replace("+", " "), mode=item.contentType, infoLabels=item.infoLabels) @@ -625,7 +625,7 @@ def indices_tmdb(item): def filter(item): - logger.info() + logger.debug() from datetime import datetime list_controls = [] @@ -705,7 +705,7 @@ def filtered(item, values): def musica_movie(item): - logger.info() + logger.debug() itemlist 
= [] data = match(item).data matches = match(data, patron=r'<td class="left">([^<]+)<br><small>([^<]+)</small>.*?<td>(\d+:\d+).*?<p id="([^"]+)"').matches @@ -729,7 +729,7 @@ def list_imdb(item): url = 'http://www.imdb.com/search/title?' + item.url # data = httptools.downloadpage(url, headers=headers, replace_headers=True).data data = match(url, headers=headers).data - logger.info(data) + logger.debug(data) # data = re.sub(r"\n|\r|\t| ", "", data) # data = re.sub(r"\s{2}", " ", data) @@ -812,7 +812,7 @@ def list_imdb(item): def filter_imdb(item): - logger.info() + logger.debug() from datetime import datetime list_controls = [] @@ -1392,7 +1392,7 @@ def indices_imdb(item): # def filter_fa(item): -# logger.info() +# logger.debug() # from datetime import datetime # list_controls = [] @@ -1494,7 +1494,7 @@ def indices_imdb(item): # def login_fa(): -# logger.info() +# logger.debug() # try: # user = config.get_setting("usuariofa", "tvmoviedb") @@ -1519,7 +1519,7 @@ def indices_imdb(item): # userid = scrapertools.find_single_match(data, 'id-user=(\d+)') # if userid: # config.set_setting("userid", userid, "tvmoviedb") -# logger.info("Login correcto") +# logger.debug("Login correcto") # return True, "" # except: # import traceback @@ -1644,7 +1644,7 @@ def indices_imdb(item): # def votar_fa(item): # # Window to select the vote -# logger.info() +# logger.debug() # list_controls = [] # valores = {} @@ -1921,7 +1921,7 @@ def acciones_trakt(item): ratings = [] try: for i, entry in enumerate(data): - logger.info('ENTRY:',entry) + logger.debug('ENTRY:',entry) if i <= item.pagina: continue # try: entry = entry[item.args] # except: pass @@ -1997,7 +1997,7 @@ def acciones_trakt(item): def order_list(item): - logger.info() + logger.debug() list_controls = [] valores1 = ['rating', 'added', 'title', 'released', 'runtime', 'popularity', 'percentage', 'votes'] @@ -2339,7 +2339,7 @@ def indices_mal(item): matches = match("https://myanimelist.net/anime.php", cookies=False, patronBlock=patronBlock, patron=patron).matches for url, title in matches: genero = title.split(" (", 1)[0] - logger.info(url_base, genero) + logger.debug(url_base, genero) thumbnail = url_base + genero.lower().replace(" ", "%20") if genero in ["Hentai", "Yaoi", "Yuri"] and not adult_mal: continue @@ -2488,7 +2488,7 @@ def detail_staff(item): patron_bio = r'<?<div class="spaceit_pad">(.*?)</td>' bio = match(data, patron=patron_bio).match bio = htmlclean(bio.replace("</div>", "\n")) - logger.info(bio) + logger.debug(bio) infoLabels = {'plot': bio} if not "No voice acting roles" in data: itemlist.append(Item(channel=item.channel, title=typo(config.get_localized_string(70374),'bold bullet'), action="", thumbnail=item.thumbnail, infoLabels=infoLabels)) @@ -2626,7 +2626,7 @@ def info_anidb(item, itemlist, url): def filter_mal(item): - logger.info() + logger.debug() list_controls = [] valores = {} @@ -2704,7 +2704,7 @@ def callback_mal(item, values): def musica_anime(item): # List available anime and songs similar to the anime title - logger.info() + logger.debug() itemlist = [] data = match("http://www.freeanimemusic.org/song_search.php", post=item.post).data @@ -2739,7 +2739,7 @@ def musica_anime(item): def login_mal(from_list=False): - logger.info() + logger.debug() from core import httptools from base64 import b64decode as bdec @@ -2768,7 +2768,7 @@ def login_mal(from_list=False): else: if generic: return False, config.get_localized_string(70393), user - logger.info("Correct login") + logger.debug("Correct login") return True, "", user except: 
import traceback @@ -2800,7 +2800,7 @@ def cuenta_mal(item): def items_mal(item): # Scraper for personal lists - logger.info() + logger.debug() itemlist = [] data = match(item.url).data diff --git a/specials/videolibrary.py b/specials/videolibrary.py index a46fcd6a..b543b98e 100644 --- a/specials/videolibrary.py +++ b/specials/videolibrary.py @@ -20,7 +20,7 @@ else: def mainlist(item): - logger.info() + logger.debug() itemlist = [Item(channel=item.channel, action="list_movies", title=config.get_localized_string(60509), category=config.get_localized_string(70270), thumbnail=thumb("videolibrary_movie")), @@ -37,7 +37,7 @@ def channel_config(item): def list_movies(item, silent=False): - logger.info() + logger.debug() itemlist = [] movies_path = [] for root, folders, files in filetools.walk(videolibrarytools.MOVIES_PATH): @@ -61,9 +61,7 @@ def list_movies(item, silent=False): def list_tvshows(item): - from time import time - start = time() - logger.info() + logger.debug() itemlist = [] lista = [] tvshows_path = [] @@ -88,7 +86,6 @@ def list_tvshows(item): title=typo(config.get_localized_string(70269), 'bold color kod'), folder=False), Item(channel=item.channel, action="configure_update_videolibrary", thumbnail=item.thumbnail, title=typo(config.get_localized_string(60599), 'bold color kod'), lista=lista, folder=False)] - logger.info('TEMPO= ' + str(time() - start)) return itemlist @@ -188,7 +185,7 @@ def get_results(nfo_path, root, Type, local=False): # Contextual menu: Mark as seen / not seen visto = item.library_playcounts.get(item.contentTitle, 0) item.infoLabels["playcount"] = visto - logger.info('item\n' + str(item)) + logger.debug('item\n' + str(item)) if visto > 0: seen_text = config.get_localized_string(60020) counter = 0 @@ -260,7 +257,7 @@ def configure_update_videolibrary(item): def get_seasons(item): - logger.info() + logger.debug() # logger.debug("item:\n" + item.tostring('\n')) itemlist = [] dict_temp = {} @@ -319,7 +316,7 @@ def get_seasons(item): def get_episodes(item): - logger.info() + logger.debug() # logger.debug("item:\n" + item.tostring('\n')) itemlist = [] @@ -383,7 +380,7 @@ def get_episodes(item): def findvideos(item): from core import autoplay - logger.info() + logger.debug() # logger.debug("item:\n" + item.tostring('\n')) videolibrarytools.check_renumber_options(item) itemlist = [] @@ -535,7 +532,7 @@ def findvideos(item): def play(item): - logger.info() + logger.debug() # logger.debug("item:\n" + item.tostring('\n')) if not item.contentChannel == "local": @@ -575,7 +572,7 @@ def play(item): def update_videolibrary(item=''): - logger.info() + logger.debug() # Update active series by overwriting import service @@ -595,7 +592,7 @@ def update_videolibrary(item=''): def move_videolibrary(current_path, new_path, current_movies_folder, new_movies_folder, current_tvshows_folder, new_tvshows_folder): - logger.info() + logger.debug() backup_current_path = current_path backup_new_path = new_path @@ -668,7 +665,7 @@ def move_videolibrary(current_path, new_path, current_movies_folder, new_movies_ def delete_videolibrary(item): - logger.info() + logger.debug() if not platformtools.dialog_yesno(config.get_localized_string(20000), config.get_localized_string(80037)): return @@ -694,7 +691,7 @@ def delete_videolibrary(item): # context menu methods def update_tvshow(item): - logger.info() + logger.debug() # logger.debug("item:\n" + item.tostring('\n')) heading = config.get_localized_string(60037) @@ -720,11 +717,11 @@ def update_tvshow(item): def add_local_episodes(item): - 
logger.info() + logger.debug() done, local_episodes_path = videolibrarytools.config_local_episodes_path(item.path, item, silent=True) if done < 0: - logger.info("An issue has occurred while configuring local episodes") + logger.debug("An issue has occurred while configuring local episodes") elif local_episodes_path: nfo_path = filetools.join(item.path, "tvshow.nfo") head_nfo, item_nfo = videolibrarytools.read_nfo(nfo_path) @@ -739,7 +736,7 @@ def add_local_episodes(item): def remove_local_episodes(item): - logger.info() + logger.debug() nfo_path = filetools.join(item.path, "tvshow.nfo") head_nfo, item_nfo = videolibrarytools.read_nfo(nfo_path) @@ -757,7 +754,7 @@ def remove_local_episodes(item): def verify_playcount_series(item, path): - logger.info() + logger.debug() """ This method reviews and repairs the PlayCount of a series that has become out of sync with the actual list of episodes in its folder. Entries for missing episodes, seasons, or series are created with the "not seen" mark. Later it is sent to verify the counters of Seasons and Series @@ -820,7 +817,7 @@ def verify_playcount_series(item, path): def mark_content_as_watched2(item): - logger.info() + logger.debug() # logger.debug("item:\n" + item.tostring('\n')) if filetools.isfile(item.nfo): head_nfo, it = videolibrarytools.read_nfo(item.nfo) @@ -858,7 +855,7 @@ def mark_content_as_watched2(item): def mark_content_as_watched(item): - logger.info() + logger.debug() #logger.debug("item:\n" + item.tostring('\n')) if filetools.exists(item.nfo): @@ -896,7 +893,7 @@ def mark_content_as_watched(item): def mark_season_as_watched(item): - logger.info() + logger.debug() # logger.debug("item:\n" + item.tostring('\n')) # Get dictionary of marked episodes @@ -949,7 +946,7 @@ def mark_season_as_watched(item): def mark_tvshow_as_updatable(item, silent=False): - logger.info() + logger.debug() head_nfo, it = videolibrarytools.read_nfo(item.nfo) it.active = item.active filetools.write(item.nfo, head_nfo + it.tojson()) @@ -1063,7 +1060,7 @@ def delete(item): def check_season_playcount(item, season): - logger.info() + logger.debug() if season: episodios_temporada = 0 @@ -1085,7 +1082,7 @@ def check_season_playcount(item, season): def check_tvshow_playcount(item, season): - logger.info() + logger.debug() if season: temporadas_serie = 0 temporadas_vistas_serie = 0 diff --git a/tools/updateDomains.py b/tools/updateDomains.py new file mode 100644 index 00000000..98d1b0b4 --- /dev/null +++ b/tools/updateDomains.py @@ -0,0 +1,79 @@ +import json, os, sys +import socket + +path = os.getcwd() +sys.path.insert(0, path) +if sys.version_info[0] >= 3: + from lib.httplib2 import py3 as httplib2 +else: + from lib.httplib2 import py2 as httplib2 + + +def http_Resp(lst_urls): + rslt = {} + for sito in lst_urls: + try: + s = httplib2.Http() + code, resp = s.request(sito, body=None) + if code.previous: + print("r1 http_Resp: %s %s %s %s" % + (code.status, code.reason, code.previous['status'], + code.previous['-x-permanent-redirect-url'])) + rslt['code'] = code.previous['status'] + rslt['redirect'] = code.previous['-x-permanent-redirect-url'] + rslt['status'] = code.status + else: + rslt['code'] = code.status + except httplib2.ServerNotFoundError as msg: + # both for lack of ADSL and for non-existent sites + rslt['code'] = -2 + except socket.error as msg: + # for unreachable sites without correct DNS + # [Errno 111] Connection refused + rslt['code'] = 111 + except: + rslt['code'] = 'Connection error' + return rslt + + +if __name__ == '__main__': + fileJson = 
'channels.json' + + with open(fileJson) as f: + data = json.load(f) + + result = data['direct'] + + for chann, host in sorted(data['direct'].items()): + # to get an idea of the timing + # useful only if you control all channels + # for channels with error 522 about 40 seconds are lost ... + print("check #### INIZIO #### channel - host :%s - %s " % (chann, host)) + + rslt = http_Resp([host]) + + # all right + if rslt['code'] == 200: + result[chann] = host + # redirect + elif str(rslt['code']).startswith('3'): + # result[chann] = str(rslt['code']) +' - '+ rslt['redirect'][:-1] + if rslt['redirect'].endswith('/'): + rslt['redirect'] = rslt['redirect'][:-1] + result[chann] = rslt['redirect'] + # non-existent site + elif rslt['code'] == -2: + print('Host Sconosciuto - '+ str(rslt['code']) +' - '+ host) + # site not reachable + elif rslt['code'] == 111: + print('Host non raggiungibile - '+ str(rslt['code']) +' - ' + host) + else: + # other types of errors + print('Errore Sconosciuto - '+str(rslt['code']) +' - '+ host) + + print("check #### FINE #### rslt :%s " % (rslt)) + + result = {'findhost': data['findhost'], 'direct': result} + # I write the updated file + with open(fileJson, 'w') as f: + json.dump(result, f, sort_keys=True, indent=4) diff --git a/updatetvshow.py b/updatetvshow.py index 7227ec72..1b39f173 100644 --- a/updatetvshow.py +++ b/updatetvshow.py @@ -49,7 +49,7 @@ def search_paths(Id): def execute_sql(sql): - logger.info() + logger.debug() file_db = "" records = None @@ -69,14 +69,14 @@ def execute_sql(sql): break if file_db: - logger.info("DB file: %s" % file_db) + logger.debug("DB file: %s" % file_db) conn = None try: import sqlite3 conn = sqlite3.connect(file_db) cursor = conn.cursor() - logger.info("Running sql: %s" % sql) + logger.debug("Running sql: %s" % sql) cursor.execute(sql) conn.commit() @@ -86,7 +86,7 @@ def execute_sql(sql): records = [] conn.close() - logger.info("Query executed. Records: %s" % nun_records) + logger.debug("Query executed. Records: %s" % nun_records) except: logger.error("Error executing sql query")
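
Note on the new domain updater (illustrative, not part of this patch): because the scheduled workflow runs tools/updateDomains.py and then pushes the rewritten channels.json straight to master, a small validation pass before the commit step can catch a malformed file early. The sketch below only assumes what the patch itself shows -- a repository-root channels.json with the "direct" and "findhost" sections -- and the script name validate_channels.py is hypothetical.

# validate_channels.py -- hypothetical helper, not part of this patch.
# Checks the regenerated channels.json: both expected sections must exist
# and every entry must be an absolute http(s) URL.
import json
import sys

try:
    from urllib.parse import urlparse   # Python 3
except ImportError:
    from urlparse import urlparse        # Python 2.7 (the interpreter the workflow sets up)


def check_channels(path='channels.json'):
    with open(path) as f:
        data = json.load(f)

    errors = []
    for section in ('direct', 'findhost'):
        if section not in data:
            errors.append('missing section: %s' % section)
            continue
        for name, url in sorted(data[section].items()):
            parsed = urlparse(url)
            if parsed.scheme not in ('http', 'https') or not parsed.netloc:
                errors.append('%s/%s: not a valid URL: %r' % (section, name, url))
    return errors


if __name__ == '__main__':
    problems = check_channels()
    for problem in problems:
        print(problem)
    sys.exit(1 if problems else 0)

If adopted, it could run right after the updater in the "Update domains" step (python tools/updateDomains.py && python validate_channels.py), so a broken rewrite fails the job instead of being committed.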
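
One detail of http_Resp worth flagging, hedged because it rests on my recollection of httplib2 internals rather than anything in this patch: resp.previous is set whenever a redirect chain was followed, but the '-x-permanent-redirect-url' key is, as far as I recall, only populated for permanent (301) redirects, so a host answering with a temporary 302/307 would raise a KeyError inside the try block and be reported through the generic "Connection error" branch. A sketch that relies only on the standard Location header of the intermediate response (header names are stored lowercase in the httplib2 response dict) could look like this; final_url is a hypothetical name, not a function in the patch:

import httplib2

def final_url(url):
    # Follow redirects and return the landing URL; fall back to the
    # original URL when no redirect happened or no Location was recorded.
    resp, _content = httplib2.Http().request(url)
    prev = resp.previous
    if prev is not None and 'location' in prev:
        return prev['location'].rstrip('/')
    return url.rstrip('/')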