Fix AnimeWorld, support for merged episodes
@@ -177,7 +177,7 @@ def episodios(item):
 pagination = 50
 # data = get_data(item)
 patronBlock= r'<div class="server\s*active\s*"(?P<block>.*?)(?:<div class="server|<link)'
-patron = r'<li[^>]*>\s*<a.*?href="(?P<url>[^"]+)"[^>]*>(?P<episode>[^<]+)<'
+patron = r'<li[^>]*>\s*<a.*?href="(?P<url>[^"]+)"[^>]*>(?P<episode>[^-<]+)(?:-(?P<episode2>[^<]+))?'
 def itemHook(item):
 item.number = support.re.sub(r'\[[^\]]+\]', '', item.title)
 item.title += support.typo(item.fulltitle,'-- bold')
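For reference, a minimal sketch of what the new patron captures, using plain re outside the addon (the <li> sample is made up, not actual AnimeWorld markup): a merged label such as "12-13" now yields separate episode/episode2 groups, where the old patron returned the whole "12-13" string as episode.

import re

patron = r'<li[^>]*>\s*<a.*?href="(?P<url>[^"]+)"[^>]*>(?P<episode>[^-<]+)(?:-(?P<episode2>[^<]+))?'
sample = '<li class="episode"><a href="/play/some-anime.12345/abcdef">12-13</a></li>'  # hypothetical markup

m = re.search(patron, sample)
print(m.group('episode'))   # -> '12'
print(m.group('episode2'))  # -> '13' (None for a single episode like '12')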
@@ -200,9 +200,9 @@ def findvideos(item):
 match = support.match(data, patronBlock=r'data-name="' + ID + r'"[^>]+>(.*?)(?:<div class="(?:server|download)|link)', patron=r'data-id="([^"]+)" data-episode-num="' + (item.number if item.number else '1') + '"' + r'.*?href="([^"]+)"').match
 if match:
 epID, epurl = match
-if 'vvvvid' in name.lower():
-urls.append(support.match(host + '/api/episode/ugly/serverPlayerAnimeWorld?id=' + epID, headers=headers, patron=r'<a.*?href="([^"]+)"').match)
-elif 'animeworld' in name.lower():
+# if 'vvvvid' in name.lower():
+# urls.append(support.match(host + '/api/episode/ugly/serverPlayerAnimeWorld?id=' + epID, headers=headers, patron=r'<a.*?href="([^"]+)"', debug=True).match)
+if 'animeworld' in name.lower():
 url = support.match(data, patron=r'href="([^"]+)"\s*id="alternativeDownloadLink"', headers=headers).match
 title = support.match(url, patron=r'http[s]?://(?:www.)?([^.]+)', string=True).match
 itemlist.append(item.clone(action="play", title=title, url=url, server='directo'))
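The 'directo' entry now takes its title from the host of the alternative download link. Below is a rough stand-in for that last support.match call, using plain re and a made-up URL; the patron keeps only the first label of the domain.

import re

url = 'https://server42.example.org/anime/ep12.mp4'  # hypothetical download URL
title = re.search(r'http[s]?://(?:www.)?([^.]+)', url).group(1)
print(title)  # -> 'server42'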
@@ -172,7 +172,7 @@ def scrapeBlock(item, args, block, patron, headers, action, pagination, debug, t
 matches = scrapertools.find_multiple_matches_groups(block, patron)
 logger.debug('MATCHES =', matches)

-known_keys = ['url', 'title', 'title2', 'season', 'episode', 'thumb', 'quality', 'year', 'plot', 'duration', 'genere', 'rating', 'type', 'lang', 'other', 'size', 'seed']
+known_keys = ['url', 'title', 'title2', 'season', 'episode', 'episode2', 'thumb', 'quality', 'year', 'plot', 'duration', 'genere', 'rating', 'type', 'lang', 'other', 'size', 'seed']
 # Legenda known_keys per i groups nei patron
 # known_keys = ['url', 'title', 'title2', 'season', 'episode', 'thumb', 'quality',
 # 'year', 'plot', 'duration', 'genere', 'rating', 'type', 'lang']
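Adding 'episode2' to known_keys is what lets scrapeBlock pick up the new named group. As a rough approximation with plain re (assuming find_multiple_matches_groups returns one dict per match built from the named groups), the episode patron above yields entries like these:

import re

patron = r'<li[^>]*>\s*<a.*?href="(?P<url>[^"]+)"[^>]*>(?P<episode>[^-<]+)(?:-(?P<episode2>[^<]+))?'
block = ('<li><a href="/play/a.1/x1">11</a></li>'      # hypothetical block content
         '<li><a href="/play/a.1/x2">12-13</a></li>')

matches = [m.groupdict() for m in re.finditer(patron, block)]
# [{'url': '/play/a.1/x1', 'episode': '11', 'episode2': None},
#  {'url': '/play/a.1/x2', 'episode': '12', 'episode2': '13'}]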
@@ -290,12 +290,15 @@ def scrapeBlock(item, args, block, patron, headers, action, pagination, debug, t
 episode = str(int(ep[0])).zfill(1) + 'x' + str(int(ep[1])).zfill(2)
 infolabels['season'] = int(ep[0])
 infolabels['episode'] = int(ep[1])
-second_episode = scrapertools.find_single_match(episode, r'x\d+x(\d+)')
+second_episode = scrapertools.find_single_match(episode, r'x\d+x-(\d+)')
 if second_episode: episode = re.sub(r'(\d+x\d+)x\d+',r'\1-', episode) + second_episode.zfill(2)
 except:
 logger.debug('invalid episode: ' + episode)
 pass

+if scraped['episode2']:
+episode += '-' + scrapertools.find_single_match(scraped['episode2'], r'(\d+)')
+
 # make formatted Title [longtitle]
 s = ' - '
 # title = episode + (s if episode and title else '') + title
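A quick walk-through of the added episode2 branch with made-up values and plain re standing in for scrapertools.find_single_match: a merged entry (episode '12', episode2 '13') in season 1 ends up formatted as '1x12-13'.

import re

scraped = {'episode': '12', 'episode2': '13'}   # hypothetical scraped groups
ep = ['1', scraped['episode']]                  # season, episode

episode = str(int(ep[0])).zfill(1) + 'x' + str(int(ep[1])).zfill(2)   # '1x12'
if scraped['episode2']:
    # stand-in for scrapertools.find_single_match(scraped['episode2'], r'(\d+)')
    episode += '-' + re.search(r'(\d+)', scraped['episode2']).group(1)

print(episode)  # -> '1x12-13'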