Merge remote-tracking branch 'origin/master'
@@ -134,35 +134,40 @@ def peliculas(item):
 def episodios(item):
     itemlist = []
     json_file = current_session.get(item.url, headers=headers, params=payload).json()
-    show_id = str(json_file['data'][0]['show_id'])
-    season_id = str(json_file['data'][0]['season_id'])
-    episodes = []
-    support.info('SEASON ID= ',season_id)
-    for episode in json_file['data']:
-        episodes.append(episode['episodes'])
-    for episode in episodes:
-        for key in episode:
-            if 'stagione' in encode(key['title']).lower():
-                season = support.match(encode(key['title']), patron=r'[Ss]tagione\s*(\d+)').match
-                episode = support.match(encode(key['title']), patron=r'[Ee]pisodio\s*(\d+)').match
-                if season and episode:
-                    title = season + 'x' + episode + ' - ' + item.fulltitle
-                    make_item = True
-            elif int(key['season_id']) == int(season_id):
-                try:
-                    title = 'Episodio ' + key['number'] + ' - ' + key['title'].encode('utf8')
-                except:
-                    title = 'Episodio ' + key['number'] + ' - ' + key['title']
-                make_item = True
-            else:
-                make_item = False
-            if make_item == True:
-                if type(title) == tuple: title = title[0]
-                itemlist.append(
-                    item.clone(title = title,
-                               url= host + show_id + '/season/' + str(key['season_id']) + '/',
-                               action= 'findvideos',
-                               video_id= key['video_id']))
+    for i, block in enumerate(json_file['data']):
+        if len(json_file['data']) > 1:
+            prepend = str(i + 1) + 'x'
+        else:
+            prepend = 'Episodio '
+        show_id = str(block['show_id'])
+        season_id = str(block['season_id'])
+        episodes = []
+        support.info('SEASON ID= ',season_id)
+        for episode in json_file['data']:
+            episodes.append(episode['episodes'])
+        for episode in episodes:
+            for key in episode:
+                if 'stagione' in encode(key['title']).lower():
+                    season = support.match(encode(key['title']), patron=r'[Ss]tagione\s*(\d+)').match
+                    episode = support.match(encode(key['title']), patron=r'[Ee]pisodio\s*(\d+)').match
+                    if season and episode:
+                        title = season + 'x' + episode + ' - ' + item.fulltitle
+                        make_item = True
+                elif int(key['season_id']) == int(season_id):
+                    try:
+                        title = prepend + key['number'] + ' - ' + key['title'].encode('utf8')
+                    except:
+                        title = prepend + key['number'] + ' - ' + key['title']
+                    make_item = True
+                else:
+                    make_item = False
+                if make_item == True:
+                    if type(title) == tuple: title = title[0]
+                    itemlist.append(
+                        item.clone(title = title,
+                                   url= host + show_id + '/season/' + str(key['season_id']) + '/',
+                                   action= 'findvideos',
+                                   video_id= key['video_id']))
     autorenumber.start(itemlist, item)
     if autorenumber.check(item) == True \
         or support.match(itemlist[0].title, patron=r"(\d+x\d+)").match:
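
Note on the rewritten block above: when an episode title embeds "Stagione N ... Episodio M", the channel builds an "NxM" label from two regex captures; otherwise it falls back to the per-block prepend prefix ("1x", "2x", ... or "Episodio "). A minimal sketch of that title handling, with plain re standing in for support.match and an invented sample title:

    import re

    def label_from_title(title, prepend, number):
        # Same patterns as in the diff above.
        season = re.search(r'[Ss]tagione\s*(\d+)', title)
        episode = re.search(r'[Ee]pisodio\s*(\d+)', title)
        if season and episode:
            return season.group(1) + 'x' + episode.group(1)
        # Fallback mirroring the elif branch in the diff.
        return prepend + number

    print(label_from_title('Stagione 2 Episodio 5', 'Episodio ', '5'))  # -> 2x5
    print(label_from_title('Il ritorno', '1x', '5'))                    # -> 1x5
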
@@ -2,9 +2,8 @@
 # --------------------------------------------------------
 # Conector streamtape By Alfa development Group
 # --------------------------------------------------------
-from core import httptools
-from core import scrapertools
-from platformcode import logger
+from core import httptools, scrapertools
+from platformcode import logger, config
 import sys

 PY3 = False
@@ -20,7 +19,7 @@ def test_video_exists(page_url):
     data = httptools.downloadpage(page_url, headers=referer).data

     if "Video not found" in data:
-        return False, "[streamtape] El archivo no existe o ha sido borrado"
+        return False, config.get_localized_string(70449) % 'Streamtape'

     return True, ""

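The test_video_exists change swaps a hard-coded Spanish message for a translated template: config.get_localized_string(70449) returns a "%s"-style string from the add-on's language files and the server name is interpolated into it. A rough sketch under an assumed template text (the real 70449 entry lives in platformcode's string tables, not here):

    # Hypothetical stand-in for platformcode.config.get_localized_string;
    # the template text is an assumption for illustration only.
    STRINGS = {70449: "%s: the file does not exist or has been deleted"}

    def get_localized_string(string_id):
        return STRINGS.get(string_id, "%s")

    print(get_localized_string(70449) % 'Streamtape')
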
@@ -29,7 +28,10 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
     logger.info("url=" + page_url)

     video_urls = []
-    url = "https:" + scrapertools.find_single_match(data, 'innerHTML = "([^"]+)')
+    possible_url = scrapertools.find_single_match(data, 'innerHTML = "([^"]+)')
+    if not '\\' in possible_url:
+        possible_url = scrapertools.find_single_match(data, "innerHTML\\'\]=\\'([^']+)")
+    url = "https:" + possible_url
     url = httptools.downloadpage(url, follow_redirects=False, only_headers=True).headers.get("location", "")
-    video_urls.append(['MP4 [streamtape]', url])
+    video_urls.append(['MP4 [Streamtape]', url])
     return video_urls
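
The get_video_url change adds a fallback for a second markup variant: if the string captured by the first innerHTML pattern contains no backslash, the escaped ['innerHTML']=' pattern is tried before the "https:" prefix and the redirect lookup. A minimal sketch with plain re standing in for scrapertools.find_single_match and invented sample markup:

    import re

    def find_single_match(data, patron):
        # Simplified stand-in for scrapertools.find_single_match (assumption):
        # returns the first capture group or an empty string.
        m = re.search(patron, data)
        return m.group(1) if m else ""

    # Invented sample matching the second variant handled above.
    data = "document.getElementById('robotlink')['innerHTML']='//streamtape.com/get_video?id=abc';"

    possible_url = find_single_match(data, 'innerHTML = "([^"]+)')
    if not '\\' in possible_url:
        possible_url = find_single_match(data, "innerHTML\\'\]=\\'([^']+)")
    print("https:" + possible_url)  # -> https://streamtape.com/get_video?id=abc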