Merge remote-tracking branch 'origin/master'

marco
2020-02-16 19:49:49 +01:00
2 changed files with 22 additions and 33 deletions


@@ -246,7 +246,7 @@ def findvid_serie(item):
     matches = re.compile(patron, re.DOTALL).finditer(html)
     for match in matches:
         scrapedurl = match.group(1)
-        scrapedtitle = match.group(2)
+        scrapedtitle = ("%s" % support.color(blktxt, "orange") + " - " if blktxt else "") + match.group(2)
         # title = item.title + " [COLOR blue][" + scrapedtitle + "][/COLOR]"
         itemlist.append(
             Item(channel=item.channel,
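
The new title line prepends the current block label in orange. A rough sketch of the resulting label, assuming support.color wraps text in Kodi color tags as the commented-out line above suggests (the color helper below is a hypothetical stand-in, not the addon's function):

def color(text, col):
    # Hypothetical stand-in for support.color: Kodi-style color markup
    return "[COLOR %s]%s[/COLOR]" % (col, text)

blktxt = "HD"
scrapedtitle = ("%s" % color(blktxt, "orange") + " - " if blktxt else "") + "Episode 1"
print(scrapedtitle)  # [COLOR orange]HD[/COLOR] - Episode 1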
@@ -266,34 +266,16 @@ def findvid_serie(item):
     lnkblkp = []
     data = item.url
-    # First blocks of links
-    if data[0:data.find('<a')].find(':') > 0:
-        lnkblk.append(data[data.find(' - ') + 3:data[0:data.find('<a')].find(':') + 1])
-        lnkblkp.append(data.find(' - ') + 3)
-    else:
-        lnkblk.append(' ')
-        lnkblkp.append(data.find('<a'))
-    # Find new blocks of links
-    patron = r'<a\s[^>]+>[^<]+</a>([^<]+)'
-    matches = re.compile(patron, re.DOTALL).finditer(data)
-    for match in matches:
-        sep = match.group(1)
-        if sep != ' - ':
-            lnkblk.append(sep)
-    i = 0
-    if len(lnkblk) > 1:
-        for lb in lnkblk[1:]:
-            lnkblkp.append(data.find(lb, lnkblkp[i] + len(lnkblk[i])))
-            i = i + 1
-    for i in range(0, len(lnkblk)):
-        if i == len(lnkblk) - 1:
-            load_vid_series(data[lnkblkp[i]:], item, itemlist, lnkblk[i])
-        else:
-            load_vid_series(data[lnkblkp[i]:lnkblkp[i + 1]], item, itemlist, lnkblk[i])
+    # Blocks with split
+    blk = re.split(r"(?:>\s*)?([A-Za-z\s0-9]*):\s*<", data, flags=re.S)
+    blktxt = ""
+    for b in blk:
+        if b[0:3] == "a h" or b[0:4] == "<a h":
+            load_vid_series("<%s>" % b, item, itemlist, blktxt)
+            blktxt = ""
+        elif len(b.strip()) > 1:
+            blktxt = b.strip()
     return support.server(item, itemlist=itemlist)
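
The rewritten block handling drops the manual position bookkeeping (lnkblk/lnkblkp) in favour of a single re.split pass: the pattern captures each quality label before a ':' and leaves the anchor markup, minus its angle brackets, in the surrounding pieces. A minimal self-contained sketch of that pass, using hypothetical sample HTML and a print in place of load_vid_series:

import re

# Hypothetical sample of the label/anchor markup the channel page serves
data = "HD: <a href='http://host/1'>Link1</a> SD: <a href='http://host/2'>Link2</a>"

# Alternates captured labels with the stripped <a ...> fragments.
# Note: re.split takes flags as a keyword; the third positional
# argument would be maxsplit.
blk = re.split(r"(?:>\s*)?([A-Za-z\s0-9]*):\s*<", data, flags=re.S)

blktxt = ""
for b in blk:
    if b[0:3] == "a h" or b[0:4] == "<a h":
        # "<%s>" % b restores the '<' consumed by the split pattern
        print("label=%r fragment=%r" % (blktxt, "<%s>" % b))
        blktxt = ""
    elif len(b.strip()) > 1:
        blktxt = b.strip()

Each restored fragment is handed to load_vid_series together with its label, which is how blktxt reaches the title-building loop in the first hunk.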


@@ -14,6 +14,12 @@ from platformcode import logger, config
 headers = [['User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0']]

 def test_video_exists(page_url):
+    def int_bckup_method():
+        global data, headers
+        page_url = scrapertools.find_single_match(data, r"""<center><a href='(https?:\/\/wstream[^']+)'\s*title='bkg'""")
+        if page_url:
+            data = httptools.downloadpage(page_url, headers=headers, follow_redirects=True).data
+
     logger.info("(page_url='%s')" % page_url)
     resp = httptools.downloadpage(page_url)
     global data
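
int_bckup_method centralises the fallback that the old code duplicated below: it scrapes the alternate 'bkg' link out of the already-downloaded page and refetches through it. A standalone sketch of just the extraction step, with a hypothetical page snippet (scrapertools.find_single_match behaves like re.search returning the first group, or '' on no match):

import re

# Hypothetical snippet of a wstream page carrying the backup link
data = "<center><a href='https://wstream.example/backup/abc' title='bkg'>backup</a></center>"

pattern = r"<center><a href='(https?:\/\/wstream[^']+)'\s*title='bkg'"
match = re.search(pattern, data)
page_url = match.group(1) if match else ""
print(page_url)  # https://wstream.example/backup/abc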
@@ -28,11 +34,12 @@ def test_video_exists(page_url):
     possibleParam = scrapertools.find_multiple_matches(data, r"""<input.*?(?:name=["']([^'"]+).*?value=["']([^'"]*)['"]>|>)""")
     if possibleParam:
         post = urllib.urlencode({param[0]: param[1] for param in possibleParam if param[0]})
-        data = httptools.downloadpage(page_url, headers=headers, post=post, follow_redirects=True).data
+        if post:
+            data = httptools.downloadpage(page_url, headers=headers, post=post, follow_redirects=True).data
+        else:
+            int_bckup_method()
     else:
-        page_url = scrapertools.find_single_match(data, r"""<center><a href='(https?:\/\/wstream[^']+)'\s*title='bkg'""")
-        if page_url:
-            data = httptools.downloadpage(page_url, headers=headers, follow_redirects=True).data
+        int_bckup_method()
     if "Not Found" in data or "File was deleted" in data:
         return False, config.get_localized_string(70449) % 'Wstream'
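
The reworked flow first harvests every hidden <input> name/value pair into a POST body and only falls back to int_bckup_method when nothing usable turns up. A self-contained sketch of the parameter harvesting, with hypothetical form markup (find_multiple_matches is essentially re.findall):

import re
try:
    from urllib import urlencode           # Python 2, as the addon uses
except ImportError:
    from urllib.parse import urlencode     # Python 3

# Hypothetical hidden form of the kind the server page embeds
data = ("<input type='hidden' name='op' value='download1'>"
        "<input type='hidden' name='id' value='abc123'>"
        "<input type='submit'>")

possibleParam = re.findall(r"""<input.*?(?:name=["']([^'"]+).*?value=["']([^'"]*)['"]>|>)""", data)
post = urlencode({param[0]: param[1] for param in possibleParam if param[0]})
print(post)  # op=download1&id=abc123 (order may vary on Python 2)

An empty post, either because no input carried a name or because no form matched at all, now routes to the backup URL instead of replaying the request, which is the behavioural change this hunk lands.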