add akstream and backin servers

mac12m99
2019-03-17 15:42:56 +01:00
parent 38774a6f8b
commit 5e735fbefc
4 changed files with 190 additions and 0 deletions

@@ -0,0 +1,45 @@
{
"active": true,
"find_videos": {
"ignore_urls": ["http://akvideo.stream/video"],
"patterns": [
{
"pattern": "akvideo.stream\/video\/(?:embed-)?([a-zA-Z0-9]+)",
"url": "http://akvideo.stream\/video\/\\1"
},
{
"pattern": "akvideo.stream/(?:embed-)?([a-zA-Z0-9]+)",
"url": "http://akvideo.stream/\\1"
}
]
},
"free": true,
"id": "akstream",
"name": "akstream",
"settings": [
{
"default": false,
"enabled": true,
"id": "black_list",
"label": "$ADDON[plugin.video.kod 60654]",
"type": "bool",
"visible": true
},
{
"default": 0,
"enabled": true,
"id": "favorites_servers_list",
"label": "$ADDON[plugin.video.kod 60655]",
"lvalues": [
"No",
"1",
"2",
"3",
"4",
"5"
],
"type": "list",
"visible": false
}
]
}
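
For context, each entry in "find_videos.patterns" is a regex plus a URL template: the core scans page HTML with "pattern" and substitutes the captured id into "url". That matching code lives in KOD's core, not in this commit, so the following is only an illustrative sketch of the behavior:

# Illustrative sketch of how the akstream patterns above are applied;
# the real matching is done by KOD's core, not by code in this commit.
import re

PATTERNS = [
    (r"akvideo.stream/video/(?:embed-)?([a-zA-Z0-9]+)", r"http://akvideo.stream/video/\1"),
    (r"akvideo.stream/(?:embed-)?([a-zA-Z0-9]+)", r"http://akvideo.stream/\1"),
]

def resolve(html):
    for pattern, template in PATTERNS:
        m = re.search(pattern, html)
        if m:
            return m.expand(template)  # substitutes \1 with the captured id
    return None

resolve('<iframe src="http://akvideo.stream/video/embed-a1b2c3">')
# -> 'http://akvideo.stream/video/a1b2c3'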

@@ -0,0 +1,45 @@
# -*- coding: utf-8 -*-
# by DrZ3r0
import urllib

from core import httptools
from core import scrapertools
from platformcode import logger, config


def test_video_exists(page_url):
    logger.info("(page_url='%s')" % page_url)
    data = httptools.downloadpage(page_url).data
    if "File was deleted" in data or "Page Cannot Be Found" in data:
        return False, config.get_localized_string(70449) % "Akstream"
    return True, ""


def get_video_url(page_url, premium=False, user="", password="", video_password=""):
    logger.info("url=" + page_url)
    video_urls = []
    data = httptools.downloadpage(page_url).data.replace('https', 'http')
    # Resolution labels, listed in the same order as the .mp4 URLs below
    vres = scrapertools.find_multiple_matches(data, 'nowrap[^>]+>([^,]+)')
    # Unpack the eval(function(p,a,c,k,e,d)...) packed JS, if present
    data_pack = scrapertools.find_single_match(data, r"(eval.function.p,a,c,k,e,.*?)\s*</script>")
    if data_pack:
        from lib import jsunpack
        data = jsunpack.unpack(data_pack)
    # Direct video URLs
    matches = scrapertools.find_multiple_matches(data, r'(http.*?\.mp4)')
    _headers = urllib.urlencode(httptools.default_headers)
    for i, media_url in enumerate(matches):
        video_urls.append([vres[i] + " mp4 [Akstream]", media_url + '|' + _headers])
    for video_url in video_urls:
        logger.info("%s - %s" % (video_url[0], video_url[1]))
    return video_urls
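
The "media_url + '|' + _headers" concatenation above follows Kodi's convention of appending URL-encoded request headers after a pipe, so the player sends them when fetching the stream. A sketch of the shape of one resulting entry (the URL and header value are made up for illustration):

# Shape of one video_urls entry; values are examples, not from this commit.
import urllib

media_url = "http://akvideo.stream/files/demo.mp4"  # hypothetical URL
headers = {"User-Agent": "Mozilla/5.0"}             # example header
entry = ["480 mp4 [Akstream]", media_url + "|" + urllib.urlencode(headers)]
# entry[1] -> 'http://akvideo.stream/files/demo.mp4|User-Agent=Mozilla%2F5.0'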

@@ -0,0 +1,41 @@
{
"active": true,
"find_videos": {
"ignore_urls": [],
"patterns": [
{
"pattern": "backin.net/([a-zA-Z0-9]+)",
"url": "\\1"
}
]
},
"free": true,
"id": "backin",
"name": "backin",
"settings": [
{
"default": false,
"enabled": true,
"id": "black_list",
"label": "@60654",
"type": "bool",
"visible": true
},
{
"default": 0,
"enabled": true,
"id": "favorites_servers_list",
"label": "@60655",
"lvalues": [
"No",
"1",
"2",
"3",
"4",
"5"
],
"type": "list",
"visible": false
}
]
}
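
Unlike akstream, the "url" template here is just the captured id ("\\1"), not a full address; it is the connector below that expands the id into the actual player page. Roughly (the id is made up):

# Sketch: the backin pattern yields only an id; get_video_url builds the
# player-page URL from it. "x7k2abc" is a made-up example id.
page_url = "x7k2abc"
player_page = "http://backin.net/stream-%s-500x400.html" % page_url
# -> 'http://backin.net/stream-x7k2abc-500x400.html'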

@@ -0,0 +1,59 @@
# -*- coding: utf-8 -*-
import urllib

from platformcode import logger
from core import httptools
from core import scrapertools


def test_video_exists(page_url):
    logger.info("(page_url='%s')" % page_url)
    data = scrapertools.cache_page(page_url)
    # if '<meta property="og:title" content=""/>' in data:
    #     return False, "The video has been deleted from Backin.net"
    return True, ""


def get_video_url(page_url, premium=False, user="", password="", video_password=""):
    logger.info("page_url=" + page_url)
    video_urls = []
    headers = [["User-Agent", "Mozilla/5.0 (Windows NT 6.1; rv:54.0) Gecko/20100101 Firefox/54.0"]]
    # First request initialises the session; the player page is fetched next
    httptools.downloadpage("http://backin.net/s/streams.php?s=%s" % page_url, headers=headers)
    headers.append(["Referer", "http://backin.net/%s" % page_url])
    data = httptools.downloadpage("http://backin.net/stream-%s-500x400.html" % page_url, headers=headers).data
    # Unpack the eval(function(p,a,c,k,e,d)...) packed JS, if present
    data_pack = scrapertools.find_single_match(data, r"(eval.function.p,a,c,k,e,.*?)\s*</script>")
    if data_pack:
        from lib import jsunpack
        data = jsunpack.unpack(data_pack)
        logger.info("unpacked data=" + data)
    # Video URL
    url = scrapertools.find_single_match(data, r'"src"value="([^"]+)"')
    if not url:
        url = scrapertools.find_single_match(data, r'file\s*:\s*"([^"]+)"')
    logger.info("url=" + str(url))
    video_urls.append([".mp4 [backin]", url + '|' + urllib.urlencode(dict(headers))])
    for video_url in video_urls:
        logger.info("%s - %s" % (video_url[0], video_url[1]))
    return video_urls
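
A minimal usage sketch, assuming a direct call with a bare id (inside KOD these entry points are invoked by the core resolver, and the id here is hypothetical):

# Hypothetical direct call; in practice KOD's core drives these functions.
exists, message = test_video_exists("x7k2abc")
if exists:
    for label, url in get_video_url("x7k2abc"):
        logger.info("%s -> %s" % (label, url))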