Add files via upload
48  plugin.video.alfa/servers/gvideo.json  Normal file
@@ -0,0 +1,48 @@
{
    "active": true,
    "changes": [
        {
            "date": "18/07/2017",
            "description": "Versión inicial"
        }
    ],
    "find_videos": {
        "ignore_urls": [],
        "patterns": [
            {
                "pattern": "(?s)https://youtube.googleapis.com.*?docid=([^(?:&|\")]+)",
                "url": "http://docs.google.com/get_video_info?docid=\\1"
            }
        ]
    },
    "free": true,
    "id": "gvideo",
    "name": "gvideo",
    "settings": [
        {
            "default": false,
            "enabled": true,
            "id": "black_list",
            "label": "Incluir en lista negra",
            "type": "bool",
            "visible": true
        },
        {
            "default": 0,
            "enabled": true,
            "id": "favorites_servers_list",
            "label": "Incluir en lista de favoritos",
            "lvalues": [
                "No",
                "1",
                "2",
                "3",
                "4",
                "5"
            ],
            "type": "list",
            "visible": false
        }
    ],
    "version": 1
}
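
The find_videos entry above is what drives URL resolution for this server: the pattern captures the Google Docs docid from an embedded youtube.googleapis.com link, and the url template rebuilds a get_video_info request from that capture. A minimal sketch of that substitution using plain re; the helper name find_gvideo_urls is illustrative and not part of the commit:

# Sketch only: mirrors the pattern/url pair declared in gvideo.json above.
# find_gvideo_urls is a hypothetical helper, not shipped in this commit.
import re

PATTERN = r'(?s)https://youtube.googleapis.com.*?docid=([^(?:&|")]+)'
URL_TEMPLATE = r'http://docs.google.com/get_video_info?docid=\1'

def find_gvideo_urls(page_html):
    # Substitute the captured docid into the template for every embed found.
    return [re.sub(PATTERN, URL_TEMPLATE, match.group(0))
            for match in re.finditer(PATTERN, page_html)]

The addon's core presumably performs this substitution itself when it reads gvideo.json; the sketch only spells out what the two fields mean.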
40  plugin.video.alfa/servers/gvideo.py  Normal file
@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-

import urllib

from core import httptools
from core import logger
from core import scrapertools


def test_video_exists(page_url):
    # The response body comes back URL-encoded, so the error checks look for
    # the encoded Spanish messages ("no+existe", "Se+ha+excedido+el").
    response = httptools.downloadpage(page_url, cookies=False, headers={"Referer": page_url})
    if "no+existe" in response.data:
        return False, "[gvideo] El video no existe o ha sido borrado"
    if "Se+ha+excedido+el" in response.data:
        return False, "[gvideo] Se ha excedido el número de reproducciones permitidas"
    return True, ""


def get_video_url(page_url, user="", password="", video_password=""):
    video_urls = []
    urls = []
    response = httptools.downloadpage(page_url, cookies=False, headers={"Referer": page_url})

    # Google requires the session cookies on the stream request, so they are
    # collected and appended to every URL as a "|Cookie=..." header string.
    cookies = ""
    cookie = response.headers["set-cookie"].split("HttpOnly, ")
    for c in cookie:
        cookies += c.split(";", 1)[0] + "; "
    data = response.data.decode('unicode-escape')
    data = urllib.unquote_plus(urllib.unquote_plus(data))
    headers_string = "|Cookie=" + cookies

    # Each entry of url_encoded_fmt_stream_map carries an itag (format id) and
    # a direct stream URL; the itag is translated to a human-readable quality.
    url_streams = scrapertools.find_single_match(data, 'url_encoded_fmt_stream_map=(.*)')
    streams = scrapertools.find_multiple_matches(url_streams,
                                                 'itag=(\d+)&url=(.*?)(?:;.*?quality=.*?(?:,|&)|&quality=.*?(?:,|&))')
    itags = {'18': '360p', '22': '720p', '34': '360p', '35': '480p', '37': '1080p', '43': '360p', '59': '480p'}
    for itag, video_url in streams:
        if video_url not in urls:
            # Track the raw URL so duplicate streams are skipped.
            urls.append(video_url)
            video_url += headers_string
            video_urls.append([itags[itag], video_url])

    # Sort from lowest to highest resolution ("360p" -> 360, ...).
    video_urls.sort(key=lambda video_urls: int(video_urls[0].replace("p", "")))
    return video_urls
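
For orientation, a usage sketch of the two entry points defined in gvideo.py, roughly as a channel inside the addon would call them. The import path and the page_url value are assumptions for illustration, and the snippet expects the Kodi/Alfa environment that provides the core modules:

# Usage sketch only, not part of the commit.
# The import path is assumed from the file location; page_url is an example value.
from servers import gvideo

page_url = "https://docs.google.com/get_video_info?docid=EXAMPLE_DOCID"  # illustrative
exists, message = gvideo.test_video_exists(page_url)
if exists:
    # get_video_url returns [quality, url] pairs sorted from lowest to highest resolution.
    for quality, stream_url in gvideo.get_video_url(page_url):
        print "[gvideo] %s %s" % (quality, stream_url)  # Python 2 print, matching the module
else:
    print message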