From 376397ebdd599835b73dfbc13adc12ba45cb1646 Mon Sep 17 00:00:00 2001
From: pipcat
Date: Tue, 17 Jul 2018 11:15:58 +0200
Subject: [PATCH] Correcciones powvideo y mega

---
 plugin.video.alfa/lib/megaserver/client.py | 17 ++++++++++++++---
 plugin.video.alfa/servers/powvideo.py      |  5 -----
 2 files changed, 14 insertions(+), 8 deletions(-)

diff --git a/plugin.video.alfa/lib/megaserver/client.py b/plugin.video.alfa/lib/megaserver/client.py
index 41adc3e2..09df12a9 100644
--- a/plugin.video.alfa/lib/megaserver/client.py
+++ b/plugin.video.alfa/lib/megaserver/client.py
@@ -4,6 +4,7 @@ import random
 import struct
 import time
 import urllib
+import requests
 from threading import Thread
 
 from file import File
@@ -132,9 +133,19 @@ class Client(object):
             raise Exception("Enlace no valido")
 
     def api_req(self, req, get=""):
-        seqno = random.randint(0, 0xFFFFFFFF)
-        url = 'https://g.api.mega.co.nz/cs?id=%d%s' % (seqno, get)
-        return json.loads(self.post(url, json.dumps([req])))[0]
+        seqno = random.randint(0, 0xFFFFFFFF)
+        url = 'https://g.api.mega.co.nz/cs?id=%d%s' % (seqno, get)
+        # ~ return json.loads(self.post(url, json.dumps([req])))[0]
+
+        req = requests.post(
+            url,
+            params={'id': seqno},
+            data=json.dumps([req]),
+            timeout=30)
+        json_resp = json.loads(req.text)
+        if isinstance(json_resp, int):
+            raise Exception("MEGA api_req error %d" % json_resp)
+        return json_resp[0]
 
     def base64urldecode(self,data):
         data += '=='[(2 - len(data) * 3) % 4:]
diff --git a/plugin.video.alfa/servers/powvideo.py b/plugin.video.alfa/servers/powvideo.py
index 79991591..2696c972 100755
--- a/plugin.video.alfa/servers/powvideo.py
+++ b/plugin.video.alfa/servers/powvideo.py
@@ -27,11 +27,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
     referer = page_url.replace('iframe', 'preview')
     data = httptools.downloadpage(page_url, headers={'referer': referer}).data
 
-    if data == "File was deleted":
-        return "El archivo no existe o ha sido borrado"
-
-    if 'Video is processing now' in data:
-        return "El vídeo está siendo procesado, intentalo de nuevo mas tarde"
     packed = scrapertools.find_single_match(data, "")
     unpacked = jsunpack.unpack(packed)
 