From 408bca028f8c1d18667c49eb7246abab1aa8ba70 Mon Sep 17 00:00:00 2001
From: mac12m99 <10120390+mac12m99@users.noreply.github.com>
Date: Sun, 26 Sep 2021 14:33:42 +0200
Subject: [PATCH] Fix toonitalia, alcuni link su eurostreaming, aggiunto server
VOE (by alfa)
---
lib/proxytranslate.py | 67 +++++++++++++++++++++----------------------
lib/unshortenit.py | 8 +++++-
servers/voe.json | 42 +++++++++++++++++++++++++++
servers/voe.py | 36 +++++++++++++++++++++++
4 files changed, 118 insertions(+), 35 deletions(-)
create mode 100644 servers/voe.json
create mode 100644 servers/voe.py
diff --git a/lib/proxytranslate.py b/lib/proxytranslate.py
index 47be03b9..aabc6958 100644
--- a/lib/proxytranslate.py
+++ b/lib/proxytranslate.py
@@ -53,47 +53,46 @@ def process_request_proxy(url):
else:
print(target_url)
- return_html = session.get(target_url, timeout=20)
-
- if not return_html:
+ result = session.get(target_url, timeout=20)
+ if not result:
return
+ data = result.text
+ # logger.debug(data)
+    if 'Google Traduttore' in data:
+ url_request = checker_url(
+ result.text,
+ BASE_URL_PROXY + '/translate_p?hl=it&sl=' + SL + '&tl=' + TL + '&u='
+ )
- url_request = checker_url(
- return_html.text,
- BASE_URL_PROXY + '/translate_p?hl=it&sl=' + SL + '&tl=' + TL + '&u='
- )
+ if logger:
+ logger.debug(url_request)
+ else:
+ print(url_request)
- if logger:
- logger.debug(url_request)
- else:
- print(url_request)
-
- request_final = session.get(
- url_request,
- timeout=20
- )
-
- url_request_proxy = checker_url(
- request_final.text, 'translate.google')
-
- if logger:
- logger.debug(url_request_proxy)
- else:
- print(url_request_proxy)
-
- data = None
- result = None
- while not data or 'Sto traducendo' in data:
- time.sleep(0.5)
- result = session.get(
- url_request_proxy,
+ request_final = session.get(
+ url_request,
timeout=20
)
- data = result.content.decode('utf-8', 'ignore')
- if not PY3:
- data = data.encode('utf-8')
+
+ url_request_proxy = checker_url(
+ request_final.text, 'translate.google')
+
if logger:
logger.debug(url_request_proxy)
+ else:
+ print(url_request_proxy)
+
+ data = None
+ result = None
+ while not data or 'Sto traducendo' in data:
+ time.sleep(0.5)
+ result = session.get(
+ url_request_proxy,
+ timeout=20
+ )
+ data = result.text
+ if logger:
+ logger.debug(url_request_proxy)
data = re.sub('\s(\w+)=(?!")([^<>\s]+)', r' \1="\2"', data)
data = re.sub('https://translate\.googleusercontent\.com/.*?u=(.*?)&usg=[A-Za-z0-9_-]+', '\\1', data)
diff --git a/lib/unshortenit.py b/lib/unshortenit.py
index b9f78684..6636603f 100644
--- a/lib/unshortenit.py
+++ b/lib/unshortenit.py
@@ -42,6 +42,7 @@ class UnshortenIt(object):
_swzz_regex = r'swzz\.xyz'
_stayonline_regex = r'stayonline\.pro'
_snip_regex = r'[0-9a-z]+snip\.|uprotector\.xyz'
+ _linksafe_regex = r'linksafe\.cc'
# for services that only include real link inside iframe
_simple_iframe_regex = r'cryptmango|xshield\.net|vcrypt\.club'
# for services that only do redirects
@@ -49,7 +50,7 @@ class UnshortenIt(object):
listRegex = [_adfly_regex, _linkbucks_regex, _adfocus_regex, _lnxlu_regex, _shst_regex, _hrefli_regex, _anonymz_regex,
_shrink_service_regex, _rapidcrypt_regex, _simple_iframe_regex, _linkup_regex, _linkhub_regex,
- _swzz_regex, _stayonline_regex, _snip_regex, _simple_redirect]
+ _swzz_regex, _stayonline_regex, _snip_regex, _linksafe_regex, _simple_redirect]
_maxretries = 5
@@ -98,6 +99,8 @@ class UnshortenIt(object):
uri, code = self._unshorten_stayonline(uri)
if re.search(self._snip_regex, uri, re.IGNORECASE):
uri, code = self._unshorten_snip(uri)
+ if re.search(self._linksafe_regex, uri, re.IGNORECASE):
+ uri, code = self._unshorten_linksafe(uri)
if re.search(self._simple_redirect, uri, re.IGNORECASE):
p = httptools.downloadpage(uri)
uri = p.url
@@ -666,6 +669,9 @@ class UnshortenIt(object):
# uri = new_uri
return uri, 200
+ def _unshorten_linksafe(self, uri):
+ return b64decode(uri.split('?url=')[-1]).decode(), 200
+
def decrypt_aes(text, key):
try:
diff --git a/servers/voe.json b/servers/voe.json
new file mode 100644
index 00000000..c2c75f3e
--- /dev/null
+++ b/servers/voe.json
@@ -0,0 +1,42 @@
+{
+ "active": true,
+ "find_videos": {
+ "ignore_urls": [],
+ "patterns": [
+ {
+ "pattern": "voe.sx/(?:e/|)([A-z0-9]+)",
+ "url": "https://voe.sx/e/\\1"
+ }
+ ]
+ },
+ "free": true,
+ "id": "voe",
+ "name": "VOE",
+ "settings": [
+ {
+ "default": false,
+ "enabled": true,
+ "id": "black_list",
+ "label": "@60654",
+ "type": "bool",
+ "visible": true
+ },
+ {
+ "default": 0,
+ "enabled": true,
+ "id": "favorites_servers_list",
+ "label": "@60655",
+ "lvalues": [
+ "No",
+ "1",
+ "2",
+ "3",
+ "4",
+ "5"
+ ],
+ "type": "list",
+ "visible": false
+ }
+ ],
+ "thumbnail": "https://i.postimg.cc/4xzBfwBd/voe.png"
+}
\ No newline at end of file
diff --git a/servers/voe.py b/servers/voe.py
new file mode 100644
index 00000000..cfc780a9
--- /dev/null
+++ b/servers/voe.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+# -*- Server Voe -*-
+# -*- Created for Alfa-addon -*-
+# -*- By the Alfa Develop Group -*-
+
+from core import httptools
+from core import scrapertools
+from platformcode import logger
+from platformcode import config
+import sys
+
+PY3 = False
+if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
+
+
+def test_video_exists(page_url):
+ global data
+ logger.info("(page_url='%s')" % page_url)
+ data = httptools.downloadpage(page_url).data
+
+ if "File not found" in data or "File is no longer available" in data:
+ return False, config.get_localized_string(70449) % "VOE"
+ return True, ""
+
+
+def get_video_url(page_url, premium=False, user="", password="", video_password=""):
+ logger.info("(page_url='%s')" % page_url)
+ video_urls = []
+ video_srcs = scrapertools.find_multiple_matches(data, r"src: '([^']+)'")
+ if not video_srcs:
+ bloque = scrapertools.find_single_match(data, "sources.*?\}")
+ video_srcs = scrapertools.find_multiple_matches(bloque, ': "([^"]+)')
+ for url in video_srcs:
+ video_urls.append([" [Voe]", url])
+
+ return video_urls