fix vidtome and vcrypt improvements
@@ -44,7 +44,7 @@ directIP = {
     'akvideo.stream': '31.220.1.77',
     'vcrypt.net': '31.220.1.77',
     'vcrypt.pw': '31.220.1.77',
-    'vidtome.host': '94.75.219.1"',
+    # 'vidtome.host': '94.75.219.1',
     'nored.icu': '31.220.1.77',
     'wstream.icu': '31.220.1.77',
     'wstream.video': '31.220.1.77',
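
The directIP map above pairs hostnames with fixed IP addresses, which suggests these hosts are pinned to known IPs instead of being resolved through DNS; this hunk drops the forced IP for vidtome.host (the removed entry also carried a stray quote). How unshortenit actually consumes the map is not shown in this hunk, so the following is only a minimal sketch of one way such a pin could be applied; pin_host() is a hypothetical helper.

# Hedged sketch: swap the hostname for the pinned IP and keep the original name
# in a Host header so name-based virtual hosting still works.
from urllib.parse import urlsplit, urlunsplit

directIP = {'vcrypt.net': '31.220.1.77'}

def pin_host(url):
    parts = urlsplit(url)
    ip = directIP.get(parts.hostname)
    if not ip:
        return url, {}
    pinned = urlunsplit((parts.scheme, ip, parts.path, parts.query, parts.fragment))
    return pinned, {'Host': parts.hostname}

print(pin_host('https://vcrypt.net/shield/abc'))
# -> ('https://31.220.1.77/shield/abc', {'Host': 'vcrypt.net'})

For HTTPS the original hostname still matters for SNI and certificate checks (note the verify=False on the vcrypt request below), so a malformed entry in this map can break a host outright.
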
@@ -506,29 +506,14 @@ class UnshortenIt(object):
             return uri, 0
         r = None

-        def decrypt(str):
-            try:
-                from Cryptodome.Cipher import AES
-            except:
-                from Crypto.Cipher import AES
-
-            str = str.replace("_ppl_", "+").replace("_eqq_", "=").replace("_sll_", "/")
-            iv = b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
-            key = b"naphajU2usWUswec"
-            decoded = b64decode(str)
-            decoded = decoded + b'\0' * (len(decoded) % 16)
-            crypt_object = AES.new(key, AES.MODE_CBC, iv)
-            decrypted = b''
-            for p in range(0, len(decoded), 16):
-                decrypted += crypt_object.decrypt(decoded[p:p + 16]).replace(b'\0', b'')
-            return decrypted.decode('ascii')
         if 'shield' in uri.split('/')[-2]:
-            uri = decrypt(uri.split('/')[-1])
+            uri = decrypt_aes(uri.split('/')[-1], b"naphajU2usWUswec")
         else:
             if 'sb/' in uri or 'akv/' in uri or 'wss/' in uri or 'wsd/' in uri:
                 import datetime, hashlib
                 from base64 import b64encode
-                ip = urlopen('https://api.ipify.org/').read()
+                # ip = urlopen('https://api.ipify.org/').read()
+                ip = b'31.220.1.77'
                 day = datetime.date.today().strftime('%Y%m%d')
                 if PY3: day = day.encode()
                 headers = {
@@ -536,9 +521,9 @@ class UnshortenIt(object):
                 }
                 spl = uri.split('/')
                 spl[3] += '1'
-                if spl[3] == 'wss1':
+                if spl[3] in ['wss1', 'sb1']:
                     spl[4] = b64encode(spl[4].encode('utf-8')).decode('utf-8')
-                    uri = '/'.join(spl)
+                uri = '/'.join(spl)
                 r = httptools.downloadpage(uri, timeout=self._timeout, headers=headers, follow_redirects=False, verify=False)
                 if 'Wait 1 hour' in r.data:
                     uri = ''
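
For the sb/akv/wss/wsd style of vcrypt link, the updated code appends '1' to the service segment of the path and, for wss1 and now also sb1, base64-encodes the id segment before requesting the rewritten URL with the constructed headers (note the pinned ip b'31.220.1.77' that replaces the api.ipify.org lookup above). A small worked example of that rewrite; the sample link is illustrative only.

from base64 import b64encode

uri = 'https://vcrypt.net/sb/abcdef123'
spl = uri.split('/')
spl[3] += '1'                    # 'sb' -> 'sb1'
if spl[3] in ['wss1', 'sb1']:
    spl[4] = b64encode(spl[4].encode('utf-8')).decode('utf-8')
print('/'.join(spl))             # https://vcrypt.net/sb1/YWJjZGVmMTIz
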
@@ -656,22 +641,40 @@ class UnshortenIt(object):
         if 'out_generator' in uri:
             uri = re.findall('url=(.*)$', uri)[0]
         elif '/decode/' in uri:
-            scheme, netloc, path, query, fragment = urlsplit(uri)
-            splitted = path.split('/')
-            splitted[1] = 'outlink'
-            r = httptools.downloadpage(uri, follow_redirects=False, post={'url': splitted[2]})
-            if 'location' in r.headers and r.headers['location']:
-                new_uri = r.headers['location']
-            else:
-                r = httptools.downloadpage(scheme + '://' + netloc + "/".join(splitted) + query + fragment,
-                                           follow_redirects=False, post={'url': splitted[2]})
-                if 'location' in r.headers and r.headers['location']:
-                    new_uri = r.headers['location']
-            if new_uri and new_uri != uri:
-                uri = new_uri
+            uri = decrypt_aes(uri.split('/')[-1], b"whdbegdhsnchdbeh")
+
+            # scheme, netloc, path, query, fragment = urlsplit(uri)
+            # splitted = path.split('/')
+            # splitted[1] = 'outlink'
+            # r = httptools.downloadpage(uri, follow_redirects=False, post={'url': splitted[2]})
+            # if 'location' in r.headers and r.headers['location']:
+            #     new_uri = r.headers['location']
+            # else:
+            #     r = httptools.downloadpage(scheme + '://' + netloc + "/".join(splitted) + query + fragment,
+            #                                follow_redirects=False, post={'url': splitted[2]})
+            #     if 'location' in r.headers and r.headers['location']:
+            #         new_uri = r.headers['location']
+            # if new_uri and new_uri != uri:
+            #     uri = new_uri
         return uri, 200


+def decrypt_aes(text, key):
+    try:
+        from Cryptodome.Cipher import AES
+    except:
+        from Crypto.Cipher import AES
+
+    text = text.replace("_ppl_", "+").replace("_eqq_", "=").replace("_sll_", "/")
+    iv = b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
+    decoded = b64decode(text)
+    decoded = decoded + b'\0' * (len(decoded) % 16)
+    crypt_object = AES.new(key, AES.MODE_CBC, iv)
+    decrypted = b''
+    for p in range(0, len(decoded), 16):
+        decrypted += crypt_object.decrypt(decoded[p:p + 16]).replace(b'\0', b'')
+    return decrypted.decode('ascii')
+
 def unwrap_30x_only(uri, timeout=10):
     unshortener = UnshortenIt()
     uri, status = unshortener.unwrap_30x(uri, timeout=timeout)
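
decrypt_aes(), added above as a module-level helper, reverses a simple token format: the plaintext is zero-padded to the AES block size, encrypted with AES-CBC under a zero IV, base64-encoded, and the characters '+', '=' and '/' are swapped for the URL-safe markers _ppl_, _eqq_ and _sll_. The shield links use the key b"naphajU2usWUswec" and the /decode/ links b"whdbegdhsnchdbeh". Below is a minimal round-trip sketch of that format, assuming pycryptodome (or the older pycrypto) is installed; encode_aes() is hypothetical and exists only to mirror the decryption steps shown above.

from base64 import b64encode, b64decode
try:
    from Cryptodome.Cipher import AES   # pycryptodome
except ImportError:
    from Crypto.Cipher import AES       # legacy pycrypto

KEY = b"naphajU2usWUswec"   # 16-byte key used for the 'shield' links
IV = b"\0" * 16             # zero IV, as in decrypt_aes()

def encode_aes(text, key):
    data = text.encode('ascii')
    data += b'\0' * (-len(data) % 16)   # zero-pad to the 16-byte block size
    token = b64encode(AES.new(key, AES.MODE_CBC, IV).encrypt(data)).decode('ascii')
    return token.replace("+", "_ppl_").replace("=", "_eqq_").replace("/", "_sll_")

def decode_aes(token, key):
    token = token.replace("_ppl_", "+").replace("_eqq_", "=").replace("_sll_", "/")
    plain = AES.new(key, AES.MODE_CBC, IV).decrypt(b64decode(token))
    return plain.rstrip(b'\0').decode('ascii')

assert decode_aes(encode_aes("https://example.com/video", KEY), KEY) == "https://example.com/video"

decode_aes() strips the zero padding with rstrip(); the in-tree decrypt_aes() removes every b'\0' from each decrypted block instead, which has the same effect as long as the plaintext itself contains no NUL bytes.
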
@@ -709,22 +712,22 @@ def findlinks(text):
         text += '\n' + unshorten(matches[0])[0]
     elif matches:
         # non threaded for webpdb
-        # for match in matches:
-        #     sh = unshorten(match)[0]
-        #     text += '\n' + sh
-        import sys
-        if sys.version_info[0] >= 3:
-            from concurrent import futures
-        else:
-            from concurrent_py2 import futures
-        with futures.ThreadPoolExecutor() as executor:
-            unshList = [executor.submit(unshorten, match) for match in matches]
-            for link in futures.as_completed(unshList):
-                if link.result()[0] not in matches:
-                    links = link.result()[0]
-                    if type(links) == list:
-                        for l in links:
-                            text += '\n' + l
-                    else:
-                        text += '\n' + str(link.result()[0])
+        for match in matches:
+            sh = unshorten(match)[0]
+            text += '\n' + sh
+        # import sys
+        # if sys.version_info[0] >= 3:
+        #     from concurrent import futures
+        # else:
+        #     from concurrent_py2 import futures
+        # with futures.ThreadPoolExecutor() as executor:
+        #     unshList = [executor.submit(unshorten, match) for match in matches]
+        #     for link in futures.as_completed(unshList):
+        #         if link.result()[0] not in matches:
+        #             links = link.result()[0]
+        #             if type(links) == list:
+        #                 for l in links:
+        #                     text += '\n' + l
+        #             else:
+        #                 text += '\n' + str(link.result()[0])
     return text
@@ -4,12 +4,16 @@
     "ignore_urls": [],
     "patterns": [
       {
-        "pattern": "(backin.net/fastids/[0-9]+)",
+        "pattern": "https?://(backin.net/fastid[a-z]+/[0-9]+)",
         "url": "http://\\1"
       },
       {
-        "pattern": "backin.net/([a-zA-Z0-9]{10,})",
-        "url": "\\1"
+        "pattern": "https?://backin.net/([a-zA-Z0-9]{10,})",
+        "url": "http://backin.net/stream-\\1-500x400.html"
       },
+      {
+        "pattern": "https?://backin.net/s/streams.php?s=([a-zA-Z0-9]{10,})",
+        "url": "http://backin.net/stream-\\1-500x400.html"
+      }
     ]
   },
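
Each entry in these server JSON files pairs a regex with a URL template whose \\1 backreference rebuilds the canonical link; the backin.net rules now anchor on the scheme and send plain ids as well as s/streams.php links straight to the stream-<id>-500x400.html page. A minimal sketch of how one pattern/url pair can be applied (the matching code itself is not part of this diff):

import re

pattern = r"https?://backin.net/([a-zA-Z0-9]{10,})"
template = r"http://backin.net/stream-\1-500x400.html"

def apply_rule(uri):
    m = re.search(pattern, uri)
    return m.expand(template) if m else uri

print(apply_rule("https://backin.net/abcdef12345"))
# -> http://backin.net/stream-abcdef12345-500x400.html
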
@@ -11,11 +11,11 @@ except ImportError:
 def test_video_exists(page_url):
     logger.debug("(page_url='%s')" % page_url)

-    if 'http://' in page_url: # fastids
+    if 'fastid' in page_url: # fastid
         page_url = httptools.downloadpage(page_url, follow_redirects=False, only_headers=True).headers['location']
-        page_url = scrapertools.find_single_match(page_url, 'backin.net/([a-zA-Z0-9]+)')
+        page_url = "http://backin.net/stream-%s-500x400.html" % scrapertools.find_single_match(page_url, 'backin.net/([a-zA-Z0-9]+)')
     global data
-    data = httptools.downloadpage("http://backin.net/stream-%s-500x400.html" % page_url).data
+    data = httptools.downloadpage(page_url).data

     if 'File Not Found' in data:
         return False, config.get_localized_string(70449) % "backin"
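
For fastid links, test_video_exists() now issues a headers-only request with redirects disabled and reads the Location header to find the real backin.net id, then fetches the stream-<id>-500x400.html page it builds from it. The same redirect-capture idea expressed with only the standard library, as a hedged sketch (the addon's httptools wrapper does this in a single downloadpage call; the sample URL is illustrative):

import http.client
from urllib.parse import urlsplit

def redirect_target(url, timeout=10):
    # Ask for headers only and do not follow the redirect, then read Location.
    parts = urlsplit(url)
    Conn = http.client.HTTPSConnection if parts.scheme == 'https' else http.client.HTTPConnection
    conn = Conn(parts.netloc, timeout=timeout)
    path = (parts.path or '/') + ('?' + parts.query if parts.query else '')
    conn.request('HEAD', path)
    location = conn.getresponse().getheader('Location')
    conn.close()
    return location

# redirect_target('http://backin.net/fastidvideo/12345') would return whatever
# URL the 3xx response points at (None if the service answers without a redirect).
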
@@ -4,12 +4,12 @@
     "ignore_urls": [],
     "patterns": [
      {
-        "pattern": "(?:vidtome.stream|vidto.me)/(?!api)(?:embed-)?([A-z0-9]+)",
-        "url": "http://vidtome.stream/\\1.html"
+        "pattern": "https?://vidtome.host/(?!api)(?:embed-)?([A-z0-9]+)",
+        "url": "http://vidtome.host/\\1.html"
       },
       {
-        "pattern": "vidtome.stream/api/fastredirect/streaming\\.php\\?file_real=([A-z0-9]+)",
-        "url": "http://vidtome.stream/api/fastredirect/streaming.php?file_real=\\1"
+        "pattern": "https?://vidtome.host/api/fastredirect/streaming\\.php\\?file_real=([A-z0-9]+)",
+        "url": "http://vidtome.host/api/fastredirect/streaming.php?file_real=\\1"
       }
     ]
   },
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-

-from core import httptools, scrapertools
+from core import httptools, scrapertools, servertools
 from platformcode import logger, config
 from lib import jsunpack

@@ -21,7 +21,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
     code = scrapertools.find_single_match(data, 'name="code" value="([^"]+)')
     hash = scrapertools.find_single_match(data, 'name="hash" value="([^"]+)')
     post = "op=download1&code=%s&hash=%s&imhuman=Proceed+to+video" %(code, hash)
-    data = httptools.downloadpage("http://vidtome.co/playvideos/%s" %code, post=post).data
+    data = httptools.downloadpage("http://%s/playvideos/%s" % (servertools.get_server_host("vidtome")[0], code), post=post).data
     packed = scrapertools.find_multiple_matches(data, r'(eval\s?\(function\(p,a,c,k,e,d\).*?\n)')
     for p in packed:
         data = jsunpack.unpack(p)