Compare commits
15 Commits
master...prove_interne
| SHA1 |
|---|
| fdfaacbd4e |
| dc96c1705d |
| c0a5629d8f |
| dc9f552708 |
| 746738a868 |
| a0e45bd896 |
| 073b755100 |
| 26ac0660f7 |
| 5a2c9fb6ac |
| b5b4427941 |
| 7ff96d2248 |
| f544552b5f |
| b0a4688038 |
| 4e1e761c0a |
| f394372f78 |
core/app.py (new file, 49 lines)

```python
import base64
import socket
from http.server import BaseHTTPRequestHandler, HTTPServer
import xbmc, random
from core import jsontools, httptools
from platformcode import logger

hostName = xbmc.getIPAddress()
serverPort = random.randint(49152, 65535)
ret = []
call = 'kodapp://app.kod/open?s={}&ua={}&cb=http://{}:{}/'


class MyServer(BaseHTTPRequestHandler):
    def do_POST(self):
        global ret
        length = int(self.headers['content-length'])
        postvars = self.rfile.read(length).decode()
        ret = jsontools.load(postvars)
        logger.info(ret)
        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        self.end_headers()
        self.wfile.write(b"OK")


def call_url(url):
    global serverPort
    webServer = None
    for t in range(10):  # try up to 10 ports if the chosen one is already in use
        try:
            webServer = HTTPServer((hostName, serverPort), MyServer)
            break
        except socket.error:
            serverPort += 1
    if webServer:
        logger.info("Server started http://%s:%s" % (hostName, serverPort))
        s = base64.b64encode(jsontools.dump({'url': url}).encode()).decode()
        ua = base64.b64encode(httptools.get_user_agent().encode()).decode()
        uri = call.format(s, ua, hostName, serverPort)
        if logger.DEBUG_ENABLED:
            uri += '&l=1'
        activity = 'StartAndroidActivity("com.kodapp","android.intent.action.VIEW","",{})'.format(uri)
        logger.info(activity)
        xbmc.executebuiltin(activity)
        while not ret:
            webServer.handle_request()
        logger.info("Server stopped.")
    return ret
```
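call_url works by starting a throwaway HTTP server on the Kodi device, handing the target URL and User-Agent (base64-encoded) to the com.kodapp companion app through StartAndroidActivity, and blocking on handle_request() until the app POSTs the fetched result back to the cb callback address. The payload layout below is an assumption inferred from how downloadpage consumes the result further down (a JSON list of objects selected by their 'key'); a minimal sketch that simulates the companion app's callback for local testing:

```python
# Hypothetical simulation of the companion app's callback, for local testing only.
# The payload shape ('key' == 'request' / 'html' / 'cookie') is inferred from the
# consuming code in downloadpage(); the cookie attribute names are made up.
import json
from urllib.request import Request, urlopen

def fake_callback(callback_url):
    payload = [
        {'key': 'request', 'status': 200},
        {'key': 'html', 'html': '<html>page body fetched by the app</html>'},
        {'key': 'cookie',
         'cf_clearance': {'value': 'abc123', 'path': '/'}},  # assumed attributes
    ]
    req = Request(callback_url, data=json.dumps(payload).encode(),
                  headers={'Content-Type': 'application/json'})
    with urlopen(req) as resp:
        return resp.read()  # MyServer.do_POST answers b"OK"

# e.g. fake_callback('http://%s:%s/' % (hostName, serverPort)) while call_url() is blocking
```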
```diff
@@ -30,7 +30,7 @@ cookies_file = os.path.join(config.get_data_path(), "cookies.dat")
 
 # Headers by default, if nothing is specified
 default_headers = dict()
-default_headers["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36" % config.get_setting("chrome_ua_version")
+default_headers["User-Agent"] = "Mozilla/5.0 (Linux; Android 13; SM-A205U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36" % config.get_setting("chrome_ua_version")
 default_headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"
 default_headers["Accept-Language"] = "it-IT,it;q=0.8,en-US;q=0.5,en;q=0.3"
 default_headers["Accept-Charset"] = "UTF-8"
```
```diff
@@ -418,6 +418,8 @@ def downloadpage(url, **opt):
             return type('HTTPResponse', (), response)
 
     response_code = req.status_code
+    response_headers = req.headers
+    cookies = req.cookies
    response['url'] = req.url
 
    response['data'] = req.content if req.content else ''
```
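The two added assignments snapshot the first response's headers and cookies before the Cloudflare fallback below replaces the page body, so the final response fields are taken from these locals instead of re-reading req after the fallback has run (see the @@ -448 hunk further down). A self-contained illustration of the pattern, assuming req is a requests.Response as the attribute names suggest:

```python
# Hypothetical, self-contained illustration of the snapshot-before-retry pattern.
# The function name and the simplified Cloudflare check are mine, not from the addon.
import requests

def fetch_with_fallback(url):
    req = requests.get(url)            # first attempt
    response_headers = req.headers     # snapshot, as in the diff
    cookies = req.cookies
    data = req.content
    if req.status_code in (403, 429, 503):
        # stand-in for the companion-app fallback: only the body is replaced,
        # the headers/cookies of the original attempt are kept
        data = b"<html>body fetched another way</html>"
    return {'data': data, 'headers': response_headers, 'cookies': cookies}
```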
```diff
@@ -431,13 +433,25 @@ def downloadpage(url, **opt):
     if req.headers.get('Server', '').startswith('cloudflare') and response_code in [429, 503, 403]\
             and not opt.get('CF', False) and 'Ray ID' in response['data'] and not opt.get('post', None):
         logger.debug("CF retry... for domain: %s" % domain)
-        from lib import proxytranslate
-        gResp = proxytranslate.process_request_proxy(url)
-        if gResp:
-            req = gResp['result']
-            response_code = req.status_code
-            response['url'] = gResp['url']
-            response['data'] = gResp['data']
+        from core import app
+        ret = app.call_url(url)
+        for elem in ret:
+            if elem['key'] == 'html':
+                response['data'] = elem['html']
+            elif elem['key'] == 'cookie':
+                for cookie in elem:
+                    if cookie == 'key':  # not a cookie
+                        continue
+                    elem[cookie]['domain'] = domain
+                    set_cookies(elem[cookie], False)
+        response_code = [r['status'] for r in ret if r['key'] == 'request'][0]
+        # from lib import proxytranslate
+        # gResp = proxytranslate.process_request_proxy(url)
+        # if gResp:
+        #     req = gResp['result']
+        #     response_code = req.status_code
+        #     response['url'] = gResp['url']
+        #     response['data'] = gResp['data']
 
     if not response['data']:
         response['data'] = ''
```
```diff
@@ -448,8 +462,8 @@ def downloadpage(url, **opt):
     response['json'] = dict()
 
     response['code'] = response_code
-    response['headers'] = req.headers
-    response['cookies'] = req.cookies
+    response['headers'] = response_headers
+    response['cookies'] = cookies
 
     info_dict, response = fill_fields_post(info_dict, req, response, req_headers, inicio)
 
```
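From a caller's point of view nothing changes: downloadpage still returns an HTTPResponse-like object, and the com.kodapp fallback only kicks in when Cloudflare answers 403/429/503 with a 'Ray ID' challenge page on a GET request. A hedged usage sketch (the URL is an example, not from the diff):

```python
# Hypothetical usage from a channel: the Cloudflare fallback is transparent.
from core import httptools

resp = httptools.downloadpage("https://example.com/")  # example URL
if resp.code == 200:
    html = resp.data
```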
```diff
@@ -22,7 +22,7 @@ addonname = addon.getAddonInfo('name')
 
 _hdr_pat = re.compile("^@@ -(\d+),?(\d+)? \+(\d+),?(\d+)? @@.*")
 
-branch = 'master'
+branch = 'prove_interne'
 user = 'kodiondemand'
 repo = 'addon'
 addonDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
```
```diff
@@ -301,7 +301,7 @@ def get_ua_list():
     try:
         current_ver = config.get_setting("chrome_ua_version", default="").split(".")
         data = httptools.downloadpage(url, alfa_s=True).data
-        new_ua_ver = scrapertools.find_single_match(data, "win64,stable,([^,]+),")
+        new_ua_ver = scrapertools.find_single_match(data, "android,stable,([^,]+),")
 
         if not current_ver:
             config.set_setting("chrome_ua_version", new_ua_ver)
```
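The updater keeps filling chrome_ua_version, but it now reads the Android Chrome build from the scraped version list instead of the Win64 one, matching the new Android User-Agent in default_headers. The scraped data is assumed to contain comma-separated rows of the form platform,channel,version,...; a hedged illustration of the pattern change with plain re and a made-up sample:

```python
# Made-up sample rows; the real data comes from whatever URL get_ua_list() downloads.
import re

data = "win64,stable,124.0.6367.91,...\nandroid,stable,124.0.6367.82,..."

old_ver = re.search(r"win64,stable,([^,]+),", data).group(1)    # what the old pattern picked
new_ver = re.search(r"android,stable,([^,]+),", data).group(1)  # what the new pattern picks
print(old_ver, new_ver)
```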