cache DoH

This commit is contained in:
marco
2019-12-21 16:47:29 +01:00
parent ed5903d8e8
commit ac556bf6e3
3 changed files with 93 additions and 35 deletions

View File

@@ -10,21 +10,23 @@ from platformcode import config
# __channel__ = "streamingaltadefinizione"
# host = config.get_channel_url(__channel__)
host = headers = ''
# host = headers = ''
host = 'https://www.popcornstream.fun'
list_servers = ['verystream', 'openload', 'wstream']
list_quality = ['1080p', 'HD', 'DVDRIP', 'SD', 'CAM']
def findhost():
    """Resolve the channel host (currently a no-op).

    Host auto-discovery is disabled: the host is hard-coded at module level
    (``host = 'https://www.popcornstream.fun'``).  The function is kept only
    for interface compatibility with callers that still invoke ``findhost()``
    before building URLs.

    Returns:
        None.
    """
    # NOTE(review): the previous implementation followed the redirect of
    # https://www.popcornstream.info and stripped the Google
    # "search?q=site:" prefix from the Location header to derive the host;
    # it was disabled in favour of the hard-coded host above.
    pass
@support.menu
def mainlist(item):

View File

@@ -413,12 +413,12 @@ def downloadpage(url, **opt):
from specials import resolverdns
session = resolverdns.session()
if opt.get('session', False):
session = opt['session'] # same session to speed up search
logger.info('same session')
elif not opt.get('use_requests', True):
from lib import cloudscraper
session = cloudscraper.create_scraper()
# if opt.get('session', False):
# session = opt['session'] # same session to speed up search
# logger.info('same session')
# elif not opt.get('use_requests', True):
# from lib import cloudscraper
# session = cloudscraper.create_scraper()
# Headers by default, if nothing is specified
req_headers = default_headers.copy()
@@ -518,7 +518,6 @@ def downloadpage(url, **opt):
# Makes the request with GET method
req = session.get(url, allow_redirects=opt.get('follow_redirects', True),
timeout=opt['timeout'])
except Exception as e:
from lib import requests
if not opt.get('ignore_response_code', False) and not proxy_data.get('stat', ''):

View File

@@ -1,11 +1,19 @@
# -*- coding: utf-8 -*-
import os
import ssl
import xbmc
from core import jsontools
from lib.requests_toolbelt.adapters import host_header_ssl
from lib import requests, doh
from platformcode import logger
from platformcode import logger, config
import re
try:
import _sqlite3 as sql
except:
import sqlite3 as sql
class CustomSocket(ssl.SSLSocket):
def __init__(self, *args, **kwargs):
@@ -52,26 +60,75 @@ class CipherSuiteAdapter(host_header_ssl.HostHeaderSSLAdapter):
# ------------------------------------------------------------------------------- #
db = os.path.join(config.get_data_path(), 'kod_db.sqlite')
class session(requests.Session):
def request(self, method, url,
params=None, data=None, headers=None, cookies=None, files=None,
auth=None, timeout=None, allow_redirects=True, proxies=None,
hooks=None, stream=None, verify=None, cert=None, json=None):
def __init__(self):
    # Open the on-disk DNS cache (module-level `db` points at
    # kod_db.sqlite under the config data path) and keep one cursor
    # for the lifetime of the session.
    self.conn = sql.connect(db)
    self.cur = self.conn.cursor()
    # Initialise the underlying requests.Session machinery.
    super(session, self).__init__()
def getIp(self, domain):
    """Return the IP for *domain* as a string.

    Lookup order: the sqlite DNS cache first, then a DNS-over-HTTPS query
    (whose answer is written back to the cache), and finally the domain
    itself as a last-resort fallback so the request proceeds through the
    platform's normal DNS resolution.

    Args:
        domain: hostname to resolve.

    Returns:
        str: an IP address, or *domain* itself when resolution failed.
    """
    import time
    t = time.time()
    ip = None
    try:
        self.cur.execute('select ip from dnscache where domain=?', (domain,))
        ip = self.cur.fetchall()[0][0]
    except (sql.Error, IndexError):
        # Table missing or no cached row: fall through to the DoH query.
        pass
    if not ip:  # not cached
        try:
            ip = doh.query(domain)[0]
            logger.info('Query DoH: ' + domain + ' = ' + str(ip))
            self.writeToCache(domain, ip)
        except Exception:
            logger.error('Failed to resolve hostname, fallback to normal dns')
            import traceback
            # BUG FIX: format_exc() returns the traceback as a string;
            # print_exc() writes to stderr and returns None, so the
            # original logged literally nothing.
            logger.error(traceback.format_exc())
            # BUG FIX: return the bare domain string, not [domain]: the
            # caller concatenates this value into a URL and a list would
            # raise TypeError there.
            ip = domain
    logger.info('tempo getIP: ' + str(time.time()-t))
    return ip
def writeToCache(self, domain, ip):
    """Persist a domain -> ip pair in the sqlite DNS cache.

    Creates the ``dnscache`` table on demand; the upsert also covers the
    case where a row for *domain* already exists (the table has a
    UNIQUE/PRIMARY KEY constraint on domain).

    Args:
        domain: hostname used as the cache key.
        ip: resolved IP address stored for that hostname.
    """
    try:
        self.cur.execute('insert into dnscache values(?,?)', (domain, ip))
    except sql.Error:
        # First run (table missing) or duplicate domain.  Make sure the
        # table exists, then upsert.  BUG FIX: the original only created
        # the table here and never retried the insert, so the first
        # resolution of each domain was silently dropped from the cache.
        self.cur.execute("""CREATE TABLE IF NOT EXISTS dnscache(
            "domain" TEXT NOT NULL UNIQUE,
            "ip" TEXT NOT NULL,
            PRIMARY KEY("domain")
        );""")
        self.cur.execute('insert or replace into dnscache values(?,?)', (domain, ip))
    self.conn.commit()
def flushDns(self, method, realUrl, domain, **kwargs):
    # Drop the cached IP for *domain* and retry the request against the
    # original (un-rewritten) URL.  flushedDns=True stops request() from
    # flushing a second time, so at most one retry happens per call chain.
    self.cur.execute('delete from dnscache where domain=?', (domain,))
    self.conn.commit()
    return self.request(method, realUrl, flushedDns=True, **kwargs)
def request(self, method, url, headers=None, flushedDns=False, **kwargs):
    """Resolve the URL's hostname via the cached DoH layer, then request.

    The hostname in the URL is replaced by its resolved IP while the real
    hostname is preserved in the Host header (TLS/SNI handling is done by
    CipherSuiteAdapter).  On a connection failure — or a falsy response —
    the cached IP is flushed and the request retried once against the
    original URL; ``flushedDns`` guards against a second retry.

    Args:
        method: HTTP verb.
        url: full URL to fetch.
        headers: optional dict of request headers (Host is overwritten).
        flushedDns: internal flag set by flushDns() on the retry pass.
        **kwargs: forwarded to requests.Session.request.

    Returns:
        The requests response object, or None when the request raised and
        the retry was not attempted.
    """
    import time
    t = time.time()
    # Split the URL into scheme, hostname, optional port, and path+query.
    protocol, domain, port, resource = re.match(
        r'^(http[s]?:\/\/)?([^:\/\s]+)(?::([^\/]*))?([^\s]*)$',
        url, flags=re.IGNORECASE).groups()
    self.mount('https://', CipherSuiteAdapter(domain, cipherSuite="ALL"))
    realUrl = url
    ip = self.getIp(domain)
    # The URL will carry the bare IP, so keep the real hostname in Host.
    if headers:
        headers["Host"] = domain
    else:
        headers = {"Host": domain}
    ret = None
    tryFlush = False
    url = protocol + ip + (':' + port if port else '') + resource
    try:
        ret = super(session, self).request(method, url, headers=headers, **kwargs)
    except Exception:
        # Connection-level failure: the cached IP is probably stale.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
        logger.info('Request for ' + domain + ' with ip ' + ip + ' failed')
        tryFlush = True
    # NOTE: a requests Response is falsy for 4xx/5xx statuses, so a bad
    # status code also triggers the one-shot DNS flush-and-retry below.
    if (tryFlush or not ret) and not flushedDns:  # re-request ips and update cache
        logger.info('Flushing dns cache for ' + domain)
        # BUG FIX: forward the caller's headers on the retry; `headers` is
        # a named parameter, not part of **kwargs, so it was silently
        # dropped before.
        return self.flushDns(method, realUrl, domain, headers=headers, **kwargs)
    logger.info('tempo dns: ' + str(time.time()-t))
    return ret