fix resolverdns

marco
2020-02-21 23:28:58 +01:00
parent de6e1d8149
commit d2ba398587
4 changed files with 38 additions and 45 deletions

View File

@@ -259,9 +259,6 @@ def downloadpage(url, **opt):
         from lib import cloudscraper
         session = cloudscraper.create_scraper()
         CF = True
-    elif opt.get('session', False):
-        session = opt['session']  # same session to speed up search
-        logger.info('same session')
     else:
         from lib import requests
         session = requests.session()
@@ -360,6 +357,7 @@ def downloadpage(url, **opt):
                                timeout=opt['timeout'])
     except Exception as e:
         from lib import requests
+        req = requests.Response()
         if not opt.get('ignore_response_code', False) and not proxy_data.get('stat', ''):
             response['data'] = ''
             response['sucess'] = False
@@ -371,7 +369,6 @@ def downloadpage(url, **opt):
             show_infobox(info_dict)
             return type('HTTPResponse', (), response)
         else:
-            req = requests.Response()
             req.status_code = str(e)
     else:

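Note on the two hunks above: previously req = requests.Response() was created only inside the else branch of the error handler; the fix builds the placeholder as soon as the exception is caught, so req is bound on every error path rather than only when the response code is ignored. A minimal standalone sketch of the pattern (fetch and its parameters are illustrative, not the addon's downloadpage):

import requests

def fetch(url, ignore_response_code=False):
    try:
        return requests.get(url, timeout=5)
    except Exception as e:
        req = requests.Response()  # bind the placeholder before any branching
        if not ignore_response_code:
            req.status_code = 408  # illustrative: mark the failure with a code
        else:
            req.status_code = str(e)  # as in the hunk above: stash the error text
        return req
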
View File

@@ -373,7 +373,7 @@ def scrape(func):
         log('PATRON= ', patron)
         if not data:
-            page = httptools.downloadpage(item.url, headers=headers, ignore_response_code=True, session=item.session)
+            page = httptools.downloadpage(item.url, headers=headers, ignore_response_code=True)
             # if url may be changed and channel has findhost to update
             if (not page.data or scrapertools.get_domain_from_url(page.url) != scrapertools.get_domain_from_url(item.url)) and 'findhost' in func.__globals__:
                 host = func.__globals__['findhost']()
@@ -382,8 +382,7 @@ def scrape(func):
                 jsontools.update_node(host, func.__module__.split('.')[-1], 'url')
                 parse[1] = scrapertools.get_domain_from_url(host)
                 item.url = urlparse.urlunparse(parse)
-                page = httptools.downloadpage(item.url, headers=headers, ignore_response_code=True,
-                                              session=item.session)
+                page = httptools.downloadpage(item.url, headers=headers, ignore_response_code=True)
         data = page.data.replace("'", '"')
         data = re.sub('\n|\t', ' ', data)
         data = re.sub(r'>\s+<', '> <', data)

View File

@@ -77,7 +77,6 @@ class CipherSuiteAdapter(host_header_ssl.HostHeaderSSLAdapter):
             logger.error('Failed to resolve hostname, fallback to normal dns')
             import traceback
             logger.error(traceback.print_exc())
-            ip = domain
         return ip

     def writeToCache(self, domain, ip):
@@ -108,39 +107,41 @@ class CipherSuiteAdapter(host_header_ssl.HostHeaderSSLAdapter):
             domain = parse.netloc
         else:
             raise requests.exceptions.URLRequired
-        self.ssl_context = CustomContext(protocol, domain)
-        if self.CF:
-            self.ssl_context.options |= (ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1)
-            self.ssl_context.set_ciphers(self.cipherSuite)
-        self.init_poolmanager(self._pool_connections, self._pool_maxsize, block=self._pool_block)
         ip = self.getIp(domain)
-        realUrl = request.url
-        if request.headers:
-            request.headers["Host"] = domain
-        else:
-            request.headers = {"Host": domain}
-        ret = None
-        tryFlush = False
-        parse = list(parse)
-        parse[1] = ip
-        request.url = urlparse.urlunparse(parse)
-        try:
-            ret = super(CipherSuiteAdapter, self).send(request, **kwargs)
-        except Exception as e:
-            logger.info('Request for ' + domain + ' with ip ' + ip + ' failed')
-            logger.info(e)
-            # if 'SSLError' in str(e):
-            #     # disable it
-            #     config.set_setting("resolver_dns", False)
-            #     request.url = realUrl
-            #     ret = super(CipherSuiteAdapter, self).send(request, **kwargs)
-            # else:
-            tryFlush = True
-        if tryFlush and not flushedDns:  # re-request ips and update cache
-            logger.info('Flushing dns cache for ' + domain)
-            return self.flushDns(request, domain, **kwargs)
-        ret.url = realUrl
+        if ip:
+            self.ssl_context = CustomContext(protocol, domain)
+            if self.CF:
+                self.ssl_context.options |= (ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1)
+                self.ssl_context.set_ciphers(self.cipherSuite)
+            self.init_poolmanager(self._pool_connections, self._pool_maxsize, block=self._pool_block)
+            realUrl = request.url
+            if request.headers:
+                request.headers["Host"] = domain
+            else:
+                request.headers = {"Host": domain}
+            ret = None
+            tryFlush = False
+            parse = list(parse)
+            parse[1] = ip
+            request.url = urlparse.urlunparse(parse)
+            try:
+                ret = super(CipherSuiteAdapter, self).send(request, **kwargs)
+            except Exception as e:
+                logger.info('Request for ' + domain + ' with ip ' + ip + ' failed')
+                logger.info(e)
+                # if 'SSLError' in str(e):
+                #     # disable it
+                #     config.set_setting("resolver_dns", False)
+                #     request.url = realUrl
+                #     ret = super(CipherSuiteAdapter, self).send(request, **kwargs)
+                # else:
+                tryFlush = True
+            if tryFlush and not flushedDns:  # re-request ips and update cache
+                logger.info('Flushing dns cache for ' + domain)
+                return self.flushDns(request, domain, **kwargs)
+            ret.url = realUrl
+        else:
+            ret = super(host_header_ssl.HostHeaderSSLAdapter, self).send(request, **kwargs)
         return ret

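Context for the hunks above: CipherSuiteAdapter is a requests transport adapter, so its send() intercepts every request on whatever session mounts it. With the ip = domain fallback removed in the first hunk, getIp() can now return an empty value when resolution fails, and the new if ip / else split hands the request to the stock HostHeaderSSLAdapter instead of rewriting the URL with a hostname pretending to be an IP. A generic sketch of the adapter-mount pattern (LoggingAdapter is illustrative; only the requests API itself is real):

import requests
from requests.adapters import HTTPAdapter

class LoggingAdapter(HTTPAdapter):
    # Stand-in for CipherSuiteAdapter: every request on the session passes through send().
    def send(self, request, **kwargs):
        print('sending', request.url)  # a DNS-resolver adapter would swap host for IP here
        return super(LoggingAdapter, self).send(request, **kwargs)

session = requests.Session()
session.mount('https://', LoggingAdapter())  # route all https traffic through the adapter
print(session.get('https://example.com').status_code)
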
View File

@@ -189,9 +189,6 @@ def channel_search(item):
     channel_list, channel_titles = get_channels(item)
-    import requests
-    session = requests.Session()
-
     searching += channel_list
     searching_titles += channel_titles
     cnt = 0
@@ -201,7 +198,7 @@ def channel_search(item):
         config.set_setting('tmdb_active', False)

     with futures.ThreadPoolExecutor(max_workers=set_workers()) as executor:
-        c_results = [executor.submit(get_channel_results, ch, item, session) for ch in channel_list]
+        c_results = [executor.submit(get_channel_results, ch, item) for ch in channel_list]

         for res in futures.as_completed(c_results):
             cnt += 1
@@ -294,7 +291,7 @@ def channel_search(item):
     return valid + results


-def get_channel_results(ch, item, session):
+def get_channel_results(ch, item):
     max_results = 10
     results = list()
@@ -306,7 +303,6 @@ def get_channel_results(ch, item, session):
     if search_action:
         for search_ in search_action:
-            search_.session = session
             try:
                 results.extend(module.search(search_, item.text))
             except:
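
Net effect in this file: channel_search() no longer creates one requests.Session up front and threads it through every worker; each channel search now opens its own connections inside downloadpage(). One plausible reason (an assumption, the commit message does not say) is that the single Session was being shared across ThreadPoolExecutor workers, and requests does not document Session as thread-safe. A sketch of the per-task pattern this returns to (fetch_status and the URLs are illustrative):

import requests
from concurrent import futures

def fetch_status(url):
    session = requests.session()  # fresh session per task: nothing shared across threads
    return session.get(url, timeout=5).status_code

urls = ['https://example.com'] * 4  # illustrative targets
with futures.ThreadPoolExecutor(max_workers=4) as executor:
    for status in executor.map(fetch_status, urls):
        print(status)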