diff --git a/core/httptools.py b/core/httptools.py
index 979109a4..136cc946 100755
--- a/core/httptools.py
+++ b/core/httptools.py
@@ -395,6 +395,8 @@ def downloadpage(url, **opt):
     @type ignore_response_code: bool
     @return: Result of the petition
     @rtype: HTTPResponse
+    @param use_requests: Use requests.session()
+    @type use_requests: bool
 
     Parameter Type Description
     -------------------------------------------------- -------------------------------------------------- ------------
@@ -422,6 +424,9 @@ def downloadpage(url, **opt):
     if opt.get('session', False):
         session = opt['session']  # same session to speed up search
         logger.info('same session')
+    elif opt.get('use_requests', False):
+        from lib import requests
+        session = requests.session()
     else:
         from lib import cloudscraper
         session = cloudscraper.create_scraper()
@@ -452,7 +457,8 @@ def downloadpage(url, **opt):
         file_name = ''
         opt['proxy_retries_counter'] += 1
 
-        # session.verify = False
+        session.verify = opt.get('verify', True)
+
         if opt.get('cookies', True):
             session.cookies = cj
         session.headers.update(req_headers)
diff --git a/lib/cloudscraper/__init__.py b/lib/cloudscraper/__init__.py
index 0ef9890e..541b32cf 100644
--- a/lib/cloudscraper/__init__.py
+++ b/lib/cloudscraper/__init__.py
@@ -163,9 +163,7 @@ class CloudScraper(Session):
 
     def request(self, method, url, *args, **kwargs):  # pylint: disable=E0203
-        from time import time
-        from platformcode import logger
-        start= time()
+
         if kwargs.get('proxies') and kwargs.get('proxies') != self.proxies:
             self.proxies = kwargs.get('proxies')
 
@@ -200,7 +198,6 @@ class CloudScraper(Session):
         else:
             if not resp.is_redirect and resp.status_code not in [429, 503]:
                 self._solveDepthCnt = 0
-                logger.debug('CF TIME= '+str(time()-start))
         return resp
 
 # ------------------------------------------------------------------------------- #