httptools: option to use requests
@@ -395,6 +395,8 @@ def downloadpage(url, **opt):
@type ignore_response_code: bool
@return: Result of the request
@rtype: HTTPResponse
@param use_requests: Use requests.session()
@type use_requests: bool

Parameter               Type        Description
----------------------  ----------  -----------
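The docstring additions above introduce use_requests alongside the existing options. A minimal, hypothetical call showing how a caller would opt into a plain requests session; the import path and the .data attribute are assumptions about the surrounding project, not part of this commit:

    from core import httptools  # assumed module path for the function patched above

    # hypothetical call: use requests.session() instead of cloudscraper and
    # do not treat non-2xx status codes as errors (ignore_response_code above)
    response = httptools.downloadpage('https://example.org/list',
                                      use_requests=True,
                                      ignore_response_code=True)
    print(response.data)  # assumed field on HTTPResponse; names are illustrative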
@@ -422,6 +424,9 @@ def downloadpage(url, **opt):
    if opt.get('session', False):
        session = opt['session']  # same session to speed up search
        logger.info('same session')
    elif opt.get('use_requests', False):
        from lib import requests
        session = requests.session()
    else:
        from lib import cloudscraper
        session = cloudscraper.create_scraper()
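All three branches above produce a requests.Session-compatible object (cloudscraper's scraper subclasses requests.Session), so the rest of downloadpage can treat them uniformly. A sketch of reusing one session across calls, which is what the "same session to speed up search" branch is for; the URLs and surrounding loop are illustrative only:

    from lib import requests  # the copy bundled with the project, as imported above

    shared = requests.session()
    for page in ('https://example.org/search?p=1', 'https://example.org/search?p=2'):
        # 'session' is checked before 'use_requests', so every call reuses the
        # same cookie jar and connection pool
        result = downloadpage(page, session=shared)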
@@ -452,7 +457,8 @@ def downloadpage(url, **opt):
    file_name = ''
    opt['proxy_retries_counter'] += 1

    # session.verify = False
    session.verify = opt.get('verify', True)

    if opt.get('cookies', True):
        session.cookies = cj
    session.headers.update(req_headers)
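The lines above apply the per-call options to the chosen session: TLS verification stays on unless verify=False is passed, and the shared cookie jar cj is attached unless cookies=False. A hypothetical call exercising both switches:

    # hypothetical: fetch a host with a broken certificate chain, without cookies
    response = downloadpage('https://self-signed.example.org/', verify=False, cookies=False)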
@@ -163,9 +163,7 @@ class CloudScraper(Session):
    def request(self, method, url, *args, **kwargs):
        # pylint: disable=E0203
        from time import time
        from platformcode import logger
        start = time()

        if kwargs.get('proxies') and kwargs.get('proxies') != self.proxies:
            self.proxies = kwargs.get('proxies')
@@ -200,7 +198,6 @@ class CloudScraper(Session):
        else:
            if not resp.is_redirect and resp.status_code not in [429, 503]:
                self._solveDepthCnt = 0
        logger.debug('CF TIME= '+str(time()-start))
        return resp

    # ------------------------------------------------------------------------------- #
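The CloudScraper change simply brackets request() with a wall-clock timer and logs the elapsed time through the project's logger. The same pattern reduced to a standalone sketch, with the stdlib logging module standing in for platformcode.logger:

    from time import time
    import logging

    logger = logging.getLogger('cloudscraper.timing')  # stand-in for platformcode.logger

    def timed_request(session, method, url, **kwargs):
        # wrap a requests-style session call and log how long it took,
        # mirroring the 'CF TIME=' debug line added above
        start = time()
        resp = session.request(method, url, **kwargs)
        logger.debug('CF TIME= ' + str(time() - start))
        return resp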