test shared session
core/httptools.py (11 changes, Normal file → Executable file)
@@ -407,8 +407,8 @@ def downloadpage(url, **opt):
     """
     load_cookies()
     import requests
-    from lib import cloudscraper
+    if not opt.get('session', False):
+        from lib import cloudscraper
 
     # Headers by default, if nothing is specified
     req_headers = default_headers.copy()
@@ -435,8 +435,10 @@ def downloadpage(url, **opt):
     files = {}
     file_name = ''
     opt['proxy_retries_counter'] += 1
 
-    session = cloudscraper.create_scraper()
+    if not opt.get('session', False):
+        session = cloudscraper.create_scraper()
+    else:
+        session = opt['session']
     # session.verify = False
     if opt.get('cookies', True):
         session.cookies = cj
@@ -511,6 +513,7 @@ def downloadpage(url, **opt):
 
     except Exception as e:
         if not opt.get('ignore_response_code', False) and not proxy_data.get('stat', ''):
+            import requests
             req = requests.Response()
             response['data'] = ''
             response['sucess'] = False
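Taken together, these hunks let downloadpage() reuse a caller-supplied scraper instead of building a new one per request. A minimal sketch of the resulting call pattern, assuming the opt['session'] branch above (the URLs and calling code are illustrative, not from this commit):

    from lib import cloudscraper
    from core import httptools

    # One scraper for a whole sequence of requests: the Cloudflare
    # challenge and its cookies are solved once, then reused.
    session = cloudscraper.create_scraper()

    # Hypothetical URLs; session=... lands in **opt and selects the
    # new "reuse" branch instead of cloudscraper.create_scraper().
    first = httptools.downloadpage('https://example.com/?s=query', session=session)
    second = httptools.downloadpage('https://example.com/page/2', session=session)

Because the check is opt.get('session', False), a missing or falsy session still falls back to a fresh scraper per call, so existing callers are unaffected.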
core/support.py (2 changes, Normal file → Executable file)
@@ -377,7 +377,7 @@ def scrape(func):
 
         log('PATRON= ', patron)
         if not data:
-            data = httptools.downloadpage(item.url, headers=headers, ignore_response_code=True).data.replace("'", '"')
+            data = httptools.downloadpage(item.url, headers=headers, ignore_response_code=True, session=item.session).data.replace("'", '"')
             data = re.sub('\n|\t', ' ', data)
             data = re.sub(r'>\s+<', '> <', data)
             # replace all ' with " and eliminate newline, so we don't need to worry about
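Here the scrape() helper forwards whatever session the search flow attached to the item. A sketch of an item that would reach this code path (the channel name and URL are hypothetical, and the Item import path assumes the addon's usual core.item module):

    from core.item import Item
    from lib import cloudscraper

    session = cloudscraper.create_scraper()
    item = Item(channel='somechannel', action='search',
                url='https://example.com/?s=query')
    item.session = session   # same attribute search.py sets below

    # core/support.py's scrape() then fetches the page with:
    #   httptools.downloadpage(item.url, headers=headers,
    #                          ignore_response_code=True, session=item.session)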
specials/search.py (7 changes, Normal file → Executable file)
@@ -424,7 +424,7 @@ def show_result(item):
         return getattr(channel, item.action)(item)
 
 
-def channel_search(search_results, channel_parameters, tecleado):
+def channel_search(search_results, channel_parameters, tecleado, session):
     try:
         exec("from channels import " + channel_parameters["channel"] + " as module")
         mainlist = module.mainlist(Item(channel=channel_parameters["channel"]))
@@ -433,6 +433,7 @@ def channel_search(search_results, channel_parameters, tecleado):
         search_items = [Item(channel=channel_parameters["channel"], action="search")]
 
     for item in search_items:
+        item.session = session
         result = module.search(item.clone(), tecleado)
         if result is None:
             result = []
@@ -548,6 +549,8 @@ def do_search(item, categories=None):
             logger.error(traceback.format_exc())
             continue
 
+    from lib import cloudscraper
+    session = cloudscraper.create_scraper()
 
     for index, infile in enumerate(list_channels_search):
         try:
@@ -566,7 +569,7 @@ def do_search(item, categories=None):
                 logger.info("Búsqueda cancelada")
                 return itemlist
             if multithread:
-                t = Thread(target=channel_search, args=[search_results, channel_parameters, tecleado],
+                t = Thread(target=channel_search, args=[search_results, channel_parameters, tecleado, session],
                            name=channel_parameters["title"])
                 t.setDaemon(True)
                 t.start()
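This is the flow the commit tests end to end: do_search() builds one scraper, hands it to every channel_search thread, each thread tags its items with it, and support.scrape() / httptools.downloadpage() reuse it. A self-contained sketch of the same pattern outside the addon (worker and URL names are illustrative; in the addon cloudscraper is vendored under lib/):

    from threading import Thread
    import cloudscraper

    session = cloudscraper.create_scraper()   # one scraper, as in do_search()

    def channel_worker(query, results):
        # All threads share the session, so cookies from an already-solved
        # Cloudflare challenge are reused instead of re-solved per channel.
        resp = session.get('https://example.com/search', params={'s': query})
        results.append((query, resp.status_code))

    results = []
    threads = [Thread(target=channel_worker, args=[q, results], daemon=True)
               for q in ('foo', 'bar', 'baz')]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print(results)

One caveat a test commit like this may be probing: cloudscraper's scraper subclasses requests.Session, and requests does not document Session as fully thread-safe, so sharing one across search threads trades per-request challenge solving for a known gray area.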