- logger.info -> logger.log
- fix unshortenit kodi 19
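Editor's note (not part of the diff): every `logger.info(...)` call below becomes `logger.log(...)`, including the zero-argument calls used as function-entry traces, so `platformcode/logger` must expose a `log()` helper that tolerates a missing message. That helper is not shown in this excerpt; the sketch below is only a plausible reconstruction of such a function, assuming Kodi 19's `xbmc.log` API — the addon tag, the default level and the inspect-based caller lookup are illustrative assumptions, not code from this commit.

    # Hypothetical sketch only -- platformcode/logger.py itself is not part of this excerpt.
    import inspect

    import xbmc  # Kodi's built-in logging API (Kodi 19 "Matrix")


    def log(message="", level=xbmc.LOGINFO):  # default level is an assumption
        # Zero-argument calls like "logger.log()" are used as entry traces:
        # fall back to naming the calling module and function.
        if message == "":
            caller = inspect.currentframe().f_back
            message = "[%s] %s" % (caller.f_globals.get("__name__", "?"),
                                   caller.f_code.co_name)
        xbmc.log("[plugin.video.kod] %s" % message, level)  # addon id assumed

With a helper like this, the mechanical rename in the diff keeps old call sites working unchanged, whether or not they pass a message.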
@@ -9,77 +9,67 @@ addon = config.__settings__
 downloadenabled = addon.getSetting('downloadenabled')

 def getmainlist(view="thumb_"):
-    logger.info()
+    logger.log()
     itemlist = list()

     if config.dev_mode():
-        itemlist.append(Item(title="Redirect", channel="checkhost", action="check_channels",
-                             thumbnail='',
-                             category=config.get_localized_string(30119), viewmode="thumbnails"))
+        itemlist.append(Item(title="Redirect", channel="checkhost", action="check_channels", thumbnail='',
+                             category=config.get_localized_string(30119), viewmode="thumbnails"))
     # Main Menu Channels
     if addon.getSetting('enable_news_menu') == "true":
         itemlist.append(Item(title=config.get_localized_string(30130), channel="news", action="mainlist",
-                             thumbnail=get_thumb("news.png", view),
-                             category=config.get_localized_string(30119), viewmode="thumbnails",
-                             context=[{"title": config.get_localized_string(70285), "channel": "shortcuts", "action": "SettingOnPosition", "category":7, "setting":1}]))
+                             thumbnail=get_thumb("news.png", view), category=config.get_localized_string(30119), viewmode="thumbnails",
+                             context=[{"title": config.get_localized_string(70285), "channel": "shortcuts", "action": "SettingOnPosition", "category":7, "setting":1}]))

     if addon.getSetting('enable_channels_menu') == "true":
         itemlist.append(Item(title=config.get_localized_string(30118), channel="channelselector", action="getchanneltypes",
-                             thumbnail=get_thumb("channels.png", view), view=view,
-                             category=config.get_localized_string(30119), viewmode="thumbnails"))
+                             thumbnail=get_thumb("channels.png", view), view=view, category=config.get_localized_string(30119), viewmode="thumbnails"))

     if addon.getSetting('enable_search_menu') == "true":
         itemlist.append(Item(title=config.get_localized_string(30103), channel="search", path='special', action="mainlist",
-                             thumbnail=get_thumb("search.png", view),
-                             category=config.get_localized_string(30119), viewmode="list",
-                             context = [{"title": config.get_localized_string(60412), "action": "setting_channel_new", "channel": "search"},
-                                        {"title": config.get_localized_string(70286), "channel": "shortcuts", "action": "SettingOnPosition", "category":5 , "setting":1}]))
+                             thumbnail=get_thumb("search.png", view), category=config.get_localized_string(30119), viewmode="list",
+                             context = [{"title": config.get_localized_string(60412), "action": "setting_channel_new", "channel": "search"},
+                                        {"title": config.get_localized_string(70286), "channel": "shortcuts", "action": "SettingOnPosition", "category":5 , "setting":1}]))

     if addon.getSetting('enable_onair_menu') == "true":
         itemlist.append(Item(channel="filmontv", action="mainlist", title=config.get_localized_string(50001),
-                             thumbnail=get_thumb("on_the_air.png"), viewmode="thumbnails"))
+                             thumbnail=get_thumb("on_the_air.png"), viewmode="thumbnails"))

     if addon.getSetting('enable_link_menu') == "true":
-        itemlist.append(Item(title=config.get_localized_string(70527), channel="kodfavorites", action="mainlist",
-                             thumbnail=get_thumb("mylink.png", view), view=view,
-                             category=config.get_localized_string(70527), viewmode="thumbnails"))
+        itemlist.append(Item(title=config.get_localized_string(70527), channel="kodfavorites", action="mainlist", thumbnail=get_thumb("mylink.png", view),
+                             view=view, category=config.get_localized_string(70527), viewmode="thumbnails"))

     if addon.getSetting('enable_fav_menu') == "true":
         itemlist.append(Item(title=config.get_localized_string(30102), channel="favorites", action="mainlist",
-                             thumbnail=get_thumb("favorites.png", view),
-                             category=config.get_localized_string(30102), viewmode="thumbnails"))
+                             thumbnail=get_thumb("favorites.png", view), category=config.get_localized_string(30102), viewmode="thumbnails"))

     if config.get_videolibrary_support() and addon.getSetting('enable_library_menu') == "true":
         itemlist.append(Item(title=config.get_localized_string(30131), channel="videolibrary", action="mainlist",
-                             thumbnail=get_thumb("videolibrary.png", view),
-                             category=config.get_localized_string(30119), viewmode="thumbnails",
+                             thumbnail=get_thumb("videolibrary.png", view), category=config.get_localized_string(30119), viewmode="thumbnails",
                              context=[{"title": config.get_localized_string(70287), "channel": "shortcuts", "action": "SettingOnPosition", "category":2, "setting":1},
-                                      {"title": config.get_localized_string(60568), "channel": "videolibrary", "action": "update_videolibrary"}]))
+                                      {"title": config.get_localized_string(60568), "channel": "videolibrary", "action": "update_videolibrary"}]))
     if downloadenabled != "false":
-        itemlist.append(Item(title=config.get_localized_string(30101), channel="downloads", action="mainlist",
-                             thumbnail=get_thumb("downloads.png", view), viewmode="list",
-                             context=[{"title": config.get_localized_string(70288), "channel": "shortcuts", "action": "SettingOnPosition", "category":6}]))
+        itemlist.append(Item(title=config.get_localized_string(30101), channel="downloads", action="mainlist", thumbnail=get_thumb("downloads.png", view), viewmode="list",
+                             context=[{"title": config.get_localized_string(70288), "channel": "shortcuts", "action": "SettingOnPosition", "category":6}]))

     thumb_setting = "setting_%s.png" % 0  # config.get_setting("plugin_updates_available")

     itemlist.append(Item(title=config.get_localized_string(30100), channel="setting", action="settings",
-                         thumbnail=get_thumb(thumb_setting, view),
-                         category=config.get_localized_string(30100), viewmode="list"))
+                         thumbnail=get_thumb(thumb_setting, view), category=config.get_localized_string(30100), viewmode="list"))
     itemlist.append(Item(title=config.get_localized_string(30104) + " (v" + config.get_addon_version(with_fix=True) + ")", channel="help", action="mainlist",
-                         thumbnail=get_thumb("help.png", view),
-                         category=config.get_localized_string(30104), viewmode="list"))
+                         thumbnail=get_thumb("help.png", view), category=config.get_localized_string(30104), viewmode="list"))
     return itemlist


 def getchanneltypes(view="thumb_"):
-    logger.info()
+    logger.log()

     # Category List
     channel_types = ["movie", "tvshow", "anime", "documentary", "vos", "live", "torrent", "music"] #, "direct"

     # Channel Language
     channel_language = auto_filter()
-    logger.info("channel_language=%s" % channel_language)
+    logger.log("channel_language=%s" % channel_language)

     # Build Itemlist
     itemlist = list()
@@ -102,7 +92,7 @@ def getchanneltypes(view="thumb_"):

 def filterchannels(category, view="thumb_"):
     from core import channeltools
-    logger.info('Filter Channels ' + category)
+    logger.log('Filter Channels ' + category)

     channelslist = []

@@ -113,17 +103,17 @@ def filterchannels(category, view="thumb_"):
     appenddisabledchannels = True

     channel_path = os.path.join(config.get_runtime_path(), 'channels', '*.json')
-    logger.info("channel_path = %s" % channel_path)
+    logger.log("channel_path = %s" % channel_path)

     channel_files = glob.glob(channel_path)
-    logger.info("channel_files found %s" % (len(channel_files)))
+    logger.log("channel_files found %s" % (len(channel_files)))

     # Channel Language
     channel_language = auto_filter()
-    logger.info("channel_language=%s" % channel_language)
+    logger.log("channel_language=%s" % channel_language)

     for channel_path in channel_files:
-        logger.info("channel in for = %s" % channel_path)
+        logger.log("channel in for = %s" % channel_path)

         channel = os.path.basename(channel_path).replace(".json", "")

@@ -136,7 +126,7 @@ def filterchannels(category, view="thumb_"):
         # If it's not a channel we skip it
         if not channel_parameters["channel"]:
             continue
-        logger.info("channel_parameters=%s" % repr(channel_parameters))
+        logger.log("channel_parameters=%s" % repr(channel_parameters))

         # If you prefer the banner and the channel has it, now change your mind
         if view == "banner_" and "banner" in channel_parameters:
@@ -231,7 +221,7 @@ def get_thumb(thumb_name, view="thumb_"):


 def set_channel_info(parameters):
-    logger.info()
+    logger.log()

     info = ''
     language = ''

@@ -15,7 +15,7 @@ default_file = dict()
 remote_path = 'https://raw.githubusercontent.com/kodiondemand/media/master/'

 def is_enabled(channel_name):
-    logger.info("channel_name=" + channel_name)
+    logger.log("channel_name=" + channel_name)
     return get_channel_parameters(channel_name)["active"] and get_channel_setting("enabled", channel=channel_name,
                                                                                   default=True)

@@ -87,7 +87,7 @@ def get_channel_parameters(channel_name):


 def get_channel_json(channel_name):
-    # logger.info("channel_name=" + channel_name)
+    # logger.log("channel_name=" + channel_name)
     from core import filetools
     channel_json = None
     try:
@@ -101,9 +101,9 @@ def get_channel_json(channel_name):
                                      channel_name + ".json")

         if filetools.isfile(channel_path):
-            # logger.info("channel_data=" + channel_path)
+            # logger.log("channel_data=" + channel_path)
             channel_json = jsontools.load(filetools.read(channel_path))
-            # logger.info("channel_json= %s" % channel_json)
+            # logger.log("channel_json= %s" % channel_json)

     except Exception as ex:
         template = "An exception of type %s occured. Arguments:\n%r"
@@ -114,7 +114,7 @@ def get_channel_json(channel_name):


 def get_channel_controls_settings(channel_name):
-    # logger.info("channel_name=" + channel_name)
+    # logger.log("channel_name=" + channel_name)
     dict_settings = {}
     # import web_pdb; web_pdb.set_trace()
     # list_controls = get_channel_json(channel_name).get('settings', list())
@@ -137,7 +137,7 @@ def get_lang(channel_name):
     if hasattr(channel, 'list_language'):
         for language in channel.list_language:
             list_language.append(language)
-        logger.info(list_language)
+        logger.log(list_language)
     else:
         sub = False
         langs = []

@@ -253,12 +253,12 @@ class Downloader(object):
             self.file.seek(2 ** 31, 0)
         except OverflowError:
             self._seekable = False
-            logger.info("Cannot do seek() or tell() in files larger than 2GB")
+            logger.log("Cannot do seek() or tell() in files larger than 2GB")

         self.__get_download_info__()

         try:
-            logger.info("Download started: Parts: %s | Path: %s | File: %s | Size: %s" % (str(len(self._download_info["parts"])), self._path.encode('utf-8'), self._filename.encode('utf-8'), str(self._download_info["size"])))
+            logger.log("Download started: Parts: %s | Path: %s | File: %s | Size: %s" % (str(len(self._download_info["parts"])), self._path.encode('utf-8'), self._filename.encode('utf-8'), str(self._download_info["size"])))
         except:
             pass

@@ -410,7 +410,7 @@ class Downloader(object):
         return id == 0 or (len(self.completed_parts) >= id and sorted(self.completed_parts)[id - 1] == id - 1)

     def __save_file__(self):
-        logger.info("Thread started: %s" % threading.current_thread().name)
+        logger.log("Thread started: %s" % threading.current_thread().name)

         while self._state == self.states.downloading:
             if not self.pending_parts and not self.download_parts and not self.save_parts:  # Download finished
@@ -449,7 +449,7 @@ class Downloader(object):
                 self._download_info["parts"][s]["status"] = self.states.stopped
                 self._download_info["parts"][s]["current"] = self._download_info["parts"][s]["start"]

-        logger.info("Thread stopped: %s" % threading.current_thread().name)
+        logger.log("Thread stopped: %s" % threading.current_thread().name)

     def __get_part_id__(self):
         self._download_lock.acquire()
@@ -464,21 +464,21 @@ class Downloader(object):
         return None

     def __set_part_connecting__(self, id):
-        logger.info("ID: %s Establishing connection" % id)
+        logger.log("ID: %s Establishing connection" % id)
         self._download_info["parts"][id]["status"] = self.states.connecting

     def __set_part__error__(self, id):
-        logger.info("ID: %s Download failed" % id)
+        logger.log("ID: %s Download failed" % id)
         self._download_info["parts"][id]["status"] = self.states.error
         self.pending_parts.add(id)
         self.download_parts.remove(id)

     def __set_part__downloading__(self, id):
-        logger.info("ID: %s Downloading data ..." % id)
+        logger.log("ID: %s Downloading data ..." % id)
         self._download_info["parts"][id]["status"] = self.states.downloading

     def __set_part_completed__(self, id):
-        logger.info("ID: %s Download finished!" % id)
+        logger.log("ID: %s Download finished!" % id)
         self._download_info["parts"][id]["status"] = self.states.saving
         self.download_parts.remove(id)
         self.save_parts.add(id)
@@ -501,7 +501,7 @@ class Downloader(object):
         return file

     def __start_part__(self):
-        logger.info("Thread Started: %s" % threading.current_thread().name)
+        logger.log("Thread Started: %s" % threading.current_thread().name)
         while self._state == self.states.downloading:
             id = self.__get_part_id__()
             if id is None: break
@@ -528,7 +528,7 @@ class Downloader(object):
                     buffer = connection.read(self._block_size)
                     speed.append(old_div(len(buffer), ((time.time() - start) or 0.001)))
                 except:
-                    logger.info("ID: %s Error downloading data" % id)
+                    logger.log("ID: %s Error downloading data" % id)
                     self._download_info["parts"][id]["status"] = self.states.error
                     self.pending_parts.add(id)
                     self.download_parts.remove(id)
@@ -546,7 +546,7 @@ class Downloader(object):

                 if velocidad_minima > speed[-1] and velocidad_minima > speed[-2] and self._download_info["parts"][id]["current"] < self._download_info["parts"][id]["end"]:
                     if connection.fp: connection.fp._sock.close()
-                    logger.info("ID: %s Restarting connection! | Minimum Speed: %.2f %s/s | Speed: %.2f %s/s" % (id, vm[1], vm[2], v[1], v[2]))
+                    logger.log("ID: %s Restarting connection! | Minimum Speed: %.2f %s/s | Speed: %.2f %s/s" % (id, vm[1], vm[2], v[1], v[2]))
                     # file.close()
                     break
                 else:
@@ -556,7 +556,7 @@ class Downloader(object):
                     break

         self.__set_part_stopped__(id)
-        logger.info("Thread stopped: %s" % threading.current_thread().name)
+        logger.log("Thread stopped: %s" % threading.current_thread().name)

     def __update_json(self, started=True):
         text = filetools.read(self._json_path)
@@ -564,10 +564,10 @@ class Downloader(object):
         if self._json_text != text:
             self._json_text = text
             self._json_item = Item().fromjson(text)
-            logger.info('item loaded')
+            logger.log('item loaded')
         progress = int(self.progress)
         if started and self._json_item.downloadStatus == 0:  # stopped
-            logger.info('Download paused')
+            logger.log('Download paused')
             self.stop()
         elif self._json_item.downloadProgress != progress or not started:
             params = {"downloadStatus": 4, "downloadComplete": 0, "downloadProgress": progress}

@@ -97,11 +97,11 @@ def limpia_nombre_excepto_1(s):
     try:
         s = unicode(s, "utf-8")
     except UnicodeError:
-        # logger.info("no es utf-8")
+        # logger.log("no es utf-8")
         try:
             s = unicode(s, "iso-8859-1")
         except UnicodeError:
-            # logger.info("no es iso-8859-1")
+            # logger.log("no es iso-8859-1")
             pass
     # Remove accents
     s = limpia_nombre_sin_acentos(s)
@@ -125,29 +125,29 @@ def limpia_nombre_excepto_2(s):

 def getfilefromtitle(url, title):
     # Print in the log what you will discard
-    logger.info("title=" + title)
-    logger.info("url=" + url)
+    logger.log("title=" + title)
+    logger.log("url=" + url)
     plataforma = config.get_system_platform()
-    logger.info("platform=" + plataforma)
+    logger.log("platform=" + plataforma)

     # filename = xbmc.makeLegalFilename(title + url[-4:])
     from core import scrapertools

     nombrefichero = title + scrapertools.get_filename_from_url(url)[-4:]
-    logger.info("filename= %s" % nombrefichero)
+    logger.log("filename= %s" % nombrefichero)
     if "videobb" in url or "videozer" in url or "putlocker" in url:
         nombrefichero = title + ".flv"
     if "videobam" in url:
         nombrefichero = title + "." + url.rsplit(".", 1)[1][0:3]

-    logger.info("filename= %s" % nombrefichero)
+    logger.log("filename= %s" % nombrefichero)

     nombrefichero = limpia_nombre_caracteres_especiales(nombrefichero)

-    logger.info("filename= %s" % nombrefichero)
+    logger.log("filename= %s" % nombrefichero)

     fullpath = filetools.join(config.get_setting("downloadpath"), nombrefichero)
-    logger.info("fullpath= %s" % fullpath)
+    logger.log("fullpath= %s" % fullpath)

     if config.is_xbmc() and fullpath.startswith("special://"):
         import xbmc
@@ -162,7 +162,7 @@ def downloadtitle(url, title):


 def downloadbest(video_urls, title, continuar=False):
-    logger.info()
+    logger.log()

     # Flip it over, to put the highest quality one first (list () is for you to make a copy of)
     invertida = list(video_urls)
@@ -172,9 +172,9 @@ def downloadbest(video_urls, title, continuar=False):
         # videotitle = elemento[0]
         url = elemento[1]
         if not PY3:
-            logger.info("Downloading option " + title + " " + url.encode('ascii', 'ignore'))
+            logger.log("Downloading option " + title + " " + url.encode('ascii', 'ignore'))
         else:
-            logger.info("Downloading option " + title + " " + url.encode('ascii', 'ignore').decode('utf-8'))
+            logger.log("Downloading option " + title + " " + url.encode('ascii', 'ignore').decode('utf-8'))

         # Calculate the file where you should record
         try:
@@ -200,25 +200,25 @@ def downloadbest(video_urls, title, continuar=False):
         else:
             # The file doesn't even exist
             if not filetools.exists(fullpath):
-                logger.info("-> You have not downloaded anything, testing with the following option if there is")
+                logger.log("-> You have not downloaded anything, testing with the following option if there is")
             # The file exists
             else:
                 tamanyo = filetools.getsize(fullpath)

                 # It has size 0
                 if tamanyo == 0:
-                    logger.info("-> Download a file with size 0, testing with the following option if it exists")
+                    logger.log("-> Download a file with size 0, testing with the following option if it exists")
                     os.remove(fullpath)
                 else:
-                    logger.info("-> Download a file with size %d, he takes it for good" % tamanyo)
+                    logger.log("-> Download a file with size %d, he takes it for good" % tamanyo)
                     return 0

     return -2


 def downloadfile(url, nombrefichero, headers=None, silent=False, continuar=False, resumir=True, header=''):
-    logger.info("url= " + url)
-    logger.info("filename= " + nombrefichero)
+    logger.log("url= " + url)
+    logger.log("filename= " + nombrefichero)

     if headers is None:
         headers = []
@@ -242,14 +242,14 @@ def downloadfile(url, nombrefichero, headers=None, silent=False, continuar=False
         nombrefichero = xbmc.makeLegalFilename(nombrefichero)
     except:
         pass
-    logger.info("filename= " + nombrefichero)
+    logger.log("filename= " + nombrefichero)

     # The file exists and you want to continue
     if filetools.exists(nombrefichero) and continuar:
         f = filetools.file_open(nombrefichero, 'r+b', vfs=VFS)
         if resumir:
             exist_size = filetools.getsize(nombrefichero)
-            logger.info("the file exists, size= %d" % exist_size)
+            logger.log("the file exists, size= %d" % exist_size)
             grabado = exist_size
             f.seek(exist_size)
         else:
@@ -258,13 +258,13 @@ def downloadfile(url, nombrefichero, headers=None, silent=False, continuar=False

     # the file already exists and you don't want to continue, it aborts
     elif filetools.exists(nombrefichero) and not continuar:
-        logger.info("the file exists, it does not download again")
+        logger.log("the file exists, it does not download again")
         return -3

     # the file does not exist
     else:
         exist_size = 0
-        logger.info("the file does not exist")
+        logger.log("the file does not exist")

         f = filetools.file_open(nombrefichero, 'wb', vfs=VFS)
         grabado = 0
@@ -285,13 +285,13 @@ def downloadfile(url, nombrefichero, headers=None, silent=False, continuar=False
         additional_headers = [additional_headers]

     for additional_header in additional_headers:
-        logger.info("additional_header: " + additional_header)
+        logger.log("additional_header: " + additional_header)
         name = re.findall("(.*?)=.*?", additional_header)[0]
         value = urllib.parse.unquote_plus(re.findall(".*?=(.*?)$", additional_header)[0])
         headers.append([name, value])

     url = url.split("|")[0]
-    logger.info("url=" + url)
+    logger.log("url=" + url)

     # Socket timeout at 60 seconds
     socket.setdefaulttimeout(60)
@@ -299,7 +299,7 @@ def downloadfile(url, nombrefichero, headers=None, silent=False, continuar=False
     h = urllib.request.HTTPHandler(debuglevel=0)
     request = urllib.request.Request(url)
     for header in headers:
-        logger.info("Header= " + header[0] + ": " + header[1])
+        logger.log("Header= " + header[0] + ": " + header[1])
         request.add_header(header[0], header[1])

     if exist_size > 0:
@@ -328,12 +328,12 @@ def downloadfile(url, nombrefichero, headers=None, silent=False, continuar=False
     if exist_size > 0:
         totalfichero = totalfichero + exist_size

-    logger.info("Content-Length= %s" % totalfichero)
+    logger.log("Content-Length= %s" % totalfichero)

     blocksize = 100 * 1024

     bloqueleido = connexion.read(blocksize)
-    logger.info("Starting downloading the file, blocked= %s" % len(bloqueleido))
+    logger.log("Starting downloading the file, blocked= %s" % len(bloqueleido))

     maxreintentos = 10

@@ -360,7 +360,7 @@ def downloadfile(url, nombrefichero, headers=None, silent=False, continuar=False
                     tiempofalta = old_div(falta, velocidad)
                 else:
                     tiempofalta = 0
-                # logger.info(sec_to_hms(tiempofalta))
+                # logger.log(sec_to_hms(tiempofalta))
                 if not silent:
                     progreso.update(percent, "%.2fMB/%.2fMB (%d%%) %.2f Kb/s %s" %
                                     (descargadosmb, totalmb, percent, old_div(velocidad, 1024),
@@ -368,14 +368,14 @@ def downloadfile(url, nombrefichero, headers=None, silent=False, continuar=False
                 break
             except:
                 reintentos += 1
-                logger.info("ERROR in block download, retry %d" % reintentos)
+                logger.log("ERROR in block download, retry %d" % reintentos)
                 import traceback
                 logger.error(traceback.print_exc())

             # The user cancels the download
             try:
                 if progreso.iscanceled():
-                    logger.info("Download of file canceled")
+                    logger.log("Download of file canceled")
                     f.close()
                     progreso.close()
                     return -1
@@ -384,7 +384,7 @@ def downloadfile(url, nombrefichero, headers=None, silent=False, continuar=False

             # There was an error in the download
             if reintentos > maxreintentos:
-                logger.info("ERROR in the file download")
+                logger.log("ERROR in the file download")
                 f.close()
                 if not silent:
                     progreso.close()
@@ -430,7 +430,7 @@ def downloadfile(url, nombrefichero, headers=None, silent=False, continuar=False
     except:
         pass

-    logger.info("End of file download")
+    logger.log("End of file download")


 def downloadfileRTMP(url, nombrefichero, silent):
@@ -476,7 +476,7 @@ def downloadfileRTMP(url, nombrefichero, silent):
     try:
         rtmpdump_args = [rtmpdump_cmd] + rtmpdump_args + ["-o", nombrefichero]
         from os import spawnv, P_NOWAIT
-        logger.info("Initiating file download: %s" % " ".join(rtmpdump_args))
+        logger.log("Initiating file download: %s" % " ".join(rtmpdump_args))
         rtmpdump_exit = spawnv(P_NOWAIT, rtmpdump_cmd, rtmpdump_args)
         if not silent:
             from platformcode import platformtools
@@ -488,18 +488,18 @@ def downloadfileRTMP(url, nombrefichero, silent):


 def downloadfileGzipped(url, pathfichero):
-    logger.info("url= " + url)
+    logger.log("url= " + url)
     nombrefichero = pathfichero
-    logger.info("filename= " + nombrefichero)
+    logger.log("filename= " + nombrefichero)

     import xbmc
     nombrefichero = xbmc.makeLegalFilename(nombrefichero)
-    logger.info("filename= " + nombrefichero)
+    logger.log("filename= " + nombrefichero)
     patron = "(http://[^/]+)/.+"
     matches = re.compile(patron, re.DOTALL).findall(url)

     if len(matches):
-        logger.info("Main URL: " + matches[0])
+        logger.log("Main URL: " + matches[0])
         url1 = matches[0]
     else:
         url1 = url
@@ -546,9 +546,9 @@ def downloadfileGzipped(url, pathfichero):

     nombre_fichero_base = filetools.basename(nombrefichero)
     if len(nombre_fichero_base) == 0:
-        logger.info("Searching for name in the answer Headers")
+        logger.log("Searching for name in the answer Headers")
         nombre_base = connexion.headers["Content-Disposition"]
-        logger.info(nombre_base)
+        logger.log(nombre_base)
         patron = 'filename="([^"]+)"'
         matches = re.compile(patron, re.DOTALL).findall(nombre_base)
         if len(matches) > 0:
@@ -556,7 +556,7 @@ def downloadfileGzipped(url, pathfichero):
             titulo = GetTitleFromFile(titulo)
             nombrefichero = filetools.join(pathfichero, titulo)
         else:
-            logger.info("Name of the file not found, Placing temporary name: no_name.txt")
+            logger.log("Name of the file not found, Placing temporary name: no_name.txt")
             titulo = "no_name.txt"
             nombrefichero = filetools.join(pathfichero, titulo)
     totalfichero = int(connexion.headers["Content-Length"])
@@ -564,10 +564,10 @@ def downloadfileGzipped(url, pathfichero):
     # then
     f = filetools.file_open(nombrefichero, 'w', vfs=VFS)

-    logger.info("new file open")
+    logger.log("new file open")

     grabado = 0
-    logger.info("Content-Length= %s" % totalfichero)
+    logger.log("Content-Length= %s" % totalfichero)

     blocksize = 100 * 1024

@@ -580,7 +580,7 @@ def downloadfileGzipped(url, pathfichero):
         gzipper = gzip.GzipFile(fileobj=compressedstream)
         bloquedata = gzipper.read()
         gzipper.close()
-        logger.info("Starting downloading the file, blocked= %s" % len(bloqueleido))
+        logger.log("Starting downloading the file, blocked= %s" % len(bloqueleido))
     except:
         logger.error("ERROR: The file to be downloaded is not compressed with Gzip")
         f.close()
@@ -619,32 +619,32 @@ def downloadfileGzipped(url, pathfichero):
                     tiempofalta = old_div(falta, velocidad)
                 else:
                     tiempofalta = 0
-                logger.info(sec_to_hms(tiempofalta))
+                logger.log(sec_to_hms(tiempofalta))
                 progreso.update(percent, "%.2fMB/%.2fMB (%d%%) %.2f Kb/s %s left " % (descargadosmb, totalmb, percent, old_div(velocidad, 1024), sec_to_hms(tiempofalta)))
                 break
             except:
                 reintentos += 1
-                logger.info("ERROR in block download, retry %d" % reintentos)
+                logger.log("ERROR in block download, retry %d" % reintentos)
                 for line in sys.exc_info():
                     logger.error("%s" % line)

         # The user cancels the download
         if progreso.iscanceled():
-            logger.info("Download of file canceled")
+            logger.log("Download of file canceled")
             f.close()
             progreso.close()
             return -1

         # There was an error in the download
         if reintentos > maxreintentos:
-            logger.info("ERROR in the file download")
+            logger.log("ERROR in the file download")
             f.close()
             progreso.close()

             return -2

     except:
-        logger.info("ERROR in the file download")
+        logger.log("ERROR in the file download")
         for line in sys.exc_info():
             logger.error("%s" % line)
         f.close()
@@ -655,15 +655,15 @@ def downloadfileGzipped(url, pathfichero):

     # print data
     progreso.close()
-    logger.info("End download of the file")
+    logger.log("End download of the file")
     return nombrefichero


 def GetTitleFromFile(title):
     # Print in the log what you will discard
-    logger.info("title= " + title)
+    logger.log("title= " + title)
     plataforma = config.get_system_platform()
-    logger.info("plataform= " + plataforma)
+    logger.log("plataform= " + plataforma)

     # nombrefichero = xbmc.makeLegalFilename(title + url[-4:])
     nombrefichero = title
@@ -677,11 +677,11 @@ def sec_to_hms(seconds):


 def downloadIfNotModifiedSince(url, timestamp):
-    logger.info("(" + url + "," + time.ctime(timestamp) + ")")
+    logger.log("(" + url + "," + time.ctime(timestamp) + ")")

     # Convert date to GMT
     fecha_formateada = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(timestamp))
-    logger.info("Formatted date= %s" % fecha_formateada)
+    logger.log("Formatted date= %s" % fecha_formateada)

     # Check if it has changed
     inicio = time.clock()
@@ -702,9 +702,9 @@ def downloadIfNotModifiedSince(url, timestamp):
     except urllib.error.URLError as e:
         # If it returns 304 it is that it has not changed
         if hasattr(e, 'code'):
-            logger.info("HTTP response code : %d" % e.code)
+            logger.log("HTTP response code : %d" % e.code)
             if e.code == 304:
-                logger.info("It has not changed")
+                logger.log("It has not changed")
                 updated = False
         # Grab errors with response code from requested external server
         else:
@@ -713,13 +713,13 @@ def downloadIfNotModifiedSince(url, timestamp):
             data = ""

     fin = time.clock()
-    logger.info("Downloaded in %d seconds " % (fin - inicio + 1))
+    logger.log("Downloaded in %d seconds " % (fin - inicio + 1))

     return updated, data


 def download_all_episodes(item, channel, first_episode="", preferred_server="vidspot", filter_language=""):
-    logger.info("show= " + item.show)
+    logger.log("show= " + item.show)
     show_title = item.show

     # Gets the listing from which it was called
@@ -749,9 +749,9 @@ def download_all_episodes(item, channel, first_episode="", preferred_server="vid

     for episode_item in episode_itemlist:
         try:
-            logger.info("episode= " + episode_item.title)
+            logger.log("episode= " + episode_item.title)
             episode_title = scrapertools.find_single_match(episode_item.title, r"(\d+x\d+)")
-            logger.info("episode= " + episode_title)
+            logger.log("episode= " + episode_title)
         except:
             import traceback
             logger.error(traceback.format_exc())
@@ -815,7 +815,7 @@ def download_all_episodes(item, channel, first_episode="", preferred_server="vid
                                 new_mirror_itemlist_4 + new_mirror_itemlist_5 + new_mirror_itemlist_6)

         for mirror_item in mirrors_itemlist:
-            logger.info("mirror= " + mirror_item.title)
+            logger.log("mirror= " + mirror_item.title)

             if "(Italiano)" in mirror_item.title:
                 idioma = "(Italiano)"
@@ -836,11 +836,11 @@ def download_all_episodes(item, channel, first_episode="", preferred_server="vid
                 idioma = "(Desconocido)"
                 codigo_idioma = "desconocido"

-            logger.info("filter_language=#" + filter_language + "#, codigo_idioma=#" + codigo_idioma + "#")
+            logger.log("filter_language=#" + filter_language + "#, codigo_idioma=#" + codigo_idioma + "#")
             if filter_language == "" or (filter_language != "" and filter_language == codigo_idioma):
-                logger.info("downloading mirror")
+                logger.log("downloading mirror")
             else:
-                logger.info("language " + codigo_idioma + " filtered, skipping")
+                logger.log("language " + codigo_idioma + " filtered, skipping")
                 continue

             if hasattr(channel, 'play'):
@@ -856,14 +856,14 @@ def download_all_episodes(item, channel, first_episode="", preferred_server="vid

             # Adds it to the download list
             if puedes:
-                logger.info("downloading mirror started...")
+                logger.log("downloading mirror started...")
                 # The highest quality video is the latest
                 # mediaurl = video_urls[len(video_urls) - 1][1]
                 devuelve = downloadbest(video_urls, show_title + " " + episode_title + " " + idioma +
                                         " [" + video_item.server + "]", continuar=False)

                 if devuelve == 0:
-                    logger.info("download ok")
+                    logger.log("download ok")
                     descargado = True
                     break
                 elif devuelve == -1:
@@ -874,14 +874,14 @@ def download_all_episodes(item, channel, first_episode="", preferred_server="vid
                         pass
                     return
                 else:
-                    logger.info("download error, try another mirror")
+                    logger.log("download error, try another mirror")
                     continue

             else:
-                logger.info("downloading mirror not available... trying next")
+                logger.log("downloading mirror not available... trying next")

     if not descargado:
-        logger.info("UNDOWNLOADED EPISODE " + episode_title)
+        logger.log("UNDOWNLOADED EPISODE " + episode_title)


 def episodio_ya_descargado(show_title, episode_title):
@@ -889,9 +889,9 @@ def episodio_ya_descargado(show_title, episode_title):
     ficheros = filetools.listdir(".")

     for fichero in ficheros:
-        # logger.info("fichero="+fichero)
+        # logger.log("fichero="+fichero)
         if fichero.lower().startswith(show_title.lower()) and scrapertools.find_single_match(fichero, "(\d+x\d+)") == episode_title:
-            logger.info("found!")
+            logger.log("found!")
             return True

     return False

@@ -814,7 +814,7 @@ def remove_tags(title):
     @rtype: str
     @return: string without tags
     """
-    logger.info()
+    logger.log()

     title_without_tags = scrapertools.find_single_match(title, r'\[color .+?\](.+)\[\/color\]')

@@ -832,7 +832,7 @@ def remove_smb_credential(path):
     @return: chain without credentials
     @rtype: str
     """
-    logger.info()
+    logger.log()

     if not scrapertools.find_single_match(path, r'(^\w+:\/\/)'):
         return path

@@ -125,11 +125,11 @@ def set_cookies(dict_cookie, clear=True, alfa_s=False):
 def load_cookies(alfa_s=False):
     cookies_lock.acquire()
     if os.path.isfile(cookies_file):
-        if not alfa_s: logger.info("Reading cookies file")
+        if not alfa_s: logger.log("Reading cookies file")
         try:
             cj.load(cookies_file, ignore_discard=True)
         except:
-            if not alfa_s: logger.info("The cookie file exists but is illegible, it is deleted")
+            if not alfa_s: logger.log("The cookie file exists but is illegible, it is deleted")
             os.remove(cookies_file)
     cookies_lock.release()

@@ -137,7 +137,7 @@ load_cookies()

 def save_cookies(alfa_s=False):
     cookies_lock.acquire()
-    if not alfa_s: logger.info("Saving cookies...")
+    if not alfa_s: logger.log("Saving cookies...")
     cj.save(cookies_file, ignore_discard=True)
     cookies_lock.release()

@@ -161,7 +161,7 @@ def random_useragent():


 def show_infobox(info_dict):
-    logger.info()
+    logger.log()
     from textwrap import wrap

     box_items_kodi = {'r_up_corner': u'\u250c',
@@ -186,16 +186,16 @@ def show_infobox(info_dict):



-    width = 60
+    width = 100
     version = '%s: %s' % (config.get_localized_string(20000), __version)
     if config.is_xbmc():
         box = box_items_kodi
     else:
         box = box_items

-    logger.info('%s%s%s' % (box['r_up_corner'], box['fill'] * width, box['l_up_corner']))
-    logger.info('%s%s%s' % (box['center'], version.center(width), box['center']))
-    logger.info('%s%s%s' % (box['r_center'], box['fill'] * width, box['l_center']))
+    logger.log('%s%s%s' % (box['r_up_corner'], box['fill'] * width, box['l_up_corner']))
+    logger.log('%s%s%s' % (box['center'], version.center(width), box['center']))
+    logger.log('%s%s%s' % (box['r_center'], box['fill'] * width, box['l_center']))

     count = 0
     for key, value in info_dict:
@@ -210,19 +210,19 @@ def show_infobox(info_dict):
             for line in text:
                 if len(line) < width:
                     line = line.ljust(width, ' ')
-                logger.info('%s%s%s' % (box['center'], line, box['center']))
+                logger.log('%s%s%s' % (box['center'], line, box['center']))
         else:
-            logger.info('%s%s%s' % (box['center'], text, box['center']))
+            logger.log('%s%s%s' % (box['center'], text, box['center']))
         if count < len(info_dict):
-            logger.info('%s%s%s' % (box['r_center'], box['fill'] * width, box['l_center']))
+            logger.log('%s%s%s' % (box['r_center'], box['fill'] * width, box['l_center']))
         else:
-            logger.info('%s%s%s' % (box['r_dn_corner'], box['fill'] * width, box['l_dn_corner']))
+            logger.log('%s%s%s' % (box['r_dn_corner'], box['fill'] * width, box['l_dn_corner']))
     return



 def downloadpage(url, **opt):
-    # logger.info()
+    # logger.log()
     """
     Open a url and return the data obtained


@@ -298,7 +298,7 @@ class Item(object):
     def tostring(self, separator=", "):
         """
         Generate a text string with the item's data for the log
-        Use: logger.info(item.tostring())
+        Use: logger.log(item.tostring())
         @param separator: string to be used as a separator
         @type separator: str
         """

@@ -11,24 +11,24 @@ from inspect import stack
 try:
     import json
 except:
-    logger.info("json included in the interpreter **NOT** available")
+    logger.log("json included in the interpreter **NOT** available")

     try:
         import simplejson as json
     except:
-        logger.info("simplejson included in the interpreter **NOT** available")
+        logger.log("simplejson included in the interpreter **NOT** available")
         try:
             from lib import simplejson as json
         except:
-            logger.info("simplejson in lib directory **NOT** available")
+            logger.log("simplejson in lib directory **NOT** available")
             logger.error("A valid JSON parser was not found")
             json = None
         else:
-            logger.info("Using simplejson in the lib directory")
+            logger.log("Using simplejson in the lib directory")
     else:
-        logger.info("Using simplejson included in the interpreter")
+        logger.log("Using simplejson included in the interpreter")
 # ~ else:
-# ~     logger.info("Usando json incluido en el interprete")
+# ~     logger.log("Usando json incluido en el interprete")

 import sys
 PY3 = False
@@ -90,7 +90,7 @@ def get_node_from_file(name_file, node, path=None):
     @return: dict with the node to return
     @rtype: dict
     """
-    logger.info()
+    logger.log()
     from platformcode import config
     from core import filetools

@@ -129,7 +129,7 @@ def check_to_backup(data, fname, dict_data):
     @param dict_data: dictionary name
     @type dict_data: dict
     """
-    logger.info()
+    logger.log()

     if not dict_data:
         logger.error("Error loading json from file %s" % fname)
@@ -161,7 +161,7 @@ def update_node(dict_node, name_file, node, path=None, silent=False):
     @return json_data
     @rtype: dict
     """
-    if not silent: logger.info()
+    if not silent: logger.log()

     from platformcode import config
     from core import filetools

@@ -61,7 +61,7 @@ def find_and_set_infoLabels(item):
     # Check if there is a 'code'
     if scraper_result and item.infoLabels['code']:
         # correct code
-        logger.info("Identificador encontrado: %s" % item.infoLabels['code'])
+        logger.log("Identificador encontrado: %s" % item.infoLabels['code'])
         scraper.completar_codigos(item)
         return True
     elif scraper_result:
@@ -71,7 +71,7 @@ def find_and_set_infoLabels(item):
     # Content not found
     msg = config.get_localized_string(60228) % title

-    logger.info(msg)
+    logger.log(msg)
     # Show box with other options:
     if scrapers_disponibles[scraper_actual] in list_opciones_cuadro:
         list_opciones_cuadro.remove(scrapers_disponibles[scraper_actual])
@@ -95,10 +95,10 @@ def find_and_set_infoLabels(item):

         elif index == 1:
             # You have to create a dialog box to enter the data
-            logger.info("Complete information")
+            logger.log("Complete information")
             if cuadro_completar(item):
                 # correct code
-                logger.info("Identifier found: %s" % str(item.infoLabels['code']))
+                logger.log("Identifier found: %s" % str(item.infoLabels['code']))
                 return True
             # raise

@@ -121,7 +121,7 @@ def find_and_set_infoLabels(item):


 def cuadro_completar(item):
-    logger.info()
+    logger.log()

     global dict_default
     dict_default = {}
@@ -234,7 +234,7 @@ def get_nfo(item):
     @rtype: str
     @return:
     """
-    logger.info()
+    logger.log()
     if "infoLabels" in item and "noscrap_id" in item.infoLabels:
         # Create the xml file with the data obtained from the item since there is no active scraper
         info_nfo = '<?xml version="1.0" encoding="UTF-8" standalone="yes" ?>'

@@ -34,7 +34,7 @@ from platformcode import logger
 def printMatches(matches):
     i = 0
     for match in matches:
-        logger.info("%d %s" % (i, match))
+        logger.log("%d %s" % (i, match))
         i = i + 1


@@ -446,7 +446,7 @@ def get_season_and_episode(title):
     except:
         pass

-    logger.info("'" + title + "' -> '" + filename + "'")
+    logger.log("'" + title + "' -> '" + filename + "'")

     return filename


@@ -47,7 +47,7 @@ def find_video_items(item=None, data=None):
     @return: returns the itemlist with the results
     @rtype: list
     """
-    logger.info()
+    logger.log()
     itemlist = []

     # Download the page
@@ -97,7 +97,7 @@ def get_servers_itemlist(itemlist, fnc=None, sort=False):

     # Walk the patterns
     for pattern in server_parameters.get("find_videos", {}).get("patterns", []):
-        logger.info(pattern["pattern"])
+        logger.log(pattern["pattern"])
         # Scroll through the results
         for match in re.compile(pattern["pattern"], re.DOTALL).finditer(
                 "\n".join([item.url.split('|')[0] for item in itemlist if not item.server])):
@@ -144,7 +144,7 @@ def findvideos(data, skip=False):
     return some link. It can also be an integer greater than 1, which would represent the maximum number of links to search.
     :return:
     """
-    logger.info()
+    logger.log()
     devuelve = []
     skip = int(skip)
     servers_list = list(get_servers_list().keys())
@@ -165,7 +165,7 @@ def findvideos(data, skip=False):
             devuelve = devuelve[:skip]
             break
     # if config.get_setting("filter_servers") == False: is_filter_servers = False
-    # logger.info('DEVUELVE: ' + str(devuelve))
+    # logger.log('DEVUELVE: ' + str(devuelve))
     # if not devuelve and is_filter_servers:
     #     platformtools.dialog_ok(config.get_localized_string(60000), config.get_localized_string(60001))
     return devuelve
@@ -194,7 +194,7 @@ def findvideosbyserver(data, serverid):
             value = translate_server_name(server_parameters["name"]) , url, serverid, server_parameters.get("thumbnail", "")
             if value not in devuelve and url not in server_parameters["find_videos"].get("ignore_urls", []):
                 devuelve.append(value)
-                logger.info(msg)
+                logger.log(msg)

     return devuelve

@@ -206,7 +206,7 @@ def guess_server_thumbnail(serverid):


 def get_server_from_url(url):
-    logger.info()
+    logger.log()
     servers_list = list(get_servers_list().keys())

     # Run findvideos on each active server
@@ -224,7 +224,7 @@ def get_server_from_url(url):
         for n, pattern in enumerate(server_parameters["find_videos"].get("patterns", [])):
             msg = "%s\npattern: %s" % (serverid, pattern["pattern"])
             if not "pattern_compiled" in pattern:
-                # logger.info('compiled ' + serverid)
+                # logger.log('compiled ' + serverid)
                 pattern["pattern_compiled"] = re.compile(pattern["pattern"])
                 dict_servers_parameters[serverid]["find_videos"]["patterns"][n]["pattern_compiled"] = pattern["pattern_compiled"]
             # Scroll through the results
@@ -237,7 +237,7 @@ def get_server_from_url(url):
                 msg += "\nurl encontrada: %s" % url
                 value = translate_server_name(server_parameters["name"]), url, serverid, server_parameters.get("thumbnail", "")
                 if url not in server_parameters["find_videos"].get("ignore_urls", []):
-                    logger.info(msg)
+                    logger.log(msg)
                     return value

     return None
@@ -260,7 +260,7 @@ def resolve_video_urls_for_playing(server, url, video_password="", muestra_dialo
     @return: returns the url of the video
     @rtype: list
     """
-    logger.info("Server: %s, Url: %s" % (server, url))
+    logger.log("Server: %s, Url: %s" % (server, url))

     server = server.lower()

@@ -273,7 +273,7 @@ def resolve_video_urls_for_playing(server, url, video_password="", muestra_dialo
     if server == "directo" or server == "local":
         if isinstance(video_password, list):
             return video_password, len(video_password) > 0, "<br/>".join(error_messages)
-        logger.info("Server: %s, url is good" % server)
+        logger.log("Server: %s, url is good" % server)
         video_urls.append(["%s [%s]" % (urlparse.urlparse(url)[2][-4:], config.get_localized_string(30137)), url])

     # Find out the video URL
@@ -304,7 +304,7 @@ def resolve_video_urls_for_playing(server, url, video_password="", muestra_dialo
         priority = int(config.get_setting("resolve_priority"))
         opciones = sorted(opciones, key=lambda x: orden[priority].index(x))

-        logger.info("Available options: %s | %s" % (len(opciones), opciones))
+        logger.log("Available options: %s | %s" % (len(opciones), opciones))
     else:
         logger.error("There is no connector for the server %s" % server)
         error_messages.append(config.get_localized_string(60004) % server)
@@ -313,7 +313,7 @@ def resolve_video_urls_for_playing(server, url, video_password="", muestra_dialo
     # Import the server
     try:
         server_module = __import__('servers.%s' % server, None, None, ["servers.%s" % server])
-        logger.info("Imported server: %s" % server_module)
+        logger.log("Imported server: %s" % server_module)
     except:
         server_module = None
         if muestra_dialogo:
@@ -324,17 +324,17 @@ def resolve_video_urls_for_playing(server, url, video_password="", muestra_dialo

     # If it has a function to see if the video exists, check it now
     if hasattr(server_module, 'test_video_exists'):
-        logger.info("Invoking a %s.test_video_exists" % server)
+        logger.log("Invoking a %s.test_video_exists" % server)
         try:
             video_exists, message = server_module.test_video_exists(page_url=url)

             if not video_exists:
                 error_messages.append(message)
-                logger.info("test_video_exists says video doesn't exist")
+                logger.log("test_video_exists says video doesn't exist")
                 if muestra_dialogo:
                     progreso.close()
             else:
-                logger.info("test_video_exists says the video DOES exist")
+                logger.log("test_video_exists says the video DOES exist")
         except:
             logger.error("Could not verify if the video exists")
             import traceback
@@ -361,7 +361,7 @@ def resolve_video_urls_for_playing(server, url, video_password="", muestra_dialo
         # Free mode
         if opcion == "free":
             try:
-                logger.info("Invoking a %s.get_video_url" % server)
+                logger.log("Invoking a %s.get_video_url" % server)
                 response = serverid.get_video_url(page_url=url, video_password=video_password)
                 video_urls.extend(response)
             except:
@@ -373,7 +373,7 @@ def resolve_video_urls_for_playing(server, url, video_password="", muestra_dialo
         # Premium mode
         else:
             try:
-                logger.info("Invoking a %s.get_video_url" % opcion)
+                logger.log("Invoking a %s.get_video_url" % opcion)
                 response = serverid.get_video_url(page_url=url, premium=True,
                                                   user=config.get_setting("user", server=opcion),
                                                   password=config.get_setting("password", server=opcion),
@@ -483,7 +483,7 @@ def get_server_parameters(server):
     @return: server data
     @rtype: dict
     """
-    # logger.info("server %s" % server)
+    # logger.log("server %s" % server)
     global dict_servers_parameters
     server = server.split('.')[0]
     if not server:
@@ -533,15 +533,15 @@ def get_server_parameters(server):


 # def get_server_json(server_name):
-#     # logger.info("server_name=" + server_name)
+#     # logger.log("server_name=" + server_name)
 #     try:
 #         server_path = filetools.join(config.get_runtime_path(), "servers", server_name + ".json")
 #         if not filetools.exists(server_path):
 #             server_path = filetools.join(config.get_runtime_path(), "servers", "debriders", server_name + ".json")
 #
-#         # logger.info("server_path=" + server_path)
+#         # logger.log("server_path=" + server_path)
 #         server_json = jsontools.load(filetools.read(server_path))
-#         # logger.info("server_json= %s" % server_json)
+#         # logger.log("server_json= %s" % server_json)
 #
 #     except Exception as ex:
 #         template = "An exception of type %s occured. Arguments:\n%r"
@@ -613,7 +613,7 @@ def get_server_setting(name, server, default=None):
         if isinstance(dict_file, dict) and 'settings' in dict_file:
             dict_settings = dict_file['settings']
     except EnvironmentError:
-        logger.info("ERROR when reading the file: %s" % file_settings)
+        logger.log("ERROR when reading the file: %s" % file_settings)

     if not dict_settings or name not in dict_settings:
         # We get controls from the file ../servers/server.json
@@ -627,7 +627,7 @@ def get_server_setting(name, server, default=None):
         dict_file['settings'] = dict_settings
         # We create the file ../settings/channel_data.json
         if not filetools.write(file_settings, jsontools.dump(dict_file)):
-            logger.info("ERROR saving file: %s" % file_settings)
+            logger.log("ERROR saving file: %s" % file_settings)

     # We return the value of the local parameter 'name' if it exists, if default is not returned
     return dict_settings.get(name, default)
@@ -649,7 +649,7 @@ def set_server_setting(name, value, server):
         dict_file = jsontools.load(filetools.read(file_settings))
         dict_settings = dict_file.get('settings', {})
     except EnvironmentError:
-        logger.info("ERROR when reading the file: %s" % file_settings)
+        logger.log("ERROR when reading the file: %s" % file_settings)

     dict_settings[name] = value

@@ -661,7 +661,7 @@ def set_server_setting(name, value, server):

     # We create the file ../settings/channel_data.json
     if not filetools.write(file_settings, jsontools.dump(dict_file)):
-        logger.info("ERROR saving file: %s" % file_settings)
+        logger.log("ERROR saving file: %s" % file_settings)
         return None

     return value
@@ -696,7 +696,7 @@ def get_debriders_list():
         if server.endswith(".json"):
             server_parameters = get_server_parameters(server)
             if server_parameters["active"] == True:
-                logger.info(server_parameters)
+                logger.log(server_parameters)
                 server_list[server.split(".")[0]] = server_parameters
     return server_list

@@ -742,7 +742,7 @@ def check_list_links(itemlist, numero='', timeout=3):
         it = res[0]
         verificacion = res[1]
         it.title = verificacion + ' ' + it.title.strip()
-        logger.info('VERIFICATION= ' + verificacion)
+        logger.log('VERIFICATION= ' + verificacion)
         it.alive = verificacion
     return itemlist

@@ -763,7 +763,7 @@ def check_video_link(item, timeout=3):
         server_module = __import__('servers.%s' % server, None, None, ["servers.%s" % server])
     except:
         server_module = None
-        logger.info("[check_video_link] Cannot import server! %s" % server)
+        logger.log("[check_video_link] Cannot import server! %s" % server)
         return item, NK

     if hasattr(server_module, 'test_video_exists'):
@@ -773,20 +773,20 @@ def check_video_link(item, timeout=3):
         try:
             video_exists, message = server_module.test_video_exists(page_url=url)
             if not video_exists:
-                logger.info("[check_video_link] Does not exist! %s %s %s" % (message, server, url))
+                logger.log("[check_video_link] Does not exist! %s %s %s" % (message, server, url))
                 resultado = KO
             else:
-                logger.info("[check_video_link] check ok %s %s" % (server, url))
+                logger.log("[check_video_link] check ok %s %s" % (server, url))
                 resultado = OK
         except:
-            logger.info("[check_video_link] Can't check now! %s %s" % (server, url))
+            logger.log("[check_video_link] Can't check now! %s %s" % (server, url))
             resultado = NK

         finally:
             httptools.HTTPTOOLS_DEFAULT_DOWNLOAD_TIMEOUT = ant_timeout  # Restore download time
             return item, resultado

-    logger.info("[check_video_link] There is no test_video_exists for server: %s" % server)
+    logger.log("[check_video_link] There is no test_video_exists for server: %s" % server)
     return item, NK

 def translate_server_name(name):

core/tmdb.py (18 changes)
@@ -87,7 +87,7 @@ create_bd()

 # The function name is the name of the decorator and receives the function that decorates.
 def cache_response(fn):
-    logger.info()
+    logger.log()

     # import time
     # start_time = time.time()
@@ -441,7 +441,7 @@ def set_infoLabels_item(item, seekTmdb=True, idioma_busqueda=def_lang, lock=None


 def find_and_set_infoLabels(item):
-    logger.info()
+    logger.log()

     global otmdb_global
     tmdb_result = None
@@ -851,7 +851,7 @@ class Tmdb(object):
             cls.dic_generos[idioma][tipo] = {}
             url = ('http://api.themoviedb.org/3/genre/%s/list?api_key=a1ab8b8669da03637a4b98fa39c39228&language=%s' % (tipo, idioma))
             try:
-                logger.info("[Tmdb.py] Filling in dictionary of genres")
+                logger.log("[Tmdb.py] Filling in dictionary of genres")

                 resultado = cls.get_json(url)
                 if not isinstance(resultado, dict):
@@ -883,7 +883,7 @@ class Tmdb(object):
                '&language=%s' % (self.busqueda_id, source, self.busqueda_idioma))
         buscando = "%s: %s" % (source.capitalize(), self.busqueda_id)

-        logger.info("[Tmdb.py] Searching %s:\n%s" % (buscando, url))
+        logger.log("[Tmdb.py] Searching %s:\n%s" % (buscando, url))
         resultado = self.get_json(url)
         if not isinstance(resultado, dict):
             resultado = ast.literal_eval(resultado.decode('utf-8'))
@@ -925,7 +925,7 @@ class Tmdb(object):
             url += '&year=%s' % self.busqueda_year

         buscando = self.busqueda_texto.capitalize()
-        logger.info("[Tmdb.py] Searching %s on page %s:\n%s" % (buscando, page, url))
+        logger.log("[Tmdb.py] Searching %s on page %s:\n%s" % (buscando, page, url))
         resultado = self.get_json(url)
         if not isinstance(resultado, dict):
             resultado = ast.literal_eval(resultado.decode('utf-8'))
@@ -986,7 +986,7 @@ class Tmdb(object):
         url = ('http://api.themoviedb.org/3/%s?api_key=a1ab8b8669da03637a4b98fa39c39228&%s'
                % (type_search, "&".join(params)))

-        logger.info("[Tmdb.py] Searcing %s:\n%s" % (type_search, url))
+        logger.log("[Tmdb.py] Searcing %s:\n%s" % (type_search, url))
         resultado = self.get_json(url)
         if not isinstance(resultado, dict):
             resultado = ast.literal_eval(resultado.decode('utf-8'))
@@ -1051,7 +1051,7 @@ class Tmdb(object):
         return True

     def get_list_resultados(self, num_result=20):
-        # logger.info("self %s" % str(self))
+        # logger.log("self %s" % str(self))
         res = []

         if num_result <= 0:
@@ -1271,7 +1271,7 @@ class Tmdb(object):
               "&append_to_response=credits" % (self.result["id"], numtemporada, self.busqueda_idioma)

         buscando = "id_Tmdb: " + str(self.result["id"]) + " season: " + str(numtemporada) + "\nURL: " + url
-        logger.info("[Tmdb.py] Searcing " + buscando)
+        logger.log("[Tmdb.py] Searcing " + buscando)
         try:
             self.temporada[numtemporada] = self.get_json(url)
             if not isinstance(self.temporada[numtemporada], dict):
@@ -1460,7 +1460,7 @@ class Tmdb(object):

         items.extend(list(self.get_episodio(ret_infoLabels['season'], episodio).items()))

-        # logger.info("ret_infoLabels" % ret_infoLabels)
+        # logger.log("ret_infoLabels" % ret_infoLabels)

         for k, v in items:
             if not v:

@@ -129,7 +129,7 @@ def token_trakt(item):
|
||||
|
||||
|
||||
def set_trakt_info(item):
|
||||
logger.info()
|
||||
logger.log()
|
||||
import xbmcgui
|
||||
# Envia los datos a trakt
|
||||
try:
|
||||
@@ -140,7 +140,7 @@ def set_trakt_info(item):
|
||||
pass
|
||||
|
||||
def get_trakt_watched(id_type, mediatype, update=False):
|
||||
logger.info()
|
||||
logger.log()
|
||||
|
||||
id_list = []
|
||||
id_dict = dict()
|
||||
@@ -228,7 +228,7 @@ def trakt_check(itemlist):
|
||||
|
||||
|
||||
def get_sync_from_file():
|
||||
logger.info()
|
||||
logger.log()
|
||||
sync_path = os.path.join(config.get_data_path(), 'settings_channels', 'trakt_data.json')
|
||||
trakt_node = {}
|
||||
if os.path.exists(sync_path):
|
||||
@@ -240,7 +240,7 @@ def get_sync_from_file():
|
||||
|
||||
|
||||
def update_trakt_data(mediatype, trakt_data):
|
||||
logger.info()
|
||||
logger.log()
|
||||
|
||||
sync_path = os.path.join(config.get_data_path(), 'settings_channels', 'trakt_data.json')
|
||||
if os.path.exists(sync_path):
|
||||
@@ -250,7 +250,7 @@ def update_trakt_data(mediatype, trakt_data):
|
||||
|
||||
|
||||
def ask_install_script():
|
||||
logger.info()
|
||||
logger.log()
|
||||
|
||||
from platformcode import platformtools
|
||||
|
||||
@@ -264,7 +264,7 @@ def ask_install_script():
|
||||
|
||||
|
||||
def wait_for_update_trakt():
|
||||
logger.info()
|
||||
logger.log()
|
||||
t = Thread(update_all)
|
||||
t.setDaemon(True)
|
||||
t.start()
|
||||
@@ -273,7 +273,7 @@ def wait_for_update_trakt():
|
||||
def update_all():
|
||||
# from core.support import dbg;dbg()
|
||||
from time import sleep
|
||||
logger.info()
|
||||
logger.log()
|
||||
sleep(20)
|
||||
while xbmc.Player().isPlaying():
|
||||
sleep(20)
|
||||
|
||||
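wait_for_update_trakt and update_all above implement a fire-and-forget sync: a daemon thread starts the update, which first sleeps and then defers while playback is active. A condensed sketch of the pattern (the 20-second poll is taken from the hunk; note that threading.Thread's first positional parameter is group, so the target must be passed by keyword):

from threading import Thread
from time import sleep
import xbmc

def wait_for_update(update_fn):
    t = Thread(target=update_fn)   # Thread(update_fn) would bind the 'group' parameter
    t.setDaemon(True)              # daemon: never blocks Kodi shutdown
    t.start()

def update_all_sketch():
    sleep(20)                      # let startup settle first
    while xbmc.Player().isPlaying():
        sleep(20)                  # defer the sync while playback is active
    # ... perform the trakt sync here ...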
core/tvdb.py
@@ -73,8 +73,8 @@ otvdb_global = None


def find_and_set_infoLabels(item):
    logger.info()
    # logger.info("item es %s" % item)
    logger.log()
    # logger.log("item es %s" % item)

    p_dialog = None
    if not item.contentSeason:

@@ -368,7 +368,7 @@ class Tvdb(object):

    @classmethod
    def __check_token(cls):
        # logger.info()
        # logger.log()
        if TOKEN == "":
            cls.__login()
        else:

@@ -383,7 +383,7 @@ class Tvdb(object):

    @staticmethod
    def __login():
        # logger.info()
        # logger.log()
        global TOKEN

        apikey = "106B699FDC04301C"

@@ -413,7 +413,7 @@ class Tvdb(object):

    @classmethod
    def __refresh_token(cls):
        # logger.info()
        # logger.log()
        global TOKEN
        is_success = False

@@ -512,7 +512,7 @@ class Tvdb(object):
            ]
        }
        """
        logger.info()
        logger.log()
        if id_episode and self.episodes.get(id_episode):
            return self.episodes.get(id_episode)

@@ -582,7 +582,7 @@ class Tvdb(object):
            }
        }
        """
        logger.info()
        logger.log()

        try:
            url = HOST + "/series/%s/episodes?page=%s" % (_id, page)

@@ -600,7 +600,7 @@ class Tvdb(object):
        else:
            self.list_episodes[page] = jsontools.load(html)

        # logger.info("dict_html %s" % self.list_episodes)
        # logger.log("dict_html %s" % self.list_episodes)

        return self.list_episodes[page]

@@ -668,7 +668,7 @@ class Tvdb(object):
        """
        if semaforo:
            semaforo.acquire()
        logger.info()
        logger.log()

        url = HOST + "/episodes/%s" % _id

@@ -691,7 +691,7 @@ class Tvdb(object):
            dict_html = jsontools.load(html)
            dict_html = dict_html.pop("data")

            logger.info("dict_html %s" % dict_html)
            logger.log("dict_html %s" % dict_html)
            self.episodes[_id] = dict_html

        if semaforo:

@@ -722,7 +722,7 @@ class Tvdb(object):
            "status": "string"
        }
        """
        logger.info()
        logger.log()

        try:

@@ -820,7 +820,7 @@ class Tvdb(object):
            }
        }
        """
        logger.info()
        logger.log()
        resultado = {}

        url = HOST + "/series/%s" % _id

@@ -879,7 +879,7 @@ class Tvdb(object):
        @rtype: dict

        """
        logger.info()
        logger.log()

        if self.result.get('image_season_%s' % season):
            return self.result['image_season_%s' % season]

@@ -931,7 +931,7 @@ class Tvdb(object):
        @return: dictionary with actors
        @rtype: dict
        """
        logger.info()
        logger.log()

        url = HOST + "/series/%s/actors" % _id
        DEFAULT_HEADERS["Accept-Language"] = lang

@@ -961,7 +961,7 @@ class Tvdb(object):
        @rtype: list
        @return: list of results
        """
        logger.info()
        logger.log()
        list_results = []

        # if we have a result and it has seriesName, we already have the info of the series, it is not necessary to search again

@@ -1102,7 +1102,7 @@ class Tvdb(object):
                #     ret_infoLabels['title'] = v + " " + origen.get('aliases', [''])[0]
                # else:
                #     ret_infoLabels['title'] = v
                # logger.info("el titulo es %s " % ret_infoLabels['title'])
                # logger.log("el titulo es %s " % ret_infoLabels['title'])
                ret_infoLabels['title'] = v

            elif k == 'cast':
@@ -78,7 +78,7 @@ def save_movie(item, silent=False):
    @rtype fallidos: int
    @return: the number of failed items or -1 if all failed
    """
    logger.info()
    logger.log()
    # logger.debug(item.tostring('\n'))
    insertados = 0
    sobreescritos = 0

@@ -144,7 +144,7 @@ def save_movie(item, silent=False):
    if not path:
        # Create folder
        path = filetools.join(MOVIES_PATH, ("%s [%s]" % (base_name, _id)).strip())
        logger.info("Creating movie directory:" + path)
        logger.log("Creating movie directory:" + path)
        if not filetools.mkdir(path):
            logger.debug("Could not create directory")
            return 0, 0, -1, path

@@ -159,7 +159,7 @@ def save_movie(item, silent=False):

    if not nfo_exists:
        # We create .nfo if it doesn't exist
        logger.info("Creating .nfo: " + nfo_path)
        logger.log("Creating .nfo: " + nfo_path)
        head_nfo = scraper.get_nfo(item)

        item_nfo = Item(title=item.contentTitle, channel="videolibrary", action='findvideos',

@@ -182,7 +182,7 @@ def save_movie(item, silent=False):
    if item_nfo and strm_exists:

        if json_exists:
            logger.info("The file exists. Is overwritten")
            logger.log("The file exists. Is overwritten")
            sobreescritos += 1
        else:
            insertados += 1

@@ -209,7 +209,7 @@ def save_movie(item, silent=False):
        item_nfo.library_urls[item.channel] = item.url

    if filetools.write(nfo_path, head_nfo + item_nfo.tojson()):
        # logger.info("FOLDER_MOVIES : %s" % FOLDER_MOVIES)
        # logger.log("FOLDER_MOVIES : %s" % FOLDER_MOVIES)
        # We update the Kodi video library with the movie
        if config.is_xbmc() and config.get_setting("videolibrary_kodi") and not silent:
            from platformcode import xbmc_videolibrary

@@ -238,7 +238,7 @@ def update_renumber_options(item, head_nfo, path):
            json = json_file['TVSHOW_AUTORENUMBER']
            if item.fulltitle in json:
                item.channel_prefs[channel]['TVSHOW_AUTORENUMBER'] = json[item.fulltitle]
                logger.info('UPDATED=\n' + str(item.channel_prefs))
                logger.log('UPDATED=\n' + str(item.channel_prefs))
                filetools.write(tvshow_path, head_nfo + item.tojson())

def add_renumber_options(item, head_nfo, path):

@@ -426,7 +426,7 @@ def save_tvshow(item, episodelist, silent=False):
    @rtype path: str
    @return: serial directory
    """
    logger.info()
    logger.log()
    # logger.debug(item.tostring('\n'))
    path = ""

@@ -483,7 +483,7 @@ def save_tvshow(item, episodelist, silent=False):

    if not path:
        path = filetools.join(TVSHOWS_PATH, ("%s [%s]" % (base_name, _id)).strip())
        logger.info("Creating series directory: " + path)
        logger.log("Creating series directory: " + path)
        try:
            filetools.mkdir(path)
        except OSError as exception:

@@ -493,7 +493,7 @@ def save_tvshow(item, episodelist, silent=False):
    tvshow_path = filetools.join(path, "tvshow.nfo")
    if not filetools.exists(tvshow_path):
        # We create tvshow.nfo, if it does not exist, with the head_nfo, series info and watched episode marks
        logger.info("Creating tvshow.nfo: " + tvshow_path)
        logger.log("Creating tvshow.nfo: " + tvshow_path)
        head_nfo = scraper.get_nfo(item)
        item.infoLabels['mediatype'] = "tvshow"
        item.infoLabels['title'] = item.contentSerieName

@@ -567,11 +567,11 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):
    @rtype fallidos: int
    @return: the number of failed episodes
    """
    logger.info()
    logger.log()
    episodelist = filter_list(episodelist, serie.action, path)
    # No episode list, nothing to save
    if not len(episodelist):
        logger.info("There is no episode list, we go out without creating strm")
        logger.log("There is no episode list, we go out without creating strm")
        return 0, 0, 0

    # process local episodes

@@ -586,7 +586,7 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):
    elif config.get_setting("local_episodes", "videolibrary"):
        done, local_episodes_path = config_local_episodes_path(path, serie)
        if done < 0:
            logger.info("An issue has occurred while configuring local episodes, going out without creating strm")
            logger.log("An issue has occurred while configuring local episodes, going out without creating strm")
            return 0, 0, done
        item_nfo.local_episodes_path = local_episodes_path
        filetools.write(nfo_path, head_nfo + item_nfo.tojson())

@@ -710,7 +710,7 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):

    # No episode list, nothing to save
    if not len(new_episodelist):
        logger.info("There is no episode list, we go out without creating strm")
        logger.log("There is no episode list, we go out without creating strm")
        return 0, 0, 0

    local_episodelist += get_local_content(path)

@@ -742,12 +742,12 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):
        json_path = filetools.join(path, ("%s [%s].json" % (season_episode, e.channel)).lower())

        if season_episode in local_episodelist:
            logger.info('Skipped: Serie ' + serie.contentSerieName + ' ' + season_episode + ' available as local content')
            logger.log('Skipped: Serie ' + serie.contentSerieName + ' ' + season_episode + ' available as local content')
            continue

        # check if the episode has been downloaded
        if filetools.join(path, "%s [downloads].json" % season_episode) in ficheros:
            logger.info('INFO: "%s" episode %s has been downloaded, skipping it' % (serie.contentSerieName, season_episode))
            logger.log('INFO: "%s" episode %s has been downloaded, skipping it' % (serie.contentSerieName, season_episode))
            continue

        strm_exists = strm_path in ficheros

@@ -800,7 +800,7 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):

        if filetools.write(json_path, e.tojson()):
            if not json_exists:
                logger.info("Inserted: %s" % json_path)
                logger.log("Inserted: %s" % json_path)
                insertados += 1
                # We mark episode as unseen
                news_in_playcounts[season_episode] = 0

@@ -811,14 +811,14 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):
                news_in_playcounts[serie.contentSerieName] = 0

            else:
                logger.info("Overwritten: %s" % json_path)
                logger.log("Overwritten: %s" % json_path)
                sobreescritos += 1
        else:
            logger.info("Failed: %s" % json_path)
            logger.log("Failed: %s" % json_path)
            fallidos += 1

    else:
        logger.info("Failed: %s" % json_path)
        logger.log("Failed: %s" % json_path)
        fallidos += 1

    if not silent and p_dialog.iscanceled():

@@ -888,7 +888,7 @@ def save_episodes(path, episodelist, serie, silent=False, overwrite=True):


def config_local_episodes_path(path, item, silent=False):
    logger.info(item)
    logger.log(item)
    from platformcode.xbmc_videolibrary import search_local_path
    local_episodes_path = search_local_path(item)
    if not local_episodes_path:

@@ -900,11 +900,11 @@ def config_local_episodes_path(path, item, silent=False):
        platformtools.dialog_ok(config.get_localized_string(30131), config.get_localized_string(80043))
        local_episodes_path = platformtools.dialog_browse(0, config.get_localized_string(80046))
        if local_episodes_path == '':
            logger.info("User has canceled the dialog")
            logger.log("User has canceled the dialog")
            return -2, local_episodes_path
        elif path in local_episodes_path:
            platformtools.dialog_ok(config.get_localized_string(30131), config.get_localized_string(80045))
            logger.info("Selected folder is the same of the TV show one")
            logger.log("Selected folder is the same of the TV show one")
            return -2, local_episodes_path

    if local_episodes_path:

@@ -919,7 +919,7 @@ def config_local_episodes_path(path, item, silent=False):


def process_local_episodes(local_episodes_path, path):
    logger.info()
    logger.log()

    sub_extensions = ['.srt', '.sub', '.sbv', '.ass', '.idx', '.ssa', '.smi']
    artwork_extensions = ['.jpg', '.jpeg', '.png']

@@ -958,7 +958,7 @@ def process_local_episodes(local_episodes_path, path):


def get_local_content(path):
    logger.info()
    logger.log()

    local_episodelist = []
    for root, folders, files in filetools.walk(path):

@@ -987,7 +987,7 @@ def add_movie(item):
    @type item: item
    @param item: item to be saved.
    """
    logger.info()
    logger.log()
    from platformcode.launcher import set_search_temp; set_search_temp(item)

    # To disambiguate titles, TMDB is made to ask for the exact desired title

@@ -1034,7 +1034,7 @@ def add_tvshow(item, channel=None):
    @param channel: channel from which the series will be saved. By default, item.from_channel or item.channel is used.

    """
    logger.info("show=#" + item.show + "#")
    logger.log("show=#" + item.show + "#")
    from platformcode.launcher import set_search_temp; set_search_temp(item)

    if item.channel == "downloads":

@@ -1105,7 +1105,7 @@ def add_tvshow(item, channel=None):

    else:
        platformtools.dialog_ok(config.get_localized_string(30131), config.get_localized_string(60070) % item.show)
        logger.info("%s episodes of series %s have been added to the video library" % (insertados, item.show))
        logger.log("%s episodes of series %s have been added to the video library" % (insertados, item.show))
        if config.is_xbmc():
            if config.get_setting("sync_trakt_new_tvshow", "videolibrary"):
                import xbmc

@@ -1121,7 +1121,7 @@ def add_tvshow(item, channel=None):


def emergency_urls(item, channel=None, path=None, headers={}):
    logger.info()
    logger.log()
    import re
    from servers import torrent
    try:
@@ -17,8 +17,8 @@ from core import filetools

class ziptools(object):
    def extract(self, file, dir, folder_to_extract="", overwrite_question=False, backup=False):
        logger.info("file= %s" % file)
        logger.info("dir= %s" % dir)
        logger.log("file= %s" % file)
        logger.log("dir= %s" % dir)

        if not dir.endswith(':') and not filetools.exists(dir):
            filetools.mkdir(dir)

@@ -30,13 +30,13 @@ class ziptools(object):

        for nameo in zf.namelist():
            name = nameo.replace(':', '_').replace('<', '_').replace('>', '_').replace('|', '_').replace('"', '_').replace('?', '_').replace('*', '_')
            logger.info("name=%s" % nameo)
            logger.log("name=%s" % nameo)
            if not name.endswith('/'):
                logger.info("it's not a directory")
                logger.log("it's not a directory")
                try:
                    (path, filename) = filetools.split(filetools.join(dir, name))
                    logger.info("path=%s" % path)
                    logger.info("name=%s" % name)
                    logger.log("path=%s" % path)
                    logger.log("name=%s" % name)
                    if folder_to_extract:
                        if path != filetools.join(dir, folder_to_extract):
                            break

@@ -49,7 +49,7 @@ class ziptools(object):

                else:
                    outfilename = filetools.join(dir, name)
                    logger.info("outfilename=%s" % outfilename)
                    logger.log("outfilename=%s" % outfilename)
                    try:
                        if filetools.exists(outfilename) and overwrite_question:
                            from platformcode import platformtools

@@ -74,7 +74,7 @@ class ziptools(object):
        try:
            zf.close()
        except:
            logger.info("Error closing .zip " + file)
            logger.log("Error closing .zip " + file)

    def _createstructure(self, file, dir):
        self._makedirs(self._listdirs(file), dir)
@@ -9,7 +9,7 @@ import sys
import xbmc
from platformcode import config, logger

logger.info("init...")
logger.log("init...")

librerias = xbmc.translatePath(os.path.join(config.get_runtime_path(), 'lib'))
sys.path.insert(0, librerias)
@@ -27,7 +27,7 @@ class ChromeOSImage:
    """

    def __init__(self, imgpath):
        logger.info('Image Path: ' + imgpath)
        logger.log('Image Path: ' + imgpath)
        """Prepares the image"""
        self.imgpath = imgpath
        self.bstream = self.get_bstream(imgpath)

@@ -59,7 +59,7 @@ class ChromeOSImage:
        self.seek_stream(entries_start * lba_size)

        if not calcsize(part_format) == entry_size:
            logger.info('Partition table entries are not 128 bytes long')
            logger.log('Partition table entries are not 128 bytes long')
            return 0

        for index in range(1, entries_num + 1):  # pylint: disable=unused-variable

@@ -71,7 +71,7 @@ class ChromeOSImage:
                break

        if not offset:
            logger.info('Failed to calculate losetup offset.')
            logger.log('Failed to calculate losetup offset.')
            return 0

        return offset

@@ -93,7 +93,7 @@ class ChromeOSImage:
        while True:
            chunk2 = self.read_stream(chunksize)
            if not chunk2:
                logger.info('File %s not found in the ChromeOS image' % filename)
                logger.log('File %s not found in the ChromeOS image' % filename)
                return False

            chunk = chunk1 + chunk2
@@ -25,7 +25,7 @@ intervenido_sucuri = 'Access Denied - Sucuri Website Firewall'


def update_title(item):
    logger.info()
    logger.log()
    from core import scraper, support


@@ -41,7 +41,7 @@ def update_title(item):
    The channel must add a method to be able to receive the call from Kodi / Alfa, and be able to call this method:

    def actualizar_titulos(item):
        logger.info()
        logger.log()
        itemlist = []
        from lib import generictools
        from platformcode import launcher

@@ -205,7 +205,7 @@ def update_title(item):


def refresh_screen(item):
    logger.info()
    logger.log()

    """
    #### Kodi 18 compatibility ####

@@ -239,7 +239,7 @@ def refresh_screen(item):


def post_tmdb_listado(item, itemlist):
    logger.info()
    logger.log()
    itemlist_fo = []

    """

@@ -484,7 +484,7 @@ def post_tmdb_listado(item, itemlist):


def post_tmdb_seasons(item, itemlist):
    logger.info()
    logger.log()

    """

@@ -644,7 +644,7 @@ def post_tmdb_seasons(item, itemlist):


def post_tmdb_episodios(item, itemlist):
    logger.info()
    logger.log()
    itemlist_fo = []

    """

@@ -995,7 +995,7 @@ def post_tmdb_episodios(item, itemlist):


def post_tmdb_findvideos(item, itemlist):
    logger.info()
    logger.log()

    """

@@ -1215,7 +1215,7 @@ def post_tmdb_findvideos(item, itemlist):


def get_field_from_kodi_DB(item, from_fields='*', files='file'):
    logger.info()
    logger.log()
    """

    Call to read from the Kodi DB the input fields received (from_fields, by default "*") of the video indicated in Item

@@ -1293,7 +1293,7 @@ def get_field_from_kodi_DB(item, from_fields='*', files='file'):


def fail_over_newpct1(item, patron, patron2=None, timeout=None):
    logger.info()
    logger.log()
    import ast

    """

@@ -1494,7 +1494,7 @@ def fail_over_newpct1(item, patron, patron2=None, timeout=None):


def web_intervenida(item, data, desactivar=True):
    logger.info()
    logger.log()

    """

@@ -1577,7 +1577,7 @@ def web_intervenida(item, data, desactivar=True):


def regenerate_clones():
    logger.info()
    logger.log()
    import json
    from core import videolibrarytools

@@ -1591,7 +1591,7 @@ def regenerate_clones():
    # Find the paths where to leave the control .json file, and the Video Library
    json_path = filetools.exists(filetools.join(config.get_runtime_path(), 'verify_cached_torrents.json'))
    if json_path:
        logger.info('Previously repaired video library: WE ARE GOING')
        logger.log('Previously repaired video library: WE ARE GOING')
        return False
    json_path = filetools.join(config.get_runtime_path(), 'verify_cached_torrents.json')
    filetools.write(json_path, json.dumps({"CINE_verify": True}))  # Prevents another simultaneous process from being launched

@@ -1631,7 +1631,7 @@ def regenerate_clones():

        # Delete the Tvshow.nfo files and check if the .nfo has more than one channel and one is clone Newpct1
        for file in files:
            # logger.info('file - nfos: ' + file)
            # logger.log('file - nfos: ' + file)
            if 'tvshow.nfo' in file:
                file_path = filetools.join(root, 'tvshow.nfo')
                filetools.remove(file_path)

@@ -1697,7 +1697,7 @@ def regenerate_clones():
        for file in files:
            file_path = filetools.join(root, file)
            if '.json' in file:
                logger.info('** file: ' + file)
                logger.log('** file: ' + file)
                canal_json = scrapertools.find_single_match(file, r'\[(\w+)\].json')
                if canal_json not in nfo.library_urls:
                    filetools.remove(file_path)  # the .json is a zombie, delete it

@@ -1740,7 +1740,7 @@ def regenerate_clones():


def dejuice(data):
    logger.info()
    logger.log()
    # Method to de-obfuscate JuicyCodes data

    import base64
@@ -45,7 +45,7 @@ class Client(object):
        t = Thread(target=self._auto_shutdown)
        t.setDaemon(True)
        t.start()
        logger.info("MEGA Server Started")
        logger.log("MEGA Server Started")

    def _auto_shutdown(self):
        while self.running:

@@ -75,7 +75,7 @@ class Client(object):
    def stop(self):
        self.running = False
        self._server.stop()
        logger.info("MEGA Server Stopped")
        logger.log("MEGA Server Stopped")

    def get_play_list(self):
        if len(self.files) > 1:

@@ -103,7 +103,7 @@ class Client(object):
                return files

            except:
                logger.info(traceback.format_exc())
                logger.log(traceback.format_exc())
                pass

        return files
@@ -14,7 +14,7 @@ remote = None


def parse_url(url):
    # logger.info("Url: %s" % url)
    # logger.log("Url: %s" % url)
    url = url.strip()
    patron = "^smb://(?:([^;\n]+);)?(?:([^:@\n]+)[:|@])?(?:([^@\n]+)@)?([^/]+)/([^/\n]+)([/]?.*?)$"
    domain, user, password, server_name, share_name, path = re.compile(patron, re.DOTALL).match(url).groups()

@@ -27,7 +27,7 @@ def parse_url(url):
    if path.endswith("/"): path = path[:-1]
    if not path: path = "/"

    # logger.info("Dominio: '%s' |Usuario: '%s' | Password: '%s' | Servidor: '%s' | IP: '%s' | Share Name: '%s' | Path: '%s'" % (domain, user, password, server_name, server_ip, share_name, path))
    # logger.log("Dominio: '%s' |Usuario: '%s' | Password: '%s' | Servidor: '%s' | IP: '%s' | Share Name: '%s' | Path: '%s'" % (domain, user, password, server_name, server_ip, share_name, path))
    return server_name, server_ip, share_name, unicode(path, "utf8"), user, password, domain


@@ -46,7 +46,7 @@ def get_server_name_ip(server):


def connect(url):
    # logger.info("Url: %s" % url)
    # logger.log("Url: %s" % url)
    global remote
    server_name, server_ip, share_name, path, user, password, domain = parse_url(url)

@@ -63,7 +63,7 @@ def connect(url):


def listdir(url):
    logger.info("Url: %s" % url)
    logger.log("Url: %s" % url)
    remote, share_name, path = connect(url)
    try:
        files = [f.filename for f in remote.listPath(share_name, path) if not f.filename in [".", ".."]]

@@ -73,7 +73,7 @@ def listdir(url):


def walk(url, topdown=True, onerror=None):
    logger.info("Url: %s" % url)
    logger.log("Url: %s" % url)
    remote, share_name, path = connect(url)

    try:

@@ -103,7 +103,7 @@ def walk(url, topdown=True, onerror=None):


def get_attributes(url):
    logger.info("Url: %s" % url)
    logger.log("Url: %s" % url)
    remote, share_name, path = connect(url)
    try:
        return remote.getAttributes(share_name, path)

@@ -112,7 +112,7 @@ def get_attributes(url):


def mkdir(url):
    logger.info("Url: %s" % url)
    logger.log("Url: %s" % url)
    remote, share_name, path = connect(url)
    try:
        remote.createDirectory(share_name, path)

@@ -121,12 +121,12 @@ def mkdir(url):


def smb_open(url, mode):
    logger.info("Url: %s" % url)
    logger.log("Url: %s" % url)
    return SMBFile(url, mode)


def isfile(url):
    logger.info("Url: %s" % url)
    logger.log("Url: %s" % url)
    remote, share_name, path = connect(url)
    try:
        files = [f.filename for f in remote.listPath(share_name, os.path.dirname(path)) if not f.isDirectory]

@@ -136,7 +136,7 @@ def isfile(url):


def isdir(url):
    logger.info("Url: %s" % url)
    logger.log("Url: %s" % url)
    remote, share_name, path = connect(url)
    try:
        folders = [f.filename for f in remote.listPath(share_name, os.path.dirname(path)) if f.isDirectory]

@@ -146,7 +146,7 @@ def isdir(url):


def exists(url):
    logger.info("Url: %s" % url)
    logger.log("Url: %s" % url)
    remote, share_name, path = connect(url)
    try:
        files = [f.filename for f in remote.listPath(share_name, os.path.dirname(path))]

@@ -156,7 +156,7 @@ def exists(url):


def remove(url):
    logger.info("Url: %s" % url)
    logger.log("Url: %s" % url)
    remote, share_name, path = connect(url)
    try:
        remote.deleteFiles(share_name, path)

@@ -165,7 +165,7 @@ def remove(url):


def rmdir(url):
    logger.info("Url: %s" % url)
    logger.log("Url: %s" % url)
    remote, share_name, path = connect(url)
    try:
        remote.deleteDirectory(share_name, path)

@@ -174,7 +174,7 @@ def rmdir(url):


def rename(url, new_name):
    logger.info("Url: %s" % url)
    logger.log("Url: %s" % url)
    remote, share_name, path = connect(url)
    _, _, _, new_name, _, _, _ = parse_url(new_name)
    try:
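For reference, parse_url's regex decomposes an smb:// URL into domain, credentials, host, share and path in one match; a small self-contained demonstration with made-up values:

import re

patron = "^smb://(?:([^;\n]+);)?(?:([^:@\n]+)[:|@])?(?:([^@\n]+)@)?([^/]+)/([^/\n]+)([/]?.*?)$"
url = 'smb://WORKGROUP;user:secret@nas/media/folder/file.mkv'
groups = re.compile(patron, re.DOTALL).match(url).groups()
# -> ('WORKGROUP', 'user', 'secret', 'nas', 'media', '/folder/file.mkv')
print(groups)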
@@ -1,16 +1,15 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

try:
    from urllib.parse import urlsplit, urlparse, parse_qs, urljoin
except:
import os, re, sys, json, time

if sys.version_info[0] >= 3:
    from urllib.parse import urlsplit, urlparse, parse_qs, urljoin, urlencode
    from urllib.request import urlopen
else:
    from urllib import urlencode, urlopen
    from urlparse import urlsplit, urlparse, parse_qs, urljoin

import json
import os
import re
import time
import urllib
from base64 import b64decode

from core import httptools, scrapertools
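This import rewrite is the heart of the "fix unshortenit kodi 19" part of the commit: Kodi 19 runs Python 3, where urlencode and urlopen no longer live in the flat urllib module, so every remaining urllib.urlencode(...) call raised AttributeError. The version-guarded alias keeps a single call site working on both interpreters; the same idiom, shown standalone:

import sys

if sys.version_info[0] >= 3:
    from urllib.parse import urlencode      # Py3 location
    from urllib.request import urlopen
else:
    from urllib import urlencode, urlopen   # Py2 flat layout

params = urlencode({'id': '42'})            # works on either interpreter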
@@ -61,17 +60,13 @@ class UnshortenIt(object):
            return uri, "No domain found in URI!"
        had_google_outbound, uri = self._clear_google_outbound_proxy(uri)

        if re.search(self._adfly_regex, domain,
                     re.IGNORECASE) or type == 'adfly':
        if re.search(self._adfly_regex, domain, re.IGNORECASE) or type == 'adfly':
            uri, code = self._unshorten_adfly(uri)
        if re.search(self._adfocus_regex, domain,
                     re.IGNORECASE) or type == 'adfocus':
        if re.search(self._adfocus_regex, domain, re.IGNORECASE) or type == 'adfocus':
            uri, code = self._unshorten_adfocus(uri)
        if re.search(self._linkbucks_regex, domain,
                     re.IGNORECASE) or type == 'linkbucks':
        if re.search(self._linkbucks_regex, domain, re.IGNORECASE) or type == 'linkbucks':
            uri, code = self._unshorten_linkbucks(uri)
        if re.search(self._lnxlu_regex, domain,
                     re.IGNORECASE) or type == 'lnxlu':
        if re.search(self._lnxlu_regex, domain, re.IGNORECASE) or type == 'lnxlu':
            uri, code = self._unshorten_lnxlu(uri)
        if re.search(self._shrink_service_regex, domain, re.IGNORECASE):
            uri, code = self._unshorten_shrink_service(uri)

@@ -99,7 +94,7 @@ class UnshortenIt(object):
            if oldUri == uri:
                break

        logger.info(uri)
        logger.log(uri)

        return uri, code

@@ -368,7 +363,7 @@ class UnshortenIt(object):
            if len(code) > 0:
                payload = {'click': code[0]}
                r = httptools.downloadpage(
                    'http://lnx.lu?' + urllib.urlencode(payload),
                    'http://lnx.lu?' + urlencode(payload),
                    timeout=self._timeout)
                return r.url, r.code
            else:

@@ -400,7 +395,7 @@ class UnshortenIt(object):
                payload = {'adSessionId': session_id, 'callback': 'c'}
                r = httptools.downloadpage(
                    'http://sh.st/shortest-url/end-adsession?' +
                    urllib.urlencode(payload),
                    urlencode(payload),
                    headers=http_header,
                    timeout=self._timeout)
                response = r.data[6:-2].decode('utf-8')

@@ -519,7 +514,7 @@ class UnshortenIt(object):
        else:
            if 'sb/' in uri or 'akv/' in uri or 'wss/' in uri or 'wsd/' in uri:
                import datetime, hashlib
                ip = urllib.urlopen('https://api.ipify.org/').read()
                ip = urlopen('https://api.ipify.org/').read()
                day = datetime.date.today().strftime('%Y%m%d')
                headers = {
                    "Cookie": hashlib.md5(ip+day).hexdigest() + "=1"

@@ -531,12 +526,12 @@ class UnshortenIt(object):
            r = httptools.downloadpage(uri, timeout=self._timeout, headers=headers, follow_redirects=False)
            if 'Wait 1 hour' in r.data:
                uri = ''
                logger.info('IP bannato da vcrypt, aspetta un ora')
                logger.log('IP bannato da vcrypt, aspetta un ora')
            else:
                prev_uri = uri
                uri = r.headers['location']
                if uri == prev_uri:
                    logger.info('Use Cloudscraper')
                    logger.log('Use Cloudscraper')
                    uri = httptools.downloadpage(uri, timeout=self._timeout, headers=headers, follow_redirects=False, cf=True).headers['location']

        if "4snip" in uri:

@@ -593,7 +588,7 @@ class UnshortenIt(object):
        r = httptools.downloadpage(uri, follow_redirect=True, timeout=self._timeout, cookies=False)
        if 'get/' in r.url:
            uri = 'https://linkhub.icu/view/' + re.search('\.\./view/([^"]+)', r.data).group(1)
            logger.info(uri)
            logger.log(uri)
            r = httptools.downloadpage(uri, follow_redirect=True, timeout=self._timeout, cookies=False)
            uri = re.search('<div id="text-url".*\n\s+<a href="([^"]+)', r.data).group(0)
        return uri, r.code

@@ -641,7 +636,7 @@ class UnshortenIt(object):
        try:
            id = uri.split('/')[-2]
            reqUrl = 'https://stayonline.pro/ajax/linkView.php'
            p = urllib.urlencode({"id": id})
            p = urlencode({"id": id})
            r = httptools.downloadpage(reqUrl, post=p)
            data = r.data
            try:

@@ -683,7 +678,7 @@ def findlinks(text):
    regex = '(?:https?://(?:[\w\d]+\.)?)?(?:' + regex + ')/[a-zA-Z0-9_=/]+'
    for match in re.findall(regex, text):
        matches.append(match)
    logger.info('matches=' + str(matches))
    logger.log('matches=' + str(matches))
    if len(matches) == 1:
        text += '\n' + unshorten(matches[0])[0]
    elif matches:
@@ -214,23 +214,23 @@ def get_setting(name, channel="", server="", default=None):

    # Specific channel setting
    if channel:
        # logger.info("get_setting reading channel setting '"+name+"' from channel json")
        # logger.log("get_setting reading channel setting '"+name+"' from channel json")
        from core import channeltools
        value = channeltools.get_channel_setting(name, channel, default)
        # logger.info("get_setting -> '"+repr(value)+"'")
        # logger.log("get_setting -> '"+repr(value)+"'")
        return value

    # Specific server setting
    elif server:
        # logger.info("get_setting reading server setting '"+name+"' from server json")
        # logger.log("get_setting reading server setting '"+name+"' from server json")
        from core import servertools
        value = servertools.get_server_setting(name, server, default)
        # logger.info("get_setting -> '"+repr(value)+"'")
        # logger.log("get_setting -> '"+repr(value)+"'")
        return value

    # Global setting
    else:
        # logger.info("get_setting reading main setting '"+name+"'")
        # logger.log("get_setting reading main setting '"+name+"'")
        value = __settings__.getSetting(name)
        if not value:
            return default
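Illustrative calls for the three lookup paths above; 'last_search' appears elsewhere in this commit, while the channel and server setting names here are hypothetical:

from platformcode import config

last = config.get_setting('last_search')                           # global XBMC setting
quality = config.get_setting('quality', channel='search')          # per-channel json
timeout = config.get_setting('timeout', server='mega', default=5)  # per-server json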
@@ -22,17 +22,17 @@ from platformcode import config, logger
# Download a file and start playing while downloading
def download_and_play(url, file_name, download_path):
    # Start thread
    logger.info("Active threads " + str(threading.active_count()))
    logger.info("" + repr(threading.enumerate()))
    logger.info("Starting download thread...")
    logger.log("Active threads " + str(threading.active_count()))
    logger.log("" + repr(threading.enumerate()))
    logger.log("Starting download thread...")
    download_thread = DownloadThread(url, file_name, download_path)
    download_thread.start()
    logger.info("Download thread started")
    logger.info("Active threads " + str(threading.active_count()))
    logger.info("" + repr(threading.enumerate()))
    logger.log("Download thread started")
    logger.log("Active threads " + str(threading.active_count()))
    logger.log("" + repr(threading.enumerate()))

    # Wait
    logger.info("Waiting...")
    logger.log("Waiting...")

    while True:
        cancelled = False

@@ -53,7 +53,7 @@ def download_and_play(url, file_name, download_path):

    dialog.close()

    logger.info("End of waiting")
    logger.log("End of waiting")

    # Launch the player
    player = CustomPlayer()

@@ -61,66 +61,66 @@ def download_and_play(url, file_name, download_path):
        player.PlayStream(download_thread.get_file_name())

        # End of playback
        logger.info("End of playback")
        logger.log("End of playback")

        if player.is_stopped():
            logger.info("Terminated by user")
            logger.log("Terminated by user")
            break
        else:
            if not download_thread.isAlive():
                logger.info("Download has finished")
                logger.log("Download has finished")
                break
            else:
                logger.info("Continua la descarga")
                logger.log("Continua la descarga")

    # When the player finishes, stop the download if it is still running
    logger.info("Download thread alive=" + str(download_thread.isAlive()))
    logger.log("Download thread alive=" + str(download_thread.isAlive()))
    if download_thread.isAlive():
        logger.info("Killing download thread")
        logger.log("Killing download thread")
        download_thread.force_stop()


class CustomPlayer(xbmc.Player):
    def __init__(self, *args, **kwargs):
        logger.info()
        logger.log()
        self.actualtime = 0
        self.totaltime = 0
        self.stopped = False
        xbmc.Player.__init__(self)

    def PlayStream(self, url):
        logger.info("url=" + url)
        logger.log("url=" + url)
        self.play(url)
        self.actualtime = 0
        self.url = url
        while self.isPlaying():
            self.actualtime = self.getTime()
            self.totaltime = self.getTotalTime()
            logger.info("actualtime=" + str(self.actualtime) + " totaltime=" + str(self.totaltime))
            logger.log("actualtime=" + str(self.actualtime) + " totaltime=" + str(self.totaltime))
            xbmc.sleep(3000)

    def set_download_thread(self, download_thread):
        logger.info()
        logger.log()
        self.download_thread = download_thread

    def force_stop_download_thread(self):
        logger.info()
        logger.log()

        if self.download_thread.isAlive():
            logger.info("Killing download thread")
            logger.log("Killing download thread")
            self.download_thread.force_stop()

        # while self.download_thread.isAlive():
        #     xbmc.sleep(1000)

    def onPlayBackStarted(self):
        logger.info("PLAYBACK STARTED")
        logger.log("PLAYBACK STARTED")

    def onPlayBackEnded(self):
        logger.info("PLAYBACK ENDED")
        logger.log("PLAYBACK ENDED")

    def onPlayBackStopped(self):
        logger.info("PLAYBACK STOPPED")
        logger.log("PLAYBACK STOPPED")
        self.stopped = True
        self.force_stop_download_thread()

@@ -131,7 +131,7 @@ class CustomPlayer(xbmc.Player):
# Download in background
class DownloadThread(threading.Thread):
    def __init__(self, url, file_name, download_path):
        # logger.info(repr(file))
        # logger.log(repr(file))
        self.url = url
        self.download_path = download_path
        self.file_name = os.path.join(download_path, file_name)

@@ -148,16 +148,16 @@ class DownloadThread(threading.Thread):
        threading.Thread.__init__(self)

    def run(self):
        logger.info("Download starts...")
        logger.log("Download starts...")

        if "megacrypter.com" in self.url:
            self.download_file_megacrypter()
        else:
            self.download_file()
        logger.info("Download ends")
        logger.log("Download ends")

    def force_stop(self):
        logger.info()
        logger.log()
        force_stop_file = open(self.force_stop_file_name, "w")
        force_stop_file.write("0")
        force_stop_file.close()

@@ -181,38 +181,38 @@ class DownloadThread(threading.Thread):
        return self.total_size

    def download_file_megacrypter(self):
        logger.info()
        logger.log()

        comando = "./megacrypter.sh"
        logger.info("command= " + comando)
        logger.log("command= " + comando)

        oldcwd = os.getcwd()
        logger.info("oldcwd= " + oldcwd)
        logger.log("oldcwd= " + oldcwd)

        cwd = os.path.join(config.get_runtime_path(), "tools")
        logger.info("cwd= " + cwd)
        logger.log("cwd= " + cwd)
        os.chdir(cwd)
        logger.info("directory changed to= " + os.getcwd())
        logger.log("directory changed to= " + os.getcwd())

        logger.info("destination= " + self.download_path)
        logger.log("destination= " + self.download_path)

        os.system(comando + " '" + self.url + "' \"" + self.download_path + "\"")
        # p = subprocess.Popen([comando , self.url , self.download_path], cwd=cwd, stdout=subprocess.PIPE , stderr=subprocess.PIPE )
        # out, err = p.communicate()
        # logger.info("DownloadThread.download_file out="+out)
        # logger.log("DownloadThread.download_file out="+out)

        os.chdir(oldcwd)

    def download_file(self):
        logger.info("Direct download")
        logger.log("Direct download")

        headers = []

        # Ensures that the file can be created
        logger.info("filename= " + self.file_name)
        logger.log("filename= " + self.file_name)
        self.file_name = xbmc.makeLegalFilename(self.file_name)
        logger.info("filename= " + self.file_name)
        logger.info("url= " + self.url)
        logger.log("filename= " + self.file_name)
        logger.log("url= " + self.url)

        # Create the file
        existSize = 0

@@ -228,13 +228,13 @@ class DownloadThread(threading.Thread):
            additional_headers = [additional_headers]

        for additional_header in additional_headers:
            logger.info("additional_header: " + additional_header)
            logger.log("additional_header: " + additional_header)
            name = re.findall("(.*?)=.*?", additional_header)[0]
            value = urllib.parse.unquote_plus(re.findall(".*?=(.*?)$", additional_header)[0])
            headers.append([name, value])

        self.url = self.url.split("|")[0]
        logger.info("url= " + self.url)
        logger.log("url= " + self.url)

        # Socket timeout set to 60 seconds
        socket.setdefaulttimeout(60)

@@ -243,7 +243,7 @@ class DownloadThread(threading.Thread):
        h = urllib.request.HTTPHandler(debuglevel=0)
        request = urllib.request.Request(self.url)
        for header in headers:
            logger.info("Header= " + header[0] + ": " + header[1])
            logger.log("Header= " + header[0] + ": " + header[1])
            request.add_header(header[0], header[1])

        # Send the request

@@ -272,18 +272,18 @@ class DownloadThread(threading.Thread):

        self.total_size = int(float(totalfichero) / float(1024 * 1024))

        logger.info("Content-Length=%s" % totalfichero)
        logger.log("Content-Length=%s" % totalfichero)
        blocksize = 100 * 1024

        bloqueleido = connexion.read(blocksize)
        logger.info("Starting file download, blocked= %s" % len(bloqueleido))
        logger.log("Starting file download, blocked= %s" % len(bloqueleido))

        maxreintentos = 10

        while len(bloqueleido) > 0:
            try:
                if os.path.exists(self.force_stop_file_name):
                    logger.info("Force_stop file detected, download is interrupted")
                    logger.log("Force_stop file detected, download is interrupted")
                    f.close()

                    xbmc.executebuiltin("XBMC.Notification(%s,%s,300)" % (config.get_localized_string(60319), config.get_localized_string(60320)))

@@ -297,7 +297,7 @@ class DownloadThread(threading.Thread):
                # except:
                f.write(bloqueleido)
                grabado = grabado + len(bloqueleido)
                logger.info("grabado=%d de %d" % (grabado, totalfichero))
                logger.log("grabado=%d de %d" % (grabado, totalfichero))
                percent = int(float(grabado) * 100 / float(totalfichero))
                self.progress = percent
                totalmb = float(float(totalfichero) / (1024 * 1024))

@@ -323,7 +323,7 @@ class DownloadThread(threading.Thread):
            except:
                import sys
                reintentos = reintentos + 1
                logger.info("ERROR in block download, retry %d" % reintentos)
                logger.log("ERROR in block download, retry %d" % reintentos)
                for line in sys.exc_info():
                    logger.error("%s" % line)
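force_stop and the download loop above coordinate through a sentinel file rather than a shared flag: the player side writes the file, and run's read loop checks for it between blocks. The mechanism in miniature, with a placeholder path:

import os

SENTINEL = '/tmp/force_stop'            # placeholder; the real path is per-download

def request_stop():
    with open(SENTINEL, 'w') as f:      # what force_stop() does
        f.write('0')

def should_stop():
    return os.path.exists(SENTINEL)     # polled once per 100 KiB block in run()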
@@ -344,31 +344,31 @@ def list_env(environment={}):
    if environment['debug'] == 'False':
        logger.log_enable(True)

    logger.info(sep)
    logger.info('KoD environment variables: ' + environment['addon_version'] + ' Debug: ' + environment['debug'])
    logger.info(sep)
    logger.log(sep)
    logger.log('KoD environment variables: ' + environment['addon_version'] + ' Debug: ' + environment['debug'])
    logger.log(sep)

    logger.info(environment['os_name'] + ' ' + environment['prod_model'] + ' ' +
    logger.log(environment['os_name'] + ' ' + environment['prod_model'] + ' ' +
                environment['os_release'] + ' ' + environment['machine'] + ' ' +
                environment['architecture'] + ' ' + environment['language'])

    logger.info('Kodi ' + environment['num_version'] + ', Vídeo: ' +
    logger.log('Kodi ' + environment['num_version'] + ', Vídeo: ' +
               environment['video_db'] + ', Python ' + environment['python_version'])

    if environment['cpu_usage']:
        logger.info('CPU: ' + environment['cpu_usage'])
        logger.log('CPU: ' + environment['cpu_usage'])

    if environment['mem_total'] or environment['mem_free']:
        logger.info('Memory: Total: ' + environment['mem_total'] + ' MB | Disp.: ' +
        logger.log('Memory: Total: ' + environment['mem_total'] + ' MB | Disp.: ' +
                   environment['mem_free'] + ' MB | Buffers: ' +
                   str(int(environment['kodi_buffer']) * 3) + ' MB | Buffermode: ' +
                   environment['kodi_bmode'] + ' | Readfactor: ' +
                   environment['kodi_rfactor'])

    logger.info('Userdata: ' + environment['userdata_path'] + ' - Free: ' +
    logger.log('Userdata: ' + environment['userdata_path'] + ' - Free: ' +
               environment['userdata_free'].replace('.', ',') + ' GB')

    logger.info('Videolibrary: Series/Episodes: ' + environment['videolab_series'] + '/' +
    logger.log('Videolibrary: Series/Episodes: ' + environment['videolab_series'] + '/' +
               environment['videolab_episodios'] + ' - Pelis: ' +
               environment['videolab_pelis'] + ' - Upd: ' +
               environment['videolab_update'] + ' - Path: ' +

@@ -380,24 +380,24 @@ def list_env(environment={}):
            # if x == 0:
            #     cliente_alt = cliente.copy()
            #     del cliente_alt['Torrent_opt']
            #     logger.info('Torrent: Opt: %s, %s' % (str(cliente['Torrent_opt']), \
            #     logger.log('Torrent: Opt: %s, %s' % (str(cliente['Torrent_opt']), \
            #                str(cliente_alt).replace('{', '').replace('}', '') \
            #                .replace("'", '').replace('_', ' ')))
            # elif x == 1 and environment['torrent_error']:
            #     logger.info('- ' + str(cliente).replace('{', '').replace('}', '') \
            #     logger.log('- ' + str(cliente).replace('{', '').replace('}', '') \
            #                .replace("'", '').replace('_', ' '))
            # else:
            #     cliente_alt = cliente.copy()
            #     del cliente_alt['Plug_in']
            #     cliente_alt['Libre'] = cliente_alt['Libre'].replace('.', ',') + ' GB'
            #     logger.info('- %s: %s' % (str(cliente['Plug_in']), str(cliente_alt) \
            #     logger.log('- %s: %s' % (str(cliente['Plug_in']), str(cliente_alt) \
            #                .replace('{', '').replace('}', '').replace("'", '') \
            #                .replace('\\\\', '\\')))

    # logger.info('Proxy: ' + environment['proxy_active'])
    # logger.log('Proxy: ' + environment['proxy_active'])

    logger.info('LOG Size: ' + environment['log_size'].replace('.', ',') + ' MB')
    logger.info(sep)
    logger.log('LOG Size: ' + environment['log_size'].replace('.', ',') + ' MB')
    logger.log(sep)

    if environment['debug'] == 'False':
        logger.log_enable(False)
@@ -19,7 +19,7 @@ def start():
    Within this function all calls should go to
    functions that we want to execute as soon as we open the plugin.
    """
    logger.info()
    logger.log()
    # config.set_setting('show_once', True)
    # Test if all the required directories are created
    config.verify_directories_created()

@@ -37,7 +37,7 @@ def start():
        updater.showSavedChangelog()

def run(item=None):
    logger.info()
    logger.log()
    if not item:
        # Extract item from sys.argv
        if sys.argv[2]:

@@ -76,7 +76,7 @@ def run(item=None):
                xbmc_videolibrary.ask_set_content(silent=False)
            config.set_setting('show_once', True)

    logger.info(item.tostring())
    logger.log(item.tostring())

    try:
        if not config.get_setting('tmdb_active'):

@@ -84,7 +84,7 @@ def run(item=None):

        # If item has no action, stops here
        if item.action == "":
            logger.info("Item without action")
            logger.log("Item without action")
            return

        # Action for main menu in channelselector

@@ -154,7 +154,7 @@ def run(item=None):

            channel_file = os.path.join(config.get_runtime_path(), CHANNELS, item.channel + ".py")

            logger.info("channel_file= " + channel_file + ' - ' + CHANNELS + ' - ' + item.channel)
            logger.log("channel_file= " + channel_file + ' - ' + CHANNELS + ' - ' + item.channel)

            channel = None

@@ -164,7 +164,7 @@ def run(item=None):
            except ImportError:
                exec("import " + CHANNELS + "." + item.channel + " as channel")

            logger.info("Running channel %s | %s" % (channel.__name__, channel.__file__))
            logger.log("Running channel %s | %s" % (channel.__name__, channel.__file__))

            # Special play action
            if item.action == "play":

@@ -174,12 +174,12 @@ def run(item=None):
                        trakt_tools.set_trakt_info(item)
                    except:
                        pass
                logger.info("item.action=%s" % item.action.upper())
                logger.log("item.action=%s" % item.action.upper())
                # logger.debug("item_toPlay: " + "\n" + item.tostring('\n'))

                # First checks if channel has a "play" function
                if hasattr(channel, 'play'):
                    logger.info("Executing channel 'play' method")
                    logger.log("Executing channel 'play' method")
                    itemlist = channel.play(item)
                    b_favourite = item.isFavourite
                    # Play should return a list of playable URLS

@@ -200,7 +200,7 @@ def run(item=None):

                # If the channel has no "play" function, use the standard play from platformtools
                else:
                    logger.info("Executing core 'play' method")
                    logger.log("Executing core 'play' method")
                    platformtools.play_video(item)

            # Special action for findvideos, where the plugin looks for known urls

@@ -213,7 +213,7 @@ def run(item=None):

                # If not, uses the generic findvideos function
                else:
                    logger.info("No channel 'findvideos' method, "
                    logger.log("No channel 'findvideos' method, "
                               "executing core method")
                    itemlist = servertools.find_video_items(item)

@@ -258,7 +258,7 @@ def run(item=None):
                else:
                    filetools.remove(temp_search_file)

                logger.info("item.action=%s" % item.action.upper())
                logger.log("item.action=%s" % item.action.upper())
                from core import channeltools

                if config.get_setting('last_search'):

@@ -279,7 +279,7 @@ def run(item=None):
            # For all other actions
            else:
                # import web_pdb; web_pdb.set_trace()
                logger.info("Executing channel '%s' method" % item.action)
                logger.log("Executing channel '%s' method" % item.action)
                itemlist = getattr(channel, item.action)(item)
                if config.get_setting('trakt_sync'):
                    from core import trakt_tools

@@ -361,7 +361,7 @@ def set_search_temp(item):
    filetools.write(temp_search_file, f)

def reorder_itemlist(itemlist):
    logger.info()
    logger.log()
    # logger.debug("Input itemlist size: %i" % len(itemlist))

    new_list = []

@@ -399,7 +399,7 @@ def reorder_itemlist(itemlist):
    new_list.extend(mod_list)
    new_list.extend(not_mod_list)

    logger.info("Modified Titles:%i |Unmodified:%i" % (modified, not_modified))
    logger.log("Modified Titles:%i |Unmodified:%i" % (modified, not_modified))

    if len(new_list) == 0:
        new_list = itemlist

@@ -409,7 +409,7 @@ def reorder_itemlist(itemlist):


def limit_itemlist(itemlist):
    logger.info()
    logger.log()
    # logger.debug("Input itemlist size: %i" % len(itemlist))

    try:

@@ -442,7 +442,7 @@ def play_from_library(item):

    itemlist = []
    item.fromLibrary = True
    logger.info()
    logger.log()
    # logger.debug("item: \n" + item.tostring('\n'))

    # Try to play an image (this does nothing and does not raise an error either)
@@ -3,12 +3,9 @@
# Logger (kodi)
# --------------------------------------------------------------------------------

-import inspect
-
-import xbmc
+import inspect, sys, os, xbmc
from platformcode import config

-import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

@@ -105,13 +102,13 @@ def error(texto=""):
def log(*args):
# Function to simplify the log
# Automatically returns File Name and Function Name
-import os
-string = ''
-for arg in args: string += ' '+str(arg)
-frame = inspect.stack()[1]
-filename = frame[0].f_code.co_filename
-filename = os.path.basename(filename)
-info("[" + filename + "] - [" + inspect.stack()[1][3] + "] " + string)
+if loggeractive:
+string = ''
+for arg in args: string += ' '+str(arg)
+frame = inspect.stack()[1]
+filename = frame[0].f_code.co_filename
+filename = os.path.basename(filename)
+xbmc.log("[" + filename + "] [" + inspect.stack()[1][3] + "] " + string, xbmc.LOGNOTICE)


class WebErrorException(Exception):

@@ -117,7 +117,7 @@ def dialog_browse(_type, heading, shares="files", mask="", useThumbs=False, trea

def itemlist_refresh():
# pos = Item().fromurl(xbmc.getInfoLabel('ListItem.FileNameAndPath')).itemlistPosition
-# logger.info('Current position: ' + str(pos))
+# logger.log('Current position: ' + str(pos))
xbmc.executebuiltin("Container.Refresh")

# while Item().fromurl(xbmc.getInfoLabel('ListItem.FileNameAndPath')).itemlistPosition != pos:
@@ -138,7 +138,7 @@ def render_items(itemlist, parent_item):
"""
Function used to render itemlist on kodi
"""
-logger.info('START render_items')
+logger.log('START render_items')
thumb_type = config.get_setting('video_thumbnail_type')
from specials import shortcuts
from core import httptools
@@ -223,7 +223,7 @@ def render_items(itemlist, parent_item):
set_view_mode(itemlist[0], parent_item)

xbmcplugin.endOfDirectory(_handle)
-logger.info('END render_items')
+logger.log('END render_items')


def getCurrentView(item=None, parent_item=None):
@@ -280,11 +280,11 @@ def set_view_mode(item, parent_item):
if content:
mode = int(config.get_setting('view_mode_%s' % content).split(',')[-1])
if mode == 0:
-logger.info('default mode')
+logger.log('default mode')
mode = 55
xbmcplugin.setContent(handle=int(sys.argv[1]), content=Type)
xbmc.executebuiltin('Container.SetViewMode(%s)' % mode)
-logger.info('TYPE: ' + Type + ' - ' + 'CONTENT: ' + content)
+logger.log('TYPE: ' + Type + ' - ' + 'CONTENT: ' + content)


def set_infolabels(listitem, item, player=False):
@@ -504,10 +504,10 @@ def is_playing():


def play_video(item, strm=False, force_direct=False, autoplay=False):
-logger.info()
+logger.log()
logger.debug(item.tostring('\n'))
if item.channel == 'downloads':
-logger.info("Play local video: %s [%s]" % (item.title, item.url))
+logger.log("Play local video: %s [%s]" % (item.title, item.url))
xlistitem = xbmcgui.ListItem(path=item.url)
xlistitem.setArt({"thumb": item.thumbnail})
set_infolabels(xlistitem, item, True)
@@ -515,7 +515,7 @@ def play_video(item, strm=False, force_direct=False, autoplay=False):
return

default_action = config.get_setting("default_action")
-logger.info("default_action=%s" % default_action)
+logger.log("default_action=%s" % default_action)

# Open the selection dialog to see the available options
opciones, video_urls, seleccion, salir = get_dialogo_opciones(item, default_action, strm, autoplay)
@@ -525,8 +525,8 @@ def play_video(item, strm=False, force_direct=False, autoplay=False):
seleccion = get_seleccion(default_action, opciones, seleccion, video_urls)
if seleccion < 0: return  # Canceled box

-logger.info("selection=%d" % seleccion)
-logger.info("selection=%s" % opciones[seleccion])
+logger.log("selection=%d" % seleccion)
+logger.log("selection=%s" % opciones[seleccion])

# run the available option (jdownloader, download, favorites, add to the video library ...) IF IT IS NOT PLAY
salir = set_opcion(item, seleccion, opciones, video_urls)
@@ -687,7 +687,7 @@ def alert_unsopported_server():


def handle_wait(time_to_wait, title, text):
-logger.info("handle_wait(time_to_wait=%d)" % time_to_wait)
+logger.log("handle_wait(time_to_wait=%d)" % time_to_wait)
espera = dialog_progress(' ' + title, "")

secs = 0
@@ -706,15 +706,15 @@ def handle_wait(time_to_wait, title, text):
break

if cancelled:
-logger.info('Wait canceled')
+logger.log('Wait canceled')
return False
else:
-logger.info('Wait finished')
+logger.log('Wait finished')
return True


def get_dialogo_opciones(item, default_action, strm, autoplay):
-logger.info()
+logger.log()
# logger.debug(item.tostring('\n'))
from core import servertools

@@ -798,7 +798,7 @@ def get_dialogo_opciones(item, default_action, strm, autoplay):


def set_opcion(item, seleccion, opciones, video_urls):
-logger.info()
+logger.log()
# logger.debug(item.tostring('\n'))
salir = False
# You have not chosen anything, most likely because you pressed ESC
@@ -848,7 +848,7 @@ def set_opcion(item, seleccion, opciones, video_urls):


def get_video_seleccionado(item, seleccion, video_urls):
-logger.info()
+logger.log()
mediaurl = ""
view = False
wait_time = 0
@@ -874,7 +874,7 @@ def get_video_seleccionado(item, seleccion, video_urls):
mpd = True

# If there is no mediaurl it is because the video is not there :)
-logger.info("mediaurl=" + mediaurl)
+logger.log("mediaurl=" + mediaurl)
if mediaurl == "":
if item.server == "unknown":
alert_unsopported_server()
@@ -891,7 +891,7 @@ def get_video_seleccionado(item, seleccion, video_urls):


def set_player(item, xlistitem, mediaurl, view, strm, nfo_path=None, head_nfo=None, item_nfo=None):
-logger.info()
+logger.log()
# logger.debug("item:\n" + item.tostring('\n'))
# Moved the "torrent" connector here
if item.server == "torrent":
@@ -908,10 +908,10 @@ def set_player(item, xlistitem, mediaurl, view, strm, nfo_path=None, head_nfo=No
player_mode = config.get_setting("player_mode")
if (player_mode == 3 and mediaurl.startswith("rtmp")) or item.play_from == 'window' or item.nfo: player_mode = 0
elif "megacrypter.com" in mediaurl: player_mode = 3
-logger.info("mediaurl=" + mediaurl)
+logger.log("mediaurl=" + mediaurl)

if player_mode == 0:
-logger.info('Player Mode: Direct')
+logger.log('Player Mode: Direct')
# Add the listitem to a playlist
playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
playlist.clear()
@@ -924,24 +924,24 @@ def set_player(item, xlistitem, mediaurl, view, strm, nfo_path=None, head_nfo=No
trakt_tools.wait_for_update_trakt()

elif player_mode == 1:
-logger.info('Player Mode: setResolvedUrl')
+logger.log('Player Mode: setResolvedUrl')
xlistitem.setPath(mediaurl)
xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, xlistitem)
xbmc.sleep(2500)

elif player_mode == 2:
-logger.info('Player Mode: Built-In')
+logger.log('Player Mode: Built-In')
xbmc.executebuiltin("PlayMedia(" + mediaurl + ")")

elif player_mode == 3:
-logger.info('Player Mode: Download and Play')
+logger.log('Player Mode: Download and Play')
from platformcode import download_and_play
download_and_play.download_and_play(mediaurl, "download_and_play.tmp", config.get_setting("downloadpath"))
return

# ALL LOOKING TO REMOVE VIEW
if item.subtitle and view:
-logger.info("External subtitles: " + item.subtitle)
+logger.log("External subtitles: " + item.subtitle)
xbmc.sleep(2000)
xbmc_player.setSubtitles(item.subtitle)

@@ -967,7 +967,7 @@ def torrent_client_installed(show_tuple=False):


def play_torrent(item, xlistitem, mediaurl):
-logger.info()
+logger.log()
import time
from servers import torrent

@@ -1087,17 +1087,17 @@ def install_inputstream():
# Check if InputStream add-on exists!
Addon('inputstream.adaptive')

-logger.info('InputStream add-on installed from repo.')
+logger.log('InputStream add-on installed from repo.')
except RuntimeError:
-logger.info('InputStream add-on not installed.')
+logger.log('InputStream add-on not installed.')
dialog_ok(config.get_localized_string(20000), config.get_localized_string(30126))
return False
else:
try:
Addon('inputstream.adaptive')
-logger.info('InputStream add-on is installed and enabled')
+logger.log('InputStream add-on is installed and enabled')
except:
-logger.info('enabling InputStream add-on')
+logger.log('enabling InputStream add-on')
xbmc.executebuiltin('UpdateLocalAddons')
xbmc.executeJSONRPC('{"jsonrpc": "2.0", "id":1, "method": "Addons.SetAddonEnabled", "params": { "addonid": "inputstream.adaptive", "enabled": true }}')
return True
@@ -1212,13 +1212,13 @@ def best_chromeos_image(devices):
# Select the newest version
from distutils.version import LooseVersion  # pylint: disable=import-error,no-name-in-module,useless-suppression
if LooseVersion(device['version']) > LooseVersion(best['version']):
-logger.info('%s (%s) is newer than %s (%s)' % (device['hwid'], device['version'], best['hwid'], best['version']))
+logger.log('%s (%s) is newer than %s (%s)' % (device['hwid'], device['version'], best['hwid'], best['version']))
best = device

# Select the smallest image (disk space requirement)
elif LooseVersion(device['version']) == LooseVersion(best['version']):
if int(device['filesize']) + int(device['zipfilesize']) < int(best['filesize']) + int(best['zipfilesize']):
-logger.info('%s (%d) is smaller than %s (%d)' % (device['hwid'], int(device['filesize']) + int(device['zipfilesize']), best['hwid'], int(best['filesize']) + int(best['zipfilesize'])))
+logger.log('%s (%d) is smaller than %s (%d)' % (device['hwid'], int(device['filesize']) + int(device['zipfilesize']), best['hwid'], int(best['filesize']) + int(best['zipfilesize'])))
best = device
return best

@@ -65,7 +65,7 @@ class Recaptcha(xbmcgui.WindowXMLDialog):

data = httptools.downloadpage(self.url, post=post, headers=self.headers).data
from platformcode import logger
-logger.info(data)
+logger.log(data)
self.result = scrapertools.find_single_match(data, '<div class="fbc-verification-token">.*?>([^<]+)<')
if self.result:
platformtools.dialog_notification("Captcha corretto", "Verifica conclusa")

@@ -84,7 +84,7 @@ def regex_tvshow(compare, file, sub=""):


def set_Subtitle():
-logger.info()
+logger.log()

exts = [".srt", ".sub", ".txt", ".smi", ".ssa", ".ass"]
subtitle_folder_path = filetools.join(config.get_data_path(), "subtitles")
@@ -93,7 +93,7 @@ def set_Subtitle():

if subtitle_type == "2":
subtitle_path = config.get_setting("subtitlepath_file")
-logger.info("Con subtitulo : " + subtitle_path)
+logger.log("Con subtitulo : " + subtitle_path)
xbmc.Player().setSubtitles(subtitle_path)
else:
if subtitle_type == "0":
@@ -106,7 +106,7 @@ def set_Subtitle():
long_v = len(subtitle_path)
if long_v > 0:
if subtitle_path.startswith("http") or subtitle_path[long_v - 4:long_v] in exts:
-logger.info("Con subtitulo : " + subtitle_path)
+logger.log("Con subtitulo : " + subtitle_path)
xbmc.Player().setSubtitles(subtitle_path)
return
else:
@@ -125,7 +125,7 @@ def set_Subtitle():
Subnames = glob.glob(filetools.join(subtitle_path, "Movies", subtitle_name + "*.??.???"))
for Subname in Subnames:
if os.path.splitext(Subname)[1] in exts:
-logger.info("Con subtitulo : " + filetools.split(Subname)[1])
+logger.log("Con subtitulo : " + filetools.split(Subname)[1])
xbmc.Player().setSubtitles(Subname)
except:
logger.error("error al cargar subtitulos")
@@ -216,7 +216,7 @@ def searchSubtitle(item):
filetools.mkdir(full_path_tvshow)  # title_new + ".mp4"
full_path_video_new = xbmc.translatePath(
filetools.join(full_path_tvshow, "%s %sx%s.mp4" % (tvshow_title, season, episode)))
-logger.info(full_path_video_new)
+logger.log(full_path_video_new)
listitem = xbmcgui.ListItem(title_new, iconImage="DefaultVideo.png", thumbnailImage="")
listitem.setInfo("video", {"Title": title_new, "Genre": "Tv shows", "episode": int(episode), "season": int(season), "tvshowtitle": tvshow_title})

@@ -230,7 +230,7 @@ def searchSubtitle(item):
try:
filetools.copy(path_video_temp, full_path_video_new)
copy = True
-logger.info("nuevo path =" + full_path_video_new)
+logger.log("nuevo path =" + full_path_video_new)
time.sleep(2)
playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
playlist.clear()
@@ -288,7 +288,7 @@ def get_from_subdivx(sub_url):
:return: The path to the unzipped subtitle
"""

-logger.info()
+logger.log()

sub = ''
sub_dir = os.path.join(config.get_data_path(), 'temp_subs')
@@ -312,9 +312,9 @@ def get_from_subdivx(sub_url):
filetools.write(filename, data_dl)
sub = extract_file_online(sub_dir, filename)
except:
-logger.info('sub invalid')
+logger.log('sub invalid')
else:
-logger.info('sub invalid')
+logger.log('sub invalid')
return sub


@@ -328,7 +328,7 @@ def extract_file_online(path, filename):
:return:
"""

-logger.info()
+logger.log()

url = "http://online.b1.org/rest/online/upload"

@@ -98,7 +98,7 @@ thumb_dict = {"movies": "https://s10.postimg.cc/fxtqzdog9/peliculas.png",


def set_genre(string):
-# logger.info()
+# logger.log()

genres_dict = {'accion': ['accion', 'action', 'accion y aventura', 'action & adventure'],
'adultos': ['adultos', 'adultos +', 'adulto'],
@@ -140,7 +140,7 @@ def set_genre(string):


def remove_format(string):
-# logger.info()
+# logger.log()
# logger.debug('enter remove: %s' % string)
string = string.rstrip()
string = re.sub(r'(\[|\[\/)(?:color|COLOR|b|B|i|I).*?\]|\[|\]|\(|\)|\:|\.', '', string)
@@ -156,7 +156,7 @@ def normalize(string):


def simplify(string):
-# logger.info()
+# logger.log()
# logger.debug('enter simplify: %s'%string)
string = remove_format(string)
string = string.replace('-', ' ').replace('_', ' ')
@@ -175,7 +175,7 @@ def simplify(string):


def add_languages(title, languages):
-# logger.info()
+# logger.log()

if isinstance(languages, list):
for language in languages:
@@ -186,7 +186,7 @@ def add_languages(title, languages):


def add_info_plot(plot, languages, quality):
-# logger.info()
+# logger.log()
last = '[/I][/B]\n'

if languages:
@@ -221,7 +221,7 @@ def add_info_plot(plot, languages, quality):


def set_color(title, category):
-# logger.info()
+# logger.log()
from core import jsontools

styles_path = os.path.join(config.get_runtime_path(), 'resources', 'color_styles.json')
@@ -262,7 +262,7 @@ def set_color(title, category):


def set_lang(language):
-# logger.info()
+# logger.log()

cast = ['castellano', 'español', 'espanol', 'cast', 'esp', 'espaol', 'es', 'zc', 'spa', 'spanish', 'vc']
ita = ['italiano', 'italian', 'ita', 'it']
@@ -303,7 +303,7 @@ def set_lang(language):


def title_format(item):
-# logger.info()
+# logger.log()

lang = False
valid = True
@@ -567,7 +567,7 @@ def title_format(item):


def thumbnail_type(item):
-# logger.info()
+# logger.log()
# Check what type of thumbnail will be used in findvideos, Poster or Logo of the server

thumb_type = config.get_setting('video_thumbnail_type')

@@ -34,7 +34,7 @@ changelogFile = "special://profile/addon_data/plugin.video.kod/changelog.txt"

def loadCommits(page=1):
apiLink = 'https://api.github.com/repos/' + user + '/' + repo + '/commits?sha=' + branch + "&page=" + str(page)
-logger.info(apiLink)
+logger.log(apiLink)
# retry every second until it succeeds (e.g. when there is no connection)
for n in range(10):
try:
@@ -54,7 +54,7 @@ def loadCommits(page=1):
def check(background=False):
if not addon.getSetting('addon_update_enabled'):
return False, False
-logger.info('Cerco aggiornamenti..')
+logger.log('Cerco aggiornamenti..')
commits = loadCommits()
if not commits:
return False, False
@@ -66,7 +66,7 @@ def check(background=False):
localCommitFile = open(os.path.join(addonDir, trackingFile), 'r+')
localCommitSha = localCommitFile.read()
localCommitSha = localCommitSha.replace('\n', '')  # to be tested
-logger.info('Commit locale: ' + localCommitSha)
+logger.log('Commit locale: ' + localCommitSha)
updated = False
serviceChanged = False

@@ -91,7 +91,7 @@ def check(background=False):
# avoid applying merge commits
if 'Merge' in commitJson['commit']['message']:
continue
-logger.info('aggiornando a ' + commitJson['sha'])
+logger.log('aggiornando a ' + commitJson['sha'])
alreadyApplied = True

# major update
@@ -108,7 +108,7 @@ def check(background=False):
if file["filename"] == trackingFile:  # the tracking file is not modified
continue
else:
-logger.info(file["filename"])
+logger.log(file["filename"])
if 'resources/language' in file["filename"]:
poFilesChanged = True
if 'service.py' in file["filename"]:
@@ -138,7 +138,7 @@ def check(background=False):
localFile.writelines(patched)
localFile.close()
else:  # in case something went wrong
-logger.info('lo sha non corrisponde, scarico il file')
+logger.log('lo sha non corrisponde, scarico il file')
localFile.close()
urllib.urlretrieve(file['raw_url'], os.path.join(addonDir, file['filename']))
else:  # it is a non-text file, it has to be downloaded
@@ -191,7 +191,7 @@ def check(background=False):
elif changelog:
platformtools.dialog_ok(config.get_localized_string(20000), config.get_localized_string(80041) + changelog)
else:
-logger.info('Nessun nuovo aggiornamento')
+logger.log('Nessun nuovo aggiornamento')

return updated, serviceChanged

@@ -207,7 +207,7 @@ def showSavedChangelog():

def calcCurrHash():
treeHash = githash.tree_hash(addonDir).hexdigest()
-logger.info('tree hash: ' + treeHash)
+logger.log('tree hash: ' + treeHash)
commits = loadCommits()
lastCommitSha = commits[0]['sha']
page = 1
@@ -227,7 +227,7 @@ def calcCurrHash():
if found:
break
else:
-logger.info('Non sono riuscito a trovare il commit attuale, scarico lo zip')
+logger.log('Non sono riuscito a trovare il commit attuale, scarico lo zip')
hash = updateFromZip()
# if the zip was downloaded, we are certainly at the latest commit
localCommitFile = open(os.path.join(xbmc.translatePath("special://home/addons/"), 'plugin.video.kod', trackingFile), 'w')
@@ -294,9 +294,9 @@ def updateFromZip(message=config.get_localized_string(80050)):
destpathname = xbmc.translatePath("special://home/addons/")
extractedDir = filetools.join(destpathname, "addon-" + branch)

-logger.info("remotefilename=%s" % remotefilename)
-logger.info("localfilename=%s" % localfilename)
-logger.info('extract dir: ' + extractedDir)
+logger.log("remotefilename=%s" % remotefilename)
+logger.log("localfilename=%s" % localfilename)
+logger.log('extract dir: ' + extractedDir)

# preliminary cleanup
remove(localfilename)
@@ -307,24 +307,24 @@ def updateFromZip(message=config.get_localized_string(80050)):
lambda nb, bs, fs, url=remotefilename: _pbhook(nb, bs, fs, url, dp))
except Exception as e:
platformtools.dialog_ok(config.get_localized_string(20000), config.get_localized_string(80031))
-logger.info('Non sono riuscito a scaricare il file zip')
-logger.info(e)
+logger.log('Non sono riuscito a scaricare il file zip')
+logger.log(e)
dp.close()
return False

# unzip it
-logger.info("decompressione...")
-logger.info("destpathname=%s" % destpathname)
+logger.log("decompressione...")
+logger.log("destpathname=%s" % destpathname)

if os.path.isfile(localfilename):
-logger.info('il file esiste')
+logger.log('il file esiste')

dp.update(80, config.get_localized_string(20000) + '\n' + config.get_localized_string(80032))

import zipfile
try:
hash = fixZipGetHash(localfilename)
-logger.info(hash)
+logger.log(hash)

with zipfile.ZipFile(filetools.file_open(localfilename, 'rb', vfs=False)) as zip:
size = sum([zinfo.file_size for zinfo in zip.filelist])
@@ -335,7 +335,7 @@ def updateFromZip(message=config.get_localized_string(80050)):
dp.update(int(80 + cur_size * 15 / size))

except Exception as e:
-logger.info('Non sono riuscito ad estrarre il file zip')
+logger.log('Non sono riuscito ad estrarre il file zip')
logger.error(e)
import traceback
logger.error(traceback.print_exc())
@@ -355,7 +355,7 @@ def updateFromZip(message=config.get_localized_string(80050)):
rename(extractedDir, 'plugin.video.kod')
addonDir = filetools.join(destpathname, 'plugin.video.kod')

-logger.info("Cancellando il file zip...")
+logger.log("Cancellando il file zip...")
remove(localfilename)

dp.update(100)
@@ -384,7 +384,7 @@ def remove(file):
try:
os.remove(file)
except:
-logger.info('File ' + file + ' NON eliminato')
+logger.log('File ' + file + ' NON eliminato')


def onerror(func, path, exc_info):
@@ -411,7 +411,7 @@ def removeTree(dir):
try:
shutil.rmtree(dir, ignore_errors=False, onerror=onerror)
except Exception as e:
-logger.info('Cartella ' + dir + ' NON eliminata')
+logger.log('Cartella ' + dir + ' NON eliminata')
logger.error(e)


@@ -419,7 +419,7 @@ def rename(dir1, dir2):
try:
filetools.rename(dir1, dir2, silent=True, vfs=False)
except:
-logger.info('cartella ' + dir1 + ' NON rinominata')
+logger.log('cartella ' + dir1 + ' NON rinominata')


# https://stackoverflow.com/questions/3083235/unzipping-file-results-in-badzipfile-file-is-not-a-zip-file

@@ -261,7 +261,7 @@ class InfoWindow(xbmcgui.WindowXMLDialog):
return self.return_value

def onClick(self, _id):
-logger.info("onClick id=" + repr(_id))
+logger.log("onClick id=" + repr(_id))
if _id == ID_BUTTON_PREVIOUS and self.indexList > 0:
self.indexList -= 1
self.get_scraper_data(self.listData[self.indexList])
@@ -281,7 +281,7 @@ class InfoWindow(xbmcgui.WindowXMLDialog):
self.return_value = None

def onAction(self, action):
-logger.info("action=" + repr(action.getId()))
+logger.log("action=" + repr(action.getId()))
action = action.getId()

# Find Focus

@@ -17,7 +17,7 @@ from xml.dom import minidom

def mark_auto_as_watched(item, nfo_path=None, head_nfo=None, item_nfo=None):
def mark_as_watched_subThread(item, nfo_path, head_nfo, item_nfo):
-logger.info()
+logger.log()
# logger.debug("item:\n" + item.tostring('\n'))

time_limit = time.time() + 30
@@ -99,7 +99,7 @@ def sync_trakt_addon(path_folder):
"""
Updates the values of episodes seen if
"""
-logger.info()
+logger.log()
# if the addon exists we do the search
if xbmc.getCondVisibility('System.HasAddon("script.trakt")'):
# we import dependencies
@@ -225,7 +225,7 @@ def sync_trakt_kodi(silent=True):
notificacion = False

xbmc.executebuiltin('RunScript(script.trakt,action=sync,silent=%s)' % silent)
-logger.info("Synchronization with Trakt started")
+logger.log("Synchronization with Trakt started")

if notificacion:
platformtools.dialog_notification(config.get_localized_string(20000), config.get_localized_string(60045), sound=False, time=2000)
@@ -239,7 +239,7 @@ def mark_content_as_watched_on_kodi(item, value=1):
@type value: int
@param value: > 0 for seen, 0 for not seen
"""
-logger.info()
+logger.log()
# logger.debug("item:\n" + item.tostring('\n'))
payload_f = ''

@@ -311,7 +311,7 @@ def mark_season_as_watched_on_kodi(item, value=1):
@type value: int
@param value: > 0 for seen, 0 for not seen
"""
-logger.info()
+logger.log()
# logger.debug("item:\n" + item.tostring('\n'))

# We can only mark the season as seen in the Kodi database if the database is local; with a shared database this functionality will not work
@@ -345,7 +345,7 @@ def mark_content_as_watched_on_kod(path):
@type path: str
@param path: content folder to mark
"""
-logger.info()
+logger.log()
# logger.debug("path: " + path)

FOLDER_MOVIES = config.get_setting("folder_movies")
@@ -435,7 +435,7 @@ def get_data(payload):
:return:
"""
import urllib.request, urllib.error
-logger.info("payload: %s" % payload)
+logger.log("payload: %s" % payload)
# Required header for XBMC JSON-RPC calls, otherwise you'll get a 415 HTTP response code - Unsupported media type
headers = {'content-type': 'application/json'}

@@ -452,7 +452,7 @@ def get_data(payload):
response = f.read()
f.close()

-logger.info("get_data: response %s" % response)
+logger.log("get_data: response %s" % response)
data = jsontools.load(response)
except Exception as ex:
template = "An exception of type %s occured. Arguments:\n%r"
@@ -468,7 +468,7 @@ def get_data(payload):
logger.error("error en xbmc.executeJSONRPC: %s" % message)
data = ["error"]

-logger.info("data: %s" % data)
+logger.log("data: %s" % data)

return data

@@ -482,7 +482,7 @@ def update(folder_content=config.get_setting("folder_tvshows"), folder=""):
@type folder: str
@param folder: name of the folder to scan.
"""
-logger.info(folder)
+logger.log(folder)

payload = {
"jsonrpc": "2.0",
@@ -546,7 +546,7 @@ def set_content(content_type, silent=False, custom=False):
@type content_type: str ('movie' or 'tvshow')
@param content_type: content type to configure, series or movies
"""
-logger.info()
+logger.log()
continuar = True
msg_text = ""
videolibrarypath = config.get_setting("videolibrarypath")
@@ -572,7 +572,7 @@ def set_content(content_type, silent=False, custom=False):
try:
# Install metadata.themoviedb.org
xbmc.executebuiltin('xbmc.installaddon(metadata.themoviedb.org)', True)
-logger.info("Instalado el Scraper de películas de TheMovieDB")
+logger.log("Instalado el Scraper de películas de TheMovieDB")
except:
pass

@@ -626,7 +626,7 @@ def set_content(content_type, silent=False, custom=False):
try:
# Install metadata.tvdb.com
xbmc.executebuiltin('xbmc.installaddon(metadata.tvdb.com)', True)
-logger.info("The TVDB series Scraper installed ")
+logger.log("The TVDB series Scraper installed ")
except:
pass

@@ -721,7 +721,7 @@ def set_content(content_type, silent=False, custom=False):
strScraper = 'metadata.universal'
path_settings = xbmc.translatePath("special://profile/addon_data/metadata.universal/settings.xml")
if not os.path.exists(path_settings):
-logger.info("%s: %s" % (content_type, path_settings + " doesn't exist"))
+logger.log("%s: %s" % (content_type, path_settings + " doesn't exist"))
return continuar
settings_data = filetools.read(path_settings)
strSettings = ' '.join(settings_data.split()).replace("> <", "><")
@@ -740,7 +740,7 @@ def set_content(content_type, silent=False, custom=False):
strScraper = 'metadata.tvshows.themoviedb.org'
path_settings = xbmc.translatePath("special://profile/addon_data/metadata.tvshows.themoviedb.org/settings.xml")
if not os.path.exists(path_settings):
-logger.info("%s: %s" % (content_type, path_settings + " doesn't exist"))
+logger.log("%s: %s" % (content_type, path_settings + " doesn't exist"))
return continuar
settings_data = filetools.read(path_settings)
strSettings = ' '.join(settings_data.split()).replace("> <", "><")
@@ -750,7 +750,7 @@ def set_content(content_type, silent=False, custom=False):
videolibrarypath += sep
strPath = videolibrarypath + config.get_setting("folder_tvshows") + sep

-logger.info("%s: %s" % (content_type, strPath))
+logger.log("%s: %s" % (content_type, strPath))
# We check if strPath already exists in the DB to avoid duplicates
sql = 'SELECT idPath FROM path where strPath="%s"' % strPath
nun_records, records = execute_sql_kodi(sql)
@@ -792,15 +792,15 @@ def set_content(content_type, silent=False, custom=False):
heading = config.get_localized_string(70103) % content_type
msg_text = config.get_localized_string(70104)

-logger.info("%s: %s" % (heading, msg_text))
+logger.log("%s: %s" % (heading, msg_text))
return continuar


def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvshows_folder, new_tvshows_folder, progress):
def path_replace(path, old, new):

-logger.info()
-logger.info('path: ' + path + ', old: ' + old + ', new: ' + new)
+logger.log()
+logger.log('path: ' + path + ', old: ' + old + ', new: ' + new)

if new.startswith("special://") or '://' in new: sep = '/'
else: sep = os.sep
@@ -811,7 +811,7 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh

return path

-logger.info()
+logger.log()

sql_old_path = old_path
if sql_old_path.startswith("special://"):
@@ -823,10 +823,10 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh
if not sql_old_path.endswith(sep):
sql_old_path += sep

-logger.info('sql_old_path: ' + sql_old_path)
+logger.log('sql_old_path: ' + sql_old_path)
# search MAIN path in the DB
sql = 'SELECT idPath, strPath FROM path where strPath LIKE "%s"' % sql_old_path
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)

# change main path
@@ -834,7 +834,7 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh
idPath = records[0][0]
strPath = path_replace(records[0][1], old_path, new_path)
sql = 'UPDATE path SET strPath="%s" WHERE idPath=%s' % (strPath, idPath)
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)
else:
progress.update(100)
@@ -851,7 +851,7 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh

# Search Main Sub Folder
sql = 'SELECT idPath, strPath FROM path where strPath LIKE "%s"' % sql_old_folder
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)

# Change Main Sub Folder
@@ -860,13 +860,13 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh
idPath = record[0]
strPath = path_replace(record[1], filetools.join(old_path, OldFolder), filetools.join(new_path, NewFolder))
sql = 'UPDATE path SET strPath="%s" WHERE idPath=%s' % (strPath, idPath)
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)

# Search if Sub Folder exists in all paths
sql_old_folder += '%'
sql = 'SELECT idPath, strPath FROM path where strPath LIKE "%s"' % sql_old_folder
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)

# Change Sub Folder in all paths
@@ -875,7 +875,7 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh
idPath = record[0]
strPath = path_replace(record[1], filetools.join(old_path, OldFolder), filetools.join(new_path, NewFolder))
sql = 'UPDATE path SET strPath="%s" WHERE idPath=%s' % (strPath, idPath)
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)


@@ -883,27 +883,27 @@ def update_db(old_path, new_path, old_movies_folder, new_movies_folder, old_tvsh
# if is Movie Folder
# search and modify in "movie"
sql = 'SELECT idMovie, c22 FROM movie where c22 LIKE "%s"' % sql_old_folder
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)
if records:
for record in records:
idMovie = record[0]
strPath = path_replace(record[1], filetools.join(old_path, OldFolder), filetools.join(new_path, NewFolder))
sql = 'UPDATE movie SET c22="%s" WHERE idMovie=%s' % (strPath, idMovie)
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)
else:
# if is TV Show Folder
# search and modify in "episode"
sql = 'SELECT idEpisode, c18 FROM episode where c18 LIKE "%s"' % sql_old_folder
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)
if records:
for record in records:
idEpisode = record[0]
strPath = path_replace(record[1], filetools.join(old_path, OldFolder), filetools.join(new_path, NewFolder))
sql = 'UPDATE episode SET c18="%s" WHERE idEpisode=%s' % (strPath, idEpisode)
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)
p += 5
progress.update(p, config.get_localized_string(20000) + '\n' + config.get_localized_string(80013))
@@ -928,26 +928,26 @@ def clean(path_list=[]):

return path, sep

-logger.info()
+logger.log()

progress = platformtools.dialog_progress_bg(config.get_localized_string(20000), config.get_localized_string(80025))
progress.update(0)

# if the path list is empty, clean the entire video library
if not path_list:
-logger.info('the path list is empty, clean the entire video library')
+logger.log('the path list is empty, clean the entire video library')
if not config.get_setting("videolibrary_kodi"):
sql_path, sep = sql_format(config.get_setting("videolibrarypath"))
if not sql_path.endswith(sep): sql_path += sep
sql = 'SELECT idPath FROM path where strPath LIKE "%s"' % sql_path
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)
idPath = records[0][0]
sql = 'DELETE from path WHERE idPath=%s' % idPath
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)
sql = 'DELETE from path WHERE idParentPath=%s' % idPath
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)

from core import videolibrarytools
@@ -961,7 +961,7 @@ def clean(path_list=[]):
if filetools.exists(tvshow_nfo):
path_list.append(filetools.join(config.get_setting("videolibrarypath"), videolibrarytools.FOLDER_TVSHOWS, folder))

-logger.info('path_list: ' + str(path_list))
+logger.log('path_list: ' + str(path_list))
if path_list: t = float(100) / len(path_list)
for i, path in enumerate(path_list):
progress.update(int(math.ceil((i + 1) * t)))
@@ -971,13 +971,13 @@ def clean(path_list=[]):

sql_path, sep = sql_format(path)
if filetools.isdir(path) and not sql_path.endswith(sep): sql_path += sep
-logger.info('path: ' + path)
-logger.info('sql_path: ' + sql_path)
+logger.log('path: ' + path)
+logger.log('sql_path: ' + sql_path)

if filetools.isdir(path):
# search movie in the DB
sql = 'SELECT idMovie FROM movie where c22 LIKE "%s"' % (sql_path + '%')
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)
# delete movie
if records:
@@ -986,7 +986,7 @@ def clean(path_list=[]):
continue
# search TV show in the DB
sql = 'SELECT idShow FROM tvshow_view where strPath LIKE "%s"' % sql_path
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)
# delete TV show
if records:
@@ -995,7 +995,7 @@ def clean(path_list=[]):
elif config.get_setting("folder_movies") in sql_path:
# search movie in the DB
sql = 'SELECT idMovie FROM movie where c22 LIKE "%s"' % sql_path
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)
# delete movie
if records:
@@ -1004,7 +1004,7 @@ def clean(path_list=[]):
else:
# search episode in the DB
sql = 'SELECT idEpisode FROM episode where c18 LIKE "%s"' % sql_path
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)
# delete episode
if records:
@@ -1023,7 +1023,7 @@ def check_db(path):
ret = False
sql_path = '%' + sep + path.split(sep)[-1] + sep + '%'
sql = 'SELECT idShow FROM tvshow_view where strPath LIKE "%s"' % sql_path
-logger.info('sql: ' + sql)
+logger.log('sql: ' + sql)
nun_records, records = execute_sql_kodi(sql)
if records:
ret = True
@@ -1040,7 +1040,7 @@ def execute_sql_kodi(sql):
@return: list with the query result
@rtype records: list of tuples
"""
-logger.info()
+logger.log()
file_db = ""
nun_records = 0
records = None
@@ -1061,14 +1061,14 @@ def execute_sql_kodi(sql):
break

if file_db:
-logger.info("DB file: %s" % file_db)
+logger.log("DB file: %s" % file_db)
conn = None
try:
import sqlite3
conn = sqlite3.connect(file_db)
cursor = conn.cursor()

-logger.info("Running sql: %s" % sql)
+logger.log("Running sql: %s" % sql)
cursor.execute(sql)
conn.commit()

@@ -1082,7 +1082,7 @@ def execute_sql_kodi(sql):
nun_records = conn.total_changes

conn.close()
-logger.info("Query executed. Records: %s" % nun_records)
+logger.log("Query executed. Records: %s" % nun_records)

except:
logger.error("Error executing sql query")
@@ -1102,7 +1102,7 @@ def check_sources(new_movies_path='', new_tvshows_path=''):
if not path.endswith(sep): path += sep
return path

-logger.info()
+logger.log()

new_movies_path = format_path(new_movies_path)
new_tvshows_path = format_path(new_tvshows_path)
@@ -1132,7 +1132,7 @@ def check_sources(new_movies_path='', new_tvshows_path=''):


def update_sources(new='', old=''):
-logger.info()
+logger.log()
if new == old: return

SOURCES_PATH = xbmc.translatePath("special://userdata/sources.xml")
@@ -1174,9 +1174,9 @@ def update_sources(new='', old=''):
# create new path
list_path = [p.firstChild.data for p in paths_node]
if new in list_path:
-logger.info("The path %s already exists in sources.xml" % new)
+logger.log("The path %s already exists in sources.xml" % new)
return
-logger.info("The path %s does not exist in sources.xml" % new)
+logger.log("The path %s does not exist in sources.xml" % new)

# if the path does not exist we create one
source_node = xmldoc.createElement("source")
@@ -1215,7 +1215,7 @@ def update_sources(new='', old=''):


def ask_set_content(silent=False):
-logger.info()
+logger.log()
logger.debug("videolibrary_kodi %s" % config.get_setting("videolibrary_kodi"))

def do_config(custom=False):
@@ -1272,7 +1272,7 @@ def ask_set_content(silent=False):

def next_ep(item):
from core.item import Item
-logger.info()
+logger.log()
item.next_ep = False

# check if the next file exists
@@ -1288,7 +1288,7 @@ def next_ep(item):
nextIndex = fileList.index(current_filename) + 1
if nextIndex == 0 or nextIndex == len(fileList): next_file = None
else: next_file = fileList[nextIndex]
-logger.info('Next File:' + str(next_file))
+logger.log('Next File:' + str(next_file))

# start the next episode window after x time
if next_file:

@@ -9,7 +9,7 @@ from platformcode import logger, config
headers = [['User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0']]

def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
# page_url = re.sub('akvideo.stream/(?:video/|video\\.php\\?file_code=)?(?:embed-)?([a-zA-Z0-9]+)','akvideo.stream/video/\\1',page_url)
global data
page = httptools.downloadpage(page_url, headers=headers)
@@ -28,18 +28,18 @@ def test_video_exists(page_url):

# ID, code = scrapertools.find_single_match(data, r"""input\D*id=(?:'|")([^'"]+)(?:'|").*?value='([a-z0-9]+)""")
# post = urllib.urlencode({ID: code})
-# logger.info('PAGE DATA' + data)
+# logger.log('PAGE DATA' + data)
if "File Not Found" in data:
return False, config.get_localized_string(70449) % "Akvideo"
return True, ""


def get_video_url(page_url, premium=False, user="", password="", video_password=""):
-logger.info(" url=" + page_url)
+logger.log(" url=" + page_url)
video_urls = []

global data
-# logger.info('PAGE DATA' + data)
+# logger.log('PAGE DATA' + data)
# sitekey = scrapertools.find_single_match(data, 'data-sitekey="([^"]+)')
# captcha = platformtools.show_recaptcha(sitekey, page_url) if sitekey else ''
#

@@ -6,7 +6,7 @@ from platformcode import config, logger


def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
global data
data = httptools.downloadpage(page_url, cookies=False).data
if 'File you are looking for is not found.' in data:

@@ -6,7 +6,7 @@ from platformcode import logger


def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
if "no longer exists" in data or "to copyright issues" in data:
return False, config.get_localized_string(70449) % "animeid"
@@ -16,7 +16,7 @@ def test_video_exists(page_url):


def get_video_url(page_url, user="", password="", video_password=""):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
video_urls = []
label, videourl = scrapertools.find_single_match(data, 'label":"([^"]+)".*?file":"([^"]+)')

@@ -9,7 +9,7 @@ from platformcode import logger, config


def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
response = httptools.downloadpage(page_url)
if not response.success or "Not Found" in response.data or "File was deleted" in response.data or "is no longer available" in response.data:
return False, config.get_localized_string(70449) % "anonfile"
@@ -17,7 +17,7 @@ def test_video_exists(page_url):


def get_video_url(page_url, premium=False, user="", password="", video_password=""):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
video_urls = []
data = httptools.downloadpage(page_url).data
patron = 'download-url.*?href="([^"]+)"'

@@ -9,7 +9,7 @@ from platformcode import logger


def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url)
if data.code == 404:
return False, config.get_localized_string(70449) % "ArchiveOrg"
@@ -17,7 +17,7 @@ def test_video_exists(page_url):


def get_video_url(page_url, premium=False, user="", password="", video_password=""):
-logger.info("url=" + page_url)
+logger.log("url=" + page_url)
video_urls = []
data = httptools.downloadpage(page_url).data
patron = '<meta property="og:video" content="([^"]+)">'

@@ -9,7 +9,7 @@ except ImportError:
from urllib import urlencode

def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)

if 'http://' in page_url:  # fastids
page_url = httptools.downloadpage(page_url, follow_redirects=False, only_headers=True).headers['location']
@@ -24,7 +24,7 @@ def test_video_exists(page_url):


def get_video_url(page_url, premium=False, user="", password="", video_password=""):
-logger.info("page_url=" + page_url)
+logger.log("page_url=" + page_url)

video_urls = []

@@ -36,18 +36,18 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
if data_pack:
from lib import jsunpack
data = jsunpack.unpack(data_pack)
-logger.info("page_url=" + data)
+logger.log("page_url=" + data)

# URL
url = scrapertools.find_single_match(data, r'"src"value="([^"]+)"')
if not url:
url = scrapertools.find_single_match(data, r'file\s*:\s*"([^"]+)"')
-logger.info("URL=" + str(url))
+logger.log("URL=" + str(url))

# Video URL
video_urls.append([".mp4" + " [backin]", url])

for video_url in video_urls:
-logger.info("%s - %s" % (video_url[0], httptools.get_url_headers(video_url[1])))
+logger.log("%s - %s" % (video_url[0], httptools.get_url_headers(video_url[1])))

return video_urls

@@ -11,7 +11,7 @@ from platformcode import logger


def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
global page
page = httptools.downloadpage(page_url)
if not page.success:
@@ -20,7 +20,7 @@ def test_video_exists(page_url):


def get_video_url(page_url, premium=False, user="", password="", video_password=""):
-logger.info("url=" + page_url)
+logger.log("url=" + page_url)
video_urls = []
ext = '.mp4'


@@ -10,7 +10,7 @@ headers = {'User-Agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N)


def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
if "Archive no Encontrado" in data:
return False, config.get_localized_string(70449) % "bdupload"
@@ -19,7 +19,7 @@ def test_video_exists(page_url):


def get_video_url(page_url, user="", password="", video_password=""):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
post = ""
patron = '(?s)type="hidden" name="([^"]+)".*?value="([^"]*)"'

@@ -11,7 +11,7 @@ from platformcode import logger


def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url)
if data.code == 404:
return False, config.get_localized_string(70449) % "CinemaUpload"
@@ -19,7 +19,7 @@ def test_video_exists(page_url):


def get_video_url(page_url, premium=False, user="", password="", video_password=""):
-logger.info("url=" + page_url)
+logger.log("url=" + page_url)
video_urls = []
data = httptools.downloadpage(page_url).data
data = re.sub(r'\n|\r|\t| |<br>|\s{2,}', "", data)

@@ -22,7 +22,7 @@ excption = False


def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)

data = get_data(page_url.replace(".org", ".me"))
if "File Not Found" in data: return False, config.get_localized_string(70449) % "Clicknupload"
@@ -31,7 +31,7 @@ def test_video_exists(page_url):


def get_video_url(page_url, premium=False, user="", password="", video_password=""):
-logger.info("url=" + page_url)
+logger.log("url=" + page_url)

data = get_data(page_url.replace(".org", ".me"))

@@ -51,7 +51,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
media_url = media.rsplit('/', 1)[0] + "/" + url_strip
video_urls.append([scrapertools.get_filename_from_url(media_url)[-4:] + " [clicknupload]", media_url])
for video_url in video_urls:
-logger.info("%s - %s" % (video_url[0], video_url[1]))
+logger.log("%s - %s" % (video_url[0], video_url[1]))

return video_urls


@@ -6,7 +6,7 @@ from lib import jsunpack
from platformcode import logger, config

def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
global data
data = httptools.downloadpage(page_url).data
if "File Not Found" in data or "File was deleted" in data:
@@ -15,7 +15,7 @@ def test_video_exists(page_url):


def get_video_url(page_url, user="", password="", video_password=""):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
video_urls = []

try:

@@ -8,7 +8,7 @@ from lib import jsunpack


def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
html = httptools.downloadpage(page_url)
global data
data = html.data
@@ -18,7 +18,7 @@ def test_video_exists(page_url):


def get_video_url(page_url, premium=False, user="", password="", video_password=""):
-logger.info("url=" + page_url)
+logger.log("url=" + page_url)
video_urls = []
global data
# data = httptools.downloadpage(page_url).data

@@ -30,7 +30,7 @@ proxy = "https://www.usa-proxy.org/"


def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)

data = httptools.downloadpage(page_url, headers=GLOBAL_HEADER).data
if "Este es un clip de muestra" in data:
@@ -44,7 +44,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
# page_url='https://www.crunchyroll.com/es-es/one-piece/episode-891-climbing-up-a-waterfall-a-great-journey-through-the-land-of-wanos-sea-zone-786643'
-logger.info("url=" + page_url)
+logger.log("url=" + page_url)
video_urls = []
if "crunchyroll.com" in page_url:
media_id = page_url.rsplit("-", 1)[1]
@@ -94,7 +94,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
file_sub = ""
video_urls.append(["%s %sp [crunchyroll]" % (filename, quality), media_url, 0, file_sub])
for video_url in video_urls:
-logger.info("%s - %s" % (video_url[0], video_url[1]))
+logger.log("%s - %s" % (video_url[0], video_url[1]))
return video_urls



@@ -6,7 +6,7 @@ from platformcode import logger, config


def test_video_exists(page_url):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
global response

response = httptools.downloadpage(page_url, cookies=False)
@@ -18,7 +18,7 @@ def test_video_exists(page_url):


def get_video_url(page_url, premium=False, user="", password="", video_password=""):
-logger.info("(page_url='%s')" % page_url)
+logger.log("(page_url='%s')" % page_url)
video_urls = []
cookie = {'Cookie': response.headers["set-cookie"]}
data = response.data.replace("\\", "")
@@ -40,5 +40,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
stream_url = stream_url_http
video_urls.append(["%sp .%s [dailymotion]" % (calidad, stream_type), stream_url, 0, subtitle])
for video_url in video_urls:
-logger.info("%s - %s" % (video_url[0], video_url[1]))
+logger.log("%s - %s" % (video_url[0], video_url[1]))
return video_urls
@@ -7,7 +7,7 @@ from platformcode import logger
|
||||
|
||||
# Returns an array of possible video url's from the page_url
|
||||
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
|
||||
logger.info()
|
||||
logger.log()
|
||||
page_url = correct_url(page_url)
|
||||
dd1 = httptools.downloadpage("https://api.alldebrid.com/user/login?agent=mySoft&username=%s&password=%s" %(user, password)).data
|
||||
token = scrapertools.find_single_match(dd1, 'token":"([^"]+)')
|
||||
|
||||
@@ -22,7 +22,7 @@ headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:65.0) Gecko/20
|
||||
|
||||
# Returns an array of possible video url's from the page_url
|
||||
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
|
||||
logger.info("(page_url='%s' , video_password=%s)" % (page_url, video_password))
|
||||
logger.log("(page_url='%s' , video_password=%s)" % (page_url, video_password))
|
||||
page_url = page_url.replace(".nz/embed", ".nz/")
|
||||
# Se comprueba si existe un token guardado y sino se ejecuta el proceso de autentificación
|
||||
token_auth = config.get_setting("token", server="realdebrid")
|
||||
@@ -99,7 +99,7 @@ def get_enlaces(data):
|
||||
|
||||
|
||||
def authentication():
|
||||
logger.info()
|
||||
logger.log()
|
||||
try:
|
||||
client_id = "YTWNFBIJEEBP6"
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ from platformcode import logger
|
||||
|
||||
|
||||
def get_long_url(short_url):
|
||||
logger.info("short_url = '%s'" % short_url)
|
||||
logger.log("short_url = '%s'" % short_url)
|
||||
|
||||
data = httptools.downloadpage(short_url).data
|
||||
ysmm = scrapertools.find_single_match(data, "var ysmm = '([^']+)';")
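
scrapertools.find_single_match, used here to pull the ysmm variable out of the adf.ly page, is essentially a re.search wrapper that returns the first group or an empty string. A rough standalone equivalent; the sample page snippet is invented:

    import re

    def find_single_match(data, pattern):
        # Returns the first captured group, or "" when nothing matches,
        # mirroring how scrapertools is used throughout these resolvers.
        match = re.search(pattern, data, re.DOTALL)
        return match.group(1) if match else ""

    data = "<script>var ysmm = 'abc123';</script>"  # illustrative page snippet
    ysmm = find_single_match(data, r"var ysmm = '([^']+)';")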

@@ -17,7 +17,7 @@ from platformcode import logger

# Obtiene la URL que hay detrás de un enlace a linkbucks
def get_long_url(short_url):
logger.info("(short_url='%s')" % short_url)
logger.log("(short_url='%s')" % short_url)

request_headers = []
request_headers.append(["User-Agent",
@@ -33,17 +33,17 @@ def get_long_url(short_url):
while True:
for name, value in response_headers:
if name == "set-cookie":
logger.info("Set-Cookie: " + value)
logger.log("Set-Cookie: " + value)
cookie_name = scrapertools.scrapertools.find_single_match(value, '(.*?)\=.*?\;')
cookie_value = scrapertools.scrapertools.find_single_match(value, '.*?\=(.*?)\;')
request_headers.append(["Cookie", cookie_name + "=" + cookie_value])

body, response_headers = scrapertools.read_body_and_headers(url, headers=request_headers)
logger.info("body=" + body)
logger.log("body=" + body)

try:
location = scrapertools.scrapertools.find_single_match(body, '<textarea.*?class="caja_des">([^<]+)</textarea>')
logger.info("location=" + location)
logger.log("location=" + location)
break
except:
n = n + 1

@@ -38,15 +38,15 @@ servers = get_server_list()

def get_long_urls(data):
logger.info()
logger.log()
patron = '<a href="http://([^"]+)"'
matches = re.compile(patron, re.DOTALL).findall(data)
for short_url in matches:
if short_url.startswith(tuple(servers)):
logger.info(": " + short_url)
logger.log(": " + short_url)
longurl_data = httptools.downloadpage(
"http://api.longurl.org/v2/expand?url=" + urllib.quote_plus(short_url)).data
logger.info(longurl_data)
logger.log(longurl_data)
try:
long_url = scrapertools.scrapertools.find_single_match(longurl_data, '<long-url><!\[CDATA\[(.*?)\]\]></long-url>')
except:

@@ -5,9 +5,9 @@ from platformcode import logger

def get_long_url(short_url):
logger.info("(short_url='%s')" % short_url)
logger.log("(short_url='%s')" % short_url)

location = scrapertools.get_header_from_response(short_url, header_to_get="location")
logger.info("location=" + location)
logger.log("location=" + location)

return location

@@ -12,7 +12,7 @@ from platformcode import logger, config

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
global data
data = httptools.downloadpage(page_url).data.replace('"', "'")
if "Not Found" in data or "File Does not Exist" in data:
@@ -20,7 +20,7 @@ def test_video_exists(page_url):
return True, ""

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(deltabit page_url='%s')" % page_url)
logger.log("(deltabit page_url='%s')" % page_url)
video_urls = []
global data

@@ -8,7 +8,7 @@ def test_video_exists(page_url):

# Returns an array of possible video url's from the page_url
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

video_urls = [["%s %s" % (page_url[-4:], config.get_localized_string(30137)), page_url]]

@@ -9,7 +9,7 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url)
if data.code == 404:
return False, config.get_localized_string(70449) % "Dostream"
@@ -17,7 +17,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)
video_urls = []
data = httptools.downloadpage(page_url, headers={"Referer":page_url}).data
patron = '"label":"([^"]+)".*?'

@@ -6,7 +6,7 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
if "no longer exists" in data or "to copyright issues" in data:
return False, "[Downace] El video ha sido borrado"
@@ -18,7 +18,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
video_urls = []
videourl = scrapertools.find_single_match(data, 'controls preload.*?src="([^"]+)')

@@ -19,10 +19,10 @@ from platformcode import logger

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
page_url = page_url.replace("amp;", "")
data = httptools.downloadpage(page_url).data
logger.info("data=" + data)
logger.log("data=" + data)
video_urls = []
patron = "video_src.*?(http.*?)%22%2C%22video_timestamp"
matches = re.compile(patron, re.DOTALL).findall(data)
@@ -33,5 +33,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
videourl = urllib.unquote(videourl)
video_urls.append(["[facebook]", videourl])
for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
logger.log("%s - %s" % (video_url[0], video_url[1]))
return video_urls

@@ -7,7 +7,7 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url)

if "Object not found" in data.data or "longer exists on our servers" in data.data:
@@ -18,7 +18,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

data = httptools.downloadpage(page_url).data
if "p,a,c,k,e,d" in data:
@@ -38,6 +38,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
except:
pass
for video_url in video_urls:
logger.info(" %s - %s" % (video_url[0], video_url[1]))
logger.log(" %s - %s" % (video_url[0], video_url[1]))

return video_urls

@@ -6,7 +6,7 @@ from core import jsontools
from platformcode import logger, config

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
global data

page_url = re.sub('://[^/]+/', '://feurl.com/', page_url)
@@ -16,14 +16,14 @@ def test_video_exists(page_url):
page_url = page_url.replace("/f/","/v/")
page_url = page_url.replace("/v/","/api/source/")
data = httptools.downloadpage(page_url, post={}).json
logger.info(data)
logger.log(data)
if "Video not found or" in data or "We are encoding this video" in data:
return False, config.get_localized_string(70449) % "Fembed"
return True, ""

def get_video_url(page_url, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []
for file in data['data']:
media_url = file['file']
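
The Fembed hunks rewrite the embed URL to the /api/source/ endpoint and POST an empty form; the JSON reply carries a data list of file/label pairs that get_video_url walks. That call in isolation, reusing the addon's httptools wrapper as above (the video id is invented):

    from core import httptools

    api_url = "https://feurl.com/api/source/xxxxxx"  # illustrative video id
    reply = httptools.downloadpage(api_url, post={}).json  # empty POST, as in the hunk
    for entry in reply.get("data", []):
        print(entry["label"], entry["file"])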

@@ -8,7 +8,7 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

data = httptools.downloadpage(page_url, follow_redirects=False)

@@ -18,7 +18,7 @@ def test_video_exists(page_url):
return True, ""

def get_video_url(page_url, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []
data = httptools.downloadpage(page_url, follow_redirects=False, only_headers=True)
logger.debug(data.headers)
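
This hoster never serves a page with the link in it; the direct URL lives in the redirect itself, which is why the resolver disables follow_redirects and asks only for the headers. The same step in isolation (the URL is invented):

    from core import httptools

    page_url = "https://example.com/embed/abc"  # illustrative
    reply = httptools.downloadpage(page_url, follow_redirects=False, only_headers=True)
    direct_url = reply.headers.get("location", "")  # empty when there was no redirect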

@@ -4,12 +4,12 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

return True, ""

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []
return video_urls

@@ -9,7 +9,7 @@ from platformcode import logger, config

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
response = httptools.downloadpage(page_url)
if "File was deleted" in response.data or "is no longer available" in response.data:
return False, config.get_localized_string(70449) % "filepup"
@@ -17,7 +17,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []
page_url = page_url.replace("https","http") + "?wmode=transparent"
data = httptools.downloadpage(page_url).data
@@ -36,5 +36,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
video_urls.sort(key=lambda x: x[2])
for video_url in video_urls:
video_url[2] = 0
logger.info("%s - %s" % (video_url[0], video_url[1]))
logger.log("%s - %s" % (video_url[0], video_url[1]))
return video_urls

@@ -6,7 +6,7 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data

if "File was deleted" in data:
@@ -16,7 +16,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)
video_urls = []
data = httptools.downloadpage(page_url).data
url = scrapertools.find_single_match(data, '(?i)link:\s*"(https://.*?filescdn\.com.*?mp4)"')

@@ -6,7 +6,7 @@ from platformcode import logger

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("( page_url='%s')")
logger.log("( page_url='%s')")
video_urls = []
itemlist = []
data1 = ''

@@ -23,7 +23,7 @@ flashx_hash_f = ""
flashx_post = ""

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
global flashx_data
try:
flashx_data = httptools.downloadpage(page_url, cookies="xfsts=pfp5dj3e6go1l2o1").data
@@ -53,7 +53,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)
pfxfx = ""
data = flashx_data
data = data.replace("\n", "")
@@ -70,8 +70,8 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
matches = scrapertools.find_multiple_matches(mfxfx, '(\w+):(\w+)')
for f, v in matches:
pfxfx += f + "=" + v + "&"
logger.info("mfxfxfx1= %s" % js_fxfx)
logger.info("mfxfxfx2= %s" % pfxfx)
logger.log("mfxfxfx1= %s" % js_fxfx)
logger.log("mfxfxfx2= %s" % pfxfx)
if pfxfx == "":
pfxfx = "f=fail&fxfx=6"
coding_url = 'https://www.flashx.co/flashx.php?%s' % pfxfx
@@ -119,14 +119,14 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
filetools.write(subtitle, data)
except:
import traceback
logger.info("Error al descargar el subtítulo: " + traceback.format_exc())
logger.log("Error al descargar el subtítulo: " + traceback.format_exc())

for media_url, label in media_urls:
if not media_url.endswith("png") and not media_url.endswith(".srt"):
video_urls.append(["." + media_url.rsplit('.', 1)[1] + " [flashx]", media_url, 0, subtitle])

for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
logger.log("%s - %s" % (video_url[0], video_url[1]))
except:
pass
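
The flashx resolver assembles the query string for flashx.php out of key:value pairs scraped from inline JS, falling back to f=fail&fxfx=6 when nothing matches. A compact sketch of that assembly (the scraped fragment is invented):

    import re
    try:
        from urllib.parse import urlencode  # Python 3 (Kodi 19)
    except ImportError:
        from urllib import urlencode        # Python 2 fallback

    mfxfx = "f:abcd,fxfx:6"  # illustrative scraped fragment
    pairs = re.findall(r"(\w+):(\w+)", mfxfx)
    pfxfx = urlencode(pairs) or "f=fail&fxfx=6"  # same fallback as the resolver
    coding_url = "https://www.flashx.co/flashx.php?%s" % pfxfx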

@@ -8,7 +8,7 @@ from platformcode import logger

# Returns an array of possible video url's from the page_url
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

video_urls = []

@@ -17,7 +17,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
page_url = scrapertools.get_header_from_response(page_url, header_to_get="location")

# http://www.4shared.com/flash/player.swf?file=http://dc237.4shared.com/img/392975628/ff297d3f/dlink__2Fdownload_2Flj9Qu-tF_3Ftsid_3D20101030-200423-87e3ba9b/preview.flv&d
logger.info("redirect a '%s'" % page_url)
logger.log("redirect a '%s'" % page_url)
patron = "file\=([^\&]+)\&"
matches = re.compile(patron, re.DOTALL).findall(page_url)

@@ -29,6 +29,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
video_urls.append(["[fourshared]", page_url])

for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
logger.log("%s - %s" % (video_url[0], video_url[1]))

return video_urls

@@ -22,7 +22,7 @@ headers = {"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:%s.0) Geck
DATA = ''

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

data = alfaresolver.get_data(page_url, False)
@@ -46,7 +46,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

data = DATA

@@ -87,7 +87,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
video_urls.append([scrapertools.get_filename_from_url(mediaurl)[-4:] + " [gamovideo]", mediaurl])

for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
logger.log("%s - %s" % (video_url[0], video_url[1]))

return video_urls

@@ -5,7 +5,7 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
if '<h2 class="error">Download error</h2>' in data:
return False, "El enlace no es válido<br/>o ha sido borrado de gigasize"
@@ -13,7 +13,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []

return video_urls

@@ -7,14 +7,14 @@ from platformcode import logger

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []
# Lo extrae a partir de flashvideodownloader.org
if page_url.startswith("http://"):
url = 'http://www.flashvideodownloader.org/download.php?u=' + page_url
else:
url = 'http://www.flashvideodownloader.org/download.php?u=http://video.google.com/videoplay?docid=' + page_url
logger.info("url=" + url)
logger.log("url=" + url)
data = httptools.downloadpage(url).data

# Extrae el vídeo
@@ -24,6 +24,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
video_urls.append(["[googlevideo]", newmatches[0]])

for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
logger.log("%s - %s" % (video_url[0], video_url[1]))

return video_urls

@@ -20,15 +20,15 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)
video_urls = []
global data
# data = httptools.downloadpage(page_url, use_requests=True, verify=False).data
data = re.sub(r'"|\n|\r|\t| |<br>|\s{2,}', "", data)
logger.info('GOUN DATA= '+data)
logger.log('GOUN DATA= '+data)
packed_data = scrapertools.find_single_match(data, "javascript'>(eval.*?)</script>")
unpacked = jsunpack.unpack(packed_data)
logger.info('GOUN DATA= '+unpacked)
logger.log('GOUN DATA= '+unpacked)
patron = r"sources..([^\]]+)"
matches = re.compile(patron, re.DOTALL).findall(unpacked)
if not matches:

@@ -42,7 +42,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, user="", password="", video_password=""):
logger.info()
logger.log()
video_urls = []
urls = []
streams =[]

@@ -7,7 +7,7 @@ from platformcode import config, logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

data = httptools.downloadpage(page_url, cookies=False).data
if 'Not found id' in data:
@@ -17,12 +17,12 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info()
logger.log()
itemlist = []

logger.info(page_url)
logger.log(page_url)
data = httptools.downloadpage(page_url, post='').data
logger.info(data)
logger.log(data)
url = base64.b64decode(data)

itemlist.append([".mp4 [HDLoad]", url])
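
HDLoad answers the empty POST with the final URL base64-encoded in the response body, so the resolver only has to decode it. A self-contained sketch (the encoded body is generated on the spot rather than fetched):

    import base64

    encoded = base64.b64encode(b"https://example.com/video.mp4")  # stand-in for the response body
    url = base64.b64decode(encoded).decode("utf-8")
    # url -> "https://example.com/video.mp4"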

@@ -7,12 +7,12 @@ from platformcode import logger, config, platformtools
baseUrl = 'https://hdmario.live'

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
global page, data

page = httptools.downloadpage(page_url)
data = page.data
logger.info(page.url)
logger.log(page.url)

if "the page you are looking for could not be found" in data:
return False, config.get_localized_string(70449) % "HDmario"
@@ -37,13 +37,13 @@ def registerOrLogin(page_url, forced=False):
setting.server_config(Item(config='hdmario'))
login()
else:
logger.info('Registrazione automatica in corso')
logger.log('Registrazione automatica in corso')
import random
import string
randEmail = ''.join(random.choice(string.ascii_letters + string.digits) for i in range(random.randint(9, 14))) + '@gmail.com'
randPsw = ''.join(random.choice(string.ascii_letters + string.digits) for i in range(10))
logger.info('email: ' + randEmail)
logger.info('pass: ' + randPsw)
logger.log('email: ' + randEmail)
logger.log('pass: ' + randPsw)
nTry = 0
while nTry < 5:
nTry += 1
@@ -59,7 +59,7 @@ def registerOrLogin(page_url, forced=False):
break
else:
platformtools.dialog_ok('HDmario', 'Impossibile registrarsi automaticamente')
logger.info('Registrazione completata')
logger.log('Registrazione completata')
global page, data
page = httptools.downloadpage(page_url)
data = page.data
@@ -67,7 +67,7 @@ def registerOrLogin(page_url, forced=False):
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
global page, data
page_url = page_url.replace('?', '')
logger.info("url=" + page_url)
logger.log("url=" + page_url)

if 'unconfirmed' in page.url:
from lib import onesecmail
@@ -79,7 +79,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
}
httptools.downloadpage(page.url, post=postData)
jsonMail = onesecmail.waitForMail(mail)
logger.info(jsonMail)
logger.log(jsonMail)
if jsonMail:
code = jsonMail['subject'].split(' - ')[0]
page = httptools.downloadpage(page_url + '?code=' + code)
@@ -91,12 +91,12 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
if 'Registrati' in data:
platformtools.dialog_ok('HDmario', 'Username/password non validi')
registerOrLogin(page_url, True)
logger.info(data)
logger.log(data)
from lib import jsunpack_js2py
unpacked = jsunpack_js2py.unpack(scrapertools.find_single_match(data, '<script type="text/javascript">\n*\s*\n*(eval.*)'))
# p,a,c,k,e,d data -> xhr.setRequestHeader
secureProof = scrapertools.find_single_match(unpacked, """X-Secure-Proof['"]\s*,\s*['"]([^"']+)""")
logger.info('X-Secure-Proof=' + secureProof)
logger.log('X-Secure-Proof=' + secureProof)

data = httptools.downloadpage(baseUrl + '/pl/' + page_url.split('/')[-1].replace('?', '') + '.m3u8', headers=[['X-Secure-Proof', secureProof]]).data
filetools.write(xbmc.translatePath('special://temp/hdmario.m3u8'), data, 'w')
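
HDmario gates its playlist behind an X-Secure-Proof header recovered from the packed player JS; the resolver then fetches the .m3u8 with that header and writes it under special://temp so Kodi plays a local file. Roughly, assuming the addon's httptools/filetools helpers as used above (video id and proof value are illustrative):

    from core import httptools, filetools

    def fetch_playlist(video_id, secure_proof):
        # secure_proof comes from the unpacked player JS, as in the hunk above.
        url = "https://hdmario.live/pl/%s.m3u8" % video_id
        m3u8 = httptools.downloadpage(url, headers=[["X-Secure-Proof", secure_proof]]).data
        filetools.write("hdmario.m3u8", m3u8, "w")  # the addon targets special://temp/
        return "hdmario.m3u8"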

@@ -20,7 +20,7 @@ from platformcode import logger

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
post = {}
r = re.findall(r'type="hidden" name="(.+?)"\s* value="?(.+?)">', data)
@@ -45,6 +45,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
video_urls.append([scrapertools.get_filename_from_url(mediaurl)[-4:] + " [hugefiles]", mediaurl])

for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
logger.log("%s - %s" % (video_url[0], video_url[1]))

return video_urls

@@ -10,7 +10,7 @@ from platformcode import logger

data = ""
def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
global data
data = httptools.downloadpage(page_url)

@@ -22,7 +22,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)
logger.error(data)
video_urls = []
patron = 'source src="([^"]+)" type="([^"]+)" res=(\d+)'

@@ -6,7 +6,7 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
if "The file you were looking for could not be found" in data:
return False, config.get_localized_string(70449) % "jawcloud"
@@ -14,7 +14,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
video_urls = []
videourl = scrapertools.find_single_match(data, 'source src="([^"]+)')

@@ -9,7 +9,7 @@ from platformcode import logger

video_urls = []
def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

subtitles = ""
response = httptools.downloadpage(page_url)
@@ -21,7 +21,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []
media_url = scrapertools.find_single_match(data, '<video src="([^"]+)"')
if media_url:

@@ -7,7 +7,7 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
page_url = page_url.replace("embed/", "").replace(".html", ".json")
data = httptools.downloadpage(page_url).data
if '"error":"video_not_found"' in data or '"error":"Can\'t find VideoInstance"' in data:
@@ -17,7 +17,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % (page_url))
logger.log("(page_url='%s')" % (page_url))

video_urls = []
# Carga la página para coger las cookies
@@ -45,6 +45,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
pass

for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
logger.log("%s - %s" % (video_url[0], video_url[1]))

return video_urls

@@ -6,7 +6,7 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
if "Invalid or Deleted File" in data or "Well, looks like we" in data:
return False, config.get_localized_string(70449) % "Mediafire"
@@ -16,7 +16,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []
data = httptools.downloadpage(page_url).data
patron = "DownloadButtonAd-startDownload gbtnSecondary.*?href='([^']+)'"
@@ -27,5 +27,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
if len(matches) > 0:
video_urls.append([matches[0][-4:] + " [mediafire]", matches[0]])
for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
logger.log("%s - %s" % (video_url[0], video_url[1]))
return video_urls

@@ -78,7 +78,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
page_url = page_url.replace('/embed#', '/#')
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []

# si hay mas de 5 archivos crea un playlist con todos

@@ -10,7 +10,7 @@ from platformcode import logger, config

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
global data
data = httptools.downloadpage(page_url).data

@@ -25,7 +25,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)
video_urls = []
ext = '.mp4'

@@ -15,7 +15,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = re.sub(r"\n|\r|\t|\s{2}", "", httptools.downloadpage(page_url).data)
match = scrapertools.find_single_match(data, "<script type='text/javascript'>(.*?)</script>")
data = jsunpack.unpack(match)
@@ -23,9 +23,9 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
media_url = scrapertools.find_single_match(data, '{type:"video/mp4",src:"([^"]+)"}')
if not media_url:
media_url = scrapertools.find_single_match(data, '"file":"([^"]+)')
logger.info("media_url=" + media_url)
logger.log("media_url=" + media_url)
video_urls = list()
video_urls.append([scrapertools.get_filename_from_url(media_url)[-4:] + " [mp4upload]", media_url])
for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
logger.log("%s - %s" % (video_url[0], video_url[1]))
return video_urls
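
Several of these hosters (deltabit, mp4upload, the two "Object not found" modules) hide the media URL inside Dean Edwards p,a,c,k,e,d JavaScript, which is why the resolvers test for that marker and run the bundled lib.jsunpack before matching. A hedged sketch of the detect-then-unpack step, assuming lib.jsunpack as imported in the hunks above:

    import re
    from lib import jsunpack  # bundled with the addon

    def extract_packed_file_url(data):
        # 'data' is the raw hoster page; returns "" when nothing usable is found.
        if "p,a,c,k,e,d" not in data:
            return ""
        packed = re.search(r"(eval\(function\(p,a,c,k,e,.*?)</script>", data, re.DOTALL)
        if not packed:
            return ""
        unpacked = jsunpack.unpack(packed.group(1))
        match = re.search(r'"file":"([^"]+)', unpacked)
        return match.group(1) if match else ""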

@@ -21,7 +21,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info()
logger.log()
video_urls = []
data = httptools.downloadpage(page_url).data
data = scrapertools.find_single_match(data, 'var srca = \[(.*?)\]')

@@ -10,7 +10,7 @@ import re

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url)
global page_data
page_data = data.data
@@ -21,7 +21,7 @@ def test_video_exists(page_url):
return True, ""

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []
global page_data
video_url = scrapertools.find_single_match(decode(page_data), r"'src',\s*'([^']+)")

@@ -23,7 +23,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info()
logger.log()
video_urls = []
data = httptools.downloadpage(page_url).data
matches = scrapertools.find_multiple_matches(data, 'tracker: "([^"]+)"')

@@ -25,7 +25,7 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
#Deshabilitamos el server hasta nueva orden
return False, "[netutv] Servidor deshabilitado"
# http://netu.tv/watch_video.php=XX solo contiene una redireccion, ir directamente a http://hqq.tv/player/embed_player.php?vid=XX
@@ -37,7 +37,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)
video_urls = []

if "hash=" in page_url:

@@ -9,7 +9,7 @@ from platformcode import logger, config
headers = [['User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0']]

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url).data
if "Not Found" in data or "File was deleted" in data or "The file is being converted" in data or "Please try again later" in data:
return False, config.get_localized_string(70293) % "NowVideo"
@@ -20,7 +20,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
host = 'http://nowvideo.club'
logger.info("(nowvideo page_url='%s')" % page_url)
logger.log("(nowvideo page_url='%s')" % page_url)
video_urls = []
data = httptools.downloadpage(page_url).data
page_url_post = scrapertools.find_single_match(data, '<Form id="[^"]+" method="POST" action="([^"]+)">')
@@ -28,7 +28,7 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
imhuman = '&imhuman=' + scrapertools.find_single_match(data, 'name="imhuman" value="([^"]+)"').replace(" ", "+")
post = urllib.urlencode({k: v for k, v in scrapertools.find_multiple_matches(data, 'name="([^"]+)" value="([^"]*)"')}) + imhuman
data = httptools.downloadpage(host + page_url_post, post=post).data
logger.info("nowvideo data page_url2 ='%s'" % data)
logger.log("nowvideo data page_url2 ='%s'" % data)

headers.append(['Referer', page_url])
post_data = scrapertools.find_single_match(data,"</div>\s*<script>(eval.function.p,a,c,k,e,.*?)\s*</script>")
@@ -43,11 +43,11 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
_headers = urllib.urlencode(dict(headers))

for media_url in media_urls:
#logger.info("nowvideo data page_url2 ='%s'" % media_url)
#logger.log("nowvideo data page_url2 ='%s'" % media_url)
video_urls.append([" mp4 [nowvideo] ", media_url + '|' + _headers])

for video_url in media_urls:
logger.info("[nowvideo.py] %s - %s" % (video_url[0], video_url[1]))
logger.log("[nowvideo.py] %s - %s" % (video_url[0], video_url[1]))

return video_urls

@@ -57,7 +57,7 @@ def find_videos(data):
devuelve = []

patronvideos = r"nowvideo.club/(?:play|videos)?([a-z0-9A-Z]+)"
logger.info("[nowvideo.py] find_videos #" + patronvideos + "#")
logger.log("[nowvideo.py] find_videos #" + patronvideos + "#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)

for match in matches:
@@ -65,10 +65,10 @@ def find_videos(data):
url = 'http://nowvideo.club/%s' % match

if url not in encontrados:
logger.info(" url=" + url)
logger.log(" url=" + url)
devuelve.append([titulo, url, 'nowvideo'])
encontrados.add(url)
else:
logger.info(" url duplicada=" + url)
logger.log(" url duplicada=" + url)

return devuelve
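
find_videos deduplicates by keeping a set of URLs already returned, so a page that embeds the same video twice yields one entry. The same logic trimmed to its core (the title string is illustrative):

    import re

    def find_videos(data):
        encontrados = set()
        devuelve = []
        for match in re.findall(r"nowvideo\.club/(?:play|videos)?([a-z0-9A-Z]+)", data):
            url = "http://nowvideo.club/%s" % match
            if url not in encontrados:
                devuelve.append(["[nowvideo]", url, "nowvideo"])
                encontrados.add(url)  # skip this URL next time it appears
        return devuelve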

@@ -8,7 +8,7 @@ from platformcode import logger, config

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

data = httptools.downloadpage(page_url).data
if "copyrightsRestricted" in data or "COPYRIGHTS_RESTRICTED" in data:
@@ -20,7 +20,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)
video_urls = []

data = httptools.downloadpage(page_url).data

@@ -21,25 +21,25 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
if config.get_setting("premium", server="onefichier"):
user = config.get_setting("user", server="onefichier")
password = config.get_setting("password", server="onefichier")
url = "https://1fichier.com/login.pl"
logger.info("url=" + url)
logger.log("url=" + url)
post_parameters = {"mail": user, "pass": password, "lt": "on", "purge": "on", "valider": "Send"}
post = urllib.urlencode(post_parameters)
logger.info("post=" + post)
logger.log("post=" + post)

data = httptools.downloadpage(url, post=post).data
# logger.info("data="+data)
# logger.log("data="+data)

cookies = config.get_cookie_data()
logger.info("cookies=" + cookies)
logger.log("cookies=" + cookies)

# 1fichier.com TRUE / FALSE 1443553315 SID imC3q8MQ7cARw5tkXeWvKyrH493rR=1yvrjhxDAA0T0iEmqRfNF9GXwjrwPHssAQ
sid_cookie_value = scrapertools.find_single_match(cookies, "1fichier.com.*?SID\s+([A-Za-z0-9\+\=]+)")
logger.info("sid_cookie_value=" + sid_cookie_value)
logger.log("sid_cookie_value=" + sid_cookie_value)

# .1fichier.com TRUE / FALSE 1443553315 SID imC3q8MQ7cARw5tkXeWvKyrH493rR=1yvrjhxDAA0T0iEmqRfNF9GXwjrwPHssAQ
cookie = urllib.urlencode({"SID": sid_cookie_value})
@@ -50,16 +50,16 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; es-ES; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12'])
headers.append(['Cookie', cookie])
filename = scrapertools.get_header_from_response(page_url, header_to_get="Content-Disposition")
logger.info("filename=" + filename)
logger.log("filename=" + filename)

# Construye la URL final para Kodi
location = page_url + "|Cookie=" + cookie
logger.info("location=" + location)
logger.log("location=" + location)

video_urls = []
video_urls.append([filename[-4:] + " (Premium) [1fichier]", location])

for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
logger.log("%s - %s" % (video_url[0], video_url[1]))

return video_urls
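
The 1fichier premium path shows the Kodi convention used throughout these resolvers: headers required for playback are urlencoded and appended to the URL after a | separator, which Kodi strips off and sends as request headers. In isolation (the session id is invented):

    try:
        from urllib.parse import urlencode  # Python 3 (Kodi 19)
    except ImportError:
        from urllib import urlencode        # Python 2 fallback

    cookie = urlencode({"SID": "abc123"})  # illustrative session id
    location = "https://1fichier.com/?xyz" + "|Cookie=" + cookie
    # Kodi strips everything after '|' and sends it as request headers.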

@@ -6,7 +6,7 @@ from platformcode import config, logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

data = httptools.downloadpage(page_url, cookies=False).data
if 'File you are looking for is not found.' in data:
@@ -16,8 +16,8 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)
data = httptools.downloadpage(page_url).data
# logger.info(data)
# logger.log(data)
video_urls = support.get_jwplayer_mediaurl(data, 'Onlystream')
return video_urls

@@ -8,6 +8,6 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []
return video_urls

@@ -10,7 +10,7 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = httptools.downloadpage(page_url)

if "Object not found" in data.data or "longer exists on our servers" in data.data:
@@ -21,7 +21,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

data = httptools.downloadpage(page_url).data
if "p,a,c,k,e,d" in data:
@@ -44,6 +44,6 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
except:
pass
for video_url in video_urls:
logger.info(" %s - %s" % (video_url[0], video_url[1]))
logger.log(" %s - %s" % (video_url[0], video_url[1]))

return video_urls

@@ -24,13 +24,13 @@ from core import jsontools

def get_source(url):
logger.info()
logger.log()
data = httptools.downloadpage(url).data
data = re.sub(r'\n|\r|\t| |<br>|\s{2,}', "", data)
return data

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
data = get_source(page_url)

if "File was deleted" in data or "File Not Found" in data:
@@ -40,7 +40,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)

video_urls = []
referer = ''

@@ -10,7 +10,7 @@ from platformcode import logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
global data
data = httptools.downloadpage(page_url).data
if "Not Found" in data or "File was deleted" in data:
@@ -19,7 +19,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)
video_urls = []
ext = 'mp4'

@@ -9,7 +9,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []
data = scrapertools.httptools.downloadpage(page_url).data
media_url = scrapertools.find_single_match(data, 'var\s+video_source\s+\=\s+"([^"]+)"')
@@ -24,5 +24,5 @@ def get_video_url(page_url, premium=False, user="", password="", video_password=
else:
video_urls.append([scrapertools.get_filename_from_url(media_url)[-4:] + " [sendvid]", media_url])
for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
logger.log("%s - %s" % (video_url[0], video_url[1]))
return video_urls

@@ -5,7 +5,7 @@ from core import httptools, scrapertools
from platformcode import config, logger

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)

data = httptools.downloadpage(page_url).data

@@ -15,22 +15,22 @@ def test_video_exists(page_url):
return True, ""

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)
video_urls = []
quality ={'MOBILE':1,
'NORMAL':2,
'HD':3}
data = httptools.downloadpage(page_url).data
logger.info('SPEEDVIDEO DATA '+ data)
logger.log('SPEEDVIDEO DATA '+ data)

media_urls = scrapertools.find_multiple_matches(data, r"file:[^']'([^']+)',\s*label:[^\"]\"([^\"]+)\"")
logger.info("speed video - media urls: %s " % media_urls)
logger.log("speed video - media urls: %s " % media_urls)
for media_url, label in media_urls:
media_url = httptools.downloadpage(media_url, only_headers=True, follow_redirects=False).headers.get("location", "")

if media_url:
video_urls.append([media_url.split('.')[-1] + ' - ' + label + ' - ' + ' [Speedvideo]', media_url])
logger.info("speed video - media urls: %s " % video_urls)
logger.log("speed video - media urls: %s " % video_urls)

return sorted(video_urls, key=lambda x: quality[x[0].split(' - ')[1]])
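
Speedvideo labels each stream MOBILE, NORMAL or HD and ranks the candidates through a small dict, so the sorted list ends with the best quality. The same ordering in isolation, with made-up entries:

    quality = {"MOBILE": 1, "NORMAL": 2, "HD": 3}
    video_urls = [
        ["mp4 - HD -  [Speedvideo]", "https://example.com/hd.mp4"],
        ["mp4 - MOBILE -  [Speedvideo]", "https://example.com/mob.mp4"],
    ]
    # The label's second ' - ' field carries the quality name.
    video_urls.sort(key=lambda x: quality[x[0].split(" - ")[1]])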

@@ -6,7 +6,7 @@ from platformcode import logger, config

def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
global data
data = httptools.downloadpage(page_url).data
if "Video not found" in data:
@@ -15,7 +15,7 @@ def test_video_exists(page_url):

def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("url=" + page_url)
logger.log("url=" + page_url)
video_urls = []
global data

@@ -6,7 +6,7 @@ from platformcode import logger

def get_video_url(page_url, video_password):
logger.info("(page_url='%s')" % page_url)
logger.log("(page_url='%s')" % page_url)
video_urls = []
url = httptools.downloadpage(page_url).url
data = httptools.downloadpage(url).data