test rebase
This commit is contained in:
@@ -28,6 +28,7 @@ from __future__ import division
|
||||
from __future__ import absolute_import
|
||||
from future.builtins import range
|
||||
from future.builtins import bytes
|
||||
from future.builtins import str
|
||||
|
||||
__all__ = [
|
||||
'body_decode',
|
||||
|
||||
0
lib/future/backports/test/pystone.py
Normal file → Executable file
0
lib/future/backports/test/pystone.py
Normal file → Executable file
@@ -11,9 +11,9 @@ an application may want to handle an exception like a regular
|
||||
response.
|
||||
"""
|
||||
from __future__ import absolute_import, division, unicode_literals
|
||||
from ... import standard_library
|
||||
from future import standard_library
|
||||
|
||||
from . import response as urllib_response
|
||||
from future.backports.urllib import response as urllib_response
|
||||
|
||||
|
||||
__all__ = ['URLError', 'HTTPError', 'ContentTooShortError']
|
||||
|
||||
@@ -87,7 +87,7 @@ def clear_cache():
|
||||
# decoding and encoding. If valid use cases are
|
||||
# presented, we may relax this by using latin-1
|
||||
# decoding internally for 3.3
|
||||
_implicit_encoding = 'utf8'
|
||||
_implicit_encoding = 'ascii'
|
||||
_implicit_errors = 'strict'
|
||||
|
||||
def _noop(obj):
|
||||
@@ -122,7 +122,7 @@ class _ResultMixinStr(object):
|
||||
"""Standard approach to encoding parsed results from str to bytes"""
|
||||
__slots__ = ()
|
||||
|
||||
def encode(self, encoding='utf8', errors='strict'):
|
||||
def encode(self, encoding='ascii', errors='strict'):
|
||||
return self._encoded_counterpart(*(x.encode(encoding, errors) for x in self))
|
||||
|
||||
|
||||
@@ -130,7 +130,7 @@ class _ResultMixinBytes(object):
|
||||
"""Standard approach to decoding parsed results from bytes to str"""
|
||||
__slots__ = ()
|
||||
|
||||
def decode(self, encoding='utf8', errors='strict'):
|
||||
def decode(self, encoding='ascii', errors='strict'):
|
||||
return self._decoded_counterpart(*(x.decode(encoding, errors) for x in self))
|
||||
|
||||
|
||||
@@ -730,7 +730,7 @@ def quote_from_bytes(bs, safe='/'):
|
||||
###
|
||||
if isinstance(safe, str):
|
||||
# Normalize 'safe' by converting to bytes and removing non-ASCII chars
|
||||
safe = str(safe).encode('utf8', 'ignore')
|
||||
safe = str(safe).encode('ascii', 'ignore')
|
||||
else:
|
||||
### For Python-Future:
|
||||
safe = bytes(safe)
|
||||
|
||||
@@ -827,7 +827,7 @@ class ProxyHandler(BaseHandler):
|
||||
if user and password:
|
||||
user_pass = '%s:%s' % (unquote(user),
|
||||
unquote(password))
|
||||
creds = base64.b64encode(user_pass.encode()).decode("utf8")
|
||||
creds = base64.b64encode(user_pass.encode()).decode("ascii")
|
||||
req.add_header('Proxy-authorization', 'Basic ' + creds)
|
||||
hostport = unquote(hostport)
|
||||
req.set_proxy(hostport, proxy_type)
|
||||
@@ -977,7 +977,7 @@ class AbstractBasicAuthHandler(object):
|
||||
user, pw = self.passwd.find_user_password(realm, host)
|
||||
if pw is not None:
|
||||
raw = "%s:%s" % (user, pw)
|
||||
auth = "Basic " + base64.b64encode(raw.encode()).decode("utf8")
|
||||
auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii")
|
||||
if req.headers.get(self.auth_header, None) == auth:
|
||||
return None
|
||||
req.add_unredirected_header(self.auth_header, auth)
|
||||
@@ -1080,7 +1080,7 @@ class AbstractDigestAuthHandler(object):
|
||||
# authentication, and to provide some message integrity protection.
|
||||
# This isn't a fabulous effort, but it's probably Good Enough.
|
||||
s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime())
|
||||
b = s.encode("utf8") + _randombytes(8)
|
||||
b = s.encode("ascii") + _randombytes(8)
|
||||
dig = hashlib.sha1(b).hexdigest()
|
||||
return dig[:16]
|
||||
|
||||
@@ -1147,9 +1147,9 @@ class AbstractDigestAuthHandler(object):
|
||||
def get_algorithm_impls(self, algorithm):
|
||||
# lambdas assume digest modules are imported at the top level
|
||||
if algorithm == 'MD5':
|
||||
H = lambda x: hashlib.md5(x.encode("utf8")).hexdigest()
|
||||
H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest()
|
||||
elif algorithm == 'SHA':
|
||||
H = lambda x: hashlib.sha1(x.encode("utf8")).hexdigest()
|
||||
H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest()
|
||||
# XXX MD5-sess
|
||||
KD = lambda s, d: H("%s:%s" % (s, d))
|
||||
return H, KD
|
||||
@@ -1829,13 +1829,13 @@ class URLopener(object):
|
||||
|
||||
if proxy_passwd:
|
||||
proxy_passwd = unquote(proxy_passwd)
|
||||
proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('utf8')
|
||||
proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii')
|
||||
else:
|
||||
proxy_auth = None
|
||||
|
||||
if user_passwd:
|
||||
user_passwd = unquote(user_passwd)
|
||||
auth = base64.b64encode(user_passwd.encode()).decode('utf8')
|
||||
auth = base64.b64encode(user_passwd.encode()).decode('ascii')
|
||||
else:
|
||||
auth = None
|
||||
http_conn = connection_factory(host)
|
||||
@@ -2040,7 +2040,7 @@ class URLopener(object):
|
||||
msg.append('Content-type: %s' % type)
|
||||
if encoding == 'base64':
|
||||
# XXX is this encoding/decoding ok?
|
||||
data = base64.decodebytes(data.encode('utf8')).decode('latin-1')
|
||||
data = base64.decodebytes(data.encode('ascii')).decode('latin-1')
|
||||
else:
|
||||
data = unquote(data)
|
||||
msg.append('Content-Length: %d' % len(data))
|
||||
@@ -2498,17 +2498,7 @@ def _proxy_bypass_macosx_sysconf(host, proxy_settings):
|
||||
|
||||
|
||||
if sys.platform == 'darwin':
|
||||
try:
|
||||
from _scproxy import _get_proxy_settings, _get_proxies
|
||||
except:
|
||||
try:
|
||||
# By default use environment variables
|
||||
_get_proxy_settings = getproxies_environment
|
||||
_get_proxies = proxy_bypass_environment
|
||||
getproxies = getproxies_environment
|
||||
proxy_bypass = proxy_bypass_environment
|
||||
except:
|
||||
pass
|
||||
from _scproxy import _get_proxy_settings, _get_proxies
|
||||
|
||||
def proxy_bypass_macosx_sysconf(host):
|
||||
proxy_settings = _get_proxy_settings()
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from __future__ import absolute_import, division, unicode_literals
|
||||
from ...builtins import str
|
||||
from future.builtins import str
|
||||
""" robotparser.py
|
||||
|
||||
Copyright (C) 2000 Bastian Kleineidam
|
||||
@@ -13,8 +13,8 @@ from ...builtins import str
|
||||
"""
|
||||
|
||||
# Was: import urllib.parse, urllib.request
|
||||
from .. import urllib
|
||||
from . import parse as _parse, request as _request
|
||||
from future.backports import urllib
|
||||
from future.backports.urllib import parse as _parse, request as _request
|
||||
urllib.parse = _parse
|
||||
urllib.request = _request
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
``python-future``: pure Python implementation of Python 3 round().
|
||||
"""
|
||||
|
||||
from __future__ import division
|
||||
from future.utils import PYPY, PY26, bind_method
|
||||
|
||||
# Use the decimal module for simplicity of implementation (and
|
||||
@@ -29,8 +30,6 @@ def newround(number, ndigits=None):
|
||||
if hasattr(number, '__round__'):
|
||||
return number.__round__(ndigits)
|
||||
|
||||
if ndigits < 0:
|
||||
raise NotImplementedError('negative ndigits not supported yet')
|
||||
exponent = Decimal('10') ** (-ndigits)
|
||||
|
||||
if PYPY:
|
||||
@@ -42,15 +41,19 @@ def newround(number, ndigits=None):
|
||||
d = number
|
||||
else:
|
||||
if not PY26:
|
||||
d = Decimal.from_float(number).quantize(exponent,
|
||||
rounding=ROUND_HALF_EVEN)
|
||||
d = Decimal.from_float(number)
|
||||
else:
|
||||
d = from_float_26(number).quantize(exponent, rounding=ROUND_HALF_EVEN)
|
||||
d = from_float_26(number)
|
||||
|
||||
if ndigits < 0:
|
||||
result = newround(d / exponent) * exponent
|
||||
else:
|
||||
result = d.quantize(exponent, rounding=ROUND_HALF_EVEN)
|
||||
|
||||
if return_int:
|
||||
return int(d)
|
||||
return int(result)
|
||||
else:
|
||||
return float(d)
|
||||
return float(result)
|
||||
|
||||
|
||||
### From Python 2.7's decimal.py. Only needed to support Py2.6:
|
||||
|
||||
@@ -10,3 +10,9 @@ else:
|
||||
except ImportError:
|
||||
raise ImportError('The FileDialog module is missing. Does your Py2 '
|
||||
'installation include tkinter?')
|
||||
|
||||
try:
|
||||
from tkFileDialog import *
|
||||
except ImportError:
|
||||
raise ImportError('The tkFileDialog module is missing. Does your Py2 '
|
||||
'installation include tkinter?')
|
||||
|
||||
@@ -450,63 +450,35 @@ def install_aliases():
|
||||
# if hasattr(install_aliases, 'run_already'):
|
||||
# return
|
||||
for (newmodname, newobjname, oldmodname, oldobjname) in MOVES:
|
||||
try:
|
||||
__import__(newmodname)
|
||||
# We look up the module in sys.modules because __import__ just returns the
|
||||
# top-level package:
|
||||
newmod = sys.modules[newmodname]
|
||||
# newmod.__future_module__ = True
|
||||
__import__(newmodname)
|
||||
# We look up the module in sys.modules because __import__ just returns the
|
||||
# top-level package:
|
||||
newmod = sys.modules[newmodname]
|
||||
# newmod.__future_module__ = True
|
||||
|
||||
__import__(oldmodname)
|
||||
oldmod = sys.modules[oldmodname]
|
||||
__import__(oldmodname)
|
||||
oldmod = sys.modules[oldmodname]
|
||||
|
||||
obj = getattr(oldmod, oldobjname)
|
||||
setattr(newmod, newobjname, obj)
|
||||
except:
|
||||
try:
|
||||
flog.warning('*** FUTURE ERROR in module %s %s ' % (str(oldmod), str(oldobjname)))
|
||||
except:
|
||||
pass
|
||||
obj = getattr(oldmod, oldobjname)
|
||||
setattr(newmod, newobjname, obj)
|
||||
|
||||
# Hack for urllib so it appears to have the same structure on Py2 as on Py3
|
||||
try:
|
||||
import urllib
|
||||
from future.backports.urllib import response
|
||||
urllib.response = response
|
||||
sys.modules['urllib.response'] = response
|
||||
from future.backports.urllib import parse
|
||||
urllib.parse = parse
|
||||
sys.modules['urllib.parse'] = parse
|
||||
from future.backports.urllib import error
|
||||
urllib.error = error
|
||||
sys.modules['urllib.error'] = error
|
||||
except ImportError:
|
||||
try:
|
||||
flog.warning('*** FUTURE ERROR importing URLLIB.response, parse, error')
|
||||
urllib.response = urllib
|
||||
sys.modules['urllib.response'] = urllib
|
||||
urllib.parse = urllib
|
||||
sys.modules['urllib.parse'] = urllib
|
||||
urllib.error = urllib
|
||||
sys.modules['urllib.error'] = urllib
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
from future.backports.urllib import request
|
||||
urllib.request = request
|
||||
sys.modules['urllib.request'] = request
|
||||
from future.backports.urllib import robotparser
|
||||
urllib.robotparser = robotparser
|
||||
sys.modules['urllib.robotparser'] = robotparser
|
||||
except ImportError:
|
||||
try:
|
||||
flog.warning('*** FUTURE ERROR importing URLLIB.Request')
|
||||
urllib.request = urllib
|
||||
sys.modules['urllib.request'] = urllib
|
||||
urllib.robotparser = urllib
|
||||
sys.modules['urllib.robotparser'] = urllib
|
||||
except:
|
||||
pass
|
||||
import urllib
|
||||
from future.backports.urllib import request
|
||||
from future.backports.urllib import response
|
||||
from future.backports.urllib import parse
|
||||
from future.backports.urllib import error
|
||||
from future.backports.urllib import robotparser
|
||||
urllib.request = request
|
||||
urllib.response = response
|
||||
urllib.parse = parse
|
||||
urllib.error = error
|
||||
urllib.robotparser = robotparser
|
||||
sys.modules['urllib.request'] = request
|
||||
sys.modules['urllib.response'] = response
|
||||
sys.modules['urllib.parse'] = parse
|
||||
sys.modules['urllib.error'] = error
|
||||
sys.modules['urllib.robotparser'] = robotparser
|
||||
|
||||
# Patch the test module so it appears to have the same structure on Py2 as on Py3
|
||||
try:
|
||||
@@ -518,11 +490,8 @@ def install_aliases():
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
test.support = support
|
||||
sys.modules['test.support'] = support
|
||||
except:
|
||||
pass
|
||||
test.support = support
|
||||
sys.modules['test.support'] = support
|
||||
|
||||
# Patch the dbm module so it appears to have the same structure on Py2 as on Py3
|
||||
try:
|
||||
@@ -530,26 +499,23 @@ def install_aliases():
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
from future.moves.dbm import dumb
|
||||
dbm.dumb = dumb
|
||||
sys.modules['dbm.dumb'] = dumb
|
||||
try:
|
||||
from future.moves.dbm import dumb
|
||||
dbm.dumb = dumb
|
||||
sys.modules['dbm.dumb'] = dumb
|
||||
try:
|
||||
from future.moves.dbm import gnu
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
dbm.gnu = gnu
|
||||
sys.modules['dbm.gnu'] = gnu
|
||||
try:
|
||||
from future.moves.dbm import ndbm
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
dbm.ndbm = ndbm
|
||||
sys.modules['dbm.ndbm'] = ndbm
|
||||
except:
|
||||
flog.warning('*** FUTURE ERROR importing MOVES.dbm')
|
||||
from future.moves.dbm import gnu
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
dbm.gnu = gnu
|
||||
sys.modules['dbm.gnu'] = gnu
|
||||
try:
|
||||
from future.moves.dbm import ndbm
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
dbm.ndbm = ndbm
|
||||
sys.modules['dbm.ndbm'] = ndbm
|
||||
|
||||
# install_aliases.run_already = True
|
||||
|
||||
|
||||
@@ -527,9 +527,9 @@ def implements_iterator(cls):
|
||||
return cls
|
||||
|
||||
if PY3:
|
||||
get_next = lambda x: x.next
|
||||
else:
|
||||
get_next = lambda x: x.__next__
|
||||
else:
|
||||
get_next = lambda x: x.next
|
||||
|
||||
|
||||
def encode_filename(filename):
|
||||
|
||||
@@ -3,6 +3,11 @@
|
||||
# Parámetros de configuración (kodi)
|
||||
# ------------------------------------------------------------
|
||||
|
||||
#from builtins import str
|
||||
import sys
|
||||
PY3 = False
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
@@ -62,10 +67,12 @@ def get_platform(full_version=False):
|
||||
ret = {}
|
||||
codename = {"10": "dharma", "11": "eden", "12": "frodo",
|
||||
"13": "gotham", "14": "helix", "15": "isengard",
|
||||
"16": "jarvis", "17": "krypton", "18": "leia"}
|
||||
"16": "jarvis", "17": "krypton", "18": "leia",
|
||||
"19": "matrix"}
|
||||
code_db = {'10': 'MyVideos37.db', '11': 'MyVideos60.db', '12': 'MyVideos75.db',
|
||||
'13': 'MyVideos78.db', '14': 'MyVideos90.db', '15': 'MyVideos93.db',
|
||||
'16': 'MyVideos99.db', '17': 'MyVideos107.db', '18': 'MyVideos116.db'}
|
||||
'16': 'MyVideos99.db', '17': 'MyVideos107.db', '18': 'MyVideos116.db',
|
||||
'19': 'MyVideos116.db'}
|
||||
|
||||
num_version = xbmc.getInfoLabel('System.BuildVersion')
|
||||
num_version = re.match("\d+\.\d+", num_version).group(0)
|
||||
@@ -334,7 +341,7 @@ def set_setting(name, value, channel="", server=""):
|
||||
|
||||
__settings__.setSetting(name, value)
|
||||
|
||||
except Exception, ex:
|
||||
except Exception as ex:
|
||||
from platformcode import logger
|
||||
logger.error("Error al convertir '%s' no se guarda el valor \n%s" % (name, ex))
|
||||
return None
|
||||
@@ -346,7 +353,18 @@ def get_localized_string(code):
|
||||
dev = __language__(code)
|
||||
|
||||
try:
|
||||
dev = dev.encode("utf-8")
|
||||
# Unicode to utf8
|
||||
if isinstance(dev, unicode):
|
||||
dev = dev.encode("utf8")
|
||||
if PY3: dev = dev.decode("utf8")
|
||||
|
||||
# All encodings to utf8
|
||||
elif not PY3 and isinstance(dev, str):
|
||||
dev = unicode(dev, "utf8", errors="replace").encode("utf8")
|
||||
|
||||
# Bytes encodings to utf8
|
||||
elif PY3 and isinstance(dev, bytes):
|
||||
dev = dev.decode("utf8")
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -391,6 +409,14 @@ def get_data_path():
|
||||
return dev
|
||||
|
||||
|
||||
def get_icon():
|
||||
return xbmc.translatePath(__settings__.getAddonInfo('icon'))
|
||||
|
||||
|
||||
def get_fanart():
|
||||
return xbmc.translatePath(__settings__.getAddonInfo('fanart'))
|
||||
|
||||
|
||||
def get_cookie_data():
|
||||
import os
|
||||
ficherocookies = os.path.join(get_data_path(), 'cookies.dat')
|
||||
|
||||
@@ -3,17 +3,23 @@
|
||||
# Updater (kodi)
|
||||
# --------------------------------------------------------------------------------
|
||||
|
||||
import json
|
||||
import os
|
||||
import traceback
|
||||
#from builtins import str
|
||||
import sys
|
||||
PY3 = False
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
|
||||
import traceback
|
||||
import xbmc
|
||||
import xbmcaddon
|
||||
import threading
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
from core import filetools
|
||||
from core import jsontools
|
||||
from platformcode import config, logger, platformtools
|
||||
|
||||
from core import jsontools
|
||||
from core import filetools
|
||||
|
||||
json_data_file_name = 'custom_code.json'
|
||||
|
||||
|
||||
@@ -21,10 +27,9 @@ def init():
|
||||
logger.info()
|
||||
|
||||
"""
|
||||
Todo el código añadido al add-on se borra con cada actualización. Esta función permite restaurarlo automáticamente con cada actualización.
|
||||
Esto permite al usuario tener su propio código, bajo su responsabilidad, y restaurarlo al add-on cada vez que se actualiza.
|
||||
Todo el código añadido al add-on se borra con cada actualización. Esta función permite restaurarlo automáticamente con cada actualización. Esto permite al usuario tener su propio código, bajo su responsabilidad, y restaurarlo al add-on cada vez que se actualiza.
|
||||
|
||||
El mecanismo funciona copiando el contenido de la carpeta-arbol ".\userdata\addon_data\plugin.video.alfa\custom_code\..." sobre
|
||||
El mecanismo funciona copiando el contenido de la carpeta-arbol "./userdata/addon_data/plugin.video.alfa/custom_code/..." sobre
|
||||
las carpetas de código del add-on. No verifica el contenido, solo vuelca(reemplaza) el contenido de "custom_code".
|
||||
|
||||
El usuario almacenará en las subcarpetas de "custom_code" su código actualizado y listo para ser copiado en cualquier momento.
|
||||
@@ -37,7 +42,7 @@ def init():
|
||||
from platformcode import custom_code
|
||||
custom_code.init()
|
||||
|
||||
2.- En el inicio de Kodi, comprueba si existe la carpeta "custom_code" en ".\userdata\addon_data\plugin.video.alfa\".
|
||||
2.- En el inicio de Kodi, comprueba si existe la carpeta "custom_code" en "./userdata/addon_data/plugin.video.alfa/".
|
||||
Si no existe, la crea y sale sin más, dando al ususario la posibilidad de copiar sobre esa estructura su código,
|
||||
y que la función la vuelque sobre el add-on en el próximo inicio de Kodi.
|
||||
|
||||
@@ -55,31 +60,45 @@ def init():
|
||||
|
||||
Tiempos: Copiando 7 archivos de prueba, el proceso ha tardado una décima de segundo.
|
||||
"""
|
||||
|
||||
|
||||
try:
|
||||
#Borra el .zip de instalación de Alfa de la carpeta Packages, por si está corrupto, y que así se pueda descargar de nuevo
|
||||
version = 'plugin.video.alfa-%s.zip' % config.get_addon_version(with_fix=False)
|
||||
filetools.remove(filetools.join(xbmc.translatePath('special://home'), 'addons', 'packages', version), True)
|
||||
|
||||
#Verifica si Kodi tiene algún achivo de Base de Datos de Vídeo de versiones anteriores, entonces los borra
|
||||
verify_Kodi_video_DB()
|
||||
|
||||
#LIBTORRENT: se descarga el binario de Libtorrent cada vez que se actualiza Alfa
|
||||
try:
|
||||
threading.Thread(target=update_libtorrent).start() # Creamos un Thread independiente, hasta el fin de Kodi
|
||||
time.sleep(2) # Dejamos terminar la inicialización...
|
||||
except: # Si hay problemas de threading, nos vamos
|
||||
logger.error(traceback.format_exc())
|
||||
|
||||
#QUASAR: Preguntamos si se hacen modificaciones a Quasar
|
||||
if not filetools.exists(os.path.join(config.get_data_path(), "quasar.json")) and not config.get_setting('addon_quasar_update', default=False):
|
||||
if not filetools.exists(filetools.join(config.get_data_path(), "quasar.json")) \
|
||||
and not config.get_setting('addon_quasar_update', default=False):
|
||||
question_update_external_addon("quasar")
|
||||
|
||||
#QUASAR: Hacemos las modificaciones a Quasar, si está permitido, y si está instalado
|
||||
if config.get_setting('addon_quasar_update', default=False):
|
||||
if config.get_setting('addon_quasar_update', default=False) or \
|
||||
(filetools.exists(filetools.join(config.get_data_path(), \
|
||||
"quasar.json")) and not xbmc.getCondVisibility('System.HasAddon("plugin.video.quasar")')):
|
||||
if not update_external_addon("quasar"):
|
||||
platformtools.dialog_notification("Actualización Quasar", "Ha fallado. Consulte el log")
|
||||
|
||||
#Existe carpeta "custom_code" ? Si no existe se crea y se sale
|
||||
custom_code_dir = os.path.join(config.get_data_path(), 'custom_code')
|
||||
if os.path.exists(custom_code_dir) == False:
|
||||
custom_code_dir = filetools.join(config.get_data_path(), 'custom_code')
|
||||
if not filetools.exists(custom_code_dir):
|
||||
create_folder_structure(custom_code_dir)
|
||||
return
|
||||
|
||||
else:
|
||||
#Existe "custom_code.json" ? Si no existe se crea
|
||||
custom_code_json_path = config.get_runtime_path()
|
||||
custom_code_json = os.path.join(custom_code_json_path, 'custom_code.json')
|
||||
if os.path.exists(custom_code_json) == False:
|
||||
custom_code_json = filetools.join(custom_code_json_path, 'custom_code.json')
|
||||
if not filetools.exists(custom_code_json):
|
||||
create_json(custom_code_json_path)
|
||||
|
||||
#Se verifica si la versión del .json y del add-on son iguales. Si es así se sale. Si no se copia "custom_code" al add-on
|
||||
@@ -92,13 +111,13 @@ def create_folder_structure(custom_code_dir):
|
||||
logger.info()
|
||||
|
||||
#Creamos todas las carpetas. La importante es "custom_code". Las otras sirven meramente de guía para evitar errores de nombres...
|
||||
os.mkdir(custom_code_dir)
|
||||
os.mkdir(filetools.join(custom_code_dir, 'channels'))
|
||||
os.mkdir(filetools.join(custom_code_dir, 'core'))
|
||||
os.mkdir(filetools.join(custom_code_dir, 'lib'))
|
||||
os.mkdir(filetools.join(custom_code_dir, 'platformcode'))
|
||||
os.mkdir(filetools.join(custom_code_dir, 'resources'))
|
||||
os.mkdir(filetools.join(custom_code_dir, 'servers'))
|
||||
filetools.mkdir(custom_code_dir)
|
||||
filetools.mkdir(filetools.join(custom_code_dir, 'channels'))
|
||||
filetools.mkdir(filetools.join(custom_code_dir, 'core'))
|
||||
filetools.mkdir(filetools.join(custom_code_dir, 'lib'))
|
||||
filetools.mkdir(filetools.join(custom_code_dir, 'platformcode'))
|
||||
filetools.mkdir(filetools.join(custom_code_dir, 'resources'))
|
||||
filetools.mkdir(filetools.join(custom_code_dir, 'servers'))
|
||||
|
||||
return
|
||||
|
||||
@@ -108,9 +127,9 @@ def create_json(custom_code_json_path, json_name=json_data_file_name):
|
||||
|
||||
#Guardamaos el json con la versión de Alfa vacía, para permitir hacer la primera copia
|
||||
json_data_file = filetools.join(custom_code_json_path, json_name)
|
||||
json_file = open(json_data_file, "a+")
|
||||
json_file.write(json.dumps({"addon_version": ""}))
|
||||
json_file.close()
|
||||
if filetools.exists(json_data_file):
|
||||
filetools.remove(json_data_file)
|
||||
result = filetools.write(json_data_file, jsontools.dump({"addon_version": ""}))
|
||||
|
||||
return
|
||||
|
||||
@@ -122,15 +141,21 @@ def verify_copy_folders(custom_code_dir, custom_code_json_path):
|
||||
json_data_file = filetools.join(custom_code_json_path, json_data_file_name)
|
||||
json_data = jsontools.load(filetools.read(json_data_file))
|
||||
current_version = config.get_addon_version(with_fix=False)
|
||||
if current_version == json_data['addon_version']:
|
||||
return
|
||||
if not json_data or not 'addon_version' in json_data:
|
||||
create_json(custom_code_json_path)
|
||||
json_data = jsontools.load(filetools.read(json_data_file))
|
||||
try:
|
||||
if current_version == json_data['addon_version']:
|
||||
return
|
||||
except:
|
||||
logger.error(traceback.format_exc(1))
|
||||
|
||||
#Ahora copiamos los archivos desde el área de Userdata, Custom_code, sobre las carpetas del add-on
|
||||
for root, folders, files in os.walk(custom_code_dir):
|
||||
for root, folders, files in filetools.walk(custom_code_dir):
|
||||
for file in files:
|
||||
input_file = filetools.join(root, file)
|
||||
output_file = input_file.replace(custom_code_dir, custom_code_json_path)
|
||||
if filetools.copy(input_file, output_file, silent=True) == False:
|
||||
if not filetools.copy(input_file, output_file, silent=True):
|
||||
return
|
||||
|
||||
#Guardamaos el json con la versión actual de Alfa, para no volver a hacer la copia hasta la nueva versión
|
||||
@@ -160,38 +185,163 @@ def question_update_external_addon(addon_name):
|
||||
create_json(config.get_data_path(), "%s.json" % addon_name)
|
||||
|
||||
return stat
|
||||
|
||||
|
||||
|
||||
def update_external_addon(addon_name):
|
||||
logger.info(addon_name)
|
||||
|
||||
#Verificamos que el addon está instalado
|
||||
if xbmc.getCondVisibility('System.HasAddon("plugin.video.%s")' % addon_name):
|
||||
#Path de actuali<aciones de Alfa
|
||||
alfa_addon_updates = filetools.join(config.get_runtime_path(), filetools.join("lib", addon_name))
|
||||
|
||||
#Path de destino en addon externo
|
||||
__settings__ = xbmcaddon.Addon(id="plugin.video." + addon_name)
|
||||
if addon_name.lower() in ['quasar', 'elementum']:
|
||||
addon_path = filetools.join(xbmc.translatePath(__settings__.getAddonInfo('Path')), filetools.join("resources", filetools.join("site-packages", addon_name)))
|
||||
try:
|
||||
#Verificamos que el addon está instalado
|
||||
if xbmc.getCondVisibility('System.HasAddon("plugin.video.%s")' % addon_name):
|
||||
#Path de actualizaciones de Alfa
|
||||
alfa_addon_updates_mig = filetools.join(config.get_runtime_path(), "lib")
|
||||
alfa_addon_updates = filetools.join(alfa_addon_updates_mig, addon_name)
|
||||
|
||||
#Path de destino en addon externo
|
||||
__settings__ = xbmcaddon.Addon(id="plugin.video." + addon_name)
|
||||
if addon_name.lower() in ['quasar', 'elementum']:
|
||||
addon_path_mig = filetools.join(xbmc.translatePath(__settings__.getAddonInfo('Path')), \
|
||||
filetools.join("resources", "site-packages"))
|
||||
addon_path = filetools.join(addon_path_mig, addon_name)
|
||||
else:
|
||||
addon_path_mig = ''
|
||||
addon_path = ''
|
||||
|
||||
#Hay modificaciones en Alfa? Las copiamos al addon, incuidas las carpetas de migración a PY3
|
||||
if filetools.exists(alfa_addon_updates) and filetools.exists(addon_path):
|
||||
for root, folders, files in filetools.walk(alfa_addon_updates_mig):
|
||||
if ('future' in root or 'past' in root) and not 'concurrent' in root:
|
||||
for file in files:
|
||||
alfa_addon_updates_mig_folder = root.replace(alfa_addon_updates_mig, addon_path_mig)
|
||||
if not filetools.exists(alfa_addon_updates_mig_folder):
|
||||
filetools.mkdir(alfa_addon_updates_mig_folder)
|
||||
if file.endswith('.pyo') or file.endswith('.pyd'):
|
||||
continue
|
||||
input_file = filetools.join(root, file)
|
||||
output_file = input_file.replace(alfa_addon_updates_mig, addon_path_mig)
|
||||
if not filetools.copy(input_file, output_file, silent=True):
|
||||
logger.error('Error en la copia de MIGRACIÓN: Input: %s o Output: %s' % (input_file, output_file))
|
||||
return False
|
||||
|
||||
for root, folders, files in filetools.walk(alfa_addon_updates):
|
||||
for file in files:
|
||||
input_file = filetools.join(root, file)
|
||||
output_file = input_file.replace(alfa_addon_updates, addon_path)
|
||||
if not filetools.copy(input_file, output_file, silent=True):
|
||||
logger.error('Error en la copia: Input: %s o Output: %s' % (input_file, output_file))
|
||||
return False
|
||||
return True
|
||||
else:
|
||||
logger.error('Alguna carpeta no existe: Alfa: %s o %s: %s' % (alfa_addon_updates, addon_name, addon_path))
|
||||
# Se ha desinstalado Quasar, reseteamos la opción
|
||||
else:
|
||||
addon_path = ''
|
||||
|
||||
#Hay modificaciones en Alfa? Las copiamos al addon
|
||||
if filetools.exists(alfa_addon_updates) and filetools.exists(addon_path):
|
||||
for root, folders, files in os.walk(alfa_addon_updates):
|
||||
for file in files:
|
||||
input_file = filetools.join(root, file)
|
||||
output_file = input_file.replace(alfa_addon_updates, addon_path)
|
||||
if filetools.copy(input_file, output_file, silent=True) == False:
|
||||
logger.error('Error en la copia: Input: %s o Output: %s' % (input_file, output_file))
|
||||
return False
|
||||
config.set_setting('addon_quasar_update', False)
|
||||
if filetools.exists(filetools.join(config.get_data_path(), "%s.json" % addon_name)):
|
||||
filetools.remove(filetools.join(config.get_data_path(), "%s.json" % addon_name))
|
||||
return True
|
||||
else:
|
||||
logger.error('Alguna carpeta no existe: Alfa: %s o %s: %s' % (alfa_addon_updates, addon_name, addon_path))
|
||||
except:
|
||||
logger.error(traceback.format_exc())
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def update_libtorrent():
|
||||
logger.info()
|
||||
|
||||
if not config.get_setting("mct_buffer", server="torrent", default=""):
|
||||
default = config.get_setting("torrent_client", server="torrent", default=0)
|
||||
config.set_setting("torrent_client", default, server="torrent")
|
||||
config.set_setting("mct_buffer", "50", server="torrent")
|
||||
if config.get_setting("mct_download_path", server="torrent", default=config.get_setting("downloadpath")):
|
||||
config.set_setting("mct_download_path", config.get_setting("downloadpath"), server="torrent")
|
||||
config.set_setting("mct_background_download", True, server="torrent")
|
||||
config.set_setting("mct_rar_unpack", True, server="torrent")
|
||||
config.set_setting("bt_buffer", "50", server="torrent")
|
||||
if config.get_setting("bt_download_path", server="torrent", default=config.get_setting("downloadpath")):
|
||||
config.set_setting("bt_download_path", config.get_setting("downloadpath"), server="torrent")
|
||||
config.set_setting("mct_download_limit", "", server="torrent")
|
||||
config.set_setting("magnet2torrent", False, server="torrent")
|
||||
|
||||
if not filetools.exists(filetools.join(config.get_runtime_path(), "custom_code.json")) or not \
|
||||
config.get_setting("unrar_path", server="torrent", default=""):
|
||||
|
||||
path = filetools.join(config.get_runtime_path(), 'lib', 'rarfiles')
|
||||
creationflags = ''
|
||||
sufix = ''
|
||||
unrar = ''
|
||||
for device in filetools.listdir(path):
|
||||
if xbmc.getCondVisibility("system.platform.android") and 'android' not in device: continue
|
||||
if xbmc.getCondVisibility("system.platform.windows") and 'windows' not in device: continue
|
||||
if not xbmc.getCondVisibility("system.platform.windows") and not xbmc.getCondVisibility("system.platform.android") \
|
||||
and ('android' in device or 'windows' in device): continue
|
||||
if 'windows' in device:
|
||||
creationflags = 0x08000000
|
||||
sufix = '.exe'
|
||||
else:
|
||||
creationflags = ''
|
||||
sufix = ''
|
||||
unrar = filetools.join(path, device, 'unrar%s') % sufix
|
||||
if not filetools.exists(unrar): unrar = ''
|
||||
if unrar:
|
||||
if not xbmc.getCondVisibility("system.platform.windows"):
|
||||
try:
|
||||
if xbmc.getCondVisibility("system.platform.android"):
|
||||
# Para Android copiamos el binario a la partición del sistema
|
||||
unrar_org = unrar
|
||||
unrar = filetools.join(xbmc.translatePath('special://xbmc/'), 'files').replace('/cache/apk/assets', '')
|
||||
if not filetools.exists(unrar):
|
||||
filetools.mkdir(unrar)
|
||||
unrar = filetools.join(unrar, 'unrar')
|
||||
filetools.copy(unrar_org, unrar, silent=True)
|
||||
|
||||
command = ['chmod', '777', '%s' % unrar]
|
||||
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
output_cmd, error_cmd = p.communicate()
|
||||
command = ['ls', '-l', unrar]
|
||||
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
output_cmd, error_cmd = p.communicate()
|
||||
xbmc.log('######## UnRAR file: %s' % str(output_cmd), xbmc.LOGNOTICE)
|
||||
except:
|
||||
xbmc.log('######## UnRAR ERROR in path: %s' % str(unrar), xbmc.LOGNOTICE)
|
||||
logger.error(traceback.format_exc(1))
|
||||
|
||||
try:
|
||||
if xbmc.getCondVisibility("system.platform.windows"):
|
||||
p = subprocess.Popen(unrar, stdout=subprocess.PIPE, stderr=subprocess.PIPE, creationflags=creationflags)
|
||||
else:
|
||||
p = subprocess.Popen(unrar, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
output_cmd, error_cmd = p.communicate()
|
||||
if p.returncode != 0 or error_cmd:
|
||||
xbmc.log('######## UnRAR returncode in module %s: %s, %s in %s' % \
|
||||
(device, str(p.returncode), str(error_cmd), unrar), xbmc.LOGNOTICE)
|
||||
unrar = ''
|
||||
else:
|
||||
xbmc.log('######## UnRAR OK in %s: %s' % (device, unrar), xbmc.LOGNOTICE)
|
||||
break
|
||||
except:
|
||||
xbmc.log('######## UnRAR ERROR in module %s: %s' % (device, unrar), xbmc.LOGNOTICE)
|
||||
logger.error(traceback.format_exc(1))
|
||||
unrar = ''
|
||||
|
||||
if unrar: config.set_setting("unrar_path", unrar, server="torrent")
|
||||
|
||||
if filetools.exists(filetools.join(config.get_runtime_path(), "custom_code.json")) and \
|
||||
config.get_setting("libtorrent_path", server="torrent", default="") :
|
||||
return
|
||||
|
||||
try:
|
||||
from lib.python_libtorrent.python_libtorrent import get_libtorrent
|
||||
except Exception as e:
|
||||
logger.error(traceback.format_exc(1))
|
||||
if not PY3:
|
||||
e = unicode(str(e), "utf8", errors="replace").encode("utf8")
|
||||
config.set_setting("libtorrent_path", "", server="torrent")
|
||||
if not config.get_setting("libtorrent_error", server="torrent", default=''):
|
||||
config.set_setting("libtorrent_error", str(e), server="torrent")
|
||||
|
||||
return
|
||||
|
||||
|
||||
def verify_Kodi_video_DB():
|
||||
logger.info()
|
||||
import random
|
||||
@@ -204,12 +354,12 @@ def verify_Kodi_video_DB():
|
||||
path = filetools.join(xbmc.translatePath("special://masterprofile/"), "Database")
|
||||
if filetools.exists(path):
|
||||
platform = config.get_platform(full_version=True)
|
||||
if platform:
|
||||
if platform and platform['num_version'] <= 19:
|
||||
db_files = filetools.walk(path)
|
||||
if filetools.exists(filetools.join(path, platform['video_db'])):
|
||||
for root, folders, files in db_files:
|
||||
for file in files:
|
||||
if file != platform['video_db']:
|
||||
if platform['video_db'] not in file:
|
||||
if file.startswith('MyVideos'):
|
||||
randnum = str(random.randrange(1, 999999))
|
||||
filetools.rename(filetools.join(path, file), 'OLD_' + randnum +'_' + file)
|
||||
|
||||
@@ -5,17 +5,25 @@
|
||||
# Based on code from the Mega add-on (xbmchub.com)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
from __future__ import division
|
||||
from future import standard_library
|
||||
standard_library.install_aliases()
|
||||
#from builtins import str
|
||||
from past.utils import old_div
|
||||
import sys
|
||||
PY3 = False
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
|
||||
import urllib.request, urllib.parse, urllib.error
|
||||
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import threading
|
||||
import time
|
||||
import urllib
|
||||
|
||||
import urllib2
|
||||
import xbmc
|
||||
import xbmcgui
|
||||
|
||||
from core import downloadtools
|
||||
from platformcode import config, logger
|
||||
|
||||
@@ -43,7 +51,7 @@ def download_and_play(url, file_name, download_path):
|
||||
|
||||
while not cancelled and download_thread.isAlive():
|
||||
dialog.update(download_thread.get_progress(), config.get_localized_string(60313),
|
||||
"Velocidad: " + str(int(download_thread.get_speed() / 1024)) + " KB/s " + str(
|
||||
"Velocidad: " + str(int(old_div(download_thread.get_speed(), 1024))) + " KB/s " + str(
|
||||
download_thread.get_actual_size()) + "MB de " + str(
|
||||
download_thread.get_total_size()) + "MB",
|
||||
"Tiempo restante: " + str(downloadtools.sec_to_hms(download_thread.get_remaining_time())))
|
||||
@@ -232,7 +240,7 @@ class DownloadThread(threading.Thread):
|
||||
for additional_header in additional_headers:
|
||||
logger.info("additional_header: " + additional_header)
|
||||
name = re.findall("(.*?)=.*?", additional_header)[0]
|
||||
value = urllib.unquote_plus(re.findall(".*?=(.*?)$", additional_header)[0])
|
||||
value = urllib.parse.unquote_plus(re.findall(".*?=(.*?)$", additional_header)[0])
|
||||
headers.append([name, value])
|
||||
|
||||
self.url = self.url.split("|")[0]
|
||||
@@ -242,18 +250,18 @@ class DownloadThread(threading.Thread):
|
||||
socket.setdefaulttimeout(60)
|
||||
|
||||
# Crea la petición y añade las cabeceras
|
||||
h = urllib2.HTTPHandler(debuglevel=0)
|
||||
request = urllib2.Request(self.url)
|
||||
h = urllib.request.HTTPHandler(debuglevel=0)
|
||||
request = urllib.request.Request(self.url)
|
||||
for header in headers:
|
||||
logger.info("Header=" + header[0] + ": " + header[1])
|
||||
request.add_header(header[0], header[1])
|
||||
|
||||
# Lanza la petición
|
||||
opener = urllib2.build_opener(h)
|
||||
urllib2.install_opener(opener)
|
||||
opener = urllib.request.build_opener(h)
|
||||
urllib.request.install_opener(opener)
|
||||
try:
|
||||
connexion = opener.open(request)
|
||||
except urllib2.HTTPError, e:
|
||||
except urllib.error.HTTPError as e:
|
||||
logger.error("error %d (%s) al abrir la url %s" % (e.code, e.msg, self.url))
|
||||
# print e.code
|
||||
# print e.msg
|
||||
@@ -315,10 +323,10 @@ class DownloadThread(threading.Thread):
|
||||
bloqueleido = connexion.read(blocksize)
|
||||
after = time.time()
|
||||
if (after - before) > 0:
|
||||
self.velocidad = len(bloqueleido) / ((after - before))
|
||||
self.velocidad = old_div(len(bloqueleido), ((after - before)))
|
||||
falta = totalfichero - grabado
|
||||
if self.velocidad > 0:
|
||||
self.tiempofalta = falta / self.velocidad
|
||||
self.tiempofalta = old_div(falta, self.velocidad)
|
||||
else:
|
||||
self.tiempofalta = 0
|
||||
break
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from builtins import map
|
||||
#from builtins import str
|
||||
import sys
|
||||
PY3 = False
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
from threading import Timer
|
||||
|
||||
import xbmc
|
||||
@@ -113,7 +118,7 @@ class Main(xbmcgui.WindowXMLDialog):
|
||||
if config.get_platform(True)['num_version'] < 18:
|
||||
self.setCoordinateResolution(2)
|
||||
|
||||
for menuentry in MAIN_MENU.keys():
|
||||
for menuentry in list(MAIN_MENU.keys()):
|
||||
item = xbmcgui.ListItem(MAIN_MENU[menuentry]["label"])
|
||||
item.setProperty("thumb", str(MAIN_MENU[menuentry]["icon"]))
|
||||
item.setProperty("identifier", str(menuentry))
|
||||
|
||||
@@ -3,24 +3,33 @@
|
||||
# XBMC Launcher (xbmc / kodi)
|
||||
# ------------------------------------------------------------
|
||||
|
||||
#from future import standard_library
|
||||
#standard_library.install_aliases()
|
||||
#from builtins import str
|
||||
import sys
|
||||
PY3 = False
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
|
||||
if PY3:
|
||||
import urllib.error as urllib2 # Es muy lento en PY2. En PY3 es nativo
|
||||
else:
|
||||
import urllib2 # Usamos el nativo de PY2 que es más rápido
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
import urllib2
|
||||
import time
|
||||
|
||||
from core import channeltools
|
||||
from core import scrapertools
|
||||
from core import servertools
|
||||
from core import trakt_tools
|
||||
from core import videolibrarytools
|
||||
from core import trakt_tools
|
||||
from core.item import Item
|
||||
from platformcode import config, logger
|
||||
from platformcode import platformtools
|
||||
from platformcode.logger import WebErrorException
|
||||
|
||||
|
||||
|
||||
def start():
|
||||
""" Primera funcion que se ejecuta al entrar en el plugin.
|
||||
Dentro de esta funcion deberian ir todas las llamadas a las
|
||||
@@ -30,21 +39,19 @@ def start():
|
||||
#config.set_setting('show_once', True)
|
||||
# Test if all the required directories are created
|
||||
config.verify_directories_created()
|
||||
|
||||
# controlla se l'utente ha qualche problema di connessione
|
||||
# se lo ha: non lo fa entrare nell'addon
|
||||
# se ha problemi di DNS avvia ma lascia entrare
|
||||
# se tutto ok: entra nell'addon
|
||||
from specials import resolverdns
|
||||
|
||||
from specials.checkhost import test_conn
|
||||
import threading
|
||||
threading.Thread(target=test_conn, args=(True, not config.get_setting('resolver_dns'), True, [], [], True)).start()
|
||||
# check_adsl = test_conn(is_exit = True, check_dns = True, view_msg = True,
|
||||
# lst_urls = [], lst_site_check_dns = [], in_addon = True)
|
||||
|
||||
|
||||
threading.Thread(target=test_conn,
|
||||
args=(True, not config.get_setting('resolver_dns'), True, [], [], True)).start()
|
||||
|
||||
def run(item=None):
|
||||
logger.info()
|
||||
|
||||
if not item:
|
||||
# Extract item from sys.argv
|
||||
if sys.argv[2]:
|
||||
@@ -88,6 +95,9 @@ def run(item=None):
|
||||
logger.info(item.tostring())
|
||||
|
||||
try:
|
||||
if not config.get_setting('tmdb_active'):
|
||||
config.set_setting('tmdb_active', True)
|
||||
|
||||
# If item has no action, stops here
|
||||
if item.action == "":
|
||||
logger.info("Item sin accion")
|
||||
@@ -169,28 +179,28 @@ def run(item=None):
|
||||
|
||||
# Checks if channel exists
|
||||
if os.path.isfile(os.path.join(config.get_runtime_path(), 'channels', item.channel + ".py")):
|
||||
CHANNELS = 'channels'
|
||||
CHANNELS = 'channels'
|
||||
elif os.path.isfile(os.path.join(config.get_runtime_path(), 'channels', 'porn', item.channel + ".py")):
|
||||
CHANNELS = 'channels.porn'
|
||||
else:
|
||||
CHANNELS ='specials'
|
||||
CHANNELS = 'specials'
|
||||
|
||||
if CHANNELS != 'channels.porn':
|
||||
channel_file = os.path.join(config.get_runtime_path(), CHANNELS, item.channel + ".py")
|
||||
else:
|
||||
channel_file = os.path.join(config.get_runtime_path(), 'channels', 'porn', item.channel + ".py")
|
||||
channel_file = os.path.join(config.get_runtime_path(), 'channels', 'porn',
|
||||
item.channel + ".py")
|
||||
|
||||
logger.info("channel_file= " + channel_file + ' - ' + CHANNELS +' - ' + item.channel)
|
||||
logger.info("channel_file= " + channel_file + ' - ' + CHANNELS + ' - ' + item.channel)
|
||||
|
||||
channel = None
|
||||
|
||||
if os.path.exists(channel_file):
|
||||
try:
|
||||
channel = __import__(CHANNELS + item.channel, None, None, [CHANNELS + item.channel])
|
||||
channel = __import__('channels.%s' % item.channel, None,
|
||||
None, ["channels.%s" % item.channel])
|
||||
except ImportError:
|
||||
importer = "import " + CHANNELS + "." + item.channel + " as channel "
|
||||
|
||||
exec(importer)
|
||||
exec("import channels." + item.channel + " as channel")
|
||||
|
||||
logger.info("Running channel %s | %s" % (channel.__name__, channel.__file__))
|
||||
|
||||
@@ -270,14 +280,22 @@ def run(item=None):
|
||||
# Special action for searching, first asks for the words then call the "search" function
|
||||
elif item.action == "search":
|
||||
logger.info("item.action=%s" % item.action.upper())
|
||||
if channeltools.get_channel_setting('last_search', 'search'):
|
||||
last_search = channeltools.get_channel_setting('Last_searched', 'search', '')
|
||||
else:
|
||||
last_search = ''
|
||||
|
||||
# last_search = ""
|
||||
# last_search_active = config.get_setting("last_search", "search")
|
||||
# if last_search_active:
|
||||
# try:
|
||||
# current_saved_searches_list = list(config.get_setting("saved_searches_list", "search"))
|
||||
# last_search = current_saved_searches_list[0]
|
||||
# except:
|
||||
# pass
|
||||
|
||||
last_search = channeltools.get_channel_setting('Last_searched', 'search', '')
|
||||
|
||||
tecleado = platformtools.dialog_input(last_search)
|
||||
|
||||
if tecleado is not None:
|
||||
channeltools.set_channel_setting('Last_searched', tecleado, 'search')
|
||||
|
||||
if 'search' in dir(channel):
|
||||
itemlist = channel.search(item, tecleado)
|
||||
else:
|
||||
@@ -308,7 +326,7 @@ def run(item=None):
|
||||
|
||||
platformtools.render_items(itemlist, item)
|
||||
|
||||
except urllib2.URLError, e:
|
||||
except urllib2.URLError as e:
|
||||
import traceback
|
||||
logger.error(traceback.format_exc())
|
||||
|
||||
@@ -323,11 +341,12 @@ def run(item=None):
|
||||
logger.error("Codigo de error HTTP : %d" % e.code)
|
||||
# "El sitio web no funciona correctamente (error http %d)"
|
||||
platformtools.dialog_ok(config.get_localized_string(20000), config.get_localized_string(30051) % e.code)
|
||||
except WebErrorException, e:
|
||||
except WebErrorException as e:
|
||||
import traceback
|
||||
logger.error(traceback.format_exc())
|
||||
|
||||
patron = 'File "' + os.path.join(config.get_runtime_path(), CHANNELS, "").replace("\\", "\\\\") + '([^.]+)\.py"'
|
||||
patron = 'File "' + os.path.join(config.get_runtime_path(), "channels", "").replace("\\",
|
||||
"\\\\") + '([^.]+)\.py"'
|
||||
canal = scrapertools.find_single_match(traceback.format_exc(), patron)
|
||||
|
||||
platformtools.dialog_ok(
|
||||
@@ -382,13 +401,19 @@ def reorder_itemlist(itemlist):
|
||||
[config.get_localized_string(60336), '[D]']]
|
||||
|
||||
for item in itemlist:
|
||||
old_title = unicode(item.title, "utf8").lower().encode("utf8")
|
||||
if not PY3:
|
||||
old_title = unicode(item.title, "utf8").lower().encode("utf8")
|
||||
else:
|
||||
old_title = item.title.lower()
|
||||
for before, after in to_change:
|
||||
if before in item.title:
|
||||
item.title = item.title.replace(before, after)
|
||||
break
|
||||
|
||||
new_title = unicode(item.title, "utf8").lower().encode("utf8")
|
||||
if not PY3:
|
||||
new_title = unicode(item.title, "utf8").lower().encode("utf8")
|
||||
else:
|
||||
new_title = item.title.lower()
|
||||
if old_title != new_title:
|
||||
mod_list.append(item)
|
||||
modified += 1
|
||||
|
||||
@@ -6,9 +6,12 @@
|
||||
import inspect
|
||||
|
||||
import xbmc
|
||||
|
||||
from platformcode import config
|
||||
|
||||
import sys
|
||||
PY3 = False
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
|
||||
loggeractive = (config.get_setting("debug") == True)
|
||||
|
||||
|
||||
@@ -18,13 +21,19 @@ def log_enable(active):
|
||||
|
||||
|
||||
def encode_log(message=""):
|
||||
|
||||
# Unicode to utf8
|
||||
if type(message) == unicode:
|
||||
if isinstance(message, unicode):
|
||||
message = message.encode("utf8")
|
||||
if PY3: message = message.decode("utf8")
|
||||
|
||||
# All encodings to utf8
|
||||
elif type(message) == str:
|
||||
elif not PY3 and isinstance(message, str):
|
||||
message = unicode(message, "utf8", errors="replace").encode("utf8")
|
||||
|
||||
# Bytes encodings to utf8
|
||||
elif PY3 and isinstance(message, bytes):
|
||||
message = message.decode("utf8")
|
||||
|
||||
# Objects to string
|
||||
else:
|
||||
@@ -34,6 +43,17 @@ def encode_log(message=""):
|
||||
|
||||
|
||||
def get_caller(message=None):
|
||||
|
||||
if message and isinstance(message, unicode):
|
||||
message = message.encode("utf8")
|
||||
if PY3: message = message.decode("utf8")
|
||||
elif message and PY3 and isinstance(message, bytes):
|
||||
message = message.decode("utf8")
|
||||
elif message and not PY3:
|
||||
message = unicode(message, "utf8", errors="replace").encode("utf8")
|
||||
elif message:
|
||||
message = str(message)
|
||||
|
||||
module = inspect.getmodule(inspect.currentframe().f_back.f_back)
|
||||
|
||||
if module == None:
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from builtins import range
|
||||
import xbmcgui
|
||||
|
||||
from core import httptools
|
||||
from core import scrapertools
|
||||
from platformcode import config
|
||||
|
||||
@@ -1,17 +1,34 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import print_function
|
||||
#from builtins import str
|
||||
import sys
|
||||
PY3 = False
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
|
||||
if PY3:
|
||||
#from future import standard_library
|
||||
#standard_library.install_aliases()
|
||||
import urllib.parse as urllib # Es muy lento en PY2. En PY3 es nativo
|
||||
else:
|
||||
import urllib # Usamos el nativo de PY2 que es más rápido
|
||||
|
||||
import os
|
||||
import re
|
||||
import string
|
||||
import urllib
|
||||
|
||||
from unicodedata import normalize
|
||||
from core import filetools
|
||||
from core import httptools
|
||||
from core import jsontools
|
||||
from core import scrapertools
|
||||
|
||||
import xbmc
|
||||
import xbmcgui
|
||||
|
||||
from platformcode import config, logger
|
||||
|
||||
allchars = string.maketrans('', '')
|
||||
if PY3: allchars = str.maketrans('', '')
|
||||
if not PY3: allchars = string.maketrans('', '')
|
||||
deletechars = ',\\/:*"<>|?'
|
||||
|
||||
|
||||
@@ -38,14 +55,14 @@ def regex_tvshow(compare, file, sub=""):
|
||||
for regex in regex_expressions:
|
||||
response_file = re.findall(regex, file)
|
||||
if len(response_file) > 0:
|
||||
print "Regex File Se: %s, Ep: %s," % (str(response_file[0][0]), str(response_file[0][1]),)
|
||||
print("Regex File Se: %s, Ep: %s," % (str(response_file[0][0]), str(response_file[0][1]),))
|
||||
tvshow = 1
|
||||
if not compare:
|
||||
title = re.split(regex, file)[0]
|
||||
for char in ['[', ']', '_', '(', ')', '.', '-']:
|
||||
title = title.replace(char, ' ')
|
||||
if title.endswith(" "): title = title.strip()
|
||||
print "title: %s" % title
|
||||
print("title: %s" % title)
|
||||
return title, response_file[0][0], response_file[0][1]
|
||||
else:
|
||||
break
|
||||
@@ -74,7 +91,7 @@ def set_Subtitle():
|
||||
logger.info()
|
||||
|
||||
exts = [".srt", ".sub", ".txt", ".smi", ".ssa", ".ass"]
|
||||
subtitle_folder_path = os.path.join(config.get_data_path(), "subtitles")
|
||||
subtitle_folder_path = filetools.join(config.get_data_path(), "subtitles")
|
||||
|
||||
subtitle_type = config.get_setting("subtitle_type")
|
||||
|
||||
@@ -90,9 +107,9 @@ def set_Subtitle():
|
||||
config.set_setting("subtitlepath_folder", subtitle_path)
|
||||
else:
|
||||
subtitle_path = config.get_setting("subtitlepath_keyboard")
|
||||
long = len(subtitle_path)
|
||||
if long > 0:
|
||||
if subtitle_path.startswith("http") or subtitle_path[long - 4, long] in exts:
|
||||
long_v = len(subtitle_path)
|
||||
if long_v > 0:
|
||||
if subtitle_path.startswith("http") or subtitle_path[long_v - 4, long] in exts:
|
||||
logger.info("Con subtitulo : " + subtitle_path)
|
||||
xbmc.Player().setSubtitles(subtitle_path)
|
||||
return
|
||||
@@ -106,13 +123,13 @@ def set_Subtitle():
|
||||
tvshow_title, season, episode = regex_tvshow(False, subtitle_name)
|
||||
try:
|
||||
if episode != "":
|
||||
Subnames = glob.glob(os.path.join(subtitle_path, "Tvshows", tvshow_title,
|
||||
Subnames = glob.glob(filetools.join(subtitle_path, "Tvshows", tvshow_title,
|
||||
"%s %sx%s" % (tvshow_title, season, episode) + "*.??.???"))
|
||||
else:
|
||||
Subnames = glob.glob(os.path.join(subtitle_path, "Movies", subtitle_name + "*.??.???"))
|
||||
Subnames = glob.glob(filetools.join(subtitle_path, "Movies", subtitle_name + "*.??.???"))
|
||||
for Subname in Subnames:
|
||||
if os.path.splitext(Subname)[1] in exts:
|
||||
logger.info("Con subtitulo : " + os.path.split(Subname)[1])
|
||||
logger.info("Con subtitulo : " + filetools.split(Subname)[1])
|
||||
xbmc.Player().setSubtitles((Subname))
|
||||
except:
|
||||
logger.error("error al cargar subtitulos")
|
||||
@@ -147,13 +164,13 @@ def searchSubtitle(item):
|
||||
if config.get_setting("subtitle_type") == 0:
|
||||
subtitlepath = config.get_setting("subtitlepath_folder")
|
||||
if subtitlepath == "":
|
||||
subtitlepath = os.path.join(config.get_data_path(), "subtitles")
|
||||
subtitlepath = filetools.join(config.get_data_path(), "subtitles")
|
||||
config.set_setting("subtitlepath_folder", subtitlepath)
|
||||
|
||||
elif config.get_setting("subtitle_type") == 1:
|
||||
subtitlepath = config.get_setting("subtitlepath_keyboard")
|
||||
if subtitlepath == "":
|
||||
subtitlepath = os.path.join(config.get_data_path(), "subtitles")
|
||||
subtitlepath = filetools.join(config.get_data_path(), "subtitles")
|
||||
config.set_setting("subtitlepathkeyboard", subtitlepath)
|
||||
elif subtitlepath.startswith("http"):
|
||||
subtitlepath = config.get_setting("subtitlepath_folder")
|
||||
@@ -161,27 +178,27 @@ def searchSubtitle(item):
|
||||
else:
|
||||
subtitlepath = config.get_setting("subtitlepath_folder")
|
||||
if subtitlepath == "":
|
||||
subtitlepath = os.path.join(config.get_data_path(), "subtitles")
|
||||
subtitlepath = filetools.join(config.get_data_path(), "subtitles")
|
||||
config.set_setting("subtitlepath_folder", subtitlepath)
|
||||
if not os.path.exists(subtitlepath):
|
||||
if not filetools.exists(subtitlepath):
|
||||
try:
|
||||
os.mkdir(subtitlepath)
|
||||
filetools.mkdir(subtitlepath)
|
||||
except:
|
||||
logger.error("error no se pudo crear path subtitulos")
|
||||
return
|
||||
|
||||
path_movie_subt = xbmc.translatePath(os.path.join(subtitlepath, "Movies"))
|
||||
if not os.path.exists(path_movie_subt):
|
||||
path_movie_subt = xbmc.translatePath(filetools.join(subtitlepath, "Movies"))
|
||||
if not filetools.exists(path_movie_subt):
|
||||
try:
|
||||
os.mkdir(path_movie_subt)
|
||||
filetools.mkdir(path_movie_subt)
|
||||
except:
|
||||
logger.error("error no se pudo crear el path Movies")
|
||||
return
|
||||
full_path_tvshow = ""
|
||||
path_tvshow_subt = xbmc.translatePath(os.path.join(subtitlepath, "Tvshows"))
|
||||
if not os.path.exists(path_tvshow_subt):
|
||||
path_tvshow_subt = xbmc.translatePath(filetools.join(subtitlepath, "Tvshows"))
|
||||
if not filetools.exists(path_tvshow_subt):
|
||||
try:
|
||||
os.mkdir(path_tvshow_subt)
|
||||
filetools.mkdir(path_tvshow_subt)
|
||||
except:
|
||||
logger.error("error no pudo crear el path Tvshows")
|
||||
return
|
||||
@@ -189,20 +206,20 @@ def searchSubtitle(item):
|
||||
title_new = title = urllib.unquote_plus(item.title)
|
||||
else:
|
||||
title_new = title = urllib.unquote_plus(item.show + " - " + item.title)
|
||||
path_video_temp = xbmc.translatePath(os.path.join(config.get_runtime_path(), "resources", "subtitle.mp4"))
|
||||
if not os.path.exists(path_video_temp):
|
||||
path_video_temp = xbmc.translatePath(filetools.join(config.get_runtime_path(), "resources", "subtitle.mp4"))
|
||||
if not filetools.exists(path_video_temp):
|
||||
logger.error("error : no existe el video temporal de subtitulos")
|
||||
return
|
||||
# path_video_temp = xbmc.translatePath(os.path.join( ,video_temp + ".mp4" ))
|
||||
# path_video_temp = xbmc.translatePath(filetools.join( ,video_temp + ".mp4" ))
|
||||
|
||||
title_new = _normalize(title_new)
|
||||
tvshow_title, season, episode = regex_tvshow(False, title_new)
|
||||
if episode != "":
|
||||
full_path_tvshow = xbmc.translatePath(os.path.join(path_tvshow_subt, tvshow_title))
|
||||
if not os.path.exists(full_path_tvshow):
|
||||
os.mkdir(full_path_tvshow) # title_new + ".mp4"
|
||||
full_path_tvshow = xbmc.translatePath(filetools.join(path_tvshow_subt, tvshow_title))
|
||||
if not filetools.exists(full_path_tvshow):
|
||||
filetools.mkdir(full_path_tvshow) # title_new + ".mp4"
|
||||
full_path_video_new = xbmc.translatePath(
|
||||
os.path.join(full_path_tvshow, "%s %sx%s.mp4" % (tvshow_title, season, episode)))
|
||||
filetools.join(full_path_tvshow, "%s %sx%s.mp4" % (tvshow_title, season, episode)))
|
||||
logger.info(full_path_video_new)
|
||||
listitem = xbmcgui.ListItem(title_new, iconImage="DefaultVideo.png", thumbnailImage="")
|
||||
listitem.setInfo("video",
|
||||
@@ -210,14 +227,14 @@ def searchSubtitle(item):
"tvshowtitle": tvshow_title})

else:
full_path_video_new = xbmc.translatePath(os.path.join(path_movie_subt, title_new + ".mp4"))
full_path_video_new = xbmc.translatePath(filetools.join(path_movie_subt, title_new + ".mp4"))
listitem = xbmcgui.ListItem(title, iconImage="DefaultVideo.png", thumbnailImage="")
listitem.setInfo("video", {"Title": title_new, "Genre": "Movies"})

import shutil, time
import time

try:
shutil.copy(path_video_temp, full_path_video_new)
filetools.copy(path_video_temp, full_path_video_new)
copy = True
logger.info("nuevo path =" + full_path_video_new)
time.sleep(2)
@@ -242,10 +259,10 @@ def searchSubtitle(item):
continue

time.sleep(1)
os.remove(full_path_video_new)
filetools.remove(full_path_video_new)
try:
if full_path_tvshow != "":
os.rmdir(full_path_tvshow)
filetools.rmdir(full_path_tvshow)
except OSError:
pass

@@ -267,3 +284,70 @@ def saveSubtitleName(item):
else:
config.set_setting("subtitle_name", title)
return


def get_from_subdivx(sub_url):

"""
:param sub_url: Url de descarga del subtitulo alojado en suvdivx.com
Por Ejemplo: http://www.subdivx.com/bajar.php?id=573942&u=8

:return: La ruta al subtitulo descomprimido
"""

logger.info()

sub = ''
sub_dir = os.path.join(config.get_data_path(), 'temp_subs')

if os.path.exists(sub_dir):
for sub_file in os.listdir(sub_dir):
old_sub = os.path.join(sub_dir, sub_file)
os.remove(old_sub)
else:
os.mkdir(sub_dir)

sub_url = sub_url.replace("&amp;", "&")
sub_data = httptools.downloadpage(sub_url, follow_redirects=False)
if 'x-frame-options' not in sub_data.headers:
sub_url = '%s' % sub_data.headers['location']
ext = sub_url[-4::]
file_id = "subtitle%s" % ext
filename = os.path.join(sub_dir, file_id)
try:
data_dl = httptools.downloadpage(sub_url).data
filetools.write(filename, data_dl)
sub = extract_file_online(sub_dir, filename)
except:
logger.info('sub no valido')
else:
logger.info('sub no valido')
return sub


def extract_file_online(path, filename):

"""
:param path: Ruta donde se encuentra el archivo comprimido

:param filename: Nombre del archivo comprimido

:return: Devuelve la ruta al subtitulo descomprimido
"""

logger.info()

url = "http://online.b1.org/rest/online/upload"

data = httptools.downloadpage(url, file=filename).data

result = jsontools.load(scrapertools.find_single_match(data, "result.listing = ([^;]+);"))
compressed = result["name"]
extracted = result["children"][0]["name"]

dl_url = "http://online.b1.org/rest/online/download/%s/%s" % (compressed, extracted)
extracted_path = os.path.join(path, extracted)
data_dl = httptools.downloadpage(dl_url).data
filetools.write(extracted_path, data_dl)

return extracted_path

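For orientation only (not part of the commit): a minimal usage sketch of the two helpers added above. It assumes the call happens inside this module, so httptools, filetools and the Kodi xbmc API are already imported; the URL is the illustrative one from the docstring.

# Hypothetical usage of get_from_subdivx()/extract_file_online() (sketch, not in the diff).
sub_url = "http://www.subdivx.com/bajar.php?id=573942&u=8"  # example URL taken from the docstring
subtitle_path = get_from_subdivx(sub_url)                   # downloads the archive, then unpacks it via extract_file_online()
if subtitle_path:
    xbmc.Player().setSubtitles(subtitle_path)               # assumption: hand the extracted subtitle to the Kodi player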
@@ -6,189 +6,167 @@
# datos obtenidos de las paginas
# ----------------------------------------------------------

import re
# from builtins import str
import sys

PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

import os
import unicodedata
import re

import config

from platformcode import config
from core.item import Item
from core import scrapertools
from platformcode import logger

thumb_dict = {
|
||||
"numbers": "http://icons.iconarchive.com/icons/custom-icon-design/pretty-office-10/256/Numbers-icon.png",
|
||||
"a": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-A-black-icon.png",
|
||||
"accion": "https://s14.postimg.cc/sqy3q2aht/action.png",
|
||||
"actors": "https://i.postimg.cc/tC2HMhVV/actors.png",
|
||||
"adolescente" : "https://s10.postimg.cc/inq7u4p61/teens.png",
|
||||
"adultos": "https://s10.postimg.cc/s8raxc51l/adultos.png",
|
||||
"adults": "https://s10.postimg.cc/s8raxc51l/adultos.png",
|
||||
"alcinema": "http://icons.iconarchive.com/icons/chromatix/aerial/256/movie-icon.png", #"http://icons.iconarchive.com/icons/itzikgur/my-seven/256/Movies-Films-icon.png",
|
||||
"all": "https://s10.postimg.cc/h1igpgw0p/todas.png",
|
||||
"alphabet": "https://s10.postimg.cc/4dy3ytmgp/a-z.png",
|
||||
"animacion": "https://s14.postimg.cc/vl193mupd/animation.png",
|
||||
"anime" : "https://s10.postimg.cc/n9mc2ikzt/anime.png",
|
||||
"artes marciales" : "https://s10.postimg.cc/4u1v51tzt/martial_arts.png",
|
||||
"asiaticas" : "https://i.postimg.cc/Xq0HXD5d/asiaticas.png",
|
||||
"audio": "https://s10.postimg.cc/b34nern7d/audio.png",
|
||||
"aventura": "http://icons.iconarchive.com/icons/sirubico/movie-genre/256/Adventure-2-icon.png",#"https://s14.postimg.cc/ky7fy5he9/adventure.png",
|
||||
"b": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-B-black-icon.png",
|
||||
"belico": "https://s14.postimg.cc/5e027lru9/war.png",
|
||||
"biografia" : "https://s10.postimg.cc/jq0ecjxnt/biographic.png",
|
||||
"c": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-C-black-icon.png",
|
||||
"carreras": "https://s14.postimg.cc/yt5qgdr69/races.png",
|
||||
"cast": "https://i.postimg.cc/qvfP5Xvt/cast.png",
|
||||
"categories": "https://s10.postimg.cc/v0ako5lmh/categorias.png",
|
||||
"ciencia ficcion": "https://s14.postimg.cc/8kulr2jy9/scifi.png",
|
||||
"cine negro" : "https://s10.postimg.cc/6ym862qgp/noir.png",
|
||||
"colections": "https://s10.postimg.cc/ywnwjvytl/colecciones.png",
|
||||
"comedia": "https://s14.postimg.cc/9ym8moog1/comedy.png",
|
||||
"cortometraje" : "https://s10.postimg.cc/qggvlxndl/shortfilm.png",
|
||||
"country": "https://s10.postimg.cc/yz0h81j15/pais.png",
|
||||
"crimen": "https://s14.postimg.cc/duzkipjq9/crime.png",
|
||||
"d": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-D-black-icon.png",
|
||||
"de la tv": "https://s10.postimg.cc/94gj0iwh5/image.png",
|
||||
"deporte": "https://s14.postimg.cc/x1crlnnap/sports.png",
|
||||
"destacadas": "https://s10.postimg.cc/yu40x8q2x/destacadas.png",
|
||||
"documental": "https://s10.postimg.cc/68aygmmcp/documentales.png",
|
||||
"documentaries": "https://s10.postimg.cc/68aygmmcp/documentales.png",
|
||||
"doramas":"https://s10.postimg.cc/h4dyr4nfd/doramas.png",
|
||||
"drama": "https://s14.postimg.cc/fzjxjtnxt/drama.png",
|
||||
"e": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-E-black-icon.png",
|
||||
"erotica" : "https://s10.postimg.cc/dcbb9bfx5/erotic.png",
|
||||
"espanolas" : "https://s10.postimg.cc/x1y6zikx5/spanish.png",
|
||||
"estrenos" : "https://s10.postimg.cc/sk8r9xdq1/estrenos.png",
|
||||
"extranjera": "https://s10.postimg.cc/f44a4eerd/foreign.png",
|
||||
"f": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-F-black-icon.png",
|
||||
"familiar": "https://s14.postimg.cc/jj5v9ndsx/family.png",
|
||||
"fantasia": "https://s14.postimg.cc/p7c60ksg1/fantasy.png",
|
||||
"fantastico" : "https://s10.postimg.cc/tedufx5eh/fantastic.png",
|
||||
"favorites": "https://s10.postimg.cc/rtg147gih/favoritas.png",
|
||||
"g": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-G-black-icon.png",
|
||||
"genres": "https://s10.postimg.cc/6c4rx3x1l/generos.png",
|
||||
"h": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-H-black-icon.png",
|
||||
"historica": "https://s10.postimg.cc/p1faxj6yh/historic.png",
|
||||
"horror" : "https://s10.postimg.cc/8exqo6yih/horror2.png",
|
||||
"hot": "https://s10.postimg.cc/yu40x8q2x/destacadas.png",
|
||||
"i": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-I-black-icon.png",
|
||||
"infantil": "https://s14.postimg.cc/4zyq842mp/childish.png",
|
||||
"intriga": "https://s14.postimg.cc/5qrgdimw1/intrigue.png",
|
||||
"j": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-J-black-icon.png",
|
||||
"k": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-K-black-icon.png",
|
||||
"l": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-L-black-icon.png",
|
||||
"language": "https://s10.postimg.cc/6wci189ft/idioma.png",
|
||||
"last": "https://s10.postimg.cc/i6ciuk0eh/ultimas.png",
|
||||
"lat": "https://i.postimg.cc/Gt8fMH0J/lat.png",
|
||||
"latino" : "https://s10.postimg.cc/swip0b86h/latin.png",
|
||||
"m": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-M-black-icon.png",
|
||||
"mexicanas" : "https://s10.postimg.cc/swip0b86h/latin.png",
|
||||
"misterio": "https://s14.postimg.cc/3m73cg8ep/mistery.png",
|
||||
"more voted": "https://s10.postimg.cc/lwns2d015/masvotadas.png",
|
||||
"more watched": "https://s10.postimg.cc/c6orr5neh/masvistas.png",
|
||||
"movies": "https://s10.postimg.cc/fxtqzdog9/peliculas.png",
|
||||
"musical": "https://s10.postimg.cc/hy7fhtecp/musical.png",
|
||||
"n": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-N-black-icon.png",
|
||||
"new episodes": "https://s10.postimg.cc/fu4iwpnqh/nuevoscapitulos.png",
|
||||
"newest": "http://icons.iconarchive.com/icons/laurent-baumann/creme/128/Location-News-icon.png", #"http://icons.iconarchive.com/icons/uiconstock/ios8-setting/128/news-icon.png",
|
||||
"nextpage": "http://icons.iconarchive.com/icons/custom-icon-design/pretty-office-5/256/navigate-right-icon.png", #"http://icons.iconarchive.com/icons/custom-icon-design/office/256/forward-icon.png", #"http://icons.iconarchive.com/icons/ahmadhania/spherical/128/forward-icon.png",
|
||||
"o": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-O-black-icon.png",
|
||||
"others": "http://icons.iconarchive.com/icons/limav/movie-genres-folder/128/Others-icon.png",
|
||||
"p": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-P-black-icon.png",
|
||||
"peleas" : "https://s10.postimg.cc/7a3ojbjwp/Fight.png",
|
||||
"policial" : "https://s10.postimg.cc/wsw0wbgbd/cops.png",
|
||||
"premieres": "https://s10.postimg.cc/sk8r9xdq1/estrenos.png",
|
||||
"q": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-Q-black-icon.png",
|
||||
"quality": "https://s10.postimg.cc/9bbojsbjd/calidad.png",
|
||||
"r": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-R-black-icon.png",
|
||||
"recents": "https://s10.postimg.cc/649u24kp5/recents.png",
|
||||
"recomendadas": "https://s10.postimg.cc/7xk1oqccp/recomendadas.png",
|
||||
"recomended": "https://s10.postimg.cc/7xk1oqccp/recomendadas.png",
|
||||
"religion" : "https://s10.postimg.cc/44j2skquh/religion.png",
|
||||
"romance" : "https://s10.postimg.cc/yn8vdll6x/romance.png",
|
||||
"romantica": "https://s14.postimg.cc/8xlzx7cht/romantic.png",
|
||||
"s": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-S-black-icon.png",
|
||||
"search": "http://icons.iconarchive.com/icons/jamespeng/movie/256/database-icon.png",
|
||||
"suspenso": "https://s10.postimg.cc/7peybxdfd/suspense.png",
|
||||
"t": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-T-black-icon.png",
|
||||
"telenovelas": "https://i.postimg.cc/QCXZkyDM/telenovelas.png",
|
||||
"terror": "https://s14.postimg.cc/thqtvl52p/horror.png",
|
||||
"thriller": "https://s14.postimg.cc/uwsekl8td/thriller.png",
|
||||
"tvshows": "https://s10.postimg.cc/kxvslawe1/series.png",
|
||||
"u": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-U-black-icon.png",
|
||||
"ultimiarrivi" : "http://icons.iconarchive.com/icons/saki/snowish/128/Extras-internet-download-icon.png",
|
||||
"updated" : "https://s10.postimg.cc/46m3h6h9l/updated.png",
|
||||
"v": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-V-black-icon.png",
|
||||
"vose": "https://i.postimg.cc/kgmnbd8h/vose.png",
|
||||
"w": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-W-black-icon.png",
|
||||
"western": "https://s10.postimg.cc/5wc1nokjt/western.png",
|
||||
"x": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-X-black-icon.png",
|
||||
"y": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-Y-black-icon.png",
|
||||
"year": "https://s10.postimg.cc/atzrqg921/a_o.png",
|
||||
"z": "http://icons.iconarchive.com/icons/hydrattz/multipurpose-alphabet/256/Letter-Z-black-icon.png"
|
||||
}
|
||||
thumb_dict = {"movies": "https://s10.postimg.cc/fxtqzdog9/peliculas.png",
|
||||
"tvshows": "https://s10.postimg.cc/kxvslawe1/series.png",
|
||||
"on air": "https://i.postimg.cc/HLLJWMcr/en-emision.png",
|
||||
"all": "https://s10.postimg.cc/h1igpgw0p/todas.png",
|
||||
"genres": "https://s10.postimg.cc/6c4rx3x1l/generos.png",
|
||||
"search": "https://s10.postimg.cc/v985e2izd/buscar.png",
|
||||
"quality": "https://s10.postimg.cc/9bbojsbjd/calidad.png",
|
||||
"audio": "https://s10.postimg.cc/b34nern7d/audio.png",
|
||||
"newest": "https://s10.postimg.cc/g1s5tf1bt/novedades.png",
|
||||
"last": "https://s10.postimg.cc/i6ciuk0eh/ultimas.png",
|
||||
"hot": "https://s10.postimg.cc/yu40x8q2x/destacadas.png",
|
||||
"year": "https://s10.postimg.cc/atzrqg921/a_o.png",
|
||||
"alphabet": "https://s10.postimg.cc/4dy3ytmgp/a-z.png",
|
||||
"recomended": "https://s10.postimg.cc/7xk1oqccp/recomendadas.png",
|
||||
"more watched": "https://s10.postimg.cc/c6orr5neh/masvistas.png",
|
||||
"more voted": "https://s10.postimg.cc/lwns2d015/masvotadas.png",
|
||||
"favorites": "https://s10.postimg.cc/rtg147gih/favoritas.png",
|
||||
"colections": "https://s10.postimg.cc/ywnwjvytl/colecciones.png",
|
||||
"categories": "https://s10.postimg.cc/v0ako5lmh/categorias.png",
|
||||
"premieres": "https://s10.postimg.cc/sk8r9xdq1/estrenos.png",
|
||||
"documentaries": "https://s10.postimg.cc/68aygmmcp/documentales.png",
|
||||
"language": "https://s10.postimg.cc/6wci189ft/idioma.png",
|
||||
"new episodes": "https://s10.postimg.cc/fu4iwpnqh/nuevoscapitulos.png",
|
||||
"country": "https://s10.postimg.cc/yz0h81j15/pais.png",
|
||||
"adults": "https://s10.postimg.cc/s8raxc51l/adultos.png",
|
||||
"recents": "https://s10.postimg.cc/649u24kp5/recents.png",
|
||||
"updated": "https://s10.postimg.cc/46m3h6h9l/updated.png",
|
||||
"actors": "https://i.postimg.cc/tC2HMhVV/actors.png",
|
||||
"cast": "https://i.postimg.cc/qvfP5Xvt/cast.png",
|
||||
"lat": "https://i.postimg.cc/Gt8fMH0J/lat.png",
|
||||
"vose": "https://i.postimg.cc/kgmnbd8h/vose.png",
|
||||
"accion": "https://s14.postimg.cc/sqy3q2aht/action.png",
|
||||
"adolescente": "https://s10.postimg.cc/inq7u4p61/teens.png",
|
||||
"adultos": "https://s10.postimg.cc/s8raxc51l/adultos.png",
|
||||
"animacion": "https://s14.postimg.cc/vl193mupd/animation.png",
|
||||
"anime": "https://s10.postimg.cc/n9mc2ikzt/anime.png",
|
||||
"artes marciales": "https://s10.postimg.cc/4u1v51tzt/martial_arts.png",
|
||||
"asiaticas": "https://i.postimg.cc/Xq0HXD5d/asiaticas.png",
|
||||
"aventura": "https://s14.postimg.cc/ky7fy5he9/adventure.png",
|
||||
"belico": "https://s14.postimg.cc/5e027lru9/war.png",
|
||||
"biografia": "https://s10.postimg.cc/jq0ecjxnt/biographic.png",
|
||||
"carreras": "https://s14.postimg.cc/yt5qgdr69/races.png",
|
||||
"ciencia ficcion": "https://s14.postimg.cc/8kulr2jy9/scifi.png",
|
||||
"cine negro": "https://s10.postimg.cc/6ym862qgp/noir.png",
|
||||
"comedia": "https://s14.postimg.cc/9ym8moog1/comedy.png",
|
||||
"cortometraje": "https://s10.postimg.cc/qggvlxndl/shortfilm.png",
|
||||
"crimen": "https://s14.postimg.cc/duzkipjq9/crime.png",
|
||||
"de la tv": "https://s10.postimg.cc/94gj0iwh5/image.png",
|
||||
"deporte": "https://s14.postimg.cc/x1crlnnap/sports.png",
|
||||
"destacadas": "https://s10.postimg.cc/yu40x8q2x/destacadas.png",
|
||||
"documental": "https://s10.postimg.cc/68aygmmcp/documentales.png",
|
||||
"doramas": "https://s10.postimg.cc/h4dyr4nfd/doramas.png",
|
||||
"drama": "https://s14.postimg.cc/fzjxjtnxt/drama.png",
|
||||
"erotica": "https://s10.postimg.cc/dcbb9bfx5/erotic.png",
|
||||
"espanolas": "https://s10.postimg.cc/x1y6zikx5/spanish.png",
|
||||
"estrenos": "https://s10.postimg.cc/sk8r9xdq1/estrenos.png",
|
||||
"extranjera": "https://s10.postimg.cc/f44a4eerd/foreign.png",
|
||||
"familiar": "https://s14.postimg.cc/jj5v9ndsx/family.png",
|
||||
"fantasia": "https://s14.postimg.cc/p7c60ksg1/fantasy.png",
|
||||
"fantastico": "https://s10.postimg.cc/tedufx5eh/fantastic.png",
|
||||
"historica": "https://s10.postimg.cc/p1faxj6yh/historic.png",
|
||||
"horror": "https://s10.postimg.cc/8exqo6yih/horror2.png",
|
||||
"infantil": "https://s14.postimg.cc/4zyq842mp/childish.png",
|
||||
"intriga": "https://s14.postimg.cc/5qrgdimw1/intrigue.png",
|
||||
"latino": "https://s10.postimg.cc/swip0b86h/latin.png",
|
||||
"mexicanas": "https://s10.postimg.cc/swip0b86h/latin.png",
|
||||
"misterio": "https://s14.postimg.cc/3m73cg8ep/mistery.png",
|
||||
"musical": "https://s10.postimg.cc/hy7fhtecp/musical.png",
|
||||
"peleas": "https://s10.postimg.cc/7a3ojbjwp/Fight.png",
|
||||
"policial": "https://s10.postimg.cc/wsw0wbgbd/cops.png",
|
||||
"recomendadas": "https://s10.postimg.cc/7xk1oqccp/recomendadas.png",
|
||||
"religion": "https://s10.postimg.cc/44j2skquh/religion.png",
|
||||
"romance": "https://s10.postimg.cc/yn8vdll6x/romance.png",
|
||||
"romantica": "https://s14.postimg.cc/8xlzx7cht/romantic.png",
|
||||
"suspenso": "https://s10.postimg.cc/7peybxdfd/suspense.png",
|
||||
"telenovelas": "https://i.postimg.cc/QCXZkyDM/telenovelas.png",
|
||||
"terror": "https://s14.postimg.cc/thqtvl52p/horror.png",
|
||||
"thriller": "https://s14.postimg.cc/uwsekl8td/thriller.png",
|
||||
"western": "https://s10.postimg.cc/5wc1nokjt/western.png"
|
||||
}
|
||||
|
||||
|
||||
def set_genre(string):
|
||||
#logger.info()
|
||||
# logger.info()
|
||||
|
||||
genres_dict = {'accion':['azione'],
|
||||
'adultos':['adulto','adulti'],
|
||||
'animacion':['animazione'],
|
||||
'adolescente':['adolescente', 'adolescenti'],
|
||||
'aventura':['avventura'],
|
||||
'belico':['guerra','guerriglia'],
|
||||
'biografia':['biografia', 'biografie', 'biografico'],
|
||||
'ciencia ficcion':['ciencia ficcion', 'cienciaficcion', 'sci fi', 'c ficcion'],
|
||||
'cine negro':['film noir'],
|
||||
'comedia':['commedia', 'commedie'],
|
||||
'cortometraje':['cortometraggio', 'corto', 'corti'],
|
||||
'de la tv':['della tv', 'televisione', 'tv'],
|
||||
'deporte':['deporte', 'deportes'],
|
||||
'destacadas':['destacada', 'destacadas'],
|
||||
'documental':['documentario', 'documentari'],
|
||||
'erotica':['erotica', 'erotica +', 'eroticas', 'eroticas +', 'erotico', 'erotico +'],
|
||||
'estrenos':['estrenos', 'estrenos'],
|
||||
'extranjera':['extrajera', 'extrajeras', 'foreign'],
|
||||
'familiar':['familiare', 'famiglia'],
|
||||
'fantastico':['fantastico', 'fantastica', 'fantastici'],
|
||||
'historica':['storico', 'storia'],
|
||||
'infantil':['bambini', 'infanzia'],
|
||||
'musical':['musicale', 'musical', 'musica'],
|
||||
'numbers': ['0','1','2','3','4','5','6','7','8','9'],
|
||||
'policial':['politico', 'politici', 'politica'],
|
||||
'recomendadas':['raccomandato', 'raccomandati'],
|
||||
'religion':['religione', 'religioso', 'religiosa','religiosi'],
|
||||
'romantica':['romantica', 'romantico', 'romantici'],
|
||||
'suspenso':['suspenso', 'suspense'],
|
||||
'thriller':['thriller', 'thrillers'],
|
||||
'western':['western', 'westerns']
|
||||
genres_dict = {'accion': ['accion', 'action', 'accion y aventura', 'action & adventure'],
|
||||
'adultos': ['adultos', 'adultos +', 'adulto'],
|
||||
'animacion': ['animacion', 'animacion e infantil', 'dibujos animados'],
|
||||
'adolescente': ['adolescente', 'adolescentes', 'adolescencia', 'adolecentes'],
|
||||
'aventura': ['aventura', 'aventuras'],
|
||||
'belico': ['belico', 'belica', 'belicas', 'guerra', 'belico guerra'],
|
||||
'biografia': ['biografia', 'biografias', 'biografica', 'biograficas', 'biografico'],
|
||||
'ciencia ficcion': ['ciencia ficcion', 'cienciaficcion', 'sci fi', 'c ficcion'],
|
||||
'cine negro': ['film noir', 'negro'],
|
||||
'comedia': ['comedia', 'comedias'],
|
||||
'cortometraje': ['cortometraje', 'corto', 'cortos'],
|
||||
'de la tv': ['de la tv', 'television', 'tv'],
|
||||
'deporte': ['deporte', 'deportes'],
|
||||
'destacadas': ['destacada', 'destacadas'],
|
||||
'documental': ['documental', 'documentales'],
|
||||
'erotica': ['erotica', 'erotica +', 'eroticas', 'eroticas +', 'erotico', 'erotico +'],
|
||||
'estrenos': ['estrenos', 'estrenos'],
|
||||
'extranjera': ['extrajera', 'extrajeras', 'foreign'],
|
||||
'familiar': ['familiar', 'familia'],
|
||||
'fantastico': ['fantastico', 'fantastica', 'fantasticas'],
|
||||
'historica': ['historica', 'historicas', 'historico', 'historia'],
|
||||
'infantil': ['infantil', 'kids'],
|
||||
'musical': ['musical', 'musicales', 'musica'],
|
||||
'policial': ['policial', 'policiaco', 'policiaca'],
|
||||
'recomendadas': ['recomedada', 'recomendadas'],
|
||||
'religion': ['religion', 'religiosa', 'religiosas'],
|
||||
'romantica': ['romantica', 'romanticas', 'romantico'],
|
||||
'suspenso': ['suspenso', 'suspense'],
|
||||
'thriller': ['thriller', 'thrillers'],
|
||||
'western': ['western', 'westerns', 'oeste western']
|
||||
}
|
||||
string = re.sub(r'peliculas de |pelicula de la |peli |cine ','', string)
|
||||
for genre, variants in genres_dict.items():
|
||||
string = re.sub(r'peliculas de |pelicula de la |peli |cine ', '', string)
|
||||
for genre, variants in list(genres_dict.items()):
|
||||
if string in variants:
|
||||
string = genre
|
||||
|
||||
return string
|
||||
|
||||
|
||||
def remove_format(string):
#logger.info()
#logger.debug('entra en remove: %s' % string)
# logger.info()
# logger.debug('entra en remove: %s' % string)
string = string.rstrip()
string = re.sub(r'(\[|\[\/)(?:color|COLOR|b|B|i|I).*?\]|\[|\]|\(|\)|\:|\.', '', string)
#logger.debug('sale de remove: %s' % string)
# logger.debug('sale de remove: %s' % string)
return string


def normalize(string):
string = string.decode('utf-8')
if not PY3 and isinstance(string, str):
string = string.decode('utf-8')
normal = ''.join((c for c in unicodedata.normalize('NFD', unicode(string)) if unicodedata.category(c) != 'Mn'))
return normal


def simplify(string):

#logger.info()
#logger.debug('entra en simplify: %s'%string)
# logger.info()
# logger.debug('entra en simplify: %s'%string)
string = remove_format(string)
string = string.replace('-',' ').replace('_',' ')
string = re.sub(r'\d+','', string)
string = string.replace('-', ' ').replace('_', ' ')
string = re.sub(r'\d+', '', string)
string = string.strip()

notilde = normalize(string)
@@ -197,12 +175,13 @@ def simplify(string):
except:
pass
string = string.lower()
#logger.debug('sale de simplify: %s' % string)
# logger.debug('sale de simplify: %s' % string)

return string

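As a quick illustration of what the helpers above are for (a sketch with an assumed input, not code from the commit): remove_format() strips Kodi colour/bold tags and punctuation, normalize() drops accents, and simplify() additionally removes digits and lowercases, so a decorated title collapses into a plain comparison key.

# Illustrative expectation only; input and outputs are assumptions, not part of the diff.
title = '[COLOR red][B]Acción (2019)[/B][/COLOR]'
print(remove_format(title))  # roughly 'Acción 2019': tags, brackets and parentheses removed
print(simplify(title))       # roughly 'accion': digits, accents, spacing and case removed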
def add_languages(title, languages):
|
||||
#logger.info()
|
||||
# logger.info()
|
||||
|
||||
if isinstance(languages, list):
|
||||
for language in languages:
|
||||
@@ -211,14 +190,55 @@ def add_languages(title, languages):
|
||||
title = '%s %s' % (title, set_color(languages, languages))
|
||||
return title
|
||||
|
||||
|
||||
def add_info_plot(plot, languages, quality):
|
||||
# logger.info()
|
||||
last = '[/I][/B]\n'
|
||||
|
||||
if languages:
|
||||
l_part = '[COLOR yellowgreen][B][I]Idiomas:[/COLOR] '
|
||||
mid = ''
|
||||
|
||||
if isinstance(languages, list):
|
||||
for language in languages:
|
||||
mid += '%s ' % (set_color(language, language))
|
||||
else:
|
||||
mid = '%s ' % (set_color(languages, languages))
|
||||
|
||||
p_lang = '%s%s%s' % (l_part, mid, last)
|
||||
|
||||
if quality:
|
||||
q_part = '[COLOR yellowgreen][B][I]Calidad:[/COLOR] '
|
||||
p_quality = '%s%s%s' % (q_part, quality, last)
|
||||
|
||||
if languages and quality:
|
||||
plot_ = '%s%s\n%s' % (p_lang, p_quality, plot)
|
||||
|
||||
elif languages:
|
||||
plot_ = '%s\n%s' % (p_lang, plot)
|
||||
|
||||
elif quality:
|
||||
plot_ = '%s\n%s' % (p_quality, plot)
|
||||
|
||||
else:
|
||||
plot_ = plot
|
||||
|
||||
return plot_
|
||||
|
||||
|
||||
def set_color(title, category):
|
||||
#logger.info()
|
||||
# logger.info()
|
||||
from core import jsontools
|
||||
|
||||
styles_path = os.path.join(config.get_runtime_path(), 'resources', 'color_styles.json')
|
||||
preset = config.get_setting("preset_style", default="Estilo 1")
|
||||
color_setting = jsontools.load((open(styles_path, "r").read()))[preset]
|
||||
|
||||
color_scheme = {'otro': 'white', 'dual': 'white'}
|
||||
|
||||
#logger.debug('category antes de remove: %s' % category)
|
||||
# logger.debug('category antes de remove: %s' % category)
|
||||
category = remove_format(category).lower()
|
||||
#logger.debug('category despues de remove: %s' % category)
|
||||
# logger.debug('category despues de remove: %s' % category)
|
||||
# Lista de elementos posibles en el titulo
|
||||
color_list = ['movie', 'tvshow', 'year', 'rating_1', 'rating_2', 'rating_3', 'quality', 'cast', 'lat', 'vose',
|
||||
'vos', 'vo', 'server', 'library', 'update', 'no_update']
|
||||
@@ -234,46 +254,45 @@ def set_color(title, category):
|
||||
if custom_colors:
|
||||
color_scheme[element] = remove_format(config.get_setting('%s_color' % element))
|
||||
else:
|
||||
color_scheme[element] = 'white'
|
||||
color_scheme[element] = remove_format(color_setting.get(element, 'white'))
|
||||
# color_scheme[element] = 'white'
|
||||
|
||||
if category in ['update', 'no_update']:
|
||||
#logger.debug('title antes de updates: %s' % title)
|
||||
title= re.sub(r'\[COLOR .*?\]','[COLOR %s]' % color_scheme[category],title)
|
||||
# logger.debug('title antes de updates: %s' % title)
|
||||
title = re.sub(r'\[COLOR .*?\]', '[COLOR %s]' % color_scheme[category], title)
|
||||
else:
|
||||
if category not in ['movie', 'tvshow', 'library', 'otro']:
|
||||
title = "[COLOR %s][%s][/COLOR]"%(color_scheme[category], title)
|
||||
title = "[COLOR %s][%s][/COLOR]" % (color_scheme[category], title)
|
||||
else:
|
||||
title = "[COLOR %s]%s[/COLOR]" % (color_scheme[category], title)
|
||||
return title
|
||||
|
||||
def set_lang(language):
|
||||
#logger.info()
|
||||
|
||||
cast =['castellano','espanol','cast','esp','espaol', 'es','zc', 'spa', 'spanish', 'vc']
|
||||
ita =['italiano','italian','ita','it']
|
||||
lat=['latino','lat','la', 'espanol latino', 'espaol latino', 'zl', 'mx', 'co', 'vl']
|
||||
vose=['subtitulado','subtitulada','sub','sub espanol','vose','espsub','su','subs castellano',
|
||||
'sub: español', 'vs', 'zs', 'vs', 'english-spanish subs', 'ingles sub espanol']
|
||||
sub_ita=['sottotitolato','sottotitolata','sub','sub ita','subs italiano',
|
||||
'sub: italiano', 'inglese sottotitolato']
|
||||
vos=['vos', 'sub ingles', 'engsub','ingles subtitulado', 'sub: ingles']
|
||||
vo=['ingles', 'en','vo', 'ovos', 'eng','v.o', 'english']
|
||||
dual=['dual']
|
||||
def set_lang(language):
|
||||
# logger.info()
|
||||
|
||||
cast = ['castellano', 'español', 'espanol', 'cast', 'esp', 'espaol', 'es', 'zc', 'spa', 'spanish', 'vc']
|
||||
ita = ['italiano', 'italian', 'ita', 'it']
|
||||
lat = ['latino', 'lat', 'la', 'español latino', 'espanol latino', 'espaol latino', 'zl', 'mx', 'co', 'vl']
|
||||
vose = ['subtitulado', 'subtitulada', 'sub', 'sub espanol', 'vose', 'espsub', 'su', 'subs castellano',
|
||||
'sub: español', 'vs', 'zs', 'vs', 'english-spanish subs', 'ingles sub espanol', 'ingles sub español']
|
||||
vos = ['vos', 'sub ingles', 'engsub', 'vosi', 'ingles subtitulado', 'sub: ingles']
|
||||
vo = ['ingles', 'en', 'vo', 'ovos', 'eng', 'v.o', 'english']
|
||||
dual = ['dual']
|
||||
|
||||
language = scrapertools.decodeHtmlentities(language)
|
||||
old_lang = language
|
||||
|
||||
language = simplify(language)
|
||||
|
||||
#logger.debug('language before simplify: %s' % language)
|
||||
#logger.debug('old language: %s' % old_lang)
|
||||
# logger.debug('language before simplify: %s' % language)
|
||||
# logger.debug('old language: %s' % old_lang)
|
||||
if language in cast:
|
||||
language = 'cast'
|
||||
elif language in lat:
|
||||
language = 'lat'
|
||||
elif language in ita:
|
||||
language = 'ita'
|
||||
elif language in sub_ita:
|
||||
language = 'sub-ita'
|
||||
elif language in vose:
|
||||
language = 'vose'
|
||||
elif language in vos:
|
||||
@@ -285,67 +304,67 @@ def set_lang(language):
|
||||
else:
|
||||
language = 'otro'
|
||||
|
||||
#logger.debug('language after simplify: %s' % language)
|
||||
# logger.debug('language after simplify: %s' % language)
|
||||
|
||||
return language
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def title_format(item):
|
||||
#logger.info()
|
||||
# logger.info()
|
||||
|
||||
lang = False
|
||||
valid = True
|
||||
language_color = 'otro'
|
||||
simple_language = ''
|
||||
|
||||
#logger.debug('item.title antes de formatear: %s' % item.title.lower())
|
||||
# logger.debug('item.title antes de formatear: %s' % item.title.lower())
|
||||
|
||||
# TODO se deberia quitar cualquier elemento que no sea un enlace de la lista de findvideos para quitar esto
|
||||
|
||||
#Palabras "prohibidas" en los titulos (cualquier titulo que contengas estas no se procesara en unify)
|
||||
# Palabras "prohibidas" en los titulos (cualquier titulo que contengas estas no se procesara en unify)
|
||||
excluded_words = ['online', 'descarga', 'downloads', 'trailer', 'videoteca', 'gb', 'autoplay']
|
||||
|
||||
# Actions excluidos, (se define canal y action) los titulos que contengan ambos valores no se procesaran en unify
|
||||
excluded_actions = [('videolibrary','get_episodes')]
|
||||
excluded_actions = [('videolibrary', 'get_episodes')]
|
||||
|
||||
# Verifica si hay marca de visto de trakt
|
||||
|
||||
visto = False
|
||||
#logger.debug('titlo con visto? %s' % item.title)
|
||||
|
||||
if '[[I]v[/I]]' in item.title or '[COLOR limegreen][v][/COLOR]' in item.title:
|
||||
visto = True
|
||||
|
||||
# Se elimina cualquier formato previo en el titulo
|
||||
if item.action != '' and item.action !='mainlist':
|
||||
item.title = remove_format(item.title)
|
||||
|
||||
#logger.debug('visto? %s' % visto)
|
||||
|
||||
# Evita que aparezcan los idiomas en los mainlist de cada canal
|
||||
if item.action == 'mainlist':
|
||||
item.language =''
|
||||
|
||||
info = item.infoLabels
|
||||
#logger.debug('item antes de formatear: %s'%item)
|
||||
|
||||
if hasattr(item,'text_color'):
|
||||
item.text_color=''
|
||||
|
||||
#Verifica el item sea valido para ser formateado por unify
|
||||
# Verifica el item sea valido para ser formateado por unify
|
||||
|
||||
if item.channel == 'trailertools' or (item.channel.lower(), item.action.lower()) in excluded_actions or \
|
||||
item.action=='':
|
||||
item.action == '':
|
||||
valid = False
|
||||
else:
|
||||
for word in excluded_words:
|
||||
if word in item.title.lower():
|
||||
valid = False
|
||||
break
|
||||
if not valid:
|
||||
return item
|
||||
|
||||
if valid and item.unify!=False:
|
||||
# Verifica si hay marca de visto de trakt
|
||||
|
||||
visto = False
|
||||
# logger.debug('titlo con visto? %s' % item.title)
|
||||
|
||||
if '[[I]v[/I]]' in item.title or '[COLOR limegreen][v][/COLOR]' in item.title:
|
||||
visto = True
|
||||
|
||||
# Se elimina cualquier formato previo en el titulo
|
||||
if item.action != '' and item.action != 'mainlist' and item.unify:
|
||||
item.title = remove_format(item.title)
|
||||
|
||||
# logger.debug('visto? %s' % visto)
|
||||
|
||||
# Evita que aparezcan los idiomas en los mainlist de cada canal
|
||||
if item.action == 'mainlist':
|
||||
item.language = ''
|
||||
|
||||
info = item.infoLabels
|
||||
# logger.debug('item antes de formatear: %s'%item)
|
||||
|
||||
if hasattr(item, 'text_color'):
|
||||
item.text_color = ''
|
||||
|
||||
if valid and item.unify != False:
|
||||
|
||||
# Formamos el titulo para serie, se debe definir contentSerieName
|
||||
# o show en el item para que esto funcione.
|
||||
@@ -354,25 +373,26 @@ def title_format(item):
|
||||
# Si se tiene la informacion en infolabels se utiliza
|
||||
if item.contentType == 'episode' and info['episode'] != '':
|
||||
if info['title'] == '':
|
||||
info['title'] = '%s - Episodio %s'% (info['tvshowtitle'], info['episode'])
|
||||
info['title'] = '%s - Episodio %s' % (info['tvshowtitle'], info['episode'])
|
||||
elif 'Episode' in info['title']:
|
||||
episode = info['title'].lower().replace('episode', 'episodio')
|
||||
info['title'] = '%s - %s' % (info['tvshowtitle'], episode.capitalize())
|
||||
elif info['episodio_titulo']!='':
|
||||
#logger.debug('info[episode_titulo]: %s' % info['episodio_titulo'])
|
||||
elif info['episodio_titulo'] != '':
|
||||
# logger.debug('info[episode_titulo]: %s' % info['episodio_titulo'])
|
||||
if 'episode' in info['episodio_titulo'].lower():
|
||||
episode = info['episodio_titulo'].lower().replace('episode', 'episodio')
|
||||
item.title = '%sx%s - %s' % (info['season'],info['episode'], episode.capitalize())
|
||||
item.title = '%sx%s - %s' % (info['season'], info['episode'], episode.capitalize())
|
||||
else:
|
||||
item.title = '%sx%s - %s' % (info['season'], info['episode'], info['episodio_titulo'].capitalize())
|
||||
item.title = '%sx%s - %s' % (
|
||||
info['season'], info['episode'], info['episodio_titulo'].capitalize())
|
||||
else:
|
||||
item.title = '%sx%s - %s' % (info['season'],info['episode'], info['title'])
|
||||
item.title = '%sx%s - %s' % (info['season'], info['episode'], info['title'])
|
||||
item.title = set_color(item.title, 'tvshow')
|
||||
|
||||
else:
|
||||
|
||||
# En caso contrario se utiliza el titulo proporcionado por el canal
|
||||
#logger.debug ('color_scheme[tvshow]: %s' % color_scheme['tvshow'])
|
||||
# logger.debug ('color_scheme[tvshow]: %s' % color_scheme['tvshow'])
|
||||
item.title = '%s' % set_color(item.title, 'tvshow')
|
||||
|
||||
elif item.contentTitle:
|
||||
@@ -386,27 +406,27 @@ def title_format(item):
|
||||
item.title = '%s [V.Extend.]' % set_color(item.contentTitle, 'movie')
|
||||
else:
|
||||
item.title = '%s' % set_color(item.contentTitle, 'movie')
|
||||
if item.contentType=='movie':
|
||||
if item.contentType == 'movie':
|
||||
if item.context:
|
||||
if isinstance(item.context, list):
|
||||
item.context.append('Buscar esta pelicula en otros canales')
|
||||
|
||||
if 'Novedades' in item.category and item.from_channel=='news':
|
||||
#logger.debug('novedades')
|
||||
item.title = '%s [%s]'%(item.title, item.channel)
|
||||
if ('Novedades' in item.category and item.from_channel == 'news'):
|
||||
# logger.debug('novedades')
|
||||
item.title = '%s [%s]' % (item.title, item.channel)
|
||||
|
||||
# Verificamos si item.language es una lista, si lo es se toma
|
||||
# cada valor y se normaliza formado una nueva lista
|
||||
|
||||
if hasattr(item,'language') and item.language !='':
|
||||
#logger.debug('tiene language: %s'%item.language)
|
||||
if hasattr(item, 'language') and item.language != '':
|
||||
# logger.debug('tiene language: %s'%item.language)
|
||||
if isinstance(item.language, list):
|
||||
language_list =[]
|
||||
language_list = []
|
||||
for language in item.language:
|
||||
if language != '':
|
||||
lang = True
|
||||
language_list.append(set_lang(remove_format(language)).upper())
|
||||
#logger.debug('language_list: %s' % language_list)
|
||||
# logger.debug('language_list: %s' % language_list)
|
||||
simple_language = language_list
|
||||
else:
|
||||
# Si item.language es un string se normaliza
|
||||
@@ -416,19 +436,19 @@ def title_format(item):
|
||||
else:
|
||||
simple_language = ''
|
||||
|
||||
#item.language = simple_language
|
||||
# item.language = simple_language
|
||||
|
||||
# Damos formato al año si existiera y lo agregamos
|
||||
# al titulo excepto que sea un episodio
|
||||
if info and info.get("year", "") not in [""," "] and item.contentType != 'episode' and not info['season']:
|
||||
if info and info.get("year", "") not in ["", " "] and item.contentType != 'episode' and not info['season']:
|
||||
try:
|
||||
year = '%s' % set_color(info['year'], 'year')
|
||||
item.title = item.title = '%s %s' % (item.title, year)
|
||||
except:
|
||||
logger.debug('infoLabels: %s'%info)
|
||||
logger.debug('infoLabels: %s' % info)
|
||||
|
||||
# Damos formato al puntaje si existiera y lo agregamos al titulo
|
||||
if info and info['rating'] and info['rating']!='0.0' and not info['season']:
|
||||
if info and info['rating'] and info['rating'] != '0.0' and not info['season']:
|
||||
|
||||
# Se normaliza el puntaje del rating
|
||||
|
||||
@@ -454,13 +474,29 @@ def title_format(item):
|
||||
# Damos formato a la calidad si existiera y lo agregamos al titulo
|
||||
if item.quality and isinstance(item.quality, str):
|
||||
quality = item.quality.strip()
|
||||
item.title = '%s %s' % (item.title, set_color(quality, 'quality'))
|
||||
else:
|
||||
quality = ''
|
||||
|
||||
# Damos formato al idioma si existiera y lo agregamos al titulo
|
||||
if lang:
|
||||
item.title = add_languages(item.title, simple_language)
|
||||
# Damos formato al idioma-calidad si existieran y los agregamos al plot
|
||||
quality_ = set_color(quality, 'quality')
|
||||
|
||||
if (lang or quality) and item.action == "play":
|
||||
if hasattr(item, "clean_plot"):
|
||||
item.contentPlot = item.clear_plot
|
||||
|
||||
if lang: item.title = add_languages(item.title, simple_language)
|
||||
if quality: item.title = '%s %s' % (item.title, quality_)
|
||||
|
||||
elif (lang or quality) and item.action != "play":
|
||||
|
||||
if item.contentPlot:
|
||||
item.clean_plot = item.contentPlot
|
||||
plot_ = add_info_plot(item.contentPlot, simple_language, quality_)
|
||||
item.contentPlot = plot_
|
||||
else:
|
||||
item.clean_plot = None
|
||||
plot_ = add_info_plot('', simple_language, quality_)
|
||||
item.contentPlot = plot_
|
||||
|
||||
# Para las busquedas por canal
|
||||
if item.from_channel != '':
|
||||
@@ -469,17 +505,16 @@ def title_format(item):
|
||||
logger.debug(channel_parameters)
|
||||
item.title = '%s [%s]' % (item.title, channel_parameters['title'])
|
||||
|
||||
|
||||
# Formato para actualizaciones de series en la videoteca sobreescribe los colores anteriores
|
||||
|
||||
if item.channel=='videolibrary' and item.context!='':
|
||||
if item.action=='get_seasons':
|
||||
if item.channel == 'videolibrary' and item.context != '':
|
||||
if item.action == 'get_seasons':
|
||||
if 'Desactivar' in item.context[1]['title']:
|
||||
item.title= '%s' % (set_color(item.title, 'update'))
|
||||
item.title = '%s' % (set_color(item.title, 'update'))
|
||||
if 'Activar' in item.context[1]['title']:
|
||||
item.title= '%s' % (set_color(item.title, 'no_update'))
|
||||
item.title = '%s' % (set_color(item.title, 'no_update'))
|
||||
|
||||
#logger.debug('Despues del formato: %s' % item)
|
||||
# logger.debug('Despues del formato: %s' % item)
|
||||
# Damos formato al servidor si existiera
|
||||
if item.server:
|
||||
server = '%s' % set_color(item.server.strip().capitalize(), 'server')
|
||||
@@ -487,18 +522,28 @@ def title_format(item):
|
||||
# Compureba si estamos en findvideos, y si hay server, si es asi no se muestra el
|
||||
# titulo sino el server, en caso contrario se muestra el titulo normalmente.
|
||||
|
||||
#logger.debug('item.title antes de server: %s'%item.title)
|
||||
# logger.debug('item.title antes de server: %s'%item.title)
|
||||
if item.action != 'play' and item.server:
|
||||
item.title ='%s %s'%(item.title, server.strip())
|
||||
item.title = '%s %s' % (item.title, server.strip())
|
||||
|
||||
elif item.action == 'play' and item.server:
|
||||
if hasattr(item, "clean_plot"):
|
||||
item.contentPlot = item.clean_plot
|
||||
|
||||
if item.quality == 'default':
|
||||
quality = ''
|
||||
#logger.debug('language_color: %s'%language_color)
|
||||
item.title = '%s %s' % (server, set_color(quality,'quality'))
|
||||
# logger.debug('language_color: %s'%language_color)
|
||||
item.title = '%s %s' % (server, set_color(quality, 'quality'))
|
||||
if lang:
|
||||
item.title = add_languages(item.title, simple_language)
|
||||
#logger.debug('item.title: %s' % item.title)
|
||||
# logger.debug('item.title: %s' % item.title)
|
||||
# Torrent_info
|
||||
if item.server == 'torrent' and item.torrent_info != '':
|
||||
item.title = '%s [%s]' % (item.title, item.torrent_info)
|
||||
|
||||
if item.channel == 'videolibrary':
|
||||
item.title += ' [%s]' % item.contentChannel
|
||||
|
||||
# si hay verificacion de enlaces
|
||||
if item.alive != '':
|
||||
if item.alive.lower() == 'no':
|
||||
@@ -507,14 +552,15 @@ def title_format(item):
|
||||
item.title = '[[COLOR yellow][B]?[/B][/COLOR]] %s' % item.title
|
||||
else:
|
||||
item.title = '%s' % item.title
|
||||
#logger.debug('item.title despues de server: %s' % item.title)
|
||||
|
||||
# logger.debug('item.title despues de server: %s' % item.title)
|
||||
elif 'library' in item.action:
|
||||
item.title = '%s' % set_color(item.title, 'library')
|
||||
elif item.action == '' and item.title !='':
|
||||
item.title='**- %s -**'%item.title
|
||||
else:
|
||||
elif item.action == '' and item.title != '':
|
||||
item.title = '**- %s -**' % item.title
|
||||
elif item.unify:
|
||||
item.title = '%s' % set_color(item.title, 'otro')
|
||||
#logger.debug('antes de salir %s' % item.title)
|
||||
# logger.debug('antes de salir %s' % item.title)
|
||||
if visto:
|
||||
try:
|
||||
check = u'\u221a'
|
||||
@@ -528,8 +574,9 @@ def title_format(item):
|
||||
|
||||
return item
|
||||
|
||||
|
||||
def thumbnail_type(item):
|
||||
#logger.info()
|
||||
# logger.info()
|
||||
# Se comprueba que tipo de thumbnail se utilizara en findvideos,
|
||||
# Poster o Logo del servidor
|
||||
|
||||
@@ -539,7 +586,7 @@ def thumbnail_type(item):
|
||||
item.contentThumbnail = item.thumbnail
|
||||
|
||||
if info:
|
||||
if info['thumbnail'] !='':
|
||||
if info['thumbnail'] != '':
|
||||
item.contentThumbnail = info['thumbnail']
|
||||
|
||||
if item.action == 'play':
|
||||
@@ -548,7 +595,7 @@ def thumbnail_type(item):
|
||||
item.thumbnail = info['thumbnail']
|
||||
elif thumb_type == 1:
|
||||
from core.servertools import get_server_parameters
|
||||
#logger.debug('item.server: %s'%item.server)
|
||||
# logger.debug('item.server: %s'%item.server)
|
||||
server_parameters = get_server_parameters(item.server.lower())
|
||||
item.thumbnail = server_parameters.get("thumbnail", item.contentThumbnail)
|
||||
|
||||
@@ -574,7 +621,7 @@ def check_rating(rating):
|
||||
try:
|
||||
# convertimos los deciamles p.e. 7.1
|
||||
return "%.1f" % round(_rating, 1)
|
||||
except Exception, ex_dl:
|
||||
except Exception as ex_dl:
|
||||
template = "An exception of type %s occured. Arguments:\n%r"
|
||||
message = template % (type(ex_dl).__name__, ex_dl.args)
|
||||
logger.error(message)
|
||||
@@ -601,18 +648,18 @@ def check_rating(rating):
|
||||
def convert_float(_rating):
|
||||
try:
|
||||
return float(_rating)
|
||||
except ValueError, ex_ve:
|
||||
except ValueError as ex_ve:
|
||||
template = "An exception of type %s occured. Arguments:\n%r"
|
||||
message = template % (type(ex_ve).__name__, ex_ve.args)
|
||||
logger.error(message)
|
||||
return None
|
||||
|
||||
if type(rating) != float:
|
||||
if not isinstance(rating, float):
|
||||
# logger.debug("no soy float")
|
||||
if type(rating) == int:
|
||||
if isinstance(rating, int):
|
||||
# logger.debug("soy int")
|
||||
rating = convert_float(rating)
|
||||
elif type(rating) == str:
|
||||
elif isinstance(rating, str):
|
||||
# logger.debug("soy str")
|
||||
|
||||
rating = rating.replace("<", "")
|
||||
@@ -634,4 +681,4 @@ def check_rating(rating):
|
||||
rating = check_decimal_length(rating)
|
||||
rating = check_range(rating)
|
||||
|
||||
return rating
|
||||
return rating
|
||||
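For context (assumed examples, not part of the commit): check_rating() is meant to coerce whatever the scrapers return into a one-decimal rating string, so the conversions in this hunk should behave roughly as follows.

# Assumed behaviour based on the conversions shown above; illustrative only.
print(check_rating("7.46"))  # roughly '7.5': the string goes through convert_float() and is rounded to one decimal
print(check_rating(8))       # roughly '8.0': ints are converted to float before formatting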
@@ -3,6 +3,14 @@
|
||||
# XBMC Config Menu
|
||||
# ------------------------------------------------------------
|
||||
|
||||
from __future__ import division
|
||||
#from builtins import str
|
||||
import sys
|
||||
PY3 = False
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
from builtins import range
|
||||
from past.utils import old_div
|
||||
|
||||
import inspect
|
||||
import os
|
||||
|
||||
@@ -161,7 +169,7 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
self.callback = callback
|
||||
self.item = item
|
||||
|
||||
if type(custom_button) == dict:
|
||||
if isinstance(custom_button, dict):
|
||||
self.custom_button = {}
|
||||
self.custom_button["label"] = custom_button.get("label", "")
|
||||
self.custom_button["function"] = custom_button.get("function", "")
|
||||
@@ -245,8 +253,10 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
def evaluate(self, index, cond):
|
||||
import re
|
||||
|
||||
ok = False
|
||||
|
||||
# Si la condicion es True o False, no hay mas que evaluar, ese es el valor
|
||||
if type(cond) == bool:
|
||||
if isinstance(cond, bool):
|
||||
return cond
|
||||
|
||||
# Obtenemos las condiciones
|
||||
@@ -294,9 +304,9 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
pass
|
||||
|
||||
# valor bool
|
||||
if value.lower() == "true":
|
||||
if not isinstance(value, int) and value.lower() == "true":
|
||||
value = True
|
||||
elif value.lower() == "false":
|
||||
elif not isinstance(value, int) and value.lower() == "false":
|
||||
value = False
|
||||
|
||||
# operacion "eq" "igual a"
|
||||
@@ -515,7 +525,7 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
continue
|
||||
if c["type"] == "list" and "lvalues" not in c:
|
||||
continue
|
||||
if c["type"] == "list" and not type(c["lvalues"]) == list:
|
||||
if c["type"] == "list" and not isinstance(c["lvalues"], list):
|
||||
continue
|
||||
if c["type"] == "list" and not len(c["lvalues"]) > 0:
|
||||
continue
|
||||
@@ -590,7 +600,7 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
self.check_ok(self.values)
|
||||
|
||||
def dispose_controls(self, index, focus=False, force=False):
|
||||
show_controls = self.controls_height / self.height_control - 1
|
||||
show_controls = old_div(self.controls_height, self.height_control) - 1
|
||||
|
||||
visible_count = 0
|
||||
|
||||
@@ -609,7 +619,7 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
if index < 0: index = 0
|
||||
new_index = index
|
||||
|
||||
if self.index <> new_index or force:
|
||||
if self.index != new_index or force:
|
||||
for x, c in enumerate(self.visible_controls):
|
||||
if x < new_index or visible_count > show_controls or not c["show"]:
|
||||
self.set_visible(c, False)
|
||||
@@ -693,7 +703,7 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
else:
|
||||
self.return_value = getattr(cb_channel, self.custom_button['function'])(self.item, self.values)
|
||||
if not self.custom_button["close"]:
|
||||
if isinstance(self.return_value, dict) and self.return_value.has_key("label"):
|
||||
if isinstance(self.return_value, dict) and "label" in self.return_value:
|
||||
self.getControl(10006).setLabel(self.return_value['label'])
|
||||
|
||||
for c in self.list_controls:
|
||||
@@ -936,11 +946,11 @@ class SettingsWindow(xbmcgui.WindowXMLDialog):
|
||||
elif action == 504:
|
||||
|
||||
if self.xx > raw_action.getAmount2():
|
||||
if (self.xx - int(raw_action.getAmount2())) / self.height_control:
|
||||
if old_div((self.xx - int(raw_action.getAmount2())), self.height_control):
|
||||
self.xx -= self.height_control
|
||||
self.dispose_controls(self.index + 1)
|
||||
else:
|
||||
if (int(raw_action.getAmount2()) - self.xx) / self.height_control:
|
||||
if old_div((int(raw_action.getAmount2()) - self.xx), self.height_control):
|
||||
self.xx += self.height_control
|
||||
self.dispose_controls(self.index - 1)
|
||||
return
|
||||
@@ -981,7 +991,7 @@ class ControlEdit(xbmcgui.ControlButton):
|
||||
|
||||
def setWidth(self, w):
|
||||
xbmcgui.ControlButton.setWidth(self, w)
|
||||
self.textControl.setWidth(w / 2)
|
||||
self.textControl.setWidth(old_div(w, 2))
|
||||
|
||||
def setHeight(self, w):
|
||||
xbmcgui.ControlButton.setHeight(self, w)
|
||||
@@ -992,7 +1002,7 @@ class ControlEdit(xbmcgui.ControlButton):
|
||||
if xbmcgui.__version__ == "1.2":
|
||||
self.textControl.setPosition(x + self.getWidth(), y)
|
||||
else:
|
||||
self.textControl.setPosition(x + self.getWidth() / 2, y)
|
||||
self.textControl.setPosition(x + old_div(self.getWidth(), 2), y)
|
||||
|
||||
def setText(self, text):
|
||||
self.text = text
|
||||
|
||||
@@ -91,8 +91,7 @@ class InfoWindow(xbmcgui.WindowXMLDialog):
|
||||
En caso de peliculas:
|
||||
Coge el titulo de los siguientes campos (en este orden)
|
||||
1. contentTitle (este tiene prioridad 1)
|
||||
2. fulltitle (este tiene prioridad 2)
|
||||
3. title (este tiene prioridad 3)
|
||||
2. title (este tiene prioridad 2)
|
||||
El primero que contenga "algo" lo interpreta como el titulo (es importante asegurarse que el titulo este en
|
||||
su sitio)
|
||||
|
||||
|
||||
@@ -3,17 +3,24 @@
|
||||
# XBMC Library Tools
|
||||
# ------------------------------------------------------------
|
||||
|
||||
from future import standard_library
|
||||
standard_library.install_aliases()
|
||||
#from builtins import str
|
||||
import sys
|
||||
PY3 = False
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import re
|
||||
|
||||
import urllib2
|
||||
import xbmc
|
||||
|
||||
from core import filetools
|
||||
from core import jsontools
|
||||
from platformcode import config, logger
|
||||
from platformcode import platformtools
|
||||
from core import scrapertools
|
||||
|
||||
|
||||
def mark_auto_as_watched(item):
|
||||
@@ -83,7 +90,6 @@ def sync_trakt_addon(path_folder):
|
||||
"special://home/addons/script.trakt/"]
|
||||
|
||||
for path in paths:
|
||||
import sys
|
||||
sys.path.append(xbmc.translatePath(path))
|
||||
|
||||
# se obtiene las series vistas
|
||||
@@ -94,10 +100,9 @@ def sync_trakt_addon(path_folder):
|
||||
return
|
||||
|
||||
shows = traktapi.getShowsWatched({})
|
||||
shows = shows.items()
|
||||
shows = list(shows.items())
|
||||
|
||||
# obtenemos el id de la serie para comparar
|
||||
import re
|
||||
_id = re.findall("\[(.*?)\]", path_folder, flags=re.DOTALL)[0]
|
||||
logger.debug("el id es %s" % _id)
|
||||
|
||||
@@ -329,9 +334,7 @@ def mark_season_as_watched_on_kodi(item, value=1):
|
||||
def mark_content_as_watched_on_alfa(path):
|
||||
from specials import videolibrary
|
||||
from core import videolibrarytools
|
||||
from core import scrapertools
|
||||
from core import filetools
|
||||
import re
|
||||
|
||||
"""
|
||||
marca toda la serie o película como vista o no vista en la Videoteca de Alfa basado en su estado en la Videoteca de Kodi
|
||||
@type str: path
|
||||
@@ -361,6 +364,9 @@ def mark_content_as_watched_on_alfa(path):
|
||||
if "\\" in path:
|
||||
path = path.replace("/", "\\")
|
||||
head_nfo, item = videolibrarytools.read_nfo(path) #Leo el .nfo del contenido
|
||||
if not item:
|
||||
logger.error('.NFO no encontrado: ' + path)
|
||||
return
|
||||
|
||||
if FOLDER_TVSHOWS in path: #Compruebo si es CINE o SERIE
|
||||
contentType = "episode_view" #Marco la tabla de BBDD de Kodi Video
|
||||
@@ -379,7 +385,7 @@ def mark_content_as_watched_on_alfa(path):
|
||||
nfo_name = scrapertools.find_single_match(path2, '\]\/(.*?)$') #Construyo el nombre del .nfo
|
||||
path1 = path1.replace(nfo_name, '') #para la SQL solo necesito la carpeta
|
||||
path2 = path2.replace(nfo_name, '') #para la SQL solo necesito la carpeta
|
||||
path2 = filetools.remove_smb_credential(path2) #Si el archivo está en un servidor SMB, quiamos las credenciales
|
||||
path2 = filetools.remove_smb_credential(path2) #Si el archivo está en un servidor SMB, quitamos las credenciales
|
||||
|
||||
#Ejecutmos la sentencia SQL
|
||||
sql = 'select strFileName, playCount from %s where (strPath like "%s" or strPath like "%s")' % (contentType, path1, path2)
|
||||
@@ -399,7 +405,11 @@ def mark_content_as_watched_on_alfa(path):
|
||||
playCount_final = 0
|
||||
elif playCount >= 1:
|
||||
playCount_final = 1
|
||||
title_plain = title_plain.decode("utf-8").encode("utf-8") #Hacemos esto porque si no genera esto: u'title_plain'
|
||||
|
||||
elif not PY3 and isinstance(title_plain, (str, unicode)):
|
||||
title_plain = title_plain.decode("utf-8").encode("utf-8") #Hacemos esto porque si no genera esto: u'title_plain'
|
||||
elif PY3 and isinstance(var, bytes):
|
||||
title_plain = title_plain.decode('utf-8')
|
||||
item.library_playcounts.update({title_plain: playCount_final}) #actualizamos el playCount del .nfo
|
||||
|
||||
if item.infoLabels['mediatype'] == "tvshow": #Actualizamos los playCounts de temporadas y Serie
|
||||
@@ -420,6 +430,7 @@ def get_data(payload):
|
||||
@param payload: data
|
||||
:return:
|
||||
"""
|
||||
import urllib.request, urllib.error
|
||||
logger.info("payload: %s" % payload)
|
||||
# Required header for XBMC JSON-RPC calls, otherwise you'll get a 415 HTTP response code - Unsupported media type
|
||||
headers = {'content-type': 'application/json'}
|
||||
@@ -433,14 +444,14 @@ def get_data(payload):
|
||||
|
||||
xbmc_json_rpc_url = "http://" + config.get_setting("xbmc_host", "videolibrary") + ":" + str(
|
||||
xbmc_port) + "/jsonrpc"
|
||||
req = urllib2.Request(xbmc_json_rpc_url, data=jsontools.dump(payload), headers=headers)
|
||||
f = urllib2.urlopen(req)
|
||||
req = urllib.request.Request(xbmc_json_rpc_url, data=jsontools.dump(payload), headers=headers)
|
||||
f = urllib.request.urlopen(req)
|
||||
response = f.read()
|
||||
f.close()
|
||||
|
||||
logger.info("get_data: response %s" % response)
|
||||
data = jsontools.load(response)
|
||||
except Exception, ex:
|
||||
except Exception as ex:
|
||||
template = "An exception of type %s occured. Arguments:\n%r"
|
||||
message = template % (type(ex).__name__, ex.args)
|
||||
logger.error("error en xbmc_json_rpc_url: %s" % message)
|
||||
@@ -448,7 +459,7 @@ def get_data(payload):
|
||||
else:
|
||||
try:
|
||||
data = jsontools.load(xbmc.executeJSONRPC(jsontools.dump(payload)))
|
||||
except Exception, ex:
|
||||
except Exception as ex:
|
||||
template = "An exception of type %s occured. Arguments:\n%r"
|
||||
message = template % (type(ex).__name__, ex.args)
|
||||
logger.error("error en xbmc.executeJSONRPC: %s" % message)
|
||||
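For context (an illustrative payload, not part of the commit): get_data() serialises a Kodi JSON-RPC request with jsontools.dump() and sends it either to the configured remote host over HTTP or through xbmc.executeJSONRPC(), so a typical call looks roughly like this.

# Hypothetical payload; VideoLibrary.Scan is a standard Kodi JSON-RPC method. Illustrative only.
payload = {"jsonrpc": "2.0", "method": "VideoLibrary.Scan", "id": 1}
response = get_data(payload)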
@@ -477,6 +488,7 @@ def update(folder_content=config.get_setting("folder_tvshows"), folder=""):
|
||||
}
|
||||
|
||||
if folder:
|
||||
folder = str(folder)
|
||||
videolibrarypath = config.get_videolibrary_config_path()
|
||||
|
||||
if folder.endswith('/') or folder.endswith('\\'):
|
||||
@@ -489,9 +501,10 @@ def update(folder_content=config.get_setting("folder_tvshows"), folder=""):
|
||||
videolibrarypath = videolibrarypath[:-1]
|
||||
update_path = videolibrarypath + "/" + folder_content + "/" + folder + "/"
|
||||
else:
|
||||
update_path = filetools.join(videolibrarypath, folder_content, folder) + "/"
|
||||
#update_path = filetools.join(videolibrarypath, folder_content, folder) + "/" # Problemas de encode en "folder"
|
||||
update_path = filetools.join(videolibrarypath, folder_content, ' ').rstrip()
|
||||
|
||||
if not update_path.startswith("smb://"):
|
||||
if not scrapertools.find_single_match(update_path, '(^\w+:\/\/)'):
|
||||
payload["params"] = {"directory": update_path}
|
||||
|
||||
while xbmc.getCondVisibility('Library.IsScanningVideo()'):
|
||||
@@ -663,7 +676,7 @@ def set_content(content_type, silent=False):
|
||||
if sql_videolibrarypath.startswith("special://"):
|
||||
sql_videolibrarypath = sql_videolibrarypath.replace('/profile/', '/%/').replace('/home/userdata/', '/%/')
|
||||
sep = '/'
|
||||
elif sql_videolibrarypath.startswith("smb://"):
|
||||
elif scrapertools.find_single_match(sql_videolibrarypath, '(^\w+:\/\/)'):
|
||||
sep = '/'
|
||||
else:
|
||||
sep = os.sep
|
||||
@@ -881,7 +894,7 @@ def add_sources(path):
|
||||
# Nodo <name>
|
||||
nodo_name = xmldoc.createElement("name")
|
||||
sep = os.sep
|
||||
if path.startswith("special://") or path.startswith("smb://"):
|
||||
if path.startswith("special://") or scrapertools.find_single_match(path, '(^\w+:\/\/)'):
|
||||
sep = "/"
|
||||
name = path
|
||||
if path.endswith(sep):