removed unused modules and methods

This commit is contained in:
alfa_addon_10
2017-10-27 19:10:12 +02:00
parent f5a7f6383c
commit a56513b5df
8 changed files with 75 additions and 863 deletions

View File

@@ -3,8 +3,6 @@
# Configuracion
# ------------------------------------------------------------
import os
from channelselector import get_thumb
from core import filetools
from core import servertools
@@ -15,7 +13,6 @@ from platformcode import platformtools
CHANNELNAME = "setting"
# todo revisar elementos de update
def mainlist(item):
logger.info()
@@ -23,19 +20,6 @@ def mainlist(item):
itemlist.append(Item(channel=CHANNELNAME, title="Preferencias", action="settings", folder=False,
thumbnail=get_thumb("setting_0.png")))
# if config.get_setting("plugin_updates_available") == 0:
# nuevas = ""
# elif config.get_setting("plugin_updates_available") == 1:
# nuevas = " (1 nueva)"
# else:
# nuevas = " (%s nuevas)" % config.get_setting("plugin_updates_available")
#
# thumb_configuracion = "setting_%s.png" % config.get_setting("plugin_updates_available")
#
# itemlist.append(Item(channel=CHANNELNAME, title="Descargar e instalar otras versiones" + nuevas,
# action="get_all_versions", folder=True,
# thumbnail=get_thumb(thumb_configuracion)))
itemlist.append(Item(channel=CHANNELNAME, title="", action="", folder=False, thumbnail=get_thumb("setting_0.png")))
itemlist.append(Item(channel=CHANNELNAME, title="Ajustes especiales", action="", folder=False,
@@ -59,8 +43,6 @@ def mainlist(item):
itemlist.append(Item(channel=CHANNELNAME, title=" Ajustes de cliente Torrent", action="setting_torrent",
folder=True, thumbnail=get_thumb("channels_torrent.png")))
# itemlist.append(Item(channel=CHANNELNAME, title=" Añadir o Actualizar canal/conector desde una URL",
# action="menu_addchannels"))
itemlist.append(Item(channel=CHANNELNAME, action="", title="", folder=False, thumbnail=get_thumb("setting_0.png")))
itemlist.append(Item(channel=CHANNELNAME, title="Otras herramientas", action="submenu_tools", folder=True,
thumbnail=get_thumb("setting_0.png")))
@@ -263,7 +245,7 @@ def servers_favorites(item):
for x in range(1, 6):
control = {'id': x,
'type': "list",
'label': " Servidor #%s" % (x),
'label': " Servidor #%s" % x,
'lvalues': server_names,
'default': 0,
'enabled': "eq(-%s,True)" % x,
@@ -301,261 +283,13 @@ def cb_servers_favorites(server_names, dict_values):
progreso.close()
def get_all_versions(item):
    """Build a menu listing every downloadable package version.

    Queries the remote API for all published packages and compares each
    entry against the locally installed version, colouring the title to
    flag newer (yellow) and older (grey) releases.

    Returns a list of Item objects, or None when the API call fails.

    Fix: removed the dead ``title = title`` no-op branch and folded the
    version comparison into a single guarded chain.
    """
    logger.info()
    itemlist = []

    # Local version helpers
    from core import versiontools
    # Download the list of published versions
    from core import api
    api_response = api.plugins_get_all_packages()

    if api_response["error"]:
        platformtools.dialog_ok("Error", "Se ha producido un error al descargar la lista de versiones")
        return

    for entry in api_response["body"]:
        if entry["package"] == "plugin":
            title = "alfa " + entry["tag"] + " (Publicada " + entry["date"] + ")"
            local_version_number = versiontools.get_current_plugin_version()
        elif entry["package"] == "channels":
            title = "Canales (Publicada " + entry["date"] + ")"
            local_version_number = versiontools.get_current_channels_version()
        elif entry["package"] == "servers":
            title = "Servidores (Publicada " + entry["date"] + ")"
            local_version_number = versiontools.get_current_servers_version()
        else:
            # Unknown package type: no local version to compare against.
            title = entry["package"] + " (Publicada " + entry["date"] + ")"
            local_version_number = None

        # Colour-code the entry relative to the installed version.
        title_color = ""
        if local_version_number is not None:
            if entry["version"] == local_version_number:
                title += " ACTUAL"
            elif entry["version"] > local_version_number:
                title_color = "yellow"
            else:
                title_color = "0xFF666666"

        itemlist.append(Item(channel=CHANNELNAME, title=title, url=entry["url"], filename=entry["filename"],
                             package=entry["package"], version=str(entry["version"]), text_color=title_color,
                             action="download_and_install_package", folder=False))
    return itemlist
def download_and_install_package(item):
    """Confirm with the user, then download and install *item*'s package.

    Plugin packages get a version-aware confirmation (older / same /
    newer release); any other package gets a generic prompt. After the
    install, the Kodi container is refreshed when running under XBMC/Kodi.
    """
    logger.info()
    from core import updater
    from core import versiontools

    if item.package == "plugin":
        current = versiontools.get_current_plugin_version()
        requested = int(item.version)
        if requested < current:
            heading = "Instalando versión anterior"
            question = "¿Seguro que quieres instalar una versión anterior?"
        elif requested == current:
            heading = "Reinstalando versión actual"
            question = "¿Seguro que quieres reinstalar la misma versión que ya tienes?"
        else:
            heading = "Instalando nueva versión"
            question = "¿Seguro que quieres instalar esta nueva versión?"
    else:
        heading = "Instalando paquete"
        question = "¿Seguro que quieres instalar este paquete?"

    if not platformtools.dialog_yesno(heading, question):
        return

    local_file_name = os.path.join(config.get_data_path(), item.filename)
    updater.download_and_install(item.url, local_file_name)

    if config.is_xbmc():
        import xbmc
        xbmc.executebuiltin("Container.Refresh")
def settings(item):
"""Open the add-on's native settings dialog. *item* is unused."""
config.open_settings()
def menu_addchannels(item):
    """Build the add/update channel & connector submenu."""
    logger.info()
    itemlist = []
    # Informational header (green, no action attached).
    itemlist.append(Item(channel=CHANNELNAME, title="# Copia de seguridad automática en caso de sobrescritura",
                         action="", text_color="green"))
    for caption in ("Añadir o actualizar canal", "Añadir o actualizar conector"):
        itemlist.append(Item(channel=CHANNELNAME, title=caption, action="addchannel", folder=False))
    for caption in ("Mostrar ruta de carpeta para copias de seguridad",
                    "Eliminar copias de seguridad guardadas"):
        itemlist.append(Item(channel=CHANNELNAME, title=caption, action="backups", folder=False))
    return itemlist
def addchannel(item):
"""Install or update a channel/connector from a user-supplied URL.

Accepts a direct .py/.xml link, a github channels/servers folder URL
(every file in the folder is fetched), or anything else treated as a
zip archive. Existing files are backed up under userdata/backups
before being overwritten. Whether a channel or a connector is being
installed is decided from the menu title ("canal" vs anything else).
"""
import os
import time
logger.info()
tecleado = platformtools.dialog_input("", "Introduzca la URL")
if not tecleado:
return
logger.info("url=%s" % tecleado)
local_folder = config.get_runtime_path()
if "canal" in item.title:
local_folder = filetools.join(local_folder, 'channels')
folder_to_extract = "channels"
info_accion = "canal"
else:
local_folder = filetools.join(local_folder, 'servers')
folder_to_extract = "servers"
info_accion = "conector"
# Detect whether the link points to a .py or .xml file (mainly github links).
try:
extension = tecleado.rsplit(".", 1)[1]
except:
extension = ""
files = []
zip = False
if extension == "py" or extension == "xml":
# Single file: download it straight into the target folder.
filename = tecleado.rsplit("/", 1)[1]
localfilename = filetools.join(local_folder, filename)
files.append([tecleado, localfilename, filename])
else:
import re
from core import scrapertools
# Check whether the url points to a whole github folder (channels or servers).
if re.search(r'https://github.com/[^\s]+/' + folder_to_extract, tecleado):
try:
data = scrapertools.downloadpage(tecleado)
matches = scrapertools.find_multiple_matches(data,
'<td class="content">.*?href="([^"]+)".*?title="([^"]+)"')
# NOTE(review): this scrapes github's HTML listing — fragile if the markup changes.
for url, filename in matches:
url = "https://raw.githubusercontent.com" + url.replace("/blob/", "/")
localfilename = filetools.join(local_folder, filename)
files.append([url, localfilename, filename])
except:
import traceback
logger.error("Detalle del error: %s" % traceback.format_exc())
platformtools.dialog_ok("Error", "La url no es correcta o no está disponible")
return
else:
# Anything else is assumed to be a zip archive; extract it later.
filename = 'new%s.zip' % info_accion
localfilename = filetools.join(config.get_data_path(), filename)
files.append([tecleado, localfilename, filename])
zip = True
logger.info("localfilename=%s" % localfilename)
logger.info("descarga fichero...")
try:
if len(files) > 1:
lista_opciones = ["No", "", "Sí (Sobrescribir todos)"]
overwrite_all = False
from core import downloadtools
for url, localfilename, filename in files:
# downloadfile returns -3 when the destination file already exists.
result = downloadtools.downloadfile(url, localfilename, continuar=False, resumir=False)
if result == -3:
if len(files) == 1:
dyesno = platformtools.dialog_yesno("El archivo ya existe", "Ya existe el %s %s. "
"¿Desea sobrescribirlo?" %
(info_accion, filename))
else:
if not overwrite_all:
dyesno = platformtools.dialog_select("El archivo %s ya existe, ¿desea sobrescribirlo?"
% filename, lista_opciones)
else:
dyesno = 1
# Dialog cancelled
if dyesno == -1:
return
# Github-folder case: "overwrite all" option chosen
elif dyesno == 2:
overwrite_all = True
elif dyesno:
# Back up the existing file before re-downloading over it.
hora_folder = "Copia seguridad [%s]" % time.strftime("%d-%m_%H-%M", time.localtime())
backup = filetools.join(config.get_data_path(), 'backups', hora_folder, folder_to_extract)
if not filetools.exists(backup):
os.makedirs(backup)
import shutil
shutil.copy2(localfilename, filetools.join(backup, filename))
downloadtools.downloadfile(url, localfilename, continuar=True, resumir=False)
else:
if len(files) == 1:
return
else:
continue
except:
import traceback
logger.error("Detalle del error: %s" % traceback.format_exc())
return
if zip:
try:
# Unzip it
logger.info("descomprime fichero...")
from core import ziptools
unzipper = ziptools.ziptools()
logger.info("destpathname=%s" % local_folder)
unzipper.extract(localfilename, local_folder, folder_to_extract, True, True)
except:
import traceback
logger.error("Detalle del error: %s" % traceback.format_exc())
# Delete the downloaded zip
filetools.remove(localfilename)
platformtools.dialog_ok("Error", "Se ha producido un error extrayendo el archivo")
return
# Delete the downloaded zip
logger.info("borra fichero...")
filetools.remove(localfilename)
logger.info("...fichero borrado")
platformtools.dialog_ok("Éxito", "Actualización/Instalación realizada correctamente")
def backups(item):
    """Show the backups folder path, or delete all saved backups.

    The action is chosen from the menu entry's title: entries containing
    "ruta" display the folder location; the rest offer deletion after a
    confirmation dialog.
    """
    logger.info()
    ruta = filetools.join(config.get_data_path(), 'backups')

    if "ruta" in item.title:
        heading = "Ruta de copias de seguridad"
        ruta_split = ""
        if filetools.exists(ruta):
            folders = str(len(filetools.listdir(ruta))) + " copia/s de seguridad guardadas"
        else:
            folders = "Carpeta no creada"
        # Dialog lines are narrow, so split long paths at 55 characters.
        if len(ruta) > 55:
            ruta, ruta_split = ruta[:55], ruta[55:]
        platformtools.dialog_ok(heading, ruta, ruta_split, folders)
    elif not filetools.exists(ruta):
        platformtools.dialog_ok("La carpeta no existe", "No hay copias de seguridad guardadas")
    elif platformtools.dialog_yesno("Las copias de seguridad se eliminarán", "¿Está seguro?"):
        import shutil
        shutil.rmtree(ruta, ignore_errors=True)
def submenu_tools(item):
logger.info()
itemlist = []
itemlist = list()
itemlist.append(Item(channel=CHANNELNAME, title="Herramientas de canales", action="", folder=False,
thumbnail=get_thumb("channels.png")))
@@ -699,7 +433,7 @@ def conf_tools(item):
# logger.info(channel.channel + " No tiene archivo _data.json")
channeljson_exists = False
if channeljson_exists == True:
if channeljson_exists:
try:
datajson_size = filetools.getsize(file_settings)
except:
@@ -709,7 +443,7 @@ def conf_tools(item):
datajson_size = None
# Si el _data.json esta vacio o no existe...
if (len(dict_settings) and datajson_size) == 0 or channeljson_exists == False:
if (len(dict_settings) and datajson_size) == 0 or not channeljson_exists:
# Obtenemos controles del archivo ../channels/channel.json
needsfix = True
try:
@@ -723,7 +457,7 @@ def conf_tools(item):
# default_settings = {}
# Si _data.json necesita ser reparado o no existe...
if needsfix == True or channeljson_exists == False:
if needsfix or not channeljson_exists:
if default_settings is not None:
# Creamos el channel_data.json
default_settings.update(dict_settings)
@@ -748,8 +482,8 @@ def conf_tools(item):
# Si se ha establecido el estado del canal se añade a la lista
if needsfix is not None:
if needsfix == True:
if channeljson_exists == False:
if needsfix:
if not channeljson_exists:
list_status = " - Ajustes creados"
list_colour = "red"
else:

View File

@@ -1,58 +0,0 @@
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# Client for api.xxxxxxxxxxxxx
# ------------------------------------------------------------
import urllib
import jsontools
import scrapertools
from platformcode import config, logger
MAIN_URL = ""
API_KEY = "nzgJy84P9w54H2w"
DEFAULT_HEADERS = [["User-Agent", config.PLUGIN_NAME + " " + config.get_platform()]]
# ---------------------------------------------------------------------------------------------------------
# Common function for API calls
# ---------------------------------------------------------------------------------------------------------
# Make a remote call using post, ensuring api key is here
def remote_call(url, parameters=None, require_session=True):
    """POST *parameters* to *url* and return the decoded JSON response.

    Relative URLs are resolved against MAIN_URL, and the API key is
    injected when the caller did not supply one. *require_session* is
    currently unused (session-token support is commented out).

    Fix: the original used a mutable default (``parameters={}``) and then
    mutated it, so the injected ``api_key`` persisted across calls and
    leaked into callers' dicts; a fresh copy is used instead.
    """
    if parameters is None:
        parameters = {}
    else:
        # Work on a copy so the caller's dict is never mutated.
        parameters = dict(parameters)
    logger.info("url=" + url + ", parameters=" + repr(parameters))

    if not url.startswith("http"):
        url = MAIN_URL + "/" + url

    if "api_key" not in parameters:
        parameters["api_key"] = API_KEY

    # Add session token if not here
    # if not "s" in parameters and require_session:
    #     parameters["s"] = get_session_token()

    headers = DEFAULT_HEADERS
    post = urllib.urlencode(parameters)
    response_body = scrapertools.downloadpage(url, post, headers)
    return jsontools.load(response_body)
# ---------------------------------------------------------------------------------------------------------
# Plugin service calls
# ---------------------------------------------------------------------------------------------------------
def plugins_get_all_packages():
    """Fetch the full list of published package versions for this plugin/platform."""
    logger.info()
    return remote_call("plugins/get_all_packages.php",
                       {"plugin": config.PLUGIN_NAME, "platform": config.get_platform()})
def plugins_get_latest_packages():
    """Fetch only the newest published version of each package for this plugin/platform."""
    logger.info()
    return remote_call("plugins/get_latest_packages.php",
                       {"plugin": config.PLUGIN_NAME, "platform": config.get_platform()})

View File

@@ -100,17 +100,7 @@ def get_channel_parameters(channel_name):
if sys.version_info < tuple(map(int, (python_condition.split(".")))):
python_compatible = False
# compatible addon_version
addon_version_compatible = True
if 'addon_version' in channel_parameters["compatible"]:
import versiontools
addon_version_condition = channel_parameters["compatible"]['addon_version']
addon_version = int(addon_version_condition.replace(".", "").ljust(len(str(
versiontools.get_current_plugin_version())), '0'))
if versiontools.get_current_plugin_version() < addon_version:
addon_version_compatible = False
channel_parameters["compatible"] = python_compatible and addon_version_compatible
channel_parameters["compatible"] = python_compatible
else:
channel_parameters["compatible"] = True
@@ -284,41 +274,3 @@ def set_channel_setting(name, value, channel):
return None
return value
def get_channel_module(channel_name, package="channels"):
    """Import and return the module implementing *channel_name*.

    Replaces the helper in servertools.py, but also accepts a package
    other than "channels". A dotted *channel_name* is imported as-is.
    """
    if "." in channel_name:
        qualified = channel_name
    else:
        qualified = "%s.%s" % (package, channel_name)
    return __import__(qualified, None, None, [qualified])
def get_channel_remote_url(channel_name):
    """Return (code_url, version_url) for a channel's remote update files."""
    channel_parameters = get_channel_parameters(channel_name)
    base = channel_parameters["update_url"] + channel_name
    remote_channel_url = base + ".py"
    remote_version_url = base + ".json"
    logger.info("remote_channel_url=" + remote_channel_url)
    logger.info("remote_version_url=" + remote_version_url)
    return remote_channel_url, remote_version_url
def get_channel_local_path(channel_name):
    """Return the local (source, version-json, compiled) paths for a channel.

    Channels live under channels/, except the special "channelselector"
    module, which sits at the add-on root.

    Fix: the three near-identical path expressions are built once from a
    shared base directory instead of being duplicated in each branch.
    """
    if channel_name != "channelselector":
        base_dir = os.path.join(config.get_runtime_path(), 'channels')
    else:
        base_dir = config.get_runtime_path()
    local_channel_path = os.path.join(base_dir, channel_name + ".py")
    local_version_path = os.path.join(base_dir, channel_name + ".json")
    local_compiled_path = os.path.join(base_dir, channel_name + ".pyo")
    logger.info("local_channel_path=" + local_channel_path)
    logger.info("local_version_path=" + local_version_path)
    logger.info("local_compiled_path=" + local_compiled_path)
    return local_channel_path, local_version_path, local_compiled_path

View File

@@ -29,19 +29,19 @@ def downloadpage(url, post=None, headers=None, follow_redirects=True, timeout=No
return response.data
def downloadpageWithResult(url, post=None, headers=None, follow_redirects=True, timeout=None, header_to_get=None):
    """Fetch *url* via httptools.

    When *header_to_get* is given, return just that response header;
    otherwise return a (body, status_code) tuple.
    """
    response = httptools.downloadpage(url, post=post, headers=headers,
                                      follow_redirects=follow_redirects, timeout=timeout)
    if not header_to_get:
        return response.data, response.code
    return response.headers.get(header_to_get)
# def downloadpageWithResult(url, post=None, headers=None, follow_redirects=True, timeout=None, header_to_get=None):
# response = httptools.downloadpage(url, post=post, headers=headers, follow_redirects=follow_redirects,
# timeout=timeout)
#
# if header_to_get:
# return response.headers.get(header_to_get)
# else:
# return response.data, response.code
def downloadpageWithoutCookies(url):
"""Fetch *url* with the shared cookie jar disabled; return the body."""
response = httptools.downloadpage(url, cookies=False)
return response.data
# def downloadpageWithoutCookies(url):
# response = httptools.downloadpage(url, cookies=False)
# return response.data
def downloadpageGzip(url):
@@ -60,9 +60,9 @@ def get_header_from_response(url, header_to_get="", post=None, headers=None):
return response.headers.get(header_to_get)
def get_headers_from_response(url, post=None, headers=None):
"""Issue a headers-only request and return the response headers as (name, value) pairs."""
response = httptools.downloadpage(url, post=post, headers=headers, only_headers=True)
return response.headers.items()
# def get_headers_from_response(url, post=None, headers=None):
# response = httptools.downloadpage(url, post=post, headers=headers, only_headers=True)
# return response.headers.items()
def read_body_and_headers(url, post=None, headers=None, follow_redirects=False, timeout=None):
@@ -71,10 +71,10 @@ def read_body_and_headers(url, post=None, headers=None, follow_redirects=False,
return response.data, response.headers
def anti_cloudflare(url, host="", headers=None, post=None, location=False):
"""Legacy wrapper: fetch *url* and return the body.

The host/location parameters are ignored — Cloudflare handling is
already built into httptools by default.
"""
# anti_cloudfare already integrated into httptools by default
response = httptools.downloadpage(url, post=post, headers=headers)
return response.data
# def anti_cloudflare(url, host="", headers=None, post=None, location=False):
# # anti_cloudfare ya integrado en httptools por defecto
# response = httptools.downloadpage(url, post=post, headers=headers)
# return response.data
def printMatches(matches):
@@ -385,24 +385,24 @@ def remove_show_from_title(title, show):
return title
def getRandom(str):
"""Legacy alias: return the MD5 hex digest of *str* (shadows builtin ``str``)."""
return get_md5(str)
# def getRandom(str):
# return get_md5(str)
def unseo(cadena):
    """Strip a leading Spanish SEO phrase ("VER GRATIS ...", etc.) from a title.

    The match is case-insensitive and only the first matching prefix is
    removed; unmatched titles are returned unchanged.
    """
    # Order matters: longer, more specific phrases are tried first.
    seo_prefixes = (
        "VER GRATIS LA PELICULA ",
        "VER GRATIS PELICULA ",
        "VER ONLINE LA PELICULA ",
        "VER GRATIS ",
        "VER ONLINE ",
        "DESCARGA DIRECTA ",
    )
    folded = cadena.upper()
    for prefix in seo_prefixes:
        if folded.startswith(prefix):
            return cadena[len(prefix):]
    return cadena
# def unseo(cadena):
# if cadena.upper().startswith("VER GRATIS LA PELICULA "):
# cadena = cadena[23:]
# elif cadena.upper().startswith("VER GRATIS PELICULA "):
# cadena = cadena[20:]
# elif cadena.upper().startswith("VER ONLINE LA PELICULA "):
# cadena = cadena[23:]
# elif cadena.upper().startswith("VER GRATIS "):
# cadena = cadena[11:]
# elif cadena.upper().startswith("VER ONLINE "):
# cadena = cadena[11:]
# elif cadena.upper().startswith("DESCARGA DIRECTA "):
# cadena = cadena[17:]
# return cadena
# scrapertools.get_filename_from_url(media_url)[-4:]
@@ -424,19 +424,19 @@ def get_filename_from_url(url):
return filename
def get_domain_from_url(url):
"""Return the network location (host[:port]) of *url*, or "" if unavailable.

NOTE(review): imports the Python-2 ``urlparse`` module — this function
is py2-only as written.
"""
import urlparse
parsed_url = urlparse.urlparse(url)
try:
filename = parsed_url.netloc
except:
# If it fails, the parsed_url implementation does not expose attributes like "path";
# fall back to indexing the result tuple (netloc is field 1).
if len(parsed_url) >= 4:
filename = parsed_url[1]
else:
filename = ""
return filename
# def get_domain_from_url(url):
# import urlparse
# parsed_url = urlparse.urlparse(url)
# try:
# filename = parsed_url.netloc
# except:
# # Si falla es porque la implementación de parsed_url no reconoce los atributos como "path"
# if len(parsed_url) >= 4:
# filename = parsed_url[1]
# else:
# filename = ""
#
# return filename
def get_season_and_episode(title):
@@ -475,25 +475,25 @@ def get_season_and_episode(title):
return filename
def get_sha1(cadena):
    """Return the hex SHA-1 digest of *cadena*.

    Prefers hashlib; on failure falls back to the legacy ``sha`` module
    (very old Python builds).
    """
    try:
        import hashlib
        digest = hashlib.sha1(cadena).hexdigest()
    except:
        import binascii
        import sha
        digest = binascii.hexlify(sha.new(cadena).digest())
    return digest
# def get_sha1(cadena):
# try:
# import hashlib
# devuelve = hashlib.sha1(cadena).hexdigest()
# except:
# import sha
# import binascii
# devuelve = binascii.hexlify(sha.new(cadena).digest())
#
# return devuelve
def get_md5(cadena):
    """Return the hex MD5 digest of *cadena*.

    Prefers hashlib; on failure falls back to the legacy ``md5`` module
    (very old Python builds).
    """
    try:
        import hashlib
        digest = hashlib.md5(cadena).hexdigest()
    except:
        import binascii
        import md5
        digest = binascii.hexlify(md5.new(cadena).digest())
    return digest
# def get_md5(cadena):
# try:
# import hashlib
# devuelve = hashlib.md5(cadena).hexdigest()
# except:
# import md5
# import binascii
# devuelve = binascii.hexlify(md5.new(cadena).digest())
#
# return devuelve

View File

@@ -1251,7 +1251,6 @@ class Tmdb(object):
buscando = "id_Tmdb: " + str(self.result["id"]) + " temporada: " + str(numtemporada) + "\nURL: " + url
logger.info("[Tmdb.py] Buscando " + buscando)
try:
# self.temporada[numtemporada] = jsontools.load(scrapertools.downloadpageWithoutCookies(url))
self.temporada[numtemporada] = self.get_json(url)
except:

View File

@@ -1,170 +0,0 @@
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------------------
# update_servers.py
# --------------------------------------------------------------------------------
import os
import urlparse
from core import scrapertools
from core import servertools
from platformcode import config
remote_url = ""
local_folder = os.path.join(config.get_runtime_path(), "servers")
### Procedures
def update_servers():
    """Refresh every local server file that differs from its remote copy."""
    local_index = dict(read_local_servers_list())
    pending = read_remote_servers_list(local_index)
    update_servers_files(pending)
def update_servers_files(update_servers_list):
    """Download and overwrite each server listed in *update_servers_list*.

    Each entry is ``[server_name, remote_md5, ...]``. A background
    progress dialog is shown while the files are fetched.

    Fix: the destination file is written inside a ``with`` block so the
    handle is closed even when the write fails.
    """
    from platformcode import platformtools
    progress = platformtools.dialog_progress_bg("Update servers list")

    for index, server in enumerate(update_servers_list):
        percentage = index * 100 / len(update_servers_list)

        data = scrapertools.cache_page(remote_url + server[0] + ".py")
        with open(os.path.join(local_folder, server[0] + ".py"), 'w') as f:
            f.write(data)

        progress.update(percentage, ' Update server: "' + server[0] + '"', 'MD5: "' + server[1] + '"')

    progress.close()
# ----------------------------
### Functions
## init
def read_remote_servers_list(local_servers):
"""Compare remote server files against *local_servers* (name -> md5 dict).

Also refreshes the local servertools.py from the remote, then returns a
list of [name, remote_md5, local_md5_or_"New", "Update"] entries for
every server whose remote copy differs or is not present locally.
"""
data = scrapertools.cache_page(remote_url + "servertools.py")
f = open(os.path.join(local_folder, "servertools.py"), 'w')
f.write(data)
f.close()
# Deduplicated, sorted union of every known server list.
all_servers = sorted(
servertools.FREE_SERVERS + \
servertools.PREMIUM_SERVERS + \
servertools.FILENIUM_SERVERS + \
servertools.REALDEBRID_SERVERS + \
servertools.ALLDEBRID_SERVERS
)
servers = []
for server_id in all_servers:
if server_id not in servers:
servers.append(server_id)
# ----------------------------
from platformcode import platformtools
progress = platformtools.dialog_progress_bg("Remote servers list")
# ----------------------------
remote_servers = []
update_servers_list = []
for index, server in enumerate(servers):
# ----------------------------
percentage = index * 100 / len(servers)
# ----------------------------
server_file = urlparse.urljoin(remote_url, server + ".py")
data = scrapertools.cache_page(server_file)
if data != "Not Found":
md5_remote_server = md5_remote(data)
remote_servers.append([server, md5_remote_server])
md5_local_server = local_servers.get(server)
if md5_local_server:
if md5_local_server != md5_remote_server:
update_servers_list.append([server, md5_remote_server, md5_local_server, "Update"])
else:
update_servers_list.append([server, md5_remote_server, "New", "Update"])
# ----------------------------
# NOTE(review): md5_remote_server is referenced even when the remote
# returned "Not Found" — if the very first server is missing this
# raises NameError. Needs confirming/fixing upstream.
progress.update(percentage, ' Remote server: "' + server + '"', 'MD5: "' + md5_remote_server + '"')
# ----------------------------
# ----------------------------
progress.close()
# ----------------------------
return update_servers_list
def read_local_servers_list():
    """Compute [name, md5] pairs for every server file present locally.

    Iterates the deduplicated, sorted union of all known server lists,
    hashing the files that exist under servers/ while reporting progress.

    Fix: the progress message referenced the MD5 variable even for
    servers whose file does not exist, which raised NameError on the
    first missing file; an empty placeholder is shown instead.
    """
    all_servers = sorted(
        servertools.FREE_SERVERS +
        servertools.PREMIUM_SERVERS +
        servertools.FILENIUM_SERVERS +
        servertools.REALDEBRID_SERVERS +
        servertools.ALLDEBRID_SERVERS
    )
    servers = []
    for server_id in all_servers:
        if server_id not in servers:
            servers.append(server_id)

    from platformcode import platformtools
    progress = platformtools.dialog_progress_bg("Local servers list")

    local_servers = []
    for index, server in enumerate(servers):
        percentage = index * 100 / len(servers)

        md5_local_server = ""
        server_file = os.path.join(config.get_runtime_path(), "servers", server + ".py")
        if os.path.exists(server_file):
            md5_local_server = md5_local(server_file)
            local_servers.append([server, md5_local_server])

        progress.update(percentage, ' Local server: "' + server + '"', 'MD5: "' + md5_local_server + '"')

    progress.close()
    return local_servers
def md5_local(file_server):
    """Return the MD5 hex digest of the file at *file_server*.

    Fixes: the file is opened in binary mode (hashing must see raw bytes;
    text mode breaks on Python 3 and mangles newlines on Windows), the
    read loop uses the matching empty-bytes sentinel, and a ``with``
    block closes the handle even on error.
    """
    import hashlib
    hasher = hashlib.md5()
    with open(file_server, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hasher.update(chunk)
    return hasher.hexdigest()
def md5_remote(data_server):
    """Return the MD5 hex digest of an in-memory server payload."""
    import hashlib
    digest = hashlib.md5()
    digest.update(data_server)
    return digest.hexdigest()
### Run
# Module-level side effect: the full server refresh runs as soon as this
# script is imported/executed.
update_servers()
# Disabled alternative: run the refresh on a background thread.
# from threading import Thread
# Thread( target=update_servers ).start()

View File

@@ -1,208 +0,0 @@
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------------------
# Updater process
# --------------------------------------------------------------------------------
import os
import time
import scrapertools
import versiontools
from platformcode import config, logger
# Método antiguo, muestra un popup con la versión
def checkforupdates():
    """Legacy update check (popup style).

    Returns the published version tag when a newer plugin version exists
    remotely, otherwise None. Each platform decides how to show the notice.
    """
    logger.info()

    remote_number = 0
    remote_tag = ""

    # Remote version: first "plugin" entry in the latest-packages feed.
    from core import api
    latest_packages = api.plugins_get_latest_packages()
    for latest_package in latest_packages["body"]:
        if latest_package["package"] == "plugin":
            remote_number = latest_package["version"]
            remote_tag = latest_package["tag"]
            break
    logger.info("version remota=" + str(remote_number))

    # Local version
    local_number = versiontools.get_current_plugin_version()
    logger.info("version local=" + str(local_number))

    must_update = remote_number > local_number
    logger.info("-> hayqueactualizar=" + repr(must_update))

    return remote_tag if must_update else None
# Método nuevo, devuelve el nº de actualizaciones disponibles además de indicar si hay nueva versión del plugin
def get_available_updates():
    """Count available package updates.

    Returns (new_plugin_version_tag, number_of_updates) covering the
    plugin itself plus the channels and servers packages. The tag is ""
    when no newer plugin version is published.
    """
    logger.info()

    number_of_updates = 0
    new_published_version_tag = ""

    # Remote latest versions, one entry per package.
    from core import api
    latest_packages = api.plugins_get_latest_packages()

    # Map each package name to the getter for its locally installed version.
    local_version_of = {
        "plugin": versiontools.get_current_plugin_version,
        "channels": versiontools.get_current_channels_version,
        "servers": versiontools.get_current_servers_version,
    }
    for latest_package in latest_packages["body"]:
        getter = local_version_of.get(latest_package["package"])
        if getter is None:
            continue
        if latest_package["version"] > getter():
            number_of_updates += 1
            if latest_package["package"] == "plugin":
                new_published_version_tag = latest_package["tag"]

    return new_published_version_tag, number_of_updates
def update(item):
    """Download and install the latest published plugin package.

    Looks up the first "plugin" entry in the latest-packages feed and
    installs it. The package is downloaded into the "userdata" folder.

    Fix: dropped the unused ``published_version_number`` local.
    """
    logger.info()

    published_version_url = ""
    published_version_filename = ""

    # Remote version
    from core import api
    latest_packages = api.plugins_get_latest_packages()
    for latest_package in latest_packages["body"]:
        if latest_package["package"] == "plugin":
            published_version_url = latest_package["url"]
            published_version_filename = latest_package["filename"]
            break

    # The URL comes from the API; download into "userdata".
    remotefilename = published_version_url
    localfilename = os.path.join(config.get_data_path(), published_version_filename)

    download_and_install(remotefilename, localfilename)
def download_and_install(remote_file_name, local_file_name):
"""Download a zip from *remote_file_name* to *local_file_name* and extract it.

The archive is unpacked into the "addons" directory (one level above
the plugin) and the downloaded zip is deleted afterwards.
"""
logger.info("from " + remote_file_name + " to " + local_file_name)
if os.path.exists(local_file_name):
os.remove(local_file_name)
# Download the file
inicio = time.clock()
from core import downloadtools
downloadtools.downloadfile(remote_file_name, local_file_name, continuar=False)
fin = time.clock()
logger.info("Descargado en %d segundos " % (fin - inicio + 1))
logger.info("descomprime fichero...")
import ziptools
unzipper = ziptools.ziptools()
# Extract into "addons" (one level above the plugin)
installation_target = os.path.join(config.get_runtime_path(), "..")
logger.info("installation_target=%s" % installation_target)
unzipper.extract(local_file_name, installation_target)
# Delete the downloaded zip
logger.info("borra fichero...")
os.remove(local_file_name)
logger.info("...fichero borrado")
def update_channel(channel_name):
"""Check a single channel for updates and download it when newer.

Compares the remote <version> (and its <addon_version> requirement)
against the local version json; downloads the channel when the remote
is newer AND the installed add-on satisfies the requirement.

Returns True when the channel was updated, False otherwise.
"""
logger.info(channel_name)
import channeltools
remote_channel_url, remote_version_url = channeltools.get_channel_remote_url(channel_name)
local_channel_path, local_version_path, local_compiled_path = channeltools.get_channel_local_path(channel_name)
# Remote version
try:
data = scrapertools.cachePage(remote_version_url)
logger.info("remote_data=" + data)
remote_version = int(scrapertools.find_single_match(data, '<version>([^<]+)</version>'))
# Pad the dotted addon requirement to the same digit count as the local
# version code so the two integers are comparable.
addon_condition = int(scrapertools.find_single_match(data, "<addon_version>([^<]*)</addon_version>")
.replace(".", "").ljust(len(str(versiontools.get_current_plugin_version())), '0'))
except:
remote_version = 0
addon_condition = 0
logger.info("remote_version=%d" % remote_version)
# Local version
if os.path.exists(local_version_path):
infile = open(local_version_path)
from core import jsontools
data = jsontools.load(infile.read())
infile.close()
local_version = data.get('version', 0)
else:
local_version = 0
logger.info("local_version=%d" % local_version)
# Check whether it changed
updated = (remote_version > local_version) and (versiontools.get_current_plugin_version() >= addon_condition)
if updated:
logger.info("downloading...")
download_channel(channel_name)
return updated
def download_channel(channel_name):
"""Download a channel's .py (and its version .json, if published).

Each download is best-effort: failures are logged but do not abort the
other file. Any stale compiled .pyo is removed and a notification is
shown when done.
"""
logger.info(channel_name)
import channeltools
remote_channel_url, remote_version_url = channeltools.get_channel_remote_url(channel_name)
local_channel_path, local_version_path, local_compiled_path = channeltools.get_channel_local_path(channel_name)
# Download the channel
try:
updated_channel_data = scrapertools.cachePage(remote_channel_url)
outfile = open(local_channel_path, "wb")
outfile.write(updated_channel_data)
outfile.flush()
outfile.close()
logger.info("Grabado a " + local_channel_path)
except:
import traceback
logger.error(traceback.format_exc())
# Download the version file (it may not exist)
try:
updated_version_data = scrapertools.cachePage(remote_version_url)
outfile = open(local_version_path, "w")
outfile.write(updated_version_data)
outfile.flush()
outfile.close()
logger.info("Grabado a " + local_version_path)
except:
import traceback
logger.error(traceback.format_exc())
# Drop the stale compiled copy so the new source is picked up.
if os.path.exists(local_compiled_path):
os.remove(local_compiled_path)
from platformcode import platformtools
platformtools.dialog_notification(channel_name + " actualizado", "Se ha descargado una nueva versión")

View File

@@ -1,37 +0,0 @@
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------------------
# Version Tools
# --------------------------------------------------------------------------------
import os
import scrapertools
from platformcode import config
def get_current_plugin_version():
    """Numeric version code of the installed plugin (hard-coded at build time)."""
    return 4300
def get_current_plugin_version_tag():
    """Human-readable version tag of the installed plugin (hard-coded at build time)."""
    return "1.7.0"
def get_current_plugin_date():
    """Release date of the installed plugin, DD/MM/YYYY (hard-coded at build time)."""
    return "22/08/2017"
def get_current_channels_version():
    """Read the installed channels-package version from channels/version.xml.

    Fix: the file is read inside a ``with`` block so the handle is closed
    even if reading or parsing fails.
    """
    with open(os.path.join(config.get_runtime_path(), "channels", "version.xml")) as f:
        data = f.read()
    return int(scrapertools.find_single_match(data, "<version>([^<]+)</version>"))
def get_current_servers_version():
    """Read the installed servers-package version from servers/version.xml.

    Fix: the file is read inside a ``with`` block so the handle is closed
    even if reading or parsing fails.
    """
    with open(os.path.join(config.get_runtime_path(), "servers", "version.xml")) as f:
        data = f.read()
    return int(scrapertools.find_single_match(data, "<version>([^<]+)</version>"))