diff --git a/mediaserver/HTTPServer.py b/mediaserver/HTTPAndWSServer.py
similarity index 67%
rename from mediaserver/HTTPServer.py
rename to mediaserver/HTTPAndWSServer.py
index 92cf680e..9703ddf1 100644
--- a/mediaserver/HTTPServer.py
+++ b/mediaserver/HTTPAndWSServer.py
@@ -5,17 +5,17 @@ import re
import threading
import time
import traceback
-from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+from BaseHTTPServer import HTTPServer
+from HTTPWebSocketsHandler import HTTPWebSocketsHandler
from platformcode import config, logger
-
+from core import jsontools as json
class MyHTTPServer(HTTPServer):
daemon_threads = True
def process_request_thread(self, request, client_address):
try:
-
self.finish_request(request, client_address)
self.shutdown_request(request)
except:
@@ -35,12 +35,15 @@ class MyHTTPServer(HTTPServer):
logger.error(traceback.format_exc())
-class Handler(BaseHTTPRequestHandler):
+class Handler(HTTPWebSocketsHandler):
def log_message(self, format, *args):
# sys.stderr.write("%s - - [%s] %s\n" %(self.client_address[0], self.log_date_time_string(), format%args))
pass
- def do_GET(self):
+ def sendMessage(self, message):
+ self.send_message(message)
+
+ def do_GET_HTTP(self):
from platformcode import platformtools
from platformcode import controllers
# Control de accesos
@@ -87,6 +90,40 @@ class Handler(BaseHTTPRequestHandler):
del c
return
+ def on_ws_message(self, message):
+ try:
+ if message:
+ json_message = json.load(message)
+
+ if "request" in json_message:
+ t = threading.Thread(target=run, args=[self.controller, json_message["request"].encode("utf8")], name=self.ID)
+ t.setDaemon(True)
+ t.start()
+
+ elif "data" in json_message:
+ if type(json_message["data"]["result"]) == unicode:
+ json_message["data"]["result"] = json_message["data"]["result"].encode("utf8")
+
+ self.controller.data = json_message["data"]
+
+ except:
+ logger.error(traceback.format_exc())
+ show_error_message(traceback.format_exc())
+
+ def on_ws_connected(self):
+ try:
+ self.ID = "%032x" % (random.getrandbits(128))
+ from platformcode.controllers.html import html
+ self.controller = html(self, self.ID)
+ self.server.fnc_info()
+ except:
+ logger.error(traceback.format_exc())
+
+ def on_ws_closed(self):
+ self.controller.__del__()
+ del self.controller
+ self.server.fnc_info()
+
def address_string(self):
# Disable reverse name lookups
return self.client_address[:2][0]
@@ -95,6 +132,13 @@ class Handler(BaseHTTPRequestHandler):
PORT = config.get_setting("server.port")
server = MyHTTPServer(('', int(PORT)), Handler)
+def run(controller, path):
+ try:
+ controller.run(path)
+ except:
+ logger.error(traceback.format_exc())
+ show_error_message(traceback.format_exc())
+
def start(fnc_info):
server.fnc_info = fnc_info
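
The rename reflects that HTTP and WebSocket traffic now share a single port: HTTPWebSocketsHandler decides per request whether to serve a plain page or to upgrade the connection, and the Handler above only supplies do_GET_HTTP plus the on_ws_* callbacks. A minimal sketch of that dispatch, assuming a simplified handler (the class below and its helpers are illustrative only, not the Seven Watt implementation):

# -*- coding: utf-8 -*-
# Minimal sketch: one port, two protocols. Upgrade requests get the RFC 6455
# handshake, everything else falls back to a plain HTTP GET path.
import base64
import hashlib
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer

WS_GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"  # fixed GUID from RFC 6455

class DemoHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        if self.headers.get('Upgrade', '').lower() == 'websocket':
            self._handshake()
            # ... from here on the socket would carry WebSocket frames ...
        else:
            self.do_GET_HTTP()

    def _handshake(self):
        # Sec-WebSocket-Accept = base64(sha1(client key + GUID))
        key = self.headers.get('Sec-WebSocket-Key', '')
        accept = base64.b64encode(hashlib.sha1(key + WS_GUID).digest())
        self.send_response(101, 'Switching Protocols')
        self.send_header('Upgrade', 'websocket')
        self.send_header('Connection', 'Upgrade')
        self.send_header('Sec-WebSocket-Accept', accept)
        self.end_headers()

    def do_GET_HTTP(self):
        self.send_response(200)
        self.send_header('Content-Type', 'text/plain')
        self.end_headers()
        self.wfile.write('hello over plain HTTP\n')

if __name__ == '__main__':
    HTTPServer(('', 8000), DemoHandler).serve_forever()
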
diff --git a/mediaserver/WebSocket.py b/mediaserver/WebSocket.py
deleted file mode 100644
index 083f692d..00000000
--- a/mediaserver/WebSocket.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# -*- coding: utf-8 -*-
-# ------------------------------------------------------------
-# HTTPServer
-# ------------------------------------------------------------
-import os
-import random
-import traceback
-from threading import Thread
-
-import WebSocketServer
-
-from core import jsontools as json
-from platformcode import config, platformtools, logger
-
-
-class HandleWebSocket(WebSocketServer.WebSocket):
- def handleMessage(self):
- try:
- if self.data:
- json_message = json.load(str(self.data))
-
- if "request" in json_message:
- t = Thread(target=run, args=[self.controller, json_message["request"].encode("utf8")], name=self.ID)
- t.setDaemon(True)
- t.start()
-
- elif "data" in json_message:
- if type(json_message["data"]["result"]) == unicode:
- json_message["data"]["result"] = json_message["data"]["result"].encode("utf8")
-
- self.controller.data = json_message["data"]
-
- except:
- logger.error(traceback.format_exc())
- show_error_message(traceback.format_exc())
-
- def handleConnected(self):
- try:
- self.ID = "%032x" % (random.getrandbits(128))
- from platformcode.controllers.html import html
- self.controller = html(self, self.ID)
- self.server.fnc_info()
- except:
- logger.error(traceback.format_exc())
- self.close()
-
- def handleClose(self):
- self.controller.__del__()
- del self.controller
- self.server.fnc_info()
-
-
-port = config.get_setting("websocket.port")
-server = WebSocketServer.SimpleWebSocketServer("", int(port), HandleWebSocket)
-
-
-def start(fnc_info):
- server.fnc_info = fnc_info
- Thread(target=server.serveforever).start()
-
-
-def stop():
- server.close()
-
-
-def run(controller, path):
- try:
- controller.run(path)
- except:
- logger.error(traceback.format_exc())
- show_error_message(traceback.format_exc())
-
-
-def show_error_message(err_info):
- from core import scrapertools
- patron = 'File "' + os.path.join(config.get_runtime_path(), "channels", "").replace("\\", "\\\\") + '([^.]+)\.py"'
- canal = scrapertools.find_single_match(err_info, patron)
- if canal:
- platformtools.dialog_ok(
- "Se ha producido un error en el canal " + canal,
- "Esto puede ser devido a varias razones: \n \
- - El servidor no está disponible, o no esta respondiendo.\n \
- - Cambios en el diseño de la web.\n \
- - Etc...\n \
- Comprueba el log para ver mas detalles del error.")
- else:
- platformtools.dialog_ok(
- "Se ha producido un error en Alfa",
- "Comprueba el log para ver mas detalles del error.")
diff --git a/mediaserver/alfa.py b/mediaserver/alfa.py
index f90710ee..e53fe499 100644
--- a/mediaserver/alfa.py
+++ b/mediaserver/alfa.py
@@ -14,12 +14,11 @@ from platformcode import config
sys.path.append(os.path.join(config.get_runtime_path(), 'lib'))
from platformcode import platformtools, logger
-import HTTPServer
-import WebSocket
+import HTTPAndWSServer
http_port = config.get_setting("server.port")
-websocket_port = config.get_setting("websocket.port")
myip = config.get_local_ip()
+version = config.get_addon_version()
def thread_name_wrap(func):
@@ -43,9 +42,8 @@ if sys.version_info < (2, 7, 11):
def show_info():
os.system('cls' if os.name == 'nt' else 'clear')
print ("--------------------------------------------------------------------")
- print ("Alfa Iniciado")
+ print ("Alfa %s Iniciado" %version)
print ("La URL para acceder es http://%s:%s" % (myip, http_port))
- print ("WebSocket Server iniciado en ws://%s:%s" % (myip, websocket_port))
print ("--------------------------------------------------------------------")
print ("Runtime Path : " + config.get_runtime_path())
print ("Data Path : " + config.get_data_path())
@@ -67,14 +65,12 @@ def start():
logger.info("server init...")
config.verify_directories_created()
try:
- HTTPServer.start(show_info)
- WebSocket.start(show_info)
+ HTTPAndWSServer.start(show_info)
# Da por levantado el servicio
logger.info("--------------------------------------------------------------------")
- logger.info("Alfa Iniciado")
+ logger.info("Alfa %s Iniciado" %version)
logger.info("La URL para acceder es http://%s:%s" % (myip, http_port))
- logger.info("WebSocket Server iniciado en ws://%s:%s" % (myip, websocket_port))
logger.info("--------------------------------------------------------------------")
logger.info("Runtime Path : " + config.get_runtime_path())
logger.info("Data Path : " + config.get_data_path())
@@ -91,9 +87,7 @@ def start():
except KeyboardInterrupt:
print 'Deteniendo el servidor HTTP...'
- HTTPServer.stop()
- print 'Deteniendo el servidor WebSocket...'
- WebSocket.stop()
+ HTTPAndWSServer.stop()
print 'Alfa Detenido'
flag = False
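
alfa.py now drives a single start()/stop() pair. A minimal sketch of the lifecycle it expects, assuming the combined server runs serve_forever on a daemon thread; only the start/stop names come from the diff, the bodies are an assumption:

# -*- coding: utf-8 -*-
import threading
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer

server = HTTPServer(('', 8000), BaseHTTPRequestHandler)

def start(fnc_info):
    server.fnc_info = fnc_info          # callback used to refresh the console banner
    t = threading.Thread(target=server.serve_forever)
    t.setDaemon(True)                   # let the process exit on Ctrl+C
    t.start()

def stop():
    server.shutdown()                   # unblock serve_forever
    server.server_close()               # release the listening socket
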
diff --git a/mediaserver/lib/HTTPWebSocketsHandler.py b/mediaserver/lib/HTTPWebSocketsHandler.py
new file mode 100644
index 00000000..3e89b503
--- /dev/null
+++ b/mediaserver/lib/HTTPWebSocketsHandler.py
@@ -0,0 +1,229 @@
+'''
+The MIT License (MIT)
+
+Copyright (C) 2014, 2015 Seven Watt
Últimos episodios
.+?]+>.+?
(.*?)'
'(.*?)', re.DOTALL).findall(data)
itemlist = []
-
for url, thumbnail, str_episode, show in matches:
-
try:
episode = int(str_episode.replace("Episodio ", ""))
except ValueError:
@@ -135,28 +112,21 @@ def novedades_episodios(item):
new_item = Item(channel=item.channel, action="findvideos", title=title, url=url, show=show, thumbnail=thumbnail,
fulltitle=title)
-
itemlist.append(new_item)
-
return itemlist
def novedades_anime(item):
logger.info()
-
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}|-\s", "", data)
data = scrapertools.find_single_match(data, '
(.*?).+?
(.*?)
.+?)?', re.DOTALL).findall(data)
itemlist = []
-
for url, thumbnail, _type, title, plot in matches:
-
url = urlparse.urljoin(HOST, url)
thumbnail = urlparse.urljoin(HOST, thumbnail)
-
new_item = Item(channel=item.channel, action="episodios", title=title, url=url, thumbnail=thumbnail, fulltitle=title, plot=plot)
if _type != "Película":
@@ -165,173 +135,75 @@ def novedades_anime(item):
else:
new_item.contentType = "movie"
new_item.contentTitle = title
-
itemlist.append(new_item)
-
return itemlist
def listado(item):
logger.info()
-
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}|-\s", "", data)
url_pagination = scrapertools.find_single_match(data, '(.*?)
', re.DOTALL).findall(data)
-
itemlist = []
-
for url, thumbnail, _type, title, plot in matches:
-
url = urlparse.urljoin(HOST, url)
thumbnail = urlparse.urljoin(HOST, thumbnail)
-
new_item = Item(channel=item.channel, action="episodios", title=title, url=url, thumbnail=thumbnail, fulltitle=title, plot=plot)
-
if _type == "Anime":
new_item.show = title
new_item.context = renumbertools.context(item)
else:
new_item.contentType = "movie"
new_item.contentTitle = title
-
itemlist.append(new_item)
-
if url_pagination:
url = urlparse.urljoin(HOST, url_pagination)
title = ">> Pagina Siguiente"
-
itemlist.append(Item(channel=item.channel, action="listado", title=title, url=url))
-
return itemlist
def episodios(item):
logger.info()
itemlist = []
-
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}|-\s", "", data)
-
- # fix para renumbertools
- item.show = scrapertools.find_single_match(data, '(.*?)
')
-
- matches = re.compile('href="([^"]+)">(.*?)
', re.DOTALL).findall(data)
-
- if matches:
- for url, thumb, title in matches:
- title = title.strip()
- url = urlparse.urljoin(item.url, url)
- # thumbnail = item.thumbnail
-
- try:
- episode = int(scrapertools.find_single_match(title, "^.+?\s(\d+)$"))
- except ValueError:
- season = 1
- episode = 1
- else:
- season, episode = renumbertools.numbered_for_tratk(item.channel, item.show, 1, episode)
-
- title = "%sx%s : %s" % (season, str(episode).zfill(2), item.title)
-
- itemlist.append(item.clone(action="findvideos", title=title, url=url, thumbnail=thumb, fulltitle=title,
- fanart=item.thumbnail, contentType="episode"))
- else:
- # no hay thumbnail
- matches = re.compile(']+>(.*?)<', re.DOTALL).findall(data)
-
- for url, title in matches:
- title = title.strip()
- url = urlparse.urljoin(item.url, url)
- thumb = item.thumbnail
-
- try:
- episode = int(scrapertools.find_single_match(title, "^.+?\s(\d+)$"))
- except ValueError:
- season = 1
- episode = 1
- else:
- season, episode = renumbertools.numbered_for_tratk(item.channel, item.show, 1, episode)
-
- title = "%sx%s : %s" % (season, str(episode).zfill(2), item.title)
-
- itemlist.append(item.clone(action="findvideos", title=title, url=url, thumbnail=thumb, fulltitle=title,
- fanart=item.thumbnail, contentType="episode"))
-
+ info = eval(scrapertools.find_single_match(data, 'anime_info = (.*?);'))
+ episodes = eval(scrapertools.find_single_match(data, 'var episodes = (.*?);'))
+ for episode in episodes:
+ url = '%s/ver/%s/%s-%s' % (HOST, episode[1], info[2], episode[0])
+ title = '1x%s Episodio %s' % (episode[0], episode[0])
+ itemlist.append(item.clone(title=title, url=url, action='findvideos', show=info[1]))
+ itemlist = itemlist[::-1]
if config.get_videolibrary_support() and len(itemlist) > 0:
itemlist.append(Item(channel=item.channel, title="Añadir esta serie a la videoteca", action="add_serie_to_library", extra="episodios"))
-
return itemlist
def findvideos(item):
logger.info()
-
itemlist = []
-
data = re.sub(r"\n|\r|\t|\s{2}|-\s", "", httptools.downloadpage(item.url).data)
-
list_videos = scrapertools.find_multiple_matches(data, 'video\[\d\]\s=\s\'
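
The rewritten episodios() above stops scraping the episode markup and instead evaluates the anime_info and var episodes JavaScript literals embedded in the page, building each /ver/ URL from them. A self-contained sketch of that idea; the HTML snippet, HOST and all values are invented, and ast.literal_eval stands in for the eval() used in the patch as a safer way to parse plain literals:

# -*- coding: utf-8 -*-
import ast
import re

HOST = "https://example-anime-site.tv"   # assumed host

html = """
<script>
var anime_info = ["123", "Dragon Quest", "dragon-quest"];
var episodes = [[3, 9103], [2, 9050], [1, 9001]];
</script>
"""

# Pull the two JS literals out of the page and turn them into Python objects.
info = ast.literal_eval(re.search(r'anime_info = (.*?);', html).group(1))
episodes = ast.literal_eval(re.search(r'var episodes = (.*?);', html).group(1))

# episodes is a list of [episode number, episode id]; info[2] is the show slug.
for number, episode_id in episodes:
    url = '%s/ver/%s/%s-%s' % (HOST, episode_id, info[2], number)
    title = '1x%s Episodio %s' % (number, number)
    print title, url
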