2023-02-17 06:02:21 +00:00
|
|
|
from twisted.web.http import Request
|
|
|
|
import yaml
|
|
|
|
import logging, coloredlogs
|
|
|
|
from logging.handlers import TimedRotatingFileHandler
|
|
|
|
import zlib
|
|
|
|
import json
|
|
|
|
import base64
|
2023-03-05 02:58:51 +00:00
|
|
|
from os import path
|
|
|
|
from typing import Tuple
|
2023-02-17 06:02:21 +00:00
|
|
|
|
2023-02-26 04:40:50 +00:00
|
|
|
from titles.diva.handlers.base import *
|
2023-02-17 06:02:21 +00:00
|
|
|
from core.config import CoreConfig
|
|
|
|
from titles.diva.config import DivaConfig
|
2023-03-05 02:58:51 +00:00
|
|
|
from titles.diva.const import DivaConstants
|
2023-02-17 06:02:21 +00:00
|
|
|
from titles.diva.base import DivaBase
|
|
|
|
|
2023-03-09 16:38:58 +00:00
|
|
|
|
|
|
|
class DivaServlet:
    """Title server servlet for Project Diva: config loading, logging setup,
    allnet info, and dispatch of POSTed game requests to DivaBase handlers."""

    def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
        """Load the game config, create the DivaBase handler backend, and
        configure the "diva" logger (rotating file + colored console).

        Args:
            core_cfg: Parsed core server configuration.
            cfg_dir: Directory containing the per-game YAML config files.
        """
        self.core_cfg = core_cfg
        self.game_cfg = DivaConfig()
        if path.exists(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}"):
            # Use a context manager so the config file handle is always closed
            # (the previous bare open() leaked the handle).
            with open(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}") as cfg_file:
                self.game_cfg.update(yaml.safe_load(cfg_file))

        self.base = DivaBase(core_cfg, self.game_cfg)

        self.logger = logging.getLogger("diva")
        log_fmt_str = "[%(asctime)s] Diva | %(levelname)s | %(message)s"
        log_fmt = logging.Formatter(log_fmt_str)
        fileHandler = TimedRotatingFileHandler(
            "{0}/{1}.log".format(self.core_cfg.server.log_dir, "diva"),
            encoding="utf8",
            when="d",
            backupCount=10,
        )

        fileHandler.setFormatter(log_fmt)

        consoleHandler = logging.StreamHandler()
        consoleHandler.setFormatter(log_fmt)

        self.logger.addHandler(fileHandler)
        self.logger.addHandler(consoleHandler)

        self.logger.setLevel(self.game_cfg.server.loglevel)
        coloredlogs.install(
            level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
        )

    @classmethod
    def get_allnet_info(
        cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str
    ) -> Tuple[bool, str, str]:
        """Return (enabled, title URI, title host) for allnet registration.

        Returns (False, "", "") when this game is disabled in its config.
        The URI includes the port only on develop servers, where the title
        server listens on a non-standard port.
        """
        game_cfg = DivaConfig()
        if path.exists(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}"):
            # Context manager avoids leaking the config file handle.
            with open(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}") as cfg_file:
                game_cfg.update(yaml.safe_load(cfg_file))

        if not game_cfg.server.enable:
            return (False, "", "")

        if core_cfg.server.is_develop:
            return (
                True,
                f"http://{core_cfg.title.hostname}:{core_cfg.title.port}/{game_code}/$v/",
                "",
            )

        return (True, f"http://{core_cfg.title.hostname}/{game_code}/$v/", "")

    def render_POST(self, req: Request, version: int, url_path: str) -> bytes:
        """Handle a POSTed game request and return the plain-text response.

        Two wire formats arrive here: a multipart-style "binary" request
        (detected via the THIS_STRING_SEPARATES boundary in the headers)
        whose command line sits on the 4th line of the body, and a
        base64+zlib-compressed payload. Both are parsed into a request
        object whose `cmd` selects a `handle_<cmd>_request` method on
        DivaBase. Returns b"stat=0" on any parse/decompress failure.
        """
        req_raw = req.content.getvalue()
        url_header = req.getAllHeaders()
        req.responseHeaders.addRawHeader(b"content-type", b"text/plain")

        # Default so the debug log below never hits an unbound name on the
        # binary path (previously only the else-branch assigned url_data).
        url_data = req_raw

        if "THIS_STRING_SEPARATES" in str(url_header):
            binary_request = req_raw.splitlines()
            # The command string lives on the 4th line of the multipart body.
            binary_cmd_decoded = binary_request[3].decode("utf-8")

            req_cls = BaseBinaryRequest(binary_cmd_decoded)

        else:
            json_string = json.dumps(
                req_raw.decode("utf-8")
            )  # Take the response and decode as UTF-8 and dump
            b64string = json_string.replace(
                r"\n", "\n"
            )  # Remove all \n and separate them as new lines
            gz_string = base64.b64decode(b64string)  # Decode the base64 string

            try:
                url_data = zlib.decompress(gz_string)  # Decompressing the gzip
            except zlib.error as e:
                self.logger.error(f"Failed to decompress! {e} -> {gz_string}")
                return b"stat=0"

            try:
                req_cls = BaseRequest(url_data)
            except DivaRequestParseException as e:
                self.logger.error(e)
                return b"stat=0"

        self.logger.debug(f"Request: {url_data}\nHeaders: {url_header}")
        self.logger.info(
            f"{req_cls.cmd} request from {req_cls.kc_serial}/{req_cls.b_serial} at {req.getClientAddress().host}"
        )

        handler_str = f"handle_{req_cls.cmd}_request"

        # Unknown commands get a generic success response rather than an error.
        if not hasattr(self.base, handler_str):
            # logger.warn is a deprecated alias of logger.warning
            self.logger.warning(f"Unhandled cmd {req_cls.cmd}")
            return BaseResponse(req_cls.cmd, req_cls.req_id).make().encode()

        handler = getattr(self.base, handler_str)

        response = handler(req_cls.raw)
        # Handlers may return nothing; fall back to the generic response.
        if response is None or response == "":
            response = BaseResponse(req_cls.cmd, req_cls.req_id).make()

        self.logger.debug(f"Response: {response}")
        return response.encode(errors="ignore")
|