from starlette.requests import Request
from starlette.routing import Route
from starlette.responses import Response, JSONResponse
import traceback
import sys
import yaml
import json
import re
import inflection
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
from typing import Dict, Tuple, List
from os import path

from core.config import CoreConfig
from core.title import BaseServlet, JSONResponseNoASCII
from core.utils import Utils
from .config import CxbConfig
from .const import CxbConstants
from .rev import CxbRev
from .rss1 import CxbRevSunriseS1
from .rss2 import CxbRevSunriseS2


class CxbServlet(BaseServlet):
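    """Title servlet for crossbeats REV.

    Registers the /data, /action, /v2/action and /auth routes and dispatches
    each request to the matching game version handler (REV, Sunrise S1, Sunrise S2).
    """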

    def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
        self.isLeaf = True
        self.cfg_dir = cfg_dir
        self.core_cfg = core_cfg
        self.game_cfg = CxbConfig()
        if path.exists(f"{cfg_dir}/{CxbConstants.CONFIG_NAME}"):
            # Merge the per-game config over the defaults, closing the file when done
            with open(f"{cfg_dir}/{CxbConstants.CONFIG_NAME}") as cfg_file:
                self.game_cfg.update(yaml.safe_load(cfg_file))

        self.logger = logging.getLogger("cxb")
        if not hasattr(self.logger, "inited"):
            log_fmt_str = "[%(asctime)s] CXB | %(levelname)s | %(message)s"
            log_fmt = logging.Formatter(log_fmt_str)
            fileHandler = TimedRotatingFileHandler(
                "{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"),
                encoding="utf8",
                when="d",
                backupCount=10,
            )
            fileHandler.setFormatter(log_fmt)

            consoleHandler = logging.StreamHandler()
            consoleHandler.setFormatter(log_fmt)

            self.logger.addHandler(fileHandler)
            self.logger.addHandler(consoleHandler)

            self.logger.setLevel(self.game_cfg.server.loglevel)
            coloredlogs.install(
                level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
            )
            self.logger.inited = True

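        # Index order must match CxbConstants.VER_CROSSBEATS_REV,
        # VER_CROSSBEATS_REV_SUNRISE_S1 and VER_CROSSBEATS_REV_SUNRISE_S2,
        # since the request handlers index into this list with those constants.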
        self.versions = [
            CxbRev(core_cfg, self.game_cfg),
            CxbRevSunriseS1(core_cfg, self.game_cfg),
            CxbRevSunriseS2(core_cfg, self.game_cfg),
        ]
|
2023-03-09 16:38:58 +00:00
|
|
|
|
2024-01-09 08:07:04 +00:00
|
|
|
def get_routes(self) -> List[Route]:
|
|
|
|
return [
|
|
|
|
Route("/data", self.handle_data, methods=['POST']),
|
|
|
|
Route("/action", self.handle_action, methods=['POST']),
|
|
|
|
Route("/v2/action", self.handle_action, methods=['POST']),
|
|
|
|
Route("/auth", self.handle_auth, methods=['POST']),
|
|
|
|
]

    @classmethod
    def is_game_enabled(cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str) -> bool:
        game_cfg = CxbConfig()
        if path.exists(f"{cfg_dir}/{CxbConstants.CONFIG_NAME}"):
            with open(f"{cfg_dir}/{CxbConstants.CONFIG_NAME}") as cfg_file:
                game_cfg.update(yaml.safe_load(cfg_file))

        if not game_cfg.server.enable:
            return False

        return True

    def get_allnet_info(self, game_code: str, game_ver: int, keychip: str) -> Tuple[str, str]:
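        """Return the title server URI and an empty host string for allnet.

        The port suffix is omitted when it matches the protocol default (443 or 80).
        """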
        title_port_int = Utils.get_title_port(self.core_cfg)
        title_port_ssl_int = Utils.get_title_port_ssl(self.core_cfg)

        proto = "https" if self.game_cfg.server.use_https else "http"

        if proto == "https":
            t_port = f":{title_port_ssl_int}" if title_port_ssl_int != 443 else ""
        else:
            t_port = f":{title_port_int}" if title_port_int != 80 else ""

        return (
            f"{proto}://{self.core_cfg.server.hostname}{t_port}",
            "",
        )

    async def preprocess(self, req: Request) -> Dict:
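        """Decode the request body as JSON.

        Some clients send single-quoted pseudo-JSON, so a second decode attempt
        rewrites the quoting before giving up and returning an empty dict.
        """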
        req_bytes = await req.body()

        try:
            req_json: Dict = json.loads(req_bytes)

        except Exception as e:
            try:
                req_json: Dict = json.loads(
                    req_bytes.decode().replace('"', '\\"').replace("'", '"')
                )

            except Exception as f:
                self.logger.warning(
                    f"Error decoding json to /data endpoint: {e} / {f} - {req_bytes}"
                )
                # Return an empty dict rather than bytes so callers can test req_json == {}
                return {}

        return req_json

    async def handle_data(self, request: Request) -> Response:
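        """Handle a /data request.

        The numeric prefix of the dldate filetype selects the game version class,
        and the file name selects the handle_data_*_request handler to call.
        """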
        req_json = await self.preprocess(request)
        func_to_find = "handle_data_"
        version_string = "Base"
        internal_ver = 0
        version = 0

        if req_json == {}:
            self.logger.warning("Empty json request to /data")
            return Response()

        subcmd = list(req_json.keys())[0]
        if subcmd == "dldate":
            if (
                not isinstance(req_json["dldate"], dict)
                or "filetype" not in req_json["dldate"]
            ):
                self.logger.warning(f"Malformed dldate request: {req_json}")
                return Response()

            filetype = req_json["dldate"]["filetype"]
            filetype_split = filetype.split("/")

            if len(filetype_split) < 2 or not filetype_split[0].isnumeric():
                self.logger.warning(f"Malformed dldate request: {req_json}")
                return Response()

            version = int(filetype_split[0])
            filename = filetype_split[-1]

            match = re.match(r"^([A-Za-z]*)(\d\d\d\d)$", filename)
            if match:
                func_to_find += f"{inflection.underscore(match.group(1))}xxxx"
            else:
                func_to_find += f"{inflection.underscore(filename)}"
        else:
            filetype = subcmd
            func_to_find += filetype

        func_to_find += "_request"

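        # Map the client version code from the filetype path onto one of the
        # version classes registered in self.versions.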
        if version <= 10102:
            version_string = "Rev"
            internal_ver = CxbConstants.VER_CROSSBEATS_REV

        elif version == 10113 or version == 10103:
            version_string = "Rev SunriseS1"
            internal_ver = CxbConstants.VER_CROSSBEATS_REV_SUNRISE_S1

        elif version >= 10114 or version == 10104:
            version_string = "Rev SunriseS2"
            internal_ver = CxbConstants.VER_CROSSBEATS_REV_SUNRISE_S2

        if not hasattr(self.versions[internal_ver], func_to_find):
            self.logger.warning(f"{version_string} has no handler for filetype {filetype} / {func_to_find}")
            return JSONResponse({"data": ""})

        self.logger.info(f"{version_string} request for filetype {filetype}")
        self.logger.debug(req_json)

        handler = getattr(self.versions[internal_ver], func_to_find)

        try:
            resp = await handler(req_json)

        except Exception as e:
            self.logger.error(f"Error handling request for file {filetype} - {e}")
            if self.logger.level == logging.DEBUG:
                tp, val, tb = sys.exc_info()
                traceback.print_exception(tp, val, tb, limit=1)
                with open("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), "a") as f:
                    traceback.print_exception(tp, val, tb, limit=1, file=f)
            return Response()

        self.logger.debug(f"{version_string} Response {resp}")
        return JSONResponseNoASCII(resp)

    async def handle_action(self, request: Request) -> Response:
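        """Handle an /action (or /v2/action) request by dispatching to
        handle_action_<subcmd>_request on the base REV class (self.versions[0])."""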
        req_json = await self.preprocess(request)
        if req_json == {}:
            # preprocess returns an empty dict when the body could not be decoded
            self.logger.warning("Empty json request to /action")
            return Response()

        subcmd = list(req_json.keys())[0]
        func_to_find = f"handle_action_{subcmd}_request"

        if not hasattr(self.versions[0], func_to_find):
            self.logger.warning(f"No handler for action {subcmd} request")
            return Response()

        self.logger.info(f"Action {subcmd} Request")
        self.logger.debug(req_json)

        handler = getattr(self.versions[0], func_to_find)

        try:
            resp = await handler(req_json)

        except Exception as e:
            self.logger.error(f"Error handling action {subcmd} request - {e}")
            if self.logger.level == logging.DEBUG:
                tp, val, tb = sys.exc_info()
                traceback.print_exception(tp, val, tb, limit=1)
                with open("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), "a") as f:
                    traceback.print_exception(tp, val, tb, limit=1, file=f)
            return Response()

        self.logger.debug(f"Response {resp}")
        return JSONResponseNoASCII(resp)

    async def handle_auth(self, request: Request) -> Response:
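        """Handle an /auth request by dispatching to
        handle_auth_<subcmd>_request on the base REV class (self.versions[0])."""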
        req_json = await self.preprocess(request)
        if req_json == {}:
            # preprocess returns an empty dict when the body could not be decoded
            self.logger.warning("Empty json request to /auth")
            return Response()

        subcmd = list(req_json.keys())[0]
        func_to_find = f"handle_auth_{subcmd}_request"

        if not hasattr(self.versions[0], func_to_find):
            self.logger.warning(f"No handler for auth {subcmd} request")
            return Response()

        self.logger.info(f"Auth {subcmd} Request")
        self.logger.debug(req_json)

        handler = getattr(self.versions[0], func_to_find)

        try:
            resp = await handler(req_json)

        except Exception as e:
            self.logger.error(f"Error handling auth {subcmd} request - {e}")
            if self.logger.level == logging.DEBUG:
                tp, val, tb = sys.exc_info()
                traceback.print_exception(tp, val, tb, limit=1)
                with open("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), "a") as f:
                    traceback.print_exception(tp, val, tb, limit=1, file=f)
            return Response()

        self.logger.debug(f"Response {resp}")
        return JSONResponseNoASCII(resp)