forked from Hay1tsme/artemis

commit cb8eaae2c0 (parent c2a330f42c)

Per-version URI/Host (#66)

Allows the allnet uri/host response to be set per title based on factors such as game version and config files, to accommodate a wider range of potential setups under the same roof. This DOES require all titles to adopt a new structure, but it is documented and should hopefully be fairly intuitive.

Co-authored-by: Hay1tsme <kevin@hay1ts.me>
Reviewed-on: Hay1tsme/artemis#66
Co-authored-by: Kevin Trocolli <pitok236@gmail.com>
Co-committed-by: Kevin Trocolli <pitok236@gmail.com>
@@ -16,10 +16,11 @@ from os import path
 import urllib.parse
 import math

-from core.config import CoreConfig
-from core.utils import Utils
-from core.data import Data
-from core.const import *
+from .config import CoreConfig
+from .utils import Utils
+from .data import Data
+from .const import *
+from .title import TitleServlet

 BILLING_DT_FORMAT: Final[str] = "%Y%m%d%H%M%S"

@@ -94,7 +95,6 @@ class AllnetServlet:
         self.config = core_cfg
         self.config_folder = cfg_folder
         self.data = Data(core_cfg)
-        self.uri_registry: Dict[str, Tuple[str, str]] = {}

         self.logger = logging.getLogger("allnet")
         if not hasattr(self.logger, "initialized"):
@@ -125,18 +125,8 @@ class AllnetServlet:
         if len(plugins) == 0:
             self.logger.error("No games detected!")

-        for _, mod in plugins.items():
-            if hasattr(mod, "index") and hasattr(mod.index, "get_allnet_info"):
-                for code in mod.game_codes:
-                    enabled, uri, host = mod.index.get_allnet_info(
-                        code, self.config, self.config_folder
-                    )
-
-                    if enabled:
-                        self.uri_registry[code] = (uri, host)
-
         self.logger.info(
-            f"Serving {len(self.uri_registry)} game codes port {core_cfg.allnet.port}"
+            f"Serving {len(TitleServlet.title_registry)} game codes port {core_cfg.allnet.port}"
         )

     def handle_poweron(self, request: Request, _: Dict):
@@ -245,7 +235,7 @@ class AllnetServlet:
             arcade["timezone"] if arcade["timezone"] is not None else "+0900" if req.format_ver == 3 else "+09:00"
         )

-        if req.game_id not in self.uri_registry:
+        if req.game_id not in TitleServlet.title_registry:
             if not self.config.server.is_develop:
                 msg = f"Unrecognised game {req.game_id} attempted allnet auth from {request_ip}."
                 self.data.base.log_event(
@@ -270,11 +260,9 @@ class AllnetServlet:
             self.logger.debug(f"Allnet response: {resp_str}")
             return (resp_str + "\n").encode("utf-8")

-        resp.uri, resp.host = self.uri_registry[req.game_id]
-
         int_ver = req.ver.replace(".", "")
-        resp.uri = resp.uri.replace("$v", int_ver)
-        resp.host = resp.host.replace("$v", int_ver)
+        resp.uri, resp.host = TitleServlet.title_registry[req.game_id].get_allnet_info(req.game_id, int(int_ver), req.serial)

         msg = f"{req.serial} authenticated from {request_ip}: {req.game_id} v{req.ver}"
         self.data.base.log_event("allnet", "ALLNET_AUTH_SUCCESS", logging.INFO, msg)
@@ -586,7 +574,7 @@ class AllnetPowerOnResponse:
         self.stat = 1
         self.uri = ""
         self.host = ""
-        self.place_id = "123"
+        self.place_id = "0123"
         self.name = "ARTEMiS"
         self.nickname = "ARTEMiS"
         self.region0 = "1"
@@ -36,6 +36,12 @@ class ServerConfig:
             self.__config, "core", "server", "is_develop", default=True
         )

+    @property
+    def is_using_proxy(self) -> bool:
+        return CoreConfig.get_config_field(
+            self.__config, "core", "server", "is_using_proxy", default=False
+        )
+
     @property
     def threading(self) -> bool:
         return CoreConfig.get_config_field(
@@ -7,15 +7,15 @@ from datetime import datetime
 from Crypto.Cipher import Blowfish
 import pytz

-from core import CoreConfig
-from core.utils import Utils
+from .config import CoreConfig
+from .utils import Utils
+from .title import TitleServlet

 class MuchaServlet:
+    mucha_registry: List[str] = []
     def __init__(self, cfg: CoreConfig, cfg_dir: str) -> None:
         self.config = cfg
         self.config_dir = cfg_dir
-        self.mucha_registry: List[str] = []

         self.logger = logging.getLogger("mucha")
         log_fmt_str = "[%(asctime)s] Mucha | %(levelname)s | %(message)s"
@@ -37,11 +37,9 @@ class MuchaServlet:
         self.logger.setLevel(cfg.mucha.loglevel)
         coloredlogs.install(level=cfg.mucha.loglevel, logger=self.logger, fmt=log_fmt_str)

-        all_titles = Utils.get_all_titles()
-
-        for _, mod in all_titles.items():
-            if hasattr(mod, "index") and hasattr(mod.index, "get_mucha_info"):
-                enabled, game_cd = mod.index.get_mucha_info(
+        for _, mod in TitleServlet.title_registry.items():
+            if hasattr(mod, "get_mucha_info"):
+                enabled, game_cd = mod.get_mucha_info(
                     self.config, self.config_dir
                 )
                 if enabled:
 core/title.py | 138
@@ -1,4 +1,4 @@
-from typing import Dict, Any
+from typing import Dict, List, Tuple
 import logging, coloredlogs
 from logging.handlers import TimedRotatingFileHandler
 from twisted.web.http import Request
@@ -7,14 +7,88 @@ from core.config import CoreConfig
 from core.data import Data
 from core.utils import Utils

+class BaseServlet:
+    def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
+        self.core_cfg = core_cfg
+        self.game_cfg = None
+        self.logger = logging.getLogger("title")
+
+    @classmethod
+    def is_game_enabled(cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str) -> bool:
+        """Called during boot to check if a specific game code should load.
+
+        Args:
+            game_code (str): 4 character game code
+            core_cfg (CoreConfig): CoreConfig class
+            cfg_dir (str): Config directory
+
+        Returns:
+            bool: True if the game is enabled and set to run, False otherwise
+        """
+        return False
+
+    def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
+        """Called during boot to get all matcher endpoints this title servlet handles
+
+        Returns:
+            Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]: A 2-length tuple where offset 0 is GET and offset 1 is POST,
+            containing a list of 3-length tuples where offset 0 is the name of the function in the handler that should be called, offset 1
+            is the matching string, and offset 2 is a dict containing rules for the matcher.
+        """
+        return (
+            [("render_GET", "/{game}/{version}/{endpoint}", {'game': R'S...'})],
+            [("render_POST", "/{game}/{version}/{endpoint}", {'game': R'S...'})]
+        )
+
+    def setup(self) -> None:
+        """Called once during boot, should contain any additional setup the handler must do, such as starting any sub-services
+        """
+        pass
+
+    def get_allnet_info(self, game_code: str, game_ver: int, keychip: str) -> Tuple[str, str]:
+        """Called any time a request to PowerOn is made to retrieve the url/host strings to be sent back to the game
+
+        Args:
+            game_code (str): 4 character game code
+            game_ver (int): version, expressed as an integer by multiplying by 100 (1.10 -> 110)
+            keychip (str): Keychip serial of the requesting machine, can be used to deliver specific URIs to different machines
+
+        Returns:
+            Tuple[str, str]: A tuple where offset 0 is the allnet uri field, and offset 1 is the allnet host field
+        """
+        if not self.core_cfg.server.is_using_proxy and Utils.get_title_port(self.core_cfg) != 80:
+            return (f"http://{self.core_cfg.title.hostname}:{Utils.get_title_port(self.core_cfg)}/{game_code}/{game_ver}/", "")
+
+        return (f"http://{self.core_cfg.title.hostname}/{game_code}/{game_ver}/", "")
+
+    def get_mucha_info(self, core_cfg: CoreConfig, cfg_dir: str) -> Tuple[bool, str]:
+        """Called once during boot to check if this game is a mucha game
+
+        Args:
+            core_cfg (CoreConfig): CoreConfig class
+            cfg_dir (str): Config directory
+
+        Returns:
+            Tuple[bool, str]: Tuple where offset 0 is true if the game is enabled, false otherwise, and offset 1 is the game CD
+        """
+        return (False, "")
+
+    def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes:
+        self.logger.warn(f"{game_code} Does not dispatch POST")
+        return None
+
+    def render_GET(self, request: Request, game_code: str, matchers: Dict) -> bytes:
+        self.logger.warn(f"{game_code} Does not dispatch GET")
+        return None
+
 class TitleServlet:
+    title_registry: Dict[str, BaseServlet] = {}
     def __init__(self, core_cfg: CoreConfig, cfg_folder: str):
         super().__init__()
         self.config = core_cfg
         self.config_folder = cfg_folder
         self.data = Data(core_cfg)
-        self.title_registry: Dict[str, Any] = {}

         self.logger = logging.getLogger("title")
         if not hasattr(self.logger, "initialized"):
@@ -43,62 +117,62 @@ class TitleServlet:
         plugins = Utils.get_all_titles()

         for folder, mod in plugins.items():
-            if hasattr(mod, "game_codes") and hasattr(mod, "index"):
-                should_call_setup = True
-
-                if hasattr(mod.index, "get_allnet_info"):
-                    for code in mod.game_codes:
-                        enabled, _, _ = mod.index.get_allnet_info(
-                            code, self.config, self.config_folder
-                        )
-
-                        if enabled:
-                            handler_cls = mod.index(self.config, self.config_folder)
-
-                            if hasattr(handler_cls, "setup") and should_call_setup:
-                                handler_cls.setup()
-                                should_call_setup = False
-
-                            self.title_registry[code] = handler_cls
-
-                else:
-                    self.logger.warning(f"Game {folder} has no get_allnet_info")
+            if hasattr(mod, "game_codes") and hasattr(mod, "index") and hasattr(mod.index, "is_game_enabled"):
+                should_call_setup = True
+                game_servlet: BaseServlet = mod.index
+                game_codes: List[str] = mod.game_codes
+
+                for code in game_codes:
+                    if game_servlet.is_game_enabled(code, self.config, self.config_folder):
+                        handler_cls = game_servlet(self.config, self.config_folder)
+
+                        if hasattr(handler_cls, "setup") and should_call_setup:
+                            handler_cls.setup()
+                            should_call_setup = False
+
+                        self.title_registry[code] = handler_cls

             else:
-                self.logger.error(f"{folder} missing game_code or index in __init__.py")
+                self.logger.error(f"{folder} missing game_code or index in __init__.py, or is_game_enabled in index")

         self.logger.info(
             f"Serving {len(self.title_registry)} game codes {'on port ' + str(core_cfg.title.port) if core_cfg.title.port > 0 else ''}"
         )

     def render_GET(self, request: Request, endpoints: dict) -> bytes:
-        code = endpoints["game"]
+        code = endpoints["title"]
+        subaction = endpoints['subaction']

         if code not in self.title_registry:
             self.logger.warning(f"Unknown game code {code}")
             request.setResponseCode(404)
             return b""

         index = self.title_registry[code]
-        if not hasattr(index, "render_GET"):
-            self.logger.warning(f"{code} does not dispatch GET")
-            request.setResponseCode(405)
+        handler = getattr(index, f"{subaction}", None)
+        if handler is None:
+            self.logger.error(f"{code} does not have handler for GET subaction {subaction}")
+            request.setResponseCode(500)
             return b""

-        return index.render_GET(request, int(endpoints["version"]), endpoints["endpoint"])
+        return handler(request, code, endpoints)

     def render_POST(self, request: Request, endpoints: dict) -> bytes:
-        code = endpoints["game"]
+        code = endpoints["title"]
+        subaction = endpoints['subaction']

         if code not in self.title_registry:
             self.logger.warning(f"Unknown game code {code}")
             request.setResponseCode(404)
             return b""

         index = self.title_registry[code]
-        if not hasattr(index, "render_POST"):
-            self.logger.warning(f"{code} does not dispatch POST")
-            request.setResponseCode(405)
+        handler = getattr(index, f"{subaction}", None)
+        if handler is None:
+            self.logger.error(f"{code} does not have handler for POST subaction {subaction}")
+            request.setResponseCode(500)
             return b""

-        return index.render_POST(
-            request, int(endpoints["version"]), endpoints["endpoint"]
-        )
+        endpoints.pop("title")
+        endpoints.pop("subaction")
+        return handler(request, code, endpoints)
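The docstrings above define the contract every title now implements. As a rough illustration only (not part of this commit), a minimal servlet for a hypothetical game code "SXXX" might look like the sketch below; it leans on the default get_allnet_info inherited from BaseServlet and only overrides the pieces TitleServlet probes for during boot. ExampleServlet and the "SXXX" code are invented for this sketch.

# Hypothetical example, not part of the diff above.
from typing import Dict, List, Tuple

from twisted.web.http import Request

from core.config import CoreConfig
from core.title import BaseServlet


class ExampleServlet(BaseServlet):
    @classmethod
    def is_game_enabled(cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str) -> bool:
        # A real title would read its own yaml config here; always-on for the sketch.
        return True

    def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
        # No GET routes, one POST route; "version" and "endpoint" become keys in `matchers`.
        return ([], [("render_POST", "/SXXX/{version}/{endpoint}", {})])

    def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes:
        self.logger.info(f"{game_code} v{matchers['version']} hit {matchers['endpoint']}")
        return b""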
@@ -5,8 +5,11 @@ import logging
 import importlib
 from os import walk

+from .config import CoreConfig
+
 class Utils:
+    real_title_port = None
+    real_title_port_ssl = None
     @classmethod
     def get_all_titles(cls) -> Dict[str, ModuleType]:
         ret: Dict[str, Any] = {}
@@ -33,3 +36,27 @@ class Utils:
             if b"x-forwarded-for" in req.getAllHeaders()
             else req.getClientAddress().host
         )
+
+    @classmethod
+    def get_title_port(cls, cfg: CoreConfig):
+        if cls.real_title_port is not None: return cls.real_title_port
+
+        if cfg.title.port == 0:
+            cls.real_title_port = cfg.allnet.port
+
+        else:
+            cls.real_title_port = cfg.title.port
+
+        return cls.real_title_port
+
+    @classmethod
+    def get_title_port_ssl(cls, cfg: CoreConfig):
+        if cls.real_title_port_ssl is not None: return cls.real_title_port_ssl
+
+        if cfg.title.port_ssl == 0:
+            cls.real_title_port_ssl = 443
+
+        else:
+            cls.real_title_port_ssl = cfg.title.port_ssl
+
+        return cls.real_title_port_ssl
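Utils.get_title_port centralises the "which port do titles actually answer on" decision: an explicit title.port wins, otherwise the allnet port is reused for single-listener setups, and the result is cached on the class. A rough illustration of the fallback rule only, with invented port values:

# Illustration only: the same fallback as Utils.get_title_port, using plain ints
# instead of a CoreConfig object (the values below are made up).
def effective_title_port(title_port: int, allnet_port: int) -> int:
    return allnet_port if title_port == 0 else title_port

assert effective_title_port(0, 80) == 80        # title.port unset -> share the allnet port
assert effective_title_port(8080, 80) == 8080   # an explicit title.port wins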
@@ -4,6 +4,7 @@ server:
     allow_unregistered_serials: True
     name: "ARTEMiS"
     is_develop: True
+    is_using_proxy: False
     threading: False
     log_dir: "logs"
     check_arcade_ip: False
@@ -24,8 +24,7 @@ server {

 # SSL titles, comment out if you don't plan on accepting SSL titles
 server {
-    listen 443 ssl default_server;
-    listen [::]:443 ssl default_server;
+    listen 443 ssl;
     server_name your.hostname.here;

     ssl_certificate /path/to/cert/title.crt;
@@ -55,7 +54,7 @@ server {
     ssl_session_tickets off;

     ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
-    ssl_ciphers "ALL:@SECLEVEL=1";
+    ssl_ciphers "ALL:@SECLEVEL=0";
     ssl_prefer_server_ciphers off;

     location / {
@@ -75,7 +74,7 @@ server {
     ssl_session_tickets off;

     ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
-    ssl_ciphers "ALL:@SECLEVEL=1";
+    ssl_ciphers "ALL:@SECLEVEL=0";
     ssl_prefer_server_ciphers off;

     location / {
@@ -85,28 +84,6 @@ server {
     }
 }

-# CXB, comment this out if you don't plan on serving crossbeats.
-server {
-    listen 443 ssl;
-    server_name cxb.hostname.here;
-
-    ssl_certificate /path/to/cert/cxb.pem;
-    ssl_certificate_key /path/to/cert/cxb.key;
-    ssl_session_timeout 1d;
-    ssl_session_cache shared:MozSSL:10m;
-    ssl_session_tickets off;
-
-    ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
-    ssl_ciphers "ALL:@SECLEVEL=1";
-    ssl_prefer_server_ciphers off;
-
-    location / {
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_pass_request_headers on;
-        proxy_pass http://localhost:8080/SDBT/104/;
-    }
-}
-
 # Frontend, set to redirect to HTTPS. Comment out if you don't intend to use the frontend
 server {
     listen 80;
@@ -143,4 +120,4 @@ server {
         proxy_pass_request_headers on;
         proxy_pass http://localhost:8090/;
     }
 }
 index.py | 44
@@ -22,8 +22,8 @@ class HttpDispatcher(resource.Resource):
         self.map_post = Mapper()
         self.logger = logging.getLogger("core")

-        self.allnet = AllnetServlet(cfg, config_dir)
         self.title = TitleServlet(cfg, config_dir)
+        self.allnet = AllnetServlet(cfg, config_dir)
         self.mucha = MuchaServlet(cfg, config_dir)

         self.map_get.connect(
@@ -144,22 +144,32 @@ class HttpDispatcher(resource.Resource):
             conditions=dict(method=["POST"]),
         )

-        self.map_get.connect(
-            "title_get",
-            "/{game}/{version}/{endpoint:.*?}",
-            controller="title",
-            action="render_GET",
-            conditions=dict(method=["GET"]),
-            requirements=dict(game=R"S..."),
-        )
-        self.map_post.connect(
-            "title_post",
-            "/{game}/{version}/{endpoint:.*?}",
-            controller="title",
-            action="render_POST",
-            conditions=dict(method=["POST"]),
-            requirements=dict(game=R"S..."),
-        )
+        for code, game in self.title.title_registry.items():
+            get_matchers, post_matchers = game.get_endpoint_matchers()
+
+            for m in get_matchers:
+                self.map_get.connect(
+                    "title_get",
+                    m[1],
+                    controller="title",
+                    action="render_GET",
+                    title=code,
+                    subaction=m[0],
+                    conditions=dict(method=["GET"]),
+                    requirements=m[2],
+                )
+
+            for m in post_matchers:
+                self.map_post.connect(
+                    "title_post",
+                    m[1],
+                    controller="title",
+                    action="render_POST",
+                    title=code,
+                    subaction=m[0],
+                    conditions=dict(method=["POST"]),
+                    requirements=m[2],
+                )

     def render_GET(self, request: Request) -> bytes:
         test = self.map_get.match(request.uri.decode())
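Each matcher tuple is expanded into a routes.Mapper entry whose extra keyword arguments (title, subaction) come back out of match() and tell TitleServlet which handler method to invoke. A rough sketch of that round trip, using the Chunithm matcher string; the registration shown and the request URL are illustrative, not lifted from the diff, and conditions/requirements are omitted to keep it minimal.

# Sketch only: how one matcher tuple becomes a route and then a match dict.
from routes import Mapper

mapper = Mapper()
# ("render_POST", "/{version}/ChuniServlet/{endpoint}", {}) registered for code "SDHD"
mapper.connect(
    "title_post",
    "/{version}/ChuniServlet/{endpoint}",
    controller="title",
    action="render_POST",
    title="SDHD",
    subaction="render_POST",
)

result = mapper.match("/210/ChuniServlet/GetUserDataApi")
# result roughly: {'controller': 'title', 'action': 'render_POST', 'title': 'SDHD',
#                  'subaction': 'render_POST', 'version': '210', 'endpoint': 'GetUserDataApi'}
# TitleServlet.render_POST then calls getattr(handler, result['subaction']) with the dict.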
@@ -11,30 +11,31 @@ from Crypto.Util.Padding import pad
 from Crypto.Protocol.KDF import PBKDF2
 from Crypto.Hash import SHA1
 from os import path
-from typing import Tuple, Dict
+from typing import Tuple, Dict, List

 from core import CoreConfig, Utils
-from titles.chuni.config import ChuniConfig
-from titles.chuni.const import ChuniConstants
-from titles.chuni.base import ChuniBase
-from titles.chuni.plus import ChuniPlus
-from titles.chuni.air import ChuniAir
-from titles.chuni.airplus import ChuniAirPlus
-from titles.chuni.star import ChuniStar
-from titles.chuni.starplus import ChuniStarPlus
-from titles.chuni.amazon import ChuniAmazon
-from titles.chuni.amazonplus import ChuniAmazonPlus
-from titles.chuni.crystal import ChuniCrystal
-from titles.chuni.crystalplus import ChuniCrystalPlus
-from titles.chuni.paradise import ChuniParadise
-from titles.chuni.new import ChuniNew
-from titles.chuni.newplus import ChuniNewPlus
-from titles.chuni.sun import ChuniSun
+from core.title import BaseServlet
+from .config import ChuniConfig
+from .const import ChuniConstants
+from .base import ChuniBase
+from .plus import ChuniPlus
+from .air import ChuniAir
+from .airplus import ChuniAirPlus
+from .star import ChuniStar
+from .starplus import ChuniStarPlus
+from .amazon import ChuniAmazon
+from .amazonplus import ChuniAmazonPlus
+from .crystal import ChuniCrystal
+from .crystalplus import ChuniCrystalPlus
+from .paradise import ChuniParadise
+from .new import ChuniNew
+from .newplus import ChuniNewPlus
+from .sun import ChuniSun


-class ChuniServlet:
+class ChuniServlet(BaseServlet):
     def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
-        self.core_cfg = core_cfg
+        super().__init__(core_cfg, cfg_dir)
         self.game_cfg = ChuniConfig()
         self.hash_table: Dict[Dict[str, str]] = {}
         if path.exists(f"{cfg_dir}/{ChuniConstants.CONFIG_NAME}"):
@@ -115,10 +116,19 @@ class ChuniServlet:
                 f"Hashed v{version} method {method_fixed} with {bytes.fromhex(keys[2])} to get {hash.hex()}"
             )

+    def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
+        return (
+            [],
+            [
+                ("render_POST", "/{version}/ChuniServlet/{endpoint}", {}),
+                ("render_POST", "/{version}/ChuniServlet/MatchingServer/{endpoint}", {})
+            ]
+        )
+
     @classmethod
-    def get_allnet_info(
+    def is_game_enabled(
         cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str
-    ) -> Tuple[bool, str, str]:
+    ) -> bool:
         game_cfg = ChuniConfig()
         if path.exists(f"{cfg_dir}/{ChuniConstants.CONFIG_NAME}"):
             game_cfg.update(
@@ -126,26 +136,26 @@ class ChuniServlet:
             )

         if not game_cfg.server.enable:
-            return (False, "", "")
+            return False

-        if core_cfg.server.is_develop:
-            return (
-                True,
-                f"http://{core_cfg.title.hostname}:{core_cfg.title.port}/{game_code}/$v/",
-                "",
-            )
+        return True

-        return (True, f"http://{core_cfg.title.hostname}/{game_code}/$v/", "")
+    def get_allnet_info(self, game_code: str, game_ver: int, keychip: str) -> Tuple[str, str]:
+        if not self.core_cfg.server.is_using_proxy and Utils.get_title_port(self.core_cfg) != 80:
+            return (f"http://{self.core_cfg.title.hostname}:{Utils.get_title_port(self.core_cfg)}/{game_ver}/", "")
+
+        return (f"http://{self.core_cfg.title.hostname}/{game_ver}/", "")

-    def render_POST(self, request: Request, version: int, url_path: str) -> bytes:
-        if url_path.lower() == "ping":
+    def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes:
+        endpoint = matchers['endpoint']
+        version = int(matchers['version'])
+
+        if endpoint.lower() == "ping":
             return zlib.compress(b'{"returnCode": "1"}')

         req_raw = request.content.getvalue()
-        url_split = url_path.split("/")
         encrtped = False
         internal_ver = 0
-        endpoint = url_split[len(url_split) - 1]
         client_ip = Utils.get_ip_addr(request)

         if version < 105: # 1.0
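Under the new flow allnet no longer substitutes $v into a stored URI template; it collapses the reported version string to an integer and asks the title servlet directly. Sketched below with invented values (hostname, port, and keychip serial are placeholders), for a CHUNITHM cab reporting ver 2.10:

# Illustration only; the commented-out call mirrors handle_poweron above.
int_ver = int("2.10".replace(".", ""))   # -> 210, the game_ver handed to the servlet
# uri, host = TitleServlet.title_registry["SDHD"].get_allnet_info("SDHD", int_ver, "A69E01A0000")
# With is_using_proxy = False, title.hostname = "example.lan" and an effective title port of 8080:
# uri == "http://example.lan:8080/210/"   (note: no game code segment any more)
# host == ""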
@@ -71,11 +71,11 @@ class ChuniNew(ChuniBase):
             "matchErrorLimit": 9999,
             "romVersion": self.game_cfg.version.version(self.version)["rom"],
             "dataVersion": self.game_cfg.version.version(self.version)["data"],
-            "matchingUri": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/200/ChuniServlet/",
-            "matchingUriX": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/200/ChuniServlet/",
+            "matchingUri": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/200/ChuniServlet/",
+            "matchingUriX": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/200/ChuniServlet/",
             # might be really important for online battle to connect the cabs via UDP port 50201
-            "udpHolePunchUri": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/200/ChuniServlet/",
-            "reflectorUri": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/200/ChuniServlet/",
+            "udpHolePunchUri": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/200/ChuniServlet/",
+            "reflectorUri": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/200/ChuniServlet/",
         },
         "isDumpUpload": False,
         "isAou": False,
@@ -21,16 +21,16 @@ class ChuniNewPlus(ChuniNew):
         ]
         ret["gameSetting"][
             "matchingUri"
-        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/205/ChuniServlet/"
+        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/205/ChuniServlet/"
         ret["gameSetting"][
             "matchingUriX"
-        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/205/ChuniServlet/"
+        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/205/ChuniServlet/"
         ret["gameSetting"][
             "udpHolePunchUri"
-        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/205/ChuniServlet/"
+        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/205/ChuniServlet/"
         ret["gameSetting"][
             "reflectorUri"
-        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/205/ChuniServlet/"
+        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/205/ChuniServlet/"
         return ret

     def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
@@ -17,16 +17,16 @@ class ChuniSun(ChuniNewPlus):
         ret["gameSetting"]["dataVersion"] = self.game_cfg.version.version(self.version)["data"]
         ret["gameSetting"][
             "matchingUri"
-        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/210/ChuniServlet/"
+        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/210/ChuniServlet/"
         ret["gameSetting"][
             "matchingUriX"
-        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/210/ChuniServlet/"
+        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/210/ChuniServlet/"
         ret["gameSetting"][
             "udpHolePunchUri"
-        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/210/ChuniServlet/"
+        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/210/ChuniServlet/"
         ret["gameSetting"][
             "reflectorUri"
-        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/210/ChuniServlet/"
+        ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/210/ChuniServlet/"
         return ret

     def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
@@ -6,6 +6,7 @@ from enum import Enum

 import pytz
 from core.config import CoreConfig
+from core.utils import Utils
 from core.data.cache import cached
 from titles.cm.const import CardMakerConstants
 from titles.cm.config import CardMakerConfig
@@ -29,8 +30,8 @@ class CardMakerBase:
         return version.replace(".", "")[:3]

     def handle_get_game_connect_api_request(self, data: Dict) -> Dict:
-        if self.core_cfg.server.is_develop:
-            uri = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}"
+        if not self.core_cfg.server.is_using_proxy and Utils.get_title_port(self.core_cfg) != 80:
+            uri = f"http://{self.core_cfg.title.hostname}:{Utils.get_title_port(self.core_cfg)}"
         else:
             uri = f"http://{self.core_cfg.title.hostname}"

@@ -44,13 +45,13 @@ class CardMakerBase:
             {
                 "modelKind": 0,
                 "type": 1,
-                "titleUri": f"{uri}/SDHD/{self._parse_int_ver(games_ver['chuni'])}/",
+                "titleUri": f"{uri}/{self._parse_int_ver(games_ver['chuni'])}/ChuniServlet/",
             },
             # maimai DX
             {
                 "modelKind": 1,
                 "type": 1,
-                "titleUri": f"{uri}/SDEZ/{self._parse_int_ver(games_ver['maimai'])}/",
+                "titleUri": f"{uri}/{self._parse_int_ver(games_ver['maimai'])}/Maimai2Servlet/",
             },
             # ONGEKI
             {
@@ -7,21 +7,22 @@ import coloredlogs
 import zlib

 from os import path
-from typing import Tuple
+from typing import Tuple, List, Dict
 from twisted.web.http import Request
 from logging.handlers import TimedRotatingFileHandler

 from core.config import CoreConfig
 from core.utils import Utils
-from titles.cm.config import CardMakerConfig
-from titles.cm.const import CardMakerConstants
-from titles.cm.base import CardMakerBase
-from titles.cm.cm135 import CardMaker135
+from core.title import BaseServlet
+from .config import CardMakerConfig
+from .const import CardMakerConstants
+from .base import CardMakerBase
+from .cm135 import CardMaker135


-class CardMakerServlet:
+class CardMakerServlet(BaseServlet):
     def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
-        self.core_cfg = core_cfg
+        super().__init__(core_cfg, cfg_dir)
         self.game_cfg = CardMakerConfig()
         if path.exists(f"{cfg_dir}/{CardMakerConstants.CONFIG_NAME}"):
             self.game_cfg.update(
@@ -55,11 +56,11 @@ class CardMakerServlet:
         coloredlogs.install(
             level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
         )

     @classmethod
-    def get_allnet_info(
+    def is_game_enabled(
         cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str
-    ) -> Tuple[bool, str, str]:
+    ) -> bool:
         game_cfg = CardMakerConfig()
         if path.exists(f"{cfg_dir}/{CardMakerConstants.CONFIG_NAME}"):
             game_cfg.update(
@@ -67,22 +68,21 @@ class CardMakerServlet:
             )

         if not game_cfg.server.enable:
-            return (False, "", "")
+            return False

-        if core_cfg.server.is_develop:
-            return (
-                True,
-                f"http://{core_cfg.title.hostname}:{core_cfg.title.port}/{game_code}/$v/",
-                "",
-            )
-
-        return (True, f"http://{core_cfg.title.hostname}/{game_code}/$v/", "")
+        return True
+
+    def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
+        return (
+            [],
+            [("render_POST", "/SDED/{version}/{endpoint}", {})]
+        )

-    def render_POST(self, request: Request, version: int, url_path: str) -> bytes:
+    def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes:
+        version = int(matchers['version'])
+        endpoint = matchers['endpoint']
         req_raw = request.content.getvalue()
-        url_split = url_path.split("/")
         internal_ver = 0
-        endpoint = url_split[len(url_split) - 1]
         client_ip = Utils.get_ip_addr(request)

         if version >= 130 and version < 135: # Card Maker
@@ -3,13 +3,12 @@ import json
 from decimal import Decimal
 from base64 import b64encode
 from typing import Any, Dict, List
 from hashlib import md5
 from datetime import datetime
 from os import path

 from core.config import CoreConfig
-from titles.cxb.config import CxbConfig
-from titles.cxb.const import CxbConstants
-from titles.cxb.database import CxbData
+from .config import CxbConfig
+from .const import CxbConstants
+from .database import CxbData

 from threading import Thread
@@ -22,6 +21,13 @@ class CxbBase:
         self.logger = logging.getLogger("cxb")
         self.version = CxbConstants.VER_CROSSBEATS_REV

+    def _get_data_contents(self, folder: str, filetype: str, encoding: str = None, subfolder: str = "") -> List[str]:
+        if path.exists(f"titles/cxb/data/{folder}/{subfolder}{filetype}.csv"):
+            with open(f"titles/cxb/data/{folder}/{subfolder}{filetype}.csv", encoding=encoding) as f:
+                return f.readlines()
+
+        return []
+
     def handle_action_rpreq_request(self, data: Dict) -> Dict:
         return {}

@@ -192,7 +198,7 @@ class CxbBase:
             ).decode("utf-8")
         )

-    def task_generateIndexData(versionindex):
+    def task_generateIndexData(self, versionindex: List[str], uid: int):
         try:
             v_profile = self.data.profile.get_profile_index(0, uid, self.version)
             v_profile_data = v_profile["data"]
@@ -274,7 +280,7 @@ class CxbBase:
         thread_ScoreData.start()

         for v in index:
-            thread_IndexData = Thread(target=CxbBase.task_generateIndexData(versionindex))
+            thread_IndexData = Thread(target=CxbBase.task_generateIndexData(self, versionindex, uid))
            thread_IndexData.start()

         return {"index": index, "data": data1, "version": versionindex}
@@ -1,4 +1,5 @@
 from twisted.web.http import Request
+import traceback
 from twisted.web import resource, server
 from twisted.internet import reactor, endpoints
 import yaml
@@ -7,18 +8,20 @@ import re
 import inflection
 import logging, coloredlogs
 from logging.handlers import TimedRotatingFileHandler
-from typing import Dict, Tuple
+from typing import Dict, Tuple, List
 from os import path

 from core.config import CoreConfig
-from titles.cxb.config import CxbConfig
-from titles.cxb.const import CxbConstants
-from titles.cxb.rev import CxbRev
-from titles.cxb.rss1 import CxbRevSunriseS1
-from titles.cxb.rss2 import CxbRevSunriseS2
+from core.title import BaseServlet
+from core.utils import Utils
+from .config import CxbConfig
+from .const import CxbConstants
+from .rev import CxbRev
+from .rss1 import CxbRevSunriseS1
+from .rss2 import CxbRevSunriseS2


-class CxbServlet(resource.Resource):
+class CxbServlet(BaseServlet):
     def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
         self.isLeaf = True
         self.cfg_dir = cfg_dir
@@ -61,9 +64,7 @@ class CxbServlet(resource.Resource):
         ]

     @classmethod
-    def get_allnet_info(
-        cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str
-    ) -> Tuple[bool, str, str]:
+    def is_game_enabled(cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str) -> bool:
         game_cfg = CxbConfig()
         if path.exists(f"{cfg_dir}/{CxbConstants.CONFIG_NAME}"):
             game_cfg.update(
@@ -71,51 +72,36 @@ class CxbServlet(resource.Resource):
             )

         if not game_cfg.server.enable:
-            return (False, "", "")
-
-        if core_cfg.server.is_develop:
-            return (
-                True,
-                f"http://{core_cfg.title.hostname}:{core_cfg.title.port}/{game_code}/$v/",
-                "",
-            )
-
-        return (True, f"http://{core_cfg.title.hostname}/{game_code}/$v/", "")
+            return False
+
+        return True
+
+    def get_allnet_info(self, game_code: str, game_ver: int, keychip: str) -> Tuple[str, str]:
+        if not self.core_cfg.server.is_using_proxy and Utils.get_title_port_ssl(self.core_cfg) != 443:
+            return (
+                f"https://{self.core_cfg.title.hostname}:{self.core_cfg.title.port_ssl}",
+                "",
+            )
+
+        return (f"https://{self.core_cfg.title.hostname}", "")
+
+    def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
+        return (
+            [],
+            [
+                ("handle_data", "/data", {}),
+                ("handle_action", "/action", {}),
+                ("handle_action", "/v2/action", {}),
+                ("handle_auth", "/auth", {}),
+            ]
+        )

     def setup(self):
         if self.game_cfg.server.enable:
             endpoints.serverFromString(
                 reactor,
                 f"tcp:{self.game_cfg.server.port}:interface={self.core_cfg.server.listen_address}",
             ).listen(server.Site(CxbServlet(self.core_cfg, self.cfg_dir)))

             if self.core_cfg.server.is_develop and self.game_cfg.server.ssl_enable:
                 endpoints.serverFromString(
                     reactor,
                     f"ssl:{self.game_cfg.server.port_secure}"
                     f":interface={self.core_cfg.server.listen_address}:privateKey={self.game_cfg.server.ssl_key}:"
                     f"certKey={self.game_cfg.server.ssl_cert}",
                 ).listen(server.Site(CxbServlet(self.core_cfg, self.cfg_dir)))

                 self.logger.info(
                     f"Ready on ports {self.game_cfg.server.port} & {self.game_cfg.server.port_secure}"
                 )
             else:
                 self.logger.info(f"Ready on port {self.game_cfg.server.port}")

-    def render_POST(self, request: Request):
-        version = 0
-        internal_ver = 0
-        func_to_find = ""
-        cmd = ""
-        subcmd = ""
-        req_url = request.uri.decode()
-        url_split = req_url.split("/")
+    def preprocess(self, req: Request) -> Dict:
         try:
-            req_bytes = request.content.getvalue()
+            req_bytes = req.content.getvalue()
         except:
-            req_bytes = request.content.read().decode("utf-8")
+            req_bytes = req.content.read() # Can we just use this one?

         try:
             req_json: Dict = json.loads(req_bytes)
@@ -127,42 +113,49 @@ class CxbServlet(resource.Resource):
        except Exception as f:
            self.logger.warning(
                f"Error decoding json: {e} / {f} - {req_url} - {req_bytes}"
                f"Error decoding json to /data endpoint: {e} / {f} - {req_bytes}"
            )
            return b""

        return req_json

    def handle_data(self, request: Request, game_code: str, matchers: Dict) -> bytes:
        req_json = self.preprocess(request)
        func_to_find = "handle_data_"
        version_string = "Base"
        internal_ver = 0

        if req_json == {}:
            self.logger.warning(f"Empty json request to {req_url}")
            self.logger.warning(f"Empty json request to /data")
            return b""

        if (
            not type(req_json["dldate"]) is dict
            or "filetype" not in req_json["dldate"]
        ):
            self.logger.warning(f"Malformed dldate request: {req_json}")
            return b""

        cmd = url_split[len(url_split) - 1]
        subcmd = list(req_json.keys())[0]
        filetype = req_json["dldate"]["filetype"]
        filetype_split = filetype.split("/")

        if subcmd == "dldate":
            if (
                not type(req_json["dldate"]) is dict
                or "filetype" not in req_json["dldate"]
            ):
                self.logger.warning(f"Malformed dldate request: {req_url} {req_json}")
                return b""
        if len(filetype_split) < 2 or not filetype_split[0].isnumeric():
            self.logger.warning(f"Malformed dldate request: {req_json}")
            return b""

            filetype = req_json["dldate"]["filetype"]
            filetype_split = filetype.split("/")
            version = int(filetype_split[0])
            filetype_inflect_split = inflection.underscore(filetype).split("/")
        version = int(filetype_split[0])
        filename = filetype_split[len(filetype_split) - 1]

            match = re.match(
                "^([A-Za-z]*)(\d\d\d\d)$", filetype_split[len(filetype_split) - 1]
            )
            if match:
                subcmd = f"{inflection.underscore(match.group(1))}xxxx"
            else:
                subcmd = f"{filetype_inflect_split[len(filetype_inflect_split) - 1]}"
        match = re.match(
            "^([A-Za-z]*)(\d\d\d\d)$", filetype_split[len(filetype_split) - 1]
        )
        if match:
            func_to_find += f"{inflection.underscore(match.group(1))}xxxx"
        else:
            filetype = subcmd

        func_to_find = f"handle_{cmd}_{subcmd}_request"

            func_to_find += f"{inflection.underscore(filename)}"

        func_to_find += "_request"

        if version <= 10102:
            version_string = "Rev"
            internal_ver = CxbConstants.VER_CROSSBEATS_REV
@@ -175,23 +168,81 @@ class CxbServlet(resource.Resource):
            version_string = "Rev SunriseS2"
            internal_ver = CxbConstants.VER_CROSSBEATS_REV_SUNRISE_S2
        else:
            version_string = "Base"

        self.logger.info(f"{version_string} Request {req_url} -> {filetype}")
        if not hasattr(self.versions[internal_ver], func_to_find):
            self.logger.warn(f"{version_string} has no handler for filetype {filetype}")
            return ""

        self.logger.info(f"{version_string} request for filetype {filetype}")
        self.logger.debug(req_json)

        handler = getattr(self.versions[internal_ver], func_to_find)

        try:
            handler = getattr(self.versions[internal_ver], func_to_find)
            resp = handler(req_json)
        except AttributeError as e:
            self.logger.warning(f"Unhandled {version_string} request {req_url} - {e}")
            resp = {}
        except Exception as e:
            self.logger.error(f"Error handling {version_string} method {req_url} - {e}")
            raise
            self.logger.error(f"Error handling request for file {filetype} - {e}")
            if self.logger.level == logging.DEBUG:
                traceback.print_exception(e, limit=1)
                with open("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), "a") as f:
                    traceback.print_exception(e, limit=1, file=f)
            return ""

        self.logger.debug(f"{version_string} Response {resp}")
        return json.dumps(resp, ensure_ascii=False).encode("utf-8")

    def handle_action(self, request: Request, game_code: str, matchers: Dict) -> bytes:
        req_json = self.preprocess(request)
        subcmd = list(req_json.keys())[0]
        func_to_find = f"handle_action_{subcmd}_request"

        if not hasattr(self.versions[0], func_to_find):
            self.logger.warn(f"No handler for action {subcmd} request")
            return ""

        self.logger.info(f"Action {subcmd} Request")
        self.logger.debug(req_json)

        handler = getattr(self.versions[0], func_to_find)

        try:
            resp = handler(req_json)
        except Exception as e:
            self.logger.error(f"Error handling action {subcmd} request - {e}")
            if self.logger.level == logging.DEBUG:
                traceback.print_exception(e, limit=1)
                with open("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), "a") as f:
                    traceback.print_exception(e, limit=1, file=f)
            return ""

        self.logger.debug(f"Response {resp}")
        return json.dumps(resp, ensure_ascii=False).encode("utf-8")

    def handle_auth(self, request: Request, game_code: str, matchers: Dict) -> bytes:
        req_json = self.preprocess(request)
        subcmd = list(req_json.keys())[0]
        func_to_find = f"handle_auth_{subcmd}_request"

        if not hasattr(self.versions[0], func_to_find):
            self.logger.warn(f"No handler for auth {subcmd} request")
            return ""

        self.logger.info(f"Action {subcmd} Request")
        self.logger.debug(req_json)

        handler = getattr(self.versions[0], func_to_find)

        try:
            resp = handler(req_json)
        except Exception as e:
            self.logger.error(f"Error handling auth {subcmd} request - {e}")
            if self.logger.level == logging.DEBUG:
                traceback.print_exception(e, limit=1)
                with open("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), "a") as f:
                    traceback.print_exception(e, limit=1, file=f)
            return ""

        self.logger.debug(f"Response {resp}")
        return json.dumps(resp, ensure_ascii=False).encode("utf-8")
@@ -7,9 +7,9 @@ from datetime import datetime
 from core.config import CoreConfig
 from core.data import Data, cached
-from titles.cxb.config import CxbConfig
-from titles.cxb.base import CxbBase
-from titles.cxb.const import CxbConstants
+from .config import CxbConfig
+from .base import CxbBase
+from .const import CxbConstants

 class CxbRev(CxbBase):
@@ -47,7 +47,7 @@ class CxbRev(CxbBase):
-        with open(r"titles/cxb/rev_data/MusicArchiveList.csv") as music:
+        with open(r"titles/cxb/data/rss/MusicArchiveList.csv") as music:
@@ -59,7 +59,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/Item/ItemArchiveList_Icon.csv", encoding="utf-8"
+            r"titles/cxb/data/rss/Item/ItemArchiveList_Icon.csv", encoding="utf-8"
@@ -70,7 +70,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/Item/ItemArchiveList_SkinNotes.csv", encoding="utf-8"
+            r"titles/cxb/data/rss/Item/ItemArchiveList_SkinNotes.csv", encoding="utf-8"
@@ -81,7 +81,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/Item/ItemArchiveList_SkinEffect.csv", encoding="utf-8"
+            r"titles/cxb/data/rss/Item/ItemArchiveList_SkinEffect.csv", encoding="utf-8"
@@ -92,7 +92,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/Item/ItemArchiveList_SkinBg.csv", encoding="utf-8"
+            r"titles/cxb/data/rss/Item/ItemArchiveList_SkinBg.csv", encoding="utf-8"
@@ -103,7 +103,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/Item/ItemList_Title.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss/Item/ItemList_Title.csv", encoding="shift-jis"
@@ -114,7 +114,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/Shop/ShopList_Music.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss/Shop/ShopList_Music.csv", encoding="shift-jis"
@@ -125,7 +125,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/Shop/ShopList_Icon.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss/Shop/ShopList_Icon.csv", encoding="shift-jis"
@@ -136,7 +136,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/Shop/ShopList_Title.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss/Shop/ShopList_Title.csv", encoding="shift-jis"
@@ -156,7 +156,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/Shop/ShopList_Sale.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss/Shop/ShopList_Sale.csv", encoding="shift-jis"
@@ -171,7 +171,7 @@ class CxbRev(CxbBase):
-            rf"titles/cxb/rev_data/Ex000{extra_num}.csv", encoding="shift-jis"
+            rf"titles/cxb/data/rss/Ex000{extra_num}.csv", encoding="shift-jis"
@@ -187,7 +187,7 @@ class CxbRev(CxbBase):
-        with open(r"titles/cxb/rev_data/NewsList.csv", encoding="UTF-8") as news:
+        with open(r"titles/cxb/data/rss/NewsList.csv", encoding="UTF-8") as news:
@@ -199,7 +199,7 @@ class CxbRev(CxbBase):
-        with open(r"titles/cxb/rev_data/License_Offline.csv", encoding="UTF-8") as lic:
+        with open(r"titles/cxb/data/rss/License_Offline.csv", encoding="UTF-8") as lic:
@@ -209,7 +209,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/Course/CourseList.csv", encoding="UTF-8"
+            r"titles/cxb/data/rss/Course/CourseList.csv", encoding="UTF-8"
@@ -222,7 +222,7 @@ class CxbRev(CxbBase):
-            rf"titles/cxb/rev_data/Course/Cs000{extra_num}.csv", encoding="shift-jis"
+            rf"titles/cxb/data/rss/Course/Cs000{extra_num}.csv", encoding="shift-jis"
@@ -233,7 +233,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/MissionList.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss/MissionList.csv", encoding="shift-jis"
@@ -250,7 +250,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/Event/EventArchiveList.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss/Event/EventArchiveList.csv", encoding="shift-jis"
@@ -292,7 +292,7 @@ class CxbRev(CxbBase):
-            r"titles/cxb/rev_data/Event/EventStampList.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss/Event/EventStampList.csv", encoding="shift-jis"
@@ -7,9 +7,9 @@ from datetime import datetime
 from core.config import CoreConfig
 from core.data import Data, cached
-from titles.cxb.config import CxbConfig
-from titles.cxb.base import CxbBase
-from titles.cxb.const import CxbConstants
+from .config import CxbConfig
+from .base import CxbBase
+from .const import CxbConstants

 class CxbRevSunriseS1(CxbBase):
@@ -23,7 +23,7 @@ class CxbRevSunriseS1(CxbBase):
-        with open(r"titles/cxb/rss1_data/MusicArchiveList.csv") as music:
+        with open(r"titles/cxb/data/rss1/MusicArchiveList.csv") as music:
@@ -36,7 +36,7 @@ class CxbRevSunriseS1(CxbBase):
-            r"titles/cxb/rss1_data/Item/ItemList_Icon.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss1/Item/ItemList_Icon.csv", encoding="shift-jis"
@@ -45,7 +45,7 @@ class CxbRevSunriseS1(CxbBase):
-            r"titles/cxb/rss1_data/Item/ItemList_Title.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss1/Item/ItemList_Title.csv", encoding="shift-jis"
@@ -58,7 +58,7 @@ class CxbRevSunriseS1(CxbBase):
-            r"titles/cxb/rss1_data/Shop/ShopList_Icon.csv", encoding="utf-8"
+            r"titles/cxb/data/rss1/Shop/ShopList_Icon.csv", encoding="utf-8"
@@ -67,7 +67,7 @@ class CxbRevSunriseS1(CxbBase):
-            r"titles/cxb/rss1_data/Shop/ShopList_Music.csv", encoding="utf-8"
+            r"titles/cxb/data/rss1/Shop/ShopList_Music.csv", encoding="utf-8"
@@ -76,7 +76,7 @@ class CxbRevSunriseS1(CxbBase):
-            r"titles/cxb/rss1_data/Shop/ShopList_Sale.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss1/Shop/ShopList_Sale.csv", encoding="shift-jis"
@@ -85,7 +85,7 @@ class CxbRevSunriseS1(CxbBase):
-            r"titles/cxb/rss1_data/Shop/ShopList_SkinBg.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss1/Shop/ShopList_SkinBg.csv", encoding="shift-jis"
@@ -94,7 +94,7 @@ class CxbRevSunriseS1(CxbBase):
-            r"titles/cxb/rss1_data/Shop/ShopList_SkinEffect.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss1/Shop/ShopList_SkinEffect.csv", encoding="shift-jis"
@@ -103,7 +103,7 @@ class CxbRevSunriseS1(CxbBase):
-            r"titles/cxb/rss1_data/Shop/ShopList_SkinNotes.csv", encoding="shift-jis"
+            r"titles/cxb/data/rss1/Shop/ShopList_SkinNotes.csv", encoding="shift-jis"
@@ -112,7 +112,7 @@ class CxbRevSunriseS1(CxbBase):
-            r"titles/cxb/rss1_data/Shop/ShopList_Title.csv", encoding="utf-8"
+            r"titles/cxb/data/rss1/Shop/ShopList_Title.csv", encoding="utf-8"
@@ -140,7 +140,7 @@ class CxbRevSunriseS1(CxbBase):
-        with open(r"titles/cxb/rss1_data/NewsList.csv", encoding="UTF-8") as news:
+        with open(r"titles/cxb/data/rss1/NewsList.csv", encoding="UTF-8") as news:
@@ -155,7 +155,7 @@ class CxbRevSunriseS1(CxbBase):
-        with open(r"titles/cxb/rss1_data/MusicArchiveList.csv") as music:
+        with open(r"titles/cxb/data/rss1/MusicArchiveList.csv") as music:
@@ -169,7 +169,7 @@ class CxbRevSunriseS1(CxbBase):
-        with open(r"titles/cxb/rss1_data/License.csv", encoding="UTF-8") as licenses:
+        with open(r"titles/cxb/data/rss1/License.csv", encoding="UTF-8") as licenses:
@@ -179,7 +179,7 @@ class CxbRevSunriseS1(CxbBase):
-            r"titles/cxb/rss1_data/Course/CourseList.csv", encoding="UTF-8"
+            r"titles/cxb/data/rss1/Course/CourseList.csv", encoding="UTF-8"
@@ -191,7 +191,7 @@ class CxbRevSunriseS1(CxbBase):
-            rf"titles/cxb/rss1_data/Course/Cs{extra_num}.csv", encoding="shift-jis"
+            rf"titles/cxb/data/rss1/Course/Cs{extra_num}.csv", encoding="shift-jis"
@@ -229,7 +229,7 @@ class CxbRevSunriseS1(CxbBase):
-        with open(r"titles/cxb/rss1_data/Partner0000.csv") as partner:
+        with open(r"titles/cxb/data/rss1/Partner0000.csv") as partner:
@@ -7,9 +7,9 @@ from datetime import datetime

from core.config import CoreConfig
from core.data import Data, cached
from titles.cxb.config import CxbConfig
from titles.cxb.base import CxbBase
from titles.cxb.const import CxbConstants
from .config import CxbConfig
from .base import CxbBase
from .const import CxbConstants


class CxbRevSunriseS2(CxbBase):
@@ -23,7 +23,7 @@ class CxbRevSunriseS2(CxbBase):
@cached(lifetime=86400)
def handle_data_music_list_request(self, data: Dict) -> Dict:
ret_str = ""
with open(r"titles/cxb/rss2_data/MusicArchiveList.csv") as music:
with open(r"titles/cxb/data/rss2/MusicArchiveList.csv") as music:
lines = music.readlines()
for line in lines:
line_split = line.split(",")
@@ -36,7 +36,7 @@ class CxbRevSunriseS2(CxbBase):
# ItemListIcon load
ret_str = "#ItemListIcon\r\n"
with open(
r"titles/cxb/rss2_data/Item/ItemList_Icon.csv", encoding="utf-8"
r"titles/cxb/data/rss2/Item/ItemList_Icon.csv", encoding="utf-8"
) as item:
lines = item.readlines()
for line in lines:
@@ -45,7 +45,7 @@ class CxbRevSunriseS2(CxbBase):
# ItemListTitle load
ret_str += "\r\n#ItemListTitle\r\n"
with open(
r"titles/cxb/rss2_data/Item/ItemList_Title.csv", encoding="utf-8"
r"titles/cxb/data/rss2/Item/ItemList_Title.csv", encoding="utf-8"
) as item:
lines = item.readlines()
for line in lines:
@@ -58,7 +58,7 @@ class CxbRevSunriseS2(CxbBase):
# ShopListIcon load
ret_str = "#ShopListIcon\r\n"
with open(
r"titles/cxb/rss2_data/Shop/ShopList_Icon.csv", encoding="utf-8"
r"titles/cxb/data/rss2/Shop/ShopList_Icon.csv", encoding="utf-8"
) as shop:
lines = shop.readlines()
for line in lines:
@@ -67,7 +67,7 @@ class CxbRevSunriseS2(CxbBase):
# ShopListMusic load
ret_str += "\r\n#ShopListMusic\r\n"
with open(
r"titles/cxb/rss2_data/Shop/ShopList_Music.csv", encoding="utf-8"
r"titles/cxb/data/rss2/Shop/ShopList_Music.csv", encoding="utf-8"
) as shop:
lines = shop.readlines()
for line in lines:
@@ -76,7 +76,7 @@ class CxbRevSunriseS2(CxbBase):
# ShopListSale load
ret_str += "\r\n#ShopListSale\r\n"
with open(
r"titles/cxb/rss2_data/Shop/ShopList_Sale.csv", encoding="shift-jis"
r"titles/cxb/data/rss2/Shop/ShopList_Sale.csv", encoding="shift-jis"
) as shop:
lines = shop.readlines()
for line in lines:
@@ -85,7 +85,7 @@ class CxbRevSunriseS2(CxbBase):
# ShopListSkinBg load
ret_str += "\r\n#ShopListSkinBg\r\n"
with open(
r"titles/cxb/rss2_data/Shop/ShopList_SkinBg.csv", encoding="shift-jis"
r"titles/cxb/data/rss2/Shop/ShopList_SkinBg.csv", encoding="shift-jis"
) as shop:
lines = shop.readlines()
for line in lines:
@@ -94,7 +94,7 @@ class CxbRevSunriseS2(CxbBase):
# ShopListSkinEffect load
ret_str += "\r\n#ShopListSkinEffect\r\n"
with open(
r"titles/cxb/rss2_data/Shop/ShopList_SkinEffect.csv", encoding="shift-jis"
r"titles/cxb/data/rss2/Shop/ShopList_SkinEffect.csv", encoding="shift-jis"
) as shop:
lines = shop.readlines()
for line in lines:
@@ -103,7 +103,7 @@ class CxbRevSunriseS2(CxbBase):
# ShopListSkinNotes load
ret_str += "\r\n#ShopListSkinNotes\r\n"
with open(
r"titles/cxb/rss2_data/Shop/ShopList_SkinNotes.csv", encoding="shift-jis"
r"titles/cxb/data/rss2/Shop/ShopList_SkinNotes.csv", encoding="shift-jis"
) as shop:
lines = shop.readlines()
for line in lines:
@@ -112,7 +112,7 @@ class CxbRevSunriseS2(CxbBase):
# ShopListTitle load
ret_str += "\r\n#ShopListTitle\r\n"
with open(
r"titles/cxb/rss2_data/Shop/ShopList_Title.csv", encoding="utf-8"
r"titles/cxb/data/rss2/Shop/ShopList_Title.csv", encoding="utf-8"
) as shop:
lines = shop.readlines()
for line in lines:
@@ -140,7 +140,7 @@ class CxbRevSunriseS2(CxbBase):
@cached(lifetime=86400)
def handle_data_news_list_request(self, data: Dict) -> Dict:
ret_str = ""
with open(r"titles/cxb/rss2_data/NewsList.csv", encoding="UTF-8") as news:
with open(r"titles/cxb/data/rss2/NewsList.csv", encoding="UTF-8") as news:
lines = news.readlines()
for line in lines:
ret_str += f"{line[:-1]}\r\n"
@@ -155,7 +155,7 @@ class CxbRevSunriseS2(CxbBase):
@cached(lifetime=86400)
def handle_data_random_music_list_request(self, data: Dict) -> Dict:
ret_str = ""
with open(r"titles/cxb/rss2_data/MusicArchiveList.csv") as music:
with open(r"titles/cxb/data/rss2/MusicArchiveList.csv") as music:
lines = music.readlines()
count = 0
for line in lines:
@@ -169,7 +169,7 @@ class CxbRevSunriseS2(CxbBase):
@cached(lifetime=86400)
def handle_data_license_request(self, data: Dict) -> Dict:
ret_str = ""
with open(r"titles/cxb/rss2_data/License.csv", encoding="UTF-8") as licenses:
with open(r"titles/cxb/data/rss2/License.csv", encoding="UTF-8") as licenses:
lines = licenses.readlines()
for line in lines:
ret_str += f"{line[:-1]}\r\n"
@@ -179,7 +179,7 @@ class CxbRevSunriseS2(CxbBase):
def handle_data_course_list_request(self, data: Dict) -> Dict:
ret_str = ""
with open(
r"titles/cxb/rss2_data/Course/CourseList.csv", encoding="UTF-8"
r"titles/cxb/data/rss2/Course/CourseList.csv", encoding="UTF-8"
) as course:
lines = course.readlines()
for line in lines:
@@ -191,7 +191,7 @@ class CxbRevSunriseS2(CxbBase):
extra_num = int(data["dldate"]["filetype"][-4:])
ret_str = ""
with open(
rf"titles/cxb/rss2_data/Course/Cs{extra_num}.csv", encoding="shift-jis"
rf"titles/cxb/data/rss2/Course/Cs{extra_num}.csv", encoding="shift-jis"
) as course:
lines = course.readlines()
for line in lines:
@@ -229,7 +229,7 @@ class CxbRevSunriseS2(CxbBase):
def handle_data_partnerxxxx_request(self, data: Dict) -> Dict:
partner_num = int(data["dldate"]["filetype"][-4:])
ret_str = f"{partner_num},,{partner_num},1,10000,\r\n"
with open(r"titles/cxb/rss2_data/Partner0000.csv") as partner:
with open(r"titles/cxb/data/rss2/Partner0000.csv") as partner:
lines = partner.readlines()
for line in lines:
ret_str += f"{line[:-1]}\r\n"

@@ -7,17 +7,19 @@ import json
import urllib.parse
import base64
from os import path
from typing import Tuple
from typing import Tuple, Dict, List

from core.config import CoreConfig
from titles.diva.config import DivaConfig
from titles.diva.const import DivaConstants
from titles.diva.base import DivaBase
from core.title import BaseServlet
from core.utils import Utils
from .config import DivaConfig
from .const import DivaConstants
from .base import DivaBase


class DivaServlet:
class DivaServlet(BaseServlet):
def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
self.core_cfg = core_cfg
super().__init__(core_cfg, cfg_dir)
self.game_cfg = DivaConfig()
if path.exists(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}"):
self.game_cfg.update(
@@ -49,10 +51,22 @@ class DivaServlet:
level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
)

def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
return (
[],
[("render_POST", "/DivaServlet/", {})]
)

def get_allnet_info(self, game_code: str, game_ver: int, keychip: str) -> Tuple[str, str]:
if not self.core_cfg.server.is_using_proxy and Utils.get_title_port(self.core_cfg) != 80:
return (f"http://{self.core_cfg.title.hostname}:{Utils.get_title_port(self.core_cfg)}/DivaServlet/", "")

return (f"http://{self.core_cfg.title.hostname}/DivaServlet/", "")

@classmethod
def get_allnet_info(
def is_game_enabled(
cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str
) -> Tuple[bool, str, str]:
) -> bool:
game_cfg = DivaConfig()
if path.exists(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}"):
game_cfg.update(
@@ -60,20 +74,13 @@ class DivaServlet:
)

if not game_cfg.server.enable:
return (False, "", "")
return False

if core_cfg.server.is_develop:
return (
True,
f"http://{core_cfg.title.hostname}:{core_cfg.title.port}/{game_code}/$v/",
"",
)
return True

return (True, f"http://{core_cfg.title.hostname}/{game_code}/$v/", "")

def render_POST(self, req: Request, version: int, url_path: str) -> bytes:
req_raw = req.content.getvalue()
url_header = req.getAllHeaders()
def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes:
req_raw = request.content.getvalue()
url_header = request.getAllHeaders()

# Ping Dispatch
if "THIS_STRING_SEPARATES" in str(url_header):
@@ -148,7 +155,7 @@ class DivaServlet:
"utf-8"
)

req.responseHeaders.addRawHeader(b"content-type", b"text/plain")
request.responseHeaders.addRawHeader(b"content-type", b"text/plain")
self.logger.debug(
f"Response cmd={req_data['cmd']}&req_id={req_data['req_id']}&stat=ok{resp}"
)

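Taken together, the DivaServlet changes above show the shape of the per-title servlet interface this commit moves to: a classmethod answers whether a game should be registered at all, while endpoint routing and the allnet uri/host answer become instance methods on the servlet. What follows is a minimal sketch of a servlet written against that interface; ExampleServlet, the "/ExampleServlet/" path, and the unconditional True from is_game_enabled are hypothetical placeholders, and only the method signatures and helpers (BaseServlet, Utils.get_title_port) are taken from the diff above.

from typing import Dict, List, Tuple

from core.config import CoreConfig
from core.title import BaseServlet
from core.utils import Utils


class ExampleServlet(BaseServlet):
    def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
        # BaseServlet keeps the core config and config dir (see super().__init__ above).
        super().__init__(core_cfg, cfg_dir)

    @classmethod
    def is_game_enabled(cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str) -> bool:
        # Replaces the old classmethod get_allnet_info: it only decides whether
        # the title should be registered, not where to point the cab.
        return True

    def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
        # (GET matchers, POST matchers); each entry is (handler name, path template, kwargs).
        return ([], [("render_POST", "/ExampleServlet/", {})])

    def get_allnet_info(self, game_code: str, game_ver: int, keychip: str) -> Tuple[str, str]:
        # Called per auth request, so the answer can vary by version or keychip.
        title_port = Utils.get_title_port(self.core_cfg)
        if not self.core_cfg.server.is_using_proxy and title_port != 80:
            return (f"http://{self.core_cfg.title.hostname}:{title_port}/ExampleServlet/", "")
        return (f"http://{self.core_cfg.title.hostname}/ExampleServlet/", "")
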
@@ -4,22 +4,22 @@ import logging
import coloredlogs
from logging.handlers import TimedRotatingFileHandler
from os import path
from typing import Tuple, List
from typing import Tuple, List, Dict
from twisted.internet import reactor, endpoints
from twisted.web import server, resource
import importlib

from core.config import CoreConfig
from core.title import BaseServlet
from .config import IDZConfig
from .const import IDZConstants
from .userdb import IDZUserDBFactory, IDZUserDBWeb, IDZKey
from .userdb import IDZUserDBFactory, IDZKey
from .echo import IDZEcho
from .handlers import IDZHandlerLoadConfigB


class IDZServlet:
class IDZServlet(BaseServlet):
def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
self.core_cfg = core_cfg
super().__init__(core_cfg, cfg_dir)
self.game_cfg = IDZConfig()
if path.exists(f"{cfg_dir}/{IDZConstants.CONFIG_NAME}"):
self.game_cfg.update(
@@ -65,9 +65,9 @@ class IDZServlet:
return hash_

@classmethod
def get_allnet_info(
def is_game_enabled(
cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str
) -> Tuple[bool, str, str]:
) -> bool:
game_cfg = IDZConfig()
if path.exists(f"{cfg_dir}/{IDZConstants.CONFIG_NAME}"):
game_cfg.update(
@@ -75,21 +75,29 @@ class IDZServlet:
)

if not game_cfg.server.enable:
return (False, "", "")
return False

if len(game_cfg.rsa_keys) <= 0 or not game_cfg.server.aes_key:
logging.getLogger("idz").error("IDZ: No RSA/AES keys! IDZ cannot start")
return (False, "", "")
return False

return True

def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
return [
[("render_GET", "/{game}/{version}/{endpoint:.*?}", {'game': R'S...'})], # TODO: Slim this down to only the news stuff
[]
]

def get_allnet_info(self, game_code: str, game_ver: int, keychip: str) -> Tuple[str, str]:
hostname = (
core_cfg.title.hostname
if not game_cfg.server.hostname
else game_cfg.server.hostname
self.core_cfg.title.hostname
if not self.game_cfg.server.hostname
else self.game_cfg.server.hostname
)
return (
True,
f"",
f"{hostname}:{game_cfg.ports.userdb}",
f"{hostname}:{self.game_cfg.ports.userdb}",
)

def setup(self):
@@ -149,12 +157,8 @@ class IDZServlet:

self.logger.info(f"UserDB Listening on port {self.game_cfg.ports.userdb}")

def render_POST(self, request: Request, version: int, url_path: str) -> bytes:
req_raw = request.content.getvalue()
self.logger.info(f"IDZ POST request: {url_path} - {req_raw}")
return b""

def render_GET(self, request: Request, version: int, url_path: str) -> bytes:
def render_GET(self, request: Request, game_code: str, matchers: Dict) -> bytes:
url_path = matchers['endpoint']
self.logger.info(f"IDZ GET request: {url_path}")
request.responseHeaders.setRawHeaders(
"Content-Type", [b"text/plain; charset=utf-8"]

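The IDZ matcher above pairs a path template with a per-variable regex override ({'game': R'S...'}, i.e. any four-character code starting with S). This diff does not show how the routing layer expands such templates, so the snippet below is only a rough, self-contained illustration of the kind of matching that implies, written with plain re rather than the project's actual router; the SXXX code and the news path are made-up inputs.

import re


def compile_template(template: str, overrides: dict) -> re.Pattern:
    # Turn "{name}" / "{name:regex}" placeholders into named groups, letting the
    # overrides dict (like {'game': r'S...'}) replace a placeholder's pattern.
    def repl(m: re.Match) -> str:
        name, _, pattern = m.group(1).partition(":")
        return f"(?P<{name}>{overrides.get(name, pattern or '[^/]+')})"

    return re.compile("^" + re.sub(r"\{([^{}]+)\}", repl, template) + "$")


matcher = compile_template("/{game}/{version}/{endpoint:.*?}", {"game": r"S..."})
m = matcher.match("/SXXX/110/news/news_list.txt")
if m:
    print(m.groupdict())  # {'game': 'SXXX', 'version': '110', 'endpoint': 'news/news_list.txt'}
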
@@ -1,4 +1,4 @@
from datetime import datetime
from datetime import datetime, timedelta
from typing import Any, Dict, List
import logging
from base64 import b64decode
@@ -7,9 +7,10 @@ from PIL import ImageFile

import pytz
from core.config import CoreConfig
from titles.mai2.const import Mai2Constants
from titles.mai2.config import Mai2Config
from titles.mai2.database import Mai2Data
from core.utils import Utils
from .const import Mai2Constants
from .config import Mai2Config
from .database import Mai2Data


class Mai2Base:
@@ -22,16 +23,17 @@ class Mai2Base:
self.can_deliver = False
self.can_usbdl = False
self.old_server = ""
self.date_time_format = "%Y-%m-%d %H:%M:%S"

if self.core_config.server.is_develop and self.core_config.title.port > 0:
self.old_server = f"http://{self.core_config.title.hostname}:{self.core_config.title.port}/SDEY/197/"
if not self.core_config.server.is_using_proxy and Utils.get_title_port(self.core_config) != 80:
self.old_server = f"http://{self.core_config.title.hostname}:{Utils.get_title_port(cfg)}/197/MaimaiServlet/"

else:
self.old_server = f"http://{self.core_config.title.hostname}/SDEY/197/"
self.old_server = f"http://{self.core_config.title.hostname}/197/MaimaiServlet/"

def handle_get_game_setting_api_request(self, data: Dict):
# if reboot start/end time is not defined use the default behavior of being a few hours ago
if self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "":
if self.core_config.title.reboot_start_time == "" or self.core_config.title.reboot_end_time == "":
reboot_start = datetime.strftime(
datetime.utcnow() + timedelta(hours=6), self.date_time_format
)
@@ -43,8 +45,8 @@ class Mai2Base:
current_jst = datetime.now(pytz.timezone('Asia/Tokyo')).date()

# parse config start/end times into datetime
reboot_start_time = datetime.strptime(self.core_cfg.title.reboot_start_time, "%H:%M")
reboot_end_time = datetime.strptime(self.core_cfg.title.reboot_end_time, "%H:%M")
reboot_start_time = datetime.strptime(self.core_config.title.reboot_start_time, "%H:%M")
reboot_end_time = datetime.strptime(self.core_config.title.reboot_end_time, "%H:%M")

# offset datetimes with current date/time
reboot_start_time = reboot_start_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo'))
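The maimai hunk above is cut off mid-calculation, but the intent of the reboot-window handling is visible: when no reboot times are configured, the window defaults to a few hours in the past relative to JST (UTC+6h is JST−3h), and when HH:MM values are configured they are parsed and pinned to today's date in Asia/Tokyo. The helper below is a standalone sketch of that computation; the +7 hour end of the default window is an assumption, since that line falls outside the visible hunk.

from datetime import datetime, timedelta
from typing import Tuple

import pytz

DATE_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"


def reboot_window(start_cfg: str, end_cfg: str) -> Tuple[str, str]:
    # No configured times: report a window that has already passed in JST.
    # (+6h/+7h on UTC sits a few hours behind UTC+9; the +7h end is assumed.)
    if start_cfg == "" or end_cfg == "":
        start = datetime.utcnow() + timedelta(hours=6)
        end = datetime.utcnow() + timedelta(hours=7)
        return (
            datetime.strftime(start, DATE_TIME_FORMAT),
            datetime.strftime(end, DATE_TIME_FORMAT),
        )

    # Configured times: parse HH:MM and anchor both to today's date in JST,
    # mirroring the replace(...) call shown at the end of the hunk above.
    jst = pytz.timezone("Asia/Tokyo")
    today = datetime.now(jst).date()
    start = datetime.strptime(start_cfg, "%H:%M").replace(
        year=today.year, month=today.month, day=today.day, tzinfo=jst
    )
    end = datetime.strptime(end_cfg, "%H:%M").replace(
        year=today.year, month=today.month, day=today.day, tzinfo=jst
    )
    return (
        datetime.strftime(start, DATE_TIME_FORMAT),
        datetime.strftime(end, DATE_TIME_FORMAT),
    )
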
Some files were not shown because too many files have changed in this diff.