diff --git a/Dockerfile b/Dockerfile index d55c9e8..f946002 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,10 +12,11 @@ RUN chmod +x entrypoint.sh COPY index.py index.py COPY dbutils.py dbutils.py +COPY read.py read.py ADD core core ADD titles titles ADD config config ADD logs logs ADD cert cert -ENTRYPOINT [ "/app/entrypoint.sh" ] \ No newline at end of file +ENTRYPOINT [ "/app/entrypoint.sh" ] diff --git a/changelog.md b/changelog.md index e1ecbb9..d33d08c 100644 --- a/changelog.md +++ b/changelog.md @@ -13,6 +13,13 @@ Documenting updates to ARTEMiS, to be updated every time the master branch is pu + Frontend to download profile added + Importer to import profiles added +## 20231015 +### maimai DX ++ Added support for FESTiVAL PLUS + +### Card Maker ++ Added support for maimai DX FESTiVAL PLUS + ## 20230716 ### General + Docker files added (#19) diff --git a/core/aimedb.py b/core/aimedb.py index 3ac8b9d..5388600 100644 --- a/core/aimedb.py +++ b/core/aimedb.py @@ -145,7 +145,15 @@ class AimedbProtocol(Protocol): def handle_lookup(self, data: bytes, resp_code: int) -> ADBBaseResponse: req = ADBLookupRequest(data) user_id = self.data.card.get_user_id_from_card(req.access_code) + is_banned = self.data.card.get_card_banned(req.access_code) + is_locked = self.data.card.get_card_locked(req.access_code) ret = ADBLookupResponse.from_req(req.head, user_id) + if is_banned and is_locked: + ret.head.status = ADBStatus.BAN_SYS_USER + elif is_banned: + ret.head.status = ADBStatus.BAN_SYS + elif is_locked: + ret.head.status = ADBStatus.LOCK_USER self.logger.info( @@ -157,8 +165,17 @@ class AimedbProtocol(Protocol): req = ADBLookupRequest(data) user_id = self.data.card.get_user_id_from_card(req.access_code) + is_banned = self.data.card.get_card_banned(req.access_code) + is_locked = self.data.card.get_card_locked(req.access_code) + ret = ADBLookupExResponse.from_req(req.head, user_id) - + if is_banned and is_locked: + ret.head.status = ADBStatus.BAN_SYS_USER + elif is_banned: + 
ret.head.status = ADBStatus.BAN_SYS + elif is_locked: + ret.head.status = ADBStatus.LOCK_USER + self.logger.info( f"access_code {req.access_code} -> user_id {ret.user_id}" ) @@ -237,7 +254,7 @@ class AimedbProtocol(Protocol): def handle_register(self, data: bytes, resp_code: int) -> bytes: req = ADBLookupRequest(data) user_id = -1 - + if self.config.server.allow_user_registration: user_id = self.data.user.create_user() diff --git a/core/allnet.py b/core/allnet.py index 6610c23..8463f6e 100644 --- a/core/allnet.py +++ b/core/allnet.py @@ -16,10 +16,11 @@ from os import path import urllib.parse import math -from core.config import CoreConfig -from core.utils import Utils -from core.data import Data -from core.const import * +from .config import CoreConfig +from .utils import Utils +from .data import Data +from .const import * +from .title import TitleServlet BILLING_DT_FORMAT: Final[str] = "%Y%m%d%H%M%S" @@ -33,13 +34,67 @@ class ALLNET_STAT(Enum): bad_machine = -2 bad_shop = -3 +class DLI_STATUS(Enum): + START = 0 + GET_DOWNLOAD_CONFIGURATION = 1 + WAIT_DOWNLOAD = 2 + DOWNLOADING = 3 + + NOT_SPECIFY_DLI = 100 + ONLY_POST_REPORT = 101 + STOPPED_BY_APP_RELEASE = 102 + STOPPED_BY_OPT_RELEASE = 103 + + DOWNLOAD_COMPLETE_RECENTLY = 110 + + DOWNLOAD_COMPLETE_WAIT_RELEASE_TIME = 120 + DOWNLOAD_COMPLETE_BUT_NOT_SYNC_SERVER = 121 + DOWNLOAD_COMPLETE_BUT_NOT_FIRST_RESUME = 122 + DOWNLOAD_COMPLETE_BUT_NOT_FIRST_LAUNCH = 123 + DOWNLOAD_COMPLETE_WAIT_UPDATE = 124 + + DOWNLOAD_COMPLETE_AND_ALREADY_UPDATE = 130 + + ERROR_AUTH_FAILURE = 200 + + ERROR_GET_DLI_HTTP = 300 + ERROR_GET_DLI = 301 + ERROR_PARSE_DLI = 302 + ERROR_INVALID_GAME_ID = 303 + ERROR_INVALID_IMAGE_LIST = 304 + ERROR_GET_DLI_APP = 305 + + ERROR_GET_BOOT_ID = 400 + ERROR_ACCESS_SERVER = 401 + ERROR_NO_IMAGE = 402 + ERROR_ACCESS_IMAGE = 403 + + ERROR_DOWNLOAD_APP = 500 + ERROR_DOWNLOAD_OPT = 501 + + ERROR_DISK_FULL = 600 + ERROR_UNINSTALL = 601 + ERROR_INSTALL_APP = 602 + ERROR_INSTALL_OPT = 603 + + 
ERROR_GET_DLI_INTERNAL = 900 + ERROR_ICF = 901 + ERROR_CHECK_RELEASE_INTERNAL = 902 + UNKNOWN = 999 # Not the actual enum val but it needs to be here as a catch-all + + @classmethod + def from_int(cls, num: int) -> "DLI_STATUS": + try: + return cls(num) + except ValueError: + return cls.UNKNOWN + class AllnetServlet: def __init__(self, core_cfg: CoreConfig, cfg_folder: str): super().__init__() self.config = core_cfg self.config_folder = cfg_folder self.data = Data(core_cfg) - self.uri_registry: Dict[str, Tuple[str, str]] = {} self.logger = logging.getLogger("allnet") if not hasattr(self.logger, "initialized"): @@ -70,18 +125,8 @@ class AllnetServlet: if len(plugins) == 0: self.logger.error("No games detected!") - for _, mod in plugins.items(): - if hasattr(mod, "index") and hasattr(mod.index, "get_allnet_info"): - for code in mod.game_codes: - enabled, uri, host = mod.index.get_allnet_info( - code, self.config, self.config_folder - ) - - if enabled: - self.uri_registry[code] = (uri, host) - self.logger.info( - f"Serving {len(self.uri_registry)} game codes port {core_cfg.allnet.port}" + f"Serving {len(TitleServlet.title_registry)} game codes port {core_cfg.allnet.port}" ) def handle_poweron(self, request: Request, _: Dict): @@ -147,7 +192,7 @@ class AllnetServlet: resp_dict = {k: v for k, v in vars(resp).items() if v is not None} return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8") - elif not arcade["ip"] or arcade["ip"] is None and self.config.server.strict_ip_checking: + elif (not arcade["ip"] or arcade["ip"] is None) and self.config.server.strict_ip_checking: msg = f"Serial {req.serial} attempted allnet auth from bad IP {req.ip}, but arcade {arcade['id']} has no IP set! (strict checking enabled)." 
self.data.base.log_event( "allnet", "ALLNET_AUTH_NO_SHOP_IP", logging.ERROR, msg @@ -190,7 +235,7 @@ class AllnetServlet: arcade["timezone"] if arcade["timezone"] is not None else "+0900" if req.format_ver == 3 else "+09:00" ) - if req.game_id not in self.uri_registry: + if req.game_id not in TitleServlet.title_registry: if not self.config.server.is_develop: msg = f"Unrecognised game {req.game_id} attempted allnet auth from {request_ip}." self.data.base.log_event( @@ -215,11 +260,9 @@ class AllnetServlet: self.logger.debug(f"Allnet response: {resp_str}") return (resp_str + "\n").encode("utf-8") - resp.uri, resp.host = self.uri_registry[req.game_id] - + int_ver = req.ver.replace(".", "") - resp.uri = resp.uri.replace("$v", int_ver) - resp.host = resp.host.replace("$v", int_ver) + resp.uri, resp.host = TitleServlet.title_registry[req.game_id].get_allnet_info(req.game_id, int(int_ver), req.serial) msg = f"{req.serial} authenticated from {request_ip}: {req.game_id} v{req.ver}" self.data.base.log_event("allnet", "ALLNET_AUTH_SUCCESS", logging.INFO, msg) @@ -315,6 +358,7 @@ class AllnetServlet: def handle_dlorder_report(self, request: Request, match: Dict) -> bytes: req_raw = request.content.getvalue() + client_ip = Utils.get_ip_addr(request) try: req_dict: Dict = json.loads(req_raw) except Exception as e: @@ -332,11 +376,17 @@ class AllnetServlet: self.logger.warning(f"Failed to parse DL Report: Invalid format - contains neither appimage nor optimage") return "NG" - dl_report_data = DLReport(dl_data, dl_data_type) + rep = DLReport(dl_data, dl_data_type) - if not dl_report_data.validate(): - self.logger.warning(f"Failed to parse DL Report: Invalid format - {dl_report_data.err}") + if not rep.validate(): + self.logger.warning(f"Failed to parse DL Report: Invalid format - {rep.err}") return "NG" + + msg = f"{rep.serial} @ {client_ip} reported {rep.rep_type.name} download state {rep.rf_state.name} for {rep.gd} v{rep.dav}:"\ + f" {rep.tdsc}/{rep.tsc} segments downloaded for 
working files {rep.wfl} with {rep.dfl if rep.dfl else 'none'} complete." + + self.data.base.log_event("allnet", "DL_REPORT", logging.INFO, msg, dl_data) + self.logger.info(msg) return "OK" @@ -524,7 +574,7 @@ class AllnetPowerOnResponse: self.stat = 1 self.uri = "" self.host = "" - self.place_id = "123" + self.place_id = "0123" self.name = "ARTEMiS" self.nickname = "ARTEMiS" self.region0 = "1" @@ -739,13 +789,13 @@ class DLReport: self.ot = data.get("ot") self.rt = data.get("rt") self.as_ = data.get("as") - self.rf_state = data.get("rf_state") + self.rf_state = DLI_STATUS.from_int(data.get("rf_state")) self.gd = data.get("gd") self.dav = data.get("dav") self.wdav = data.get("wdav") # app only self.dov = data.get("dov") self.wdov = data.get("wdov") # app only - self.__type = report_type + self.rep_type = report_type self.err = "" def validate(self) -> bool: @@ -753,14 +803,6 @@ class DLReport: self.err = "serial not provided" return False - if self.dfl is None: - self.err = "dfl not provided" - return False - - if self.wfl is None: - self.err = "wfl not provided" - return False - if self.tsc is None: self.err = "tsc not provided" return False @@ -769,18 +811,6 @@ class DLReport: self.err = "tdsc not provided" return False - if self.at is None: - self.err = "at not provided" - return False - - if self.ot is None: - self.err = "ot not provided" - return False - - if self.rt is None: - self.err = "rt not provided" - return False - if self.as_ is None: self.err = "as not provided" return False @@ -801,11 +831,11 @@ class DLReport: self.err = "dov not provided" return False - if (self.wdav is None or self.wdov is None) and self.__type == DLIMG_TYPE.app: + if (self.wdav is None or self.wdov is None) and self.rep_type == DLIMG_TYPE.app: self.err = "wdav or wdov not provided in app image" return False - if (self.wdav is not None or self.wdov is not None) and self.__type == DLIMG_TYPE.opt: + if (self.wdav is not None or self.wdov is not None) and self.rep_type == 
DLIMG_TYPE.opt: self.err = "wdav or wdov provided in opt image" return False diff --git a/core/config.py b/core/config.py index 83a941a..684ec15 100644 --- a/core/config.py +++ b/core/config.py @@ -36,6 +36,12 @@ class ServerConfig: self.__config, "core", "server", "is_develop", default=True ) + @property + def is_using_proxy(self) -> bool: + return CoreConfig.get_config_field( + self.__config, "core", "server", "is_using_proxy", default=False + ) + @property def threading(self) -> bool: return CoreConfig.get_config_field( @@ -84,6 +90,36 @@ class TitleConfig: return CoreConfig.get_config_field( self.__config, "core", "title", "port", default=8080 ) + + @property + def port_ssl(self) -> int: + return CoreConfig.get_config_field( + self.__config, "core", "title", "port_ssl", default=0 + ) + + @property + def ssl_key(self) -> str: + return CoreConfig.get_config_field( + self.__config, "core", "title", "ssl_key", default="cert/title.key" + ) + + @property + def ssl_cert(self) -> str: + return CoreConfig.get_config_field( + self.__config, "core", "title", "ssl_cert", default="cert/title.pem" + ) + + @property + def reboot_start_time(self) -> str: + return CoreConfig.get_config_field( + self.__config, "core", "title", "reboot_start_time", default="" + ) + + @property + def reboot_end_time(self) -> str: + return CoreConfig.get_config_field( + self.__config, "core", "title", "reboot_end_time", default="" + ) class DatabaseConfig: @@ -150,6 +186,12 @@ class DatabaseConfig: default=10000, ) + @property + def enable_memcached(self) -> bool: + return CoreConfig.get_config_field( + self.__config, "core", "database", "enable_memcached", default=True + ) + @property def memcached_host(self) -> str: return CoreConfig.get_config_field( diff --git a/core/data/cache.py b/core/data/cache.py index cabf597..1490826 100644 --- a/core/data/cache.py +++ b/core/data/cache.py @@ -17,7 +17,7 @@ except ModuleNotFoundError: def cached(lifetime: int = 10, extra_key: Any = None) -> Callable: def 
_cached(func: Callable) -> Callable: - if has_mc: + if has_mc and (cfg and cfg.database.enable_memcached): hostname = "127.0.0.1" if cfg: hostname = cfg.database.memcached_host diff --git a/core/data/schema/arcade.py b/core/data/schema/arcade.py index ce7c30a..2fb8e43 100644 --- a/core/data/schema/arcade.py +++ b/core/data/schema/arcade.py @@ -41,7 +41,6 @@ machine = Table( Column("country", String(3)), # overwrites if not null Column("timezone", String(255)), Column("ota_enable", Boolean), - Column("is_cab", Boolean), Column("memo", String(255)), Column("is_cab", Boolean), Column("data", JSON), diff --git a/core/data/schema/card.py b/core/data/schema/card.py index d8f5fc0..a95684e 100644 --- a/core/data/schema/card.py +++ b/core/data/schema/card.py @@ -64,6 +64,27 @@ class CardData(BaseData): return int(card["user"]) + def get_card_banned(self, access_code: str) -> Optional[bool]: + """ + Given a 20 digit access code as a string, check if the card is banned + """ + card = self.get_card_by_access_code(access_code) + if card is None: + return None + if card["is_banned"]: + return True + return False + def get_card_locked(self, access_code: str) -> Optional[bool]: + """ + Given a 20 digit access code as a string, check if the card is locked + """ + card = self.get_card_by_access_code(access_code) + if card is None: + return None + if card["is_locked"]: + return True + return False + def delete_card(self, card_id: int) -> None: sql = aime_card.delete(aime_card.c.id == card_id) diff --git a/core/data/schema/versions/SBZV_5_rollback.sql b/core/data/schema/versions/SBZV_5_rollback.sql new file mode 100644 index 0000000..851d357 --- /dev/null +++ b/core/data/schema/versions/SBZV_5_rollback.sql @@ -0,0 +1,2 @@ +ALTER TABLE diva_profile + DROP skn_eqp; \ No newline at end of file diff --git a/core/data/schema/versions/SBZV_6_upgrade.sql b/core/data/schema/versions/SBZV_6_upgrade.sql new file mode 100644 index 0000000..d417506 --- /dev/null +++ 
b/core/data/schema/versions/SBZV_6_upgrade.sql @@ -0,0 +1,2 @@ +ALTER TABLE diva_profile + ADD skn_eqp INT NOT NULL DEFAULT 0; \ No newline at end of file diff --git a/core/data/schema/versions/SDDT_5_rollback.sql b/core/data/schema/versions/SDDT_5_rollback.sql new file mode 100644 index 0000000..007716c --- /dev/null +++ b/core/data/schema/versions/SDDT_5_rollback.sql @@ -0,0 +1,22 @@ +SET FOREIGN_KEY_CHECKS=0; + +ALTER TABLE ongeki_user_event_point DROP COLUMN version; +ALTER TABLE ongeki_user_event_point DROP COLUMN rank; +ALTER TABLE ongeki_user_event_point DROP COLUMN type; +ALTER TABLE ongeki_user_event_point DROP COLUMN date; + +ALTER TABLE ongeki_user_tech_event DROP COLUMN version; + +ALTER TABLE ongeki_user_mission_point DROP COLUMN version; + +ALTER TABLE ongeki_static_events DROP COLUMN endDate; + +DROP TABLE ongeki_tech_event_ranking; +DROP TABLE ongeki_static_music_ranking_list; +DROP TABLE ongeki_static_rewards; +DROP TABLE ongeki_static_present_list; +DROP TABLE ongeki_static_tech_music; +DROP TABLE ongeki_static_client_testmode; +DROP TABLE ongeki_static_game_point; + +SET FOREIGN_KEY_CHECKS=1; diff --git a/core/data/schema/versions/SDDT_6_upgrade.sql b/core/data/schema/versions/SDDT_6_upgrade.sql new file mode 100644 index 0000000..82d5336 --- /dev/null +++ b/core/data/schema/versions/SDDT_6_upgrade.sql @@ -0,0 +1,98 @@ +SET FOREIGN_KEY_CHECKS=0; + +ALTER TABLE ongeki_user_event_point ADD COLUMN version INTEGER NOT NULL; +ALTER TABLE ongeki_user_event_point ADD COLUMN rank INTEGER; +ALTER TABLE ongeki_user_event_point ADD COLUMN type INTEGER NOT NULL; +ALTER TABLE ongeki_user_event_point ADD COLUMN date VARCHAR(25); + +ALTER TABLE ongeki_user_tech_event ADD COLUMN version INTEGER NOT NULL; + +ALTER TABLE ongeki_user_mission_point ADD COLUMN version INTEGER NOT NULL; + +ALTER TABLE ongeki_static_events ADD COLUMN endDate TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP; + +CREATE TABLE ongeki_tech_event_ranking ( + id INT PRIMARY KEY NOT NULL 
AUTO_INCREMENT, + user INT NOT NULL, + version INT NOT NULL, + date VARCHAR(25), + eventId INT NOT NULL, + rank INT, + totalPlatinumScore INT NOT NULL, + totalTechScore INT NOT NULL, + UNIQUE KEY ongeki_tech_event_ranking_uk (user, eventId), + CONSTRAINT ongeki_tech_event_ranking_ibfk1 FOREIGN KEY (user) REFERENCES aime_user(id) ON DELETE CASCADE ON UPDATE CASCADE +); + +CREATE TABLE ongeki_static_music_ranking_list ( + id INT PRIMARY KEY NOT NULL AUTO_INCREMENT, + version INT NOT NULL, + musicId INT NOT NULL, + point INT NOT NULL, + userName VARCHAR(255), + UNIQUE KEY ongeki_static_music_ranking_list_uk (version, musicId) +); + +CREATE TABLE ongeki_static_rewards ( + id INT PRIMARY KEY NOT NULL AUTO_INCREMENT, + version INT NOT NULL, + rewardId INT NOT NULL, + rewardName VARCHAR(255) NOT NULL, + itemKind INT NOT NULL, + itemId INT NOT NULL, + UNIQUE KEY ongeki_tech_event_ranking_uk (version, rewardId) +); + +CREATE TABLE ongeki_static_present_list ( + id INT PRIMARY KEY NOT NULL AUTO_INCREMENT, + version INT NOT NULL, + presentId INT NOT NULL, + presentName VARCHAR(255) NOT NULL, + rewardId INT NOT NULL, + stock INT NOT NULL, + message VARCHAR(255), + startDate VARCHAR(25) NOT NULL, + endDate VARCHAR(25) NOT NULL, + UNIQUE KEY ongeki_static_present_list_uk (version, presentId, rewardId) +); + +CREATE TABLE ongeki_static_tech_music ( + id INT PRIMARY KEY NOT NULL AUTO_INCREMENT, + version INT NOT NULL, + eventId INT NOT NULL, + musicId INT NOT NULL, + level INT NOT NULL, + UNIQUE KEY ongeki_static_tech_music_uk (version, musicId, eventId) +); + +CREATE TABLE ongeki_static_client_testmode ( + id INT PRIMARY KEY NOT NULL AUTO_INCREMENT, + regionId INT NOT NULL, + placeId INT NOT NULL, + clientId VARCHAR(11) NOT NULL, + updateDate TIMESTAMP NOT NULL, + isDelivery BOOLEAN NOT NULL, + groupId INT NOT NULL, + groupRole INT NOT NULL, + continueMode INT NOT NULL, + selectMusicTime INT NOT NULL, + advertiseVolume INT NOT NULL, + eventMode INT NOT NULL, + eventMusicNum INT 
NOT NULL, + patternGp INT NOT NULL, + limitGp INT NOT NULL, + maxLeverMovable INT NOT NULL, + minLeverMovable INT NOT NULL, + UNIQUE KEY ongeki_static_client_testmode_uk (clientId) +); + +CREATE TABLE ongeki_static_game_point ( + id INT PRIMARY KEY NOT NULL AUTO_INCREMENT, + type INT NOT NULL, + cost INT NOT NULL, + startDate VARCHAR(25) NOT NULL DEFAULT "2000-01-01 05:00:00.0", + endDate VARCHAR(25) NOT NULL DEFAULT "2099-01-01 05:00:00.0", + UNIQUE KEY ongeki_static_game_point_uk (type) +); + +SET FOREIGN_KEY_CHECKS=1; diff --git a/core/data/schema/versions/SDEZ_7_rollback.sql b/core/data/schema/versions/SDEZ_7_rollback.sql new file mode 100644 index 0000000..6bb9270 --- /dev/null +++ b/core/data/schema/versions/SDEZ_7_rollback.sql @@ -0,0 +1,10 @@ +ALTER TABLE mai2_profile_detail + DROP COLUMN mapStock; + +ALTER TABLE mai2_profile_extend + DROP COLUMN selectResultScoreViewType; + +ALTER TABLE mai2_profile_option + DROP COLUMN outFrameType, + DROP COLUMN touchVolume, + DROP COLUMN breakSlideVolume; diff --git a/core/data/schema/versions/SDEZ_8_upgrade.sql b/core/data/schema/versions/SDEZ_8_upgrade.sql new file mode 100644 index 0000000..c782d81 --- /dev/null +++ b/core/data/schema/versions/SDEZ_8_upgrade.sql @@ -0,0 +1,10 @@ +ALTER TABLE mai2_profile_detail + ADD mapStock INT NULL AFTER playCount; + +ALTER TABLE mai2_profile_extend + ADD selectResultScoreViewType INT NULL AFTER selectResultDetails; + +ALTER TABLE mai2_profile_option + ADD outFrameType INT NULL AFTER dispCenter, + ADD touchVolume INT NULL AFTER slideVolume, + ADD breakSlideVolume INT NULL AFTER slideVolume; diff --git a/core/mucha.py b/core/mucha.py index 7793f4b..7c6f0ab 100644 --- a/core/mucha.py +++ b/core/mucha.py @@ -7,15 +7,15 @@ from datetime import datetime from Crypto.Cipher import Blowfish import pytz -from core import CoreConfig -from core.utils import Utils - +from .config import CoreConfig +from .utils import Utils +from .title import TitleServlet class MuchaServlet: + mucha_registry: 
List[str] = [] def __init__(self, cfg: CoreConfig, cfg_dir: str) -> None: self.config = cfg self.config_dir = cfg_dir - self.mucha_registry: List[str] = [] self.logger = logging.getLogger("mucha") log_fmt_str = "[%(asctime)s] Mucha | %(levelname)s | %(message)s" @@ -37,11 +37,9 @@ class MuchaServlet: self.logger.setLevel(cfg.mucha.loglevel) coloredlogs.install(level=cfg.mucha.loglevel, logger=self.logger, fmt=log_fmt_str) - all_titles = Utils.get_all_titles() - - for _, mod in all_titles.items(): - if hasattr(mod, "index") and hasattr(mod.index, "get_mucha_info"): - enabled, game_cd = mod.index.get_mucha_info( + for _, mod in TitleServlet.title_registry.items(): + if hasattr(mod, "get_mucha_info"): + enabled, game_cd = mod.get_mucha_info( self.config, self.config_dir ) if enabled: diff --git a/core/title.py b/core/title.py index 5006898..3fdb30c 100644 --- a/core/title.py +++ b/core/title.py @@ -1,4 +1,4 @@ -from typing import Dict, Any +from typing import Dict, List, Tuple import logging, coloredlogs from logging.handlers import TimedRotatingFileHandler from twisted.web.http import Request @@ -7,14 +7,88 @@ from core.config import CoreConfig from core.data import Data from core.utils import Utils +class BaseServlet: + def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None: + self.core_cfg = core_cfg + self.game_cfg = None + self.logger = logging.getLogger("title") + + @classmethod + def is_game_enabled(cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str) -> bool: + """Called during boot to check if a specific game code should load. 
+ + Args: + game_code (str): 4 character game code + core_cfg (CoreConfig): CoreConfig class + cfg_dir (str): Config directory + + Returns: + bool: True if the game is enabled and set to run, False otherwise + + """ + return False + + def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]: + """Called during boot to get all matcher endpoints this title servlet handles + + Returns: + Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]: A 2-length tuple where offset 0 is GET and offset 1 is POST, + containing a list of 3-length tuples where offset 0 is the name of the function in the handler that should be called, offset 1 + is the matching string, and offset 2 is a dict containing rules for the matcher. + """ + return ( + [("render_GET", "/{game}/{version}/{endpoint}", {'game': R'S...'})], + [("render_POST", "/{game}/{version}/{endpoint}", {'game': R'S...'})] + ) + + def setup(self) -> None: + """Called once during boot, should contain any additional setup the handler must do, such as starting any sub-services + """ + pass + + def get_allnet_info(self, game_code: str, game_ver: int, keychip: str) -> Tuple[str, str]: + """Called any time a request to PowerOn is made to retrieve the url/host strings to be sent back to the game + + Args: + game_code (str): 4 character game code + game_ver (int): version, expressed as an integer by multiplying by 100 (1.10 -> 110) + keychip (str): Keychip serial of the requesting machine, can be used to deliver specific URIs to different machines + + Returns: + Tuple[str, str]: A tuple where offset 0 is the allnet uri field, and offset 1 is the allnet host field + """ + if not self.core_cfg.server.is_using_proxy and Utils.get_title_port(self.core_cfg) != 80: + return (f"http://{self.core_cfg.title.hostname}:{Utils.get_title_port(self.core_cfg)}/{game_code}/{game_ver}/", "") + + return (f"http://{self.core_cfg.title.hostname}/{game_code}/{game_ver}/", "") + + def 
get_mucha_info(self, core_cfg: CoreConfig, cfg_dir: str) -> Tuple[bool, str]: + """Called once during boot to check if this game is a mucha game + + Args: + core_cfg (CoreConfig): CoreConfig class + cfg_dir (str): Config directory + + Returns: + Tuple[bool, str]: Tuple where offset 0 is true if the game is enabled, false otherwise, and offset 1 is the game CD + """ + return (False, "") + + def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes: + self.logger.warn(f"{game_code} Does not dispatch POST") + return None + + def render_GET(self, request: Request, game_code: str, matchers: Dict) -> bytes: + self.logger.warn(f"{game_code} Does not dispatch GET") + return None class TitleServlet: + title_registry: Dict[str, BaseServlet] = {} def __init__(self, core_cfg: CoreConfig, cfg_folder: str): super().__init__() self.config = core_cfg self.config_folder = cfg_folder self.data = Data(core_cfg) - self.title_registry: Dict[str, Any] = {} self.logger = logging.getLogger("title") if not hasattr(self.logger, "initialized"): @@ -43,62 +117,62 @@ class TitleServlet: plugins = Utils.get_all_titles() for folder, mod in plugins.items(): - if hasattr(mod, "game_codes") and hasattr(mod, "index"): + if hasattr(mod, "game_codes") and hasattr(mod, "index") and hasattr(mod.index, "is_game_enabled"): should_call_setup = True + game_servlet: BaseServlet = mod.index + game_codes: List[str] = mod.game_codes + + for code in game_codes: + if game_servlet.is_game_enabled(code, self.config, self.config_folder): + handler_cls = game_servlet(self.config, self.config_folder) - if hasattr(mod.index, "get_allnet_info"): - for code in mod.game_codes: - enabled, _, _ = mod.index.get_allnet_info( - code, self.config, self.config_folder - ) + if hasattr(handler_cls, "setup") and should_call_setup: + handler_cls.setup() + should_call_setup = False - if enabled: - handler_cls = mod.index(self.config, self.config_folder) - - if hasattr(handler_cls, "setup") and 
should_call_setup: - handler_cls.setup() - should_call_setup = False - - self.title_registry[code] = handler_cls - - else: - self.logger.warning(f"Game {folder} has no get_allnet_info") + self.title_registry[code] = handler_cls else: - self.logger.error(f"{folder} missing game_code or index in __init__.py") + self.logger.error(f"{folder} missing game_code or index in __init__.py, or is_game_enabled in index") self.logger.info( f"Serving {len(self.title_registry)} game codes {'on port ' + str(core_cfg.title.port) if core_cfg.title.port > 0 else ''}" ) def render_GET(self, request: Request, endpoints: dict) -> bytes: - code = endpoints["game"] + code = endpoints["title"] + subaction = endpoints['subaction'] + if code not in self.title_registry: self.logger.warning(f"Unknown game code {code}") request.setResponseCode(404) return b"" index = self.title_registry[code] - if not hasattr(index, "render_GET"): - self.logger.warning(f"{code} does not dispatch GET") - request.setResponseCode(405) + handler = getattr(index, f"{subaction}", None) + if handler is None: + self.logger.error(f"{code} does not have handler for GET subaction {subaction}") + request.setResponseCode(500) return b"" - return index.render_GET(request, int(endpoints["version"]), endpoints["endpoint"]) + return handler(request, code, endpoints) def render_POST(self, request: Request, endpoints: dict) -> bytes: - code = endpoints["game"] + code = endpoints["title"] + subaction = endpoints['subaction'] + if code not in self.title_registry: self.logger.warning(f"Unknown game code {code}") request.setResponseCode(404) return b"" index = self.title_registry[code] - if not hasattr(index, "render_POST"): - self.logger.warning(f"{code} does not dispatch POST") - request.setResponseCode(405) + handler = getattr(index, f"{subaction}", None) + if handler is None: + self.logger.error(f"{code} does not have handler for POST subaction {subaction}") + request.setResponseCode(500) return b"" - return index.render_POST( - 
request, int(endpoints["version"]), endpoints["endpoint"] - ) + endpoints.pop("title") + endpoints.pop("subaction") + return handler(request, code, endpoints) diff --git a/core/utils.py b/core/utils.py index f364785..325272e 100644 --- a/core/utils.py +++ b/core/utils.py @@ -5,8 +5,11 @@ import logging import importlib from os import walk +from .config import CoreConfig class Utils: + real_title_port = None + real_title_port_ssl = None @classmethod def get_all_titles(cls) -> Dict[str, ModuleType]: ret: Dict[str, Any] = {} @@ -33,3 +36,27 @@ class Utils: if b"x-forwarded-for" in req.getAllHeaders() else req.getClientAddress().host ) + + @classmethod + def get_title_port(cls, cfg: CoreConfig): + if cls.real_title_port is not None: return cls.real_title_port + + if cfg.title.port == 0: + cls.real_title_port = cfg.allnet.port + + else: + cls.real_title_port = cfg.title.port + + return cls.real_title_port + + @classmethod + def get_title_port_ssl(cls, cfg: CoreConfig): + if cls.real_title_port_ssl is not None: return cls.real_title_port_ssl + + if cfg.title.port_ssl == 0: + cls.real_title_port_ssl = 443 + + else: + cls.real_title_port_ssl = cfg.title.port_ssl + + return cls.real_title_port_ssl diff --git a/docker-compose.yml b/docker-compose.yml index c43b6e5..6a35355 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,22 +5,23 @@ services: build: . 
volumes: - ./aime:/app/aime + - ./configs/config:/app/config environment: CFG_DEV: 1 CFG_CORE_SERVER_HOSTNAME: 0.0.0.0 CFG_CORE_DATABASE_HOST: ma.db CFG_CORE_MEMCACHED_HOSTNAME: ma.memcached - CFG_CORE_AIMEDB_KEY: keyhere + CFG_CORE_AIMEDB_KEY: CFG_CHUNI_SERVER_LOGLEVEL: debug ports: - - "80:80" - - "8443:8443" - - "22345:22345" + - "80:80" + - "8443:8443" + - "22345:22345" - - "8080:8080" - - "8090:8090" + - "8080:8080" + - "8090:8090" depends_on: db: @@ -28,21 +29,29 @@ services: db: hostname: ma.db - image: mysql:8.0.31-debian + image: yobasystems/alpine-mariadb:10.11.5 environment: MYSQL_DATABASE: aime MYSQL_USER: aime MYSQL_PASSWORD: aime MYSQL_ROOT_PASSWORD: AimeRootPassword + MYSQL_CHARSET: utf8mb4 + MYSQL_COLLATION: utf8mb4_general_ci + ##Note: expose port 3306 to allow read.py importer into database, comment out when not needed + #ports: + # - "3306:3306" + ##Note: uncomment to allow mysql to create a persistent database, leave commented if you want to rebuild database from scratch often + #volumes: + # - ./AimeDB:/var/lib/mysql healthcheck: - test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"] + test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost", "-pAimeRootPassword"] timeout: 5s retries: 5 - memcached: hostname: ma.memcached - image: memcached:1.6.17-bullseye + image: memcached:1.6.22-alpine3.18 + command: [ "memcached", "-m", "1024", "-I", "128m" ] phpmyadmin: hostname: ma.phpmyadmin @@ -53,5 +62,5 @@ services: PMA_PASSWORD: AimeRootPassword APACHE_PORT: 8080 ports: - - "8080:8080" + - "9090:8080" diff --git a/docs/INSTALL_DOCKER.md b/docs/INSTALL_DOCKER.md new file mode 100644 index 0000000..edaf75b --- /dev/null +++ b/docs/INSTALL_DOCKER.md @@ -0,0 +1,246 @@ +# ARTEMiS - Docker Installation Guide + +This step-by-step guide will allow you to install a Contenerized Version of ARTEMiS inside Docker, some steps can be skipped assuming you already have pre-requisite components and modules installed. 
+ +This guide assumes using Debian 12 (bookworm-stable) as a Host Operating System for most of packages and modules. + +## Pre-Requisites: + +- Linux-Based Operating System (e.g. Debian, Ubuntu) +- Docker (https://get.docker.com) +- Python 3.9+ +- (optional) Git + +## Install Python3.9+ and Docker + +``` +(if this is a fresh install of the system) +sudo apt update && sudo apt upgrade + +(installs python3 and pip) +sudo apt install python3 python3-pip + +(installs docker) +curl -fsSL https://get.docker.com -o get-docker.sh +sh get-docker.sh + +(optionally install git) +sudo apt install git +``` + +## Get ARTEMiS + +If you installed git, clone into your choice of ARTEMiS git repository, e.g.: +``` +git clone +``` +If not, download the source package, and unpack it to the folder of your choice. + +## Prepare development/home configuration + +To build our Docker setup, first we need to create some folders and copy some files around +- Create 'aime', 'configs', 'AimeDB', and 'logs' folder in ARTEMiS root folder (where all source files exist) +- Inside configs folder, create 'config' folder, and copy all .yaml files from example_config to config (that's all files without nginx_example.conf) +- Edit .yaml files inside configs/config to suit your server needs +- Edit core.yaml inside configs/config: +``` +set server.listen_address: to "0.0.0.0" +set title.hostname: to machine's IP address, e.g. 
"192.168.x.x", depending on your network, or actual hostname if your configuration is already set for dns resolve +set database.host: to "ma.db" +set database.memcached_host: to "ma.memcached" +set aimedb.key: to "" +``` + +## Running Docker Compose + +After configuring, go to ARTEMiS root folder, and execute: + +``` +docker compose up -d +``` + +("-d" argument means detached or daemon, meaning you will regain control of your terminal and Containers will run in background) + +This will start pulling and building required images from network, after it's done, a development server should be running, with server accessible under machine's IP, frontend with port 8090, and PHPMyAdmin under port 9090. + +- To turn off the server, from ARTEMiS root folder, execute: + +``` +docker compose down +``` + +- If you changed some files around, and don't see your changes applied, execute: + +``` +(turn off the server) +docker compose down +(rebuild) +docker compose build +(turn on) +docker compose up -d +``` + +- If you need to see logs from containers running, execute: + +``` +docker compose logs +``` + +- add '-f' to the end if you want to follow logs. + +## Running commands + +If you need to execute python scripts supplied with the application, use `docker compose exec app python3