From a83b717821f1937cb325de1ef11f2cbe3f7c76f1 Mon Sep 17 00:00:00 2001
From: Hay1tsme
Date: Sun, 19 Feb 2023 14:52:20 -0500
Subject: [PATCH 01/68] carry over database functions from megaime

---
 core/data/database.py | 1703 ++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 1702 insertions(+), 1 deletion(-)

diff --git a/core/data/database.py b/core/data/database.py
index 800b5d0..963d016 100644
--- a/core/data/database.py
+++ b/core/data/database.py
@@ -4,11 +4,14 @@ from sqlalchemy.orm import scoped_session, sessionmaker
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy import create_engine
 from logging.handlers import TimedRotatingFileHandler
+from datetime import datetime
+import importlib, os, json
 from hashlib import sha256
 
 from core.config import CoreConfig
 from core.data.schema import *
+from core.utils import Utils
 
 class Data:
     def __init__(self, cfg: CoreConfig) -> None:
@@ -50,4 +53,1702 @@ class Data:
         coloredlogs.install(cfg.database.loglevel, logger=self.logger, fmt=log_fmt_str)
         self.logger.handler_set = True # type: ignore
-    
\ No newline at end of file
+    def create_database(self):
+        self.logger.info("Creating databases...")
+        try:
+            metadata.create_all(self.__engine.connect())
+        except SQLAlchemyError as e:
+            self.logger.error(f"Failed to create databases! {e}")
+            return
+
+        games = Utils.get_all_titles()
+        for game_dir, game_mod in games.items():
+            try:
+                title_db = game_mod.database(self.config)
+                metadata.create_all(self.__engine.connect())
+
+                self.base.set_schema_ver(game_mod.current_schema_version, game_mod.game_codes[0])
+
+            except Exception as e:
+                self.logger.warning(f"Could not load database schema from {game_dir} - {e}")
+
+        self.logger.info(f"Setting base_schema_ver to {self.schema_ver_latest}")
+        self.base.set_schema_ver(self.schema_ver_latest)
+
+        self.logger.info(f"Setting user auto_increment to {self.config.database.user_table_autoincrement_start}")
+        self.user.reset_autoincrement(self.config.database.user_table_autoincrement_start)
+
+    def recreate_database(self):
+        self.logger.info("Dropping all databases...")
+        self.base.execute("SET FOREIGN_KEY_CHECKS=0")
+        try:
+            metadata.drop_all(self.__engine.connect())
+        except SQLAlchemyError as e:
+            self.logger.error(f"Failed to drop databases! {e}")
+            return
+
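+        # Also drop every per-title schema; only the top level of ./titles is
+        # scanned (hence the break at the end of the outer loop below)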
{e}") + return + + for root, dirs, files in os.walk("./titles"): + for dir in dirs: + if not dir.startswith("__"): + try: + mod = importlib.import_module(f"titles.{dir}") + + try: + title_db = mod.database(self.config) + metadata.drop_all(self.__engine.connect()) + + except Exception as e: + self.logger.warning(f"Could not load database schema from {dir} - {e}") + + except ImportError as e: + self.logger.warning(f"Failed to load database schema dir {dir} - {e}") + break + + self.base.execute("SET FOREIGN_KEY_CHECKS=1") + + self.create_database() + + def migrate_database(self, game: str, version: int, action: str) -> None: + old_ver = self.base.get_schema_ver(game) + sql = "" + + if old_ver is None: + self.logger.error(f"Schema for game {game} does not exist, did you run the creation script?") + return + + if old_ver == version: + self.logger.info(f"Schema for game {game} is already version {old_ver}, nothing to do") + return + + if not os.path.exists(f"core/data/schema/versions/{game.upper()}_{version}_{action}.sql"): + self.logger.error(f"Could not find {action} script {game.upper()}_{version}_{action}.sql in core/data/schema/versions folder") + return + + with open(f"core/data/schema/versions/{game.upper()}_{version}_{action}.sql", "r", encoding="utf-8") as f: + sql = f.read() + + result = self.base.execute(sql) + if result is None: + self.logger.error("Error execuing sql script!") + return None + + result = self.base.set_schema_ver(version, game) + if result is None: + self.logger.error("Error setting version in schema_version table!") + return None + + self.logger.info(f"Successfully migrated {game} to schema version {version}") + + def dump_db(self): + dbname = self.config.database.name + + self.logger.info("Database dumper for use with the reworked schema") + self.logger.info("Dumping users...") + + sql = f"SELECT * FROM `{dbname}`.`user`" + + result = self.base.execute(sql) + if result is None: + self.logger.error("Failed") + return None + users = result.fetchall() + + user_list: List[Dict[str, Any]] = [] + for usr in users: + user_list.append({ + "id": usr["id"], + "username": usr["username"], + "email": usr["email"], + "password": usr["password"], + "permissions": usr["permissions"], + "created_date": datetime.strftime(usr["created_date"], "%Y-%m-%d %H:%M:%S"), + "last_login_date": datetime.strftime(usr["accessed_date"], "%Y-%m-%d %H:%M:%S"), + }) + + self.logger.info(f"Done, found {len(user_list)} users") + with open("dbdump-user.json", "w", encoding="utf-8") as f: + f.write(json.dumps(user_list)) + self.logger.info(f"Saved as dbdump-user.json") + + self.logger.info("Dumping cards...") + + sql = f"SELECT * FROM `{dbname}`.`card`" + + result = self.base.execute(sql) + if result is None: + self.logger.error("Failed") + return None + cards = result.fetchall() + + card_list: List[Dict[str, Any]] = [] + for crd in cards: + card_list.append({ + "id": crd["id"], + "user": crd["user"], + "access_code": crd["access_code"], + "is_locked": crd["is_locked"], + "is_banned": crd["is_banned"], + "created_date": datetime.strftime(crd["created_date"], "%Y-%m-%d %H:%M:%S"), + "last_login_date": datetime.strftime(crd["accessed_date"], "%Y-%m-%d %H:%M:%S"), + }) + + self.logger.info(f"Done, found {len(card_list)} cards") + with open("dbdump-card.json", "w", encoding="utf-8") as f: + f.write(json.dumps(card_list)) + self.logger.info(f"Saved as dbdump-card.json") + + self.logger.info("Dumping arcades...") + + sql = f"SELECT * FROM `{dbname}`.`arcade`" + + result = self.base.execute(sql) + if result is 
+        self.logger.info("Dumping arcades...")
+
+        sql = f"SELECT * FROM `{dbname}`.`arcade`"
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Failed to dump arcades")
+            return None
+        arcades = result.fetchall()
+
+        arcade_list: List[Dict[str, Any]] = []
+        for arc in arcades:
+            arcade_list.append({
+                "id": arc["id"],
+                "name": arc["name"],
+                "nickname": arc["name"],
+                "country": None,
+                "country_id": None,
+                "state": None,
+                "city": None,
+                "region_id": None,
+                "timezone": None,
+            })
+
+        self.logger.info(f"Done, found {len(arcade_list)} arcades")
+        with open("dbdump-arcade.json", "w", encoding="utf-8") as f:
+            f.write(json.dumps(arcade_list))
+        self.logger.info("Saved as dbdump-arcade.json")
+
+        self.logger.info("Dumping machines...")
+
+        sql = f"SELECT * FROM `{dbname}`.`machine`"
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Failed to dump machines")
+            return None
+        machines = result.fetchall()
+
+        machine_list: List[Dict[str, Any]] = []
+        for mech in machines:
+            if "country" in mech["data"]:
+                country = mech["data"]["country"]
+            else:
+                country = None
+
+            if "ota_enable" in mech["data"]:
+                ota_enable = mech["data"]["ota_enable"]
+            else:
+                ota_enable = None
+
+            machine_list.append({
+                "id": mech["id"],
+                "arcade": mech["arcade"],
+                "serial": mech["keychip"],
+                "game": mech["game"],
+                "board": None,
+                "country": country,
+                "timezone": None,
+                "ota_enable": ota_enable,
+                "is_cab": False,
+            })
+
+        self.logger.info(f"Done, found {len(machine_list)} machines")
+        with open("dbdump-machine.json", "w", encoding="utf-8") as f:
+            f.write(json.dumps(machine_list))
+        self.logger.info("Saved as dbdump-machine.json")
+
+        self.logger.info("Dumping arcade owners...")
+
+        sql = f"SELECT * FROM `{dbname}`.`arcade_owner`"
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Failed to dump arcade owners")
+            return None
+        arcade_owners = result.fetchall()
+
+        owner_list: List[Dict[str, Any]] = []
+        # Iterate the fetched rows, not the list being built
+        for owner in arcade_owners:
+            owner_list.append(owner._asdict())
+
+        self.logger.info(f"Done, found {len(owner_list)} arcade owners")
+        with open("dbdump-arcade_owner.json", "w", encoding="utf-8") as f:
+            f.write(json.dumps(owner_list))
+        self.logger.info("Saved as dbdump-arcade_owner.json")
+
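+        # The remaining tables (profiles, scores, achievements, items) are
+        # dumped keyed by game code (e.g. "SDBT") so the restore step can fan
+        # each game's rows out into that title's own schema.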
sc["cleared"], + "grade": sc["grade"], + "data": sc["data"], + }) + + self.logger.info(f"Done, found scores for {len(score_list)} games") + with open("dbdump-score.json", "w", encoding="utf-8") as f: + f.write(json.dumps(score_list)) + self.logger.info(f"Saved as dbdump-score.json") + + self.logger.info("Dumping achievements...") + + sql = f"SELECT * FROM `{dbname}`.`achievement`" + + result = self.base.execute(sql) + if result is None: + self.logger.error("Failed") + return None + achievements = result.fetchall() + + achievement_list: Dict[List[Dict[str, Any]]] = {} + for ach in achievements: + game = ach["game"] + + if game not in achievement_list: + achievement_list[game] = [] + + achievement_list[game].append({ + "id": ach["id"], + "user": ach["user"], + "version": ach["version"], + "type": ach["type"], + "achievement_id": ach["achievement_id"], + "data": ach["data"], + }) + + self.logger.info(f"Done, found achievements for {len(achievement_list)} games") + with open("dbdump-achievement.json", "w", encoding="utf-8") as f: + f.write(json.dumps(achievement_list)) + self.logger.info(f"Saved as dbdump-achievement.json") + + self.logger.info("Dumping items...") + + sql = f"SELECT * FROM `{dbname}`.`item`" + + result = self.base.execute(sql) + if result is None: + self.logger.error("Failed") + return None + items = result.fetchall() + + item_list: Dict[List[Dict[str, Any]]] = {} + for itm in items: + game = itm["game"] + + if game not in item_list: + item_list[game] = [] + + item_list[game].append({ + "id": itm["id"], + "user": itm["user"], + "version": itm["version"], + "type": itm["type"], + "item_id": itm["item_id"], + "data": ach["data"], + }) + + self.logger.info(f"Done, found items for {len(item_list)} games") + with open("dbdump-item.json", "w", encoding="utf-8") as f: + f.write(json.dumps(item_list)) + self.logger.info(f"Saved as dbdump-item.json") + + def restore_from_old_schema(self): + # Import the tables we expect to be there + from core.data.schema.user import aime_user + from core.data.schema.card import aime_card + from core.data.schema.arcade import arcade, machine, arcade_owner + from sqlalchemy.dialects.mysql import Insert + + # Make sure that all the tables we're trying to access exist + self.create_database() + + # Import the data, making sure that dependencies are accounted for + if os.path.exists("dbdump-user.json"): + users = [] + with open("dbdump-user.json", "r", encoding="utf-8") as f: + users = json.load(f) + + self.logger.info(f"Load {len(users)} users") + + for user in users: + sql = Insert(aime_user).values(**user) + + conflict = sql.on_duplicate_key_update(**user) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert user {user['id']}") + continue + self.logger.info(f"Inserted user {user['id']} -> {result.lastrowid}") + + if os.path.exists("dbdump-card.json"): + cards = [] + with open("dbdump-card.json", "r", encoding="utf-8") as f: + cards = json.load(f) + + self.logger.info(f"Load {len(cards)} cards") + + for card in cards: + sql = Insert(aime_card).values(**card) + + conflict = sql.on_duplicate_key_update(**card) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert card {card['id']}") + continue + self.logger.info(f"Inserted card {card['id']} -> {result.lastrowid}") + + if os.path.exists("dbdump-arcade.json"): + arcades = [] + with open("dbdump-arcade.json", "r", encoding="utf-8") as f: + arcades = json.load(f) + + self.logger.info(f"Load {len(arcades)} arcades") + 
+ for ac in arcades: + sql = Insert(arcade).values(**ac) + + conflict = sql.on_duplicate_key_update(**ac) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert arcade {ac['id']}") + continue + self.logger.info(f"Inserted arcade {ac['id']} -> {result.lastrowid}") + + if os.path.exists("dbdump-arcade_owner.json"): + ac_owners = [] + with open("dbdump-arcade_owner.json", "r", encoding="utf-8") as f: + ac_owners = json.load(f) + + self.logger.info(f"Load {len(ac_owners)} arcade owners") + + for owner in ac_owners: + sql = Insert(arcade_owner).values(**owner) + + conflict = sql.on_duplicate_key_update(**owner) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert arcade_owner {owner['user']}") + continue + self.logger.info(f"Inserted arcade_owner {owner['user']} -> {result.lastrowid}") + + if os.path.exists("dbdump-machine.json"): + mechs = [] + with open("dbdump-machine.json", "r", encoding="utf-8") as f: + mechs = json.load(f) + + self.logger.info(f"Load {len(mechs)} machines") + + for mech in mechs: + sql = Insert(machine).values(**mech) + + conflict = sql.on_duplicate_key_update(**mech) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert machine {mech['id']}") + continue + self.logger.info(f"Inserted machine {mech['id']} -> {result.lastrowid}") + + # Now the fun part, grabbing all our scores, profiles, items, and achievements and trying + # to conform them to our current, freeform schema. This will be painful... + profiles = {} + items = {} + scores = {} + achievements = {} + + if os.path.exists("dbdump-profile.json"): + with open("dbdump-profile.json", "r", encoding="utf-8") as f: + profiles = json.load(f) + + self.logger.info(f"Load {len(profiles)} profiles") + + if os.path.exists("dbdump-item.json"): + with open("dbdump-item.json", "r", encoding="utf-8") as f: + items = json.load(f) + + self.logger.info(f"Load {len(items)} items") + + if os.path.exists("dbdump-score.json"): + with open("dbdump-score.json", "r", encoding="utf-8") as f: + scores = json.load(f) + + self.logger.info(f"Load {len(scores)} scores") + + if os.path.exists("dbdump-achievement.json"): + with open("dbdump-achievement.json", "r", encoding="utf-8") as f: + achievements = json.load(f) + + self.logger.info(f"Load {len(achievements)} achievements") + + # Chuni / Chusan + if os.path.exists("titles/chuni/schema"): + from titles.chuni.schema.item import character, item, duel, map, map_area + from titles.chuni.schema.profile import profile, profile_ex, option, option_ex + from titles.chuni.schema.profile import recent_rating, activity, charge, emoney + from titles.chuni.schema.profile import overpower + from titles.chuni.schema.score import best_score, course + + chuni_profiles = [] + chuni_items = [] + chuni_scores = [] + + if "SDBT" in profiles: + chuni_profiles = profiles["SDBT"] + if "SDBT" in items: + chuni_items = items["SDBT"] + if "SDBT" in scores: + chuni_scores = scores["SDBT"] + if "SDHD" in profiles: + chuni_profiles += profiles["SDHD"] + if "SDHD" in items: + chuni_items += items["SDHD"] + if "SDHD" in scores: + chuni_scores += scores["SDHD"] + + self.logger.info(f"Importing {len(chuni_profiles)} chunithm/chunithm new profiles") + + for pf in chuni_profiles: + if type(pf["data"]) is not dict: + pf["data"] = json.loads(pf["data"]) + pf_data = pf["data"] + + # data + if "userData" in pf_data: + pf_data["userData"]["userName"] = bytes([ord(c) for c in 
pf_data["userData"]["userName"]]).decode("utf-8") + pf_data["userData"]["user"] = pf["user"] + pf_data["userData"]["version"] = pf["version"] + pf_data["userData"].pop("accessCode") + + if pf_data["userData"]["lastRomVersion"].startswith("2."): + pf_data["userData"]["version"] += 10 + + pf_data["userData"] = self.base.fix_bools(pf_data["userData"]) + + sql = Insert(profile).values(**pf_data["userData"]) + conflict = sql.on_duplicate_key_update(**pf_data["userData"]) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile data for {pf['user']}") + continue + self.logger.info(f"Inserted chuni profile for {pf['user']} ->{result.lastrowid}") + + # data_ex + if "userDataEx" in pf_data and len(pf_data["userDataEx"]) > 0: + pf_data["userDataEx"][0]["user"] = pf["user"] + pf_data["userDataEx"][0]["version"] = pf["version"] + + pf_data["userDataEx"] = self.base.fix_bools(pf_data["userDataEx"][0]) + + sql = Insert(profile_ex).values(**pf_data["userDataEx"]) + conflict = sql.on_duplicate_key_update(**pf_data["userDataEx"]) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile data_ex for {pf['user']}") + continue + self.logger.info(f"Inserted chuni profile data_ex for {pf['user']} ->{result.lastrowid}") + + # option + if "userGameOption" in pf_data: + pf_data["userGameOption"]["user"] = pf["user"] + + pf_data["userGameOption"] = self.base.fix_bools(pf_data["userGameOption"]) + + sql = Insert(option).values(**pf_data["userGameOption"]) + conflict = sql.on_duplicate_key_update(**pf_data["userGameOption"]) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile options for {pf['user']}") + continue + self.logger.info(f"Inserted chuni profile options for {pf['user']} ->{result.lastrowid}") + + # option_ex + if "userGameOptionEx" in pf_data and len(pf_data["userGameOptionEx"]) > 0: + pf_data["userGameOptionEx"][0]["user"] = pf["user"] + + pf_data["userGameOptionEx"] = self.base.fix_bools(pf_data["userGameOptionEx"][0]) + + sql = Insert(option_ex).values(**pf_data["userGameOptionEx"]) + conflict = sql.on_duplicate_key_update(**pf_data["userGameOptionEx"]) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile option_ex for {pf['user']}") + continue + self.logger.info(f"Inserted chuni profile option_ex for {pf['user']} ->{result.lastrowid}") + + # recent_rating + if "userRecentRatingList" in pf_data: + rr = { + "user": pf["user"], + "recentRating": pf_data["userRecentRatingList"] + } + + sql = Insert(recent_rating).values(**rr) + conflict = sql.on_duplicate_key_update(**rr) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile recent_rating for {pf['user']}") + continue + self.logger.info(f"Inserted chuni profile recent_rating for {pf['user']} ->{result.lastrowid}") + + # activity + if "userActivityList" in pf_data: + for act in pf_data["userActivityList"]: + act["user"] = pf["user"] + + sql = Insert(activity).values(**act) + conflict = sql.on_duplicate_key_update(**act) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile activity for {pf['user']}") + else: + self.logger.info(f"Inserted chuni profile activity for {pf['user']} ->{result.lastrowid}") + + # charge + if "userChargeList" in pf_data: + for cg in pf_data["userChargeList"]: + cg["user"] = 
pf["user"] + + cg = self.base.fix_bools(cg) + + sql = Insert(charge).values(**cg) + conflict = sql.on_duplicate_key_update(**cg) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile charge for {pf['user']}") + else: + self.logger.info(f"Inserted chuni profile charge for {pf['user']} ->{result.lastrowid}") + + # emoney + if "userEmoneyList" in pf_data: + for emon in pf_data["userEmoneyList"]: + emon["user"] = pf["user"] + + sql = Insert(emoney).values(**emon) + conflict = sql.on_duplicate_key_update(**emon) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile emoney for {pf['user']}") + else: + self.logger.info(f"Inserted chuni profile emoney for {pf['user']} ->{result.lastrowid}") + + # overpower + if "userOverPowerList" in pf_data: + for op in pf_data["userOverPowerList"]: + op["user"] = pf["user"] + + sql = Insert(overpower).values(**op) + conflict = sql.on_duplicate_key_update(**op) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile overpower for {pf['user']}") + else: + self.logger.info(f"Inserted chuni profile overpower for {pf['user']} ->{result.lastrowid}") + + # map_area + if "userMapAreaList" in pf_data: + for ma in pf_data["userMapAreaList"]: + ma["user"] = pf["user"] + + ma = self.base.fix_bools(ma) + + sql = Insert(map_area).values(**ma) + conflict = sql.on_duplicate_key_update(**ma) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni map_area for {pf['user']}") + else: + self.logger.info(f"Inserted chuni map_area for {pf['user']} ->{result.lastrowid}") + + #duel + if "userDuelList" in pf_data: + for ma in pf_data["userDuelList"]: + ma["user"] = pf["user"] + + ma = self.base.fix_bools(ma) + + sql = Insert(duel).values(**ma) + conflict = sql.on_duplicate_key_update(**ma) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni duel for {pf['user']}") + else: + self.logger.info(f"Inserted chuni duel for {pf['user']} ->{result.lastrowid}") + + # map + if "userMapList" in pf_data: + for ma in pf_data["userMapList"]: + ma["user"] = pf["user"] + + ma = self.base.fix_bools(ma) + + sql = Insert(map).values(**ma) + conflict = sql.on_duplicate_key_update(**ma) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni map for {pf['user']}") + else: + self.logger.info(f"Inserted chuni map for {pf['user']} ->{result.lastrowid}") + + self.logger.info(f"Importing {len(chuni_items)} chunithm/chunithm new items") + + for i in chuni_items: + if type(i["data"]) is not dict: + i["data"] = json.loads(i["data"]) + i_data = i["data"] + + i_data["user"] = i["user"] + + i_data = self.base.fix_bools(i_data) + + try: i_data.pop("assignIllust") + except: pass + + try: i_data.pop("exMaxLv") + except: pass + + if i["type"] == 20: #character + sql = Insert(character).values(**i_data) + else: + sql = Insert(item).values(**i_data) + + conflict = sql.on_duplicate_key_update(**i_data) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni item for user {i['user']}") + + else: + self.logger.info(f"Inserted chuni item for user {i['user']} {i['item_id']} -> {result.lastrowid}") + + self.logger.info(f"Importing {len(chuni_scores)} chunithm/chunithm new scores") + + for sc in chuni_scores: + if type(sc["data"]) is not dict: + 
sc["data"] = json.loads(sc["data"]) + + score_data = self.base.fix_bools(sc["data"]) + + try: score_data.pop("theoryCount") + except: pass + + try: score_data.pop("ext1") + except: pass + + score_data["user"] = sc["user"] + + sql = Insert(best_score).values(**score_data) + conflict = sql.on_duplicate_key_update(**score_data) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to put chuni score for user {sc['user']}") + else: + self.logger.info(f"Inserted chuni score for user {sc['user']} {sc['song_id']}/{sc['chart_id']} -> {result.lastrowid}") + + else: + self.logger.info(f"Chuni/Chusan not found, skipping...") + + # CXB + if os.path.exists("titles/cxb/schema"): + from titles.cxb.schema.item import energy + from titles.cxb.schema.profile import profile + from titles.cxb.schema.score import score, ranking + + cxb_profiles = [] + cxb_items = [] + cxb_scores = [] + + if "SDCA" in profiles: + cxb_profiles = profiles["SDCA"] + if "SDCA" in items: + cxb_items = items["SDCA"] + if "SDCA" in scores: + cxb_scores = scores["SDCA"] + + self.logger.info(f"Importing {len(cxb_profiles)} CXB profiles") + + for pf in cxb_profiles: + user = pf["user"] + version = pf["version"] + pf_data = pf["data"]["data"] + pf_idx = pf["data"]["index"] + + for x in range(len(pf_data)): + sql = Insert(profile).values( + user = user, + version = version, + index = int(pf_idx[x]), + data = json.loads(pf_data[x]) if type(pf_data[x]) is not dict else pf_data[x] + ) + + conflict = sql.on_duplicate_key_update( + user = user, + version = version, + index = int(pf_idx[x]), + data = json.loads(pf_data[x]) if type(pf_data[x]) is not dict else pf_data[x] + ) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert CXB profile for user {user} Index {pf_idx[x]}") + + self.logger.info(f"Importing {len(cxb_scores)} CXB scores") + + for sc in cxb_scores: + user = sc["user"] + version = sc["version"] + mcode = sc["data"]["mcode"] + index = sc["data"]["index"] + + sql = Insert(score).values( + user = user, + game_version = version, + song_mcode = mcode, + song_index = index, + data = sc["data"] + ) + + conflict = sql.on_duplicate_key_update( + user = user, + game_version = version, + song_mcode = mcode, + song_index = index, + data = sc["data"] + ) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert CXB score for user {user} mcode {mcode}") + + self.logger.info(f"Importing {len(cxb_items)} CXB items") + + for it in cxb_items: + user = it["user"] + + if it["type"] == 3: # energy + sql = Insert(energy).values( + user = user, + energy = it["data"]["total"] + ) + + conflict = sql.on_duplicate_key_update( + user = user, + energy = it["data"]["total"] + ) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert CXB energy for user {user}") + + elif it["type"] == 2: + sql = Insert(ranking).values( + user = user, + rev_id = it["data"]["rid"], + song_id = it["data"]["sc"][1] if len(it["data"]["sc"]) > 1 else None, + score = it["data"]["sc"][0], + clear = it["data"]["clear"], + ) + + conflict = sql.on_duplicate_key_update( + user = user, + rev_id = it["data"]["rid"], + song_id = it["data"]["sc"][1] if len(it["data"]["sc"]) > 1 else None, + score = it["data"]["sc"][0], + clear = it["data"]["clear"], + ) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert CXB ranking for user {user}") + + else: + self.logger.error(f"Unknown CXB 
item type {it['type']} for user {user}") + + else: + self.logger.info(f"CXB not found, skipping...") + + # Diva + if os.path.exists("titles/diva/schema"): + from titles.diva.schema.profile import profile + from titles.diva.schema.score import score + from titles.diva.schema.item import shop + + diva_profiles = [] + diva_scores = [] + + if "SBZV" in profiles: + diva_profiles = profiles["SBZV"] + if "SBZV" in scores: + diva_scores = scores["SBZV"] + + self.logger.info(f"Importing {len(diva_profiles)} Diva profiles") + + for pf in diva_profiles: + pf["data"]["user"] = pf["user"] + pf["data"]["version"] = pf["version"] + pf_data = pf["data"] + + if "mdl_eqp_ary" in pf["data"]: + sql = Insert(shop).values( + user = user, + version = version, + mdl_eqp_ary = pf["data"]["mdl_eqp_ary"], + ) + conflict = sql.on_duplicate_key_update( + user = user, + version = version, + mdl_eqp_ary = pf["data"]["mdl_eqp_ary"] + ) + self.base.execute(conflict) + pf["data"].pop("mdl_eqp_ary") + + sql = Insert(profile).values(**pf_data) + conflict = sql.on_duplicate_key_update(**pf_data) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert diva profile for {pf['user']}") + + self.logger.info(f"Importing {len(diva_scores)} Diva scores") + + for sc in diva_scores: + user = sc["user"] + + clr_kind = -1 + for x in sc["data"]["stg_clr_kind"].split(","): + if x != "-1": + clr_kind = x + + cool_ct = 0 + for x in sc["data"]["stg_cool_cnt"].split(","): + if x != "0": + cool_ct = x + + fine_ct = 0 + for x in sc["data"]["stg_fine_cnt"].split(","): + if x != "0": + fine_ct = x + + safe_ct = 0 + for x in sc["data"]["stg_safe_cnt"].split(","): + if x != "0": + safe_ct = x + + sad_ct = 0 + for x in sc["data"]["stg_sad_cnt"].split(","): + if x != "0": + sad_ct = x + + worst_ct = 0 + for x in sc["data"]["stg_wt_wg_cnt"].split(","): + if x != "0": + worst_ct = x + + max_cmb = 0 + for x in sc["data"]["stg_max_cmb"].split(","): + if x != "0": + max_cmb = x + + sql = Insert(score).values( + user = user, + version = sc["version"], + pv_id = sc["song_id"], + difficulty = sc["chart_id"], + score = sc["score1"], + atn_pnt = sc["score2"], + clr_kind = clr_kind, + sort_kind = sc["data"]["sort_kind"], + cool = cool_ct, + fine = fine_ct, + safe = safe_ct, + sad = sad_ct, + worst = worst_ct, + max_combo = max_cmb, + ) + + conflict = sql.on_duplicate_key_update(user = user, + version = sc["version"], + pv_id = sc["song_id"], + difficulty = sc["chart_id"], + score = sc["score1"], + atn_pnt = sc["score2"], + clr_kind = clr_kind, + sort_kind = sc["data"]["sort_kind"], + cool = cool_ct, + fine = fine_ct, + safe = safe_ct, + sad = sad_ct, + worst = worst_ct, + max_combo = max_cmb + ) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert diva score for {pf['user']}") + + else: + self.logger.info(f"Diva not found, skipping...") + + # Ongeki + if os.path.exists("titles/ongeki/schema"): + from titles.ongeki.schema.item import card, deck, character, boss, story + from titles.ongeki.schema.item import chapter, item, music_item, login_bonus + from titles.ongeki.schema.item import event_point, mission_point, scenerio + from titles.ongeki.schema.item import trade_item, event_music, tech_event + from titles.ongeki.schema.profile import profile, option, activity, recent_rating + from titles.ongeki.schema.profile import rating_log, training_room, kop + from titles.ongeki.schema.score import score_best, tech_count, playlog + from titles.ongeki.schema.log import session_log + 
+ item_types = { + "character": 20, + "story": 21, + "card": 22, + "deck": 23, + "login": 24, + "chapter": 25 + } + + ongeki_profiles = [] + ongeki_items = [] + ongeki_scores = [] + + if "SDDT" in profiles: + ongeki_profiles = profiles["SDDT"] + if "SDDT" in items: + ongeki_items = items["SDDT"] + if "SDDT" in scores: + ongeki_scores = scores["SDDT"] + + self.logger.info(f"Importing {len(ongeki_profiles)} ongeki profiles") + + for pf in ongeki_profiles: + user = pf["user"] + version = pf["version"] + pf_data = pf["data"] + + pf_data["userData"]["user"] = user + pf_data["userData"]["version"] = version + pf_data["userData"].pop("accessCode") + + sql = Insert(profile).values(**pf_data["userData"]) + conflict = sql.on_duplicate_key_update(**pf_data["userData"]) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki profile data for user {pf['user']}") + continue + + pf_data["userOption"]["user"] = user + + sql = Insert(option).values(**pf_data["userOption"]) + conflict = sql.on_duplicate_key_update(**pf_data["userOption"]) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki profile options for user {pf['user']}") + continue + + for pf_list in pf_data["userActivityList"]: + pf_list["user"] = user + + sql = Insert(activity).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki profile activity for user {pf['user']}") + continue + + sql = Insert(recent_rating).values( + user = user, + recentRating = pf_data["userRecentRatingList"] + ) + + conflict = sql.on_duplicate_key_update( + user = user, + recentRating = pf_data["userRecentRatingList"] + ) + result = self.base.execute(conflict) + + if result is None: + self.logger.error(f"Failed to insert ongeki profile recent rating for user {pf['user']}") + continue + + for pf_list in pf_data["userRatinglogList"]: + pf_list["user"] = user + + sql = Insert(rating_log).values(**pf_list) + + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki profile rating log for user {pf['user']}") + continue + + for pf_list in pf_data["userTrainingRoomList"]: + pf_list["user"] = user + + sql = Insert(training_room).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki profile training room for user {pf['user']}") + continue + + if "userKopList" in pf_data: + for pf_list in pf_data["userKopList"]: + pf_list["user"] = user + + sql = Insert(kop).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki profile training room for user {pf['user']}") + continue + + for pf_list in pf_data["userBossList"]: + pf_list["user"] = user + + sql = Insert(boss).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item boss for user {pf['user']}") + continue + + for pf_list in pf_data["userDeckList"]: + pf_list["user"] = user + + sql = Insert(deck).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to 
insert ongeki item deck for user {pf['user']}") + continue + + for pf_list in pf_data["userStoryList"]: + pf_list["user"] = user + + sql = Insert(story).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item story for user {pf['user']}") + continue + + for pf_list in pf_data["userChapterList"]: + pf_list["user"] = user + + sql = Insert(chapter).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item chapter for user {pf['user']}") + continue + + for pf_list in pf_data["userPlaylogList"]: + pf_list["user"] = user + + sql = Insert(playlog).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki score playlog for user {pf['user']}") + continue + + for pf_list in pf_data["userMusicItemList"]: + pf_list["user"] = user + + sql = Insert(music_item).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item music item for user {pf['user']}") + continue + + for pf_list in pf_data["userTechCountList"]: + pf_list["user"] = user + + sql = Insert(tech_count).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item tech count for user {pf['user']}") + continue + + if "userTechEventList" in pf_data: + for pf_list in pf_data["userTechEventList"]: + pf_list["user"] = user + + sql = Insert(tech_event).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item tech event for user {pf['user']}") + continue + + for pf_list in pf_data["userTradeItemList"]: + pf_list["user"] = user + + sql = Insert(trade_item).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item trade item for user {pf['user']}") + continue + + if "userEventMusicList" in pf_data: + for pf_list in pf_data["userEventMusicList"]: + pf_list["user"] = user + + sql = Insert(event_music).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item event music for user {pf['user']}") + continue + + if "userEventPointList" in pf_data: + for pf_list in pf_data["userEventPointList"]: + pf_list["user"] = user + + sql = Insert(event_point).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item event point for user {pf['user']}") + continue + + for pf_list in pf_data["userLoginBonusList"]: + pf_list["user"] = user + + sql = Insert(login_bonus).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item login bonus for user {pf['user']}") + continue + + for pf_list in pf_data["userMissionPointList"]: + pf_list["user"] = user + + sql = 
Insert(mission_point).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item mission point for user {pf['user']}") + continue + + for pf_list in pf_data["userScenarioList"]: + pf_list["user"] = user + + sql = Insert(scenerio).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item scenerio for user {pf['user']}") + continue + + if "userSessionlogList" in pf_data: + for pf_list in pf_data["userSessionlogList"]: + pf_list["user"] = user + + sql = Insert(session_log).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki log session for user {pf['user']}") + continue + + self.logger.info(f"Importing {len(ongeki_items)} ongeki items") + + for it in ongeki_items: + user = it["user"] + it_type = it["type"] + it_id = it["item_id"] + it_data = it["data"] + it_data["user"] = user + + if it_type == item_types["character"] and "characterId" in it_data: + sql = Insert(character).values(**it_data) + + elif it_type == item_types["story"]: + sql = Insert(story).values(**it_data) + + elif it_type == item_types["card"]: + sql = Insert(card).values(**it_data) + + elif it_type == item_types["deck"]: + sql = Insert(deck).values(**it_data) + + elif it_type == item_types["login"]: # login bonus + sql = Insert(login_bonus).values(**it_data) + + elif it_type == item_types["chapter"]: + sql = Insert(chapter).values(**it_data) + + else: + sql = Insert(item).values(**it_data) + + conflict = sql.on_duplicate_key_update(**it_data) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item {it_id} kind {it_type} for user {user}") + + self.logger.info(f"Importing {len(ongeki_scores)} ongeki scores") + + for sc in ongeki_scores: + user = sc["user"] + sc_data = sc["data"] + sc_data["user"] = user + + sql = Insert(score_best).values(**sc_data) + conflict = sql.on_duplicate_key_update(**sc_data) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki score for user {user}: {sc['song_id']}/{sc['chart_id']}") + + else: + self.logger.info(f"Ongeki not found, skipping...") + + # Wacca + if os.path.exists("titles/wacca/schema"): + from titles.wacca.schema.profile import profile, option, bingo, gate, favorite + from titles.wacca.schema.item import item, ticket, song_unlock, trophy + from titles.wacca.schema.score import best_score, stageup + from titles.wacca.reverse import WaccaReverse + from titles.wacca.const import WaccaConstants + + default_opts = WaccaReverse.OPTIONS_DEFAULTS + opts = WaccaConstants.OPTIONS + item_types = WaccaConstants.ITEM_TYPES + + wacca_profiles = [] + wacca_items = [] + wacca_scores = [] + wacca_achievements = [] + + if "SDFE" in profiles: + wacca_profiles = profiles["SDFE"] + if "SDFE" in items: + wacca_items = items["SDFE"] + if "SDFE" in scores: + wacca_scores = scores["SDFE"] + if "SDFE" in achievements: + wacca_achievements = achievements["SDFE"] + + self.logger.info(f"Importing {len(wacca_profiles)} wacca profiles") + + for pf in wacca_profiles: + if pf["version"] == 0 or pf["version"] == 1: + season = 1 + elif pf["version"] == 2 or pf["version"] == 3: + season = 2 + elif pf["version"] >= 4: + season = 3 + + if type(pf["data"]) 
is not dict: + pf["data"] = json.loads(pf["data"]) + + try: + sql = Insert(profile).values( + id = pf["id"], + user = pf["user"], + version = pf["version"], + season = season, + username = pf["data"]["profile"]["username"] if "username" in pf["data"]["profile"] else pf["name"], + xp = pf["data"]["profile"]["xp"], + xp_season = pf["data"]["profile"]["xp"], + wp = pf["data"]["profile"]["wp"], + wp_season = pf["data"]["profile"]["wp"], + wp_total = pf["data"]["profile"]["total_wp_gained"], + dan_type = pf["data"]["profile"]["dan_type"], + dan_level = pf["data"]["profile"]["dan_level"], + title_0 = pf["data"]["profile"]["title_part_ids"][0], + title_1 = pf["data"]["profile"]["title_part_ids"][1], + title_2 = pf["data"]["profile"]["title_part_ids"][2], + rating = pf["data"]["profile"]["rating"], + vip_expire_time = datetime.fromtimestamp(pf["data"]["profile"]["vip_expire_time"]) if "vip_expire_time" in pf["data"]["profile"] else None, + login_count = pf["use_count"], + playcount_single = pf["use_count"], + playcount_single_season = pf["use_count"], + last_game_ver = pf["data"]["profile"]["last_game_ver"], + last_song_id = pf["data"]["profile"]["last_song_info"][0] if "last_song_info" in pf["data"]["profile"] else 0, + last_song_difficulty = pf["data"]["profile"]["last_song_info"][1] if "last_song_info" in pf["data"]["profile"] else 0, + last_folder_order = pf["data"]["profile"]["last_song_info"][2] if "last_song_info" in pf["data"]["profile"] else 0, + last_folder_id = pf["data"]["profile"]["last_song_info"][3] if "last_song_info" in pf["data"]["profile"] else 0, + last_song_order = pf["data"]["profile"]["last_song_info"][4] if "last_song_info" in pf["data"]["profile"] else 0, + last_login_date = datetime.fromtimestamp(pf["data"]["profile"]["last_login_timestamp"]), + ) + + conflict = sql.on_duplicate_key_update( + id = pf["id"], + user = pf["user"], + version = pf["version"], + season = season, + username = pf["data"]["profile"]["username"] if "username" in pf["data"]["profile"] else pf["name"], + xp = pf["data"]["profile"]["xp"], + xp_season = pf["data"]["profile"]["xp"], + wp = pf["data"]["profile"]["wp"], + wp_season = pf["data"]["profile"]["wp"], + wp_total = pf["data"]["profile"]["total_wp_gained"], + dan_type = pf["data"]["profile"]["dan_type"], + dan_level = pf["data"]["profile"]["dan_level"], + title_0 = pf["data"]["profile"]["title_part_ids"][0], + title_1 = pf["data"]["profile"]["title_part_ids"][1], + title_2 = pf["data"]["profile"]["title_part_ids"][2], + rating = pf["data"]["profile"]["rating"], + vip_expire_time = datetime.fromtimestamp(pf["data"]["profile"]["vip_expire_time"]) if "vip_expire_time" in pf["data"]["profile"] else None, + login_count = pf["use_count"], + playcount_single = pf["use_count"], + playcount_single_season = pf["use_count"], + last_game_ver = pf["data"]["profile"]["last_game_ver"], + last_song_id = pf["data"]["profile"]["last_song_info"][0] if "last_song_info" in pf["data"]["profile"] else 0, + last_song_difficulty = pf["data"]["profile"]["last_song_info"][1] if "last_song_info" in pf["data"]["profile"] else 0, + last_folder_order = pf["data"]["profile"]["last_song_info"][2] if "last_song_info" in pf["data"]["profile"] else 0, + last_folder_id = pf["data"]["profile"]["last_song_info"][3] if "last_song_info" in pf["data"]["profile"] else 0, + last_song_order = pf["data"]["profile"]["last_song_info"][4] if "last_song_info" in pf["data"]["profile"] else 0, + last_login_date = datetime.fromtimestamp(pf["data"]["profile"]["last_login_timestamp"]), + ) + + result 
= self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert wacca profile for user {pf['user']}") + continue + + for opt, val in pf["data"]["option"].items(): + if val != default_opts[opt]: + opt_id = opts[opt] + sql = Insert(option).values( + user = pf["user"], + opt_id = opt_id, + value = val, + ) + + conflict = sql.on_duplicate_key_update( + user = pf["user"], + opt_id = opt_id, + value = val, + ) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert wacca option for user {pf['user']} {opt} -> {val}") + + except KeyError as e: + self.logger.warn(f"Outdated wacca profile, skipping: {e}") + + if "gate" in pf["data"]: + for profile_gate in pf["data"]["gate"]: + sql = Insert(gate).values( + user = pf["user"], + gate_id = profile_gate["id"], + page = profile_gate["page"], + loops = profile_gate["loops"], + progress = profile_gate["progress"], + last_used = datetime.fromtimestamp(profile_gate["last_used"]), + mission_flag = profile_gate["mission_flag"], + total_points = profile_gate["total_points"], + ) + + conflict = sql.on_duplicate_key_update( + user = pf["user"], + gate_id = profile_gate["id"], + page = profile_gate["page"], + loops = profile_gate["loops"], + progress = profile_gate["progress"], + last_used = datetime.fromtimestamp(profile_gate["last_used"]), + mission_flag = profile_gate["mission_flag"], + total_points = profile_gate["total_points"], + ) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert wacca gate for user {pf['user']} -> {profile_gate['id']}") + continue + + if "favorite" in pf["data"]: + for profile_favorite in pf["data"]["favorite"]: + sql = Insert(favorite).values( + user = pf["user"], + song_id = profile_favorite + ) + + conflict = sql.on_duplicate_key_update( + user = pf["user"], + song_id = profile_favorite + ) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert wacca favorite songs for user {pf['user']} -> {profile_favorite}") + continue + + for it in wacca_items: + user = it["user"] + item_type = it["type"] + item_id = it["item_id"] + + if type(it["data"]) is not dict: + it["data"] = json.loads(it["data"]) + + if item_type == item_types["ticket"]: + if "quantity" in it["data"]: + for x in range(it["data"]["quantity"]): + sql = Insert(ticket).values( + user = user, + ticket_id = item_id, + ) + + conflict = sql.on_duplicate_key_update( + user = user, + ticket_id = item_id, + ) + result = self.base.execute(conflict) + if result is None: + self.logger.warn(f"Wacca: Failed to insert ticket {item_id} for user {user}") + + elif item_type == item_types["music_unlock"] or item_type == item_types["music_difficulty_unlock"]: + diff = 0 + if "difficulty" in it["data"]: + for x in it["data"]["difficulty"]: + if x == 1: + diff += 1 + else: + break + + sql = Insert(song_unlock).values( + user = user, + song_id = item_id, + highest_difficulty = diff, + ) + + conflict = sql.on_duplicate_key_update( + user = user, + song_id = item_id, + highest_difficulty = diff, + ) + result = self.base.execute(conflict) + if result is None: + self.logger.warn(f"Wacca: Failed to insert song unlock {item_id} {diff} for user {user}") + + elif item_type == item_types["trophy"]: + season = int(item_id / 100000) + sql = Insert(trophy).values( + user = user, + trophy_id = item_id, + season = season, + progress = 0 if "progress" not in it["data"] else it["data"]["progress"], + badge_type = 0 # ??? 
+ ) + + conflict = sql.on_duplicate_key_update( + user = user, + trophy_id = item_id, + season = season, + progress = 0 if "progress" not in it["data"] else it["data"]["progress"], + badge_type = 0 # ??? + ) + result = self.base.execute(conflict) + if result is None: + self.logger.warn(f"Wacca: Failed to insert trophy {item_id} for user {user}") + + else: + sql = Insert(item).values( + user = user, + item_id = item_id, + type = item_type, + acquire_date = datetime.fromtimestamp(it["data"]["obtainedDate"]) if "obtainedDate" in it["data"] else datetime.now(), + use_count = it["data"]["uses"] if "uses" in it["data"] else 0, + use_count_season = it["data"]["season_uses"] if "season_uses" in it["data"] else 0 + ) + + conflict = sql.on_duplicate_key_update( + user = user, + item_id = item_id, + type = item_type, + acquire_date = datetime.fromtimestamp(it["data"]["obtainedDate"]) if "obtainedDate" in it["data"] else datetime.now(), + use_count = it["data"]["uses"] if "uses" in it["data"] else 0, + use_count_season = it["data"]["season_uses"] if "season_uses" in it["data"] else 0 + ) + result = self.base.execute(conflict) + if result is None: + self.logger.warn(f"Wacca: Failed to insert trophy {item_id} for user {user}") + + for sc in wacca_scores: + if type(sc["data"]) is not dict: + sc["data"] = json.loads(sc["data"]) + + sql = Insert(best_score).values( + user = sc["user"], + song_id = int(sc["song_id"]), + chart_id = sc["chart_id"], + score = sc["score1"], + play_ct = 1 if "play_count" not in sc["data"] else sc["data"]["play_count"], + clear_ct = 1 if sc["cleared"] & 0x01 else 0, + missless_ct = 1 if sc["cleared"] & 0x02 else 0, + fullcombo_ct = 1 if sc["cleared"] & 0x04 else 0, + allmarv_ct = 1 if sc["cleared"] & 0x08 else 0, + grade_d_ct = 1 if sc["grade"] & 0x01 else 0, + grade_c_ct = 1 if sc["grade"] & 0x02 else 0, + grade_b_ct = 1 if sc["grade"] & 0x04 else 0, + grade_a_ct = 1 if sc["grade"] & 0x08 else 0, + grade_aa_ct = 1 if sc["grade"] & 0x10 else 0, + grade_aaa_ct = 1 if sc["grade"] & 0x20 else 0, + grade_s_ct = 1 if sc["grade"] & 0x40 else 0, + grade_ss_ct = 1 if sc["grade"] & 0x80 else 0, + grade_sss_ct = 1 if sc["grade"] & 0x100 else 0, + grade_master_ct = 1 if sc["grade"] & 0x200 else 0, + grade_sp_ct = 1 if sc["grade"] & 0x400 else 0, + grade_ssp_ct = 1 if sc["grade"] & 0x800 else 0, + grade_sssp_ct = 1 if sc["grade"] & 0x1000 else 0, + best_combo = 0 if "max_combo" not in sc["data"] else sc["data"]["max_combo"], + lowest_miss_ct = 0 if "lowest_miss_count" not in sc["data"] else sc["data"]["lowest_miss_count"], + rating = 0 if "rating" not in sc["data"] else sc["data"]["rating"], + ) + + conflict = sql.on_duplicate_key_update( + user = sc["user"], + song_id = int(sc["song_id"]), + chart_id = sc["chart_id"], + score = sc["score1"], + play_ct = 1 if "play_count" not in sc["data"] else sc["data"]["play_count"], + clear_ct = 1 if sc["cleared"] & 0x01 else 0, + missless_ct = 1 if sc["cleared"] & 0x02 else 0, + fullcombo_ct = 1 if sc["cleared"] & 0x04 else 0, + allmarv_ct = 1 if sc["cleared"] & 0x08 else 0, + grade_d_ct = 1 if sc["grade"] & 0x01 else 0, + grade_c_ct = 1 if sc["grade"] & 0x02 else 0, + grade_b_ct = 1 if sc["grade"] & 0x04 else 0, + grade_a_ct = 1 if sc["grade"] & 0x08 else 0, + grade_aa_ct = 1 if sc["grade"] & 0x10 else 0, + grade_aaa_ct = 1 if sc["grade"] & 0x20 else 0, + grade_s_ct = 1 if sc["grade"] & 0x40 else 0, + grade_ss_ct = 1 if sc["grade"] & 0x80 else 0, + grade_sss_ct = 1 if sc["grade"] & 0x100 else 0, + grade_master_ct = 1 if sc["grade"] & 0x200 else 0, + 
grade_sp_ct = 1 if sc["grade"] & 0x400 else 0, + grade_ssp_ct = 1 if sc["grade"] & 0x800 else 0, + grade_sssp_ct = 1 if sc["grade"] & 0x1000 else 0, + best_combo = 0 if "max_combo" not in sc["data"] else sc["data"]["max_combo"], + lowest_miss_ct = 0 if "lowest_miss_count" not in sc["data"] else sc["data"]["lowest_miss_count"], + rating = 0 if "rating" not in sc["data"] else sc["data"]["rating"], + ) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert wacca score for user {sc['user']} {int(sc['song_id'])} {sc['chart_id']}") + + for ach in wacca_achievements: + if ach["version"] == 0 or ach["version"] == 1: + season = 1 + elif ach["version"] == 2 or ach["version"] == 3: + season = 2 + elif ach["version"] >= 4: + season = 3 + + if type(ach["data"]) is not dict: + ach["data"] = json.loads(ach["data"]) + + sql = Insert(stageup).values( + user = ach["user"], + season = season, + stage_id = ach["achievement_id"], + clear_status = 0 if "clear" not in ach["data"] else ach["data"]["clear"], + clear_song_ct = 0 if "clear_song_ct" not in ach["data"] else ach["data"]["clear_song_ct"], + song1_score = 0 if "score1" not in ach["data"] else ach["data"]["score1"], + song2_score = 0 if "score2" not in ach["data"] else ach["data"]["score2"], + song3_score = 0 if "score3" not in ach["data"] else ach["data"]["score3"], + play_ct = 1 if "attemps" not in ach["data"] else ach["data"]["attemps"], + ) + + conflict = sql.on_duplicate_key_update( + user = ach["user"], + season = season, + stage_id = ach["achievement_id"], + clear_status = 0 if "clear" not in ach["data"] else ach["data"]["clear"], + clear_song_ct = 0 if "clear_song_ct" not in ach["data"] else ach["data"]["clear_song_ct"], + song1_score = 0 if "score1" not in ach["data"] else ach["data"]["score1"], + song2_score = 0 if "score2" not in ach["data"] else ach["data"]["score2"], + song3_score = 0 if "score3" not in ach["data"] else ach["data"]["score3"], + play_ct = 1 if "attemps" not in ach["data"] else ach["data"]["attemps"], + ) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert wacca achievement for user {ach['user']}") + + else: + self.logger.info(f"Wacca not found, skipping...") From 83f09e180e31bae474068a16b7341a4ecf6446d0 Mon Sep 17 00:00:00 2001 From: Hay1tsme Date: Wed, 22 Feb 2023 09:59:31 -0500 Subject: [PATCH 02/68] add protobuf to requirements, fixes #2 --- requirements.txt | 1 + requirements_win.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/requirements.txt b/requirements.txt index 40dbf84..2631c24 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,3 +13,4 @@ coloredlogs pylibmc wacky Routes +protobuf diff --git a/requirements_win.txt b/requirements_win.txt index 89527fe..cbdbcde 100644 --- a/requirements_win.txt +++ b/requirements_win.txt @@ -12,3 +12,4 @@ inflection coloredlogs wacky Routes +protobuf From 670747cf4898c1fd1030e58efb57fc16438c43e2 Mon Sep 17 00:00:00 2001 From: Hay1tsme Date: Fri, 24 Feb 2023 13:34:32 -0500 Subject: [PATCH 03/68] add platform_system to requirements.txt --- requirements.txt | 2 +- requirements_win.txt | 15 --------------- 2 files changed, 1 insertion(+), 16 deletions(-) delete mode 100644 requirements_win.txt diff --git a/requirements.txt b/requirements.txt index 2631c24..0536370 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,7 @@ service_identity PyCryptodome inflection coloredlogs -pylibmc +pylibmc; platform_system != "Windows" wacky Routes protobuf diff --git 
a/requirements_win.txt b/requirements_win.txt
deleted file mode 100644
index cbdbcde..0000000
--- a/requirements_win.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-mypy
-wheel
-twisted
-pytz
-pyyaml
-sqlalchemy==1.4.46
-mysqlclient
-pyopenssl
-service_identity
-PyCryptodome
-inflection
-coloredlogs
-wacky
-Routes
-protobuf

From be00ad8e961eafc79f5efc1bfa805b791502da93 Mon Sep 17 00:00:00 2001
From: Hay1tsme
Date: Wed, 1 Mar 2023 23:55:29 -0500
Subject: [PATCH 04/68] wacca: add lily to list of items given on profile create, fixes #4

---
 titles/wacca/lily.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/titles/wacca/lily.py b/titles/wacca/lily.py
index 66a4bc8..e67b25c 100644
--- a/titles/wacca/lily.py
+++ b/titles/wacca/lily.py
@@ -50,6 +50,14 @@ class WaccaLily(WaccaS):
             resp = HousingStartResponseV1(region_id)
 
         return resp.make()
+
+    def handle_user_status_create_request(self, data: Dict)-> Dict:
+        req = UserStatusCreateRequest(data)
+        resp = super().handle_user_status_create_request(data)
+
+        self.data.item.put_item(req.aimeId, WaccaConstants.ITEM_TYPES["navigator"], 210002) # Add the Lily navigator
+
+        return resp
 
     def handle_user_status_get_request(self, data: Dict)-> Dict:
         req = UserStatusGetRequest(data)

From fe4dfe369b62ecfd90b5aa1d4531ada16d044516 Mon Sep 17 00:00:00 2001
From: Hay1tsme
Date: Sun, 30 Apr 2023 01:18:00 +0000
Subject: [PATCH 05/68] Update 'readme.md'

---
 readme.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/readme.md b/readme.md
index b1cb506..ec25191 100644
--- a/readme.md
+++ b/readme.md
@@ -2,7 +2,7 @@
 A network service emulator for games running SEGA'S ALL.NET service, and similar.
 
 # Supported games
-Games listed below have been tested and confirmed working. Only game versions older then the current one in active use in arcades (n-0) or current game versions older then a year (y-1) are supported.
+Games listed below have been tested and confirmed working. Only game versions older than the version currently active in arcades, or game versions that have not received a major update in over one year, are supported.
 
 + Chunithm
     + All versions up to New!! 
Plus From 75842b5a886b3f61fdf50c361894a9887af39618 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Tue, 11 Jul 2023 09:12:34 +0000 Subject: [PATCH 06/68] Add team support, implement team upsert, add rivals --- titles/chuni/base.py | 141 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 131 insertions(+), 10 deletions(-) diff --git a/titles/chuni/base.py b/titles/chuni/base.py index ed8d0fb..b9c1ae8 100644 --- a/titles/chuni/base.py +++ b/titles/chuni/base.py @@ -361,11 +361,98 @@ class ChuniBase: "userDuelList": duel_list, } + def handle_get_user_rival_data_api_request(self, data: Dict) -> Dict: + p = self.data.profile.get_rival(data["rivalId"]) + if p is None: + return {} + userRivalData = { + "rivalId": p.user, + "rivalName": p.userName + } + return { + "userId": data["userId"], + "userRivalData": userRivalData + } + + def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict: + m = self.data.score.get_rival_music(data["rivalId"], data["nextIndex"], data["maxCount"]) + if m is None: + return {} + + user_rival_music_list = [] + for music in m: + actual_music_id = self.data.static.get_song(music["musicId"]) + if actual_music_id is None: + music_id = music["musicId"] + else: + music_id = actual_music_id["songId"] + level = music["level"] + score = music["score"] + rank = music["rank"] + + # Find the existing entry for the current musicId in the user_rival_music_list + music_entry = next((entry for entry in user_rival_music_list if entry["musicId"] == music_id), None) + + if music_entry is None: + # If the entry doesn't exist, create a new entry + music_entry = { + "musicId": music_id, + "length": 0, + "userRivalMusicDetailList": [] + } + user_rival_music_list.append(music_entry) + + # Check if the current score is higher than the previous highest score for the level + level_entry = next( + ( + entry + for entry in music_entry["userRivalMusicDetailList"] + if entry["level"] == level + ), + None, + ) + if level_entry is None or score > level_entry["scoreMax"]: + # If the level entry doesn't exist or the score is higher, update or add the entry + level_entry = { + "level": level, + "scoreMax": score, + "scoreRank": rank + } + + if level_entry not in music_entry["userRivalMusicDetailList"]: + music_entry["userRivalMusicDetailList"].append(level_entry) + + music_entry["length"] = len(music_entry["userRivalMusicDetailList"]) + + result = { + "userId": data["userId"], + "rivalId": data["rivalId"], + "nextIndex": -1, + "userRivalMusicList": user_rival_music_list + } + + return result + def handle_get_user_rival_music_api_requestded(self, data: Dict) -> Dict: + m = self.data.score.get_rival_music(data["rivalId"], data["nextIndex"], data["maxCount"]) + if m is None: + return {} + + userRivalMusicList = [] + for music in m: + self.logger.debug(music["point"]) + + return { + "userId": data["userId"], + "rivalId": data["rivalId"], + "nextIndex": -1 + + } + def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict: user_fav_item_list = [] # still needs to be implemented on WebUI - # 1: Music, 3: Character + # 1: Music, 2: User, 3: Character fav_list = self.data.item.get_all_favorites( data["userId"], self.version, fav_kind=int(data["kind"]) ) @@ -600,25 +687,43 @@ class ChuniBase: } def handle_get_user_team_api_request(self, data: Dict) -> Dict: - # TODO: use the database "chuni_profile_team" with a GUI + # Default values + team_id = 65535 team_name = self.game_cfg.team.team_name - if team_name == "": + team_rank = 0 + + # Get user profile + profile = 
self.data.profile.get_profile_data(data["userId"], self.version) + if profile and profile["teamId"]: + # Get team by id + team = self.data.profile.get_team_by_id(profile["teamId"]) + + if team: + team_id = team["id"] + team_name = team["teamName"] + # Determine whether to use scaled ranks, or original system + if self.game_cfg.team.rank_scale: + team_rank = self.data.profile.get_team_rank(team["id"]) + else: + team_rank = self.data.profile.get_team_rank_actual(team["id"]) + + # Don't return anything if no team name has been defined for defaults and there is no team set for the player + if not profile["teamId"] and team_name == "": return {"userId": data["userId"], "teamId": 0} return { "userId": data["userId"], - "teamId": 1, - "teamRank": 1, + "teamId": team_id, + "teamRank": team_rank, "teamName": team_name, "userTeamPoint": { "userId": data["userId"], - "teamId": 1, + "teamId": team_id, "orderId": 1, "teamPoint": 1, "aggrDate": data["playDate"], }, } - def handle_get_team_course_setting_api_request(self, data: Dict) -> Dict: return { "userId": data["userId"], @@ -709,9 +814,25 @@ class ChuniBase: self.data.score.put_playlog(user_id, playlog) if "userTeamPoint" in upsert: - # TODO: team stuff - pass + team_points = upsert["userTeamPoint"] + try: + for tp in team_points: + if tp["teamId"] != '65535': + # Fetch the current team data + current_team = self.data.profile.get_team_by_id(tp["teamId"]) + # Calculate the new teamPoint + new_team_point = int(tp["teamPoint"]) + current_team["teamPoint"] + + # Prepare the data to update + team_data = { + "teamPoint": new_team_point + } + + # Update the team data + self.data.profile.update_team(tp["teamId"], team_data) + except: + pass # Probably a better way to catch if the team is not set yet (new profiles), but let's just pass if "userMapAreaList" in upsert: for map_area in upsert["userMapAreaList"]: self.data.item.put_map_area(user_id, map_area) @@ -757,4 +878,4 @@ class ChuniBase: return { "userId": data["userId"], "userNetBattleData": {"recentNBSelectMusicList": []}, - } + } \ No newline at end of file From c01d3f49f57b650485f82526a4ec12d8594efe3c Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Tue, 11 Jul 2023 09:13:19 +0000 Subject: [PATCH 07/68] Added call for getting rival's music lists --- titles/chuni/schema/score.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/titles/chuni/schema/score.py b/titles/chuni/schema/score.py index 203aa11..ab26f5f 100644 --- a/titles/chuni/schema/score.py +++ b/titles/chuni/schema/score.py @@ -200,3 +200,10 @@ class ChuniScoreData(BaseData): if result is None: return None return result.lastrowid + + def get_rival_music(self, rival_id: int, index: int, max_count: int) -> Optional[List[Dict]]: + sql = select(playlog).where(playlog.c.user == rival_id).limit(max_count).offset(index) + result = self.execute(sql) + if result is None: + return None + return result.fetchall() \ No newline at end of file From 043ff1700810ea5c6c863c09db0fc40eed161b99 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Tue, 11 Jul 2023 09:14:53 +0000 Subject: [PATCH 08/68] Add team support, rivals, and test function for getting playcounts --- titles/chuni/schema/profile.py | 100 +++++++++++++++++++++++++++++++++ 1 file changed, 100 insertions(+) diff --git a/titles/chuni/schema/profile.py b/titles/chuni/schema/profile.py index f8edc33..b055fcb 100644 --- a/titles/chuni/schema/profile.py +++ b/titles/chuni/schema/profile.py @@ -637,3 +637,103 @@ class ChuniProfileData(BaseData): if result is None: return None return result.fetchall() 
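The rival-music handler added in patch 06 above reduces a rival's raw playlog rows to one best entry per chart before building userRivalMusicList. The core of that reduction, pulled out as a minimal standalone sketch (the function name and sample values here are illustrative, not part of the patch):

```python
def best_scores_by_chart(playlog_rows: list) -> dict:
    """Keep only the highest score per (musicId, level), as the
    rival music handler does before shaping its response."""
    best = {}
    for row in playlog_rows:
        key = (row["musicId"], row["level"])
        if key not in best or row["score"] > best[key]["score"]:
            best[key] = row
    return best

# Two plays of the same chart collapse to the higher score:
rows = [
    {"musicId": 11, "level": 3, "score": 990000, "rank": 5},
    {"musicId": 11, "level": 3, "score": 1001000, "rank": 6},
]
assert best_scores_by_chart(rows)[(11, 3)]["score"] == 1001000
```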
+ + def get_team_by_id(self, team_id: int) -> Optional[Row]: + sql = select(team).where(team.c.id == team_id) + result = self.execute(sql) + + if result is None: + return None + return result.fetchone() + + def get_team_rank_actual(self, team_id: int) -> int: + # Normal ranking system, likely the one used in the real servers + # Query all teams sorted by 'teamPoint' + result = self.execute( + select(team.c.id).order_by(team.c.teamPoint.desc()) + ) + + # Get the rank of the team with the given team_id + rank = None + for i, row in enumerate(result, start=1): + if row.id == team_id: + rank = i + break + + # Return the rank if found, or a default rank otherwise + return rank if rank is not None else 0 + + def get_team_rank(self, team_id: int) -> int: + # Scaled ranking system, designed for smaller instances. + # Query all teams sorted by 'teamPoint' + result = self.execute( + select(team.c.id).order_by(team.c.teamPoint.desc()) + ) + + # Count total number of teams + total_teams = self.execute(select(func.count()).select_from(team)).scalar() + + # Get the rank of the team with the given team_id + rank = None + for i, row in enumerate(result, start=1): + if row.id == team_id: + rank = i + break + + # If the team is not found, return default rank + if rank is None: + return 0 + + # Define rank tiers + tiers = { + 1: range(1, int(total_teams * 0.1) + 1), # Rainbow + 2: range(int(total_teams * 0.1) + 1, int(total_teams * 0.4) + 1), # Gold + 3: range(int(total_teams * 0.4) + 1, int(total_teams * 0.7) + 1), # Silver + 4: range(int(total_teams * 0.7) + 1, total_teams + 1), # Grey + } + + # Assign rank based on tier + for tier_rank, tier_range in tiers.items(): + if rank in tier_range: + return tier_rank + + # Return default rank if not found in any tier + return 0 + + def update_team(self, team_id: int, team_data: Dict) -> bool: + team_data["id"] = team_id + + sql = insert(team).values(**team_data) + conflict = sql.on_duplicate_key_update(**team_data) + + result = self.execute(conflict) + + if result is None: + self.logger.warn( + f"update_team: Failed to update team! 
team id: {team_id}" + ) + return False + return True + def get_rival(self, rival_id: int) -> Optional[Row]: + sql = select(profile).where(profile.c.user == rival_id) + result = self.execute(sql) + + if result is None: + return None + return result.fetchone() + def get_overview(self) -> Dict: + # Fetch and add up all the playcounts + playcount_sql = self.execute(select(profile.c.playCount)) + + if playcount_sql is None: + self.logger.warn( + f"get_overview: Couldn't pull playcounts" + ) + return 0 + + total_play_count = 0; + for row in playcount_sql: + total_play_count += row[0] + return { + "total_play_count": total_play_count + } \ No newline at end of file From b42e8ab76c8bb6bfdf671aa013e3afc034ea9ff9 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Tue, 11 Jul 2023 09:16:11 +0000 Subject: [PATCH 09/68] Added function for pulling a song via the DB unique ID instead of the native song ID --- titles/chuni/schema/static.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/titles/chuni/schema/static.py b/titles/chuni/schema/static.py index 85d0397..3796232 100644 --- a/titles/chuni/schema/static.py +++ b/titles/chuni/schema/static.py @@ -453,6 +453,15 @@ class ChuniStaticData(BaseData): return None return result.fetchone() + def get_song(self, music_id: int) -> Optional[Row]: + sql = music.select(music.c.id == music_id) + + result = self.execute(sql) + if result is None: + return None + return result.fetchone() + + def put_avatar( self, version: int, @@ -587,4 +596,4 @@ class ChuniStaticData(BaseData): result = self.execute(sql) if result is None: return None - return result.fetchone() + return result.fetchone() \ No newline at end of file From eecd3a829dcc029dcf6e472e68ecc5f8aaa91417 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Tue, 11 Jul 2023 09:40:49 +0000 Subject: [PATCH 10/68] Added value for team rank scaling, and documented it a bit --- example_config/chuni.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/example_config/chuni.yaml b/example_config/chuni.yaml index 59db51e..ed0aca0 100644 --- a/example_config/chuni.yaml +++ b/example_config/chuni.yaml @@ -3,7 +3,8 @@ server: loglevel: "info" team: - name: ARTEMiS + name: ARTEMiS # If this is set, all players that are not on a team will use this one by default. + rankScale: True # Scales the in-game ranking based on the number of teams within the database, rather than the default scale of ~100 that the game normally uses. mods: use_login_bonus: True From 1bc8648e357bc75a8782799e5c57b98041846b90 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Tue, 11 Jul 2023 09:56:09 +0000 Subject: [PATCH 11/68] I knew I forgot something (fixed config) --- example_config/chuni.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example_config/chuni.yaml b/example_config/chuni.yaml index ed0aca0..687b195 100644 --- a/example_config/chuni.yaml +++ b/example_config/chuni.yaml @@ -4,7 +4,7 @@ server: team: name: ARTEMiS # If this is set, all players that are not on a team will use this one by default. - rankScale: True # Scales the in-game ranking based on the number of teams within the database, rather than the default scale of ~100 that the game normally uses. + rank_scale: True # Scales the in-game ranking based on the number of teams within the database, rather than the default scale of ~100 that the game normally uses. 
mods: use_login_bonus: True From 3c7ceabf4e1ac8e07bd1dac8d932a2539bc57771 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Tue, 11 Jul 2023 10:04:25 +0000 Subject: [PATCH 12/68] And again --- titles/chuni/config.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/titles/chuni/config.py b/titles/chuni/config.py index 48d70d2..9b294ad 100644 --- a/titles/chuni/config.py +++ b/titles/chuni/config.py @@ -30,6 +30,11 @@ class ChuniTeamConfig: return CoreConfig.get_config_field( self.__config, "chuni", "team", "name", default="" ) + @property + def rank_scale(self) -> str: + return CoreConfig.get_config_field( + self.__config, "chuni", "team", "rank_scale", default="False" + ) class ChuniModsConfig: From fc947d36a5fcaa42ae60c5541962d04685536fe1 Mon Sep 17 00:00:00 2001 From: Kevin Trocolli Date: Mon, 24 Jul 2023 00:49:08 -0400 Subject: [PATCH 13/68] mai2: fix get user music for dx and up --- titles/mai2/dx.py | 39 ++++++++++++++++++++++++++++----------- 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/titles/mai2/dx.py b/titles/mai2/dx.py index fba092a..7815e82 100644 --- a/titles/mai2/dx.py +++ b/titles/mai2/dx.py @@ -545,21 +545,38 @@ class Mai2DX(Mai2Base): return {"userId": data["userId"], "length": 0, "userRegionList": []} def handle_get_user_music_api_request(self, data: Dict) -> Dict: - songs = self.data.score.get_best_scores(data["userId"]) + user_id = data.get("userId", 0) + next_index = data.get("nextIndex", 0) + max_ct = data.get("maxCount", 50) + upper_lim = next_index + max_ct music_detail_list = [] - next_index = 0 - if songs is not None: - for song in songs: - tmp = song._asdict() - tmp.pop("id") - tmp.pop("user") - music_detail_list.append(tmp) + if user_id <= 0: + self.logger.warn("handle_get_user_music_api_request: Could not find userid in data, or userId is 0") + return {} + + songs = self.data.score.get_best_scores(user_id) + if songs is None: + self.logger.debug("handle_get_user_music_api_request: get_best_scores returned None!") + return { + "userId": data["userId"], + "nextIndex": 0, + "userMusicList": [], + } - if len(music_detail_list) == data["maxCount"]: - next_index = data["maxCount"] + data["nextIndex"] - break + num_user_songs = len(songs) + for x in range(next_index, upper_lim): + if num_user_songs <= x: + break + + tmp = songs[x]._asdict() + tmp.pop("id") + tmp.pop("user") + music_detail_list.append(tmp) + + next_index = 0 if len(music_detail_list) < max_ct or num_user_songs == upper_lim else upper_lim + self.logger.info(f"Send songs {next_index}-{upper_lim} ({len(music_detail_list)}) out of {num_user_songs} for user {user_id} (next idx {next_index})") return { "userId": data["userId"], "nextIndex": next_index, From 9681f86e33738d0c825055d85eb4922fc0c16ee0 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Thu, 5 Oct 2023 07:56:46 +0000 Subject: [PATCH 14/68] Chunithm SQL documentation Figured I would outline all of the neat SQL tricks you can do with this build, as well as properly document the features. --- docs/game_specific_info.md | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/docs/game_specific_info.md b/docs/game_specific_info.md index d5d1eff..163bcbd 100644 --- a/docs/game_specific_info.md +++ b/docs/game_specific_info.md @@ -88,6 +88,36 @@ After a failed Online Battle the room will be deleted. 
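The get_user_music fix in patch 13 above replaces the old single-window loop with explicit cursor arithmetic: the client sends nextIndex and maxCount, the server answers with one window of rows plus the cursor to send on the next request, and 0 signals that everything has been delivered (the Chunithm handlers use -1 the same way). The contract, reduced to a minimal standalone sketch (names ours, not from the codebase):

```python
def page(items: list, next_index: int, max_count: int):
    # One window of results plus the cursor for the following request;
    # 0 means no further pages remain, as in the maimai handler above.
    window = items[next_index:next_index + max_count]
    new_index = next_index + max_count
    return window, (new_index if new_index < len(items) else 0)

songs = list(range(130))
first, cursor = page(songs, 0, 50)    # songs 0..49, cursor == 50
last, cursor2 = page(songs, 100, 50)  # songs 100..129, cursor2 == 0
assert len(first) == 50 and cursor == 50 and cursor2 == 0
```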
The host is used for the - Timer countdown should be handled globally and not by one user - Game can freeze or can crash if someone (especially the host) leaves the matchmaking +### Rivals + +You can configure up to 4 rivals in Chunithm on a per-user basis. There is no UI to do this currently, so in the database, you can do this: +```sql +INSERT INTO aime.chuni_item_favorite (user, version, favId, favKind) VALUES (<user1>, <version>, <user2>, 2); +INSERT INTO aime.chuni_item_favorite (user, version, favId, favKind) VALUES (<user2>, <version>, <user1>, 2); +``` +Note that the version **must match**, otherwise song lookup may not work. + +### Teams + +You can also configure teams for users to be on. There is no UI to do this currently, so in the database, you can do this: +```sql +INSERT INTO aime.chuni_profile_team (teamName) VALUES (<team name>); +``` +Team names can be regular ASCII, and they will be displayed in-game. + +On smaller installations, you may also wish to enable scaled team rankings. By default, Chunithm determines team ranking within the first 100 teams. This can be configured in the YAML: +```yaml +team: + rank_scale: True # Scales the in-game ranking based on the number of teams within the database, rather than the default scale of ~100 that the game normally uses. +``` + +### Favorite songs +You can set the songs that will be in a user's Favorite Songs category using the following SQL entries: +```sql +INSERT INTO aime.chuni_item_favorite (user, version, favId, favKind) VALUES (<user id>, <version>, <song id>, 1); +``` +The songId is based on the actual ID within your version of Chunithm. + ## crossbeats REV. From b5ccd6794015d22165ce240896ee6885f9596be5 Mon Sep 17 00:00:00 2001 From: Kevin Trocolli Date: Thu, 5 Oct 2023 22:16:50 -0400 Subject: [PATCH 15/68] db: add memcache toggle --- core/config.py | 6 ++++++ core/data/cache.py | 2 +- example_config/core.yaml | 1 + 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/core/config.py b/core/config.py index 83a941a..14f06f5 100644 --- a/core/config.py +++ b/core/config.py @@ -150,6 +150,12 @@ class DatabaseConfig: default=10000, ) + @property + def enable_memcached(self) -> bool: + return CoreConfig.get_config_field( + self.__config, "core", "database", "enable_memcached", default=True + ) + @property def memcached_host(self) -> str: return CoreConfig.get_config_field( diff --git a/core/data/cache.py b/core/data/cache.py index cabf597..1490826 100644 --- a/core/data/cache.py +++ b/core/data/cache.py @@ -17,7 +17,7 @@ except ModuleNotFoundError: def cached(lifetime: int = 10, extra_key: Any = None) -> Callable: def _cached(func: Callable) -> Callable: - if has_mc: + if has_mc and (cfg and cfg.database.enable_memcached): hostname = "127.0.0.1" if cfg: hostname = cfg.database.memcached_host diff --git a/example_config/core.yaml b/example_config/core.yaml index 1ecb7ff..76ec5b1 100644 --- a/example_config/core.yaml +++ b/example_config/core.yaml @@ -24,6 +24,7 @@ database: sha2_password: False loglevel: "warn" user_table_autoincrement_start: 10000 + enable_memcached: True memcached_host: "localhost" frontend: From 06d95c8c5fa4881b28cdb66ac4d8597b0bd71fd6 Mon Sep 17 00:00:00 2001 From: Midorica Date: Sat, 7 Oct 2023 11:59:01 -0400 Subject: [PATCH 16/68] Adding details under the game specific info doc --- docs/game_specific_info.md | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/docs/game_specific_info.md b/docs/game_specific_info.md index e7d4d96..0d94e56 100644 --- a/docs/game_specific_info.md +++ b/docs/game_specific_info.md @@ -59,6 +59,28 @@ python read.py
--game SDBT --version --binfolder /path/to/game/fold The importer for Chunithm will import: Events, Music, Charge Items and Avatar Accesories. +### Config + +Config file is located in `config/chuni.yaml`. + +| Option | Info | +|------------------|----------------------------------------------------------------------------------------------------------------| +| `name` | If this is set, all players that are not on a team will use this one by default. | +| `rank_scale` | Scales the in-game ranking based on the number of teams within the database | +| `use_login_bonus`| This is used to enable the login bonuses | +| `crypto` | This option is used to enable the TLS Encryption | + + +**If you would like to use network encryption, the following will be required underneath but key, iv and hash are required:** + +```shell +crypto: + encrypted_only: False + keys: + 12: ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000"] + 13: ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000"] +``` + ### Database upgrade Always make sure your database (tables) are up-to-date, to do so go to the `core/data/schema/versions` folder and see @@ -241,6 +263,9 @@ Config file is located in `config/diva.yaml`. | `unlock_all_modules` | Unlocks all modules (costumes) by default, if set to `False` all modules need to be purchased | | `unlock_all_items` | Unlocks all items (customizations) by default, if set to `False` all items need to be purchased | +### Custom PV Lists (databanks) + +In order to use custom PV Lists, simply drop in your .dat files inside of /titles/diva/data/ and make sure they are called PvList0.dat, PvList1.dat, PvList2.dat, PvList3.dat and PvList4.dat exactly. ### Database upgrade @@ -287,9 +312,20 @@ Config file is located in `config/ongeki.yaml`. | Option | Info | |------------------|----------------------------------------------------------------------------------------------------------------| | `enabled_gachas` | Enter all gacha IDs for Card Maker to work, other than default may not work due to missing cards added to them | +| `crypto` | This option is used to enable the TLS Encryption | Note: 1149 and higher are only for Card Maker 1.35 and higher and will be ignored on lower versions. +**If you would like to use network encryption, the following will be required underneath but key, iv and hash are required:** + +```shell +crypto: + encrypted_only: False + keys: + 6: ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000"] + 7: ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000"] +``` + ### Database upgrade Always make sure your database (tables) are up-to-date, to do so go to the `core/data/schema/versions` folder and see From 41fcadfd55b39b5e8e0dbb9749a9dc369ffaa8f4 Mon Sep 17 00:00:00 2001 From: Midorica Date: Sat, 7 Oct 2023 12:04:33 -0400 Subject: [PATCH 17/68] quick fix to markdown file --- docs/game_specific_info.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/game_specific_info.md b/docs/game_specific_info.md index 0d94e56..07a7fdf 100644 --- a/docs/game_specific_info.md +++ b/docs/game_specific_info.md @@ -73,7 +73,7 @@ Config file is located in `config/chuni.yaml`. 
**If you would like to use network encryption, the following will be required underneath but key, iv and hash are required:** -```shell +```yaml crypto: encrypted_only: False keys: @@ -318,7 +318,7 @@ Note: 1149 and higher are only for Card Maker 1.35 and higher and will be ignore **If you would like to use network encryption, the following will be required underneath but key, iv and hash are required:** -```shell +```yaml crypto: encrypted_only: False keys: From 56ddd3b1ccc0131dde24b4c80acaab0d827b67c9 Mon Sep 17 00:00:00 2001 From: Midorica Date: Sat, 7 Oct 2023 12:07:14 -0400 Subject: [PATCH 18/68] fix yet again to the md file --- docs/game_specific_info.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/game_specific_info.md b/docs/game_specific_info.md index 07a7fdf..12a8e86 100644 --- a/docs/game_specific_info.md +++ b/docs/game_specific_info.md @@ -77,7 +77,6 @@ Config file is located in `config/chuni.yaml`. crypto: encrypted_only: False keys: - 12: ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000"] 13: ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000"] ``` @@ -322,7 +321,6 @@ Note: 1149 and higher are only for Card Maker 1.35 and higher and will be ignore crypto: encrypted_only: False keys: - 6: ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000"] 7: ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000"] ``` From 1996f3f356e78fd7747ce84f10f845d6b897bb0e Mon Sep 17 00:00:00 2001 From: Midorica Date: Sat, 7 Oct 2023 12:11:24 -0400 Subject: [PATCH 19/68] fix for chunithm song loading after 600 scores --- titles/chuni/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/titles/chuni/base.py b/titles/chuni/base.py index 2a6385d..1a7f350 100644 --- a/titles/chuni/base.py +++ b/titles/chuni/base.py @@ -588,7 +588,7 @@ class ChuniBase: if len(song_list) >= max_ct: break - if len(song_list) >= next_idx + max_ct: + if len(music_detail) >= next_idx + max_ct: next_idx += max_ct else: next_idx = -1 From bfaadff9f6d73d4ab87c9d87ba55987f76e0e4d4 Mon Sep 17 00:00:00 2001 From: Midorica Date: Sat, 7 Oct 2023 12:31:08 -0400 Subject: [PATCH 20/68] pushing news support customization for chunithm --- docs/game_specific_info.md | 1 + example_config/chuni.yaml | 1 + titles/chuni/base.py | 12 +++++++++++- titles/chuni/config.py | 6 ++++++ 4 files changed, 19 insertions(+), 1 deletion(-) diff --git a/docs/game_specific_info.md b/docs/game_specific_info.md index 12a8e86..722fdda 100644 --- a/docs/game_specific_info.md +++ b/docs/game_specific_info.md @@ -65,6 +65,7 @@ Config file is located in `config/chuni.yaml`. | Option | Info | |------------------|----------------------------------------------------------------------------------------------------------------| +| `news_msg` | If this is set, the news at the top of the main screen will be displayed (up to Chunithm Paradise Lost) | | `name` | If this is set, all players that are not on a team will use this one by default. 
| | `rank_scale` | Scales the in-game ranking based on the number of teams within the database | | `use_login_bonus`| This is used to enable the login bonuses | diff --git a/example_config/chuni.yaml b/example_config/chuni.yaml index 687b195..09e3569 100644 --- a/example_config/chuni.yaml +++ b/example_config/chuni.yaml @@ -1,6 +1,7 @@ server: enable: True loglevel: "info" + news_msg: "" team: name: ARTEMiS # If this is set, all players that are not on a team will use this one by default. diff --git a/titles/chuni/base.py b/titles/chuni/base.py index 1a7f350..0783b55 100644 --- a/titles/chuni/base.py +++ b/titles/chuni/base.py @@ -181,7 +181,17 @@ class ChuniBase: return {"type": data["type"], "length": 0, "gameIdlistList": []} def handle_get_game_message_api_request(self, data: Dict) -> Dict: - return {"type": data["type"], "length": "0", "gameMessageList": []} + return { + "type": data["type"], + "length": 1, + "gameMessageList": [{ + "id": 1, + "type": 1, + "message": f"Welcome to {self.core_cfg.server.name} network!" if not self.game_config.server.news_msg else self.game_config.server.news_msg, + "startDate": "2017-12-05 07:00:00.0", + "endDate": "2099-12-31 00:00:00.0" + }] + } def handle_get_game_ranking_api_request(self, data: Dict) -> Dict: return {"type": data["type"], "gameRankingList": []} diff --git a/titles/chuni/config.py b/titles/chuni/config.py index 9b294ad..2ec1368 100644 --- a/titles/chuni/config.py +++ b/titles/chuni/config.py @@ -19,6 +19,12 @@ class ChuniServerConfig: self.__config, "chuni", "server", "loglevel", default="info" ) ) + + @property + def news_msg(self) -> str: + CoreConfig.get_config_field( + self.__config, "chuni", "server", "news_msg", default="" + ) class ChuniTeamConfig: From 7fc5544c1598de9da61e139de277044d2dcf9b41 Mon Sep 17 00:00:00 2001 From: Midorica Date: Sun, 8 Oct 2023 15:52:41 -0400 Subject: [PATCH 21/68] adding a SAO note to documentation --- docs/game_specific_info.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/game_specific_info.md b/docs/game_specific_info.md index 722fdda..ac27f55 100644 --- a/docs/game_specific_info.md +++ b/docs/game_specific_info.md @@ -574,6 +574,8 @@ python dbutils.py --game SDEW upgrade - Player title is currently static and cannot be changed in-game - QR Card Scanning currently only load a static hero +**Network hashing in GssSite.dll must be disabled** + ### Credits for SAO support: - Midorica - Limited Network Support From bad106ceba393e106a4ec6eb98460f08d9b9938c Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 9 Oct 2023 06:35:14 +0000 Subject: [PATCH 22/68] Fixed typo with game_cfg/game_config This resulted in an exception on Plost and earlier, leading to games not actually working. --- titles/chuni/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/titles/chuni/base.py b/titles/chuni/base.py index 0783b55..21a6719 100644 --- a/titles/chuni/base.py +++ b/titles/chuni/base.py @@ -187,7 +187,7 @@ class ChuniBase: "gameMessageList": [{ "id": 1, "type": 1, - "message": f"Welcome to {self.core_cfg.server.name} network!" if not self.game_config.server.news_msg else self.game_config.server.news_msg, + "message": f"Welcome to {self.core_cfg.server.name} network!" 
if not self.game_cfg.server.news_msg else self.game_cfg.server.news_msg, "startDate": "2017-12-05 07:00:00.0", "endDate": "2099-12-31 00:00:00.0" }] From 0a12e935931dd765b90e99a679fbe18f930ab4ef Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 9 Oct 2023 06:47:55 +0000 Subject: [PATCH 23/68] news_msg config option doesn't actually return anything. Fixed! --- titles/chuni/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/titles/chuni/config.py b/titles/chuni/config.py index 2ec1368..05cc8ad 100644 --- a/titles/chuni/config.py +++ b/titles/chuni/config.py @@ -22,7 +22,7 @@ class ChuniServerConfig: @property def news_msg(self) -> str: - CoreConfig.get_config_field( + return CoreConfig.get_config_field( self.__config, "chuni", "server", "news_msg", default="" ) From 6d592dcbc7b02e0dab03c7f385ddde9538a3c228 Mon Sep 17 00:00:00 2001 From: Midorica Date: Tue, 10 Oct 2023 18:24:54 -0400 Subject: [PATCH 24/68] fixing skin issue with diva profile --- core/data/schema/versions/SBZV_5_rollback.sql | 2 ++ core/data/schema/versions/SBZV_6_upgrade.sql | 2 ++ titles/diva/base.py | 2 +- titles/diva/schema/profile.py | 1 + 4 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 core/data/schema/versions/SBZV_5_rollback.sql create mode 100644 core/data/schema/versions/SBZV_6_upgrade.sql diff --git a/core/data/schema/versions/SBZV_5_rollback.sql b/core/data/schema/versions/SBZV_5_rollback.sql new file mode 100644 index 0000000..4eddc17 --- /dev/null +++ b/core/data/schema/versions/SBZV_5_rollback.sql @@ -0,0 +1,2 @@ +ALTER TABLE diva_profile + DROP skn_eqp, \ No newline at end of file diff --git a/core/data/schema/versions/SBZV_6_upgrade.sql b/core/data/schema/versions/SBZV_6_upgrade.sql new file mode 100644 index 0000000..63ff6b3 --- /dev/null +++ b/core/data/schema/versions/SBZV_6_upgrade.sql @@ -0,0 +1,2 @@ +ALTER TABLE diva_profile + ADD skn_eqp INT NOT NULL DEFAULT 0, \ No newline at end of file diff --git a/titles/diva/base.py b/titles/diva/base.py index 3f9ef12..6db0dbc 100644 --- a/titles/diva/base.py +++ b/titles/diva/base.py @@ -402,7 +402,7 @@ class DivaBase: response += f"&lv_num={profile['lv_num']}" response += f"&lv_pnt={profile['lv_pnt']}" response += f"&vcld_pts={profile['vcld_pts']}" - response += f"&skn_eqp={profile['use_pv_skn_eqp']}" + response += f"&skn_eqp={profile['skn_eqp']}" response += f"&btn_se_eqp={profile['btn_se_eqp']}" response += f"&sld_se_eqp={profile['sld_se_eqp']}" response += f"&chn_sld_se_eqp={profile['chn_sld_se_eqp']}" diff --git a/titles/diva/schema/profile.py b/titles/diva/schema/profile.py index 7107068..7bd6bf0 100644 --- a/titles/diva/schema/profile.py +++ b/titles/diva/schema/profile.py @@ -51,6 +51,7 @@ profile = Table( Column("rgo_sts", Integer, nullable=False, server_default="1"), Column("lv_efct_id", Integer, nullable=False, server_default="0"), Column("lv_plt_id", Integer, nullable=False, server_default="1"), + Column("skn_eqp", Integer, nullable=False, server_default="0"), Column("passwd_stat", Integer, nullable=False, server_default="0"), Column("passwd", String(12), nullable=False, server_default="**********"), Column( From aa9d48ccc9bae133812efe6a619de819029b32a6 Mon Sep 17 00:00:00 2001 From: Midorica Date: Tue, 10 Oct 2023 18:37:18 -0400 Subject: [PATCH 25/68] fixed the diva profile again --- core/data/schema/versions/SBZV_5_rollback.sql | 2 +- core/data/schema/versions/SBZV_6_upgrade.sql | 2 +- titles/diva/__init__.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/core/data/schema/versions/SBZV_5_rollback.sql b/core/data/schema/versions/SBZV_5_rollback.sql index 4eddc17..851d357 100644 --- a/core/data/schema/versions/SBZV_5_rollback.sql +++ b/core/data/schema/versions/SBZV_5_rollback.sql @@ -1,2 +1,2 @@ ALTER TABLE diva_profile - DROP skn_eqp, \ No newline at end of file + DROP skn_eqp; \ No newline at end of file diff --git a/core/data/schema/versions/SBZV_6_upgrade.sql b/core/data/schema/versions/SBZV_6_upgrade.sql index 63ff6b3..d417506 100644 --- a/core/data/schema/versions/SBZV_6_upgrade.sql +++ b/core/data/schema/versions/SBZV_6_upgrade.sql @@ -1,2 +1,2 @@ ALTER TABLE diva_profile - ADD skn_eqp INT NOT NULL DEFAULT 0, \ No newline at end of file + ADD skn_eqp INT NOT NULL DEFAULT 0; \ No newline at end of file diff --git a/titles/diva/__init__.py b/titles/diva/__init__.py index 46ea090..9a9e6ef 100644 --- a/titles/diva/__init__.py +++ b/titles/diva/__init__.py @@ -7,4 +7,4 @@ index = DivaServlet database = DivaData reader = DivaReader game_codes = [DivaConstants.GAME_CODE] -current_schema_version = 5 +current_schema_version = 6 From d6e4db48f4d3d074f9d7653eb9d31aa744e058db Mon Sep 17 00:00:00 2001 From: Dniel97 Date: Sun, 15 Oct 2023 19:04:15 +0200 Subject: [PATCH 26/68] Added maimai DX FESTiVAL PLUS support - Added Card Maker support for FESTiVAL PLUS - Bumped SDEZ database to version 8 - Updated docs for FESTiVAL PLUS --- changelog.md | 7 ++ core/data/schema/versions/SDEZ_7_rollback.sql | 10 ++ core/data/schema/versions/SDEZ_8_upgrade.sql | 10 ++ docs/game_specific_info.md | 37 +++--- readme.md | 2 +- titles/cm/read.py | 1 + titles/mai2/__init__.py | 2 +- titles/mai2/const.py | 18 +-- titles/mai2/dx.py | 35 ++++++ titles/mai2/festival.py | 8 +- titles/mai2/festivalplus.py | 38 ++++++ titles/mai2/index.py | 93 +++++++++----- titles/mai2/schema/profile.py | 116 +++++++++++------- titles/mai2/universe.py | 13 +- 14 files changed, 285 insertions(+), 105 deletions(-) create mode 100644 core/data/schema/versions/SDEZ_7_rollback.sql create mode 100644 core/data/schema/versions/SDEZ_8_upgrade.sql create mode 100644 titles/mai2/festivalplus.py diff --git a/changelog.md b/changelog.md index 40d2e48..75ca217 100644 --- a/changelog.md +++ b/changelog.md @@ -1,6 +1,13 @@ # Changelog Documenting updates to ARTEMiS, to be updated every time the master branch is pushed to. 
+## 20231015 +### maimai DX ++ Added support for FESTiVAL PLUS + +### Card Maker ++ Added support for maimai DX FESTiVAL PLUS + ## 20230716 ### General + Docker files added (#19) diff --git a/core/data/schema/versions/SDEZ_7_rollback.sql b/core/data/schema/versions/SDEZ_7_rollback.sql new file mode 100644 index 0000000..6bb9270 --- /dev/null +++ b/core/data/schema/versions/SDEZ_7_rollback.sql @@ -0,0 +1,10 @@ +ALTER TABLE mai2_profile_detail + DROP COLUMN mapStock; + +ALTER TABLE mai2_profile_extend + DROP COLUMN selectResultScoreViewType; + +ALTER TABLE mai2_profile_option + DROP COLUMN outFrameType, + DROP COLUMN touchVolume, + DROP COLUMN breakSlideVolume; diff --git a/core/data/schema/versions/SDEZ_8_upgrade.sql b/core/data/schema/versions/SDEZ_8_upgrade.sql new file mode 100644 index 0000000..c782d81 --- /dev/null +++ b/core/data/schema/versions/SDEZ_8_upgrade.sql @@ -0,0 +1,10 @@ +ALTER TABLE mai2_profile_detail + ADD mapStock INT NULL AFTER playCount; + +ALTER TABLE mai2_profile_extend + ADD selectResultScoreViewType INT NULL AFTER selectResultDetails; + +ALTER TABLE mai2_profile_option + ADD outFrameType INT NULL AFTER dispCenter, + ADD touchVolume INT NULL AFTER slideVolume, + ADD breakSlideVolume INT NULL AFTER slideVolume; diff --git a/docs/game_specific_info.md b/docs/game_specific_info.md index ac27f55..9f39a7e 100644 --- a/docs/game_specific_info.md +++ b/docs/game_specific_info.md @@ -194,18 +194,19 @@ For versions pre-dx | SDBM | 5 | maimai ORANGE PLUS | | SDCQ | 6 | maimai PiNK | | SDCQ | 7 | maimai PiNK PLUS | -| SDDK | 8 | maimai MURASAKI | -| SDDK | 9 | maimai MURASAKI PLUS | -| SDDZ | 10 | maimai MILK | -| SDDZ | 11 | maimai MILK PLUS | +| SDDK | 8 | maimai MURASAKi | +| SDDK | 9 | maimai MURASAKi PLUS | +| SDDZ | 10 | maimai MiLK | +| SDDZ | 11 | maimai MiLK PLUS | | SDEY | 12 | maimai FiNALE | | SDEZ | 13 | maimai DX | | SDEZ | 14 | maimai DX PLUS | | SDEZ | 15 | maimai DX Splash | | SDEZ | 16 | maimai DX Splash PLUS | -| SDEZ | 17 | maimai DX Universe | -| SDEZ | 18 | maimai DX Universe PLUS | -| SDEZ | 19 | maimai DX Festival | +| SDEZ | 17 | maimai DX UNiVERSE | +| SDEZ | 18 | maimai DX UNiVERSE PLUS | +| SDEZ | 19 | maimai DX FESTiVAL | +| SDEZ | 20 | maimai DX FESTiVAL PLUS | ### Importer @@ -347,15 +348,21 @@ python dbutils.py --game SDDT upgrade ### Support status -* Card Maker 1.30: - * CHUNITHM NEW!!: Yes - * maimai DX UNiVERSE: Yes - * O.N.G.E.K.I. bright: Yes +#### Card Maker 1.30: +* CHUNITHM NEW!!: Yes +* maimai DX UNiVERSE: Yes +* O.N.G.E.K.I. bright: Yes -* Card Maker 1.35: - * CHUNITHM SUN: Yes (NEW PLUS!! up to A032) - * maimai DX FESTiVAL: Yes (up to A035) (UNiVERSE PLUS up to A031) - * O.N.G.E.K.I. bright MEMORY: Yes +#### Card Maker 1.35: +* CHUNITHM: + * NEW!!: Yes + * NEW PLUS!!: Yes (added in A028) + * SUN: Yes (added in A032) +* maimai DX: + * UNiVERSE PLUS: Yes + * FESTiVAL: Yes (added in A031) + * FESTiVAL PLUS: Yes (added in A035) +* O.N.G.E.K.I. bright MEMORY: Yes ### Importer diff --git a/readme.md b/readme.md index 2c49faa..2997338 100644 --- a/readme.md +++ b/readme.md @@ -11,7 +11,7 @@ Games listed below have been tested and confirmed working. 
Only game versions ol + All versions + omnimix + maimai DX - + All versions up to FESTiVAL + + All versions up to FESTiVAL PLUS + Hatsune Miku: Project DIVA Arcade + All versions diff --git a/titles/cm/read.py b/titles/cm/read.py index 059010e..376789d 100644 --- a/titles/cm/read.py +++ b/titles/cm/read.py @@ -205,6 +205,7 @@ class CardMakerReader(BaseReader): "1.20": Mai2Constants.VER_MAIMAI_DX_UNIVERSE, "1.25": Mai2Constants.VER_MAIMAI_DX_UNIVERSE_PLUS, "1.30": Mai2Constants.VER_MAIMAI_DX_FESTIVAL, + "1.35": Mai2Constants.VER_MAIMAI_DX_FESTIVAL_PLUS, } for root, dirs, files in os.walk(base_dir): diff --git a/titles/mai2/__init__.py b/titles/mai2/__init__.py index 2fa3874..4857644 100644 --- a/titles/mai2/__init__.py +++ b/titles/mai2/__init__.py @@ -16,4 +16,4 @@ game_codes = [ Mai2Constants.GAME_CODE_GREEN, Mai2Constants.GAME_CODE, ] -current_schema_version = 7 +current_schema_version = 8 diff --git a/titles/mai2/const.py b/titles/mai2/const.py index 6e9a070..a4c29db 100644 --- a/titles/mai2/const.py +++ b/titles/mai2/const.py @@ -20,13 +20,13 @@ class Mai2Constants: DATE_TIME_FORMAT = "%Y-%m-%d %H:%M:%S" - GAME_CODE = "SBXL" - GAME_CODE_GREEN = "SBZF" - GAME_CODE_ORANGE = "SDBM" - GAME_CODE_PINK = "SDCQ" - GAME_CODE_MURASAKI = "SDDK" - GAME_CODE_MILK = "SDDZ" - GAME_CODE_FINALE = "SDEY" + GAME_CODE = "SBXL" + GAME_CODE_GREEN = "SBZF" + GAME_CODE_ORANGE = "SDBM" + GAME_CODE_PINK = "SDCQ" + GAME_CODE_MURASAKI = "SDDK" + GAME_CODE_MILK = "SDDZ" + GAME_CODE_FINALE = "SDEY" GAME_CODE_DX = "SDEZ" CONFIG_NAME = "mai2.yaml" @@ -52,6 +52,7 @@ class Mai2Constants: VER_MAIMAI_DX_UNIVERSE = 17 VER_MAIMAI_DX_UNIVERSE_PLUS = 18 VER_MAIMAI_DX_FESTIVAL = 19 + VER_MAIMAI_DX_FESTIVAL_PLUS = 20 VERSION_STRING = ( "maimai", @@ -66,7 +67,7 @@ class Mai2Constants: "maimai MURASAKi PLUS", "maimai MiLK", "maimai MiLK PLUS", - "maimai FiNALE", + "maimai FiNALE", "maimai DX", "maimai DX PLUS", "maimai DX Splash", @@ -74,6 +75,7 @@ class Mai2Constants: "maimai DX UNiVERSE", "maimai DX UNiVERSE PLUS", "maimai DX FESTiVAL", + "maimai DX FESTiVAL PLUS", ) @classmethod diff --git a/titles/mai2/dx.py b/titles/mai2/dx.py index e373649..3feccc5 100644 --- a/titles/mai2/dx.py +++ b/titles/mai2/dx.py @@ -542,8 +542,43 @@ class Mai2DX(Mai2Base): } def handle_get_user_region_api_request(self, data: Dict) -> Dict: + """ + class UserRegionList: + regionId: int + playCount: int + created: str + """ return {"userId": data["userId"], "length": 0, "userRegionList": []} + def handle_get_user_rival_data_api_request(self, data: Dict) -> Dict: + user_id = data["userId"] + rival_id = data["rivalId"] + + """ + class UserRivalData: + rivalId: int + rivalName: str + """ + return {"userId": user_id, "userRivalData": {}} + + def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict: + user_id = data["userId"] + rival_id = data["rivalId"] + next_idx = data["nextIndex"] + rival_music_levels = data["userRivalMusicLevelList"] + + """ + class UserRivalMusicList: + class UserRivalMusicDetailList: + level: int + achievement: int + deluxscoreMax: int + + musicId: int + userRivalMusicDetailList: list[UserRivalMusicDetailList] + """ + return {"userId": user_id, "nextIndex": 0, "userRivalMusicList": []} + def handle_get_user_music_api_request(self, data: Dict) -> Dict: user_id = data.get("userId", 0) next_index = data.get("nextIndex", 0) diff --git a/titles/mai2/festival.py b/titles/mai2/festival.py index fcdd4ed..145fa71 100644 --- a/titles/mai2/festival.py +++ b/titles/mai2/festival.py @@ -14,7 +14,7 @@ class 
Mai2Festival(Mai2UniversePlus): def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict: user_data = super().handle_cm_get_user_preview_api_request(data) - # hardcode lastDataVersion for CardMaker 1.35 + # hardcode lastDataVersion for CardMaker user_data["lastDataVersion"] = "1.30.00" return user_data @@ -25,7 +25,13 @@ class Mai2Festival(Mai2UniversePlus): return user_login def handle_get_user_recommend_rate_music_api_request(self, data: Dict) -> Dict: + """ + userRecommendRateMusicIdList: list[int] + """ return {"userId": data["userId"], "userRecommendRateMusicIdList": []} def handle_get_user_recommend_select_music_api_request(self, data: Dict) -> Dict: + """ + userRecommendSelectionMusicIdList: list[int] + """ return {"userId": data["userId"], "userRecommendSelectionMusicIdList": []} diff --git a/titles/mai2/festivalplus.py b/titles/mai2/festivalplus.py new file mode 100644 index 0000000..7deeb98 --- /dev/null +++ b/titles/mai2/festivalplus.py @@ -0,0 +1,38 @@ +from typing import Dict + +from core.config import CoreConfig +from titles.mai2.festival import Mai2Festival +from titles.mai2.const import Mai2Constants +from titles.mai2.config import Mai2Config + + +class Mai2FestivalPlus(Mai2Festival): + def __init__(self, cfg: CoreConfig, game_cfg: Mai2Config) -> None: + super().__init__(cfg, game_cfg) + self.version = Mai2Constants.VER_MAIMAI_DX_FESTIVAL_PLUS + + def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict: + user_data = super().handle_cm_get_user_preview_api_request(data) + + # hardcode lastDataVersion for CardMaker + user_data["lastDataVersion"] = "1.35.00" + return user_data + + def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict: + user_id = data.get("userId", 0) + kind = data.get("kind", 2) + next_index = data.get("nextIndex", 0) + max_ct = data.get("maxCount", 100) + is_all = data.get("isAllFavoriteItem", False) + + """ + class userFavoriteItemList: + orderId: int + id: int + """ + return { + "userId": user_id, + "kind": kind, + "nextIndex": 0, + "userFavoriteItemList": [], + } diff --git a/titles/mai2/index.py b/titles/mai2/index.py index 329c1f6..fdea8db 100644 --- a/titles/mai2/index.py +++ b/titles/mai2/index.py @@ -23,6 +23,7 @@ from titles.mai2.splashplus import Mai2SplashPlus from titles.mai2.universe import Mai2Universe from titles.mai2.universeplus import Mai2UniversePlus from titles.mai2.festival import Mai2Festival +from titles.mai2.festivalplus import Mai2FestivalPlus class Mai2Servlet: @@ -47,7 +48,7 @@ class Mai2Servlet: None, None, None, - Mai2Finale, + Mai2Finale, Mai2DX, Mai2DXPlus, Mai2Splash, @@ -55,6 +56,7 @@ class Mai2Servlet: Mai2Universe, Mai2UniversePlus, Mai2Festival, + Mai2FestivalPlus, ] self.logger = logging.getLogger("mai2") @@ -110,22 +112,34 @@ class Mai2Servlet: ) def setup(self): - if self.game_cfg.uploads.photos and self.game_cfg.uploads.photos_dir and not path.exists(self.game_cfg.uploads.photos_dir): + if ( + self.game_cfg.uploads.photos + and self.game_cfg.uploads.photos_dir + and not path.exists(self.game_cfg.uploads.photos_dir) + ): try: mkdir(self.game_cfg.uploads.photos_dir) except Exception: - self.logger.error(f"Failed to make photo upload directory at {self.game_cfg.uploads.photos_dir}") + self.logger.error( + f"Failed to make photo upload directory at {self.game_cfg.uploads.photos_dir}" + ) - if self.game_cfg.uploads.movies and self.game_cfg.uploads.movies_dir and not path.exists(self.game_cfg.uploads.movies_dir): + if ( + self.game_cfg.uploads.movies + and 
self.game_cfg.uploads.movies_dir + and not path.exists(self.game_cfg.uploads.movies_dir) + ): try: mkdir(self.game_cfg.uploads.movies_dir) except Exception: - self.logger.error(f"Failed to make movie upload directory at {self.game_cfg.uploads.movies_dir}") + self.logger.error( + f"Failed to make movie upload directory at {self.game_cfg.uploads.movies_dir}" + ) def render_POST(self, request: Request, version: int, url_path: str) -> bytes: if url_path.lower() == "ping": return zlib.compress(b'{"returnCode": "1"}') - + elif url_path.startswith("api/movie/"): self.logger.info(f"Movie data: {url_path} - {request.content.getvalue()}") return b"" @@ -140,18 +154,20 @@ class Mai2Servlet: if request.uri.startswith(b"/SDEZ"): if version < 105: # 1.0 internal_ver = Mai2Constants.VER_MAIMAI_DX - elif version >= 105 and version < 110: # Plus + elif version >= 105 and version < 110: # PLUS internal_ver = Mai2Constants.VER_MAIMAI_DX_PLUS elif version >= 110 and version < 115: # Splash internal_ver = Mai2Constants.VER_MAIMAI_DX_SPLASH - elif version >= 115 and version < 120: # Splash Plus + elif version >= 115 and version < 120: # Splash PLUS internal_ver = Mai2Constants.VER_MAIMAI_DX_SPLASH_PLUS - elif version >= 120 and version < 125: # Universe + elif version >= 120 and version < 125: # UNiVERSE internal_ver = Mai2Constants.VER_MAIMAI_DX_UNIVERSE - elif version >= 125 and version < 130: # Universe Plus + elif version >= 125 and version < 130: # UNiVERSE PLUS internal_ver = Mai2Constants.VER_MAIMAI_DX_UNIVERSE_PLUS - elif version >= 130: # Festival + elif version >= 130 and version < 135: # FESTiVAL internal_ver = Mai2Constants.VER_MAIMAI_DX_FESTIVAL + elif version >= 135: # FESTiVAL PLUS + internal_ver = Mai2Constants.VER_MAIMAI_DX_FESTIVAL_PLUS else: if version < 110: # 1.0 @@ -180,15 +196,20 @@ class Mai2Servlet: internal_ver = Mai2Constants.VER_MAIMAI_MILK_PLUS elif version >= 197: # Finale internal_ver = Mai2Constants.VER_MAIMAI_FINALE - - if request.getHeader('Mai-Encoding') is not None or request.getHeader('X-Mai-Encoding') is not None: + + if ( + request.getHeader("Mai-Encoding") is not None + or request.getHeader("X-Mai-Encoding") is not None + ): # The has is some flavor of MD5 of the endpoint with a constant bolted onto the end of it. # See cake.dll's Obfuscator function for details. Hopefully most DLL edits will remove # these two(?) headers to not cause issues, but given the general quality of SEGA data... 
- enc_ver = request.getHeader('Mai-Encoding') + enc_ver = request.getHeader("Mai-Encoding") if enc_ver is None: - enc_ver = request.getHeader('X-Mai-Encoding') - self.logger.debug(f"Encryption v{enc_ver} - User-Agent: {request.getHeader('User-Agent')}") + enc_ver = request.getHeader("X-Mai-Encoding") + self.logger.debug( + f"Encryption v{enc_ver} - User-Agent: {request.getHeader('User-Agent')}" + ) try: unzip = zlib.decompress(req_raw) @@ -231,10 +252,12 @@ class Mai2Servlet: self.logger.debug(f"v{version} GET {url_path}") url_split = url_path.split("/") - if (url_split[0] == "api" and url_split[1] == "movie") or url_split[0] == "movie": + if (url_split[0] == "api" and url_split[1] == "movie") or url_split[ + 0 + ] == "movie": if url_split[2] == "moviestart": - return json.dumps({"moviestart":{"status":"OK"}}).encode() - + return json.dumps({"moviestart": {"status": "OK"}}).encode() + else: request.setResponseCode(404) return b"" @@ -251,20 +274,24 @@ class Mai2Servlet: elif url_split[1].startswith("friend"): self.logger.info(f"v{version} old server friend inquire") return zlib.compress(b"{}") - + else: request.setResponseCode(404) return b"" - + elif url_split[0] == "usbdl": if url_split[1] == "CONNECTIONTEST": self.logger.info(f"v{version} usbdl server test") return b"" - - elif self.game_cfg.deliver.udbdl_enable and path.exists(f"{self.game_cfg.deliver.content_folder}/usb/{url_split[-1]}"): - with open(f"{self.game_cfg.deliver.content_folder}/usb/{url_split[-1]}", 'rb') as f: + + elif self.game_cfg.deliver.udbdl_enable and path.exists( + f"{self.game_cfg.deliver.content_folder}/usb/{url_split[-1]}" + ): + with open( + f"{self.game_cfg.deliver.content_folder}/usb/{url_split[-1]}", "rb" + ) as f: return f.read() - + else: request.setResponseCode(404) return b"" @@ -272,12 +299,18 @@ class Mai2Servlet: elif url_split[0] == "deliver": file = url_split[len(url_split) - 1] self.logger.info(f"v{version} {file} deliver inquire") - self.logger.debug(f"{self.game_cfg.deliver.content_folder}/net_deliver/{file}") - - if self.game_cfg.deliver.enable and path.exists(f"{self.game_cfg.deliver.content_folder}/net_deliver/{file}"): - with open(f"{self.game_cfg.deliver.content_folder}/net_deliver/{file}", 'rb') as f: + self.logger.debug( + f"{self.game_cfg.deliver.content_folder}/net_deliver/{file}" + ) + + if self.game_cfg.deliver.enable and path.exists( + f"{self.game_cfg.deliver.content_folder}/net_deliver/{file}" + ): + with open( + f"{self.game_cfg.deliver.content_folder}/net_deliver/{file}", "rb" + ) as f: return f.read() - + else: request.setResponseCode(404) return b"" diff --git a/titles/mai2/schema/profile.py b/titles/mai2/schema/profile.py index 2b3f38b..211440d 100644 --- a/titles/mai2/schema/profile.py +++ b/titles/mai2/schema/profile.py @@ -40,6 +40,7 @@ detail = Table( Column("charaLockSlot", JSON), Column("contentBit", BigInteger), Column("playCount", Integer), + Column("mapStock", Integer), # new with fes+ Column("eventWatchedDate", String(25)), Column("lastGameId", String(25)), Column("lastRomVersion", String(25)), @@ -100,7 +101,7 @@ detail = Table( ) detail_old = Table( - "maimai_profile_detail", + "maimai_profile_detail", metadata, Column("id", Integer, primary_key=True, nullable=False), Column( @@ -216,6 +217,7 @@ extend = Table( Column("isPhotoAgree", Boolean), Column("isGotoCodeRead", Boolean), Column("selectResultDetails", Boolean), + Column("selectResultScoreViewType", Integer), # new with fes+ Column("sortCategorySetting", Integer), Column("sortMusicSetting", Integer), 
Column("selectedCardList", JSON), @@ -251,6 +253,7 @@ option = Table( Column("touchSize", Integer), Column("starRotate", Integer), Column("dispCenter", Integer), + Column("outFrameType", Integer), # new with fes+ Column("dispChain", Integer), Column("dispRate", Integer), Column("dispBar", Integer), @@ -276,6 +279,8 @@ option = Table( Column("exVolume", Integer), Column("slideSe", Integer), Column("slideVolume", Integer), + Column("breakSlideVolume", Integer), # new with fes+ + Column("touchVolume", Integer), # new with fes+ Column("touchHoldVolume", Integer), Column("damageSeVolume", Integer), Column("headPhoneVolume", Integer), @@ -438,7 +443,11 @@ boss = Table( "maimai_profile_boss", metadata, Column("id", Integer, primary_key=True, nullable=False), - Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False), + Column( + "user", + ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), + nullable=False, + ), Column("pandoraFlagList0", Integer), Column("pandoraFlagList1", Integer), Column("pandoraFlagList2", Integer), @@ -455,23 +464,32 @@ recent_rating = Table( "maimai_profile_recent_rating", metadata, Column("id", Integer, primary_key=True, nullable=False), - Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False), + Column( + "user", + ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), + nullable=False, + ), Column("userRecentRatingList", JSON), UniqueConstraint("user", name="mai2_profile_recent_rating_uk"), mysql_charset="utf8mb4", ) consec_logins = Table( - "mai2_profile_consec_logins", + "mai2_profile_consec_logins", metadata, Column("id", Integer, primary_key=True, nullable=False), - Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False), + Column( + "user", + ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), + nullable=False, + ), Column("version", Integer, nullable=False), Column("logins", Integer), UniqueConstraint("user", "version", name="mai2_profile_consec_logins_uk"), mysql_charset="utf8mb4", ) + class Mai2ProfileData(BaseData): def put_profile_detail( self, user_id: int, version: int, detail_data: Dict, is_dx: bool = True @@ -494,18 +512,22 @@ class Mai2ProfileData(BaseData): return None return result.lastrowid - def get_profile_detail(self, user_id: int, version: int, is_dx: bool = True) -> Optional[Row]: + def get_profile_detail( + self, user_id: int, version: int, is_dx: bool = True + ) -> Optional[Row]: if is_dx: sql = ( select(detail) .where(and_(detail.c.user == user_id, detail.c.version <= version)) .order_by(detail.c.version.desc()) ) - + else: sql = ( select(detail_old) - .where(and_(detail_old.c.user == user_id, detail_old.c.version <= version)) + .where( + and_(detail_old.c.user == user_id, detail_old.c.version <= version) + ) .order_by(detail_old.c.version.desc()) ) @@ -582,11 +604,15 @@ class Mai2ProfileData(BaseData): result = self.execute(conflict) if result is None: - self.logger.warning(f"put_profile_option: failed to update! {user_id} is_dx {is_dx}") + self.logger.warning( + f"put_profile_option: failed to update! 
{user_id} is_dx {is_dx}" + ) return None return result.lastrowid - def get_profile_option(self, user_id: int, version: int, is_dx: bool = True) -> Optional[Row]: + def get_profile_option( + self, user_id: int, version: int, is_dx: bool = True + ) -> Optional[Row]: if is_dx: sql = ( select(option) @@ -596,7 +622,9 @@ class Mai2ProfileData(BaseData): else: sql = ( select(option_old) - .where(and_(option_old.c.user == user_id, option_old.c.version <= version)) + .where( + and_(option_old.c.user == user_id, option_old.c.version <= version) + ) .order_by(option_old.c.version.desc()) ) @@ -689,7 +717,9 @@ class Mai2ProfileData(BaseData): return None return result.fetchall() - def put_web_option(self, user_id: int, version: int, web_opts: Dict) -> Optional[int]: + def put_web_option( + self, user_id: int, version: int, web_opts: Dict + ) -> Optional[int]: web_opts["user"] = user_id web_opts["version"] = version sql = insert(web_opt).values(**web_opts) @@ -698,14 +728,14 @@ class Mai2ProfileData(BaseData): result = self.execute(conflict) if result is None: - self.logger.warning( - f"put_web_option: failed to update! user_id: {user_id}" - ) + self.logger.warning(f"put_web_option: failed to update! user_id: {user_id}") return None return result.lastrowid - + def get_web_option(self, user_id: int, version: int) -> Optional[Row]: - sql = web_opt.select(and_(web_opt.c.user == user_id, web_opt.c.version == version)) + sql = web_opt.select( + and_(web_opt.c.user == user_id, web_opt.c.version == version) + ) result = self.execute(sql) if result is None: @@ -725,7 +755,7 @@ class Mai2ProfileData(BaseData): ) return None return result.lastrowid - + def get_grade_status(self, user_id: int) -> Optional[Row]: sql = grade_status.select(grade_status.c.user == user_id) @@ -742,12 +772,10 @@ class Mai2ProfileData(BaseData): result = self.execute(conflict) if result is None: - self.logger.warning( - f"put_boss_list: failed to update! user_id: {user_id}" - ) + self.logger.warning(f"put_boss_list: failed to update! 
user_id: {user_id}") return None return result.lastrowid - + def get_boss_list(self, user_id: int) -> Optional[Row]: sql = boss.select(boss.c.user == user_id) @@ -759,7 +787,7 @@ class Mai2ProfileData(BaseData): def put_recent_rating(self, user_id: int, rr: Dict) -> Optional[int]: sql = insert(recent_rating).values(user=user_id, userRecentRatingList=rr) - conflict = sql.on_duplicate_key_update({'userRecentRatingList': rr}) + conflict = sql.on_duplicate_key_update({"userRecentRatingList": rr}) result = self.execute(conflict) if result is None: @@ -768,7 +796,7 @@ class Mai2ProfileData(BaseData): ) return None return result.lastrowid - + def get_recent_rating(self, user_id: int) -> Optional[Row]: sql = recent_rating.select(recent_rating.c.user == user_id) @@ -776,27 +804,25 @@ class Mai2ProfileData(BaseData): if result is None: return None return result.fetchone() - - def add_consec_login(self, user_id: int, version: int) -> None: - sql = insert(consec_logins).values( - user=user_id, - version=version, - logins=1 - ) - conflict = sql.on_duplicate_key_update( - logins=consec_logins.c.logins + 1 - ) + def add_consec_login(self, user_id: int, version: int) -> None: + sql = insert(consec_logins).values(user=user_id, version=version, logins=1) + + conflict = sql.on_duplicate_key_update(logins=consec_logins.c.logins + 1) result = self.execute(conflict) if result is None: - self.logger.error(f"Failed to update consecutive login count for user {user_id} version {version}") + self.logger.error( + f"Failed to update consecutive login count for user {user_id} version {version}" + ) def get_consec_login(self, user_id: int, version: int) -> Optional[Row]: - sql = select(consec_logins).where(and_( - consec_logins.c.user==user_id, - consec_logins.c.version==version, - )) + sql = select(consec_logins).where( + and_( + consec_logins.c.user == user_id, + consec_logins.c.version == version, + ) + ) result = self.execute(sql) if result is None: @@ -804,12 +830,12 @@ class Mai2ProfileData(BaseData): return result.fetchone() def reset_consec_login(self, user_id: int, version: int) -> Optional[Row]: - sql = consec_logins.update(and_( - consec_logins.c.user==user_id, - consec_logins.c.version==version, - )).values( - logins=1 - ) + sql = consec_logins.update( + and_( + consec_logins.c.user == user_id, + consec_logins.c.version == version, + ) + ).values(logins=1) result = self.execute(sql) if result is None: diff --git a/titles/mai2/universe.py b/titles/mai2/universe.py index c17f899..d25a295 100644 --- a/titles/mai2/universe.py +++ b/titles/mai2/universe.py @@ -23,10 +23,11 @@ class Mai2Universe(Mai2SplashPlus): return { "userName": p["userName"], "rating": p["playerRating"], - # hardcode lastDataVersion for CardMaker 1.34 + # hardcode lastDataVersion for CardMaker "lastDataVersion": "1.20.00", + # checks if the user is still logged in "isLogin": False, - "isExistSellingCard": False, + "isExistSellingCard": True, } def handle_cm_get_user_data_api_request(self, data: Dict) -> Dict: @@ -70,8 +71,12 @@ class Mai2Universe(Mai2SplashPlus): tmp.pop("cardName") tmp.pop("enabled") - tmp["startDate"] = datetime.strftime(tmp["startDate"], Mai2Constants.DATE_TIME_FORMAT) - tmp["endDate"] = datetime.strftime(tmp["endDate"], Mai2Constants.DATE_TIME_FORMAT) + tmp["startDate"] = datetime.strftime( + tmp["startDate"], Mai2Constants.DATE_TIME_FORMAT + ) + tmp["endDate"] = datetime.strftime( + tmp["endDate"], Mai2Constants.DATE_TIME_FORMAT + ) tmp["noticeStartDate"] = datetime.strftime( tmp["noticeStartDate"], 
Mai2Constants.DATE_TIME_FORMAT ) From 480551f94250b1ebce0d5a359110408f825f4704 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 16 Oct 2023 13:07:05 +0000 Subject: [PATCH 27/68] Fixed logic error leading to strict IP checking always being enabled --- core/allnet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/allnet.py b/core/allnet.py index 6610c23..b0e1c78 100644 --- a/core/allnet.py +++ b/core/allnet.py @@ -147,7 +147,7 @@ class AllnetServlet: resp_dict = {k: v for k, v in vars(resp).items() if v is not None} return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8") - elif not arcade["ip"] or arcade["ip"] is None and self.config.server.strict_ip_checking: + elif (not arcade["ip"] or arcade["ip"] is None) and self.config.server.strict_ip_checking: msg = f"Serial {req.serial} attempted allnet auth from bad IP {req.ip}, but arcade {arcade['id']} has no IP set! (strict checking enabled)." self.data.base.log_event( "allnet", "ALLNET_AUTH_NO_SHOP_IP", logging.ERROR, msg From c78a62de14c7e65abb06845cf07fef5b51e6ebc9 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 16 Oct 2023 13:09:11 +0000 Subject: [PATCH 28/68] Fixed Rival Music retrieval --- titles/chuni/schema/score.py | 30 +++++++++++++++++++++++++++--- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/titles/chuni/schema/score.py b/titles/chuni/schema/score.py index ab26f5f..c31f560 100644 --- a/titles/chuni/schema/score.py +++ b/titles/chuni/schema/score.py @@ -201,9 +201,33 @@ class ChuniScoreData(BaseData): return None return result.lastrowid - def get_rival_music(self, rival_id: int, index: int, max_count: int) -> Optional[List[Dict]]: - sql = select(playlog).where(playlog.c.user == rival_id).limit(max_count).offset(index) + def get_rival_music(self, rival_id: int) -> Optional[List[Dict]]: + sql = select(playlog).where(playlog.c.user == rival_id) result = self.execute(sql) if result is None: return None - return result.fetchall() \ No newline at end of file + return result.fetchall() + def get_rival_music_highest_scores(self, rival_id: int) -> Optional[List[Dict]]: + # Define the subquery to retrieve the highest scoreMax for each level and musicId + subquery = ( + select([ + self.playlog.c.musicId, + self.playlog.c.level, + func.max(self.playlog.c.score) + ]) + .where(self.playlog.c.user == rival_id) + .group_by(self.playlog.c.musicId, self.playlog.c.level) + ) + + # Join the subquery with the playlog table to get the full records + query = select([ + self.playlog, + subquery.scalar() + ]).where(self.playlog.c.user == rival_id) + + result = self.execute(query) + + if result is None: + return None + + return result.fetchall() From d49997f832c02be4eb229c90c48158156d49d386 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 16 Oct 2023 13:09:53 +0000 Subject: [PATCH 29/68] Team Rank Scaling is dead, long live Team Rank Scaling --- titles/chuni/schema/profile.py | 42 ++++------------------------------ 1 file changed, 4 insertions(+), 38 deletions(-) diff --git a/titles/chuni/schema/profile.py b/titles/chuni/schema/profile.py index fa7a4ab..ea70583 100644 --- a/titles/chuni/schema/profile.py +++ b/titles/chuni/schema/profile.py @@ -646,7 +646,7 @@ class ChuniProfileData(BaseData): return None return result.fetchone() - def get_team_rank_actual(self, team_id: int) -> int: + def get_team_rank(self, team_id: int) -> int: # Normal ranking system, likely the one used in the real servers # Query all teams sorted by 'teamPoint' result = self.execute( @@ -663,42 +663,8 @@ 
class ChuniProfileData(BaseData): # Return the rank if found, or a default rank otherwise return rank if rank is not None else 0 - def get_team_rank(self, team_id: int) -> int: - # Scaled ranking system, designed for smaller instances. - # Query all teams sorted by 'teamPoint' - result = self.execute( - select(team.c.id).order_by(team.c.teamPoint.desc()) - ) - - # Count total number of teams - total_teams = self.execute(select(func.count()).select_from(team)).scalar() - - # Get the rank of the team with the given team_id - rank = None - for i, row in enumerate(result, start=1): - if row.id == team_id: - rank = i - break - - # If the team is not found, return default rank - if rank is None: - return 0 - - # Define rank tiers - tiers = { - 1: range(1, int(total_teams * 0.1) + 1), # Rainbow - 2: range(int(total_teams * 0.1) + 1, int(total_teams * 0.4) + 1), # Gold - 3: range(int(total_teams * 0.4) + 1, int(total_teams * 0.7) + 1), # Silver - 4: range(int(total_teams * 0.7) + 1, total_teams + 1), # Grey - } - - # Assign rank based on tier - for tier_rank, tier_range in tiers.items(): - if rank in tier_range: - return tier_rank - - # Return default rank if not found in any tier - return 0 + # RIP scaled team ranking. Gone, but forgotten + # def get_team_rank_scaled(self, team_id: int) -> int: def update_team(self, team_id: int, team_data: Dict) -> bool: team_data["id"] = team_id @@ -736,4 +702,4 @@ class ChuniProfileData(BaseData): total_play_count += row[0] return { "total_play_count": total_play_count - } \ No newline at end of file + } From 35a7525f74776219e41f579556571233d4d673bd Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 16 Oct 2023 13:14:11 +0000 Subject: [PATCH 30/68] Removed extraneous function --- titles/chuni/schema/score.py | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/titles/chuni/schema/score.py b/titles/chuni/schema/score.py index c31f560..67cf141 100644 --- a/titles/chuni/schema/score.py +++ b/titles/chuni/schema/score.py @@ -207,27 +207,3 @@ class ChuniScoreData(BaseData): if result is None: return None return result.fetchall() - def get_rival_music_highest_scores(self, rival_id: int) -> Optional[List[Dict]]: - # Define the subquery to retrieve the highest scoreMax for each level and musicId - subquery = ( - select([ - self.playlog.c.musicId, - self.playlog.c.level, - func.max(self.playlog.c.score) - ]) - .where(self.playlog.c.user == rival_id) - .group_by(self.playlog.c.musicId, self.playlog.c.level) - ) - - # Join the subquery with the playlog table to get the full records - query = select([ - self.playlog, - subquery.scalar() - ]).where(self.playlog.c.user == rival_id) - - result = self.execute(query) - - if result is None: - return None - - return result.fetchall()
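The per-level grouping that leaves SQL here reappears inside the title handler in the next patch. Below is a minimal sketch of that grouping in plain Python; the row keys and response shape follow the handler code in these patches, while the function name and sample data are purely illustrative:

```python
from typing import Any, Dict, List

def group_rival_scores(rows: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    # Collapse flat best-score rows into one entry per musicId,
    # keeping the highest scoreMax seen for each level.
    grouped: Dict[int, Dict[str, Any]] = {}
    for row in rows:
        entry = grouped.setdefault(
            row["musicId"],
            {"musicId": row["musicId"], "length": 0, "userRivalMusicDetailList": []},
        )
        details = entry["userRivalMusicDetailList"]
        existing = next((d for d in details if d["level"] == row["level"]), None)
        if existing is None:
            details.append({"level": row["level"], "scoreMax": row["scoreMax"], "scoreRank": row["scoreRank"]})
        elif row["scoreMax"] > existing["scoreMax"]:
            existing["scoreMax"] = row["scoreMax"]
            existing["scoreRank"] = row["scoreRank"]
    for entry in grouped.values():
        entry["length"] = len(entry["userRivalMusicDetailList"])
    return list(grouped.values())

rows = [
    {"musicId": 184, "level": 3, "scoreMax": 990000, "scoreRank": 4},
    {"musicId": 184, "level": 3, "scoreMax": 1001000, "scoreRank": 5},
]
print(group_rival_scores(rows))  # one musicId entry, one level detail, scoreMax 1001000
```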
From 3ef40fe85e04484d73319142217bb184db87dd09 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 16 Oct 2023 13:15:39 +0000 Subject: [PATCH 31/68] Removed Team Rank Scaling and fixed Rival Music Data. Also cleaned up extraneous functions --- titles/chuni/base.py | 106 ++++++++++++++++++++----------------------- 1 file changed, 49 insertions(+), 57 deletions(-) diff --git a/titles/chuni/base.py b/titles/chuni/base.py index 21a6719..775b09b 100644 --- a/titles/chuni/base.py +++ b/titles/chuni/base.py @@ -200,12 +200,30 @@ class ChuniBase: return {"type": data["type"], "length": 0, "gameSaleList": []} def handle_get_game_setting_api_request(self, data: Dict) -> Dict: - reboot_start = datetime.strftime( - datetime.now() - timedelta(hours=4), self.date_time_format - ) - reboot_end = datetime.strftime( - datetime.now() - timedelta(hours=3), self.date_time_format - ) + # if reboot start/end time is not defined use the default behavior of being a few hours ago + if self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "": + reboot_start = datetime.strftime( + datetime.utcnow() + timedelta(hours=6), self.date_time_format + ) + reboot_end = datetime.strftime( + datetime.utcnow() + timedelta(hours=7), self.date_time_format + ) + else: + # get current datetime in JST + current_jst = datetime.now(pytz.timezone('Asia/Tokyo')).date() + + # parse config start/end times into datetime + reboot_start_time = datetime.strptime(self.core_cfg.title.reboot_start_time, "%H:%M") + reboot_end_time = datetime.strptime(self.core_cfg.title.reboot_end_time, "%H:%M") + + # offset datetimes with current date/time + reboot_start_time = reboot_start_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo')) + reboot_end_time = reboot_end_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo')) + + # create strings for use in gameSetting + reboot_start = reboot_start_time.strftime(self.date_time_format) + reboot_end = reboot_end_time.strftime(self.date_time_format) + return { "gameSetting": { "dataVersion": "1.00.00", @@ -385,26 +403,24 @@ class ChuniBase: } def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict: - m = self.data.score.get_rival_music(data["rivalId"], data["nextIndex"], data["maxCount"]) - if m is None: - return {} - + rival_id = data["rivalId"] + next_index = int(data["nextIndex"]) + max_count = int(data["maxCount"]) user_rival_music_list = [] - for music in m: - actual_music_id = self.data.static.get_song(music["musicId"]) - if actual_music_id is None: - music_id = music["musicId"] - else: - music_id = actual_music_id["songId"] + + # Fetch all the rival music entries for the user + all_entries = self.data.score.get_rival_music(rival_id) + + # Process the entries based on max_count and nextIndex + for music in all_entries[next_index:]: + music_id = music["musicId"] level = music["level"] score = music["score"] rank = music["rank"] - # Find the existing entry for the current musicId in the user_rival_music_list + # Create a music entry for the current music_id if it's unique music_entry = next((entry for entry in user_rival_music_list if entry["musicId"] == music_id), None) - if music_entry is None: - # If the entry doesn't exist, create a new entry music_entry = { "musicId": music_id, "length": 0, @@ -412,52 +428,32 @@ class ChuniBase: } user_rival_music_list.append(music_entry) - # Check if the current score is higher than the previous highest score for the level - level_entry = next( - ( - entry - for entry in music_entry["userRivalMusicDetailList"] - if entry["level"] == level - ), - None, - ) - if level_entry is None or score >
level_entry["scoreMax"]: - # If the level entry doesn't exist or the score is higher, update or add the entry + # Create a level entry for the current level if it's unique or has a higher score + level_entry = next((entry for entry in music_entry["userRivalMusicDetailList"] if entry["level"] == level), None) + if level_entry is None: level_entry = { "level": level, "scoreMax": score, "scoreRank": rank } + music_entry["userRivalMusicDetailList"].append(level_entry) + elif score > level_entry["scoreMax"]: + level_entry["scoreMax"] = score + level_entry["scoreRank"] = rank - if level_entry not in music_entry["userRivalMusicDetailList"]: - music_entry["userRivalMusicDetailList"].append(level_entry) - + # Calculate the length for each "musicId" by counting the unique levels + for music_entry in user_rival_music_list: music_entry["length"] = len(music_entry["userRivalMusicDetailList"]) + # Prepare the result dictionary with user rival music data result = { "userId": data["userId"], "rivalId": data["rivalId"], - "nextIndex": -1, - "userRivalMusicList": user_rival_music_list + "nextIndex": str(next_index + len(all_entries) if len(all_entries) <= len(user_rival_music_list) else -1), + "userRivalMusicList": user_rival_music_list[:max_count] } return result - def handle_get_user_rival_music_api_requestded(self, data: Dict) -> Dict: - m = self.data.score.get_rival_music(data["rivalId"], data["nextIndex"], data["maxCount"]) - if m is None: - return {} - - userRivalMusicList = [] - for music in m: - self.logger.debug(music["point"]) - - return { - "userId": data["userId"], - "rivalId": data["rivalId"], - "nextIndex": -1 - - } - def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict: user_fav_item_list = [] @@ -711,11 +707,7 @@ class ChuniBase: if team: team_id = team["id"] team_name = team["teamName"] - # Determine whether to use scaled ranks, or original system - if self.game_cfg.team.rank_scale: - team_rank = self.data.profile.get_team_rank(team["id"]) - else: - team_rank = self.data.profile.get_team_rank_actual(team["id"]) + team_rank = self.data.profile.get_team_rank(team["id"]) # Don't return anything if no team name has been defined for defaults and there is no team set for the player if not profile["teamId"] and team_name == "": @@ -888,4 +880,4 @@ class ChuniBase: return { "userId": data["userId"], "userNetBattleData": {"recentNBSelectMusicList": []}, - } \ No newline at end of file + } From 540d7fc2c2b4fb383f301afdd3b8835132838af6 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 16 Oct 2023 13:18:23 +0000 Subject: [PATCH 32/68] Added reboot time support --- titles/chuni/new.py | 30 ++++++++++++++++++++++++------ 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/titles/chuni/new.py b/titles/chuni/new.py index 40dee9b..8a658bf 100644 --- a/titles/chuni/new.py +++ b/titles/chuni/new.py @@ -3,6 +3,7 @@ from datetime import datetime, timedelta from random import randint from typing import Dict +import pytz from core.config import CoreConfig from titles.chuni.const import ChuniConstants from titles.chuni.database import ChuniData @@ -31,12 +32,29 @@ class ChuniNew(ChuniBase): match_end = datetime.strftime( datetime.utcnow() + timedelta(hours=16), self.date_time_format ) - reboot_start = datetime.strftime( - datetime.utcnow() + timedelta(hours=6), self.date_time_format - ) - reboot_end = datetime.strftime( - datetime.utcnow() + timedelta(hours=7), self.date_time_format - ) + # if reboot start/end time is not defined use the default behavior of being a few hours ago + if 
self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "": + reboot_start = datetime.strftime( + datetime.utcnow() + timedelta(hours=6), self.date_time_format + ) + reboot_end = datetime.strftime( + datetime.utcnow() + timedelta(hours=7), self.date_time_format + ) + else: + # get current datetime in JST + current_jst = datetime.now(pytz.timezone('Asia/Tokyo')).date() + + # parse config start/end times into datetime + reboot_start_time = datetime.strptime(self.core_cfg.title.reboot_start_time, "%H:%M") + reboot_end_time = datetime.strptime(self.core_cfg.title.reboot_end_time, "%H:%M") + + # offset datetimes with current date/time + reboot_start_time = reboot_start_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo')) + reboot_end_time = reboot_end_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo')) + + # create strings for use in gameSetting + reboot_start = reboot_start_time.strftime(self.date_time_format) + reboot_end = reboot_end_time.strftime(self.date_time_format) return { "gameSetting": { "isMaintenance": False, From e18b87ee5cb33c8ae5b0d44f2235f3b09eb2e92f Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 16 Oct 2023 13:20:00 +0000 Subject: [PATCH 33/68] Added reboot time support --- titles/cm/base.py | 30 ++++++++++++++++++++++++------ 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/titles/cm/base.py b/titles/cm/base.py index dae6ecb..587ceb5 100644 --- a/titles/cm/base.py +++ b/titles/cm/base.py @@ -4,6 +4,7 @@ import json import logging from enum import Enum +import pytz from core.config import CoreConfig from core.data.cache import cached from titles.cm.const import CardMakerConstants @@ -61,12 +62,29 @@ class CardMakerBase: } def handle_get_game_setting_api_request(self, data: Dict) -> Dict: - reboot_start = date.strftime( - datetime.now() + timedelta(hours=3), self.date_time_format - ) - reboot_end = date.strftime( - datetime.now() + timedelta(hours=4), self.date_time_format - ) + # if reboot start/end time is not defined use the default behavior of being a few hours ago + if self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "": + reboot_start = datetime.strftime( + datetime.utcnow() + timedelta(hours=6), self.date_time_format + ) + reboot_end = datetime.strftime( + datetime.utcnow() + timedelta(hours=7), self.date_time_format + ) + else: + # get current datetime in JST + current_jst = datetime.now(pytz.timezone('Asia/Tokyo')).date() + + # parse config start/end times into datetime + reboot_start_time = datetime.strptime(self.core_cfg.title.reboot_start_time, "%H:%M") + reboot_end_time = datetime.strptime(self.core_cfg.title.reboot_end_time, "%H:%M") + + # offset datetimes with current date/time + reboot_start_time = reboot_start_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo')) + reboot_end_time = reboot_end_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo')) + + # create strings for use in gameSetting + reboot_start = reboot_start_time.strftime(self.date_time_format) + reboot_end = reboot_end_time.strftime(self.date_time_format) # grab the dict with all games version numbers from user config games_ver = self.game_cfg.version.version(self.version) From 8d289ca0666c6935f1c8d9c63681fbf7660bda19 Mon Sep 17 00:00:00 2001 From: EmmyHeart 
Date: Mon, 16 Oct 2023 13:20:37 +0000 Subject: [PATCH 34/68] Added reboot time support --- titles/ongeki/base.py | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/titles/ongeki/base.py b/titles/ongeki/base.py index ccf24f5..c72c086 100644 --- a/titles/ongeki/base.py +++ b/titles/ongeki/base.py @@ -4,6 +4,7 @@ import json import logging from enum import Enum +import pytz from core.config import CoreConfig from core.data.cache import cached from titles.ongeki.const import OngekiConstants @@ -103,12 +104,30 @@ class OngekiBase: self.version = OngekiConstants.VER_ONGEKI def handle_get_game_setting_api_request(self, data: Dict) -> Dict: - reboot_start = date.strftime( - datetime.now() + timedelta(hours=3), self.date_time_format - ) - reboot_end = date.strftime( - datetime.now() + timedelta(hours=4), self.date_time_format - ) + # if reboot start/end time is not defined use the default behavior of being a few hours ago + if self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "": + reboot_start = datetime.strftime( + datetime.utcnow() + timedelta(hours=6), self.date_time_format + ) + reboot_end = datetime.strftime( + datetime.utcnow() + timedelta(hours=7), self.date_time_format + ) + else: + # get current datetime in JST + current_jst = datetime.now(pytz.timezone('Asia/Tokyo')).date() + + # parse config start/end times into datetime + reboot_start_time = datetime.strptime(self.core_cfg.title.reboot_start_time, "%H:%M") + reboot_end_time = datetime.strptime(self.core_cfg.title.reboot_end_time, "%H:%M") + + # offset datetimes with current date/time + reboot_start_time = reboot_start_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo')) + reboot_end_time = reboot_end_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo')) + + # create strings for use in gameSetting + reboot_start = reboot_start_time.strftime(self.date_time_format) + reboot_end = reboot_end_time.strftime(self.date_time_format) + return { "gameSetting": { "dataVersion": "1.00.00",
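Patches 31 through 35 paste the same reboot-window block into chuni, cm, ongeki, and mai2, so it could arguably live in one shared helper. The sketch below shows what that might look like; the helper itself is hypothetical and not part of these patches, and `fmt` stands in for each title's `date_time_format`:

```python
from datetime import datetime, timedelta
from typing import Tuple

import pytz

JST = pytz.timezone("Asia/Tokyo")

def resolve_reboot_window(start: str, end: str, fmt: str) -> Tuple[str, str]:
    # No times configured: fall back to a window a few hours ahead, as the patches do.
    if start == "" or end == "":
        now = datetime.utcnow()
        return (
            datetime.strftime(now + timedelta(hours=6), fmt),
            datetime.strftime(now + timedelta(hours=7), fmt),
        )

    # Times configured: pin the HH:MM values to today's date in JST.
    today = datetime.now(JST).date()

    def pin(hhmm: str) -> str:
        t = datetime.strptime(hhmm, "%H:%M")
        return t.replace(year=today.year, month=today.month, day=today.day, tzinfo=JST).strftime(fmt)

    return pin(start), pin(end)
```

Each title's handler would then reduce to a single call, e.g. `reboot_start, reboot_end = resolve_reboot_window(cfg.reboot_start_time, cfg.reboot_end_time, self.date_time_format)`.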
From 9cff321857b809fb97e3313bc9517f2c64bc632c Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 16 Oct 2023 13:21:07 +0000 Subject: [PATCH 35/68] Added reboot time support --- titles/mai2/base.py | 36 +++++++++++++++++++++++++++++----- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/titles/mai2/base.py b/titles/mai2/base.py index ad0433f..26677ac 100644 --- a/titles/mai2/base.py +++ b/titles/mai2/base.py @@ -5,6 +5,7 @@ from base64 import b64decode from os import path, stat, remove from PIL import ImageFile +import pytz from core.config import CoreConfig from titles.mai2.const import Mai2Constants from titles.mai2.config import Mai2Config @@ -21,22 +22,47 @@ class Mai2Base: self.can_deliver = False self.can_usbdl = False self.old_server = "" - + if self.core_config.server.is_develop and self.core_config.title.port > 0: self.old_server = f"http://{self.core_config.title.hostname}:{self.core_config.title.port}/SDEY/197/" - + else: self.old_server = f"http://{self.core_config.title.hostname}/SDEY/197/" def handle_get_game_setting_api_request(self, data: Dict): - return { + # if reboot start/end time is not defined use the default behavior of being a few hours ago + if self.core_config.title.reboot_start_time == "" or self.core_config.title.reboot_end_time == "": + reboot_start = datetime.strftime( + datetime.utcnow() + timedelta(hours=6), self.date_time_format + ) + reboot_end = datetime.strftime( + datetime.utcnow() + timedelta(hours=7), self.date_time_format + ) + else: + # get current datetime in JST + current_jst = datetime.now(pytz.timezone('Asia/Tokyo')).date() + + # parse config start/end times into datetime + reboot_start_time = datetime.strptime(self.core_config.title.reboot_start_time, "%H:%M") + reboot_end_time = datetime.strptime(self.core_config.title.reboot_end_time, "%H:%M") + + # offset datetimes with current date/time + reboot_start_time = reboot_start_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo')) + reboot_end_time = reboot_end_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo')) + + # create strings for use in gameSetting + reboot_start = reboot_start_time.strftime(self.date_time_format) + reboot_end = reboot_end_time.strftime(self.date_time_format) + + + return { "isDevelop": False, "isAouAccession": False, "gameSetting": { "isMaintenance": False, "requestInterval": 1800, - "rebootStartTime": "2020-01-01 07:00:00.0", - "rebootEndTime": "2020-01-01 07:59:59.0", + "rebootStartTime": reboot_start, + "rebootEndTime": reboot_end, "movieUploadLimit": 100, "movieStatus": 1, "movieServerUri": self.old_server + "api/movie" if self.game_config.uploads.movies else "movie", From 4f0a5f60ab67801ef64b62602a9f3ddbb2d161f5 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 16 Oct 2023 13:22:18 +0000 Subject: [PATCH 36/68] Added config for reboot time support --- core/config.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/core/config.py b/core/config.py index 14f06f5..3a772f7 100644 --- a/core/config.py +++ b/core/config.py @@ -85,6 +85,18 @@ class TitleConfig: self.__config, "core", "title", "port", default=8080 ) + @property + def reboot_start_time(self) -> str: + return CoreConfig.get_config_field( + self.__config, "core", "title", "reboot_start_time", default="" + ) + + @property + def reboot_end_time(self) -> str: + return CoreConfig.get_config_field( + self.__config, "core", "title", "reboot_end_time", default="" + ) + class DatabaseConfig: def __init__(self, parent_config: "CoreConfig") -> None: From 300cd10abfc6eadd8334a216661aac5f038ff9a1 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 16 Oct 2023 13:23:56 +0000 Subject: [PATCH 37/68] Updated example config with reboot time You can leave this option out to disable reboot times, and the games will use the prior mechanism of maintenance always being ~4 hours before the current time --- example_config/core.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/example_config/core.yaml b/example_config/core.yaml index 76ec5b1..7bf097e 100644 --- a/example_config/core.yaml +++ b/example_config/core.yaml @@ -13,6 +13,9 @@ title: loglevel: "info" hostname: "localhost" port: 8080 + reboot_start_time: "04:00" + reboot_end_time: "05:00" + database: host: "localhost" From 8c114532e83018c4dcd0464a2e42e6bad9609ef4 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Mon, 16 Oct 2023 13:30:10 +0000 Subject: [PATCH 38/68] Updated config documentation with IP enforcement and reboot times --- docs/config.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/config.md b/docs/config.md index 18f90eb..9de57f9 100644 --- a/docs/config.md +++ b/docs/config.md @@ -6,11 +6,15 @@ - `name`: Name for the server, used by some games in their default MOTDs.
Default `ARTEMiS` - `is_develop`: Flags that the server is a development instance without a proxy standing in front of it. Setting this to `False` tells the server, among other things, not to listen for SSL itself, because the proxy is expected to handle everything SSL-related. Default `True` - `threading`: Flags that `reactor.run` should be called via the `Thread` standard library. May provide a speed boost, but removes the ability to kill the server via `Ctrl + C`. Default: `False` +- `check_arcade_ip`: Checks IPs against the `arcade` table in the database, if one is defined. Default `False` +- `strict_ip_checking`: Rejects clients if there is no IP in the `arcade` table for the respective arcade. - `log_dir`: Directory to store logs. Server MUST have read and write permissions to this directory or you will have issues. Default `logs` ## Title - `loglevel`: Logging level for the title server. Default `info` - `hostname`: Hostname that gets sent to clients to tell them where to connect. Games must be able to connect to your server via the hostname or IP you specify here. Note that most games will reject `localhost` or `127.0.0.1`. Default `localhost` - `port`: Port that the title server will listen for connections on. Set to 0 to use the Allnet handler to reduce the port footprint. Default `8080` +- `reboot_start_time`: 24 hour JST time that clients will see as the start of the maintenance period. Leave blank for no maintenance time. Default: "" +- `reboot_end_time`: 24 hour JST time that clients will see as the end of the maintenance period. Leave blank for no maintenance time. Default: "" ## Database - `host`: Host of the database. Default `localhost` - `username`: Username of the account the server should connect to the database with. Default `aime` From 862907b82ad58d4f6854c06c25dd5248c92c676e Mon Sep 17 00:00:00 2001 From: Dniel97 Date: Mon, 16 Oct 2023 21:36:26 +0200 Subject: [PATCH 39/68] mai2: Fixed cards not showing up --- titles/mai2/schema/item.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/titles/mai2/schema/item.py b/titles/mai2/schema/item.py index d57a993..5bc011c 100644 --- a/titles/mai2/schema/item.py +++ b/titles/mai2/schema/item.py @@ -333,7 +333,7 @@ class Mai2ItemData(BaseData): if result is None: return None return result.fetchone() - + def put_character_(self, user_id: int, char_data: Dict) -> Optional[int]: char_data["user"] = user_id sql = insert(character).values(**char_data) @@ -489,6 +489,8 @@ class Mai2ItemData(BaseData): else: sql = card.select(and_(card.c.user == user_id, card.c.cardKind == kind)) + sql = sql.order_by(card.c.startDate.desc()) + result = self.execute(sql) if result is None: return None
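The two-line fix above boils down to a descending ORDER BY on `startDate`, so the newest card comes back first. A self-contained sketch of the same pattern follows; the table here is a toy stand-in, not the real mai2 schema:

```python
from sqlalchemy import Column, DateTime, Integer, MetaData, Table, select

metadata = MetaData()

# Toy stand-in for the mai2 card table, with only the columns the query touches.
card = Table(
    "card",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("user", Integer),
    Column("cardKind", Integer),
    Column("startDate", DateTime),
)

# Same shape as the patched query: filter on the user, newest startDate first.
stmt = select(card).where(card.c.user == 10000).order_by(card.c.startDate.desc())
print(stmt)  # e.g. SELECT ... FROM card WHERE card."user" = :user_1 ORDER BY card."startDate" DESC
```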
default="cert/title.key" + ) + + @property + def ssl_cert(self) -> str: + return CoreConfig.get_config_field( + self.__config, "core", "title", "ssl_cert", default="cert/title.pem" + ) @property def reboot_start_time(self) -> str: diff --git a/docs/config.md b/docs/config.md index 9de57f9..81fb43d 100644 --- a/docs/config.md +++ b/docs/config.md @@ -13,6 +13,9 @@ - `loglevel`: Logging level for the title server. Default `info` - `hostname`: Hostname that gets sent to clients to tell them where to connect. Games must be able to connect to your server via the hostname or IP you spcify here. Note that most games will reject `localhost` or `127.0.0.1`. Default `localhost` - `port`: Port that the title server will listen for connections on. Set to 0 to use the Allnet handler to reduce the port footprint. Default `8080` +- `port_ssl`: Port that the secure title server will listen for connections on. Set to 0 to use the Allnet handler to reduce the port footprint. Default `0` +- `ssl_key`: Location of the ssl server key for the secure title server. Ignored if `port_ssl` is set to `0` or `is_develop` set to `False`. Default `cert/title.key` +- `ssl_cert`: Location of the ssl server certificate for the secure title server. Must not be a self-signed SSL. Ignored if `port_ssl` is set to `0` or `is_develop` is set to `False`. Default `cert/title.pem` - `reboot_start_time`: 24 hour JST time that clients will see as the start of maintenance period. Leave blank for no maintenance time. Default: "" - `reboot_end_time`: 24 hour JST time that clients will see as the end of maintenance period. Leave blank for no maintenance time. Default: "" ## Database diff --git a/example_config/core.yaml b/example_config/core.yaml index 7bf097e..b4aac28 100644 --- a/example_config/core.yaml +++ b/example_config/core.yaml @@ -13,6 +13,9 @@ title: loglevel: "info" hostname: "localhost" port: 8080 + port_ssl: 0 + ssl_cert: "cert/title.crt" + ssl_key: "cert/title.key" reboot_start_time: "04:00" reboot_end_time: "05:00" diff --git a/index.py b/index.py index 0c36e81..72a40fa 100644 --- a/index.py +++ b/index.py @@ -279,6 +279,7 @@ if __name__ == "__main__": allnet_server_str = f"tcp:{cfg.allnet.port}:interface={cfg.server.listen_address}" title_server_str = f"tcp:{cfg.title.port}:interface={cfg.server.listen_address}" + title_https_server_str = f"ssl:{cfg.title.port_ssl}:interface={cfg.server.listen_address}:privateKey={cfg.title.ssl_key}:certKey={cfg.title.ssl_cert}" adb_server_str = f"tcp:{cfg.aimedb.port}:interface={cfg.server.listen_address}" frontend_server_str = ( f"tcp:{cfg.frontend.port}:interface={cfg.server.listen_address}" @@ -312,6 +313,11 @@ if __name__ == "__main__": endpoints.serverFromString(reactor, title_server_str).listen( server.Site(dispatcher) ) + + if cfg.title.port_ssl > 0: + endpoints.serverFromString(reactor, title_https_server_str).listen( + server.Site(dispatcher) + ) if cfg.server.threading: Thread(target=reactor.run, args=(False,)).start() From 54c77c047eb4a7206e165a1c362a67ff96384e70 Mon Sep 17 00:00:00 2001 From: Midorica Date: Wed, 18 Oct 2023 18:44:03 -0400 Subject: [PATCH 41/68] fixing duplicated is_cab value from machine table --- core/data/schema/arcade.py | 1 - 1 file changed, 1 deletion(-) diff --git a/core/data/schema/arcade.py b/core/data/schema/arcade.py index ce7c30a..2fb8e43 100644 --- a/core/data/schema/arcade.py +++ b/core/data/schema/arcade.py @@ -41,7 +41,6 @@ machine = Table( Column("country", String(3)), # overwrites if not null Column("timezone", String(255)), 
Column("ota_enable", Boolean), - Column("is_cab", Boolean), Column("memo", String(255)), Column("is_cab", Boolean), Column("data", JSON), From 8fcd227b331bad66f0778b4f05e70d2e41ac53e6 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Fri, 20 Oct 2023 03:24:25 +0000 Subject: [PATCH 42/68] Added functions for checking if Aime card is locked or banned --- core/data/schema/card.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/core/data/schema/card.py b/core/data/schema/card.py index d8f5fc0..a95684e 100644 --- a/core/data/schema/card.py +++ b/core/data/schema/card.py @@ -64,6 +64,27 @@ class CardData(BaseData): return int(card["user"]) + def get_card_banned(self, access_code: str) -> Optional[bool]: + """ + Given a 20 digit access code as a string, check if the card is banned + """ + card = self.get_card_by_access_code(access_code) + if card is None: + return None + if card["is_banned"]: + return True + return False + def get_card_locked(self, access_code: str) -> Optional[bool]: + """ + Given a 20 digit access code as a string, check if the card is locked + """ + card = self.get_card_by_access_code(access_code) + if card is None: + return None + if card["is_locked"]: + return True + return False + def delete_card(self, card_id: int) -> None: sql = aime_card.delete(aime_card.c.id == card_id) From 719ae9cfb1cc8418918d5eed358b48ac8272e8d4 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Fri, 20 Oct 2023 03:25:12 +0000 Subject: [PATCH 43/68] Added checks for if Aime card is locked or banned, and pass that status to the game --- core/aimedb.py | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/core/aimedb.py b/core/aimedb.py index 3ac8b9d..5388600 100644 --- a/core/aimedb.py +++ b/core/aimedb.py @@ -145,7 +145,15 @@ class AimedbProtocol(Protocol): def handle_lookup(self, data: bytes, resp_code: int) -> ADBBaseResponse: req = ADBLookupRequest(data) user_id = self.data.card.get_user_id_from_card(req.access_code) + is_banned = self.data.card.get_card_banned(req.access_code) + is_locked = self.data.card.get_card_locked(req.access_code) + if is_banned and is_locked: + ret.head.status = ADBStatus.BAN_SYS_USER + elif is_banned: + ret.head.status = ADBStatus.BAN_SYS + elif is_locked: + ret.head.status = ADBStatus.LOCK_USER ret = ADBLookupResponse.from_req(req.head, user_id) self.logger.info( @@ -157,8 +165,17 @@ class AimedbProtocol(Protocol): req = ADBLookupRequest(data) user_id = self.data.card.get_user_id_from_card(req.access_code) + is_banned = self.data.card.get_card_banned(req.access_code) + is_locked = self.data.card.get_card_locked(req.access_code) + ret = ADBLookupExResponse.from_req(req.head, user_id) - + if is_banned and is_locked: + ret.head.status = ADBStatus.BAN_SYS_USER + elif is_banned: + ret.head.status = ADBStatus.BAN_SYS + elif is_locked: + ret.head.status = ADBStatus.LOCK_USER + self.logger.info( f"access_code {req.access_code} -> user_id {ret.user_id}" ) @@ -237,7 +254,7 @@ class AimedbProtocol(Protocol): def handle_register(self, data: bytes, resp_code: int) -> bytes: req = ADBLookupRequest(data) user_id = -1 - + if self.config.server.allow_user_registration: user_id = self.data.user.create_user() From 3f8c62044c5631580371cb945d664b113eb8e264 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Fri, 20 Oct 2023 03:26:51 +0000 Subject: [PATCH 44/68] Optimized rival music list, added ranking API, and began work on Team Courses (need help) --- titles/chuni/base.py | 70 ++++++++++++++++++++++++++++---------------- 1 file changed, 45 
insertions(+), 25 deletions(-) diff --git a/titles/chuni/base.py b/titles/chuni/base.py index 775b09b..ffc512d 100644 --- a/titles/chuni/base.py +++ b/titles/chuni/base.py @@ -194,7 +194,8 @@ class ChuniBase: } def handle_get_game_ranking_api_request(self, data: Dict) -> Dict: - return {"type": data["type"], "gameRankingList": []} + rankings = self.data.score.get_rankings(self.version) + return {"type": data["type"], "gameRankingList": rankings} def handle_get_game_sale_api_request(self, data: Dict) -> Dict: return {"type": data["type"], "length": 0, "gameSaleList": []} @@ -401,7 +402,6 @@ class ChuniBase: "userId": data["userId"], "userRivalData": userRivalData } - def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict: rival_id = data["rivalId"] next_index = int(data["nextIndex"]) @@ -415,10 +415,10 @@ class ChuniBase: for music in all_entries[next_index:]: music_id = music["musicId"] level = music["level"] - score = music["score"] - rank = music["rank"] + score = music["scoreMax"] + rank = music["scoreRank"] - # Create a music entry for the current music_id if it's unique + # Create a music entry for the current music_id music_entry = next((entry for entry in user_rival_music_list if entry["musicId"] == music_id), None) if music_entry is None: music_entry = { @@ -428,20 +428,15 @@ class ChuniBase: } user_rival_music_list.append(music_entry) - # Create a level entry for the current level if it's unique or has a higher score - level_entry = next((entry for entry in music_entry["userRivalMusicDetailList"] if entry["level"] == level), None) - if level_entry is None: - level_entry = { - "level": level, - "scoreMax": score, - "scoreRank": rank - } - music_entry["userRivalMusicDetailList"].append(level_entry) - elif score > level_entry["scoreMax"]: - level_entry["scoreMax"] = score - level_entry["scoreRank"] = rank + # Create a level entry for the current level + level_entry = { + "level": level, + "scoreMax": score, + "scoreRank": rank + } + music_entry["userRivalMusicDetailList"].append(level_entry) - # Calculate the length for each "musicId" by counting the unique levels + # Calculate the length for each "musicId" by counting the levels for music_entry in user_rival_music_list: music_entry["length"] = len(music_entry["userRivalMusicDetailList"]) @@ -729,17 +724,42 @@ class ChuniBase: def handle_get_team_course_setting_api_request(self, data: Dict) -> Dict: return { "userId": data["userId"], - "length": 0, - "nextIndex": 0, - "teamCourseSettingList": [], + "length": 1, + "nextIndex": -1, + "teamCourseSettingList": [ + { + "orderId": 1, + "courseId": 1, + "classId": 1, + "ruleId": 1, + "courseName": "Test", + "teamCourseMusicList": [ + {"track": 184, "type": 1, "level": 3, "selectLevel": -1}, + {"track": 184, "type": 1, "level": 3, "selectLevel": -1}, + {"track": 184, "type": 1, "level": 3, "selectLevel": -1} + ], + "teamCourseRankingInfoList": [], + "recodeDate": "2099-12-31 11:59:99.0", + "isPlayed": False + } + ], } def handle_get_team_course_rule_api_request(self, data: Dict) -> Dict: return { "userId": data["userId"], - "length": 0, - "nextIndex": 0, - "teamCourseRuleList": [], + "length": 1, + "nextIndex": -1, + "teamCourseRuleList": [ + { + "recoveryLife": 0, + "clearLife": 100, + "damageMiss": 1, + "damageAttack": 1, + "damageJustice": 1, + "damageJusticeC": 1 + } + ], } def handle_upsert_user_all_api_request(self, data: Dict) -> Dict: @@ -813,7 +833,7 @@ class ChuniBase: playlog["playedUserName1"] = self.read_wtf8(playlog["playedUserName1"]) playlog["playedUserName2"] 
= self.read_wtf8(playlog["playedUserName2"]) playlog["playedUserName3"] = self.read_wtf8(playlog["playedUserName3"]) - self.data.score.put_playlog(user_id, playlog) + self.data.score.put_playlog(user_id, playlog, self.version) if "userTeamPoint" in upsert: team_points = upsert["userTeamPoint"] From 32903d979e88ca3d70351e560699013c645ca9d0 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Fri, 20 Oct 2023 03:29:05 +0000 Subject: [PATCH 45/68] Added code for song rankings using romVersion, and ensuring romVersion is set on playlog upsert --- titles/chuni/schema/score.py | 55 ++++++++++++++++++++++++++++++++++-- 1 file changed, 52 insertions(+), 3 deletions(-) diff --git a/titles/chuni/schema/score.py b/titles/chuni/schema/score.py index 67cf141..7712e3f 100644 --- a/titles/chuni/schema/score.py +++ b/titles/chuni/schema/score.py @@ -6,7 +6,7 @@ from sqlalchemy.schema import ForeignKey from sqlalchemy.engine import Row from sqlalchemy.sql import func, select from sqlalchemy.dialects.mysql import insert - +from sqlalchemy.sql.expression import exists from core.data.schema import BaseData, metadata course = Table( @@ -189,9 +189,28 @@ class ChuniScoreData(BaseData): return None return result.fetchall() - def put_playlog(self, aime_id: int, playlog_data: Dict) -> Optional[int]: + def put_playlog(self, aime_id: int, playlog_data: Dict, version: int) -> Optional[int]: + # Calculate the ROM version that should be inserted into the DB, based on the version of the ggame being inserted + # We only need from Version 10 (Plost) and back, as newer versions include romVersion in their upsert + # This matters both for gameRankings, as well as a future DB update to keep version data separate + romVer = { + 10: "1.50.0", + 9: "1.45.0", + 8: "1.40.0", + 7: "1.35.0", + 6: "1.30.0", + 5: "1.25.0", + 4: "1.20.0", + 3: "1.15.0", + 2: "1.10.0", + 1: "1.05.0", + 0: "1.00.0" + } + playlog_data["user"] = aime_id playlog_data = self.fix_bools(playlog_data) + if "romVersion" not in playlog_data: + playlog_data["romVersion"] = romVer.get(version, "1.00.0") sql = insert(playlog).values(**playlog_data) conflict = sql.on_duplicate_key_update(**playlog_data) @@ -201,9 +220,39 @@ class ChuniScoreData(BaseData): return None return result.lastrowid + def get_rankings(self, version: int) -> Optional[List[Dict]]: + # Calculates the ROM version that should be fetched for rankings, based on the game version being retrieved + # This prevents tracks that are not accessible in your version from counting towards the 10 results + romVer = { + 13: "2.10%", + 12: "2.05%", + 11: "2.00%", + 10: "1.50%", + 9: "1.45%", + 8: "1.40%", + 7: "1.35%", + 6: "1.30%", + 5: "1.25%", + 4: "1.20%", + 3: "1.15%", + 2: "1.10%", + 1: "1.05%", + 0: "1.00%" + } + sql = select([playlog.c.musicId.label('id'), func.count(playlog.c.musicId).label('point')]).where((playlog.c.level != 4) & (playlog.c.romVersion.like(romVer.get(version, "%")))).group_by(playlog.c.musicId).order_by(func.count(playlog.c.musicId).desc()).limit(10) + result = self.execute(sql) + + if result is None: + return None + + rows = result.fetchall() + return [dict(row) for row in rows] + def get_rival_music(self, rival_id: int) -> Optional[List[Dict]]: - sql = select(playlog).where(playlog.c.user == rival_id) + sql = select(best_score).where(best_score.c.user == rival_id) + result = self.execute(sql) if result is None: return None return result.fetchall() + From 21cb37001b52a77e66c7d2d74be9d7f92bad8bfe Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Fri, 20 Oct 2023 03:31:36 +0000 Subject: [PATCH 
46/68] Stubbed Team Course functions as they do not currently do anything --- titles/chuni/base.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/titles/chuni/base.py b/titles/chuni/base.py index ffc512d..ce9ef3c 100644 --- a/titles/chuni/base.py +++ b/titles/chuni/base.py @@ -722,6 +722,14 @@ class ChuniBase: }, } def handle_get_team_course_setting_api_request(self, data: Dict) -> Dict: + return { + "userId": data["userId"], + "length": 0, + "nextIndex": -1, + "teamCourseSettingList": [], + } + + def handle_get_team_course_setting_api_request_proto(self, data: Dict) -> Dict: return { "userId": data["userId"], "length": 1, @@ -746,6 +754,14 @@ class ChuniBase: } def handle_get_team_course_rule_api_request(self, data: Dict) -> Dict: + return { + "userId": data["userId"], + "length": 0, + "nextIndex": -1, + "teamCourseRuleList": [] + } + + def handle_get_team_course_rule_api_request_proto(self, data: Dict) -> Dict: return { "userId": data["userId"], "length": 1, From 990d60cf27c3f89348caffc738bb9dfc87a92d72 Mon Sep 17 00:00:00 2001 From: EmmyHeart Date: Fri, 20 Oct 2023 04:25:08 +0000 Subject: [PATCH 47/68] Forgot to remove rank scale option from the example config, as it does literally nothing now lol --- example_config/chuni.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/example_config/chuni.yaml b/example_config/chuni.yaml index 09e3569..578bcb3 100644 --- a/example_config/chuni.yaml +++ b/example_config/chuni.yaml @@ -5,7 +5,6 @@ server: team: name: ARTEMiS # If this is set, all players that are not on a team will use this one by default. - rank_scale: True # Scales the in-game ranking based on the number of teams within the database, rather than the default scale of ~100 that the game normally uses. mods: use_login_bonus: True From a3d2955fce3ac98a6e63718a8493a345eb6154ff Mon Sep 17 00:00:00 2001 From: phantomlan Date: Sat, 21 Oct 2023 17:22:58 +0200 Subject: [PATCH 48/68] - update docker-compose.yml - update mysqldb to mariadb-alpine - update mysql healthcheck to also include Password - update memcached to alpine counterpart - update phpmyadmin port to not collide with ARTEMiS main app - add commented out options for DB Persistency - update Dockerfile - add read.py to Dockerfile - add docs/INSTALL_DOCKER.md guide --- Dockerfile | 4 +- docker-compose.yml | 31 ++++--- docs/INSTALL_DOCKER.md | 203 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 225 insertions(+), 13 deletions(-) create mode 100644 docs/INSTALL_DOCKER.md diff --git a/Dockerfile b/Dockerfile index e34dfec..507f01f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,10 +12,10 @@ RUN chmod +x entrypoint.sh COPY index.py index.py COPY dbutils.py dbutils.py +COPY read.py read.py ADD core core ADD titles titles -ADD config config ADD log log ADD cert cert -ENTRYPOINT [ "/app/entrypoint.sh" ] \ No newline at end of file +ENTRYPOINT [ "/app/entrypoint.sh" ] diff --git a/docker-compose.yml b/docker-compose.yml index c43b6e5..3e349b0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,22 +5,23 @@ services: build: . 
volumes: - ./aime:/app/aime + - ./config:/app/config environment: CFG_DEV: 1 CFG_CORE_SERVER_HOSTNAME: 0.0.0.0 CFG_CORE_DATABASE_HOST: ma.db CFG_CORE_MEMCACHED_HOSTNAME: ma.memcached - CFG_CORE_AIMEDB_KEY: keyhere + CFG_CORE_AIMEDB_KEY: CFG_CHUNI_SERVER_LOGLEVEL: debug ports: - - "80:80" - - "8443:8443" - - "22345:22345" + - "80:80" + - "8443:8443" + - "22345:22345" - - "8080:8080" - - "8090:8090" + - "8080:8080" + - "8090:8090" depends_on: db: @@ -28,21 +29,29 @@ services: db: hostname: ma.db - image: mysql:8.0.31-debian + image: yobasystems/alpine-mariadb:10.11.5 environment: MYSQL_DATABASE: aime MYSQL_USER: aime MYSQL_PASSWORD: aime MYSQL_ROOT_PASSWORD: AimeRootPassword + MYSQL_CHARSET: utf8 + MYSQL_COLLATION: utf8_general_ci + ##Note: expose port 3306 to allow the read.py importer into the database, comment out when not needed + #ports: + # - "3306:3306" + ##Note: uncomment to allow mysql to create a persistent database, leave commented if you want to rebuild the database from scratch often + #volumes: + # - ./AimeDB:/var/lib/mysql healthcheck: - test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"] + test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost", "-pAimeRootPassword"] timeout: 5s retries: 5 - memcached: hostname: ma.memcached - image: memcached:1.6.17-bullseye + image: memcached:1.6.22-alpine3.18 + command: [ "memcached", "-m", "1024", "-I", "128m" ] phpmyadmin: hostname: ma.phpmyadmin @@ -53,5 +62,5 @@ services: PMA_PASSWORD: AimeRootPassword APACHE_PORT: 8080 ports: - - "8080:8080" + - "9090:8080" diff --git a/docs/INSTALL_DOCKER.md b/docs/INSTALL_DOCKER.md new file mode 100644 index 0000000..f0cf81a --- /dev/null +++ b/docs/INSTALL_DOCKER.md @@ -0,0 +1,203 @@ +# ARTEMiS - Docker Installation Guide +This step-by-step guide will allow you to install a containerized version of ARTEMiS inside Docker; some steps can be skipped assuming you already have the pre-requisite components and modules installed. + +This guide assumes Debian 12 (Bookworm, stable) as the host operating system for most packages and modules. + +## Pre-Requisites: +- Linux-Based Operating System (e.g. Debian, Ubuntu) +- Docker (https://get.docker.com) +- Python 3.9+ +- (optional) Git + +## Install Python 3.9+ and Docker +``` +(if this is a fresh install of the system) +sudo apt update && sudo apt upgrade + +(installs python3 and pip) +sudo apt install python3 python3-pip + +(installs docker) +curl -fsSL https://get.docker.com -o get-docker.sh +sh get-docker.sh + +(optionally install git) +sudo apt install git +``` + +## Get ARTEMiS +If you installed git, clone your choice of ARTEMiS git repository, e.g.: +``` +git clone +``` +If not, download the source package, and unpack it to the folder of your choice. + +## Prepare development/home configuration +To build our Docker setup, first we need to create some folders and copy some files around: +- Create 'aime', 'configs', 'AimeDB', and 'logs' folders in the ARTEMiS root folder (where all source files exist) +- Inside the configs folder, create a 'config' folder, and copy all .yaml files from example_config into config (that is, every file except nginx_example.conf) +- Edit the .yaml files inside configs/config to suit your server needs +- Edit core.yaml inside configs/config: +``` +set server.listen_address: to "0.0.0.0" +set title.hostname: to the machine's IP address, e.g. "192.168.x.x", depending on your network, or the actual hostname if your configuration is already set up for DNS resolution +set database.host: to "ma.db" +set database.memcached_host: to "ma.memcached" +set aimedb.key: to "" +``` +
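A quick way to verify those edits before bringing the stack up is to load the file back and check the values. A hedged sketch, assuming PyYAML is installed and that the keys sit where the example core.yaml places them:

```python
import yaml

# Load the config the compose file mounts into the app container.
with open("configs/config/core.yaml", encoding="utf-8") as f:
    cfg = yaml.safe_load(f)

# The containers resolve each other by compose hostname, not localhost.
assert cfg["server"]["listen_address"] == "0.0.0.0"
assert cfg["database"]["host"] == "ma.db"
print("core.yaml looks ready for docker compose")
```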
"192.168.x.x", depending on your network, or actual hostname if your configuration is already set for dns resolve +set database.host: to "ma.db" +set database.memcached_host: to "ma.memcached" +set aimedb.key: to "" +``` + +## Running Docker Compose +After configuring, go to ARTEMiS root folder, and execute: +``` +docker compose up -d +("-d" argument means detached or daemon, meaning you will regain control of your terminal and Containers will run in background) +``` +This will start pulling and building required images from network, after it's done, a development server should be running, with server accessible under machine's IP, frontend with port 8090, and PHPMyAdmin under port 9090. + +To turn off the server, from ARTEMiS root folder, execute: +``` +docker compose down +``` + +If you changed some files around, and don't see your changes applied, execute: +``` +(turn off the server) +docker compose down +(rebuild) +docker compose build +(turn on) +docker compose up -d +``` + +If you need to see logs from containers running, execute: +``` +docker compose logs +``` +add '-f' to the end if you want to follow logs. + +## Running commands +If you need to execute python scripts supplied with the application, use `docker compose exec app python3