From a83b717821f1937cb325de1ef11f2cbe3f7c76f1 Mon Sep 17 00:00:00 2001
From: Hay1tsme
Date: Sun, 19 Feb 2023 14:52:20 -0500
Subject: [PATCH 01/10] carry over database functions from megaime

---
 core/data/database.py | 1703 ++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 1702 insertions(+), 1 deletion(-)

diff --git a/core/data/database.py b/core/data/database.py
index 800b5d0..963d016 100644
--- a/core/data/database.py
+++ b/core/data/database.py
@@ -4,11 +4,14 @@ from sqlalchemy.orm import scoped_session, sessionmaker
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy import create_engine
 from logging.handlers import TimedRotatingFileHandler
+from datetime import datetime
+import importlib, os, json
 from hashlib import sha256
 
 from core.config import CoreConfig
 from core.data.schema import *
+from core.utils import Utils
 
 class Data:
     def __init__(self, cfg: CoreConfig) -> None:
@@ -50,4 +53,1702 @@ class Data:
             coloredlogs.install(cfg.database.loglevel, logger=self.logger, fmt=log_fmt_str)
             self.logger.handler_set = True # type: ignore
 
-    
\ No newline at end of file
+    def create_database(self):
+        self.logger.info("Creating databases...")
+        try:
+            metadata.create_all(self.__engine.connect())
+        except SQLAlchemyError as e:
+            self.logger.error(f"Failed to create databases! {e}")
+            return
+
+        games = Utils.get_all_titles()
+        for game_dir, game_mod in games.items():
+            try:
+                title_db = game_mod.database(self.config)
+                metadata.create_all(self.__engine.connect())
+
+                self.base.set_schema_ver(game_mod.current_schema_version, game_mod.game_codes[0])
+
+            except Exception as e:
+                self.logger.warning(f"Could not load database schema from {game_dir} - {e}")
+
+        self.logger.info(f"Setting base_schema_ver to {self.schema_ver_latest}")
+        self.base.set_schema_ver(self.schema_ver_latest)
+
+        self.logger.info(f"Setting user auto_increment to {self.config.database.user_table_autoincrement_start}")
+        self.user.reset_autoincrement(self.config.database.user_table_autoincrement_start)
+
+    def recreate_database(self):
+        self.logger.info("Dropping all databases...")
+        self.base.execute("SET FOREIGN_KEY_CHECKS=0")
+        try:
+            metadata.drop_all(self.__engine.connect())
+        except SQLAlchemyError as e:
+            self.logger.error(f"Failed to drop databases! {e}")
+            return
+
+        for root, dirs, files in os.walk("./titles"):
+            for dir in dirs:
+                if not dir.startswith("__"):
+                    try:
+                        mod = importlib.import_module(f"titles.{dir}")
+
+                        try:
+                            title_db = mod.database(self.config)
+                            metadata.drop_all(self.__engine.connect())
+
+                        except Exception as e:
+                            self.logger.warning(f"Could not load database schema from {dir} - {e}")
+
+                    except ImportError as e:
+                        self.logger.warning(f"Failed to load database schema dir {dir} - {e}")
+                        break
+
+        self.base.execute("SET FOREIGN_KEY_CHECKS=1")
+
+        self.create_database()
+
+    def migrate_database(self, game: str, version: int, action: str) -> None:
+        old_ver = self.base.get_schema_ver(game)
+        sql = ""
+
+        if old_ver is None:
+            self.logger.error(f"Schema for game {game} does not exist, did you run the creation script?")
+            return
+
+        if old_ver == version:
+            self.logger.info(f"Schema for game {game} is already version {old_ver}, nothing to do")
+            return
+
+        if not os.path.exists(f"core/data/schema/versions/{game.upper()}_{version}_{action}.sql"):
+            self.logger.error(f"Could not find {action} script {game.upper()}_{version}_{action}.sql in core/data/schema/versions folder")
+            return
+
+        with open(f"core/data/schema/versions/{game.upper()}_{version}_{action}.sql", "r", encoding="utf-8") as f:
+            sql = f.read()
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Error executing sql script!")
+            return None
+
+        result = self.base.set_schema_ver(version, game)
+        if result is None:
+            self.logger.error("Error setting version in schema_version table!")
+            return None
+
+        self.logger.info(f"Successfully migrated {game} to schema version {version}")
+
+    def dump_db(self):
+        dbname = self.config.database.name
+
+        self.logger.info("Database dumper for use with the reworked schema")
+        self.logger.info("Dumping users...")
+
+        sql = f"SELECT * FROM `{dbname}`.`user`"
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Failed")
+            return None
+        users = result.fetchall()
+
+        user_list: List[Dict[str, Any]] = []
+        for usr in users:
+            user_list.append({
+                "id": usr["id"],
+                "username": usr["username"],
+                "email": usr["email"],
+                "password": usr["password"],
+                "permissions": usr["permissions"],
+                "created_date": datetime.strftime(usr["created_date"], "%Y-%m-%d %H:%M:%S"),
+                "last_login_date": datetime.strftime(usr["accessed_date"], "%Y-%m-%d %H:%M:%S"),
+            })
+
+        self.logger.info(f"Done, found {len(user_list)} users")
+        with open("dbdump-user.json", "w", encoding="utf-8") as f:
+            f.write(json.dumps(user_list))
+            self.logger.info(f"Saved as dbdump-user.json")
+
+        self.logger.info("Dumping cards...")
+
+        sql = f"SELECT * FROM `{dbname}`.`card`"
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Failed")
+            return None
+        cards = result.fetchall()
+
+        card_list: List[Dict[str, Any]] = []
+        for crd in cards:
+            card_list.append({
+                "id": crd["id"],
+                "user": crd["user"],
+                "access_code": crd["access_code"],
+                "is_locked": crd["is_locked"],
+                "is_banned": crd["is_banned"],
+                "created_date": datetime.strftime(crd["created_date"], "%Y-%m-%d %H:%M:%S"),
+                "last_login_date": datetime.strftime(crd["accessed_date"], "%Y-%m-%d %H:%M:%S"),
+            })
+
+        self.logger.info(f"Done, found {len(card_list)} cards")
+        with open("dbdump-card.json", "w", encoding="utf-8") as f:
+            f.write(json.dumps(card_list))
+            self.logger.info(f"Saved as dbdump-card.json")
+
+        self.logger.info("Dumping arcades...")
+
+        sql = f"SELECT * FROM `{dbname}`.`arcade`"
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Failed")
+            return None
+        arcades = result.fetchall()
+
+        arcade_list: List[Dict[str, Any]] = []
+        for arc in arcades:
+            arcade_list.append({
+                "id": arc["id"],
+                "name": arc["name"],
+                "nickname": arc["name"],
+                "country": None,
+                "country_id": None,
+                "state": None,
+                "city": None,
+                "region_id": None,
+                "timezone": None,
+            })
+
+        self.logger.info(f"Done, found {len(arcade_list)} arcades")
+        with open("dbdump-arcade.json", "w", encoding="utf-8") as f:
+            f.write(json.dumps(arcade_list))
+            self.logger.info(f"Saved as dbdump-arcade.json")
+
+        self.logger.info("Dumping machines...")
+
+        sql = f"SELECT * FROM `{dbname}`.`machine`"
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Failed")
+            return None
+        machines = result.fetchall()
+
+        machine_list: List[Dict[str, Any]] = []
+        for mech in machines:
+            if "country" in mech["data"]:
+                country = mech["data"]["country"]
+            else:
+                country = None
+
+            if "ota_enable" in mech["data"]:
+                ota_enable = mech["data"]["ota_enable"]
+            else:
+                ota_enable = None
+
+            machine_list.append({
+                "id": mech["id"],
+                "arcade": mech["arcade"],
+                "serial": mech["keychip"],
+                "game": mech["game"],
+                "board": None,
+                "country": country,
+                "timezone": None,
+                "ota_enable": ota_enable,
+                "is_cab": False,
+            })
+
+        self.logger.info(f"Done, found {len(machine_list)} machines")
+        with open("dbdump-machine.json", "w", encoding="utf-8") as f:
+            f.write(json.dumps(machine_list))
+            self.logger.info(f"Saved as dbdump-machine.json")
+
+        self.logger.info("Dumping arcade owners...")
+
+        sql = f"SELECT * FROM `{dbname}`.`arcade_owner`"
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Failed")
+            return None
+        arcade_owners = result.fetchall()
+
+        owner_list: List[Dict[str, Any]] = []
+        for owner in arcade_owners:
+            owner_list.append(owner._asdict())
+
+        self.logger.info(f"Done, found {len(owner_list)} arcade owners")
+        with open("dbdump-arcade_owner.json", "w", encoding="utf-8") as f:
+            f.write(json.dumps(owner_list))
+            self.logger.info(f"Saved as dbdump-arcade_owner.json")
+
+        self.logger.info("Dumping profiles...")
+
+        sql = f"SELECT * FROM `{dbname}`.`profile`"
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Failed")
+            return None
+        profiles = result.fetchall()
+
+        profile_list: Dict[str, List[Dict[str, Any]]] = {}
+        for pf in profiles:
+            game = pf["game"]
+
+            if game not in profile_list:
+                profile_list[game] = []
+
+            profile_list[game].append({
+                "id": pf["id"],
+                "user": pf["user"],
+                "version": pf["version"],
+                "use_count": pf["use_count"],
+                "name": pf["name"],
+                "game_id": pf["game_id"],
+                "mods": pf["mods"],
+                "data": pf["data"],
+            })
+
+        self.logger.info(f"Done, found profiles for {len(profile_list)} games")
+        with open("dbdump-profile.json", "w", encoding="utf-8") as f:
+            f.write(json.dumps(profile_list))
+            self.logger.info(f"Saved as dbdump-profile.json")
+
+        self.logger.info("Dumping scores...")
+
+        sql = f"SELECT * FROM `{dbname}`.`score`"
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Failed")
+            return None
+        scores = result.fetchall()
+
+        score_list: Dict[str, List[Dict[str, Any]]] = {}
+        for sc in scores:
+            game = sc["game"]
+
+            if game not in score_list:
+                score_list[game] = []
+
+            score_list[game].append({
+                "id": sc["id"],
+                "user": sc["user"],
+                "version": sc["version"],
+                "song_id": sc["song_id"],
+                "chart_id": sc["chart_id"],
+                "score1": sc["score1"],
+                "score2": sc["score2"],
+                "fc1": sc["fc1"],
+                "fc2": sc["fc2"],
+                "cleared": sc["cleared"],
+                "grade": sc["grade"],
+                "data": sc["data"],
+            })
+
+        self.logger.info(f"Done, found scores for {len(score_list)} games")
+        with open("dbdump-score.json", "w", encoding="utf-8") as f:
+            f.write(json.dumps(score_list))
+            self.logger.info(f"Saved as dbdump-score.json")
+
+        self.logger.info("Dumping achievements...")
+
+        sql = f"SELECT * FROM `{dbname}`.`achievement`"
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Failed")
+            return None
+        achievements = result.fetchall()
+
+        achievement_list: Dict[str, List[Dict[str, Any]]] = {}
+        for ach in achievements:
+            game = ach["game"]
+
+            if game not in achievement_list:
+                achievement_list[game] = []
+
+            achievement_list[game].append({
+                "id": ach["id"],
+                "user": ach["user"],
+                "version": ach["version"],
+                "type": ach["type"],
+                "achievement_id": ach["achievement_id"],
+                "data": ach["data"],
+            })
+
+        self.logger.info(f"Done, found achievements for {len(achievement_list)} games")
+        with open("dbdump-achievement.json", "w", encoding="utf-8") as f:
+            f.write(json.dumps(achievement_list))
+            self.logger.info(f"Saved as dbdump-achievement.json")
+
+        self.logger.info("Dumping items...")
+
+        sql = f"SELECT * FROM `{dbname}`.`item`"
+
+        result = self.base.execute(sql)
+        if result is None:
+            self.logger.error("Failed")
+            return None
+        items = result.fetchall()
+
+        item_list: Dict[str, List[Dict[str, Any]]] = {}
+        for itm in items:
+            game = itm["game"]
+
+            if game not in item_list:
+                item_list[game] = []
+
+            item_list[game].append({
+                "id": itm["id"],
+                "user": itm["user"],
+                "version": itm["version"],
+                "type": itm["type"],
+                "item_id": itm["item_id"],
+                "data": itm["data"],
+            })
+
+        self.logger.info(f"Done, found items for {len(item_list)} games")
+        with open("dbdump-item.json", "w", encoding="utf-8") as f:
+            f.write(json.dumps(item_list))
+            self.logger.info(f"Saved as dbdump-item.json")
+
+    def restore_from_old_schema(self):
+        # Import the tables we expect to be there
+        from core.data.schema.user import aime_user
+        from core.data.schema.card import aime_card
+        from core.data.schema.arcade import arcade, machine, arcade_owner
+        from sqlalchemy.dialects.mysql import Insert
+
+        # Make sure that all the tables we're trying to access exist
+        self.create_database()
+
+        # Import the data, making sure that dependencies are accounted for
+        if os.path.exists("dbdump-user.json"):
+            users = []
+            with open("dbdump-user.json", "r", encoding="utf-8") as f:
+                users = json.load(f)
+
+            self.logger.info(f"Load {len(users)} users")
+
+            for user in users:
+                sql = Insert(aime_user).values(**user)
+
+                conflict = sql.on_duplicate_key_update(**user)
+
+                result = self.base.execute(conflict)
+                if result is None:
+                    self.logger.error(f"Failed to insert user {user['id']}")
+                    continue
+                self.logger.info(f"Inserted user {user['id']} -> {result.lastrowid}")
+
+        if os.path.exists("dbdump-card.json"):
+            cards = []
+            with open("dbdump-card.json", "r", encoding="utf-8") as f:
+                cards = json.load(f)
+
+            self.logger.info(f"Load {len(cards)} cards")
+
+            for card in cards:
+                sql = Insert(aime_card).values(**card)
+
+                conflict = sql.on_duplicate_key_update(**card)
+
+                result = self.base.execute(conflict)
+                if result is None:
+                    self.logger.error(f"Failed to insert card {card['id']}")
+                    continue
+                self.logger.info(f"Inserted card {card['id']} -> {result.lastrowid}")
+
+        if os.path.exists("dbdump-arcade.json"):
+            arcades = []
+            with open("dbdump-arcade.json", "r", encoding="utf-8") as f:
+                arcades = json.load(f)
+
+            self.logger.info(f"Load {len(arcades)} arcades")
+
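+            # Each row is restored with an upsert: SQLAlchemy's MySQL-dialect
+            # Insert combined with on_duplicate_key_update() emits
+            # INSERT ... ON DUPLICATE KEY UPDATE, so re-running the restore
+            # updates rows that already exist instead of failing on duplicate
+            # keys. Every table below follows this same pattern.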
+ for ac in arcades: + sql = Insert(arcade).values(**ac) + + conflict = sql.on_duplicate_key_update(**ac) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert arcade {ac['id']}") + continue + self.logger.info(f"Inserted arcade {ac['id']} -> {result.lastrowid}") + + if os.path.exists("dbdump-arcade_owner.json"): + ac_owners = [] + with open("dbdump-arcade_owner.json", "r", encoding="utf-8") as f: + ac_owners = json.load(f) + + self.logger.info(f"Load {len(ac_owners)} arcade owners") + + for owner in ac_owners: + sql = Insert(arcade_owner).values(**owner) + + conflict = sql.on_duplicate_key_update(**owner) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert arcade_owner {owner['user']}") + continue + self.logger.info(f"Inserted arcade_owner {owner['user']} -> {result.lastrowid}") + + if os.path.exists("dbdump-machine.json"): + mechs = [] + with open("dbdump-machine.json", "r", encoding="utf-8") as f: + mechs = json.load(f) + + self.logger.info(f"Load {len(mechs)} machines") + + for mech in mechs: + sql = Insert(machine).values(**mech) + + conflict = sql.on_duplicate_key_update(**mech) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert machine {mech['id']}") + continue + self.logger.info(f"Inserted machine {mech['id']} -> {result.lastrowid}") + + # Now the fun part, grabbing all our scores, profiles, items, and achievements and trying + # to conform them to our current, freeform schema. This will be painful... + profiles = {} + items = {} + scores = {} + achievements = {} + + if os.path.exists("dbdump-profile.json"): + with open("dbdump-profile.json", "r", encoding="utf-8") as f: + profiles = json.load(f) + + self.logger.info(f"Load {len(profiles)} profiles") + + if os.path.exists("dbdump-item.json"): + with open("dbdump-item.json", "r", encoding="utf-8") as f: + items = json.load(f) + + self.logger.info(f"Load {len(items)} items") + + if os.path.exists("dbdump-score.json"): + with open("dbdump-score.json", "r", encoding="utf-8") as f: + scores = json.load(f) + + self.logger.info(f"Load {len(scores)} scores") + + if os.path.exists("dbdump-achievement.json"): + with open("dbdump-achievement.json", "r", encoding="utf-8") as f: + achievements = json.load(f) + + self.logger.info(f"Load {len(achievements)} achievements") + + # Chuni / Chusan + if os.path.exists("titles/chuni/schema"): + from titles.chuni.schema.item import character, item, duel, map, map_area + from titles.chuni.schema.profile import profile, profile_ex, option, option_ex + from titles.chuni.schema.profile import recent_rating, activity, charge, emoney + from titles.chuni.schema.profile import overpower + from titles.chuni.schema.score import best_score, course + + chuni_profiles = [] + chuni_items = [] + chuni_scores = [] + + if "SDBT" in profiles: + chuni_profiles = profiles["SDBT"] + if "SDBT" in items: + chuni_items = items["SDBT"] + if "SDBT" in scores: + chuni_scores = scores["SDBT"] + if "SDHD" in profiles: + chuni_profiles += profiles["SDHD"] + if "SDHD" in items: + chuni_items += items["SDHD"] + if "SDHD" in scores: + chuni_scores += scores["SDHD"] + + self.logger.info(f"Importing {len(chuni_profiles)} chunithm/chunithm new profiles") + + for pf in chuni_profiles: + if type(pf["data"]) is not dict: + pf["data"] = json.loads(pf["data"]) + pf_data = pf["data"] + + # data + if "userData" in pf_data: + pf_data["userData"]["userName"] = bytes([ord(c) for c in 
pf_data["userData"]["userName"]]).decode("utf-8") + pf_data["userData"]["user"] = pf["user"] + pf_data["userData"]["version"] = pf["version"] + pf_data["userData"].pop("accessCode") + + if pf_data["userData"]["lastRomVersion"].startswith("2."): + pf_data["userData"]["version"] += 10 + + pf_data["userData"] = self.base.fix_bools(pf_data["userData"]) + + sql = Insert(profile).values(**pf_data["userData"]) + conflict = sql.on_duplicate_key_update(**pf_data["userData"]) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile data for {pf['user']}") + continue + self.logger.info(f"Inserted chuni profile for {pf['user']} ->{result.lastrowid}") + + # data_ex + if "userDataEx" in pf_data and len(pf_data["userDataEx"]) > 0: + pf_data["userDataEx"][0]["user"] = pf["user"] + pf_data["userDataEx"][0]["version"] = pf["version"] + + pf_data["userDataEx"] = self.base.fix_bools(pf_data["userDataEx"][0]) + + sql = Insert(profile_ex).values(**pf_data["userDataEx"]) + conflict = sql.on_duplicate_key_update(**pf_data["userDataEx"]) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile data_ex for {pf['user']}") + continue + self.logger.info(f"Inserted chuni profile data_ex for {pf['user']} ->{result.lastrowid}") + + # option + if "userGameOption" in pf_data: + pf_data["userGameOption"]["user"] = pf["user"] + + pf_data["userGameOption"] = self.base.fix_bools(pf_data["userGameOption"]) + + sql = Insert(option).values(**pf_data["userGameOption"]) + conflict = sql.on_duplicate_key_update(**pf_data["userGameOption"]) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile options for {pf['user']}") + continue + self.logger.info(f"Inserted chuni profile options for {pf['user']} ->{result.lastrowid}") + + # option_ex + if "userGameOptionEx" in pf_data and len(pf_data["userGameOptionEx"]) > 0: + pf_data["userGameOptionEx"][0]["user"] = pf["user"] + + pf_data["userGameOptionEx"] = self.base.fix_bools(pf_data["userGameOptionEx"][0]) + + sql = Insert(option_ex).values(**pf_data["userGameOptionEx"]) + conflict = sql.on_duplicate_key_update(**pf_data["userGameOptionEx"]) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile option_ex for {pf['user']}") + continue + self.logger.info(f"Inserted chuni profile option_ex for {pf['user']} ->{result.lastrowid}") + + # recent_rating + if "userRecentRatingList" in pf_data: + rr = { + "user": pf["user"], + "recentRating": pf_data["userRecentRatingList"] + } + + sql = Insert(recent_rating).values(**rr) + conflict = sql.on_duplicate_key_update(**rr) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile recent_rating for {pf['user']}") + continue + self.logger.info(f"Inserted chuni profile recent_rating for {pf['user']} ->{result.lastrowid}") + + # activity + if "userActivityList" in pf_data: + for act in pf_data["userActivityList"]: + act["user"] = pf["user"] + + sql = Insert(activity).values(**act) + conflict = sql.on_duplicate_key_update(**act) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile activity for {pf['user']}") + else: + self.logger.info(f"Inserted chuni profile activity for {pf['user']} ->{result.lastrowid}") + + # charge + if "userChargeList" in pf_data: + for cg in pf_data["userChargeList"]: + cg["user"] = 
pf["user"] + + cg = self.base.fix_bools(cg) + + sql = Insert(charge).values(**cg) + conflict = sql.on_duplicate_key_update(**cg) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile charge for {pf['user']}") + else: + self.logger.info(f"Inserted chuni profile charge for {pf['user']} ->{result.lastrowid}") + + # emoney + if "userEmoneyList" in pf_data: + for emon in pf_data["userEmoneyList"]: + emon["user"] = pf["user"] + + sql = Insert(emoney).values(**emon) + conflict = sql.on_duplicate_key_update(**emon) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile emoney for {pf['user']}") + else: + self.logger.info(f"Inserted chuni profile emoney for {pf['user']} ->{result.lastrowid}") + + # overpower + if "userOverPowerList" in pf_data: + for op in pf_data["userOverPowerList"]: + op["user"] = pf["user"] + + sql = Insert(overpower).values(**op) + conflict = sql.on_duplicate_key_update(**op) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni profile overpower for {pf['user']}") + else: + self.logger.info(f"Inserted chuni profile overpower for {pf['user']} ->{result.lastrowid}") + + # map_area + if "userMapAreaList" in pf_data: + for ma in pf_data["userMapAreaList"]: + ma["user"] = pf["user"] + + ma = self.base.fix_bools(ma) + + sql = Insert(map_area).values(**ma) + conflict = sql.on_duplicate_key_update(**ma) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni map_area for {pf['user']}") + else: + self.logger.info(f"Inserted chuni map_area for {pf['user']} ->{result.lastrowid}") + + #duel + if "userDuelList" in pf_data: + for ma in pf_data["userDuelList"]: + ma["user"] = pf["user"] + + ma = self.base.fix_bools(ma) + + sql = Insert(duel).values(**ma) + conflict = sql.on_duplicate_key_update(**ma) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni duel for {pf['user']}") + else: + self.logger.info(f"Inserted chuni duel for {pf['user']} ->{result.lastrowid}") + + # map + if "userMapList" in pf_data: + for ma in pf_data["userMapList"]: + ma["user"] = pf["user"] + + ma = self.base.fix_bools(ma) + + sql = Insert(map).values(**ma) + conflict = sql.on_duplicate_key_update(**ma) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni map for {pf['user']}") + else: + self.logger.info(f"Inserted chuni map for {pf['user']} ->{result.lastrowid}") + + self.logger.info(f"Importing {len(chuni_items)} chunithm/chunithm new items") + + for i in chuni_items: + if type(i["data"]) is not dict: + i["data"] = json.loads(i["data"]) + i_data = i["data"] + + i_data["user"] = i["user"] + + i_data = self.base.fix_bools(i_data) + + try: i_data.pop("assignIllust") + except: pass + + try: i_data.pop("exMaxLv") + except: pass + + if i["type"] == 20: #character + sql = Insert(character).values(**i_data) + else: + sql = Insert(item).values(**i_data) + + conflict = sql.on_duplicate_key_update(**i_data) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert chuni item for user {i['user']}") + + else: + self.logger.info(f"Inserted chuni item for user {i['user']} {i['item_id']} -> {result.lastrowid}") + + self.logger.info(f"Importing {len(chuni_scores)} chunithm/chunithm new scores") + + for sc in chuni_scores: + if type(sc["data"]) is not dict: + 
sc["data"] = json.loads(sc["data"]) + + score_data = self.base.fix_bools(sc["data"]) + + try: score_data.pop("theoryCount") + except: pass + + try: score_data.pop("ext1") + except: pass + + score_data["user"] = sc["user"] + + sql = Insert(best_score).values(**score_data) + conflict = sql.on_duplicate_key_update(**score_data) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to put chuni score for user {sc['user']}") + else: + self.logger.info(f"Inserted chuni score for user {sc['user']} {sc['song_id']}/{sc['chart_id']} -> {result.lastrowid}") + + else: + self.logger.info(f"Chuni/Chusan not found, skipping...") + + # CXB + if os.path.exists("titles/cxb/schema"): + from titles.cxb.schema.item import energy + from titles.cxb.schema.profile import profile + from titles.cxb.schema.score import score, ranking + + cxb_profiles = [] + cxb_items = [] + cxb_scores = [] + + if "SDCA" in profiles: + cxb_profiles = profiles["SDCA"] + if "SDCA" in items: + cxb_items = items["SDCA"] + if "SDCA" in scores: + cxb_scores = scores["SDCA"] + + self.logger.info(f"Importing {len(cxb_profiles)} CXB profiles") + + for pf in cxb_profiles: + user = pf["user"] + version = pf["version"] + pf_data = pf["data"]["data"] + pf_idx = pf["data"]["index"] + + for x in range(len(pf_data)): + sql = Insert(profile).values( + user = user, + version = version, + index = int(pf_idx[x]), + data = json.loads(pf_data[x]) if type(pf_data[x]) is not dict else pf_data[x] + ) + + conflict = sql.on_duplicate_key_update( + user = user, + version = version, + index = int(pf_idx[x]), + data = json.loads(pf_data[x]) if type(pf_data[x]) is not dict else pf_data[x] + ) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert CXB profile for user {user} Index {pf_idx[x]}") + + self.logger.info(f"Importing {len(cxb_scores)} CXB scores") + + for sc in cxb_scores: + user = sc["user"] + version = sc["version"] + mcode = sc["data"]["mcode"] + index = sc["data"]["index"] + + sql = Insert(score).values( + user = user, + game_version = version, + song_mcode = mcode, + song_index = index, + data = sc["data"] + ) + + conflict = sql.on_duplicate_key_update( + user = user, + game_version = version, + song_mcode = mcode, + song_index = index, + data = sc["data"] + ) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert CXB score for user {user} mcode {mcode}") + + self.logger.info(f"Importing {len(cxb_items)} CXB items") + + for it in cxb_items: + user = it["user"] + + if it["type"] == 3: # energy + sql = Insert(energy).values( + user = user, + energy = it["data"]["total"] + ) + + conflict = sql.on_duplicate_key_update( + user = user, + energy = it["data"]["total"] + ) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert CXB energy for user {user}") + + elif it["type"] == 2: + sql = Insert(ranking).values( + user = user, + rev_id = it["data"]["rid"], + song_id = it["data"]["sc"][1] if len(it["data"]["sc"]) > 1 else None, + score = it["data"]["sc"][0], + clear = it["data"]["clear"], + ) + + conflict = sql.on_duplicate_key_update( + user = user, + rev_id = it["data"]["rid"], + song_id = it["data"]["sc"][1] if len(it["data"]["sc"]) > 1 else None, + score = it["data"]["sc"][0], + clear = it["data"]["clear"], + ) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert CXB ranking for user {user}") + + else: + self.logger.error(f"Unknown CXB 
item type {it['type']} for user {user}")
+
+        else:
+            self.logger.info(f"CXB not found, skipping...")
+
+        # Diva
+        if os.path.exists("titles/diva/schema"):
+            from titles.diva.schema.profile import profile
+            from titles.diva.schema.score import score
+            from titles.diva.schema.item import shop
+
+            diva_profiles = []
+            diva_scores = []
+
+            if "SBZV" in profiles:
+                diva_profiles = profiles["SBZV"]
+            if "SBZV" in scores:
+                diva_scores = scores["SBZV"]
+
+            self.logger.info(f"Importing {len(diva_profiles)} Diva profiles")
+
+            for pf in diva_profiles:
+                pf["data"]["user"] = pf["user"]
+                pf["data"]["version"] = pf["version"]
+                pf_data = pf["data"]
+
+                if "mdl_eqp_ary" in pf["data"]:
+                    sql = Insert(shop).values(
+                        user = pf["user"],
+                        version = pf["version"],
+                        mdl_eqp_ary = pf["data"]["mdl_eqp_ary"],
+                    )
+                    conflict = sql.on_duplicate_key_update(
+                        user = pf["user"],
+                        version = pf["version"],
+                        mdl_eqp_ary = pf["data"]["mdl_eqp_ary"]
+                    )
+                    self.base.execute(conflict)
+                    pf["data"].pop("mdl_eqp_ary")
+
+                sql = Insert(profile).values(**pf_data)
+                conflict = sql.on_duplicate_key_update(**pf_data)
+                result = self.base.execute(conflict)
+                if result is None:
+                    self.logger.error(f"Failed to insert diva profile for {pf['user']}")
+
+            self.logger.info(f"Importing {len(diva_scores)} Diva scores")
+
+            for sc in diva_scores:
+                user = sc["user"]
+
+                clr_kind = -1
+                for x in sc["data"]["stg_clr_kind"].split(","):
+                    if x != "-1":
+                        clr_kind = x
+
+                cool_ct = 0
+                for x in sc["data"]["stg_cool_cnt"].split(","):
+                    if x != "0":
+                        cool_ct = x
+
+                fine_ct = 0
+                for x in sc["data"]["stg_fine_cnt"].split(","):
+                    if x != "0":
+                        fine_ct = x
+
+                safe_ct = 0
+                for x in sc["data"]["stg_safe_cnt"].split(","):
+                    if x != "0":
+                        safe_ct = x
+
+                sad_ct = 0
+                for x in sc["data"]["stg_sad_cnt"].split(","):
+                    if x != "0":
+                        sad_ct = x
+
+                worst_ct = 0
+                for x in sc["data"]["stg_wt_wg_cnt"].split(","):
+                    if x != "0":
+                        worst_ct = x
+
+                max_cmb = 0
+                for x in sc["data"]["stg_max_cmb"].split(","):
+                    if x != "0":
+                        max_cmb = x
+
+                sql = Insert(score).values(
+                    user = user,
+                    version = sc["version"],
+                    pv_id = sc["song_id"],
+                    difficulty = sc["chart_id"],
+                    score = sc["score1"],
+                    atn_pnt = sc["score2"],
+                    clr_kind = clr_kind,
+                    sort_kind = sc["data"]["sort_kind"],
+                    cool = cool_ct,
+                    fine = fine_ct,
+                    safe = safe_ct,
+                    sad = sad_ct,
+                    worst = worst_ct,
+                    max_combo = max_cmb,
+                )
+
+                conflict = sql.on_duplicate_key_update(user = user,
+                    version = sc["version"],
+                    pv_id = sc["song_id"],
+                    difficulty = sc["chart_id"],
+                    score = sc["score1"],
+                    atn_pnt = sc["score2"],
+                    clr_kind = clr_kind,
+                    sort_kind = sc["data"]["sort_kind"],
+                    cool = cool_ct,
+                    fine = fine_ct,
+                    safe = safe_ct,
+                    sad = sad_ct,
+                    worst = worst_ct,
+                    max_combo = max_cmb
+                )
+
+                result = self.base.execute(conflict)
+                if result is None:
+                    self.logger.error(f"Failed to insert diva score for {sc['user']}")
+
+        else:
+            self.logger.info(f"Diva not found, skipping...")
+
+        # Ongeki
+        if os.path.exists("titles/ongeki/schema"):
+            from titles.ongeki.schema.item import card, deck, character, boss, story
+            from titles.ongeki.schema.item import chapter, item, music_item, login_bonus
+            from titles.ongeki.schema.item import event_point, mission_point, scenerio
+            from titles.ongeki.schema.item import trade_item, event_music, tech_event
+            from titles.ongeki.schema.profile import profile, option, activity, recent_rating
+            from titles.ongeki.schema.profile import rating_log, training_room, kop
+            from titles.ongeki.schema.score import score_best, tech_count, playlog
+            from titles.ongeki.schema.log import session_log
+
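+            # The old dump keeps every ongeki unlockable as a generic item row
+            # tagged with a numeric type; this mapping routes each type to its
+            # dedicated schema table in the import loop below, and unmapped
+            # types fall through to the generic item table.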
+ item_types = { + "character": 20, + "story": 21, + "card": 22, + "deck": 23, + "login": 24, + "chapter": 25 + } + + ongeki_profiles = [] + ongeki_items = [] + ongeki_scores = [] + + if "SDDT" in profiles: + ongeki_profiles = profiles["SDDT"] + if "SDDT" in items: + ongeki_items = items["SDDT"] + if "SDDT" in scores: + ongeki_scores = scores["SDDT"] + + self.logger.info(f"Importing {len(ongeki_profiles)} ongeki profiles") + + for pf in ongeki_profiles: + user = pf["user"] + version = pf["version"] + pf_data = pf["data"] + + pf_data["userData"]["user"] = user + pf_data["userData"]["version"] = version + pf_data["userData"].pop("accessCode") + + sql = Insert(profile).values(**pf_data["userData"]) + conflict = sql.on_duplicate_key_update(**pf_data["userData"]) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki profile data for user {pf['user']}") + continue + + pf_data["userOption"]["user"] = user + + sql = Insert(option).values(**pf_data["userOption"]) + conflict = sql.on_duplicate_key_update(**pf_data["userOption"]) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki profile options for user {pf['user']}") + continue + + for pf_list in pf_data["userActivityList"]: + pf_list["user"] = user + + sql = Insert(activity).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki profile activity for user {pf['user']}") + continue + + sql = Insert(recent_rating).values( + user = user, + recentRating = pf_data["userRecentRatingList"] + ) + + conflict = sql.on_duplicate_key_update( + user = user, + recentRating = pf_data["userRecentRatingList"] + ) + result = self.base.execute(conflict) + + if result is None: + self.logger.error(f"Failed to insert ongeki profile recent rating for user {pf['user']}") + continue + + for pf_list in pf_data["userRatinglogList"]: + pf_list["user"] = user + + sql = Insert(rating_log).values(**pf_list) + + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki profile rating log for user {pf['user']}") + continue + + for pf_list in pf_data["userTrainingRoomList"]: + pf_list["user"] = user + + sql = Insert(training_room).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki profile training room for user {pf['user']}") + continue + + if "userKopList" in pf_data: + for pf_list in pf_data["userKopList"]: + pf_list["user"] = user + + sql = Insert(kop).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki profile training room for user {pf['user']}") + continue + + for pf_list in pf_data["userBossList"]: + pf_list["user"] = user + + sql = Insert(boss).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item boss for user {pf['user']}") + continue + + for pf_list in pf_data["userDeckList"]: + pf_list["user"] = user + + sql = Insert(deck).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to 
insert ongeki item deck for user {pf['user']}") + continue + + for pf_list in pf_data["userStoryList"]: + pf_list["user"] = user + + sql = Insert(story).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item story for user {pf['user']}") + continue + + for pf_list in pf_data["userChapterList"]: + pf_list["user"] = user + + sql = Insert(chapter).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item chapter for user {pf['user']}") + continue + + for pf_list in pf_data["userPlaylogList"]: + pf_list["user"] = user + + sql = Insert(playlog).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki score playlog for user {pf['user']}") + continue + + for pf_list in pf_data["userMusicItemList"]: + pf_list["user"] = user + + sql = Insert(music_item).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item music item for user {pf['user']}") + continue + + for pf_list in pf_data["userTechCountList"]: + pf_list["user"] = user + + sql = Insert(tech_count).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item tech count for user {pf['user']}") + continue + + if "userTechEventList" in pf_data: + for pf_list in pf_data["userTechEventList"]: + pf_list["user"] = user + + sql = Insert(tech_event).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item tech event for user {pf['user']}") + continue + + for pf_list in pf_data["userTradeItemList"]: + pf_list["user"] = user + + sql = Insert(trade_item).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item trade item for user {pf['user']}") + continue + + if "userEventMusicList" in pf_data: + for pf_list in pf_data["userEventMusicList"]: + pf_list["user"] = user + + sql = Insert(event_music).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item event music for user {pf['user']}") + continue + + if "userEventPointList" in pf_data: + for pf_list in pf_data["userEventPointList"]: + pf_list["user"] = user + + sql = Insert(event_point).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item event point for user {pf['user']}") + continue + + for pf_list in pf_data["userLoginBonusList"]: + pf_list["user"] = user + + sql = Insert(login_bonus).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item login bonus for user {pf['user']}") + continue + + for pf_list in pf_data["userMissionPointList"]: + pf_list["user"] = user + + sql = 
Insert(mission_point).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item mission point for user {pf['user']}") + continue + + for pf_list in pf_data["userScenarioList"]: + pf_list["user"] = user + + sql = Insert(scenerio).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item scenerio for user {pf['user']}") + continue + + if "userSessionlogList" in pf_data: + for pf_list in pf_data["userSessionlogList"]: + pf_list["user"] = user + + sql = Insert(session_log).values(**pf_list) + conflict = sql.on_duplicate_key_update(**pf_list) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki log session for user {pf['user']}") + continue + + self.logger.info(f"Importing {len(ongeki_items)} ongeki items") + + for it in ongeki_items: + user = it["user"] + it_type = it["type"] + it_id = it["item_id"] + it_data = it["data"] + it_data["user"] = user + + if it_type == item_types["character"] and "characterId" in it_data: + sql = Insert(character).values(**it_data) + + elif it_type == item_types["story"]: + sql = Insert(story).values(**it_data) + + elif it_type == item_types["card"]: + sql = Insert(card).values(**it_data) + + elif it_type == item_types["deck"]: + sql = Insert(deck).values(**it_data) + + elif it_type == item_types["login"]: # login bonus + sql = Insert(login_bonus).values(**it_data) + + elif it_type == item_types["chapter"]: + sql = Insert(chapter).values(**it_data) + + else: + sql = Insert(item).values(**it_data) + + conflict = sql.on_duplicate_key_update(**it_data) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki item {it_id} kind {it_type} for user {user}") + + self.logger.info(f"Importing {len(ongeki_scores)} ongeki scores") + + for sc in ongeki_scores: + user = sc["user"] + sc_data = sc["data"] + sc_data["user"] = user + + sql = Insert(score_best).values(**sc_data) + conflict = sql.on_duplicate_key_update(**sc_data) + + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert ongeki score for user {user}: {sc['song_id']}/{sc['chart_id']}") + + else: + self.logger.info(f"Ongeki not found, skipping...") + + # Wacca + if os.path.exists("titles/wacca/schema"): + from titles.wacca.schema.profile import profile, option, bingo, gate, favorite + from titles.wacca.schema.item import item, ticket, song_unlock, trophy + from titles.wacca.schema.score import best_score, stageup + from titles.wacca.reverse import WaccaReverse + from titles.wacca.const import WaccaConstants + + default_opts = WaccaReverse.OPTIONS_DEFAULTS + opts = WaccaConstants.OPTIONS + item_types = WaccaConstants.ITEM_TYPES + + wacca_profiles = [] + wacca_items = [] + wacca_scores = [] + wacca_achievements = [] + + if "SDFE" in profiles: + wacca_profiles = profiles["SDFE"] + if "SDFE" in items: + wacca_items = items["SDFE"] + if "SDFE" in scores: + wacca_scores = scores["SDFE"] + if "SDFE" in achievements: + wacca_achievements = achievements["SDFE"] + + self.logger.info(f"Importing {len(wacca_profiles)} wacca profiles") + + for pf in wacca_profiles: + if pf["version"] == 0 or pf["version"] == 1: + season = 1 + elif pf["version"] == 2 or pf["version"] == 3: + season = 2 + elif pf["version"] >= 4: + season = 3 + + if type(pf["data"]) 
is not dict: + pf["data"] = json.loads(pf["data"]) + + try: + sql = Insert(profile).values( + id = pf["id"], + user = pf["user"], + version = pf["version"], + season = season, + username = pf["data"]["profile"]["username"] if "username" in pf["data"]["profile"] else pf["name"], + xp = pf["data"]["profile"]["xp"], + xp_season = pf["data"]["profile"]["xp"], + wp = pf["data"]["profile"]["wp"], + wp_season = pf["data"]["profile"]["wp"], + wp_total = pf["data"]["profile"]["total_wp_gained"], + dan_type = pf["data"]["profile"]["dan_type"], + dan_level = pf["data"]["profile"]["dan_level"], + title_0 = pf["data"]["profile"]["title_part_ids"][0], + title_1 = pf["data"]["profile"]["title_part_ids"][1], + title_2 = pf["data"]["profile"]["title_part_ids"][2], + rating = pf["data"]["profile"]["rating"], + vip_expire_time = datetime.fromtimestamp(pf["data"]["profile"]["vip_expire_time"]) if "vip_expire_time" in pf["data"]["profile"] else None, + login_count = pf["use_count"], + playcount_single = pf["use_count"], + playcount_single_season = pf["use_count"], + last_game_ver = pf["data"]["profile"]["last_game_ver"], + last_song_id = pf["data"]["profile"]["last_song_info"][0] if "last_song_info" in pf["data"]["profile"] else 0, + last_song_difficulty = pf["data"]["profile"]["last_song_info"][1] if "last_song_info" in pf["data"]["profile"] else 0, + last_folder_order = pf["data"]["profile"]["last_song_info"][2] if "last_song_info" in pf["data"]["profile"] else 0, + last_folder_id = pf["data"]["profile"]["last_song_info"][3] if "last_song_info" in pf["data"]["profile"] else 0, + last_song_order = pf["data"]["profile"]["last_song_info"][4] if "last_song_info" in pf["data"]["profile"] else 0, + last_login_date = datetime.fromtimestamp(pf["data"]["profile"]["last_login_timestamp"]), + ) + + conflict = sql.on_duplicate_key_update( + id = pf["id"], + user = pf["user"], + version = pf["version"], + season = season, + username = pf["data"]["profile"]["username"] if "username" in pf["data"]["profile"] else pf["name"], + xp = pf["data"]["profile"]["xp"], + xp_season = pf["data"]["profile"]["xp"], + wp = pf["data"]["profile"]["wp"], + wp_season = pf["data"]["profile"]["wp"], + wp_total = pf["data"]["profile"]["total_wp_gained"], + dan_type = pf["data"]["profile"]["dan_type"], + dan_level = pf["data"]["profile"]["dan_level"], + title_0 = pf["data"]["profile"]["title_part_ids"][0], + title_1 = pf["data"]["profile"]["title_part_ids"][1], + title_2 = pf["data"]["profile"]["title_part_ids"][2], + rating = pf["data"]["profile"]["rating"], + vip_expire_time = datetime.fromtimestamp(pf["data"]["profile"]["vip_expire_time"]) if "vip_expire_time" in pf["data"]["profile"] else None, + login_count = pf["use_count"], + playcount_single = pf["use_count"], + playcount_single_season = pf["use_count"], + last_game_ver = pf["data"]["profile"]["last_game_ver"], + last_song_id = pf["data"]["profile"]["last_song_info"][0] if "last_song_info" in pf["data"]["profile"] else 0, + last_song_difficulty = pf["data"]["profile"]["last_song_info"][1] if "last_song_info" in pf["data"]["profile"] else 0, + last_folder_order = pf["data"]["profile"]["last_song_info"][2] if "last_song_info" in pf["data"]["profile"] else 0, + last_folder_id = pf["data"]["profile"]["last_song_info"][3] if "last_song_info" in pf["data"]["profile"] else 0, + last_song_order = pf["data"]["profile"]["last_song_info"][4] if "last_song_info" in pf["data"]["profile"] else 0, + last_login_date = datetime.fromtimestamp(pf["data"]["profile"]["last_login_timestamp"]), + ) + + result 
= self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert wacca profile for user {pf['user']}") + continue + + for opt, val in pf["data"]["option"].items(): + if val != default_opts[opt]: + opt_id = opts[opt] + sql = Insert(option).values( + user = pf["user"], + opt_id = opt_id, + value = val, + ) + + conflict = sql.on_duplicate_key_update( + user = pf["user"], + opt_id = opt_id, + value = val, + ) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert wacca option for user {pf['user']} {opt} -> {val}") + + except KeyError as e: + self.logger.warn(f"Outdated wacca profile, skipping: {e}") + + if "gate" in pf["data"]: + for profile_gate in pf["data"]["gate"]: + sql = Insert(gate).values( + user = pf["user"], + gate_id = profile_gate["id"], + page = profile_gate["page"], + loops = profile_gate["loops"], + progress = profile_gate["progress"], + last_used = datetime.fromtimestamp(profile_gate["last_used"]), + mission_flag = profile_gate["mission_flag"], + total_points = profile_gate["total_points"], + ) + + conflict = sql.on_duplicate_key_update( + user = pf["user"], + gate_id = profile_gate["id"], + page = profile_gate["page"], + loops = profile_gate["loops"], + progress = profile_gate["progress"], + last_used = datetime.fromtimestamp(profile_gate["last_used"]), + mission_flag = profile_gate["mission_flag"], + total_points = profile_gate["total_points"], + ) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert wacca gate for user {pf['user']} -> {profile_gate['id']}") + continue + + if "favorite" in pf["data"]: + for profile_favorite in pf["data"]["favorite"]: + sql = Insert(favorite).values( + user = pf["user"], + song_id = profile_favorite + ) + + conflict = sql.on_duplicate_key_update( + user = pf["user"], + song_id = profile_favorite + ) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert wacca favorite songs for user {pf['user']} -> {profile_favorite}") + continue + + for it in wacca_items: + user = it["user"] + item_type = it["type"] + item_id = it["item_id"] + + if type(it["data"]) is not dict: + it["data"] = json.loads(it["data"]) + + if item_type == item_types["ticket"]: + if "quantity" in it["data"]: + for x in range(it["data"]["quantity"]): + sql = Insert(ticket).values( + user = user, + ticket_id = item_id, + ) + + conflict = sql.on_duplicate_key_update( + user = user, + ticket_id = item_id, + ) + result = self.base.execute(conflict) + if result is None: + self.logger.warn(f"Wacca: Failed to insert ticket {item_id} for user {user}") + + elif item_type == item_types["music_unlock"] or item_type == item_types["music_difficulty_unlock"]: + diff = 0 + if "difficulty" in it["data"]: + for x in it["data"]["difficulty"]: + if x == 1: + diff += 1 + else: + break + + sql = Insert(song_unlock).values( + user = user, + song_id = item_id, + highest_difficulty = diff, + ) + + conflict = sql.on_duplicate_key_update( + user = user, + song_id = item_id, + highest_difficulty = diff, + ) + result = self.base.execute(conflict) + if result is None: + self.logger.warn(f"Wacca: Failed to insert song unlock {item_id} {diff} for user {user}") + + elif item_type == item_types["trophy"]: + season = int(item_id / 100000) + sql = Insert(trophy).values( + user = user, + trophy_id = item_id, + season = season, + progress = 0 if "progress" not in it["data"] else it["data"]["progress"], + badge_type = 0 # ??? 
+ ) + + conflict = sql.on_duplicate_key_update( + user = user, + trophy_id = item_id, + season = season, + progress = 0 if "progress" not in it["data"] else it["data"]["progress"], + badge_type = 0 # ??? + ) + result = self.base.execute(conflict) + if result is None: + self.logger.warn(f"Wacca: Failed to insert trophy {item_id} for user {user}") + + else: + sql = Insert(item).values( + user = user, + item_id = item_id, + type = item_type, + acquire_date = datetime.fromtimestamp(it["data"]["obtainedDate"]) if "obtainedDate" in it["data"] else datetime.now(), + use_count = it["data"]["uses"] if "uses" in it["data"] else 0, + use_count_season = it["data"]["season_uses"] if "season_uses" in it["data"] else 0 + ) + + conflict = sql.on_duplicate_key_update( + user = user, + item_id = item_id, + type = item_type, + acquire_date = datetime.fromtimestamp(it["data"]["obtainedDate"]) if "obtainedDate" in it["data"] else datetime.now(), + use_count = it["data"]["uses"] if "uses" in it["data"] else 0, + use_count_season = it["data"]["season_uses"] if "season_uses" in it["data"] else 0 + ) + result = self.base.execute(conflict) + if result is None: + self.logger.warn(f"Wacca: Failed to insert trophy {item_id} for user {user}") + + for sc in wacca_scores: + if type(sc["data"]) is not dict: + sc["data"] = json.loads(sc["data"]) + + sql = Insert(best_score).values( + user = sc["user"], + song_id = int(sc["song_id"]), + chart_id = sc["chart_id"], + score = sc["score1"], + play_ct = 1 if "play_count" not in sc["data"] else sc["data"]["play_count"], + clear_ct = 1 if sc["cleared"] & 0x01 else 0, + missless_ct = 1 if sc["cleared"] & 0x02 else 0, + fullcombo_ct = 1 if sc["cleared"] & 0x04 else 0, + allmarv_ct = 1 if sc["cleared"] & 0x08 else 0, + grade_d_ct = 1 if sc["grade"] & 0x01 else 0, + grade_c_ct = 1 if sc["grade"] & 0x02 else 0, + grade_b_ct = 1 if sc["grade"] & 0x04 else 0, + grade_a_ct = 1 if sc["grade"] & 0x08 else 0, + grade_aa_ct = 1 if sc["grade"] & 0x10 else 0, + grade_aaa_ct = 1 if sc["grade"] & 0x20 else 0, + grade_s_ct = 1 if sc["grade"] & 0x40 else 0, + grade_ss_ct = 1 if sc["grade"] & 0x80 else 0, + grade_sss_ct = 1 if sc["grade"] & 0x100 else 0, + grade_master_ct = 1 if sc["grade"] & 0x200 else 0, + grade_sp_ct = 1 if sc["grade"] & 0x400 else 0, + grade_ssp_ct = 1 if sc["grade"] & 0x800 else 0, + grade_sssp_ct = 1 if sc["grade"] & 0x1000 else 0, + best_combo = 0 if "max_combo" not in sc["data"] else sc["data"]["max_combo"], + lowest_miss_ct = 0 if "lowest_miss_count" not in sc["data"] else sc["data"]["lowest_miss_count"], + rating = 0 if "rating" not in sc["data"] else sc["data"]["rating"], + ) + + conflict = sql.on_duplicate_key_update( + user = sc["user"], + song_id = int(sc["song_id"]), + chart_id = sc["chart_id"], + score = sc["score1"], + play_ct = 1 if "play_count" not in sc["data"] else sc["data"]["play_count"], + clear_ct = 1 if sc["cleared"] & 0x01 else 0, + missless_ct = 1 if sc["cleared"] & 0x02 else 0, + fullcombo_ct = 1 if sc["cleared"] & 0x04 else 0, + allmarv_ct = 1 if sc["cleared"] & 0x08 else 0, + grade_d_ct = 1 if sc["grade"] & 0x01 else 0, + grade_c_ct = 1 if sc["grade"] & 0x02 else 0, + grade_b_ct = 1 if sc["grade"] & 0x04 else 0, + grade_a_ct = 1 if sc["grade"] & 0x08 else 0, + grade_aa_ct = 1 if sc["grade"] & 0x10 else 0, + grade_aaa_ct = 1 if sc["grade"] & 0x20 else 0, + grade_s_ct = 1 if sc["grade"] & 0x40 else 0, + grade_ss_ct = 1 if sc["grade"] & 0x80 else 0, + grade_sss_ct = 1 if sc["grade"] & 0x100 else 0, + grade_master_ct = 1 if sc["grade"] & 0x200 else 0, + 
grade_sp_ct = 1 if sc["grade"] & 0x400 else 0, + grade_ssp_ct = 1 if sc["grade"] & 0x800 else 0, + grade_sssp_ct = 1 if sc["grade"] & 0x1000 else 0, + best_combo = 0 if "max_combo" not in sc["data"] else sc["data"]["max_combo"], + lowest_miss_ct = 0 if "lowest_miss_count" not in sc["data"] else sc["data"]["lowest_miss_count"], + rating = 0 if "rating" not in sc["data"] else sc["data"]["rating"], + ) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert wacca score for user {sc['user']} {int(sc['song_id'])} {sc['chart_id']}") + + for ach in wacca_achievements: + if ach["version"] == 0 or ach["version"] == 1: + season = 1 + elif ach["version"] == 2 or ach["version"] == 3: + season = 2 + elif ach["version"] >= 4: + season = 3 + + if type(ach["data"]) is not dict: + ach["data"] = json.loads(ach["data"]) + + sql = Insert(stageup).values( + user = ach["user"], + season = season, + stage_id = ach["achievement_id"], + clear_status = 0 if "clear" not in ach["data"] else ach["data"]["clear"], + clear_song_ct = 0 if "clear_song_ct" not in ach["data"] else ach["data"]["clear_song_ct"], + song1_score = 0 if "score1" not in ach["data"] else ach["data"]["score1"], + song2_score = 0 if "score2" not in ach["data"] else ach["data"]["score2"], + song3_score = 0 if "score3" not in ach["data"] else ach["data"]["score3"], + play_ct = 1 if "attemps" not in ach["data"] else ach["data"]["attemps"], + ) + + conflict = sql.on_duplicate_key_update( + user = ach["user"], + season = season, + stage_id = ach["achievement_id"], + clear_status = 0 if "clear" not in ach["data"] else ach["data"]["clear"], + clear_song_ct = 0 if "clear_song_ct" not in ach["data"] else ach["data"]["clear_song_ct"], + song1_score = 0 if "score1" not in ach["data"] else ach["data"]["score1"], + song2_score = 0 if "score2" not in ach["data"] else ach["data"]["score2"], + song3_score = 0 if "score3" not in ach["data"] else ach["data"]["score3"], + play_ct = 1 if "attemps" not in ach["data"] else ach["data"]["attemps"], + ) + result = self.base.execute(conflict) + if result is None: + self.logger.error(f"Failed to insert wacca achievement for user {ach['user']}") + + else: + self.logger.info(f"Wacca not found, skipping...") From 83f09e180e31bae474068a16b7341a4ecf6446d0 Mon Sep 17 00:00:00 2001 From: Hay1tsme Date: Wed, 22 Feb 2023 09:59:31 -0500 Subject: [PATCH 02/10] add protobuf to requirements, fixes #2 --- requirements.txt | 1 + requirements_win.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/requirements.txt b/requirements.txt index 40dbf84..2631c24 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,3 +13,4 @@ coloredlogs pylibmc wacky Routes +protobuf diff --git a/requirements_win.txt b/requirements_win.txt index 89527fe..cbdbcde 100644 --- a/requirements_win.txt +++ b/requirements_win.txt @@ -12,3 +12,4 @@ inflection coloredlogs wacky Routes +protobuf From 670747cf4898c1fd1030e58efb57fc16438c43e2 Mon Sep 17 00:00:00 2001 From: Hay1tsme Date: Fri, 24 Feb 2023 13:34:32 -0500 Subject: [PATCH 03/10] add platform_system to requirements.txt --- requirements.txt | 2 +- requirements_win.txt | 15 --------------- 2 files changed, 1 insertion(+), 16 deletions(-) delete mode 100644 requirements_win.txt diff --git a/requirements.txt b/requirements.txt index 2631c24..0536370 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,7 @@ service_identity PyCryptodome inflection coloredlogs -pylibmc +pylibmc; platform_system != "Windows" wacky Routes protobuf diff --git 
a/requirements_win.txt b/requirements_win.txt
deleted file mode 100644
index cbdbcde..0000000
--- a/requirements_win.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-mypy
-wheel
-twisted
-pytz
-pyyaml
-sqlalchemy==1.4.46
-mysqlclient
-pyopenssl
-service_identity
-PyCryptodome
-inflection
-coloredlogs
-wacky
-Routes
-protobuf

From be00ad8e961eafc79f5efc1bfa805b791502da93 Mon Sep 17 00:00:00 2001
From: Hay1tsme
Date: Wed, 1 Mar 2023 23:55:29 -0500
Subject: [PATCH 04/10] wacca: add lily to list of items given on profile create, fixes #4

---
 titles/wacca/lily.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/titles/wacca/lily.py b/titles/wacca/lily.py
index 66a4bc8..e67b25c 100644
--- a/titles/wacca/lily.py
+++ b/titles/wacca/lily.py
@@ -50,6 +50,14 @@ class WaccaLily(WaccaS):
             resp = HousingStartResponseV1(region_id)
 
         return resp.make()
+
+    def handle_user_status_create_request(self, data: Dict)-> Dict:
+        req = UserStatusCreateRequest(data)
+        resp = super().handle_user_status_create_request(data)
+
+        self.data.item.put_item(req.aimeId, WaccaConstants.ITEM_TYPES["navigator"], 210002) # Lily
+
+        return resp
 
     def handle_user_status_get_request(self, data: Dict)-> Dict:
         req = UserStatusGetRequest(data)

From fe4dfe369b62ecfd90b5aa1d4531ada16d044516 Mon Sep 17 00:00:00 2001
From: Hay1tsme
Date: Sun, 30 Apr 2023 01:18:00 +0000
Subject: [PATCH 05/10] Update 'readme.md'

---
 readme.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/readme.md b/readme.md
index b1cb506..ec25191 100644
--- a/readme.md
+++ b/readme.md
@@ -2,7 +2,7 @@
 A network service emulator for games running SEGA'S ALL.NET service, and similar.
 
 # Supported games
-Games listed below have been tested and confirmed working. Only game versions older then the current one in active use in arcades (n-0) or current game versions older then a year (y-1) are supported.
+Games listed below have been tested and confirmed working. Only game versions older than the version currently active in arcades, or game versions that have not received a major update in over one year, are supported.
 
 + Chunithm
     + All versions up to New!! Plus
From fc947d36a5fcaa42ae60c5541962d04685536fe1 Mon Sep 17 00:00:00 2001
From: Kevin Trocolli
Date: Mon, 24 Jul 2023 00:49:08 -0400
Subject: [PATCH 06/10] mai2: fix get user music for dx and up

---
 titles/mai2/dx.py | 39 ++++++++++++++++++++++++++++-----------
 1 file changed, 28 insertions(+), 11 deletions(-)

diff --git a/titles/mai2/dx.py b/titles/mai2/dx.py
index fba092a..7815e82 100644
--- a/titles/mai2/dx.py
+++ b/titles/mai2/dx.py
@@ -545,21 +545,38 @@
         return {"userId": data["userId"], "length": 0, "userRegionList": []}
 
     def handle_get_user_music_api_request(self, data: Dict) -> Dict:
-        songs = self.data.score.get_best_scores(data["userId"])
+        user_id = data.get("userId", 0)
+        next_index = data.get("nextIndex", 0)
+        max_ct = data.get("maxCount", 50)
+        upper_lim = next_index + max_ct
         music_detail_list = []
-        next_index = 0
+        if user_id <= 0:
+            self.logger.warning("handle_get_user_music_api_request: Could not find userId in data, or userId is 0")
+            return {}
+
+        songs = self.data.score.get_best_scores(user_id)
+        if songs is None:
+            self.logger.debug("handle_get_user_music_api_request: get_best_scores returned None!")
+            return {
+                "userId": data["userId"],
+                "nextIndex": 0,
+                "userMusicList": [],
+            }
-        if songs is not None:
-            for song in songs:
-                tmp = song._asdict()
-                tmp.pop("id")
-                tmp.pop("user")
-                music_detail_list.append(tmp)
+        num_user_songs = len(songs)
+        for x in range(next_index, upper_lim):
+            if num_user_songs <= x:
+                break
+
+            tmp = songs[x]._asdict()
+            tmp.pop("id")
+            tmp.pop("user")
+            music_detail_list.append(tmp)
-                if len(music_detail_list) == data["maxCount"]:
-                    next_index = data["maxCount"] + data["nextIndex"]
-                    break
+        next_index = 0 if len(music_detail_list) < max_ct or num_user_songs == upper_lim else upper_lim
+        self.logger.info(f"Sent {len(music_detail_list)} of {num_user_songs} songs for user {user_id} (next idx {next_index})")
 
         return {
            "userId": data["userId"],
            "nextIndex": next_index,
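The rewritten handler above pages the stored best scores by the request's `nextIndex`/`maxCount` window instead of dumping every row at once, and, judging by the handler, a `nextIndex` of 0 in the response doubles as the end-of-list signal. Reduced to a standalone sketch (illustrative names only, not the ARTEMiS handler itself):

```python
from typing import Any, Dict, List


def paginate(items: List[Any], next_index: int, max_ct: int) -> Dict[str, Any]:
    """Return one window of items plus the index to request next (0 = last page)."""
    upper_lim = next_index + max_ct
    window = items[next_index:upper_lim]

    # More pages remain only if this window is full and did not consume the list.
    new_next = upper_lim if len(window) == max_ct and upper_lim < len(items) else 0
    return {"nextIndex": new_next, "list": window}


# 120 items in pages of 50: nextIndex goes 50 -> 100 -> 0 (final page holds 20).
assert paginate(list(range(120)), 0, 50)["nextIndex"] == 50
assert paginate(list(range(120)), 100, 50)["nextIndex"] == 0
```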
From a3d2955fce3ac98a6e63718a8493a345eb6154ff Mon Sep 17 00:00:00 2001
From: phantomlan
Date: Sat, 21 Oct 2023 17:22:58 +0200
Subject: [PATCH 07/10] - update docker-compose.yml
 - update mysqldb to mariadb-alpine
 - update mysql healthcheck to also include password
 - update memcached to alpine counterpart
 - update phpmyadmin port to not collide with ARTEMiS main app
 - add commented-out options for DB persistency
 - update Dockerfile
 - add read.py to Dockerfile
 - add docs/INSTALL_DOCKER.md guide

---
 Dockerfile             |   4 +-
 docker-compose.yml     |  31 ++++---
 docs/INSTALL_DOCKER.md | 203 +++++++++++++++++++++++++++++++++++++++++
 3 files changed, 225 insertions(+), 13 deletions(-)
 create mode 100644 docs/INSTALL_DOCKER.md

diff --git a/Dockerfile b/Dockerfile
index e34dfec..507f01f 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -12,10 +12,10 @@
 RUN chmod +x entrypoint.sh
 COPY index.py index.py
 COPY dbutils.py dbutils.py
+COPY read.py read.py
 ADD core core
 ADD titles titles
-ADD config config
 ADD log log
 ADD cert cert
 
-ENTRYPOINT [ "/app/entrypoint.sh" ]
\ No newline at end of file
+ENTRYPOINT [ "/app/entrypoint.sh" ]
diff --git a/docker-compose.yml b/docker-compose.yml
index c43b6e5..3e349b0 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -5,22 +5,23 @@ services:
     build: .
     volumes:
       - ./aime:/app/aime
+      - ./config:/app/config
 
     environment:
       CFG_DEV: 1
       CFG_CORE_SERVER_HOSTNAME: 0.0.0.0
       CFG_CORE_DATABASE_HOST: ma.db
       CFG_CORE_MEMCACHED_HOSTNAME: ma.memcached
-      CFG_CORE_AIMEDB_KEY: keyhere
+      CFG_CORE_AIMEDB_KEY:
       CFG_CHUNI_SERVER_LOGLEVEL: debug
 
     ports:
-      - "80:80"
-      - "8443:8443"
-      - "22345:22345"
+      - "80:80"
+      - "8443:8443"
+      - "22345:22345"
 
-      - "8080:8080"
-      - "8090:8090"
+      - "8080:8080"
+      - "8090:8090"
 
     depends_on:
       db:
@@ -28,21 +29,29 @@
   db:
     hostname: ma.db
-    image: mysql:8.0.31-debian
+    image: yobasystems/alpine-mariadb:10.11.5
     environment:
       MYSQL_DATABASE: aime
       MYSQL_USER: aime
       MYSQL_PASSWORD: aime
       MYSQL_ROOT_PASSWORD: AimeRootPassword
+      MYSQL_CHARSET: utf8
+      MYSQL_COLLATION: utf8_general_ci
+
+    ## Note: expose port 3306 to let the read.py importer reach the database; comment out when not needed
+    #ports:
+    #  - "3306:3306"
+
+    ## Note: uncomment to give MariaDB a persistent database volume; leave commented if you want to rebuild the database from scratch often
+    #volumes:
+    #  - ./AimeDB:/var/lib/mysql
+
     healthcheck:
-      test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
+      test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-pAimeRootPassword"]
       timeout: 5s
       retries: 5
 
-
   memcached:
     hostname: ma.memcached
-    image: memcached:1.6.17-bullseye
+    image: memcached:1.6.22-alpine3.18
+    command: [ "memcached", "-m", "1024", "-I", "128m" ]
 
   phpmyadmin:
     hostname: ma.phpmyadmin
@@ -53,5 +62,5 @@
       PMA_PASSWORD: AimeRootPassword
       APACHE_PORT: 8080
     ports:
-      - "8080:8080"
+      - "9090:8080"
 
diff --git a/docs/INSTALL_DOCKER.md b/docs/INSTALL_DOCKER.md
new file mode 100644
index 0000000..f0cf81a
--- /dev/null
+++ b/docs/INSTALL_DOCKER.md
@@ -0,0 +1,203 @@
+# ARTEMiS - Docker Installation Guide
+This step-by-step guide will walk you through installing a containerized version of ARTEMiS with Docker. Some steps can be skipped if you already have the prerequisite components and modules installed.
+
+This guide assumes Debian 12 (bookworm-stable) as the host operating system for most packages and modules.
+
+## Pre-Requisites:
+- Linux-Based Operating System (e.g. Debian, Ubuntu)
+- Docker (https://get.docker.com)
+- Python 3.9+
+- (optional) Git
+
+## Install Python 3.9+ and Docker
+```
+(if this is a fresh install of the system)
+sudo apt update && sudo apt upgrade
+
+(installs python3 and pip)
+sudo apt install python3 python3-pip
+
+(installs docker)
+curl -fsSL https://get.docker.com -o get-docker.sh
+sh get-docker.sh
+
+(optionally install git)
+sudo apt install git
+```
+
+## Get ARTEMiS
+If you installed git, clone your choice of ARTEMiS git repository, e.g.:
+```
+git clone
+```
+If not, download the source package and unpack it to a folder of your choice.
+
+## Prepare development/home configuration
+To build our Docker setup, we first need to create some folders and copy some files around:
+- Create 'aime', 'configs', 'AimeDB', and 'logs' folders in the ARTEMiS root folder (where all the source files live)
+- Inside the configs folder, create a 'config' folder, then copy all the .yaml files from example_config into it (that's every file except nginx_example.conf)
+- Edit the .yaml files inside configs/config to suit your server's needs
+- Edit core.yaml inside configs/config as follows (see the sketch after this list):
+```
+set server.listen_address: to "0.0.0.0"
+set title.hostname: to the machine's IP address, e.g. "192.168.x.x" depending on your network, or to the actual hostname if DNS resolution is already set up for your network
+set database.host: to "ma.db"
+set database.memcached_host: to "ma.memcached"
+set aimedb.key: to ""
+```
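+
+For reference, after those edits the relevant core.yaml sections might look roughly like the sketch below. This is an illustration only; the authoritative key layout is whatever ships in your example_config files, so follow those if they differ:
+```
+server:
+  listen_address: "0.0.0.0"        # listen on all interfaces inside the container
+title:
+  hostname: "192.168.x.x"          # your machine's IP, or a DNS-resolvable hostname
+database:
+  host: "ma.db"                    # docker-compose hostname of the MariaDB service
+  memcached_host: "ma.memcached"   # docker-compose hostname of the memcached service
+aimedb:
+  key: ""                          # intentionally blank, per the steps above
+```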
"192.168.x.x", depending on your network, or actual hostname if your configuration is already set for dns resolve +set database.host: to "ma.db" +set database.memcached_host: to "ma.memcached" +set aimedb.key: to "" +``` + +## Running Docker Compose +After configuring, go to ARTEMiS root folder, and execute: +``` +docker compose up -d +("-d" argument means detached or daemon, meaning you will regain control of your terminal and Containers will run in background) +``` +This will start pulling and building required images from network, after it's done, a development server should be running, with server accessible under machine's IP, frontend with port 8090, and PHPMyAdmin under port 9090. + +To turn off the server, from ARTEMiS root folder, execute: +``` +docker compose down +``` + +If you changed some files around, and don't see your changes applied, execute: +``` +(turn off the server) +docker compose down +(rebuild) +docker compose build +(turn on) +docker compose up -d +``` + +If you need to see logs from containers running, execute: +``` +docker compose logs +``` +add '-f' to the end if you want to follow logs. + +## Running commands +If you need to execute python scripts supplied with the application, use `docker compose exec app python3