diff --git a/README.md b/README.md
index fb3915e..0c99ffe 100644
--- a/README.md
+++ b/README.md
@@ -39,7 +39,7 @@ options:

 ## TODO

-- [x] Chunithm New!! Plus
+- [x] CHUNITHM SUN
 - [x] O.N.G.E.K.I. Bright Memory
 - [ ] maimai DX UNiVERSE/FESTiVAL
 - [ ] Change the game version to import using arguments
diff --git a/aqua_importer.py b/aqua_importer.py
index dc312b1..0b0d831 100644
--- a/aqua_importer.py
+++ b/aqua_importer.py
@@ -19,7 +19,7 @@ from core.config import CoreConfig
 from core.data.database import Data
 from titles.chuni.config import ChuniConfig
 from titles.chuni.const import ChuniConstants
-from titles.chuni.newplus import ChuniNewPlus
+from titles.chuni.sun import ChuniSun
 from titles.ongeki.brightmemory import OngekiBrightMemory
 from titles.ongeki.config import OngekiConfig
 from titles.ongeki.const import OngekiConstants
@@ -161,7 +161,7 @@ class Importer:
         row: Row,
         datetime_columns: list[Dict],
         unused_columns: list[str],
-        user_id_column: str = "userId",
+        card_id: int,
     ) -> Dict:
         row = row._asdict()
         for column in datetime_columns:
@@ -180,9 +180,8 @@ class Importer:
             # add the new camelCase key, value pair to tmp
             tmp[k] = v if v != "null" else None

-        # get the aqua internal user id and also drop it from the tmp dict
-        aqua_user_id = tmp[user_id_column]
-        tmp.pop(user_id_column)
+        # drop the aqua internal user id
+        tmp.pop("userId", None)

         # removes unused columns
         for unused in unused_columns:
@@ -190,9 +189,8 @@ class Importer:

         # get from the internal user id the actual luid
         card_data = None
-        card_result = self.aqua.execute(
-            f"SELECT * FROM sega_card WHERE id = {aqua_user_id}"
-        )
+        card_result = self.aqua.execute(f"SELECT * FROM sega_card WHERE id = {card_id}")
+
         for card in card_result:
             card_data = card._asdict()

@@ -207,14 +205,31 @@ class Importer:

         return tmp

+    def get_chuni_card_id_by_aqua_row(self, row: Row, user_id_column: str = "user_id"):
+        aqua_user_id = row._asdict()[user_id_column]
+        user_result = self.aqua.execute(
+            f"SELECT * FROM chusan_user_data WHERE id = {aqua_user_id}"
+        )
+
+        # can never be None unless something is seriously broken
+        user_data = None
+        for user in user_result:
+            user_data = user._asdict()
+
+        card_id = user_data["card_id"]
+
+        return card_id
+
     def import_chuni(self):
         game_cfg = ChuniConfig()
         game_cfg.update(yaml.safe_load(open(f"{self.config_folder}/chuni.yaml")))

-        base = ChuniNewPlus(self.config, game_cfg)
+        base = ChuniSun(self.config, game_cfg)
         version_str = ChuniConstants.game_ver_to_string(base.version)

-        answer = input(f"Do you really want to import ALL {version_str} data into ARTEMiS? [N/y]: ")
+        answer = input(
+            f"Do you really want to import ALL {version_str} data into ARTEMiS? [N/y]: "
+        )
[N/y]: " + ) if answer.lower() != "y": self.logger.info("User aborted operation") return @@ -224,8 +239,8 @@ class Importer: tmp = self.parse_aqua_row( row, datetime_columns, - unused_columns=["id", "lastLoginDate"], - user_id_column="cardId", + unused_columns=["id", "lastLoginDate", "cardId"], + card_id=row._asdict()["card_id"], ) base.handle_upsert_user_all_api_request( @@ -236,10 +251,9 @@ class Importer: result, datetime_columns = self.parse_aqua_db("chusan_user_game_option") for row in result: + user = self.get_chuni_card_id_by_aqua_row(row) tmp = self.parse_aqua_row( - row, - datetime_columns, - unused_columns=["id"], + row, datetime_columns, unused_columns=["id"], card_id=user ) tmp["speed_120"] = tmp.pop("speed120") @@ -255,21 +269,28 @@ class Importer: result, datetime_columns = self.parse_aqua_db("chusan_user_general_data") for row in result: - tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"]) + user = self.get_chuni_card_id_by_aqua_row(row) + tmp = self.parse_aqua_row( + row, datetime_columns, unused_columns=["id"], card_id=user + ) if tmp["propertyKey"] == "recent_rating_list": rating_list = [] for rating in tmp["propertyValue"].split(","): music_id, difficult_id, score = rating.split(":") - rating_list.append({ - "score": score, - "musicId": music_id, - "difficultId": difficult_id, - "romVersionCode": "2000001" - - }) + rating_list.append( + { + "score": score, + "musicId": music_id, + "difficultId": difficult_id, + "romVersionCode": "2000001", + } + ) base.handle_upsert_user_all_api_request( - {"userId": tmp["user"], "upsertUserAll": {"userRecentRatingList": rating_list}} + { + "userId": tmp["user"], + "upsertUserAll": {"userRecentRatingList": rating_list}, + } ) self.logger.info( @@ -278,7 +299,10 @@ class Importer: result, datetime_columns = self.parse_aqua_db("chusan_user_activity") for row in result: - tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"]) + user = self.get_chuni_card_id_by_aqua_row(row) + tmp = self.parse_aqua_row( + row, datetime_columns, unused_columns=["id"], card_id=user + ) tmp["id"] = tmp["activityId"] tmp.pop("activityId") @@ -292,7 +316,10 @@ class Importer: result, datetime_columns = self.parse_aqua_db("chusan_user_character") for row in result: - tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"]) + user = self.get_chuni_card_id_by_aqua_row(row) + tmp = self.parse_aqua_row( + row, datetime_columns, unused_columns=["id"], card_id=user + ) base.handle_upsert_user_all_api_request( {"userId": tmp["user"], "upsertUserAll": {"userCharacterList": [tmp]}} @@ -304,7 +331,10 @@ class Importer: result, datetime_columns = self.parse_aqua_db("chusan_user_course") for row in result: - tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"]) + user = self.get_chuni_card_id_by_aqua_row(row) + tmp = self.parse_aqua_row( + row, datetime_columns, unused_columns=["id"], card_id=user + ) base.handle_upsert_user_all_api_request( {"userId": tmp["user"], "upsertUserAll": {"userCourseList": [tmp]}} @@ -314,7 +344,10 @@ class Importer: result, datetime_columns = self.parse_aqua_db("chusan_user_duel") for row in result: - tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"]) + user = self.get_chuni_card_id_by_aqua_row(row) + tmp = self.parse_aqua_row( + row, datetime_columns, unused_columns=["id"], card_id=user + ) base.handle_upsert_user_all_api_request( {"userId": tmp["user"], "upsertUserAll": {"userDuelList": [tmp]}} @@ -324,7 +357,10 @@ class Importer: result, datetime_columns 
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             base.handle_upsert_user_all_api_request(
                 {"userId": tmp["user"], "upsertUserAll": {"userItemList": [tmp]}}
             )
@@ -334,7 +370,10 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_map_area")
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             base.handle_upsert_user_all_api_request(
                 {"userId": tmp["user"], "upsertUserAll": {"userMapAreaList": [tmp]}}
             )
@@ -344,7 +383,10 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_music_detail")
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             base.handle_upsert_user_all_api_request(
                 {"userId": tmp["user"], "upsertUserAll": {"userMusicDetailList": [tmp]}}
             )
@@ -356,7 +398,10 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_playlog")
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             base.handle_upsert_user_all_api_request(
                 {"userId": tmp["user"], "upsertUserAll": {"userPlaylogList": [tmp]}}
             )
@@ -371,7 +416,9 @@ class Importer:
         base = OngekiBrightMemory(self.config, game_cfg)
         version_str = OngekiConstants.game_ver_to_string(base.version)

-        answer = input(f"Do you really want to import ALL {version_str} data into ARTEMiS? [N/y]: ")
+        answer = input(
+            f"Do you really want to import ALL {version_str} data into ARTEMiS? [N/y]: "
+        )
[N/y]: " + ) if answer.lower() != "y": self.logger.info("User aborted operation") return @@ -416,15 +463,19 @@ class Importer: rating_list = [] for rating in tmp["propertyValue"].split(","): music_id, difficult_id, score = rating.split(":") - rating_list.append({ - "score": score, - "musicId": music_id, - "difficultId": difficult_id, - "romVersionCode": "1000000" - - }) + rating_list.append( + { + "score": score, + "musicId": music_id, + "difficultId": difficult_id, + "romVersionCode": "1000000", + } + ) base.handle_upsert_user_all_api_request( - {"userId": tmp["user"], "upsertUserAll": {"userRecentRatingList": rating_list}} + { + "userId": tmp["user"], + "upsertUserAll": {"userRecentRatingList": rating_list}, + } ) self.logger.info( @@ -501,7 +552,9 @@ class Importer: {"userId": tmp["user"], "upsertUserAll": {"userCharacterList": [tmp]}} ) - self.logger.info(f"Imported {version_str} userCharacter: {tmp['characterId']}") + self.logger.info( + f"Imported {version_str} userCharacter: {tmp['characterId']}" + ) result, datetime_columns = self.parse_aqua_db("ongeki_user_deck") for row in result: @@ -554,10 +607,15 @@ class Importer: ) base.handle_upsert_user_all_api_request( - {"userId": tmp["user"], "upsertUserAll": {"userMemoryChapterList": [tmp]}} + { + "userId": tmp["user"], + "upsertUserAll": {"userMemoryChapterList": [tmp]}, + } ) - self.logger.info(f"Imported {version_str} userMemoryChapter: {tmp['chapterId']}") + self.logger.info( + f"Imported {version_str} userMemoryChapter: {tmp['chapterId']}" + ) result, datetime_columns = self.parse_aqua_db("ongeki_user_mission_point") for row in result: @@ -568,10 +626,15 @@ class Importer: ) base.handle_upsert_user_all_api_request( - {"userId": tmp["user"], "upsertUserAll": {"userMissionPointList": [tmp]}} + { + "userId": tmp["user"], + "upsertUserAll": {"userMissionPointList": [tmp]}, + } ) - self.logger.info(f"Imported {version_str} userMissionPoint: {tmp['eventId']}") + self.logger.info( + f"Imported {version_str} userMissionPoint: {tmp['eventId']}" + ) result, datetime_columns = self.parse_aqua_db("ongeki_user_music_detail") for row in result: @@ -585,7 +648,9 @@ class Importer: {"userId": tmp["user"], "upsertUserAll": {"userMusicDetailList": [tmp]}} ) - self.logger.info(f"Imported {version_str} userMusicDetail: {tmp['musicId']}") + self.logger.info( + f"Imported {version_str} userMusicDetail: {tmp['musicId']}" + ) result, datetime_columns = self.parse_aqua_db("ongeki_user_playlog") for row in result: