Merge pull request 'add support for maimai dx festival' (#1) from xyqyear/AQUA-to-ARTEMiS:add-mai2-fes-support into main

Reviewed-on: #1
This commit is contained in:
Dniel97 2023-10-28 22:05:13 +00:00
commit c586e438aa
1 changed file with 392 additions and 2 deletions

View File

@ -22,6 +22,9 @@ from titles.chuni.sun import ChuniSun
from titles.ongeki.brightmemory import OngekiBrightMemory from titles.ongeki.brightmemory import OngekiBrightMemory
from titles.ongeki.config import OngekiConfig from titles.ongeki.config import OngekiConfig
from titles.ongeki.const import OngekiConstants from titles.ongeki.const import OngekiConstants
from titles.mai2.festival import Mai2Festival
from titles.mai2.config import Mai2Config
from titles.mai2.const import Mai2Constants
class AquaData: class AquaData:
@ -247,7 +250,7 @@ class Importer:
f"SELECT * FROM chusan_user_data WHERE id = {aqua_user_id}" f"SELECT * FROM chusan_user_data WHERE id = {aqua_user_id}"
) )
# could never be None unless something is really fucked up # could never be None unless something is really fucked up
user_data = None user_data = None
for user in user_result: for user in user_result:
user_data = user._asdict() user_data = user._asdict()
@ -451,7 +454,7 @@ class Importer:
f"SELECT * FROM ongeki_user_data WHERE id = {aqua_user_id}" f"SELECT * FROM ongeki_user_data WHERE id = {aqua_user_id}"
) )
# could never be None unless something is really fucked up # could never be None unless something is really fucked up
user_data = None user_data = None
for user in user_result: for user in user_result:
user_data = user._asdict() user_data = user._asdict()
@ -733,6 +736,392 @@ class Importer:
self.logger.info(f"Imported {version_str} userTechCount: {tmp['levelId']}") self.logger.info(f"Imported {version_str} userTechCount: {tmp['levelId']}")
def get_mai2_card_id_by_aqua_row(self, row: Row, user_id_column: str = "user_id"):
    """Resolve the aime card id for the aqua user referenced by *row*.

    *user_id_column* names the column of *row* that holds the aqua user id
    (most maimai2_* tables use ``user_id``; maimai2_user_detail itself uses
    ``id``).
    """
    aqua_user_id = row._asdict()[user_id_column]
    detail_result = self.aqua.execute(
        f"SELECT * FROM maimai2_user_detail WHERE id = {aqua_user_id}"
    )
    # could never be None unless something is really fucked up
    user_detail = None
    for detail_row in detail_result:
        user_detail = detail_row._asdict()
    return user_detail["aime_card_id"]
def get_mai2_rating_lists_by_aqua_row(self, row: Row, user_id_column: str = "id"):
    """Collect the four maimai DX rating lists and the udemae (dan) record
    for the aqua user referenced by *row*.

    Each ``property_value`` in maimai2_user_general_data is a comma-separated
    list of ``musicId:level:romVersion:achievement`` entries.

    Returns a 5-tuple::

        (recent_rating, recent_rating_new,
         recent_rating_next, recent_rating_next_new, udemae)

    where the first four are lists of dicts and ``udemae`` is a dict (empty
    if the user has no maimai2_user_udemae row).
    """
    aqua_user_id = row._asdict()[user_id_column]
    user_general_data_result = self.aqua.execute(
        f"SELECT * FROM maimai2_user_general_data WHERE user_id = {aqua_user_id}"
    )
    ratings = {}
    # NOTE: loop variable renamed — the original shadowed the *row* parameter
    for general_row in user_general_data_result:
        general_data = general_row._asdict()
        property_key = general_data["property_key"]
        property_value: str = general_data["property_value"]
        ratings[property_key] = []
        if property_value == "":
            continue
        for rating_str in property_value.split(","):
            (
                music_id_str,
                level_str,
                rom_version_str,
                achievement_str,
            ) = rating_str.split(":")
            ratings[property_key].append(
                {
                    "musicId": int(music_id_str),
                    "level": int(level_str),
                    "romVersion": int(rom_version_str),
                    "achievement": int(achievement_str),
                }
            )
    user_udemae_result = self.aqua.execute(
        f"SELECT * FROM maimai2_user_udemae WHERE user_id = {aqua_user_id}"
    )
    # BUG FIX: udemae was previously unbound when the user had no
    # maimai2_user_udemae row, raising UnboundLocalError at the return below.
    udemae = {}
    for user_udemae_row in user_udemae_result:
        user_udemae = user_udemae_row._asdict()
        user_udemae.pop("id")
        user_udemae.pop("user_id")
        # snake_case DB columns -> camelCase keys expected by ARTEMiS
        udemae = {inflection.camelize(k, False): v for k, v in user_udemae.items()}
    # .get(..., []) keeps a partially-populated general-data table from
    # raising KeyError; missing lists simply import as empty.
    return (
        ratings.get("recent_rating", []),
        ratings.get("recent_rating_new", []),
        ratings.get("recent_rating_next", []),
        ratings.get("recent_rating_next_new", []),
        udemae,
    )
def import_mai2(self):
    """Import ALL maimai DX (Festival) user data from AQUA into ARTEMiS.

    Loads mai2.yaml from the config folder, asks for interactive
    confirmation, then replays every maimai2_* table through the
    Mai2Festival request handlers (or, for activity rows, straight through
    the profile data layer).  Per-row ARTEMiS user resolution goes through
    the aime card id via get_mai2_card_id_by_aqua_row().
    """
    game_cfg = Mai2Config()
    game_cfg.update(yaml.safe_load(open(f"{self.config_folder}/mai2.yaml")))

    fes = Mai2Festival(self.config, game_cfg)
    version_str = Mai2Constants.game_ver_to_string(fes.version)

    # destructive/bulk operation — require explicit confirmation
    answer = input(
        f"Do you really want to import ALL {version_str} data into ARTEMiS? [N/y]: "
    )
    if answer.lower() != "y":
        self.logger.info("User aborted operation")
        return

    # maimai2_user_detail -> userData
    result, datetime_columns = self.parse_aqua_db("maimai2_user_detail")
    for row in result:
        tmp = self.parse_aqua_row(
            row,
            datetime_columns,
            unused_columns=["id", "aimeCardId"],
            card_id=row._asdict()["aime_card_id"],
        )
        # useless but required
        tmp["accessCode"] = ""
        # camel case conversion fix
        tmp["lastSelectEMoney"] = tmp.pop("lastSelectemoney")
        # convert charaSlot and charaLockSlot
        # "0;0;0;0;0" to [0, 0, 0, 0, 0]
        tmp["charaSlot"] = [int(x) for x in tmp["charaSlot"].split(";")]
        tmp["charaLockSlot"] = [int(x) for x in tmp["charaLockSlot"].split(";")]

        fes.handle_upsert_user_all_api_request(
            {"userId": tmp["user"], "upsertUserAll": {"userData": [tmp]}}
        )
        self.logger.info(f"Imported {version_str} userData: {tmp['user']}")

        # ! Here we insert user rating list
        rating = tmp["playerRating"]
        (
            rating_list,
            new_rating_list,
            next_rating_list,
            next_new_rating_list,
            udemae,
        ) = self.get_mai2_rating_lists_by_aqua_row(row)
        fes.handle_upsert_user_all_api_request(
            {
                "userId": tmp["user"],
                "upsertUserAll": {
                    "userRatingList": [
                        {
                            "rating": rating,
                            "ratingList": rating_list,
                            "newRatingList": new_rating_list,
                            "nextRatingList": next_rating_list,
                            "nextNewRatingList": next_new_rating_list,
                            "udemae": udemae,
                        }
                    ]
                },
            }
        )

    # maimai2_user_playlog -> userPlaylogList
    result, datetime_columns = self.parse_aqua_db("maimai2_user_playlog")
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row, datetime_columns, unused_columns=[], card_id=user
        )
        tmp["userId"] = tmp["user"]
        # keep the aqua row id only for logging; it is not uploaded
        id_ = tmp.pop("id")

        fes.handle_upload_user_playlog_api_request(
            {"userId": tmp["user"], "userPlaylog": tmp}
        )
        self.logger.info(f"Imported {version_str} userPlaylog: {id_}")

    # maimai2_user_extend -> userExtend
    result, datetime_columns = self.parse_aqua_db("maimai2_user_extend")
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row, datetime_columns, unused_columns=["id"], card_id=user
        )
        fes.handle_upsert_user_all_api_request(
            {"userId": tmp["user"], "upsertUserAll": {"userExtend": [tmp]}}
        )
        self.logger.info(f"Imported {version_str} userExtend: {tmp['user']}")

    # skip userGhost

    # maimai2_user_option -> userOption
    result, datetime_columns = self.parse_aqua_db("maimai2_user_option")
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row, datetime_columns, unused_columns=["id"], card_id=user
        )
        fes.handle_upsert_user_all_api_request(
            {"userId": tmp["user"], "upsertUserAll": {"userOption": [tmp]}}
        )
        self.logger.info(f"Imported {version_str} userOption: {tmp['user']}")

    # maimai2_user_activity -> userActivityList
    result, datetime_columns = self.parse_aqua_db("maimai2_user_activity")
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row,
            datetime_columns,
            # we do need the id column cuz aqua always sets activityId to 1
            # and in artemis, activityId will be set to the value of id and id will be dropped
            unused_columns=[],
            card_id=user,
        )
        # using raw operation because wtf
        fes.data.profile.put_profile_activity(tmp["user"], tmp)
        self.logger.info(
            f"Imported {version_str} userActivity: {tmp['user']}, {tmp['activityId']}"
        )

    # maimai2_user_charge -> userChargeList
    # untested
    result, datetime_columns = self.parse_aqua_db("maimai2_user_charge")
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row,
            datetime_columns,
            unused_columns=["id"],
            card_id=user,
        )
        fes.handle_upsert_user_all_api_request(
            {"userId": tmp["user"], "upsertUserAll": {"userChargeList": [tmp]}}
        )
        self.logger.info(
            f"Imported {version_str} userCharge: {tmp['user']}, {tmp['chargeId']}"
        )

    # maimai2_user_character -> userCharacterList
    result, datetime_columns = self.parse_aqua_db("maimai2_user_character")
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row,
            datetime_columns,
            unused_columns=["id"],
            card_id=user,
        )
        # aqua has no per-character point column; default to 0
        tmp["point"] = 0
        fes.handle_upsert_user_all_api_request(
            {"userId": tmp["user"], "upsertUserAll": {"userCharacterList": [tmp]}}
        )
        self.logger.info(
            f"Imported {version_str} userCharacter: {tmp['user']}, {tmp['characterId']}"
        )

    # maimai2_user_item -> userItemList
    result, datetime_columns = self.parse_aqua_db("maimai2_user_item")
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row,
            datetime_columns,
            unused_columns=["id"],
            card_id=user,
        )
        fes.handle_upsert_user_all_api_request(
            {"userId": tmp["user"], "upsertUserAll": {"userItemList": [tmp]}}
        )
        self.logger.info(
            f"Imported {version_str} userItem: {tmp['user']}, {tmp['itemKind']},{tmp['itemId']}"
        )

    # maimai2_user_login_bonus -> userLoginBonusList
    result, datetime_columns = self.parse_aqua_db("maimai2_user_login_bonus")
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row,
            datetime_columns,
            unused_columns=["id"],
            card_id=user,
        )
        fes.handle_upsert_user_all_api_request(
            {"userId": tmp["user"], "upsertUserAll": {"userLoginBonusList": [tmp]}}
        )
        self.logger.info(
            f"Imported {version_str} userLoginBonus: {tmp['user']}, {tmp['bonusId']}"
        )

    # maimai2_user_map -> userMapList
    result, datetime_columns = self.parse_aqua_db("maimai2_user_map")
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row,
            datetime_columns,
            unused_columns=["id"],
            card_id=user,
        )
        fes.handle_upsert_user_all_api_request(
            {"userId": tmp["user"], "upsertUserAll": {"userMapList": [tmp]}}
        )
        self.logger.info(
            f"Imported {version_str} userMap: {tmp['user']}, {tmp['mapId']}"
        )

    # maimai2_user_music_detail -> userMusicDetailList
    result, datetime_columns = self.parse_aqua_db("maimai2_user_music_detail")
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row,
            datetime_columns,
            unused_columns=["id"],
            card_id=user,
        )
        fes.handle_upsert_user_all_api_request(
            {"userId": tmp["user"], "upsertUserAll": {"userMusicDetailList": [tmp]}}
        )
        self.logger.info(
            f"Imported {version_str} userMusicDetail: {tmp['user']}, {tmp['musicId']}"
        )

    # maimai2_user_course -> userCourseList
    result, datetime_columns = self.parse_aqua_db("maimai2_user_course")
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row,
            datetime_columns,
            unused_columns=["id"],
            card_id=user,
        )
        fes.handle_upsert_user_all_api_request(
            {"userId": tmp["user"], "upsertUserAll": {"userCourseList": [tmp]}}
        )
        self.logger.info(
            f"Imported {version_str} userCourse: {tmp['user']}, {tmp['courseId']}"
        )

    # maimai2_user_favorite -> userFavoriteList
    # untested
    result, datetime_columns = self.parse_aqua_db("maimai2_user_favorite")
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row,
            datetime_columns,
            unused_columns=["id"],
            card_id=user,
        )
        # reshape: ARTEMiS expects {user, kind, itemIdList}
        tmp = {
            "user": tmp["user"],
            "kind": tmp["itemKind"],
            "itemIdList": tmp["itemIdList"],
        }
        fes.handle_upsert_user_all_api_request(
            {"userId": tmp["user"], "upsertUserAll": {"userFavoriteList": [tmp]}}
        )
        self.logger.info(f"Imported {version_str} userFavorite: {tmp['user']}")

    # maimai2_user_friend_season_ranking -> userFriendSeasonRankingList
    result, datetime_columns = self.parse_aqua_db(
        "maimai2_user_friend_season_ranking"
    )
    for row in result:
        user = self.get_mai2_card_id_by_aqua_row(row)
        tmp = self.parse_aqua_row(
            row,
            datetime_columns,
            unused_columns=["id"],
            card_id=user,
        )
        # user is redundant
        artemis_user = tmp.pop("user")
        fes.handle_upsert_user_all_api_request(
            {
                "userId": artemis_user,
                "upsertUserAll": {"userFriendSeasonRankingList": [tmp]},
            }
        )
        self.logger.info(
            f"Imported {version_str} userFriendSeasonRanking: {artemis_user}, {tmp['seasonId']}"
        )
def main(): def main():
parser = argparse.ArgumentParser(description="AQUA to ARTEMiS") parser = argparse.ArgumentParser(description="AQUA to ARTEMiS")
@ -753,6 +1142,7 @@ def main():
importer.import_chuni() importer.import_chuni()
importer.import_ongeki() importer.import_ongeki()
importer.import_mai2()
if __name__ == "__main__": if __name__ == "__main__":