Fixed CHUNITHM imports and changed NEW!! to SUN

Dniel97 2023-05-16 23:17:07 +02:00
parent 3e21b46d3c
commit aaf92b0525
Signed by untrusted user: Dniel97
GPG Key ID: 6180B3C768FB2E08
2 changed files with 113 additions and 48 deletions

View File

@@ -39,7 +39,7 @@ options:
 ## TODO
-- [x] Chunithm New!! Plus
+- [x] CHUNITHM SUN
 - [x] O.N.G.E.K.I. Bright Memory
 - [ ] maimai DX UNiVERSE/FESTiVAL
 - [ ] Change the game version to import using arguments
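
The last TODO item concerns passing the game version as a command-line argument instead of hardcoding the version class. A minimal sketch of what that could look like, assuming argparse; the flag name, choices, and default are invented for illustration and are not part of this commit:

import argparse

# Hypothetical CLI flag; this commit still hardcodes ChuniSun in the importer.
parser = argparse.ArgumentParser(description="AQUA to ARTEMiS importer")
parser.add_argument(
    "--chuni-version",
    choices=["newplus", "sun"],
    default="sun",
    help="which CHUNITHM version class to import with",
)
args = parser.parse_args()
print(args.chuni_version)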

View File

@@ -19,7 +19,7 @@ from core.config import CoreConfig
 from core.data.database import Data
 from titles.chuni.config import ChuniConfig
 from titles.chuni.const import ChuniConstants
-from titles.chuni.newplus import ChuniNewPlus
+from titles.chuni.sun import ChuniSun
 from titles.ongeki.brightmemory import OngekiBrightMemory
 from titles.ongeki.config import OngekiConfig
 from titles.ongeki.const import OngekiConstants
@@ -161,7 +161,7 @@ class Importer:
         row: Row,
         datetime_columns: list[Dict],
         unused_columns: list[str],
-        user_id_column: str = "userId",
+        card_id: int,
     ) -> Dict:
         row = row._asdict()
         for column in datetime_columns:
@@ -180,9 +180,8 @@ class Importer:
             # add the new camelCase key, value pair to tmp
             tmp[k] = v if v != "null" else None

-        # get the aqua internal user id and also drop it from the tmp dict
-        aqua_user_id = tmp[user_id_column]
-        tmp.pop(user_id_column)
+        # drop the aqua internal user id
+        tmp.pop("userId", None)

         # removes unused columns
         for unused in unused_columns:
@@ -190,9 +189,8 @@ class Importer:
         # get from the internal user id the actual luid
         card_data = None
-        card_result = self.aqua.execute(
-            f"SELECT * FROM sega_card WHERE id = {aqua_user_id}"
-        )
+        card_result = self.aqua.execute(f"SELECT * FROM sega_card WHERE id = {card_id}")

         for card in card_result:
             card_data = card._asdict()
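
With this signature change, parse_aqua_row no longer extracts the aqua-internal user id from the row itself; callers resolve and pass card_id, and the card's access code (luid) is then looked up in sega_card. A runnable sketch of that lookup, using an in-memory sqlite3 table as a stand-in for the aqua database (the luid column name and sample values are assumptions for illustration):

import sqlite3

# Stand-in for the aqua DB, just enough schema to exercise the lookup.
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE sega_card (id INTEGER PRIMARY KEY, luid TEXT)")
conn.execute("INSERT INTO sega_card VALUES (1, '00000000000000000001')")

def luid_for_card(card_id: int) -> str | None:
    # Same query shape as the diff; a parameterized query is used here
    # instead of an f-string to avoid SQL injection.
    row = conn.execute(
        "SELECT luid FROM sega_card WHERE id = ?", (card_id,)
    ).fetchone()
    return row[0] if row else None

print(luid_for_card(1))  # -> 00000000000000000001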
@@ -207,14 +205,31 @@ class Importer:
         return tmp

+    def get_chuni_card_id_by_aqua_row(self, row: Row, user_id_column: str = "user_id"):
+        aqua_user_id = row._asdict()[user_id_column]
+        user_result = self.aqua.execute(
+            f"SELECT * FROM chusan_user_data WHERE id = {aqua_user_id}"
+        )
+
+        # can never be None unless something is seriously broken
+        user_data = None
+        for user in user_result:
+            user_data = user._asdict()
+
+        card_id = user_data["card_id"]
+        return card_id
+
     def import_chuni(self):
         game_cfg = ChuniConfig()
         game_cfg.update(yaml.safe_load(open(f"{self.config_folder}/chuni.yaml")))

-        base = ChuniNewPlus(self.config, game_cfg)
+        base = ChuniSun(self.config, game_cfg)
         version_str = ChuniConstants.game_ver_to_string(base.version)

-        answer = input(f"Do you really want to import ALL {version_str} data into ARTEMiS? [N/y]: ")
+        answer = input(
+            f"Do you really want to import ALL {version_str} data into ARTEMiS? [N/y]: "
+        )
         if answer.lower() != "y":
             self.logger.info("User aborted operation")
             return
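
The new get_chuni_card_id_by_aqua_row helper bridges a schema mismatch: most chusan_* tables only store the aqua-internal user_id, while the card id that parse_aqua_row now needs lives on chusan_user_data (which is why the chusan_user_data loop in the next hunk can read row._asdict()["card_id"] directly). A runnable sketch of that two-step resolution, again with an in-memory sqlite3 stand-in and a minimal assumed schema:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    CREATE TABLE chusan_user_data (id INTEGER PRIMARY KEY, card_id INTEGER);
    CREATE TABLE chusan_user_playlog (id INTEGER PRIMARY KEY, user_id INTEGER);
    INSERT INTO chusan_user_data VALUES (7, 1);
    INSERT INTO chusan_user_playlog VALUES (100, 7);
    """
)

def card_id_for_user(aqua_user_id: int) -> int:
    # The step the helper performs: aqua user id -> chusan_user_data.card_id
    (card_id,) = conn.execute(
        "SELECT card_id FROM chusan_user_data WHERE id = ?", (aqua_user_id,)
    ).fetchone()
    return card_id

# A playlog row only knows its user_id; resolve it to the card id.
user_id = conn.execute("SELECT user_id FROM chusan_user_playlog").fetchone()[0]
print(card_id_for_user(user_id))  # -> 1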
@@ -224,8 +239,8 @@ class Importer:
             tmp = self.parse_aqua_row(
                 row,
                 datetime_columns,
-                unused_columns=["id", "lastLoginDate"],
-                user_id_column="cardId",
+                unused_columns=["id", "lastLoginDate", "cardId"],
+                card_id=row._asdict()["card_id"],
             )

             base.handle_upsert_user_all_api_request(
@@ -236,10 +251,9 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_game_option")
         for row in result:
+            user = self.get_chuni_card_id_by_aqua_row(row)
             tmp = self.parse_aqua_row(
-                row,
-                datetime_columns,
-                unused_columns=["id"],
+                row, datetime_columns, unused_columns=["id"], card_id=user
             )

             tmp["speed_120"] = tmp.pop("speed120")
@@ -255,21 +269,28 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_general_data")
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             if tmp["propertyKey"] == "recent_rating_list":
                 rating_list = []
                 for rating in tmp["propertyValue"].split(","):
                     music_id, difficult_id, score = rating.split(":")
-                    rating_list.append({
-                        "score": score,
-                        "musicId": music_id,
-                        "difficultId": difficult_id,
-                        "romVersionCode": "2000001"
-                    })
+                    rating_list.append(
+                        {
+                            "score": score,
+                            "musicId": music_id,
+                            "difficultId": difficult_id,
+                            "romVersionCode": "2000001",
+                        }
+                    )

                 base.handle_upsert_user_all_api_request(
-                    {"userId": tmp["user"], "upsertUserAll": {"userRecentRatingList": rating_list}}
+                    {
+                        "userId": tmp["user"],
+                        "upsertUserAll": {"userRecentRatingList": rating_list},
+                    }
                 )

         self.logger.info(
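
For reference, recent_rating_list is stored as a comma-separated string of musicId:difficultId:score triplets, which the loop above unpacks into the dict shape that upsertUserAll expects. A self-contained sketch of the same transformation (the sample input is invented):

def parse_recent_rating_list(property_value: str, rom_version: str = "2000001") -> list[dict]:
    rating_list = []
    for rating in property_value.split(","):
        music_id, difficult_id, score = rating.split(":")
        rating_list.append(
            {
                "score": score,
                "musicId": music_id,
                "difficultId": difficult_id,
                "romVersionCode": rom_version,
            }
        )
    return rating_list

# Two invented entries in "musicId:difficultId:score" form:
print(parse_recent_rating_list("428:3:1007500,11:2:990000"))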
@@ -278,7 +299,10 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_activity")
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             tmp["id"] = tmp["activityId"]
             tmp.pop("activityId")
@@ -292,7 +316,10 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_character")
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             base.handle_upsert_user_all_api_request(
                 {"userId": tmp["user"], "upsertUserAll": {"userCharacterList": [tmp]}}
@@ -304,7 +331,10 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_course")
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             base.handle_upsert_user_all_api_request(
                 {"userId": tmp["user"], "upsertUserAll": {"userCourseList": [tmp]}}
@@ -314,7 +344,10 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_duel")
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             base.handle_upsert_user_all_api_request(
                 {"userId": tmp["user"], "upsertUserAll": {"userDuelList": [tmp]}}
@@ -324,7 +357,10 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_item")
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             base.handle_upsert_user_all_api_request(
                 {"userId": tmp["user"], "upsertUserAll": {"userItemList": [tmp]}}
@@ -334,7 +370,10 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_map_area")
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             base.handle_upsert_user_all_api_request(
                 {"userId": tmp["user"], "upsertUserAll": {"userMapAreaList": [tmp]}}
@@ -344,7 +383,10 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_music_detail")
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             base.handle_upsert_user_all_api_request(
                 {"userId": tmp["user"], "upsertUserAll": {"userMusicDetailList": [tmp]}}
@@ -356,7 +398,10 @@ class Importer:
         result, datetime_columns = self.parse_aqua_db("chusan_user_playlog")
         for row in result:
-            tmp = self.parse_aqua_row(row, datetime_columns, unused_columns=["id"])
+            user = self.get_chuni_card_id_by_aqua_row(row)
+            tmp = self.parse_aqua_row(
+                row, datetime_columns, unused_columns=["id"], card_id=user
+            )

             base.handle_upsert_user_all_api_request(
                 {"userId": tmp["user"], "upsertUserAll": {"userPlaylogList": [tmp]}}
@@ -371,7 +416,9 @@ class Importer:
         base = OngekiBrightMemory(self.config, game_cfg)
         version_str = OngekiConstants.game_ver_to_string(base.version)

-        answer = input(f"Do you really want to import ALL {version_str} data into ARTEMiS? [N/y]: ")
+        answer = input(
+            f"Do you really want to import ALL {version_str} data into ARTEMiS? [N/y]: "
+        )
         if answer.lower() != "y":
             self.logger.info("User aborted operation")
             return
@@ -416,15 +463,19 @@ class Importer:
                 rating_list = []
                 for rating in tmp["propertyValue"].split(","):
                     music_id, difficult_id, score = rating.split(":")
-                    rating_list.append({
-                        "score": score,
-                        "musicId": music_id,
-                        "difficultId": difficult_id,
-                        "romVersionCode": "1000000"
-                    })
+                    rating_list.append(
+                        {
+                            "score": score,
+                            "musicId": music_id,
+                            "difficultId": difficult_id,
+                            "romVersionCode": "1000000",
+                        }
+                    )

                 base.handle_upsert_user_all_api_request(
-                    {"userId": tmp["user"], "upsertUserAll": {"userRecentRatingList": rating_list}}
+                    {
+                        "userId": tmp["user"],
+                        "upsertUserAll": {"userRecentRatingList": rating_list},
+                    }
                 )

         self.logger.info(
@@ -501,7 +552,9 @@ class Importer:
                 {"userId": tmp["user"], "upsertUserAll": {"userCharacterList": [tmp]}}
             )

-            self.logger.info(f"Imported {version_str} userCharacter: {tmp['characterId']}")
+            self.logger.info(
+                f"Imported {version_str} userCharacter: {tmp['characterId']}"
+            )

         result, datetime_columns = self.parse_aqua_db("ongeki_user_deck")
         for row in result:
@@ -554,10 +607,15 @@ class Importer:
             )

             base.handle_upsert_user_all_api_request(
-                {"userId": tmp["user"], "upsertUserAll": {"userMemoryChapterList": [tmp]}}
+                {
+                    "userId": tmp["user"],
+                    "upsertUserAll": {"userMemoryChapterList": [tmp]},
+                }
             )

-            self.logger.info(f"Imported {version_str} userMemoryChapter: {tmp['chapterId']}")
+            self.logger.info(
+                f"Imported {version_str} userMemoryChapter: {tmp['chapterId']}"
+            )

         result, datetime_columns = self.parse_aqua_db("ongeki_user_mission_point")
         for row in result:
@@ -568,10 +626,15 @@ class Importer:
             )

             base.handle_upsert_user_all_api_request(
-                {"userId": tmp["user"], "upsertUserAll": {"userMissionPointList": [tmp]}}
+                {
+                    "userId": tmp["user"],
+                    "upsertUserAll": {"userMissionPointList": [tmp]},
+                }
             )

-            self.logger.info(f"Imported {version_str} userMissionPoint: {tmp['eventId']}")
+            self.logger.info(
+                f"Imported {version_str} userMissionPoint: {tmp['eventId']}"
+            )

         result, datetime_columns = self.parse_aqua_db("ongeki_user_music_detail")
         for row in result:
@@ -585,7 +648,9 @@ class Importer:
                 {"userId": tmp["user"], "upsertUserAll": {"userMusicDetailList": [tmp]}}
             )

-            self.logger.info(f"Imported {version_str} userMusicDetail: {tmp['musicId']}")
+            self.logger.info(
+                f"Imported {version_str} userMusicDetail: {tmp['musicId']}"
+            )

         result, datetime_columns = self.parse_aqua_db("ongeki_user_playlog")
         for row in result: