Compare commits

develop..main

No commits in common. "develop" and "main" have entirely different histories.

2 changed files with 96 additions and 528 deletions

README.md

@@ -1,10 +1,10 @@
# AQUA-to-ARTEMiS (for develop branch only)
# AQUA-to-ARTEMiS
Imports AQUA's db.sqlite or MySQL database into the ARTEMiS develop branch, currently WIP
Imports AQUA's db.sqlite database into the ARTEMiS database, currently WIP
## Requirements
You should already have the **develop** version of [ARTEMiS](https://gitea.tendokyu.moe/Hay1tsme/artemis/src/branch/develop) installed and have created the database with
You should already have [ARTEMiS](https://gitea.tendokyu.moe/Hay1tsme/artemis) installed and have created the database with
```sh
python dbutils.py create
@@ -17,19 +17,19 @@ python dbutils.py create
Place `aqua_importer.py` in your ARTEMiS directory and execute it:
```sh
python aqua_importer.py "C:\path\to\aqua"
python aqua_importer.py "C:\path\to\aqua\data"
```
**CAN OVERWRITE EXISTING GAME DATA! BE WARNED!**
## How to use
```
usage: aqua_importer.py [-h] [--config CONFIG] aqua_data_path
usage: aqua_importer.py [-h] [--config CONFIG] aqua_folder_path
AQUA to ARTEMiS
positional arguments:
aqua_data_path Absolute path to the AQUA folder, where application.properties is located.
aqua_folder_path Absolute path to the AQUA /data folder, where db.sqlite is located.
options:
-h, --help show this help message and exit
@@ -41,6 +41,6 @@ options:
- [x] CHUNITHM SUN
- [x] O.N.G.E.K.I. Bright Memory
- [x] maimai DX FESTiVAL
- [ ] maimai DX UNiVERSE/FESTiVAL
- [ ] Change the game version to import using arguments
- [ ] Code clean up/optimizations

aqua_importer.py

@@ -1,4 +1,3 @@
import asyncio
from datetime import datetime
import os
import inflection
@@ -11,6 +10,7 @@ from sqlalchemy.exc import SQLAlchemyError
from logging.handlers import TimedRotatingFileHandler
from typing import Any, Dict, Optional
import yaml
import argparse
import logging
import coloredlogs
@@ -23,19 +23,13 @@ from titles.chuni.sun import ChuniSun
from titles.ongeki.brightmemory import OngekiBrightMemory
from titles.ongeki.config import OngekiConfig
from titles.ongeki.const import OngekiConstants
from titles.mai2.festival import Mai2Festival
from titles.mai2.config import Mai2Config
from titles.mai2.const import Mai2Constants
class AquaData:
def __init__(self, aqua_db_path: str) -> None:
if "@" in aqua_db_path:
self.__url = f"mysql+pymysql://{aqua_db_path}"
else:
self.__url = f"sqlite:///{aqua_db_path}"
self.__url = f"sqlite:///{aqua_db_path}"
self.__engine = create_engine(self.__url, pool_recycle=3600, echo=False)
self.__engine = create_engine(self.__url, pool_recycle=3600)
# self.inspector = reflection.Inspector.from_engine(self.__engine)
session = sessionmaker(bind=self.__engine)
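Context for the two constructor variants above: the develop branch accepts either a MySQL DSN or a SQLite path and builds the SQLAlchemy URL accordingly, while main only ever opens SQLite. A minimal sketch of develop's dispatch, assuming PyMySQL is installed for the MySQL case:

```python
from sqlalchemy import create_engine

def make_engine(aqua_db_path: str):
    # develop: "user:password@host:3306/aqua" selects MySQL/MariaDB,
    # anything without an "@" is treated as a path to db.sqlite
    if "@" in aqua_db_path:
        url = f"mysql+pymysql://{aqua_db_path}"
    else:
        url = f"sqlite:///{aqua_db_path}"
    # pool_recycle=3600 recycles connections before MySQL's idle timeout
    return create_engine(url, pool_recycle=3600, echo=False)
```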
@@ -97,7 +91,6 @@ class Importer:
self.config_folder = cfg_folder
self.data = Data(core_cfg)
self.title_registry: Dict[str, Any] = {}
self.use_mysql = False
self.logger = logging.getLogger("importer")
if not hasattr(self.logger, "initialized"):
@@ -121,63 +114,31 @@ class Importer:
coloredlogs.install(level="INFO", logger=self.logger, fmt=log_fmt_str)
self.logger.initialized = True
if not os.path.isfile(f"{aqua_folder}/application.properties"):
self.logger.error("Could not locate AQUA application.properties file!")
exit(1)
with open(f"{aqua_folder}/application.properties") as file:
lines = file.readlines()
properties = {}
for line in lines:
line = line.strip()
if not line or line.startswith("#"):
continue
parts = line.split("=")
if len(parts) >= 2:
key = parts[0].strip()
value = "=".join(parts[1:]).strip()
properties[key] = value
db_driver = properties.get("spring.datasource.driver-class-name", "")
if "sqlite" in db_driver:
aqua_db_path = None
db_url = properties.get("spring.datasource.url").split("sqlite:")[1]
temp = os.path.join(f"{aqua_folder}/{db_url}")
aqua_db_path = None
if os.path.exists(aqua_folder):
temp = os.path.join(aqua_folder, "db.sqlite")
if os.path.isfile(temp):
aqua_db_path = temp
if not aqua_db_path:
self.logger.error("Could not locate AQUA db.sqlite file!")
exit(1)
self.aqua = AquaData(aqua_db_path)
elif "mysql" in db_driver or "mariadb" in db_driver:
self.use_mysql = True
db_username = properties.get("spring.datasource.username")
db_password = properties.get("spring.datasource.password")
db_url = (
properties.get("spring.datasource.url").split("?")[0].split("//")[1]
)
self.aqua = AquaData(f"{db_username}:{db_password}@{db_url}")
else:
self.logger.error("Unknown database type!")
async def get_user_id(self, luid: str):
user_id = await self.data.card.get_user_id_from_card(access_code=luid)
def get_user_id(self, luid: str):
user_id = self.data.card.get_user_id_from_card(access_code=luid)
if user_id is not None:
return user_id
user_id = await self.data.user.create_user()
user_id = self.data.user.create_user()
if user_id is None:
user_id = -1
self.logger.error("Failed to register user!")
else:
card_id = await self.data.card.create_card(user_id, luid)
card_id = self.data.card.create_card(user_id, luid)
if card_id is None:
user_id = -1
@@ -195,7 +156,7 @@ class Importer:
return result, datetime_columns
async def parse_aqua_row(
def parse_aqua_row(
self,
row: Row,
datetime_columns: list[Dict],
@@ -203,16 +164,14 @@ class Importer:
card_id: int,
) -> Dict:
row = row._asdict()
if not self.use_mysql:
for column in datetime_columns:
ts = row[column["name"]]
if ts is None or ts == 0:
continue
# actually remove the last 3 digits (milliseconds) to get the correct timestamp
fixed_ts = int(str(ts)[:-3])
# save the datetime object in the dict
row[column["name"]] = datetime.fromtimestamp(fixed_ts)
for column in datetime_columns:
ts = row[column["name"]]
if ts is None:
continue
# actually remove the last 3 digits (milliseconds) to get the correct timestamp
fixed_ts = int(str(ts)[:-3])
# save the datetime object in the dict
row[column["name"]] = datetime.fromtimestamp(fixed_ts)
tmp = {}
for k, v in row.items():
@@ -239,7 +198,7 @@ class Importer:
card_id = card_data["luid"]
# get the ARTEMiS internal user id; if none exists, create a user
user_id = await self.get_user_id(card_id)
user_id = self.get_user_id(card_id)
# add the ARTEMiS user id to the dict
tmp["user"] = user_id
@@ -252,7 +211,7 @@ class Importer:
f"SELECT * FROM chusan_user_data WHERE id = {aqua_user_id}"
)
# could never be None unless something is really fucked up
user_data = None
for user in user_result:
user_data = user._asdict()
@@ -261,7 +220,7 @@ class Importer:
return card_id
async def import_chuni(self):
def import_chuni(self):
game_cfg = ChuniConfig()
game_cfg.update(yaml.safe_load(open(f"{self.config_folder}/chuni.yaml")))
@@ -277,14 +236,14 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("chusan_user_data")
for row in result:
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row,
datetime_columns,
unused_columns=["id", "lastLoginDate", "cardId"],
card_id=row._asdict()["card_id"],
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userData": [tmp]}}
)
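Every import step funnels its rows through the same title-server handler, wrapped in the request shape a real game client would send. An illustrative payload (the fields inside userData are placeholders, not the full column set):

```python
payload = {
    "userId": 10001,  # ARTEMiS internal user id resolved via get_user_id()
    "upsertUserAll": {
        "userData": [
            {"user": 10001, "userName": "PLAYER", "level": 12}  # placeholder fields
        ]
    },
}
# develop awaits the coroutine handler; main calls it synchronously:
# await base.handle_upsert_user_all_api_request(payload)
```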
@@ -293,7 +252,7 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("chusan_user_game_option")
for row in result:
user = self.get_chuni_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
@@ -302,7 +261,7 @@ class Importer:
tmp["playTimingOffset_120"] = tmp.pop("playTimingOffset120")
tmp["judgeTimingOffset_120"] = tmp.pop("judgeTimingOffset120")
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userGameOption": [tmp]}}
)
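The two manual renames above are needed because parse_aqua_row evidently camelizes AQUA's snake_case column names with inflection (the udemae handling further down uses the same idiom), and camelization swallows the underscore before a numeric suffix:

```python
import inflection

# the "_120" suffix loses its underscore on the way to camelCase ...
assert inflection.camelize("play_timing_offset_120", False) == "playTimingOffset120"
# ... while the ARTEMiS column is "playTimingOffset_120", hence the pop/rename
```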
@@ -311,7 +270,7 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("chusan_user_general_data")
for row in result:
user = self.get_chuni_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
@@ -327,7 +286,7 @@ class Importer:
"romVersionCode": "2000001",
}
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{
"userId": tmp["user"],
"upsertUserAll": {"userRecentRatingList": rating_list},
@@ -341,13 +300,13 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("chusan_user_activity")
for row in result:
user = self.get_chuni_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
tmp["id"] = tmp["activityId"]
tmp.pop("activityId")
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userActivityList": [tmp]}}
)
@@ -358,11 +317,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("chusan_user_character")
for row in result:
user = self.get_chuni_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userCharacterList": [tmp]}}
)
@@ -373,11 +332,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("chusan_user_course")
for row in result:
user = self.get_chuni_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userCourseList": [tmp]}}
)
@@ -386,11 +345,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("chusan_user_duel")
for row in result:
user = self.get_chuni_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userDuelList": [tmp]}}
)
@@ -399,11 +358,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("chusan_user_item")
for row in result:
user = self.get_chuni_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userItemList": [tmp]}}
)
@@ -412,11 +371,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("chusan_user_map_area")
for row in result:
user = self.get_chuni_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userMapAreaList": [tmp]}}
)
@@ -425,11 +384,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("chusan_user_music_detail")
for row in result:
user = self.get_chuni_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userMusicDetailList": [tmp]}}
)
@@ -440,11 +399,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("chusan_user_playlog")
for row in result:
user = self.get_chuni_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userPlaylogList": [tmp]}}
)
@@ -456,7 +415,7 @@ class Importer:
f"SELECT * FROM ongeki_user_data WHERE id = {aqua_user_id}"
)
# could never be None unless something is really fucked up
user_data = None
for user in user_result:
user_data = user._asdict()
@@ -465,7 +424,7 @@ class Importer:
return card_id
async def import_ongeki(self):
def import_ongeki(self):
game_cfg = OngekiConfig()
game_cfg.update(yaml.safe_load(open(f"{self.config_folder}/ongeki.yaml")))
@@ -481,7 +440,7 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_data")
for row in result:
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row,
datetime_columns,
unused_columns=["id", "aimeCardId"],
@@ -490,7 +449,7 @@ class Importer:
# useless but required
tmp["accessCode"] = ""
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userData": [tmp]}}
)
@@ -499,12 +458,12 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_option")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
tmp.pop("dispbp")
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userOption": [tmp]}}
)
@@ -513,7 +472,7 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_general_data")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
@@ -529,7 +488,7 @@ class Importer:
"romVersionCode": "1000000",
}
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{
"userId": tmp["user"],
"upsertUserAll": {"userRecentRatingList": rating_list},
@@ -543,11 +502,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_deck")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userDeckList": [tmp]}}
)
@@ -556,13 +515,13 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_activity")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
tmp["id"] = tmp["activityId"]
tmp.pop("activityId")
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userActivityList": [tmp]}}
)
@@ -571,11 +530,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_card")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userCardList": [tmp]}}
)
@@ -584,11 +543,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_chapter")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userChapterList": [tmp]}}
)
@@ -597,11 +556,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_character")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userCharacterList": [tmp]}}
)
@@ -612,11 +571,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_deck")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userDeckList": [tmp]}}
)
@@ -625,11 +584,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_item")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userItemList": [tmp]}}
)
@@ -638,11 +597,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_item")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userItemList": [tmp]}}
)
@@ -651,11 +610,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_memory_chapter")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{
"userId": tmp["user"],
"upsertUserAll": {"userMemoryChapterList": [tmp]},
@@ -669,11 +628,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_mission_point")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{
"userId": tmp["user"],
"upsertUserAll": {"userMissionPointList": [tmp]},
@@ -687,11 +646,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_music_detail")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userMusicDetailList": [tmp]}}
)
@@ -702,11 +661,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_playlog")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userPlaylogList": [tmp]}}
)
@@ -715,11 +674,11 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_story")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userStoryList": [tmp]}}
)
@@ -728,408 +687,18 @@ class Importer:
result, datetime_columns = self.parse_aqua_db("ongeki_user_tech_count")
for row in result:
user = self.get_ongeki_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
tmp = self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userTechCountList": [tmp]}}
)
self.logger.info(f"Imported {version_str} userTechCount: {tmp['levelId']}")
def get_mai2_card_id_by_aqua_row(self, row: Row, user_id_column: str = "user_id"):
aqua_user_id = row._asdict()[user_id_column]
user_result = self.aqua.execute(
f"SELECT * FROM maimai2_user_detail WHERE id = {aqua_user_id}"
)
# could never be None unless something is really fucked up
user_data = None
for user in user_result:
user_data = user._asdict()
card_id = user_data["aime_card_id"]
return card_id
def get_mai2_rating_lists_by_aqua_row(self, row: Row, user_id_column: str = "id"):
aqua_user_id = row._asdict()[user_id_column]
user_general_data_result = self.aqua.execute(
f"SELECT * FROM maimai2_user_general_data WHERE user_id = {aqua_user_id}"
)
ratings = {}
for row in user_general_data_result:
row = row._asdict()
property_key = row["property_key"]
property_value: str = row["property_value"]
ratings[property_key] = []
if property_value == "":
continue
for rating_str in property_value.split(","):
(
music_id_str,
level_str,
romVersion_str,
achievement_str,
) = rating_str.split(":")
ratings[property_key].append(
{
"musicId": int(music_id_str),
"level": int(level_str),
"romVersion": int(romVersion_str),
"achievement": int(achievement_str),
}
)
user_udemae_result = self.aqua.execute(
f"SELECT * FROM maimai2_user_udemae WHERE user_id = {aqua_user_id}"
)
for user_udemae_row in user_udemae_result:
user_udemae = user_udemae_row._asdict()
user_udemae.pop("id")
user_udemae.pop("user_id")
udemae = {inflection.camelize(k, False): v for k, v in user_udemae.items()}
return (
ratings["recent_rating"],
ratings["recent_rating_new"],
ratings["recent_rating_next"],
ratings["recent_rating_next_new"],
udemae,
)
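AQUA packs each rating list into a single property string: comma-separated entries of colon-separated fields. A standalone sketch of the decoding done above (the sample value is made up):

```python
def parse_rating_list(property_value: str) -> list:
    # "musicId:level:romVersion:achievement,..." -> list of dicts
    entries = []
    if not property_value:
        return entries
    for rating_str in property_value.split(","):
        music_id, level, rom_version, achievement = rating_str.split(":")
        entries.append(
            {
                "musicId": int(music_id),
                "level": int(level),
                "romVersion": int(rom_version),
                "achievement": int(achievement),
            }
        )
    return entries

print(parse_rating_list("11003:3:1000000:1005000"))  # one made-up entry
```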
async def import_mai2(self):
game_cfg = Mai2Config()
game_cfg.update(yaml.safe_load(open(f"{self.config_folder}/mai2.yaml")))
base = Mai2Festival(self.config, game_cfg)
version_str = Mai2Constants.game_ver_to_string(base.version)
answer = input(
f"Do you really want to import ALL {version_str} data into ARTEMiS? [N/y]: "
)
if answer.lower() != "y":
self.logger.info("User aborted operation")
return
# maimai2_user_detail -> userData
result, datetime_columns = self.parse_aqua_db("maimai2_user_detail")
for row in result:
tmp = await self.parse_aqua_row(
row,
datetime_columns,
unused_columns=["id", "aimeCardId"],
card_id=row._asdict()["aime_card_id"],
)
# useless but required
tmp["accessCode"] = ""
# camel case conversion fix
tmp["lastSelectEMoney"] = tmp.pop("lastSelectemoney")
# convert charaSlot and charaLockSlot
# "0;0;0;0;0" to [0, 0, 0, 0, 0]
tmp["charaSlot"] = [int(x) for x in tmp["charaSlot"].split(";")]
tmp["charaLockSlot"] = [int(x) for x in tmp["charaLockSlot"].split(";")]
await base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userData": [tmp]}}
)
self.logger.info(f"Imported {version_str} userData: {tmp['user']}")
# ! Here we insert user rating list
rating = tmp["playerRating"]
(
rating_list,
new_rating_list,
next_rating_list,
next_new_rating_list,
udemae,
) = self.get_mai2_rating_lists_by_aqua_row(row)
await base.handle_upsert_user_all_api_request(
{
"userId": tmp["user"],
"upsertUserAll": {
"userRatingList": [
{
"rating": rating,
"ratingList": rating_list,
"newRatingList": new_rating_list,
"nextRatingList": next_rating_list,
"nextNewRatingList": next_new_rating_list,
"udemae": udemae,
}
]
},
}
)
# maimai2_user_playlog -> userPlaylogList
result, datetime_columns = self.parse_aqua_db("maimai2_user_playlog")
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row, datetime_columns, unused_columns=[], card_id=user
)
tmp["userId"] = tmp["user"]
id_ = tmp.pop("id")
await base.handle_upload_user_playlog_api_request(
{"userId": tmp["user"], "userPlaylog": tmp}
)
self.logger.info(f"Imported {version_str} userPlaylog: {id_}")
# maimai2_user_extend -> userExtend
result, datetime_columns = self.parse_aqua_db("maimai2_user_extend")
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
# reset selectedCardList to an empty list so it matches the JSON schema
tmp["selectedCardList"] = []
await base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userExtend": [tmp]}}
)
self.logger.info(f"Imported {version_str} userExtend: {tmp['user']}")
# skip userGhost
# maimai2_user_option -> userOption
result, datetime_columns = self.parse_aqua_db("maimai2_user_option")
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row, datetime_columns, unused_columns=["id"], card_id=user
)
await base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userOption": [tmp]}}
)
self.logger.info(f"Imported {version_str} userOption: {tmp['user']}")
# maimai2_user_activity -> userActivityList
result, datetime_columns = self.parse_aqua_db("maimai2_user_activity")
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row,
datetime_columns,
# we do need the id column because AQUA always sets activityId to 1
# and in ARTEMiS, activityId will be set to the value of id and id will be dropped
unused_columns=[],
card_id=user,
)
# use the raw database operation here instead of the upsert handler
base.data.profile.put_profile_activity(tmp["user"], tmp)
self.logger.info(
f"Imported {version_str} userActivity: {tmp['user']}, {tmp['activityId']}"
)
# maimai2_user_charge -> userChargeList
# untested
result, datetime_columns = self.parse_aqua_db("maimai2_user_charge")
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row,
datetime_columns,
unused_columns=["id"],
card_id=user,
)
await base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userChargeList": [tmp]}}
)
self.logger.info(
f"Imported {version_str} userCharge: {tmp['user']}, {tmp['chargeId']}"
)
# maimai2_user_character -> userCharacterList
result, datetime_columns = self.parse_aqua_db("maimai2_user_character")
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row,
datetime_columns,
unused_columns=["id"],
card_id=user,
)
tmp["point"] = 0
await base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userCharacterList": [tmp]}}
)
self.logger.info(
f"Imported {version_str} userCharacter: {tmp['user']}, {tmp['characterId']}"
)
# maimai2_user_item -> userItemList
result, datetime_columns = self.parse_aqua_db("maimai2_user_item")
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row,
datetime_columns,
unused_columns=["id"],
card_id=user,
)
await base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userItemList": [tmp]}}
)
self.logger.info(
f"Imported {version_str} userItem: {tmp['user']}, {tmp['itemKind']},{tmp['itemId']}"
)
# maimai2_user_login_bonus -> userLoginBonusList
result, datetime_columns = self.parse_aqua_db("maimai2_user_login_bonus")
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row,
datetime_columns,
unused_columns=["id"],
card_id=user,
)
await base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userLoginBonusList": [tmp]}}
)
self.logger.info(
f"Imported {version_str} userLoginBonus: {tmp['user']}, {tmp['bonusId']}"
)
# maimai2_user_map -> userMapList
result, datetime_columns = self.parse_aqua_db("maimai2_user_map")
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row,
datetime_columns,
unused_columns=["id"],
card_id=user,
)
await base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userMapList": [tmp]}}
)
self.logger.info(
f"Imported {version_str} userMap: {tmp['user']}, {tmp['mapId']}"
)
# maimai2_user_music_detail -> userMusicDetailList
result, datetime_columns = self.parse_aqua_db("maimai2_user_music_detail")
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row,
datetime_columns,
unused_columns=["id"],
card_id=user,
)
await base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userMusicDetailList": [tmp]}}
)
self.logger.info(
f"Imported {version_str} userMusicDetail: {tmp['user']}, {tmp['musicId']}"
)
# maimai2_user_course -> userCourseList
result, datetime_columns = self.parse_aqua_db("maimai2_user_course")
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row,
datetime_columns,
unused_columns=["id"],
card_id=user,
)
await base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userCourseList": [tmp]}}
)
self.logger.info(
f"Imported {version_str} userCourse: {tmp['user']}, {tmp['courseId']}"
)
# maimai2_user_favorite -> userFavoriteList
# untested
result, datetime_columns = self.parse_aqua_db("maimai2_user_favorite")
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row,
datetime_columns,
unused_columns=["id"],
card_id=user,
)
tmp = {
"user": tmp["user"],
"kind": tmp["itemKind"],
"itemIdList": tmp["itemIdList"],
}
await base.handle_upsert_user_all_api_request(
{"userId": tmp["user"], "upsertUserAll": {"userFavoriteList": [tmp]}}
)
self.logger.info(f"Imported {version_str} userFavorite: {tmp['user']}")
# maimai2_user_friend_season_ranking -> userFriendSeasonRankingList
result, datetime_columns = self.parse_aqua_db(
"maimai2_user_friend_season_ranking"
)
for row in result:
user = self.get_mai2_card_id_by_aqua_row(row)
tmp = await self.parse_aqua_row(
row,
datetime_columns,
unused_columns=["id"],
card_id=user,
)
# user is redundant
artemis_user = tmp.pop("user")
await base.handle_upsert_user_all_api_request(
{
"userId": artemis_user,
"upsertUserAll": {"userFriendSeasonRankingList": [tmp]},
}
)
self.logger.info(
f"Imported {version_str} userFriendSeasonRanking: {artemis_user}, {tmp['seasonId']}"
)
async def main():
def main():
parser = argparse.ArgumentParser(description="AQUA to ARTEMiS")
parser.add_argument(
"--config", "-c", type=str, help="Config directory to use", default="config"
@@ -1137,7 +706,7 @@ async def main():
parser.add_argument(
"aqua_folder_path",
type=str,
help="Absolute folder path to AQUA folder, where application.properties is located in.",
help="Absolute folder path to AQUA /data folder, where db.sqlite is located in",
)
args = parser.parse_args()
@@ -1146,10 +715,9 @@ async def main():
importer = Importer(core_cfg, args.config, args.aqua_folder_path)
await importer.import_chuni()
await importer.import_ongeki()
await importer.import_mai2()
importer.import_chuni()
importer.import_ongeki()
if __name__ == "__main__":
asyncio.run(main())
main()
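The entry points differ only in dispatch: develop's import methods are coroutines driven by asyncio, while main calls the same chain synchronously. A sketch of the develop-style driver for comparison:

```python
import asyncio

async def run_all(importer) -> None:
    # develop branch: every import_* method is a coroutine and must be awaited
    await importer.import_chuni()
    await importer.import_ongeki()
    await importer.import_mai2()

# develop: asyncio.run(main())   /   main: main()
```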