Compare commits
10 Commits

23 changed files with 4167 additions and 254 deletions

alembic.ini Normal file

@@ -0,0 +1,116 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires Python >= 3.9 or the backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = mysql://root:root@localhost/artemis2023?charset=utf8mb4
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = [%(asctime)s] %(levelname)s | %(name)s | %(message)s
datefmt = %Y-%m-%d %H:%M:%S
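
Since migrations are normally driven from ARTEMiS itself rather than the `alembic` CLI, here is a minimal sketch of loading this ini programmatically and migrating to the newest revision; it assumes only that Alembic is installed, and the URL is an illustrative placeholder rather than the hardcoded `sqlalchemy.url` above.

```python
# Load alembic.ini, override the database URL, and upgrade to "head".
# Plain Alembic API; nothing ARTEMiS-specific. The URL is a placeholder.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
cfg.set_main_option("sqlalchemy.url", "sqlite:///aime.db")
command.upgrade(cfg, "head")
```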

alembic/env.py Normal file

@@ -0,0 +1,111 @@
from logging.config import fileConfig
from os import path
import yaml
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
from core import CoreConfig
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name, disable_existing_loggers=False)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# Core
from core.data.schema import metadata
# Titles
from titles.chuni.schema import *
from titles.cm.schema import *
from titles.cxb.schema.item import *
from titles.diva.schema import *
from titles.idac.schema import *
# from titles.idz.schema import *
from titles.mai2.schema import *
from titles.ongeki.schema import *
from titles.pokken.schema import *
from titles.sao.schema import *
target_metadata = metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def get_url_from_config():
url = config.get_main_option('artemis_db_url')
if url is not None:
return url
import core.data.database
cfg: CoreConfig = CoreConfig()
if path.exists("config/core.yaml"):
cfg.update(yaml.safe_load(open("config/core.yaml")))
return core.data.database.build_db_url_from_config(cfg.database)
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
# Load config
url = get_url_from_config()
from sqlalchemy import create_engine
engine = create_engine(url)
with engine.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
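
For reference, `run_migrations_offline()` above is the path taken when Alembic renders SQL instead of applying it (the CLI's `--sql` flag); a hedged sketch of reaching it through the Python API:

```python
# Offline mode: sql=True prints the migration SQL to stdout, so no DBAPI
# driver is needed — exactly the case run_migrations_offline() handles.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.upgrade(cfg, "head", sql=True)
```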

alembic/script.py.mako Normal file

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

File diff suppressed because it is too large

@@ -159,7 +159,7 @@ class DatabaseConfig:
@property
def protocol(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "database", "type", default="mysql"
self.__config, "core", "database", "protocol", default="mysql"
)
@property
@@ -186,6 +186,13 @@
default=10000,
)
@property
def sqlite_db_filename(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "database", "sqlite_db_filename", default="aime.db"
)
@property
def enable_memcached(self) -> bool:
return CoreConfig.get_config_field(
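
To make the renamed key concrete, a sketch of the `core.database` section these accessors read, shown as the equivalent Python mapping; the key names come from the accessors in this diff, and the values are examples only.

```python
# Example core.database settings after this change: "type" became
# "protocol", and sqlite gained its own filename key (default "aime.db").
config = {
    "core": {
        "database": {
            "protocol": "sqlite",             # was "type"; defaults to "mysql"
            "sqlite_db_filename": "aime.db",  # new key introduced by this diff
        }
    }
}
```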

@@ -1,5 +1,8 @@
import logging, coloredlogs
from typing import Optional, Dict, List
import alembic
from alembic.config import Config
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy import create_engine
@@ -9,11 +12,22 @@ import secrets, string
import bcrypt
from hashlib import sha256
from core.config import CoreConfig
from core.config import CoreConfig, DatabaseConfig
from core.data.schema import *
from core.utils import Utils
def build_db_url_from_config(database: DatabaseConfig):
if database.protocol == "sqlite":
return f"{database.protocol}:///{database.sqlite_db_filename}"
if database.sha2_password:
passwd = sha256(database.password.encode()).digest()
return f"{database.protocol}://{database.username}:{passwd.hex()}@{database.host}:{database.port}/{database.name}?charset=utf8mb4"
else:
return f"{database.protocol}://{database.username}:{database.password}@{database.host}:{database.port}/{database.name}?charset=utf8mb4"
class Data:
current_schema_version = 6
engine = None
@@ -24,12 +38,8 @@ class Data:
base = None
def __init__(self, cfg: CoreConfig) -> None:
self.config = cfg
if self.config.database.sha2_password:
passwd = sha256(self.config.database.password.encode()).digest()
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{passwd.hex()}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}?charset=utf8mb4"
else:
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{self.config.database.password}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}?charset=utf8mb4"
self.__url = build_db_url_from_config(self.config.database)
if Data.engine is None:
Data.engine = create_engine(self.__url, pool_recycle=3600)
@@ -355,3 +365,10 @@ class Data:
all_game_versions = self.base.get_all_schema_vers()
for ver in all_game_versions:
self.logger.info(f"{ver['game']} -> v{ver['version']}")
def alembic_upgrade(self, alembic_ini_path: str):
from alembic import command
alembic_cfg = alembic.config.Config(alembic_ini_path)
alembic_cfg.set_main_option('artemis_db_url', build_db_url_from_config(self.config.database))
command.upgrade(alembic_cfg, 'head')
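
A quick sketch of the URLs `build_db_url_from_config` produces per branch (host, credentials, and database name are illustrative):

```python
# protocol "sqlite"               -> "sqlite:///aime.db"
# protocol "mysql", plain         -> "mysql://user:pass@localhost:3306/artemis?charset=utf8mb4"
# protocol "mysql", sha2_password -> the password is replaced by its hex SHA-256
from hashlib import sha256

hashed = sha256("pass".encode()).digest().hex()
print(f"mysql://user:{hashed}@localhost:3306/artemis?charset=utf8mb4")
```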

@@ -2,8 +2,7 @@ from typing import Optional, Dict, List
from sqlalchemy import Table, Column, and_, or_
from sqlalchemy.sql.schema import ForeignKey, PrimaryKeyConstraint
from sqlalchemy.types import Integer, String, Boolean, JSON
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql import func, select, insert
from sqlalchemy.engine import Row
import re

@@ -1,15 +1,17 @@
import json
import logging
import sqlite3
from random import randrange
from typing import Any, Optional, Dict, List
from sqlalchemy.engine import Row
import sqlalchemy.dialects.mysql
import sqlalchemy.dialects.postgresql
import sqlalchemy.dialects.sqlite
from sqlalchemy.engine import Row, CursorResult
from sqlalchemy.engine.base import Connection
from sqlalchemy.sql import text, func, select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.sql import text, func, select, insert
from sqlalchemy.exc import SQLAlchemyError, IntegrityError
from sqlalchemy import MetaData, Table, Column
from sqlalchemy.types import Integer, String, TIMESTAMP, JSON
from sqlalchemy.dialects.mysql import insert
from core.config import CoreConfig
@@ -102,12 +104,60 @@ class BaseData:
return None
return row["version"]
def touch_schema_ver(self, ver: int, game: str = "CORE") -> Optional[int]:
sql = insert(schema_ver).values(game=game, version=ver)
conflict = sql.on_duplicate_key_update(version=schema_ver.c.version)
result = self.execute(conflict)
def upsert(self, table: Table, data: Dict) -> Optional[CursorResult]:
# use ON DUPLICATE KEY UPDATE on MySQL, ON CONFLICT DO UPDATE on PostgreSQL, and an insert-then-update fallback on SQLite
sql = self.dialect_insert(table).values(data)
unique_columns = get_unique_columns(table)
if len(unique_columns) > 0:
if self.conn.bind.name == "mysql":
conflict = sql.on_duplicate_key_update(**data)
elif self.conn.bind.name == "postgresql":
conflict = sql.on_conflict_do_update(
index_elements=unique_columns,
set_=data,
)
if self.conn.bind.name == "sqlite":
from sqlalchemy import exc
try:
result = self.conn.execute(sql)
except (exc.IntegrityError, sqlite3.IntegrityError) as e:
# run update query using the values from unique_columns
from operator import and_
conditions = [getattr(table.c, col) == data[col] for col in unique_columns]
if len(conditions) > 1:
from functools import reduce
where_clause = reduce(and_, conditions)
elif conditions:
where_clause = conditions[0]
upd = table.update().where(where_clause).values(data)
self.logger.error(f"SQL post-conflict update ({', '.join(unique_columns)}): {str(upd)}")
result = self.execute(upd)
else:
result = self.execute(conflict)
else:
result = self.execute(sql)
if result is None:
self.logger.error(f"Failed to upsert data into {table.name}")
return None
return result
def dialect_insert(self, table: Table):
if self.conn.bind.name == "mysql":
return sqlalchemy.dialects.mysql.insert(table)
elif self.conn.bind.name == "postgresql":
return sqlalchemy.dialects.postgresql.insert(table)
elif self.conn.bind.name == "sqlite":
return sqlalchemy.dialects.sqlite.insert(table)
else:
raise Exception("Unknown dialect")
def touch_schema_ver(self, ver: int, game: str = "CORE") -> Optional[int]:
result = self.upsert(schema_ver, dict(game=game, version=ver))
if result is None:
self.logger.error(
f"Failed to update schema version for game {game} (v{ver})"
@@ -116,10 +166,7 @@ class BaseData:
return result.lastrowid
def set_schema_ver(self, ver: int, game: str = "CORE") -> Optional[int]:
sql = insert(schema_ver).values(game=game, version=ver)
conflict = sql.on_duplicate_key_update(version=ver)
result = self.execute(conflict)
result = self.upsert(schema_ver, dict(game=game, version=ver))
if result is None:
self.logger.error(
f"Failed to update schema version for game {game} (v{ver})"
@@ -165,3 +212,25 @@
data[k] = False
return data
unique_columns_cache = {}
def get_unique_columns(table: Table):
global unique_columns_cache
# Check if the unique columns for this table are already cached
if table.name in unique_columns_cache:
return unique_columns_cache[table.name]
# Calculate the unique columns for this table
unique_columns = []
from sqlalchemy import UniqueConstraint, PrimaryKeyConstraint
has_unique_constraint = any(isinstance(constraint, UniqueConstraint) for constraint in table.constraints)
for constraint in table.constraints:
# if we do not have a unique constraint, use the primary key
if (not has_unique_constraint and isinstance(constraint, PrimaryKeyConstraint)) or isinstance(constraint, UniqueConstraint):
unique_columns.extend([column.name for column in constraint.columns])
unique_columns_cache[table.name] = unique_columns
return unique_columns
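
Two notes on this helper, illustrated with a standalone sketch (toy table, not the ARTEMiS schema): `get_unique_columns` prefers explicit UNIQUE constraints and only falls back to the primary key, and on SQLAlchemy 1.4+ the sqlite dialect also offers a native `on_conflict_do_update`, so the insert-then-update fallback above is a compatibility choice rather than the only option.

```python
from sqlalchemy import (Column, Integer, MetaData, String, Table,
                        UniqueConstraint, create_engine)
from sqlalchemy.dialects.sqlite import insert

md = MetaData()
t = Table(
    "schema_ver_demo", md,
    Column("id", Integer, primary_key=True),
    Column("game", String(32)),
    Column("version", Integer),
    UniqueConstraint("game"),
)
# get_unique_columns(t) would return ["game"]: the UNIQUE constraint wins
# and the primary key is ignored.

engine = create_engine("sqlite://")
md.create_all(engine)

# Native sqlite upsert, equivalent to the try/except fallback above:
stmt = insert(t).values(game="CORE", version=6)
stmt = stmt.on_conflict_do_update(index_elements=["game"], set_={"version": 6})
with engine.connect() as conn:
    conn.execute(stmt)
```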

@@ -2,9 +2,7 @@ from enum import Enum
from typing import Optional, List
from sqlalchemy import Table, Column
from sqlalchemy.types import Integer, String, TIMESTAMP
from sqlalchemy.sql import func
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql import func, select
from sqlalchemy.sql import func, select, insert
from sqlalchemy.engine import Row
import bcrypt
@@ -41,26 +39,21 @@ class UserData(BaseData):
permission: int = 1,
) -> Optional[int]:
if id is None:
sql = insert(aime_user).values(
result = self.upsert(aime_user, dict(
username=username,
email=email,
password=password,
permissions=permission,
)
))
else:
sql = insert(aime_user).values(
result = self.upsert(aime_user, dict(
id=id,
username=username,
email=email,
password=password,
permissions=permission,
)
))
conflict = sql.on_duplicate_key_update(
username=username, email=email, password=password, permissions=permission
)
result = self.execute(conflict)
if result is None:
return None
return result.lastrowid
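
The same conversion repeats throughout this PR. For contrast, a standalone sketch of what the removed MySQL-only pattern compiled to (toy table, not the real `aime_user` schema):

```python
from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects import mysql
from sqlalchemy.dialects.mysql import insert

md = MetaData()
user = Table("user_demo", md,
             Column("id", Integer, primary_key=True),
             Column("username", String(32)))

stmt = insert(user).values(id=1, username="test")
stmt = stmt.on_duplicate_key_update(username="test")
print(stmt.compile(dialect=mysql.dialect()))
# INSERT INTO user_demo (id, username) VALUES (%s, %s)
# ON DUPLICATE KEY UPDATE username = %s
```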

@@ -3,7 +3,7 @@ SET FOREIGN_KEY_CHECKS = 0;
ALTER TABLE chuni_score_playlog
CHANGE COLUMN isClear isClear TINYINT(1) NULL DEFAULT NULL;
ALTER TABLE aime.chuni_score_best
ALTER TABLE chuni_score_best
CHANGE COLUMN isSuccess isSuccess TINYINT(1) NULL DEFAULT NULL ;
ALTER TABLE chuni_score_playlog

@@ -3,7 +3,7 @@ SET FOREIGN_KEY_CHECKS = 0;
ALTER TABLE chuni_score_playlog
CHANGE COLUMN isClear isClear TINYINT(6) NULL DEFAULT NULL;
ALTER TABLE aime.chuni_score_best
ALTER TABLE chuni_score_best
CHANGE COLUMN isSuccess isSuccess INT(11) NULL DEFAULT NULL ;
ALTER TABLE chuni_score_playlog

@@ -1,3 +1,5 @@
import os
import yaml
import argparse
import logging
@@ -88,4 +90,7 @@ if __name__ == "__main__":
elif args.action == "version":
data.show_versions()
elif args.action == "alembic_upgrade":
data.alembic_upgrade(os.path.join(os.path.dirname(__file__), 'alembic.ini'))
data.logger.info("Done")
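
A hedged sketch of what the new action does, runnable without the CLI wrapper; import paths follow this diff, and the `config/core.yaml` location is the project's usual default rather than something this snippet verifies.

```python
# Equivalent of `python dbutils.py alembic_upgrade`, done by hand.
import os
import yaml
from core import CoreConfig
from core.data import Data  # assumption: Data is exported from core.data

cfg = CoreConfig()
if os.path.exists("config/core.yaml"):
    cfg.update(yaml.safe_load(open("config/core.yaml")))

data = Data(cfg)
data.alembic_upgrade(os.path.join(os.path.dirname(__file__), "alembic.ini"))
```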

@@ -54,6 +54,7 @@ Games listed below have been tested and confirmed working.
| 11 | CHUNITHM NEW!! |
| 12 | CHUNITHM NEW PLUS!! |
| 13 | CHUNITHM SUN |
| 14 | CHUNITHM SUN PLUS |
### Importer
@@ -74,7 +75,6 @@ Config file is located in `config/chuni.yaml`.
|------------------|----------------------------------------------------------------------------------------------------------------|
| `news_msg` | If this is set, the news at the top of the main screen will be displayed (up to Chunithm Paradise Lost) |
| `name` | If this is set, all players that are not on a team will use this one by default. |
| `rank_scale` | Scales the in-game ranking based on the number of teams within the database |
| `use_login_bonus`| This is used to enable the login bonuses |
| `crypto` | This option is used to enable the TLS Encryption |
@@ -132,12 +132,6 @@ INSERT INTO aime.chuni_profile_team (teamName) VALUES (<teamName>);
```
Team names can be regular ASCII, and they will be displayed in-game.
On smaller installations, you may also wish to enable scaled team rankings. By default, Chunithm determines team ranking within the first 100 teams. This can be configured in the YAML:
```yaml
team:
rank_scale: True # Scales the in-game ranking based on the number of teams within the database, rather than the default scale of ~100 that the game normally uses.
```
### Favorite songs
You can set the songs that will be in a user's Favorite Songs category using the following SQL entries:
```sql

@@ -19,6 +19,9 @@ version:
13:
rom: 2.10.00
data: 2.10.00
14:
rom: 2.15.00
data: 2.15.00
crypto:
encrypted_only: False

@@ -5,7 +5,7 @@ A network service emulator for games running SEGA's ALL.NET service, and similar
Games listed below have been tested and confirmed working. Only game versions older than the version currently active in arcades, or game versions that have not received a major update in over one year, are supported.
+ CHUNITHM
+ All versions up to SUN
+ All versions up to SUN PLUS
+ crossbeats REV.
+ All versions + omnimix

@@ -19,3 +19,4 @@ protobuf
autobahn
pillow
pyjwt
alembic==1.13.0

@@ -419,13 +419,13 @@ class ChuniBase:
all_entries = self.data.score.get_rival_music(rival_id)
# Process the entries based on max_count and nextIndex
for music in all_entries[next_index:]:
for music in all_entries:
music_id = music["musicId"]
level = music["level"]
score = music["scoreMax"]
rank = music["scoreRank"]
# Create a music entry for the current music_id
# Create a music entry for the current music_id if it's unique
music_entry = next((entry for entry in user_rival_music_list if entry["musicId"] == music_id), None)
if music_entry is None:
music_entry = {
@@ -435,15 +435,20 @@
}
user_rival_music_list.append(music_entry)
# Create a level entry for the current level
level_entry = {
"level": level,
"scoreMax": score,
"scoreRank": rank
}
music_entry["userRivalMusicDetailList"].append(level_entry)
# Create a level entry for the current level if it's unique or has a higher score
level_entry = next((entry for entry in music_entry["userRivalMusicDetailList"] if entry["level"] == level), None)
if level_entry is None:
level_entry = {
"level": level,
"scoreMax": score,
"scoreRank": rank
}
music_entry["userRivalMusicDetailList"].append(level_entry)
elif score > level_entry["scoreMax"]:
level_entry["scoreMax"] = score
level_entry["scoreRank"] = rank
# Calculate the length for each "musicId" by counting the levels
# Calculate the length for each "musicId" by counting the unique levels
for music_entry in user_rival_music_list:
music_entry["length"] = len(music_entry["userRivalMusicDetailList"])
@@ -451,11 +456,11 @@
result = {
"userId": data["userId"],
"rivalId": data["rivalId"],
"nextIndex": str(next_index + len(all_entries) if len(all_entries) <= len(user_rival_music_list) else -1),
"userRivalMusicList": user_rival_music_list[:max_count]
"nextIndex": str(next_index + len(user_rival_music_list[next_index: next_index + max_count]) if max_count <= len(user_rival_music_list[next_index: next_index + max_count]) else -1),
"userRivalMusicList": user_rival_music_list[next_index: next_index + max_count]
}
return result
def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
user_fav_item_list = []
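
The rewritten `nextIndex` expression is dense; the paging contract it implements reduces to the following simplified sketch (names here are illustrative, not the ARTEMiS API):

```python
# Return one window of results plus either the next offset, or -1 when the
# window comes back short (i.e. there are no further pages to request).
def paginate(entries, next_index, max_count):
    page = entries[next_index : next_index + max_count]
    new_index = next_index + len(page) if len(page) == max_count else -1
    return page, new_index

# e.g. 5 entries with pages of 2 -> offsets 0, 2, 4, then -1 on the short page
```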

@@ -101,8 +101,14 @@ class ChuniServlet(BaseServlet):
]
for method in method_list:
method_fixed = inflection.camelize(method)[6:-7]
# number of iterations was changed to 70 in SUN
iter_count = 70 if version >= ChuniConstants.VER_CHUNITHM_SUN else 44
# number of iterations was changed to 70 in SUN and then to 36 in SUN PLUS
if version == ChuniConstants.VER_CHUNITHM_SUN_PLUS:
iter_count = 36
elif version == ChuniConstants.VER_CHUNITHM_SUN:
iter_count = 70
else:
iter_count = 44
hash = PBKDF2(
method_fixed,
bytes.fromhex(keys[2]),
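
The hashed endpoint names come from running PBKDF2 over each camelized method name with the version-dependent iteration count chosen above (44, 70, or 36). A standalone sketch with placeholder key material, assuming pycryptodome's `PBKDF2` as in the call above:

```python
from Crypto.Hash import SHA1
from Crypto.Protocol.KDF import PBKDF2

salt = bytes.fromhex("aa" * 16)  # placeholder; real salts come from per-version keys
digest = PBKDF2("GetUserData", salt, 128, count=44, hmac_hash_module=SHA1)
print(digest.hex())
```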

@@ -10,8 +10,7 @@ from sqlalchemy import (
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
from sqlalchemy.engine.base import Connection
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql import func, select, insert
from sqlalchemy.engine import Row
from core.data.schema import BaseData, metadata
@@ -301,20 +300,14 @@ class ChuniItemData(BaseData):
rest_sec: int = 60,
is_full: bool = False
) -> Optional[int]:
sql = insert(matching).values(
result = self.upsert(matching, dict(
roomId=room_id,
version=version,
restMSec=rest_sec,
user=user_id,
isFull=is_full,
matchingMemberInfoList=matching_member_info_list,
)
conflict = sql.on_duplicate_key_update(
restMSec=rest_sec, matchingMemberInfoList=matching_member_info_list
)
result = self.execute(conflict)
))
if result is None:
return None
return result.lastrowid
@@ -348,13 +341,12 @@
def put_login_bonus(
self, user_id: int, version: int, preset_id: int, **login_bonus_data
) -> Optional[int]:
sql = insert(login_bonus).values(
version=version, user=user_id, presetId=preset_id, **login_bonus_data
)
conflict = sql.on_duplicate_key_update(presetId=preset_id, **login_bonus_data)
result = self.execute(conflict)
result = self.upsert(login_bonus, dict(
version=version,
user=user_id,
presetId=preset_id,
**login_bonus_data
))
if result is None:
return None
return result.lastrowid
@@ -396,10 +388,7 @@
character_data = self.fix_bools(character_data)
sql = insert(character).values(**character_data)
conflict = sql.on_duplicate_key_update(**character_data)
result = self.execute(conflict)
result = self.upsert(character, character_data)
if result is None:
return None
return result.lastrowid
@@ -427,10 +416,7 @@
item_data = self.fix_bools(item_data)
sql = insert(item).values(**item_data)
conflict = sql.on_duplicate_key_update(**item_data)
result = self.execute(conflict)
result = self.upsert(item, item_data)
if result is None:
return None
return result.lastrowid
@@ -453,10 +439,8 @@
duel_data = self.fix_bools(duel_data)
sql = insert(duel).values(**duel_data)
conflict = sql.on_duplicate_key_update(**duel_data)
result = self.execute(conflict)
result = self.upsert(duel, duel_data)
if result is None:
return None
return result.lastrowid
@@ -474,10 +458,7 @@
map_data = self.fix_bools(map_data)
sql = insert(map).values(**map_data)
conflict = sql.on_duplicate_key_update(**map_data)
result = self.execute(conflict)
result = self.upsert(map, map_data)
if result is None:
return None
return result.lastrowid
@@ -495,10 +476,7 @@
map_area_data = self.fix_bools(map_area_data)
sql = insert(map_area).values(**map_area_data)
conflict = sql.on_duplicate_key_update(**map_area_data)
result = self.execute(conflict)
result = self.upsert(map_area, map_area_data)
if result is None:
return None
return result.lastrowid
@@ -522,13 +500,7 @@
def put_user_gacha(
self, aime_id: int, gacha_id: int, gacha_data: Dict
) -> Optional[int]:
sql = insert(gacha).values(user=aime_id, gachaId=gacha_id, **gacha_data)
conflict = sql.on_duplicate_key_update(
user=aime_id, gachaId=gacha_id, **gacha_data
)
result = self.execute(conflict)
result = self.upsert(gacha, dict(user=aime_id, gachaId=gacha_id, **gacha_data))
if result is None:
self.logger.warning(f"put_user_gacha: Failed to insert! aime_id: {aime_id}")
return None
@@ -566,11 +538,7 @@
return result.fetchall()
def put_user_print_state(self, aime_id: int, **print_data) -> Optional[int]:
sql = insert(print_state).values(user=aime_id, **print_data)
conflict = sql.on_duplicate_key_update(user=aime_id, **print_data)
result = self.execute(conflict)
result = self.upsert(print_state, dict(user=aime_id, **print_data))
if result is None:
self.logger.warning(
f"put_user_print_state: Failed to insert! aime_id: {aime_id}"
@@ -581,13 +549,7 @@
def put_user_print_detail(
self, aime_id: int, serial_id: str, user_print_data: Dict
) -> Optional[int]:
sql = insert(print_detail).values(
user=aime_id, serialId=serial_id, **user_print_data
)
conflict = sql.on_duplicate_key_update(user=aime_id, **user_print_data)
result = self.execute(conflict)
result = self.upsert(print_detail, dict(user=aime_id, serialId=serial_id, **user_print_data))
if result is None:
self.logger.warning(
f"put_user_print_detail: Failed to insert! aime_id: {aime_id}"

@@ -4,8 +4,7 @@ from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BigInteg
from sqlalchemy.engine.base import Connection
from sqlalchemy.schema import ForeignKey
from sqlalchemy.engine import Row
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql import func, select, insert
from core.data.schema import BaseData, metadata
@@ -405,10 +404,7 @@ class ChuniProfileData(BaseData):
profile_data = self.fix_bools(profile_data)
sql = insert(profile).values(**profile_data)
conflict = sql.on_duplicate_key_update(**profile_data)
result = self.execute(conflict)
result = self.upsert(profile, profile_data)
if result is None:
self.logger.warning(f"put_profile_data: Failed to update! aime_id: {aime_id}")
return None
@@ -447,9 +443,7 @@
if "accessCode" in profile_ex_data:
profile_ex_data.pop("accessCode")
sql = insert(profile_ex).values(**profile_ex_data)
conflict = sql.on_duplicate_key_update(**profile_ex_data)
result = self.execute(conflict)
result = self.upsert(profile_ex, profile_ex_data)
if result is None:
self.logger.warning(
@@ -474,9 +468,7 @@
def put_profile_option(self, aime_id: int, option_data: Dict) -> Optional[int]:
option_data["user"] = aime_id
sql = insert(option).values(**option_data)
conflict = sql.on_duplicate_key_update(**option_data)
result = self.execute(conflict)
result = self.upsert(option, option_data)
if result is None:
self.logger.warning(
@@ -498,9 +490,7 @@
) -> Optional[int]:
option_ex_data["user"] = aime_id
sql = insert(option_ex).values(**option_ex_data)
conflict = sql.on_duplicate_key_update(**option_ex_data)
result = self.execute(conflict)
result = self.upsert(option_ex, option_ex_data)
if result is None:
self.logger.warning(
@@ -520,12 +510,8 @@
def put_profile_recent_rating(
self, aime_id: int, recent_rating_data: List[Dict]
) -> Optional[int]:
sql = insert(recent_rating).values(
user=aime_id, recentRating=recent_rating_data
)
conflict = sql.on_duplicate_key_update(recentRating=recent_rating_data)
result = self.execute(conflict)
result = self.upsert(recent_rating, dict(user=aime_id, recentRating=recent_rating_data))
if result is None:
self.logger.warning(
f"put_profile_recent_rating: Failed to update! aime_id: {aime_id}"
@@ -547,9 +533,7 @@
activity_data["activityId"] = activity_data["id"]
activity_data.pop("id")
sql = insert(activity).values(**activity_data)
conflict = sql.on_duplicate_key_update(**activity_data)
result = self.execute(conflict)
result = self.upsert(activity, activity_data)
if result is None:
self.logger.warning(
@@ -573,9 +557,7 @@
def put_profile_charge(self, aime_id: int, charge_data: Dict) -> Optional[int]:
charge_data["user"] = aime_id
sql = insert(charge).values(**charge_data)
conflict = sql.on_duplicate_key_update(**charge_data)
result = self.execute(conflict)
result = self.upsert(charge, charge_data)
if result is None:
self.logger.warning(
@@ -601,10 +583,7 @@
def put_profile_emoney(self, aime_id: int, emoney_data: Dict) -> Optional[int]:
emoney_data["user"] = aime_id
sql = insert(emoney).values(**emoney_data)
conflict = sql.on_duplicate_key_update(**emoney_data)
result = self.execute(conflict)
result = self.upsert(emoney, emoney_data)
if result is None:
return None
return result.lastrowid
@@ -622,10 +601,7 @@
) -> Optional[int]:
overpower_data["user"] = aime_id
sql = insert(overpower).values(**overpower_data)
conflict = sql.on_duplicate_key_update(**overpower_data)
result = self.execute(conflict)
result = self.upsert(overpower, overpower_data)
if result is None:
return None
return result.lastrowid
@@ -669,10 +645,7 @@
def update_team(self, team_id: int, team_data: Dict) -> bool:
team_data["id"] = team_id
sql = insert(team).values(**team_data)
conflict = sql.on_duplicate_key_update(**team_data)
result = self.execute(conflict)
result = self.upsert(team, team_data)
if result is None:
self.logger.warn(

@@ -4,8 +4,7 @@ from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BigInteg
from sqlalchemy.engine.base import Connection
from sqlalchemy.schema import ForeignKey
from sqlalchemy.engine import Row
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql import func, select, insert
from sqlalchemy.sql.expression import exists
from core.data.schema import BaseData, metadata
@@ -154,10 +153,7 @@ class ChuniScoreData(BaseData):
course_data["user"] = aime_id
course_data = self.fix_bools(course_data)
sql = insert(course).values(**course_data)
conflict = sql.on_duplicate_key_update(**course_data)
result = self.execute(conflict)
result = self.upsert(course, course_data)
if result is None:
return None
return result.lastrowid
@@ -174,10 +170,8 @@
score_data["user"] = aime_id
score_data = self.fix_bools(score_data)
sql = insert(best_score).values(**score_data)
conflict = sql.on_duplicate_key_update(**score_data)
result = self.execute(conflict)
result = self.upsert(best_score, score_data)
if result is None:
return None
return result.lastrowid
@@ -213,10 +207,7 @@
if "romVersion" not in playlog_data:
playlog_data["romVersion"] = romVer.get(version, "1.00.0")
sql = insert(playlog).values(**playlog_data)
conflict = sql.on_duplicate_key_update(**playlog_data)
result = self.execute(conflict)
result = self.upsert(playlog, playlog_data)
if result is None:
return None
return result.lastrowid

@@ -11,8 +11,7 @@ from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
from sqlalchemy.engine.base import Connection
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql import func, select, insert
from datetime import datetime
from core.data.schema import BaseData, metadata
@@ -187,7 +186,7 @@ class ChuniStaticData(BaseData):
need_login_day_count: int,
login_bonus_category_type: int,
) -> Optional[int]:
sql = insert(login_bonus).values(
result = self.upsert(login_bonus, dict(
version=version,
presetId=preset_id,
loginBonusId=login_bonus_id,
@@ -197,17 +196,7 @@
itemNum=item_num,
needLoginDayCount=need_login_day_count,
loginBonusCategoryType=login_bonus_category_type,
)
conflict = sql.on_duplicate_key_update(
loginBonusName=login_bonus_name,
presentName=present_ame,
itemNum=item_num,
needLoginDayCount=need_login_day_count,
loginBonusCategoryType=login_bonus_category_type,
)
result = self.execute(conflict)
))
if result is None:
return None
return result.lastrowid
@@ -248,18 +237,12 @@
def put_login_bonus_preset(
self, version: int, preset_id: int, preset_name: str, is_enabled: bool
) -> Optional[int]:
sql = insert(login_bonus_preset).values(
result = self.upsert(login_bonus_preset, dict(
presetId=preset_id,
version=version,
presetName=preset_name,
isEnabled=is_enabled,
)
conflict = sql.on_duplicate_key_update(
presetName=preset_name, isEnabled=is_enabled
)
result = self.execute(conflict)
))
if result is None:
return None
return result.lastrowid
@@ -282,13 +265,7 @@
def put_event(
self, version: int, event_id: int, type: int, name: str
) -> Optional[int]:
sql = insert(events).values(
version=version, eventId=event_id, type=type, name=name
)
conflict = sql.on_duplicate_key_update(name=name)
result = self.execute(conflict)
result = self.upsert(events, dict(version=version, eventId=event_id, type=type, name=name))
if result is None:
return None
return result.lastrowid
@@ -355,7 +332,7 @@
jacketPath: str,
we_tag: str,
) -> Optional[int]:
sql = insert(music).values(
result = self.upsert(music, dict(
version=version,
songId=song_id,
chartId=chart_id,
@@ -365,18 +342,7 @@
genre=genre,
jacketPath=jacketPath,
worldsEndTag=we_tag,
)
conflict = sql.on_duplicate_key_update(
title=title,
artist=artist,
level=level,
genre=genre,
jacketPath=jacketPath,
worldsEndTag=we_tag,
)
result = self.execute(conflict)
))
if result is None:
return None
return result.lastrowid
@@ -390,23 +356,14 @@
consume_type: int,
selling_appeal: bool,
) -> Optional[int]:
sql = insert(charge).values(
result = self.upsert(charge, dict(
version=version,
chargeId=charge_id,
name=name,
expirationDays=expiration_days,
consumeType=consume_type,
sellingAppeal=selling_appeal,
)
conflict = sql.on_duplicate_key_update(
name=name,
expirationDays=expiration_days,
consumeType=consume_type,
sellingAppeal=selling_appeal,
)
result = self.execute(conflict)
))
if result is None:
return None
return result.lastrowid
@@ -471,23 +428,14 @@
iconPath: str,
texturePath: str,
) -> Optional[int]:
sql = insert(avatar).values(
result = self.upsert(avatar, dict(
version=version,
avatarAccessoryId=avatarAccessoryId,
name=name,
category=category,
iconPath=iconPath,
texturePath=texturePath,
)
conflict = sql.on_duplicate_key_update(
name=name,
category=category,
iconPath=iconPath,
texturePath=texturePath,
)
result = self.execute(conflict)
))
if result is None:
return None
return result.lastrowid
@@ -499,21 +447,12 @@
gacha_name: int,
**gacha_data,
) -> Optional[int]:
sql = insert(gachas).values(
result = self.upsert(gachas, dict(
version=version,
gachaId=gacha_id,
gachaName=gacha_name,
**gacha_data,
)
conflict = sql.on_duplicate_key_update(
version=version,
gachaId=gacha_id,
gachaName=gacha_name,
**gacha_data,
)
result = self.execute(conflict)
))
if result is None:
self.logger.warning(f"Failed to insert gacha! gacha_id {gacha_id}")
return None
@@ -542,13 +481,7 @@
def put_gacha_card(
self, gacha_id: int, card_id: int, **gacha_card
) -> Optional[int]:
sql = insert(gacha_cards).values(gachaId=gacha_id, cardId=card_id, **gacha_card)
conflict = sql.on_duplicate_key_update(
gachaId=gacha_id, cardId=card_id, **gacha_card
)
result = self.execute(conflict)
result = self.upsert(gacha_cards, dict(gachaId=gacha_id, cardId=card_id, **gacha_card))
if result is None:
self.logger.warning(f"Failed to insert gacha card! gacha_id {gacha_id}")
return None
@@ -580,11 +513,11 @@
return result.fetchone()
def put_card(self, version: int, card_id: int, **card_data) -> Optional[int]:
sql = insert(cards).values(version=version, cardId=card_id, **card_data)
conflict = sql.on_duplicate_key_update(**card_data)
result = self.execute(conflict)
result = self.upsert(cards, dict(
version=version,
cardId=card_id,
**card_data,
))
if result is None:
self.logger.warning(f"Failed to insert card! card_id {card_id}")
return None

@@ -45,7 +45,7 @@ class CardMakerBase:
{
"modelKind": 0,
"type": 1,
"titleUri": f"{uri}/{self._parse_int_ver(games_ver['chuni'])}/ChuniServlet/",
"titleUri": f"{uri}/SDHD/{self._parse_int_ver(games_ver['chuni'])}/ChuniServlet/",
},
# maimai DX
{