Forked from Hay1tsme/artemis
Commit b21ddb92ce: Merge branch 'develop' into fork_develop
@@ -12,8 +12,8 @@ from Crypto.Signature import PKCS1_v1_5
 from time import strptime
 
 from core.config import CoreConfig
-from core.data import Data
 from core.utils import Utils
+from core.data import Data
 from core.const import *
 
 
@@ -65,11 +65,11 @@ class AllnetServlet:
 self.uri_registry[code] = (uri, host)
 
 self.logger.info(
-f"Allnet serving {len(self.uri_registry)} games on port {core_cfg.allnet.port}"
+f"Serving {len(self.uri_registry)} game codes port {core_cfg.allnet.port}"
 )
 
 def handle_poweron(self, request: Request, _: Dict):
-request_ip = request.getClientAddress().host
+request_ip = Utils.get_ip_addr(request)
 try:
 req_dict = self.allnet_req_to_dict(request.content.getvalue())
 if req_dict is None:
@@ -95,14 +95,21 @@ class AllnetServlet:
 
 self.logger.debug(f"Allnet request: {vars(req)}")
 if req.game_id not in self.uri_registry:
-msg = f"Unrecognised game {req.game_id} attempted allnet auth from {request_ip}."
-self.data.base.log_event(
-"allnet", "ALLNET_AUTH_UNKNOWN_GAME", logging.WARN, msg
-)
-self.logger.warn(msg)
+if not self.config.server.is_develop:
+msg = f"Unrecognised game {req.game_id} attempted allnet auth from {request_ip}."
+self.data.base.log_event(
+"allnet", "ALLNET_AUTH_UNKNOWN_GAME", logging.WARN, msg
+)
+self.logger.warn(msg)
 
 resp.stat = 0
 return self.dict_to_http_form_string([vars(resp)])
 
+else:
+self.logger.info(f"Allowed unknown game {req.game_id} v{req.ver} to authenticate from {request_ip} due to 'is_develop' being enabled. S/N: {req.serial}")
+resp.uri = f"http://{self.config.title.hostname}:{self.config.title.port}/{req.game_id}/{req.ver.replace('.', '')}/"
+resp.host = f"{self.config.title.hostname}:{self.config.title.port}"
+return self.dict_to_http_form_string([vars(resp)])
+
 resp.uri, resp.host = self.uri_registry[req.game_id]
 
@@ -162,7 +169,7 @@ class AllnetServlet:
 return self.dict_to_http_form_string([vars(resp)]).encode("utf-8")
 
 def handle_dlorder(self, request: Request, _: Dict):
-request_ip = request.getClientAddress().host
+request_ip = Utils.get_ip_addr(request)
 try:
 req_dict = self.allnet_req_to_dict(request.content.getvalue())
 if req_dict is None:
@@ -181,7 +188,9 @@
 self.logger.error(e)
 return b""
 
+self.logger.info(f"DownloadOrder from {request_ip} -> {req.game_id} v{req.ver} serial {req.serial}")
 resp = AllnetDownloadOrderResponse()
 
 if not self.config.allnet.allow_online_updates:
 return self.dict_to_http_form_string([vars(resp)])
 
@@ -190,7 +199,7 @@
 
 def handle_billing_request(self, request: Request, _: Dict):
 req_dict = self.billing_req_to_dict(request.content.getvalue())
-request_ip = request.getClientAddress()
+request_ip = Utils.get_ip_addr(request)
 if req_dict is None:
 self.logger.error(f"Failed to parse request {request.content.getvalue()}")
 return b""
@@ -223,7 +232,7 @@
 return self.dict_to_http_form_string([vars(resp)])
 
 msg = (
-f"Billing checkin from {request.getClientIP()}: game {kc_game} keychip {kc_serial} playcount "
+f"Billing checkin from {request_ip}: game {kc_game} keychip {kc_serial} playcount "
 f"{kc_playcount} billing_type {kc_billigtype} nearfull {kc_nearfull} playlimit {kc_playlimit}"
 )
 self.logger.info(msg)
@@ -255,7 +264,7 @@
 return resp_str.encode("utf-8")
 
 def handle_naomitest(self, request: Request, _: Dict) -> bytes:
-self.logger.info(f"Ping from {request.getClientAddress().host}")
+self.logger.info(f"Ping from {Utils.get_ip_addr(request)}")
 return b"naomi ok"
 
 def kvp_to_dict(self, kvp: List[str]) -> List[Dict[str, Any]]:
@@ -371,7 +380,7 @@ class AllnetPowerOnResponse3:
 self.utc_time = datetime.now(tz=pytz.timezone("UTC")).strftime(
 "%Y-%m-%dT%H:%M:%SZ"
 )
-self.setting = ""
+self.setting = "1"
 self.res_ver = "3"
 self.token = str(token)
 
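A quick illustration of what the new is_develop fallback above produces, using hypothetical config values in place of self.config.title.* (the real values come from core.yaml):

```python
# Illustration only: how the develop-mode fallback URI and host are built,
# with placeholder values standing in for self.config.title.hostname/port.
title_hostname = "example.com"  # assumed title.hostname from core.yaml
title_port = 8080               # assumed title.port from core.yaml
game_id = "SDBT"
ver = "2.05"

uri = f"http://{title_hostname}:{title_port}/{game_id}/{ver.replace('.', '')}/"
host = f"{title_hostname}:{title_port}"

print(uri)   # http://example.com:8080/SDBT/205/
print(host)  # example.com:8080
```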
@@ -267,24 +267,6 @@ class MuchaConfig:
 self.__config, "core", "mucha", "hostname", default="localhost"
 )
 
-@property
-def port(self) -> int:
-return CoreConfig.get_config_field(
-self.__config, "core", "mucha", "port", default=8444
-)
-
-@property
-def ssl_cert(self) -> str:
-return CoreConfig.get_config_field(
-self.__config, "core", "mucha", "ssl_cert", default="cert/server.pem"
-)
-
-@property
-def signing_key(self) -> str:
-return CoreConfig.get_config_field(
-self.__config, "core", "mucha", "signing_key", default="cert/billing.key"
-)
-
 
 class CoreConfig(dict):
 def __init__(self) -> None:
@@ -71,7 +71,8 @@ class Data:
 games = Utils.get_all_titles()
 for game_dir, game_mod in games.items():
 try:
-title_db = game_mod.database(self.config)
+if hasattr(game_mod, "database") and hasattr(game_mod, "current_schema_version"):
+game_mod.database(self.config)
 metadata.create_all(self.__engine.connect())
 
 self.base.set_schema_ver(
@@ -109,7 +110,8 @@ class Data:
 mod = importlib.import_module(f"titles.{dir}")
 
 try:
-title_db = mod.database(self.config)
+if hasattr(mod, "database"):
+mod.database(self.config)
 metadata.drop_all(self.__engine.connect())
 
 except Exception as e:
@@ -143,25 +145,49 @@ class Data:
 )
 return
 
-if not os.path.exists(
-f"core/data/schema/versions/{game.upper()}_{version}_{action}.sql"
-):
-self.logger.error(
-f"Could not find {action} script {game.upper()}_{version}_{action}.sql in core/data/schema/versions folder"
-)
-return
+if action == "upgrade":
+for x in range(old_ver, version):
+if not os.path.exists(
+f"core/data/schema/versions/{game.upper()}_{x + 1}_{action}.sql"
+):
+self.logger.error(
+f"Could not find {action} script {game.upper()}_{x + 1}_{action}.sql in core/data/schema/versions folder"
+)
+return
 
 with open(
-f"core/data/schema/versions/{game.upper()}_{version}_{action}.sql",
+f"core/data/schema/versions/{game.upper()}_{x + 1}_{action}.sql",
 "r",
 encoding="utf-8",
 ) as f:
 sql = f.read()
 
 result = self.base.execute(sql)
 if result is None:
 self.logger.error("Error execuing sql script!")
 return None
 
+else:
+for x in range(old_ver, version, -1):
+if not os.path.exists(
+f"core/data/schema/versions/{game.upper()}_{x - 1}_{action}.sql"
+):
+self.logger.error(
+f"Could not find {action} script {game.upper()}_{x - 1}_{action}.sql in core/data/schema/versions folder"
+)
+return
+
+with open(
+f"core/data/schema/versions/{game.upper()}_{x - 1}_{action}.sql",
+"r",
+encoding="utf-8",
+) as f:
+sql = f.read()
+
+result = self.base.execute(sql)
+if result is None:
+self.logger.error("Error execuing sql script!")
+return None
+
 result = self.base.set_schema_ver(version, game)
 if result is None:
@@ -235,3 +261,19 @@ class Data:
 if not cards:
 self.logger.info(f"Delete hanging user {user['id']}")
 self.user.delete_user(user["id"])
+
+def autoupgrade(self) -> None:
+all_games = self.base.get_all_schema_vers()
+if all_games is None:
+self.logger.warn("Failed to get schema versions")
+
+for x in all_games:
+game = x["game"].upper()
+update_ver = 1
+for y in range(2, 100):
+if os.path.exists(f"core/data/schema/versions/{game}_{y}_upgrade.sql"):
+update_ver = y
+else:
+break
+
+self.migrate_database(game, update_ver, "upgrade")
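The reworked migrate_database above now steps through every intermediate script ({GAME}_{n}_upgrade.sql going up, {GAME}_{n}_rollback.sql going down) instead of applying a single file, and the new autoupgrade scans core/data/schema/versions for the highest available upgrade per game. A minimal usage sketch, assuming a config/core.yaml like the one dbutils.py loads later in this diff:

```python
# Sketch only: dbutils.py wraps these same calls behind its
# "upgrade"/"rollback"/"autoupgrade" actions.
import yaml

from core.config import CoreConfig
from core.data import Data

cfg = CoreConfig()
cfg.update(yaml.safe_load(open("config/core.yaml")))  # assumed config path

data = Data(cfg)
# Walks from the currently recorded schema version up to 3, applying each
# SDBT_{n}_upgrade.sql along the way.
data.migrate_database("SDBT", 3, "upgrade")
# Applies every pending upgrade script for every game recorded in schema_ver.
data.autoupgrade()
```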
@@ -2,6 +2,7 @@ import json
 import logging
 from random import randrange
 from typing import Any, Optional, Dict, List
+from sqlalchemy.engine import Row
 from sqlalchemy.engine.cursor import CursorResult
 from sqlalchemy.engine.base import Connection
 from sqlalchemy.sql import text, func, select
@@ -81,6 +82,14 @@ class BaseData:
 """
 return randrange(10000, 9999999)
 
+def get_all_schema_vers(self) -> Optional[List[Row]]:
+sql = select(schema_ver)
+
+result = self.execute(sql)
+if result is None:
+return None
+return result.fetchall()
+
 def get_schema_ver(self, game: str) -> Optional[int]:
 sql = select(schema_ver).where(schema_ver.c.game == game)
 
@@ -1,9 +1,9 @@
 SET FOREIGN_KEY_CHECKS=0;
-ALTER TABLE diva_score DROP COLUMN edition;
-ALTER TABLE diva_playlog DROP COLUMN edition;
 
 ALTER TABLE diva_score DROP FOREIGN KEY diva_score_ibfk_1;
 ALTER TABLE diva_score DROP CONSTRAINT diva_score_uk;
 ALTER TABLE diva_score ADD CONSTRAINT diva_score_uk UNIQUE (user, pv_id, difficulty);
 ALTER TABLE diva_score ADD CONSTRAINT diva_score_ibfk_1 FOREIGN KEY (user) REFERENCES aime_user(id) ON DELETE CASCADE;
+ALTER TABLE diva_score DROP COLUMN edition;
+ALTER TABLE diva_playlog DROP COLUMN edition;
 SET FOREIGN_KEY_CHECKS=1;
@@ -1,99 +0,0 @@
-CREATE TABLE ongeki_user_gacha (
-id INT PRIMARY KEY NOT NULL AUTO_INCREMENT,
-user INT NOT NULL,
-gachaId INT NOT NULL,
-totalGachaCnt INT DEFAULT 0,
-ceilingGachaCnt INT DEFAULT 0,
-selectPoint INT DEFAULT 0,
-useSelectPoint INT DEFAULT 0,
-dailyGachaCnt INT DEFAULT 0,
-fiveGachaCnt INT DEFAULT 0,
-elevenGachaCnt INT DEFAULT 0,
-dailyGachaDate TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-CONSTRAINT ongeki_user_gacha_uk UNIQUE (user, gachaId),
-CONSTRAINT ongeki_user_gacha_ibfk_1 FOREIGN KEY (user) REFERENCES aime_user (id) ON DELETE CASCADE ON UPDATE CASCADE
-);
-
-CREATE TABLE ongeki_user_gacha_supply (
-id INT PRIMARY KEY NOT NULL AUTO_INCREMENT,
-user INT NOT NULL,
-cardId INT NOT NULL,
-CONSTRAINT ongeki_user_gacha_supply_uk UNIQUE (user, cardId),
-CONSTRAINT ongeki_user_gacha_supply_ibfk_1 FOREIGN KEY (user) REFERENCES aime_user (id) ON DELETE CASCADE ON UPDATE CASCADE
-);
-
-CREATE TABLE ongeki_static_gachas (
-id INT PRIMARY KEY NOT NULL AUTO_INCREMENT,
-version INT NOT NULL,
-gachaId INT NOT NULL,
-gachaName VARCHAR(255) NOT NULL,
-kind INT NOT NULL,
-type INT DEFAULT 0,
-isCeiling BOOLEAN DEFAULT 0,
-maxSelectPoint INT DEFAULT 0,
-ceilingCnt INT DEFAULT 10,
-changeRateCnt1 INT DEFAULT 0,
-changeRateCnt2 INT DEFAULT 0,
-startDate TIMESTAMP DEFAULT '2018-01-01 00:00:00.0',
-endDate TIMESTAMP DEFAULT '2038-01-01 00:00:00.0',
-noticeStartDate TIMESTAMP DEFAULT '2018-01-01 00:00:00.0',
-noticeEndDate TIMESTAMP DEFAULT '2038-01-01 00:00:00.0',
-convertEndDate TIMESTAMP DEFAULT '2038-01-01 00:00:00.0',
-CONSTRAINT ongeki_static_gachas_uk UNIQUE (version, gachaId, gachaName)
-);
-
-CREATE TABLE ongeki_static_gacha_cards (
-id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
-gachaId INT NOT NULL,
-cardId INT NOT NULL,
-rarity INT NOT NULL,
-weight INT DEFAULT 1,
-isPickup BOOLEAN DEFAULT 0,
-isSelect BOOLEAN DEFAULT 1,
-CONSTRAINT ongeki_static_gacha_cards_uk UNIQUE (gachaId, cardId)
-);
-
-
-CREATE TABLE ongeki_static_cards (
-id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
-version INT NOT NULL,
-cardId INT NOT NULL,
-name VARCHAR(255) NOT NULL,
-charaId INT NOT NULL,
-nickName VARCHAR(255),
-school VARCHAR(255) NOT NULL,
-attribute VARCHAR(5) NOT NULL,
-gakunen VARCHAR(255) NOT NULL,
-rarity INT NOT NULL,
-levelParam VARCHAR(255) NOT NULL,
-skillId INT NOT NULL,
-choKaikaSkillId INT NOT NULL,
-cardNumber VARCHAR(255),
-CONSTRAINT ongeki_static_cards_uk UNIQUE (version, cardId)
-) CHARACTER SET utf8mb4;
-
-CREATE TABLE ongeki_user_print_detail (
-id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
-user INT NOT NULL,
-cardId INT NOT NULL,
-cardType INT DEFAULT 0,
-printDate TIMESTAMP NOT NULL,
-serialId VARCHAR(20) NOT NULL,
-placeId INT NOT NULL,
-clientId VARCHAR(11) NOT NULL,
-printerSerialId VARCHAR(20) NOT NULL,
-isHolograph BOOLEAN DEFAULT 0,
-isAutographed BOOLEAN DEFAULT 0,
-printOption1 BOOLEAN DEFAULT 1,
-printOption2 BOOLEAN DEFAULT 1,
-printOption3 BOOLEAN DEFAULT 1,
-printOption4 BOOLEAN DEFAULT 1,
-printOption5 BOOLEAN DEFAULT 1,
-printOption6 BOOLEAN DEFAULT 1,
-printOption7 BOOLEAN DEFAULT 1,
-printOption8 BOOLEAN DEFAULT 1,
-printOption9 BOOLEAN DEFAULT 1,
-printOption10 BOOLEAN DEFAULT 0,
-FOREIGN KEY (user) REFERENCES aime_user(id) ON DELETE CASCADE ON UPDATE CASCADE,
-CONSTRAINT ongeki_user_print_detail_uk UNIQUE (serialId)
-) CHARACTER SET utf8mb4;
core/data/schema/versions/SDEZ_2_rollback.sql (new file, 21 lines)
@@ -0,0 +1,21 @@
+ALTER TABLE mai2_item_card
+CHANGE COLUMN cardId card_id INT NOT NULL AFTER user,
+CHANGE COLUMN cardTypeId card_kind INT NOT NULL,
+CHANGE COLUMN charaId chara_id INT NOT NULL,
+CHANGE COLUMN mapId map_id INT NOT NULL,
+CHANGE COLUMN startDate start_date TIMESTAMP NULL DEFAULT '2018-01-01 00:00:00',
+CHANGE COLUMN endDate end_date TIMESTAMP NULL DEFAULT '2038-01-01 00:00:00';
+
+ALTER TABLE mai2_item_item
+CHANGE COLUMN itemId item_id INT NOT NULL AFTER user,
+CHANGE COLUMN itemKind item_kind INT NOT NULL,
+CHANGE COLUMN isValid is_valid TINYINT(1) NOT NULL DEFAULT '1';
+
+ALTER TABLE mai2_item_character
+CHANGE COLUMN characterId character_id INT NOT NULL,
+CHANGE COLUMN useCount use_count INT NOT NULL DEFAULT '0';
+
+ALTER TABLE mai2_item_charge
+CHANGE COLUMN chargeId charge_id INT NOT NULL,
+CHANGE COLUMN purchaseDate purchase_date TIMESTAMP NOT NULL,
+CHANGE COLUMN validDate valid_date TIMESTAMP NOT NULL;
core/data/schema/versions/SDEZ_3_upgrade.sql (new file, 21 lines)
@@ -0,0 +1,21 @@
+ALTER TABLE mai2_item_card
+CHANGE COLUMN card_id cardId INT NOT NULL AFTER user,
+CHANGE COLUMN card_kind cardTypeId INT NOT NULL,
+CHANGE COLUMN chara_id charaId INT NOT NULL,
+CHANGE COLUMN map_id mapId INT NOT NULL,
+CHANGE COLUMN start_date startDate TIMESTAMP NULL DEFAULT '2018-01-01 00:00:00',
+CHANGE COLUMN end_date endDate TIMESTAMP NULL DEFAULT '2038-01-01 00:00:00';
+
+ALTER TABLE mai2_item_item
+CHANGE COLUMN item_id itemId INT NOT NULL AFTER user,
+CHANGE COLUMN item_kind itemKind INT NOT NULL,
+CHANGE COLUMN is_valid isValid TINYINT(1) NOT NULL DEFAULT '1';
+
+ALTER TABLE mai2_item_character
+CHANGE COLUMN character_id characterId INT NOT NULL,
+CHANGE COLUMN use_count useCount INT NOT NULL DEFAULT '0';
+
+ALTER TABLE mai2_item_charge
+CHANGE COLUMN charge_id chargeId INT NOT NULL,
+CHANGE COLUMN purchase_date purchaseDate TIMESTAMP NOT NULL,
+CHANGE COLUMN valid_date validDate TIMESTAMP NOT NULL;
@@ -10,9 +10,8 @@ from twisted.python.components import registerAdapter
 import jinja2
 import bcrypt
 
-from core.config import CoreConfig
+from core import CoreConfig, Utils
 from core.data import Data
-from core.utils import Utils
 
 
 class IUserSession(Interface):
@@ -31,7 +30,7 @@ class UserSession(object):
 
 class FrontendServlet(resource.Resource):
 def getChild(self, name: bytes, request: Request):
-self.logger.debug(f"{request.getClientIP()} -> {name.decode()}")
+self.logger.debug(f"{Utils.get_ip_addr(request)} -> {name.decode()}")
 if name == b"":
 return self
 return resource.Resource.getChild(self, name, request)
@@ -85,7 +84,7 @@ class FrontendServlet(resource.Resource):
 )
 
 def render_GET(self, request):
-self.logger.debug(f"{request.getClientIP()} -> {request.uri.decode()}")
+self.logger.debug(f"{Utils.get_ip_addr(request)} -> {request.uri.decode()}")
 template = self.environment.get_template("core/frontend/index.jinja")
 return template.render(
 server_name=self.config.server.name,
@@ -114,7 +113,7 @@ class FE_Base(resource.Resource):
 
 class FE_Gate(FE_Base):
 def render_GET(self, request: Request):
-self.logger.debug(f"{request.getClientIP()} -> {request.uri.decode()}")
+self.logger.debug(f"{Utils.get_ip_addr(request)} -> {request.uri.decode()}")
 uri: str = request.uri.decode()
 
 sesh = request.getSession()
@@ -143,7 +142,7 @@ class FE_Gate(FE_Base):
 
 def render_POST(self, request: Request):
 uri = request.uri.decode()
-ip = request.getClientAddress().host
+ip = Utils.get_ip_addr(request)
 
 if uri == "/gate/gate.login":
 access_code: str = request.args[b"access_code"][0].decode()
@@ -6,7 +6,7 @@ from twisted.web.http import Request
 from datetime import datetime
 import pytz
 
-from core.config import CoreConfig
+from core import CoreConfig
 from core.utils import Utils
 
 
@@ -47,11 +47,13 @@ class MuchaServlet:
 self.mucha_registry.append(game_cd)
 
 self.logger.info(
-f"Serving {len(self.mucha_registry)} games on port {self.config.mucha.port}"
+f"Serving {len(self.mucha_registry)} games"
 )
 
 def handle_boardauth(self, request: Request, _: Dict) -> bytes:
 req_dict = self.mucha_preprocess(request.content.getvalue())
+client_ip = Utils.get_ip_addr(request)
+
 if req_dict is None:
 self.logger.error(
 f"Error processing mucha request {request.content.getvalue()}"
@@ -61,7 +63,7 @@ class MuchaServlet:
 req = MuchaAuthRequest(req_dict)
 self.logger.debug(f"Mucha request {vars(req)}")
 self.logger.info(
-f"Boardauth request from {request.getClientAddress().host} for {req.gameVer}"
+f"Boardauth request from {client_ip} for {req.gameVer}"
 )
 
 if req.gameCd not in self.mucha_registry:
@@ -71,7 +73,7 @@ class MuchaServlet:
 # TODO: Decrypt S/N
 
 resp = MuchaAuthResponse(
-f"{self.config.mucha.hostname}{':' + str(self.config.mucha.port) if self.config.server.is_develop else ''}"
+f"{self.config.mucha.hostname}{':' + str(self.config.allnet.port) if self.config.server.is_develop else ''}"
 )
 
 self.logger.debug(f"Mucha response {vars(resp)}")
@@ -80,6 +82,8 @@ class MuchaServlet:
 
 def handle_updatecheck(self, request: Request, _: Dict) -> bytes:
 req_dict = self.mucha_preprocess(request.content.getvalue())
+client_ip = Utils.get_ip_addr(request)
+
 if req_dict is None:
 self.logger.error(
 f"Error processing mucha request {request.content.getvalue()}"
@@ -89,7 +93,7 @@ class MuchaServlet:
 req = MuchaUpdateRequest(req_dict)
 self.logger.debug(f"Mucha request {vars(req)}")
 self.logger.info(
-f"Updatecheck request from {request.getClientAddress().host} for {req.gameVer}"
+f"Updatecheck request from {client_ip} for {req.gameVer}"
 )
 
 if req.gameCd not in self.mucha_registry:
@@ -68,7 +68,7 @@ class TitleServlet:
 self.logger.error(f"{folder} missing game_code or index in __init__.py")
 
 self.logger.info(
-f"Serving {len(self.title_registry)} game codes on port {core_cfg.title.port}"
+f"Serving {len(self.title_registry)} game codes {'on port ' + str(core_cfg.title.port) if core_cfg.title.port > 0 else ''}"
 )
 
 def render_GET(self, request: Request, endpoints: dict) -> bytes:
@@ -1,5 +1,6 @@
 from typing import Dict, Any
 from types import ModuleType
+from twisted.web.http import Request
 import logging
 import importlib
 from os import walk
@@ -21,3 +22,7 @@ class Utils:
 logging.getLogger("core").error(f"get_all_titles: {dir} - {e}")
 raise
 return ret
+
+@classmethod
+def get_ip_addr(cls, req: Request) -> str:
+return req.getAllHeaders()[b"x-forwarded-for"].decode() if b"x-forwarded-for" in req.getAllHeaders() else req.getClientAddress().host
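Utils.get_ip_addr prefers the X-Forwarded-For header (which the nginx examples later in this diff now set) and only falls back to the socket peer address. An illustration of that precedence using a plain header dict, so it can be run without a live twisted Request:

```python
# Illustration only: mirrors the precedence used by Utils.get_ip_addr,
# but takes a plain dict of headers instead of a twisted Request.
def resolve_client_ip(headers: dict, peer_host: str) -> str:
    # Prefer the proxy-supplied header, fall back to the socket peer address.
    fwd = headers.get(b"x-forwarded-for")
    return fwd.decode() if fwd is not None else peer_host

assert resolve_client_ip({b"x-forwarded-for": b"203.0.113.7"}, "127.0.0.1") == "203.0.113.7"
assert resolve_client_ip({}, "192.0.2.10") == "192.0.2.10"
```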
dbutils.py (16 changed lines)
@@ -2,7 +2,7 @@ import yaml
 import argparse
 from core.config import CoreConfig
 from core.data import Data
-from os import path
+from os import path, mkdir, access, W_OK
 
 if __name__ == "__main__":
 parser = argparse.ArgumentParser(description="Database utilities")
@@ -33,8 +33,19 @@
 cfg = CoreConfig()
 if path.exists(f"{args.config}/core.yaml"):
 cfg.update(yaml.safe_load(open(f"{args.config}/core.yaml")))
 
+if not path.exists(cfg.server.log_dir):
+mkdir(cfg.server.log_dir)
+
+if not access(cfg.server.log_dir, W_OK):
+print(
+f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
+)
+exit(1)
+
 data = Data(cfg)
 
 
 if args.action == "create":
 data.create_database()
 
@@ -53,6 +64,9 @@
 else:
 data.migrate_database(args.game, int(args.version), args.action)
 
+elif args.action == "autoupgrade":
+data.autoupgrade()
+
 elif args.action == "create-owner":
 data.create_owner(args.email)
 
docs/game_specific_info.md (new file, 351 lines)
@@ -0,0 +1,351 @@
+# ARTEMiS Games Documentation
+
+Below are all supported games with supported version IDs in order to use
+the corresponding importer and database upgrades.
+
+**Important: The described database upgrades are only required if you are using an old database schema, e.g. still
+using the megaime database. Clean installations always create the latest database structure!**
+
+# Table of contents
+
+- [Supported Games](#supported-games)
+- [Chunithm](#chunithm)
+- [crossbeats REV.](#crossbeats-rev)
+- [maimai DX](#maimai-dx)
+- [O.N.G.E.K.I.](#o-n-g-e-k-i)
+- [Card Maker](#card-maker)
+- [WACCA](#wacca)
+
+
+# Supported Games
+
+Games listed below have been tested and confirmed working.
+
+## Chunithm
+
+### SDBT
+
+| Version ID | Version Name |
+|------------|--------------------|
+| 0 | Chunithm |
+| 1 | Chunithm+ |
+| 2 | Chunithm Air |
+| 3 | Chunithm Air + |
+| 4 | Chunithm Star |
+| 5 | Chunithm Star + |
+| 6 | Chunithm Amazon |
+| 7 | Chunithm Amazon + |
+| 8 | Chunithm Crystal |
+| 9 | Chunithm Crystal + |
+| 10 | Chunithm Paradise |
+
+### SDHD/SDBT
+
+| Version ID | Version Name |
+|------------|-----------------|
+| 11 | Chunithm New!! |
+| 12 | Chunithm New!!+ |
+
+
+### Importer
+
+In order to use the importer, locate your game installation folder and execute:
+
+```shell
+python read.py --series SDBT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
+```
+
+The importer for Chunithm will import: Events, Music, Charge Items and Avatar Accessories.
+
+### Database upgrade
+
+Always make sure your database tables are up-to-date; to do so, go to the `core/data/schema/versions` folder and see
+which version is the latest, e.g. `SDBT_3_upgrade.sql`. In order to upgrade to version 3 in this case you need to
+perform all previous updates as well:
+
+```shell
+python dbutils.py --game SDBT --version 2 upgrade
+python dbutils.py --game SDBT --version 3 upgrade
+```
+
+## crossbeats REV.
+
+### SDCA
+
+| Version ID | Version Name |
+|------------|------------------------------------|
+| 0 | crossbeats REV. |
+| 1 | crossbeats REV. SUNRISE |
+| 2 | crossbeats REV. SUNRISE S2 |
+| 3 | crossbeats REV. SUNRISE S2 Omnimix |
+
+### Importer
+
+In order to use the importer you need to use the provided `Export.csv` file:
+
+```shell
+python read.py --series SDCA --version <version ID> --binfolder titles/cxb/data
+```
+
+The importer for crossbeats REV. will import Music.
+
+### Config
+
+Config file is located in `config/cxb.yaml`.
+
+| Option | Info |
+|------------------------|------------------------------------------------------------|
+| `hostname` | Requires a proper `hostname` (not localhost!) to run |
+| `ssl_enable` | Enables/Disables the use of the `ssl_cert` and `ssl_key` |
+| `port` | Set your insecure port number |
+| `port_secure` | Set your secure/SSL port number |
+| `ssl_cert`, `ssl_key` | Enter your SSL certificate (must not be a self-signed cert) |
+
+
+## maimai DX
+
+### SDEZ
+
+| Version ID | Version Name |
+|------------|-------------------------|
+| 0 | maimai DX |
+| 1 | maimai DX PLUS |
+| 2 | maimai DX Splash |
+| 3 | maimai DX Splash PLUS |
+| 4 | maimai DX Universe |
+| 5 | maimai DX Universe PLUS |
+
+### Importer
+
+In order to use the importer, locate your game installation folder and execute:
+
+```shell
+python read.py --series SDEZ --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
+```
+
+The importer for maimai DX will import Events, Music and Tickets.
+
+**NOTE: It is required to use the importer because the game will
+crash without it!**
+
+### Database upgrade
+
+Always make sure your database tables are up-to-date; to do so, go to the `core/data/schema/versions` folder and see which version is the latest, e.g. `SDEZ_2_upgrade.sql`. In order to upgrade to version 2 in this case you need to perform all previous updates as well:
+
+```shell
+python dbutils.py --game SDEZ --version 2 upgrade
+```
+
+## Hatsune Miku Project Diva
+
+### SBZV
+
+| Version ID | Version Name |
+|------------|---------------------------------|
+| 0 | Project Diva Arcade |
+| 1 | Project Diva Arcade Future Tone |
+
+
+### Importer
+
+In order to use the importer, locate your game installation folder and execute:
+
+```shell
+python read.py --series SBZV --version <version ID> --binfolder /path/to/game/data/diva --optfolder /path/to/game/data/diva/mdata
+```
+
+The importer for Project Diva Arcade will import all required data in order to use
+the Shop, Modules and Customizations.
+
+### Config
+
+Config file is located in `config/diva.yaml`.
+
+| Option | Info |
+|----------------------|-------------------------------------------------------------------------------------------------|
+| `unlock_all_modules` | Unlocks all modules (costumes) by default, if set to `False` all modules need to be purchased |
+| `unlock_all_items` | Unlocks all items (customizations) by default, if set to `False` all items need to be purchased |
+
+
+### Database upgrade
+
+Always make sure your database tables are up-to-date; to do so, go to the `core/data/schema/versions` folder and see
+which version is the latest, e.g. `SBZV_4_upgrade.sql`. In order to upgrade to version 4 in this case you need to
+perform all previous updates as well:
+
+```shell
+python dbutils.py --game SBZV --version 2 upgrade
+python dbutils.py --game SBZV --version 3 upgrade
+python dbutils.py --game SBZV --version 4 upgrade
+```
+
+## O.N.G.E.K.I.
+
+### SDDT
+
+| Version ID | Version Name |
+|------------|----------------------------|
+| 0 | O.N.G.E.K.I. |
+| 1 | O.N.G.E.K.I. + |
+| 2 | O.N.G.E.K.I. Summer |
+| 3 | O.N.G.E.K.I. Summer + |
+| 4 | O.N.G.E.K.I. Red |
+| 5 | O.N.G.E.K.I. Red + |
+| 6 | O.N.G.E.K.I. Bright |
+| 7 | O.N.G.E.K.I. Bright Memory |
+
+
+### Importer
+
+In order to use the importer, locate your game installation folder and execute:
+
+```shell
+python read.py --series SDDT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
+```
+
+The importer for O.N.G.E.K.I. will import all Cards, Music and Events.
+
+**NOTE: The Importer is required for Card Maker.**
+
+### Config
+
+Config file is located in `config/ongeki.yaml`.
+
+| Option | Info |
+|------------------|----------------------------------------------------------------------------------------------------------------|
+| `enabled_gachas` | Enter all gacha IDs for Card Maker to work; gachas other than the default may not work due to missing cards added to them |
+
+Note: 1149 and higher are only for Card Maker 1.35 and higher and will be ignored on lower versions.
+
+### Database upgrade
+
+Always make sure your database tables are up-to-date; to do so, go to the `core/data/schema/versions` folder and see
+which version is the latest, e.g. `SDDT_4_upgrade.sql`. In order to upgrade to version 4 in this case you need to
+perform all previous updates as well:
+
+```shell
+python dbutils.py --game SDDT --version 2 upgrade
+python dbutils.py --game SDDT --version 3 upgrade
+python dbutils.py --game SDDT --version 4 upgrade
+```
+
+## Card Maker
+
+### SDED
+
+| Version ID | Version Name |
+|------------|-----------------|
+| 0 | Card Maker 1.34 |
+| 1 | Card Maker 1.35 |
+
+
+### Support status
+
+* Card Maker 1.34:
+* Chunithm New!!: Yes
+* maimai DX Universe: Yes
+* O.N.G.E.K.I. Bright: Yes
+
+* Card Maker 1.35:
+* Chunithm New!!+: Yes
+* maimai DX Universe PLUS: Yes
+* O.N.G.E.K.I. Bright Memory: Yes
+
+
+### Importer
+
+In order to use the importer you need to use the provided `.csv` files (which are required for O.N.G.E.K.I.) and the
+option folders:
+
+```shell
+python read.py --series SDED --version <version ID> --binfolder titles/cm/cm_data --optfolder /path/to/cardmaker/option/folder
+```
+
+**If you haven't already executed the O.N.G.E.K.I. importer, make sure you import all cards!**
+
+```shell
+python read.py --series SDDT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
+```
+
+Also make sure to import all maimai and Chunithm data as well:
+
+```shell
+python read.py --series SDED --version <version ID> --binfolder /path/to/cardmaker/CardMaker_Data
+```
+
+The importer for Card Maker will import all required Gachas (Banners) and cards (for maimai/Chunithm) and the hardcoded
+Cards for each Gacha (O.N.G.E.K.I. only).
+
+**NOTE: Without executing the importer Card Maker WILL NOT work!**
+
+
+### O.N.G.E.K.I. Gachas
+
+Gacha "無料ガチャ" can only pull from the free cards with the following probabilities: 94% R, 5% SR and 1% chance of
+getting an SSR card.
+
+Gacha "無料ガチャ(SR確定)" can only pull from free SR cards with prob: 92% SR and 8% chance of getting an SSR card.
+
+Gacha "レギュラーガチャ" can pull from every card added to ongeki_static_cards with the following prob: 77% R, 20% SR
+and 3% chance of getting an SSR card.
+
+All other (limited) gachas can pull from every card added to ongeki_static_cards, but with the promoted cards
+(click on the green button under the banner) having a 10 times higher chance to get pulled.
+
+### Chunithm Gachas
+
+All cards in Chunithm (basically just the characters) have the same rarity, so it just pulls randomly from all cards
+of a given gacha, but makes sure you cannot pull the same card twice within the same 5-pull roll.
+
+### Notes
+
+Card Maker 1.34 will only load an O.N.G.E.K.I. Bright profile (1.30). Card Maker 1.35 will only load an O.N.G.E.K.I.
+Bright Memory profile (1.35).
+The gachas inside the `ongeki.yaml` will make sure only the right gacha IDs for the right CM version will be loaded.
+Gacha IDs up to 1140 will be loaded for CM 1.34 and all gachas will be loaded for CM 1.35.
+
+**NOTE: There is currently no way to load/use the (printed) maimai DX cards!**
+
+## WACCA
+
+### SDFE
+
+| Version ID | Version Name |
+|------------|---------------|
+| 0 | WACCA |
+| 1 | WACCA S |
+| 2 | WACCA Lily |
+| 3 | WACCA Lily R |
+| 4 | WACCA Reverse |
+
+
+### Importer
+
+In order to use the importer, locate your game installation folder and execute:
+
+```shell
+python read.py --series SDFE --version <version ID> --binfolder /path/to/game/WindowsNoEditor/Mercury/Content
+```
+
+The importer for WACCA will import all Music data.
+
+### Config
+
+Config file is located in `config/wacca.yaml`.
+
+| Option | Info |
+|--------------------|-----------------------------------------------------------------------------|
+| `always_vip` | Enables/Disables VIP, if disabled it needs to be purchased manually in game |
+| `infinite_tickets` | Always set the "unlock expert" tickets to 5 |
+| `infinite_wp` | Sets the user WP to `999999` |
+| `enabled_gates` | Enter all gate IDs which should be enabled in game |
+
+
+### Database upgrade
+
+Always make sure your database tables are up-to-date; to do so, go to the `core/data/schema/versions` folder and see which version is the latest, e.g. `SDFE_3_upgrade.sql`. In order to upgrade to version 3 in this case you need to perform all previous updates as well:
+
+```shell
+python dbutils.py --game SDFE --version 2 upgrade
+python dbutils.py --game SDFE --version 3 upgrade
+```
@@ -48,6 +48,3 @@ mucha:
 enable: False
 hostname: "localhost"
 loglevel: "info"
-port: 8444
-ssl_key: "cert/server.key"
-ssl_cert: "cert/server.pem"
@@ -4,6 +4,8 @@ server {
 server_name naominet.jp;
 
 location / {
+proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+proxy_pass_request_headers on;
 proxy_pass http://localhost:8000/;
 }
 }
@@ -14,6 +16,8 @@ server {
 server_name your.hostname.here;
 
 location / {
+proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+proxy_pass_request_headers on;
 proxy_pass http://localhost:8080/;
 }
 }
@@ -75,6 +79,8 @@ server {
 ssl_prefer_server_ciphers off;
 
 location / {
+proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+proxy_pass_request_headers on;
 proxy_pass http://localhost:8080/;
 }
 }
@@ -95,6 +101,8 @@ server {
 ssl_prefer_server_ciphers off;
 
 location / {
+proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+proxy_pass_request_headers on;
 proxy_pass http://localhost:8080/SDBT/104/;
 }
 }
@@ -131,6 +139,8 @@ server {
 add_header Strict-Transport-Security "max-age=63072000" always;
 
 location / {
+proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+proxy_pass_request_headers on;
 proxy_pass http://localhost:8090/;
 }
 }
@@ -3,9 +3,6 @@ server:
 enable: True
 loglevel: "info"
 port: 9000
-port_matching: 9001
-port_stun: 9002
-port_turn: 9003
-port_admission: 9004
-ssl_cert: cert/pokken.crt
-ssl_key: cert/pokken.key
+port_stun: 9001
+port_turn: 9002
+port_admission: 9003
index.py (20 changed lines)
@@ -96,9 +96,11 @@ class HttpDispatcher(resource.Resource):
 
 def render_GET(self, request: Request) -> bytes:
 test = self.map_get.match(request.uri.decode())
+client_ip = Utils.get_ip_addr(request)
+
 if test is None:
 self.logger.debug(
-f"Unknown GET endpoint {request.uri.decode()} from {request.getClientAddress().host} to port {request.getHost().port}"
+f"Unknown GET endpoint {request.uri.decode()} from {client_ip} to port {request.getHost().port}"
 )
 request.setResponseCode(404)
 return b"Endpoint not found."
@@ -107,9 +109,11 @@ class HttpDispatcher(resource.Resource):
 
 def render_POST(self, request: Request) -> bytes:
 test = self.map_post.match(request.uri.decode())
+client_ip = Utils.get_ip_addr(request)
+
 if test is None:
 self.logger.debug(
-f"Unknown POST endpoint {request.uri.decode()} from {request.getClientAddress().host} to port {request.getHost().port}"
+f"Unknown POST endpoint {request.uri.decode()} from {client_ip} to port {request.getHost().port}"
 )
 request.setResponseCode(404)
 return b"Endpoint not found."
@@ -166,6 +170,12 @@ if __name__ == "__main__":
 if not path.exists(cfg.server.log_dir):
 mkdir(cfg.server.log_dir)
 
+if not access(cfg.server.log_dir, W_OK):
+print(
+f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
+)
+exit(1)
+
 logger = logging.getLogger("core")
 log_fmt_str = "[%(asctime)s] Core | %(levelname)s | %(message)s"
 log_fmt = logging.Formatter(log_fmt_str)
@@ -185,12 +195,6 @@
 logger.setLevel(log_lv)
 coloredlogs.install(level=log_lv, logger=logger, fmt=log_fmt_str)
 
-if not access(cfg.server.log_dir, W_OK):
-logger.error(
-f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
-)
-exit(1)
-
 if not cfg.aimedb.key:
 logger.error("!!AIMEDB KEY BLANK, SET KEY IN CORE.YAML!!")
 exit(1)
read.py (9 changed lines)
@@ -4,13 +4,13 @@ import re
 import os
 import yaml
 from os import path
-import logging, coloredlogs
+import logging
+import coloredlogs
 
 from logging.handlers import TimedRotatingFileHandler
 from typing import List, Optional
 
-from core import CoreConfig
-from core.utils import Utils
+from core import CoreConfig, Utils
 
 
 class BaseReader:
@@ -135,7 +135,8 @@ if __name__ == "__main__":
 
 for dir, mod in titles.items():
 if args.series in mod.game_codes:
-handler = mod.reader(config, args.version, bin_arg, opt_arg, args.extra)
+handler = mod.reader(config, args.version,
+bin_arg, opt_arg, args.extra)
 handler.read()
 
 logger.info("Done")
@@ -17,7 +17,7 @@ Games listed below have been tested and confirmed working. Only game versions ol
 
 + Card Maker
 + 1.34.xx
-+ 1.36.xx
++ 1.35.xx
 
 + Ongeki
 + All versions up to Bright Memory
@@ -36,5 +36,8 @@ Games listed below have been tested and confirmed working. Only game versions ol
 ## Setup guides
 Follow the platform-specific guides for [windows](docs/INSTALL_WINDOWS.md) and [ubuntu](docs/INSTALL_UBUNTU.md) to setup and run the server.
 
+## Game specific information
+Read [Game specific info](docs/game_specific_info.md) for all supported games, importer settings, configuration options and database upgrades.
+
 ## Production guide
 See the [production guide](docs/prod.md) for running a production server.
@ -33,6 +33,9 @@ class ChuniBase:
|
|||||||
def handle_get_game_charge_api_request(self, data: Dict) -> Dict:
|
def handle_get_game_charge_api_request(self, data: Dict) -> Dict:
|
||||||
game_charge_list = self.data.static.get_enabled_charges(self.version)
|
game_charge_list = self.data.static.get_enabled_charges(self.version)
|
||||||
|
|
||||||
|
if game_charge_list is None or len(game_charge_list) == 0:
|
||||||
|
return {"length": 0, "gameChargeList": []}
|
||||||
|
|
||||||
charges = []
|
charges = []
|
||||||
for x in range(len(game_charge_list)):
|
for x in range(len(game_charge_list)):
|
||||||
charges.append(
|
charges.append(
|
||||||
@ -52,6 +55,14 @@ class ChuniBase:
|
|||||||
def handle_get_game_event_api_request(self, data: Dict) -> Dict:
|
def handle_get_game_event_api_request(self, data: Dict) -> Dict:
|
||||||
game_events = self.data.static.get_enabled_events(self.version)
|
game_events = self.data.static.get_enabled_events(self.version)
|
||||||
|
|
||||||
|
if game_events is None or len(game_events) == 0:
|
||||||
|
self.logger.warn("No enabled events, did you run the reader?")
|
||||||
|
return {
|
||||||
|
"type": data["type"],
|
||||||
|
"length": 0,
|
||||||
|
"gameEventList": [],
|
||||||
|
}
|
||||||
|
|
||||||
event_list = []
|
event_list = []
|
||||||
for evt_row in game_events:
|
for evt_row in game_events:
|
||||||
tmp = {}
|
tmp = {}
|
||||||
@ -588,3 +599,11 @@ class ChuniBase:
|
|||||||
|
|
||||||
def handle_upsert_client_testmode_api_request(self, data: Dict) -> Dict:
|
def handle_upsert_client_testmode_api_request(self, data: Dict) -> Dict:
|
||||||
return {"returnCode": "1"}
|
return {"returnCode": "1"}
|
||||||
|
|
||||||
|
def handle_get_user_net_battle_data_api_request(self, data: Dict) -> Dict:
|
||||||
|
return {
|
||||||
|
"userId": data["userId"],
|
||||||
|
"userNetBattleData": {
|
||||||
|
"recentNBSelectMusicList": []
|
||||||
|
}
|
||||||
|
}
|
@ -8,10 +8,12 @@ import inflection
 import string
 from Crypto.Cipher import AES
 from Crypto.Util.Padding import pad
+from Crypto.Protocol.KDF import PBKDF2
+from Crypto.Hash import SHA1
 from os import path
-from typing import Tuple
+from typing import Tuple, Dict

-from core import CoreConfig
+from core import CoreConfig, Utils
 from titles.chuni.config import ChuniConfig
 from titles.chuni.const import ChuniConstants
 from titles.chuni.base import ChuniBase
@ -33,25 +35,26 @@ class ChuniServlet:
     def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
         self.core_cfg = core_cfg
         self.game_cfg = ChuniConfig()
+        self.hash_table: Dict[Dict[str, str]] = {}
         if path.exists(f"{cfg_dir}/{ChuniConstants.CONFIG_NAME}"):
             self.game_cfg.update(
                 yaml.safe_load(open(f"{cfg_dir}/{ChuniConstants.CONFIG_NAME}"))
             )

         self.versions = [
-            ChuniBase(core_cfg, self.game_cfg),
-            ChuniPlus(core_cfg, self.game_cfg),
-            ChuniAir(core_cfg, self.game_cfg),
-            ChuniAirPlus(core_cfg, self.game_cfg),
-            ChuniStar(core_cfg, self.game_cfg),
-            ChuniStarPlus(core_cfg, self.game_cfg),
-            ChuniAmazon(core_cfg, self.game_cfg),
-            ChuniAmazonPlus(core_cfg, self.game_cfg),
-            ChuniCrystal(core_cfg, self.game_cfg),
-            ChuniCrystalPlus(core_cfg, self.game_cfg),
-            ChuniParadise(core_cfg, self.game_cfg),
-            ChuniNew(core_cfg, self.game_cfg),
-            ChuniNewPlus(core_cfg, self.game_cfg),
+            ChuniBase,
+            ChuniPlus,
+            ChuniAir,
+            ChuniAirPlus,
+            ChuniStar,
+            ChuniStarPlus,
+            ChuniAmazon,
+            ChuniAmazonPlus,
+            ChuniCrystal,
+            ChuniCrystalPlus,
+            ChuniParadise,
+            ChuniNew,
+            ChuniNewPlus,
         ]

         self.logger = logging.getLogger("chuni")
@ -80,6 +83,21 @@ class ChuniServlet:
             )
             self.logger.inited = True

+        for version, keys in self.game_cfg.crypto.keys.items():
+            if len(keys) < 3:
+                continue
+
+            self.hash_table[version] = {}
+
+            method_list = [method for method in dir(self.versions[version]) if not method.startswith('__')]
+            for method in method_list:
+                method_fixed = inflection.camelize(method)[6:-7]
+                hash = PBKDF2(method_fixed, bytes.fromhex(keys[2]), 128, count=44, hmac_hash_module=SHA1)
+
+                self.hash_table[version][hash.hex()] = method_fixed
+
+                self.logger.debug(f"Hashed v{version} method {method_fixed} with {bytes.fromhex(keys[2])} to get {hash.hex()}")
+
     @classmethod
     def get_allnet_info(
         cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str
@ -103,7 +121,7 @@ class ChuniServlet:
         return (True, f"http://{core_cfg.title.hostname}/{game_code}/$v/", "")

     def render_POST(self, request: Request, version: int, url_path: str) -> bytes:
-        if url_path.lower() == "/ping":
+        if url_path.lower() == "ping":
             return zlib.compress(b'{"returnCode": "1"}')

         req_raw = request.content.getvalue()
@ -111,6 +129,7 @@ class ChuniServlet:
         encrtped = False
         internal_ver = 0
         endpoint = url_split[len(url_split) - 1]
+        client_ip = Utils.get_ip_addr(request)

         if version < 105: # 1.0
             internal_ver = ChuniConstants.VER_CHUNITHM
@ -143,25 +162,38 @@ class ChuniServlet:
             # If we get a 32 character long hex string, it's a hash and we're
             # doing encrypted. The likelyhood of false positives is low but
             # technically not 0
-            endpoint = request.getHeader("User-Agent").split("#")[0]
+            if internal_ver < ChuniConstants.VER_CHUNITHM_NEW:
+                endpoint = request.getHeader("User-Agent").split("#")[0]
+
+            else:
+                if internal_ver not in self.hash_table:
+                    self.logger.error(f"v{version} does not support encryption or no keys entered")
+                    return zlib.compress(b'{"stat": "0"}')
+
+                elif endpoint.lower() not in self.hash_table[internal_ver]:
+                    self.logger.error(f"No hash found for v{version} endpoint {endpoint}")
+                    return zlib.compress(b'{"stat": "0"}')
+
+                endpoint = self.hash_table[internal_ver][endpoint.lower()]
+
             try:
                 crypt = AES.new(
-                    bytes.fromhex(self.game_cfg.crypto.keys[str(internal_ver)][0]),
+                    bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][0]),
                     AES.MODE_CBC,
-                    bytes.fromhex(self.game_cfg.crypto.keys[str(internal_ver)][1]),
+                    bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][1]),
                 )

                 req_raw = crypt.decrypt(req_raw)

-            except:
+            except Exception as e:
                 self.logger.error(
-                    f"Failed to decrypt v{version} request to {endpoint} -> {req_raw}"
+                    f"Failed to decrypt v{version} request to {endpoint} -> {e}"
                 )
                 return zlib.compress(b'{"stat": "0"}')

             encrtped = True

-        if not encrtped and self.game_cfg.crypto.encrypted_only:
+        if not encrtped and self.game_cfg.crypto.encrypted_only and internal_ver >= ChuniConstants.VER_CHUNITHM_CRYSTAL_PLUS:
             self.logger.error(
                 f"Unencrypted v{version} {endpoint} request, but config is set to encrypted only: {req_raw}"
             )
@ -179,19 +211,20 @@ class ChuniServlet:
             req_data = json.loads(unzip)

         self.logger.info(
-            f"v{version} {endpoint} request from {request.getClientAddress().host}"
+            f"v{version} {endpoint} request from {client_ip}"
         )
         self.logger.debug(req_data)

         func_to_find = "handle_" + inflection.underscore(endpoint) + "_request"
+        handler_cls = self.versions[internal_ver](self.core_cfg, self.game_cfg)

-        if not hasattr(self.versions[internal_ver], func_to_find):
+        if not hasattr(handler_cls, func_to_find):
             self.logger.warning(f"Unhandled v{version} request {endpoint}")
             resp = {"returnCode": 1}

         else:
             try:
-                handler = getattr(self.versions[internal_ver], func_to_find)
+                handler = getattr(handler_cls, func_to_find)
                 resp = handler(req_data)

             except Exception as e:
@ -211,9 +244,9 @@ class ChuniServlet:
         padded = pad(zipped, 16)

         crypt = AES.new(
-            bytes.fromhex(self.game_cfg.crypto.keys[str(internal_ver)][0]),
+            bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][0]),
             AES.MODE_CBC,
-            bytes.fromhex(self.game_cfg.crypto.keys[str(internal_ver)][1]),
+            bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][1]),
         )

         return crypt.encrypt(padded)
@ -1,6 +1,6 @@
 import logging
 from datetime import datetime, timedelta
+from random import randint
 from typing import Dict

 from core.config import CoreConfig
@ -61,7 +61,7 @@ class ChuniNew(ChuniBase):
         }

     def handle_remove_token_api_request(self, data: Dict) -> Dict:
-        return { "returnCode": "1" }
+        return {"returnCode": "1"}

     def handle_delete_token_api_request(self, data: Dict) -> Dict:
         return {"returnCode": "1"}
@ -122,11 +122,355 @@ class ChuniNew(ChuniBase):
             "playerLevel": profile["playerLevel"],
             "rating": profile["rating"],
             "headphone": profile["headphone"],
-            "chargeState": 0,
-            "userNameEx": "0",
+            # Enables favorites and teams
+            "chargeState": 1,
+            "userNameEx": "",
             "banState": 0,
             "classEmblemMedal": profile["classEmblemMedal"],
             "classEmblemBase": profile["classEmblemBase"],
             "battleRankId": profile["battleRankId"],
         }
         return data1

+    def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
+        p = self.data.profile.get_profile_data(data["userId"], self.version)
+        if p is None:
+            return {}
+
+        return {
+            "userName": p["userName"],
+            "level": p["level"],
+            "medal": p["medal"],
+            "lastDataVersion": "2.00.00",
+            "isLogin": False,
+        }
+
+    def handle_printer_login_api_request(self, data: Dict) -> Dict:
+        return {"returnCode": 1}
+
+    def handle_printer_logout_api_request(self, data: Dict) -> Dict:
+        return {"returnCode": 1}
+
+    def handle_get_game_gacha_api_request(self, data: Dict) -> Dict:
+        """
+        returns all current active banners (gachas)
+        """
+        game_gachas = self.data.static.get_gachas(self.version)
+
+        # clean the database rows
+        game_gacha_list = []
+        for gacha in game_gachas:
+            tmp = gacha._asdict()
+            tmp.pop("id")
+            tmp.pop("version")
+            tmp["startDate"] = datetime.strftime(tmp["startDate"], "%Y-%m-%d %H:%M:%S")
+            tmp["endDate"] = datetime.strftime(tmp["endDate"], "%Y-%m-%d %H:%M:%S")
+            tmp["noticeStartDate"] = datetime.strftime(
+                tmp["noticeStartDate"], "%Y-%m-%d %H:%M:%S"
+            )
+            tmp["noticeEndDate"] = datetime.strftime(
+                tmp["noticeEndDate"], "%Y-%m-%d %H:%M:%S"
+            )
+
+            game_gacha_list.append(tmp)
+
+        return {
+            "length": len(game_gacha_list),
+            "gameGachaList": game_gacha_list,
+            # no clue
+            "registIdList": [],
+        }
+
+    def handle_get_game_gacha_card_by_id_api_request(self, data: Dict) -> Dict:
+        """
+        returns all valid cards for a given gachaId
+        """
+        game_gacha_cards = self.data.static.get_gacha_cards(data["gachaId"])
+
+        game_gacha_card_list = []
+        for gacha_card in game_gacha_cards:
+            tmp = gacha_card._asdict()
+            tmp.pop("id")
+            game_gacha_card_list.append(tmp)
+
+        return {
+            "gachaId": data["gachaId"],
+            "length": len(game_gacha_card_list),
+            # check isPickup from the chuni_static_gachas?
+            "isPickup": False,
+            "gameGachaCardList": game_gacha_card_list,
+            # again no clue
+            "emissionList": [],
+            "afterCalcList": [],
+            "ssrBookCalcList": [],
+        }
+
+    def handle_cm_get_user_data_api_request(self, data: Dict) -> Dict:
+        p = self.data.profile.get_profile_data(data["userId"], self.version)
+        if p is None:
+            return {}
+
+        profile = p._asdict()
+        profile.pop("id")
+        profile.pop("user")
+        profile.pop("version")
+
+        return {
+            "userId": data["userId"],
+            "userData": profile,
+            "userEmoney": [
+                {
+                    "type": 0,
+                    "emoneyCredit": 100,
+                    "emoneyBrand": 1,
+                    "ext1": 0,
+                    "ext2": 0,
+                    "ext3": 0,
+                }
+            ],
+        }
+
+    def handle_get_user_gacha_api_request(self, data: Dict) -> Dict:
+        user_gachas = self.data.item.get_user_gachas(data["userId"])
+        if user_gachas is None:
+            return {"userId": data["userId"], "length": 0, "userGachaList": []}
+
+        user_gacha_list = []
+        for gacha in user_gachas:
+            tmp = gacha._asdict()
+            tmp.pop("id")
+            tmp.pop("user")
+            tmp["dailyGachaDate"] = datetime.strftime(tmp["dailyGachaDate"], "%Y-%m-%d")
+            user_gacha_list.append(tmp)
+
+        return {
+            "userId": data["userId"],
+            "length": len(user_gacha_list),
+            "userGachaList": user_gacha_list,
+        }
+
+    def handle_get_user_printed_card_api_request(self, data: Dict) -> Dict:
+        user_print_list = self.data.item.get_user_print_states(
+            data["userId"], has_completed=True
+        )
+        if user_print_list is None:
+            return {
+                "userId": data["userId"],
+                "length": 0,
+                "nextIndex": -1,
+                "userPrintedCardList": [],
+            }
+
+        print_list = []
+        next_idx = int(data["nextIndex"])
+        max_ct = int(data["maxCount"])
+
+        for x in range(next_idx, len(user_print_list)):
+            tmp = user_print_list[x]._asdict()
+            print_list.append(tmp["cardId"])
+
+            if len(user_print_list) >= max_ct:
+                break
+
+        if len(user_print_list) >= max_ct:
+            next_idx = next_idx + max_ct
+        else:
+            next_idx = -1
+
+        return {
+            "userId": data["userId"],
+            "length": len(print_list),
+            "nextIndex": next_idx,
+            "userPrintedCardList": print_list,
+        }
+
+    def handle_get_user_card_print_error_api_request(self, data: Dict) -> Dict:
+        user_id = data["userId"]
+
+        user_print_states = self.data.item.get_user_print_states(
+            user_id, has_completed=False
+        )
+
+        card_print_state_list = []
+        for card in user_print_states:
+            tmp = card._asdict()
+            tmp["orderId"] = tmp["id"]
+            tmp.pop("user")
+            tmp["limitDate"] = datetime.strftime(tmp["limitDate"], "%Y-%m-%d")
+
+            card_print_state_list.append(tmp)
+
+        return {
+            "userId": user_id,
+            "length": len(card_print_state_list),
+            "userCardPrintStateList": card_print_state_list,
+        }
+
+    def handle_cm_get_user_character_api_request(self, data: Dict) -> Dict:
+        return super().handle_get_user_character_api_request(data)
+
+    def handle_cm_get_user_item_api_request(self, data: Dict) -> Dict:
+        return super().handle_get_user_item_api_request(data)
+
+    def handle_roll_gacha_api_request(self, data: Dict) -> Dict:
+        """
+        Handle a gacha roll API request, with:
+        gachaId: the gachaId where the cards should be pulled from
+        times: the number of gacha rolls
+        characterId: the character which the user wants
+        """
+        gacha_id = data["gachaId"]
+        num_rolls = data["times"]
+        chara_id = data["characterId"]
+
+        rolled_cards = []
+
+        # characterId is set after 10 rolls, where the user can select a card
+        # from all gameGachaCards, therefore the correct cardId for a given
+        # characterId should be returned
+        if chara_id != -1:
+            # get the
+            card = self.data.static.get_gacha_card_by_character(gacha_id, chara_id)
+
+            tmp = card._asdict()
+            tmp.pop("id")
+
+            rolled_cards.append(tmp)
+        else:
+            gacha_cards = self.data.static.get_gacha_cards(gacha_id)
+
+            # get the card id for each roll
+            for _ in range(num_rolls):
+                # get the index from all possible cards
+                card_idx = randint(0, len(gacha_cards) - 1)
+                # remove the index from the cards so it wont get pulled again
+                card = gacha_cards.pop(card_idx)
+
+                # remove the "id" fronm the card
+                tmp = card._asdict()
+                tmp.pop("id")
+
+                rolled_cards.append(tmp)
+
+        return {"length": len(rolled_cards), "gameGachaCardList": rolled_cards}
+
+    def handle_cm_upsert_user_gacha_api_request(self, data: Dict) -> Dict:
+        upsert = data["cmUpsertUserGacha"]
+        user_id = data["userId"]
+        place_id = data["placeId"]
+
+        # save the user data
+        user_data = upsert["userData"]
+        user_data.pop("rankUpChallengeResults")
+        user_data.pop("userEmoney")
+
+        self.data.profile.put_profile_data(user_id, self.version, user_data)
+
+        # save the user gacha
+        user_gacha = upsert["userGacha"]
+        gacha_id = user_gacha["gachaId"]
+        user_gacha.pop("gachaId")
+        user_gacha.pop("dailyGachaDate")
+
+        self.data.item.put_user_gacha(user_id, gacha_id, user_gacha)
+
+        # save all user items
+        if "userItemList" in upsert:
+            for item in upsert["userItemList"]:
+                self.data.item.put_item(user_id, item)
+
+        # add every gamegachaCard to database
+        for card in upsert["gameGachaCardList"]:
+            self.data.item.put_user_print_state(
+                user_id,
+                hasCompleted=False,
+                placeId=place_id,
+                cardId=card["cardId"],
+                gachaId=card["gachaId"],
+            )
+
+        # retrieve every game gacha card which has been added in order to get
+        # the orderId for the next request
+        user_print_states = self.data.item.get_user_print_states_by_gacha(
+            user_id, gacha_id, has_completed=False
+        )
+        card_print_state_list = []
+        for card in user_print_states:
+            tmp = card._asdict()
+            tmp["orderId"] = tmp["id"]
+            tmp.pop("user")
+            tmp["limitDate"] = datetime.strftime(tmp["limitDate"], "%Y-%m-%d")
+
+            card_print_state_list.append(tmp)
+
+        return {
+            "returnCode": "1",
+            "apiName": "CMUpsertUserGachaApi",
+            "userCardPrintStateList": card_print_state_list,
+        }
+
+    def handle_cm_upsert_user_printlog_api_request(self, data: Dict) -> Dict:
+        return {
+            "returnCode": 1,
+            "orderId": 0,
+            "serialId": "11111111111111111111",
+            "apiName": "CMUpsertUserPrintlogApi",
+        }
+
+    def handle_cm_upsert_user_print_api_request(self, data: Dict) -> Dict:
+        user_print_detail = data["userPrintDetail"]
+        user_id = data["userId"]
+
+        # generate random serial id
+        serial_id = "".join([str(randint(0, 9)) for _ in range(20)])
+
+        # not needed because are either zero or unset
+        user_print_detail.pop("orderId")
+        user_print_detail.pop("printNumber")
+        user_print_detail.pop("serialId")
+        user_print_detail["printDate"] = datetime.strptime(
+            user_print_detail["printDate"], "%Y-%m-%d"
+        )
+
+        # add the entry to the user print table with the random serialId
+        self.data.item.put_user_print_detail(user_id, serial_id, user_print_detail)
+
+        return {
+            "returnCode": 1,
+            "orderId": 0,
+            "serialId": serial_id,
+            "apiName": "CMUpsertUserPrintApi",
+        }
+
+    def handle_cm_upsert_user_print_subtract_api_request(self, data: Dict) -> Dict:
+        upsert = data["userCardPrintState"]
+        user_id = data["userId"]
+        place_id = data["placeId"]
+
+        # save all user items
+        if "userItemList" in data:
+            for item in data["userItemList"]:
+                self.data.item.put_item(user_id, item)
+
+        # set the card print state to success and use the orderId as the key
+        self.data.item.put_user_print_state(
+            user_id,
+            id=upsert["orderId"],
+            hasCompleted=True
+        )
+
+        return {"returnCode": "1", "apiName": "CMUpsertUserPrintSubtractApi"}
+
+    def handle_cm_upsert_user_print_cancel_api_request(self, data: Dict) -> Dict:
+        order_ids = data["orderIdList"]
+        user_id = data["userId"]
+
+        # set the card print state to success and use the orderId as the key
+        for order_id in order_ids:
+            self.data.item.put_user_print_state(
+                user_id,
+                id=order_id,
+                hasCompleted=True
+            )
+
+        return {"returnCode": "1", "apiName": "CMUpsertUserPrintCancelApi"}
@ -30,3 +30,10 @@ class ChuniNewPlus(ChuniNew):
             "reflectorUri"
         ] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/205/ChuniServlet/"
         return ret
+
+    def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
+        user_data = super().handle_cm_get_user_preview_api_request(data)
+
+        # hardcode lastDataVersion for CardMaker 1.35
+        user_data["lastDataVersion"] = "2.05.00"
+        return user_data
@ -114,6 +114,76 @@ map_area = Table(
     mysql_charset="utf8mb4",
 )

+gacha = Table(
+    "chuni_item_gacha",
+    metadata,
+    Column("id", Integer, primary_key=True, nullable=False),
+    Column(
+        "user",
+        ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
+        nullable=False,
+    ),
+    Column("gachaId", Integer, nullable=False),
+    Column("totalGachaCnt", Integer, server_default="0"),
+    Column("ceilingGachaCnt", Integer, server_default="0"),
+    Column("dailyGachaCnt", Integer, server_default="0"),
+    Column("fiveGachaCnt", Integer, server_default="0"),
+    Column("elevenGachaCnt", Integer, server_default="0"),
+    Column("dailyGachaDate", TIMESTAMP, nullable=False, server_default=func.now()),
+    UniqueConstraint("user", "gachaId", name="chuni_item_gacha_uk"),
+    mysql_charset="utf8mb4",
+)
+
+print_state = Table(
+    "chuni_item_print_state",
+    metadata,
+    Column("id", Integer, primary_key=True, nullable=False),
+    Column(
+        "user",
+        ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
+        nullable=False,
+    ),
+    Column("hasCompleted", Boolean, nullable=False, server_default="0"),
+    Column(
+        "limitDate", TIMESTAMP, nullable=False, server_default="2038-01-01 00:00:00.0"
+    ),
+    Column("placeId", Integer),
+    Column("cardId", Integer),
+    Column("gachaId", Integer),
+    UniqueConstraint("id", "user", name="chuni_item_print_state_uk"),
+    mysql_charset="utf8mb4",
+)
+
+print_detail = Table(
+    "chuni_item_print_detail",
+    metadata,
+    Column("id", Integer, primary_key=True, nullable=False),
+    Column(
+        "user",
+        ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
+        nullable=False,
+    ),
+    Column("cardId", Integer, nullable=False),
+    Column("printDate", TIMESTAMP, nullable=False),
+    Column("serialId", String(20), nullable=False),
+    Column("placeId", Integer, nullable=False),
+    Column("clientId", String(11), nullable=False),
+    Column("printerSerialId", String(20), nullable=False),
+    Column("printOption1", Boolean, server_default="0"),
+    Column("printOption2", Boolean, server_default="0"),
+    Column("printOption3", Boolean, server_default="0"),
+    Column("printOption4", Boolean, server_default="0"),
+    Column("printOption5", Boolean, server_default="0"),
+    Column("printOption6", Boolean, server_default="0"),
+    Column("printOption7", Boolean, server_default="0"),
+    Column("printOption8", Boolean, server_default="0"),
+    Column("printOption9", Boolean, server_default="0"),
+    Column("printOption10", Boolean, server_default="0"),
+    Column("created", String(255), server_default=""),
+    UniqueConstraint("serialId", name="chuni_item_print_detail_uk"),
+    mysql_charset="utf8mb4",
+)
+

 class ChuniItemData(BaseData):
     def put_character(self, user_id: int, character_data: Dict) -> Optional[int]:
@ -235,3 +305,89 @@ class ChuniItemData(BaseData):
         if result is None:
             return None
         return result.fetchall()
+
+    def get_user_gachas(self, aime_id: int) -> Optional[List[Row]]:
+        sql = gacha.select(gacha.c.user == aime_id)
+
+        result = self.execute(sql)
+        if result is None:
+            return None
+        return result.fetchall()
+
+    def put_user_gacha(
+        self, aime_id: int, gacha_id: int, gacha_data: Dict
+    ) -> Optional[int]:
+        sql = insert(gacha).values(user=aime_id, gachaId=gacha_id, **gacha_data)
+
+        conflict = sql.on_duplicate_key_update(
+            user=aime_id, gachaId=gacha_id, **gacha_data
+        )
+        result = self.execute(conflict)
+
+        if result is None:
+            self.logger.warn(f"put_user_gacha: Failed to insert! aime_id: {aime_id}")
+            return None
+        return result.lastrowid
+
+    def get_user_print_states(
+        self, aime_id: int, has_completed: bool = False
+    ) -> Optional[List[Row]]:
+        sql = print_state.select(
+            and_(
+                print_state.c.user == aime_id,
+                print_state.c.hasCompleted == has_completed
+            )
+        )
+
+        result = self.execute(sql)
+        if result is None:
+            return None
+        return result.fetchall()
+
+    def get_user_print_states_by_gacha(
+        self, aime_id: int, gacha_id: int, has_completed: bool = False
+    ) -> Optional[List[Row]]:
+        sql = print_state.select(
+            and_(
+                print_state.c.user == aime_id,
+                print_state.c.gachaId == gacha_id,
+                print_state.c.hasCompleted == has_completed
+            )
+        )
+
+        result = self.execute(sql)
+        if result is None:
+            return None
+        return result.fetchall()
+
+    def put_user_print_state(self, aime_id: int, **print_data) -> Optional[int]:
+        sql = insert(print_state).values(user=aime_id, **print_data)
+
+        conflict = sql.on_duplicate_key_update(user=aime_id, **print_data)
+        result = self.execute(conflict)
+
+        if result is None:
+            self.logger.warn(
+                f"put_user_print_state: Failed to insert! aime_id: {aime_id}"
+            )
+            return None
+        return result.lastrowid
+
+    def put_user_print_detail(
+        self, aime_id: int, serial_id: str, user_print_data: Dict
+    ) -> Optional[int]:
+        sql = insert(print_detail).values(
+            user=aime_id, serialId=serial_id, **user_print_data
+        )
+
+        conflict = sql.on_duplicate_key_update(
+            user=aime_id, **user_print_data
+        )
+        result = self.execute(conflict)
+
+        if result is None:
+            self.logger.warn(
+                f"put_user_print_detail: Failed to insert! aime_id: {aime_id}"
+            )
+            return None
+        return result.lastrowid
@ -68,6 +68,60 @@ avatar = Table(
     mysql_charset="utf8mb4",
 )

+gachas = Table(
+    "chuni_static_gachas",
+    metadata,
+    Column("id", Integer, primary_key=True, nullable=False),
+    Column("version", Integer, nullable=False),
+    Column("gachaId", Integer, nullable=False),
+    Column("gachaName", String(255), nullable=False),
+    Column("type", Integer, nullable=False, server_default="0"),
+    Column("kind", Integer, nullable=False, server_default="0"),
+    Column("isCeiling", Boolean, server_default="0"),
+    Column("ceilingCnt", Integer, server_default="10"),
+    Column("changeRateCnt1", Integer, server_default="0"),
+    Column("changeRateCnt2", Integer, server_default="0"),
+    Column("startDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
+    Column("endDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
+    Column("noticeStartDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
+    Column("noticeEndDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
+    UniqueConstraint("version", "gachaId", "gachaName", name="chuni_static_gachas_uk"),
+    mysql_charset="utf8mb4",
+)
+
+cards = Table(
+    "chuni_static_cards",
+    metadata,
+    Column("id", Integer, primary_key=True, nullable=False),
+    Column("version", Integer, nullable=False),
+    Column("cardId", Integer, nullable=False),
+    Column("charaName", String(255), nullable=False),
+    Column("charaId", Integer, nullable=False),
+    Column("presentName", String(255), nullable=False),
+    Column("rarity", Integer, server_default="2"),
+    Column("labelType", Integer, nullable=False),
+    Column("difType", Integer, nullable=False),
+    Column("miss", Integer, nullable=False),
+    Column("combo", Integer, nullable=False),
+    Column("chain", Integer, nullable=False),
+    Column("skillName", String(255), nullable=False),
+    UniqueConstraint("version", "cardId", name="chuni_static_cards_uk"),
+    mysql_charset="utf8mb4",
+)
+
+gacha_cards = Table(
+    "chuni_static_gacha_cards",
+    metadata,
+    Column("id", Integer, primary_key=True, nullable=False),
+    Column("gachaId", Integer, nullable=False),
+    Column("cardId", Integer, nullable=False),
+    Column("rarity", Integer, nullable=False),
+    Column("weight", Integer, server_default="1"),
+    Column("isPickup", Boolean, server_default="0"),
+    UniqueConstraint("gachaId", "cardId", name="chuni_static_gacha_cards_uk"),
+    mysql_charset="utf8mb4",
+)
+

 class ChuniStaticData(BaseData):
     def put_event(
@ -265,3 +319,112 @@ class ChuniStaticData(BaseData):
         if result is None:
             return None
         return result.lastrowid
+
+    def put_gacha(
+        self,
+        version: int,
+        gacha_id: int,
+        gacha_name: int,
+        **gacha_data,
+    ) -> Optional[int]:
+        sql = insert(gachas).values(
+            version=version,
+            gachaId=gacha_id,
+            gachaName=gacha_name,
+            **gacha_data,
+        )
+
+        conflict = sql.on_duplicate_key_update(
+            version=version,
+            gachaId=gacha_id,
+            gachaName=gacha_name,
+            **gacha_data,
+        )
+
+        result = self.execute(conflict)
+        if result is None:
+            self.logger.warn(f"Failed to insert gacha! gacha_id {gacha_id}")
+            return None
+        return result.lastrowid
+
+    def get_gachas(self, version: int) -> Optional[List[Dict]]:
+        sql = gachas.select(gachas.c.version <= version).order_by(
+            gachas.c.gachaId.asc()
+        )
+
+        result = self.execute(sql)
+        if result is None:
+            return None
+        return result.fetchall()
+
+    def get_gacha(self, version: int, gacha_id: int) -> Optional[Dict]:
+        sql = gachas.select(
+            and_(gachas.c.version <= version, gachas.c.gachaId == gacha_id)
+        )
+
+        result = self.execute(sql)
+        if result is None:
+            return None
+        return result.fetchone()
+
+    def put_gacha_card(
+        self, gacha_id: int, card_id: int, **gacha_card
+    ) -> Optional[int]:
+        sql = insert(gacha_cards).values(gachaId=gacha_id, cardId=card_id, **gacha_card)
+
+        conflict = sql.on_duplicate_key_update(
+            gachaId=gacha_id, cardId=card_id, **gacha_card
+        )
+
+        result = self.execute(conflict)
+        if result is None:
+            self.logger.warn(f"Failed to insert gacha card! gacha_id {gacha_id}")
+            return None
+        return result.lastrowid
+
+    def get_gacha_cards(self, gacha_id: int) -> Optional[List[Dict]]:
+        sql = gacha_cards.select(gacha_cards.c.gachaId == gacha_id)
+
+        result = self.execute(sql)
+        if result is None:
+            return None
+        return result.fetchall()
+
+    def get_gacha_card_by_character(self, gacha_id: int, chara_id: int) -> Optional[Dict]:
+        sql_sub = (
+            select(cards.c.cardId)
+            .filter(
+                cards.c.charaId == chara_id
+            )
+            .scalar_subquery()
+        )
+
+        # Perform the main query, also rename the resulting column to ranking
+        sql = gacha_cards.select(and_(
+            gacha_cards.c.gachaId == gacha_id,
+            gacha_cards.c.cardId == sql_sub
+        ))
+
+        result = self.execute(sql)
+        if result is None:
+            return None
+        return result.fetchone()
+
+    def put_card(self, version: int, card_id: int, **card_data) -> Optional[int]:
+        sql = insert(cards).values(version=version, cardId=card_id, **card_data)
+
+        conflict = sql.on_duplicate_key_update(**card_data)
+
+        result = self.execute(conflict)
+        if result is None:
+            self.logger.warn(f"Failed to insert card! card_id {card_id}")
+            return None
+        return result.lastrowid
+
+    def get_card(self, version: int, card_id: int) -> Optional[Dict]:
+        sql = cards.select(and_(cards.c.version <= version, cards.c.cardId == card_id))
+
+        result = self.execute(sql)
+        if result is None:
+            return None
+        return result.fetchone()
@ -1,9 +1,11 @@
 from titles.cm.index import CardMakerServlet
 from titles.cm.const import CardMakerConstants
 from titles.cm.read import CardMakerReader
+from titles.cm.database import CardMakerData

 index = CardMakerServlet
 reader = CardMakerReader
+database = CardMakerData

 game_codes = [CardMakerConstants.GAME_CODE]

@ -11,10 +11,10 @@ from titles.cm.const import CardMakerConstants
 from titles.cm.config import CardMakerConfig


-class CardMaker136(CardMakerBase):
+class CardMaker135(CardMakerBase):
     def __init__(self, core_cfg: CoreConfig, game_cfg: CardMakerConfig) -> None:
         super().__init__(core_cfg, game_cfg)
-        self.version = CardMakerConstants.VER_CARD_MAKER_136
+        self.version = CardMakerConstants.VER_CARD_MAKER_135

     def handle_get_game_connect_api_request(self, data: Dict) -> Dict:
         ret = super().handle_get_game_connect_api_request(data)
@ -32,7 +32,7 @@ class CardMaker136(CardMakerBase):
     def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
         ret = super().handle_get_game_setting_api_request(data)
         ret["gameSetting"]["dataVersion"] = "1.35.00"
-        ret["gameSetting"]["ongekiCmVersion"] = "1.35.04"
+        ret["gameSetting"]["ongekiCmVersion"] = "1.35.03"
         ret["gameSetting"]["chuniCmVersion"] = "2.05.00"
         ret["gameSetting"]["maimaiCmVersion"] = "1.25.00"
         return ret
@ -4,9 +4,9 @@ class CardMakerConstants:
     CONFIG_NAME = "cardmaker.yaml"

     VER_CARD_MAKER = 0
-    VER_CARD_MAKER_136 = 1
+    VER_CARD_MAKER_135 = 1

-    VERSION_NAMES = ("Card Maker 1.34", "Card Maker 1.36")
+    VERSION_NAMES = ("Card Maker 1.34", "Card Maker 1.35")

     @classmethod
     def game_ver_to_string(cls, ver: int):
8  titles/cm/database.py  Normal file
@ -0,0 +1,8 @@
+from core.data import Data
+from core.config import CoreConfig
+
+
+class CardMakerData(Data):
+    def __init__(self, cfg: CoreConfig) -> None:
+        super().__init__(cfg)
+        # empty Card Maker database
@ -15,7 +15,7 @@ from core.config import CoreConfig
 from titles.cm.config import CardMakerConfig
 from titles.cm.const import CardMakerConstants
 from titles.cm.base import CardMakerBase
-from titles.cm.cm136 import CardMaker136
+from titles.cm.cm135 import CardMaker135


 class CardMakerServlet:
@ -29,7 +29,7 @@ class CardMakerServlet:

         self.versions = [
             CardMakerBase(core_cfg, self.game_cfg),
-            CardMaker136(core_cfg, self.game_cfg),
+            CardMaker135(core_cfg, self.game_cfg),
         ]

         self.logger = logging.getLogger("cardmaker")
@ -87,8 +87,8 @@ class CardMakerServlet:

         if version >= 130 and version < 135: # Card Maker
             internal_ver = CardMakerConstants.VER_CARD_MAKER
-        elif version >= 135 and version < 140: # Card Maker
-            internal_ver = CardMakerConstants.VER_CARD_MAKER_136
+        elif version >= 135 and version < 136: # Card Maker 1.35
+            internal_ver = CardMakerConstants.VER_CARD_MAKER_135

         if all(c in string.hexdigits for c in endpoint) and len(endpoint) == 32:
             # If we get a 32 character long hex string, it's a hash and we're
@ -12,6 +12,10 @@ from titles.ongeki.database import OngekiData
 from titles.cm.const import CardMakerConstants
 from titles.ongeki.const import OngekiConstants
 from titles.ongeki.config import OngekiConfig
+from titles.mai2.database import Mai2Data
+from titles.mai2.const import Mai2Constants
+from titles.chuni.database import ChuniData
+from titles.chuni.const import ChuniConstants


 class CardMakerReader(BaseReader):
@ -25,6 +29,8 @@ class CardMakerReader(BaseReader):
     ) -> None:
         super().__init__(config, version, bin_dir, opt_dir, extra)
         self.ongeki_data = OngekiData(config)
+        self.mai2_data = Mai2Data(config)
+        self.chuni_data = ChuniData(config)

         try:
             self.logger.info(
@ -34,15 +40,29 @@ class CardMakerReader(BaseReader):
             self.logger.error(f"Invalid Card Maker version {version}")
             exit(1)

+    def _get_card_maker_directory(self, directory: str) -> str:
+        for root, dirs, files in os.walk(directory):
+            for dir in dirs:
+                if (
+                    os.path.exists(f"{root}/{dir}/MU3")
+                    and os.path.exists(f"{root}/{dir}/MAI")
+                    and os.path.exists(f"{root}/{dir}/CHU")
+                ):
+                    return f"{root}/{dir}"
+
     def read(self) -> None:
         static_datas = {
             "static_gachas.csv": "read_ongeki_gacha_csv",
             "static_gacha_cards.csv": "read_ongeki_gacha_card_csv",
         }

-        data_dirs = []

         if self.bin_dir is not None:
+            data_dir = self._get_card_maker_directory(self.bin_dir)
+
+            self.read_chuni_card(f"{data_dir}/CHU/Data/A000/card")
+            self.read_chuni_gacha(f"{data_dir}/CHU/Data/A000/gacha")
+
+            self.read_mai2_card(f"{data_dir}/MAI/Data/A000/card")
             for file, func in static_datas.items():
                 if os.path.exists(f"{self.bin_dir}/MU3/{file}"):
                     read_csv = getattr(CardMakerReader, func)
@ -53,13 +73,163 @@ class CardMakerReader(BaseReader):
                     )

         if self.opt_dir is not None:
-            data_dirs += self.get_data_directories(self.opt_dir)
+            data_dirs = self.get_data_directories(self.opt_dir)

         # ONGEKI (MU3) cnnot easily access the bin data(A000.pac)
         # so only opt_dir will work for now
         for dir in data_dirs:
+            self.read_chuni_card(f"{dir}/CHU/card")
+            self.read_chuni_gacha(f"{dir}/CHU/gacha")
+
             self.read_ongeki_gacha(f"{dir}/MU3/gacha")

+    def read_chuni_card(self, base_dir: str) -> None:
+        self.logger.info(f"Reading cards from {base_dir}...")
+
+        version_ids = {
+            "v2_00": ChuniConstants.VER_CHUNITHM_NEW,
+            "v2_05": ChuniConstants.VER_CHUNITHM_NEW_PLUS,
+            # Chunithm SUN, ignore for now
+            "v2_10": ChuniConstants.VER_CHUNITHM_NEW_PLUS + 1
+        }
+
+        for root, dirs, files in os.walk(base_dir):
+            for dir in dirs:
+                if os.path.exists(f"{root}/{dir}/Card.xml"):
+                    with open(f"{root}/{dir}/Card.xml", "r", encoding="utf-8") as f:
+                        troot = ET.fromstring(f.read())
+
+                        card_id = int(troot.find("name").find("id").text)
+
+                        chara_name = troot.find("chuniCharaName").find("str").text
+                        chara_id = troot.find("chuniCharaName").find("id").text
+                        version = version_ids[
+                            troot.find("netOpenName").find("str").text[:5]
+                        ]
+                        present_name = troot.find("chuniPresentName").find("str").text
+                        rarity = int(troot.find("rareType").text)
+                        label = int(troot.find("labelType").text)
+                        dif = int(troot.find("difType").text)
+                        miss = int(troot.find("miss").text)
+                        combo = int(troot.find("combo").text)
+                        chain = int(troot.find("chain").text)
+                        skill_name = troot.find("skillName").text
+
+                        self.chuni_data.static.put_card(
+                            version,
+                            card_id,
+                            charaName=chara_name,
+                            charaId=chara_id,
+                            presentName=present_name,
+                            rarity=rarity,
+                            labelType=label,
+                            difType=dif,
+                            miss=miss,
+                            combo=combo,
+                            chain=chain,
+                            skillName=skill_name,
+                        )
+
+                        self.logger.info(f"Added chuni card {card_id}")
+
+    def read_chuni_gacha(self, base_dir: str) -> None:
+        self.logger.info(f"Reading gachas from {base_dir}...")
+
+        version_ids = {
+            "v2_00": ChuniConstants.VER_CHUNITHM_NEW,
+            "v2_05": ChuniConstants.VER_CHUNITHM_NEW_PLUS,
+            # Chunithm SUN, ignore for now
+            "v2_10": ChuniConstants.VER_CHUNITHM_NEW_PLUS + 1,
+        }
+
+        for root, dirs, files in os.walk(base_dir):
+            for dir in dirs:
+                if os.path.exists(f"{root}/{dir}/Gacha.xml"):
+                    with open(f"{root}/{dir}/Gacha.xml", "r", encoding="utf-8") as f:
+                        troot = ET.fromstring(f.read())
+
+                        name = troot.find("gachaName").text
+                        gacha_id = int(troot.find("name").find("id").text)
+
+                        version = version_ids[
+                            troot.find("netOpenName").find("str").text[:5]
+                        ]
+                        ceiling_cnt = int(troot.find("ceilingNum").text)
+                        gacha_type = int(troot.find("gachaType").text)
+                        is_ceiling = (
+                            True if troot.find("ceilingType").text == "1" else False
+                        )
+
+                        self.chuni_data.static.put_gacha(
+                            version,
+                            gacha_id,
+                            name,
+                            type=gacha_type,
+                            isCeiling=is_ceiling,
+                            ceilingCnt=ceiling_cnt,
+                        )
+
+                        self.logger.info(f"Added chuni gacha {gacha_id}")
+
+                        for gacha_card in troot.find("infos").iter("GachaCardDataInfo"):
+                            # get the card ID from the id element
+                            card_id = gacha_card.find("cardName").find("id").text
+
+                            # get the weight from the weight element
+                            weight = int(gacha_card.find("weight").text)
+
+                            # get the pickup flag from the pickup element
+                            is_pickup = (
+                                True if gacha_card.find("pickup").text == "1" else False
+                            )
+
+                            self.chuni_data.static.put_gacha_card(
+                                gacha_id,
+                                card_id,
+                                weight=weight,
+                                rarity=2,
+                                isPickup=is_pickup,
+                            )
+
+                            self.logger.info(
+                                f"Added chuni card {card_id} to gacha {gacha_id}"
+                            )
+
+    def read_mai2_card(self, base_dir: str) -> None:
+        self.logger.info(f"Reading cards from {base_dir}...")
+
+        version_ids = {
+            "1.00": Mai2Constants.VER_MAIMAI_DX,
+            "1.05": Mai2Constants.VER_MAIMAI_DX_PLUS,
+            "1.09": Mai2Constants.VER_MAIMAI_DX_PLUS,
+            "1.10": Mai2Constants.VER_MAIMAI_DX_SPLASH,
+            "1.15": Mai2Constants.VER_MAIMAI_DX_SPLASH_PLUS,
+            "1.20": Mai2Constants.VER_MAIMAI_DX_UNIVERSE,
+            "1.25": Mai2Constants.VER_MAIMAI_DX_UNIVERSE_PLUS,
+        }
+
+        for root, dirs, files in os.walk(base_dir):
+            for dir in dirs:
+                if os.path.exists(f"{root}/{dir}/Card.xml"):
+                    with open(f"{root}/{dir}/Card.xml", "r", encoding="utf-8") as f:
+                        troot = ET.fromstring(f.read())
+
+                        name = troot.find("name").find("str").text
+                        card_id = int(troot.find("name").find("id").text)
+
+                        version = version_ids[
+                            troot.find("enableVersion").find("str").text
+                        ]
+
+                        enabled = (
+                            True if troot.find("disable").text == "false" else False
+                        )
+
+                        self.mai2_data.static.put_card(
+                            version, card_id, name, enabled=enabled
+                        )
+                        self.logger.info(f"Added mai2 card {card_id}")
+
     def read_ongeki_gacha_csv(self, file_path: str) -> None:
         self.logger.info(f"Reading gachas from {file_path}...")

@ -76,7 +246,7 @@ class CardMakerReader(BaseReader):
                    maxSelectPoint=row["maxSelectPoint"],
                )

-                self.logger.info(f"Added gacha {row['gachaId']}")
+                self.logger.info(f"Added ongeki gacha {row['gachaId']}")

     def read_ongeki_gacha_card_csv(self, file_path: str) -> None:
         self.logger.info(f"Reading gacha cards from {file_path}...")
@ -93,7 +263,7 @@ class CardMakerReader(BaseReader):
                    isSelect=True if row["isSelect"] == "1" else False,
                )

-                self.logger.info(f"Added card {row['cardId']} to gacha")
+                self.logger.info(f"Added ongeki card {row['cardId']} to gacha")

     def read_ongeki_gacha(self, base_dir: str) -> None:
         self.logger.info(f"Reading gachas from {base_dir}...")
@ -152,4 +322,4 @@ class CardMakerReader(BaseReader):
                            isCeiling=is_ceiling,
                            maxSelectPoint=max_select_point,
                        )
-                        self.logger.info(f"Added gacha {gacha_id}")
+                        self.logger.info(f"Added ongeki gacha {gacha_id}")
1  titles/cm/schema/__init__.py  Normal file
@ -0,0 +1 @@
+__all__ = []
@ -98,11 +98,11 @@ class CxbServlet(resource.Resource):
             ).listen(server.Site(CxbServlet(self.core_cfg, self.cfg_dir)))

             self.logger.info(
-                f"Crossbeats title server ready on port {self.game_cfg.server.port} & {self.game_cfg.server.port_secure}"
+                f"Ready on ports {self.game_cfg.server.port} & {self.game_cfg.server.port_secure}"
             )
         else:
             self.logger.info(
-                f"Crossbeats title server ready on port {self.game_cfg.server.port}"
+                f"Ready on port {self.game_cfg.server.port}"
             )

     def render_POST(self, request: Request):
@ -7,4 +7,4 @@ index = Mai2Servlet
 database = Mai2Data
 reader = Mai2Reader
 game_codes = [Mai2Constants.GAME_CODE]
-current_schema_version = 2
+current_schema_version = 3
@ -202,6 +202,16 @@ class Mai2Base:
                 for act in v:
                     self.data.profile.put_profile_activity(user_id, act)

+        if "userChargeList" in upsert and len(upsert["userChargeList"]) > 0:
+            for charge in upsert["userChargeList"]:
+                self.data.item.put_charge(
+                    user_id,
+                    charge["chargeId"],
+                    charge["stock"],
+                    datetime.strptime(charge["purchaseDate"], "%Y-%m-%d %H:%M:%S"),
+                    datetime.strptime(charge["validDate"], "%Y-%m-%d %H:%M:%S")
+                )
+
         if upsert["isNewCharacterList"] and int(upsert["isNewCharacterList"]) > 0:
             for char in upsert["userCharacterList"]:
                 self.data.item.put_character(
@ -299,10 +309,67 @@ class Mai2Base:
         return {"userId": data["userId"], "userOption": options_dict}

     def handle_get_user_card_api_request(self, data: Dict) -> Dict:
-        return {"userId": data["userId"], "nextIndex": 0, "userCardList": []}
+        user_cards = self.data.item.get_cards(data["userId"])
+        if user_cards is None:
+            return {
+                "userId": data["userId"],
+                "nextIndex": 0,
+                "userCardList": []
+            }
+
+        max_ct = data["maxCount"]
+        next_idx = data["nextIndex"]
+        start_idx = next_idx
+        end_idx = max_ct + start_idx
+
+        if len(user_cards[start_idx:]) > max_ct:
+            next_idx += max_ct
+        else:
+            next_idx = 0
+
+        card_list = []
+        for card in user_cards:
+            tmp = card._asdict()
+            tmp.pop("id")
+            tmp.pop("user")
+            tmp["startDate"] = datetime.strftime(
+                tmp["startDate"], "%Y-%m-%d %H:%M:%S")
+            tmp["endDate"] = datetime.strftime(
+                tmp["endDate"], "%Y-%m-%d %H:%M:%S")
+            card_list.append(tmp)
+
+        return {
+            "userId": data["userId"],
+            "nextIndex": next_idx,
+            "userCardList": card_list[start_idx:end_idx]
+        }

     def handle_get_user_charge_api_request(self, data: Dict) -> Dict:
-        return {"userId": data["userId"], "length": 0, "userChargeList": []}
+        user_charges = self.data.item.get_charges(data["userId"])
+        if user_charges is None:
+            return {
+                "userId": data["userId"],
+                "length": 0,
+                "userChargeList": []
+            }
+
+        user_charge_list = []
+        for charge in user_charges:
+            tmp = charge._asdict()
+            tmp.pop("id")
+            tmp.pop("user")
+            tmp["purchaseDate"] = datetime.strftime(
+                tmp["purchaseDate"], "%Y-%m-%d %H:%M:%S")
+            tmp["validDate"] = datetime.strftime(
+                tmp["validDate"], "%Y-%m-%d %H:%M:%S")
+
+            user_charge_list.append(tmp)
+
+        return {
+            "userId": data["userId"],
+            "length": len(user_charge_list),
+            "userChargeList": user_charge_list
+        }

     def handle_get_user_item_api_request(self, data: Dict) -> Dict:
         kind = int(data["nextIndex"] / 10000000000)
@ -313,15 +380,13 @@ class Mai2Base:

         for x in range(next_idx, data["maxCount"]):
             try:
-                user_item_list.append(
-                    {
-                        "item_kind": user_items[x]["item_kind"],
-                        "item_id": user_items[x]["item_id"],
-                        "stock": user_items[x]["stock"],
-                        "isValid": user_items[x]["is_valid"],
-                    }
-                )
-            except:
+                user_item_list.append({
+                    "itemKind": user_items[x]["itemKind"],
+                    "itemId": user_items[x]["itemId"],
+                    "stock": user_items[x]["stock"],
+                    "isValid": user_items[x]["isValid"]
+                })
+            except IndexError:
                 break

         if len(user_item_list) == data["maxCount"]:
@ -332,21 +397,18 @@ class Mai2Base:
             "userId": data["userId"],
             "nextIndex": next_idx,
             "itemKind": kind,
-            "userItemList": user_item_list,
+            "userItemList": user_item_list
         }

     def handle_get_user_character_api_request(self, data: Dict) -> Dict:
         characters = self.data.item.get_characters(data["userId"])

         chara_list = []
         for chara in characters:
-            chara_list.append(
-                {
-                    "characterId": chara["character_id"],
-                    "level": chara["level"],
-                    "awakening": chara["awakening"],
-                    "useCount": chara["use_count"],
-                }
-            )
+            tmp = chara._asdict()
+            tmp.pop("id")
+            tmp.pop("user")
+            chara_list.append(tmp)

         return {"userId": data["userId"], "userCharacterList": chara_list}

@ -417,10 +479,21 @@ class Mai2Base:
             tmp.pop("user")
             mlst.append(tmp)

-        return {"userActivity": {"playList": plst, "musicList": mlst}}
+        return {
+            "userActivity": {
+                "playList": plst,
+                "musicList": mlst
+            }
+        }

     def handle_get_user_course_api_request(self, data: Dict) -> Dict:
         user_courses = self.data.score.get_courses(data["userId"])
+        if user_courses is None:
+            return {
+                "userId": data["userId"],
+                "nextIndex": 0,
+                "userCourseList": []
+            }

         course_list = []
         for course in user_courses:
@ -429,7 +502,11 @@ class Mai2Base:
             tmp.pop("id")
             course_list.append(tmp)

-        return {"userId": data["userId"], "nextIndex": 0, "userCourseList": course_list}
+        return {
+            "userId": data["userId"],
+            "nextIndex": 0,
+            "userCourseList": course_list
+        }

     def handle_get_user_portrait_api_request(self, data: Dict) -> Dict:
         # No support for custom pfps
@ -540,18 +617,11 @@ class Mai2Base:

         if songs is not None:
             for song in songs:
-                music_detail_list.append(
-                    {
-                        "musicId": song["song_id"],
-                        "level": song["chart_id"],
-                        "playCount": song["play_count"],
-                        "achievement": song["achievement"],
-                        "comboStatus": song["combo_status"],
+                tmp = song._asdict()
+                tmp.pop("id")
+                tmp.pop("user")
+                music_detail_list.append(tmp)
|
|
||||||
"syncStatus": song["sync_status"],
|
|
||||||
"deluxscoreMax": song["dx_score"],
|
|
||||||
"scoreRank": song["score_rank"],
|
|
||||||
}
|
|
||||||
)
|
|
||||||
if len(music_detail_list) == data["maxCount"]:
|
if len(music_detail_list) == data["maxCount"]:
|
||||||
next_index = data["maxCount"] + data["nextIndex"]
|
next_index = data["maxCount"] + data["nextIndex"]
|
||||||
break
|
break
|
||||||
@ -559,5 +629,5 @@ class Mai2Base:
|
|||||||
return {
|
return {
|
||||||
"userId": data["userId"],
|
"userId": data["userId"],
|
||||||
"nextIndex": next_index,
|
"nextIndex": next_index,
|
||||||
"userMusicList": [{"userMusicDetailList": music_detail_list}],
|
"userMusicList": [{"userMusicDetailList": music_detail_list}]
|
||||||
}
|
}
|
||||||
|
@@ -89,7 +89,7 @@ class Mai2Servlet:
         )

     def render_POST(self, request: Request, version: int, url_path: str) -> bytes:
-        if url_path.lower() == "/ping":
+        if url_path.lower() == "ping":
             return zlib.compress(b'{"returnCode": "1"}')

         req_raw = request.content.getvalue()
@@ -1,5 +1,6 @@
 from core.data.schema import BaseData, metadata

+from datetime import datetime
 from typing import Optional, Dict, List
 from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
 from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
@@ -17,11 +18,11 @@ character = Table(
         ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
         nullable=False,
     ),
-    Column("character_id", Integer, nullable=False),
+    Column("characterId", Integer, nullable=False),
     Column("level", Integer, nullable=False, server_default="1"),
     Column("awakening", Integer, nullable=False, server_default="0"),
-    Column("use_count", Integer, nullable=False, server_default="0"),
-    UniqueConstraint("user", "character_id", name="mai2_item_character_uk"),
+    Column("useCount", Integer, nullable=False, server_default="0"),
+    UniqueConstraint("user", "characterId", name="mai2_item_character_uk"),
     mysql_charset="utf8mb4",
 )

@@ -34,13 +35,13 @@ card = Table(
         ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
         nullable=False,
     ),
-    Column("card_kind", Integer, nullable=False),
-    Column("card_id", Integer, nullable=False),
-    Column("chara_id", Integer, nullable=False),
-    Column("map_id", Integer, nullable=False),
-    Column("start_date", String(255), nullable=False),
-    Column("end_date", String(255), nullable=False),
-    UniqueConstraint("user", "card_kind", "card_id", name="mai2_item_card_uk"),
+    Column("cardId", Integer, nullable=False),
+    Column("cardTypeId", Integer, nullable=False),
+    Column("charaId", Integer, nullable=False),
+    Column("mapId", Integer, nullable=False),
+    Column("startDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
+    Column("endDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
+    UniqueConstraint("user", "cardId", "cardTypeId", name="mai2_item_card_uk"),
     mysql_charset="utf8mb4",
 )

@@ -53,11 +54,11 @@ item = Table(
         ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
         nullable=False,
     ),
-    Column("item_kind", Integer, nullable=False),
-    Column("item_id", Integer, nullable=False),
+    Column("itemId", Integer, nullable=False),
+    Column("itemKind", Integer, nullable=False),
     Column("stock", Integer, nullable=False, server_default="1"),
-    Column("is_valid", Boolean, nullable=False, server_default="1"),
-    UniqueConstraint("user", "item_kind", "item_id", name="mai2_item_item_uk"),
+    Column("isValid", Boolean, nullable=False, server_default="1"),
+    UniqueConstraint("user", "itemId", "itemKind", name="mai2_item_item_uk"),
     mysql_charset="utf8mb4",
 )

@@ -139,11 +140,44 @@ charge = Table(
         ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
         nullable=False,
     ),
-    Column("charge_id", Integer, nullable=False),
+    Column("chargeId", Integer, nullable=False),
     Column("stock", Integer, nullable=False),
-    Column("purchase_date", String(255), nullable=False),
-    Column("valid_date", String(255), nullable=False),
-    UniqueConstraint("user", "charge_id", name="mai2_item_charge_uk"),
+    Column("purchaseDate", String(255), nullable=False),
+    Column("validDate", String(255), nullable=False),
+    UniqueConstraint("user", "chargeId", name="mai2_item_charge_uk"),
+    mysql_charset="utf8mb4",
+)
+
+print_detail = Table(
+    "mai2_item_print_detail",
+    metadata,
+    Column("id", Integer, primary_key=True, nullable=False),
+    Column(
+        "user",
+        ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
+        nullable=False,
+    ),
+    Column("orderId", Integer),
+    Column("printNumber", Integer),
+    Column("printDate", TIMESTAMP, nullable=False, server_default=func.now()),
+    Column("serialId", String(20), nullable=False),
+    Column("placeId", Integer, nullable=False),
+    Column("clientId", String(11), nullable=False),
+    Column("printerSerialId", String(20), nullable=False),
+    Column("cardRomVersion", Integer),
+    Column("isHolograph", Boolean, server_default="1"),
+    Column("printOption1", Boolean, server_default="0"),
+    Column("printOption2", Boolean, server_default="0"),
+    Column("printOption3", Boolean, server_default="0"),
+    Column("printOption4", Boolean, server_default="0"),
+    Column("printOption5", Boolean, server_default="0"),
+    Column("printOption6", Boolean, server_default="0"),
+    Column("printOption7", Boolean, server_default="0"),
+    Column("printOption8", Boolean, server_default="0"),
+    Column("printOption9", Boolean, server_default="0"),
+    Column("printOption10", Boolean, server_default="0"),
+    Column("created", String(255), server_default=""),
+    UniqueConstraint("user", "serialId", name="mai2_item_print_detail_uk"),
     mysql_charset="utf8mb4",
 )

@@ -154,15 +188,15 @@ class Mai2ItemData(BaseData):
     ) -> None:
         sql = insert(item).values(
             user=user_id,
-            item_kind=item_kind,
-            item_id=item_id,
+            itemKind=item_kind,
+            itemId=item_id,
             stock=stock,
-            is_valid=is_valid,
+            isValid=is_valid,
         )

         conflict = sql.on_duplicate_key_update(
             stock=stock,
-            is_valid=is_valid,
+            isValid=is_valid,
         )

         result = self.execute(conflict)
@@ -178,7 +212,7 @@ class Mai2ItemData(BaseData):
             sql = item.select(item.c.user == user_id)
         else:
             sql = item.select(
-                and_(item.c.user == user_id, item.c.item_kind == item_kind)
+                and_(item.c.user == user_id, item.c.itemKind == item_kind)
             )

         result = self.execute(sql)
@@ -190,8 +224,8 @@ class Mai2ItemData(BaseData):
         sql = item.select(
             and_(
                 item.c.user == user_id,
-                item.c.item_kind == item_kind,
-                item.c.item_id == item_id,
+                item.c.itemKind == item_kind,
+                item.c.itemId == item_id,
             )
         )

@@ -382,3 +416,93 @@ class Mai2ItemData(BaseData):
         if result is None:
             return None
         return result.fetchall()
+
+    def put_card(
+        self,
+        user_id: int,
+        card_type_id: int,
+        card_kind: int,
+        chara_id: int,
+        map_id: int,
+    ) -> Optional[Row]:
+        sql = insert(card).values(
+            user=user_id,
+            cardId=card_type_id,
+            cardTypeId=card_kind,
+            charaId=chara_id,
+            mapId=map_id,
+        )
+
+        conflict = sql.on_duplicate_key_update(charaId=chara_id, mapId=map_id)
+
+        result = self.execute(conflict)
+        if result is None:
+            self.logger.warn(
+                f"put_card: failed to insert card! user_id: {user_id}, kind: {kind}"
+            )
+            return None
+        return result.lastrowid
+
+    def get_cards(self, user_id: int, kind: int = None) -> Optional[Row]:
+        if kind is None:
+            sql = card.select(card.c.user == user_id)
+        else:
+            sql = card.select(and_(card.c.user == user_id, card.c.cardKind == kind))
+
+        result = self.execute(sql)
+        if result is None:
+            return None
+        return result.fetchall()
+
+    def put_charge(
+        self,
+        user_id: int,
+        charge_id: int,
+        stock: int,
+        purchase_date: datetime,
+        valid_date: datetime,
+    ) -> Optional[Row]:
+        sql = insert(charge).values(
+            user=user_id,
+            chargeId=charge_id,
+            stock=stock,
+            purchaseDate=purchase_date,
+            validDate=valid_date,
+        )
+
+        conflict = sql.on_duplicate_key_update(
+            stock=stock, purchaseDate=purchase_date, validDate=valid_date
+        )
+
+        result = self.execute(conflict)
+        if result is None:
+            self.logger.warn(
+                f"put_card: failed to insert charge! user_id: {user_id}, chargeId: {charge_id}"
+            )
+            return None
+        return result.lastrowid
+
+    def get_charges(self, user_id: int) -> Optional[Row]:
+        sql = charge.select(charge.c.user == user_id)
+
+        result = self.execute(sql)
+        if result is None:
+            return None
+        return result.fetchall()
+
+    def put_user_print_detail(
+        self, aime_id: int, serial_id: str, user_print_data: Dict
+    ) -> Optional[int]:
+        sql = insert(print_detail).values(
+            user=aime_id, serialId=serial_id, **user_print_data
+        )
+
+        conflict = sql.on_duplicate_key_update(**user_print_data)
+        result = self.execute(conflict)
+
+        if result is None:
+            self.logger.warn(
+                f"put_user_print_detail: Failed to insert! aime_id: {aime_id}"
+            )
+            return None
+        return result.lastrowid
@@ -448,7 +448,9 @@ class Mai2ProfileData(BaseData):
             return None
         return result.lastrowid

-    def get_profile_activity(self, user_id: int, kind: int = None) -> Optional[Row]:
+    def get_profile_activity(
+        self, user_id: int, kind: int = None
+    ) -> Optional[List[Row]]:
         sql = activity.select(
             and_(
                 activity.c.user == user_id,
@@ -459,4 +461,4 @@ class Mai2ProfileData(BaseData):
         result = self.execute(sql)
         if result is None:
             return None
-        return result.fetchone()
+        return result.fetchall()
@@ -242,7 +242,7 @@ class Mai2ScoreData(BaseData):
         return result.lastrowid

     def get_courses(self, user_id: int) -> Optional[List[Row]]:
-        sql = course.select(best_score.c.user == user_id)
+        sql = course.select(course.c.user == user_id)

         result = self.execute(sql)
         if result is None:
@@ -53,6 +53,22 @@ ticket = Table(
     mysql_charset="utf8mb4",
 )

+cards = Table(
+    "mai2_static_cards",
+    metadata,
+    Column("id", Integer, primary_key=True, nullable=False),
+    Column("version", Integer, nullable=False),
+    Column("cardId", Integer, nullable=False),
+    Column("cardName", String(255), nullable=False),
+    Column("startDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
+    Column("endDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
+    Column("noticeStartDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
+    Column("noticeEndDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
+    Column("enabled", Boolean, server_default="1"),
+    UniqueConstraint("version", "cardId", "cardName", name="mai2_static_cards_uk"),
+    mysql_charset="utf8mb4",
+)
+

 class Mai2StaticData(BaseData):
     def put_game_event(
@@ -166,6 +182,8 @@ class Mai2StaticData(BaseData):

         conflict = sql.on_duplicate_key_update(price=ticket_price)

+        conflict = sql.on_duplicate_key_update(price=ticket_price)
+
         result = self.execute(conflict)
         if result is None:
             self.logger.warn(f"Failed to insert charge {ticket_id} type {ticket_type}")
@@ -208,3 +226,24 @@ class Mai2StaticData(BaseData):
         if result is None:
             return None
         return result.fetchone()
+
+    def put_card(self, version: int, card_id: int, card_name: str, **card_data) -> int:
+        sql = insert(cards).values(
+            version=version, cardId=card_id, cardName=card_name, **card_data
+        )
+
+        conflict = sql.on_duplicate_key_update(**card_data)
+
+        result = self.execute(conflict)
+        if result is None:
+            self.logger.warn(f"Failed to insert card {card_id}")
+            return None
+        return result.lastrowid
+
+    def get_enabled_cards(self, version: int) -> Optional[List[Row]]:
+        sql = cards.select(and_(cards.c.version == version, cards.c.enabled == True))
+
+        result = self.execute(sql)
+        if result is None:
+            return None
+        return result.fetchall()
@@ -1,4 +1,5 @@
 from typing import Any, List, Dict
+from random import randint
 from datetime import datetime, timedelta
 import pytz
 import json
@@ -13,3 +14,176 @@ class Mai2Universe(Mai2Base):
     def __init__(self, cfg: CoreConfig, game_cfg: Mai2Config) -> None:
         super().__init__(cfg, game_cfg)
         self.version = Mai2Constants.VER_MAIMAI_DX_UNIVERSE
+
+    def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
+        p = self.data.profile.get_profile_detail(data["userId"], self.version)
+        if p is None:
+            return {}
+
+        return {
+            "userName": p["userName"],
+            "rating": p["playerRating"],
+            # hardcode lastDataVersion for CardMaker 1.34
+            "lastDataVersion": "1.20.00",
+            "isLogin": False,
+            "isExistSellingCard": False,
+        }
+
+    def handle_cm_get_user_data_api_request(self, data: Dict) -> Dict:
+        # user already exists, because the preview checks that already
+        p = self.data.profile.get_profile_detail(data["userId"], self.version)
+
+        cards = self.data.card.get_user_cards(data["userId"])
+        if cards is None or len(cards) == 0:
+            # This should never happen
+            self.logger.error(
+                f"handle_get_user_data_api_request: Internal error - No cards found for user id {data['userId']}"
+            )
+            return {}
+
+        # get the dict representation of the row so we can modify values
+        user_data = p._asdict()
+
+        # remove the values the game doesn't want
+        user_data.pop("id")
+        user_data.pop("user")
+        user_data.pop("version")
+
+        return {"userId": data["userId"], "userData": user_data}
+
+    def handle_cm_login_api_request(self, data: Dict) -> Dict:
+        return {"returnCode": 1}
+
+    def handle_cm_logout_api_request(self, data: Dict) -> Dict:
+        return {"returnCode": 1}
+
+    def handle_cm_get_selling_card_api_request(self, data: Dict) -> Dict:
+        selling_cards = self.data.static.get_enabled_cards(self.version)
+        if selling_cards is None:
+            return {"length": 0, "sellingCardList": []}
+
+        selling_card_list = []
+        for card in selling_cards:
+            tmp = card._asdict()
+            tmp.pop("id")
+            tmp.pop("version")
+            tmp.pop("cardName")
+            tmp.pop("enabled")
+
+            tmp["startDate"] = datetime.strftime(tmp["startDate"], "%Y-%m-%d %H:%M:%S")
+            tmp["endDate"] = datetime.strftime(tmp["endDate"], "%Y-%m-%d %H:%M:%S")
+            tmp["noticeStartDate"] = datetime.strftime(
+                tmp["noticeStartDate"], "%Y-%m-%d %H:%M:%S"
+            )
+            tmp["noticeEndDate"] = datetime.strftime(
+                tmp["noticeEndDate"], "%Y-%m-%d %H:%M:%S"
+            )
+
+            selling_card_list.append(tmp)
+
+        return {"length": len(selling_card_list), "sellingCardList": selling_card_list}
+
+    def handle_cm_get_user_card_api_request(self, data: Dict) -> Dict:
+        user_cards = self.data.item.get_cards(data["userId"])
+        if user_cards is None:
+            return {"returnCode": 1, "length": 0, "nextIndex": 0, "userCardList": []}
+
+        max_ct = data["maxCount"]
+        next_idx = data["nextIndex"]
+        start_idx = next_idx
+        end_idx = max_ct + start_idx
+
+        if len(user_cards[start_idx:]) > max_ct:
+            next_idx += max_ct
+        else:
+            next_idx = 0
+
+        card_list = []
+        for card in user_cards:
+            tmp = card._asdict()
+            tmp.pop("id")
+            tmp.pop("user")
+
+            tmp["startDate"] = datetime.strftime(tmp["startDate"], "%Y-%m-%d %H:%M:%S")
+            tmp["endDate"] = datetime.strftime(tmp["endDate"], "%Y-%m-%d %H:%M:%S")
+            card_list.append(tmp)
+
+        return {
+            "returnCode": 1,
+            "length": len(card_list[start_idx:end_idx]),
+            "nextIndex": next_idx,
+            "userCardList": card_list[start_idx:end_idx],
+        }
+
+    def handle_cm_get_user_item_api_request(self, data: Dict) -> Dict:
+        super().handle_get_user_item_api_request(data)
+
+    def handle_cm_get_user_character_api_request(self, data: Dict) -> Dict:
+        characters = self.data.item.get_characters(data["userId"])

+        chara_list = []
+        for chara in characters:
+            chara_list.append(
+                {
+                    "characterId": chara["characterId"],
+                    # no clue why those values are even needed
+                    "point": 0,
+                    "count": 0,
+                    "level": chara["level"],
+                    "nextAwake": 0,
+                    "nextAwakePercent": 0,
+                    "favorite": False,
+                    "awakening": chara["awakening"],
+                    "useCount": chara["useCount"],
+                }
+            )
+
+        return {
+            "returnCode": 1,
+            "length": len(chara_list),
+            "userCharacterList": chara_list,
+        }
+
+    def handle_cm_get_user_card_print_error_api_request(self, data: Dict) -> Dict:
+        return {"length": 0, "userPrintDetailList": []}
+
+    def handle_cm_upsert_user_print_api_request(self, data: Dict) -> Dict:
+        user_id = data["userId"]
+        upsert = data["userPrintDetail"]
+
+        # set a random card serial number
+        serial_id = "".join([str(randint(0, 9)) for _ in range(20)])
+
+        user_card = upsert["userCard"]
+        self.data.item.put_card(
+            user_id,
+            user_card["cardId"],
+            user_card["cardTypeId"],
+            user_card["charaId"],
+            user_card["mapId"],
+        )
+
+        # properly format userPrintDetail for the database
+        upsert.pop("userCard")
+        upsert.pop("serialId")
+        upsert["printDate"] = datetime.strptime(upsert["printDate"], "%Y-%m-%d")
+
+        self.data.item.put_user_print_detail(user_id, serial_id, upsert)
+
+        return {
+            "returnCode": 1,
+            "orderId": 0,
+            "serialId": serial_id,
+            "startDate": "2018-01-01 00:00:00",
+            "endDate": "2038-01-01 00:00:00",
+        }
+
+    def handle_cm_upsert_user_printlog_api_request(self, data: Dict) -> Dict:
+        return {
+            "returnCode": 1,
+            "orderId": 0,
+            "serialId": data["userPrintlog"]["serialId"],
+        }
+
+    def handle_cm_upsert_buy_card_api_request(self, data: Dict) -> Dict:
+        return {"returnCode": 1}
@@ -4,12 +4,19 @@ import pytz
 import json

 from core.config import CoreConfig
-from titles.mai2.base import Mai2Base
+from titles.mai2.universe import Mai2Universe
 from titles.mai2.const import Mai2Constants
 from titles.mai2.config import Mai2Config


-class Mai2UniversePlus(Mai2Base):
+class Mai2UniversePlus(Mai2Universe):
     def __init__(self, cfg: CoreConfig, game_cfg: Mai2Config) -> None:
         super().__init__(cfg, game_cfg)
         self.version = Mai2Constants.VER_MAIMAI_DX_UNIVERSE_PLUS
+
+    def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
+        user_data = super().handle_cm_get_user_preview_api_request(data)
+
+        # hardcode lastDataVersion for CardMaker 1.35
+        user_data["lastDataVersion"] = "1.25.00"
+        return user_data
@@ -452,7 +452,8 @@ class OngekiBase:
             tmp.pop("id")
             items.append(tmp)

-        xout = kind * 10000000000 + (data["nextIndex"] % 10000000000) + len(items)
+        xout = kind * 10000000000 + \
+            (data["nextIndex"] % 10000000000) + len(items)

         if len(items) < data["maxCount"] or data["maxCount"] == 0:
             nextIndex = 0
@@ -851,7 +852,8 @@ class OngekiBase:
         )

         if "userOption" in upsert and len(upsert["userOption"]) > 0:
-            self.data.profile.put_profile_options(user_id, upsert["userOption"][0])
+            self.data.profile.put_profile_options(
+                user_id, upsert["userOption"][0])

         if "userPlaylogList" in upsert:
             for playlog in upsert["userPlaylogList"]:
@@ -93,7 +93,12 @@ class OngekiBright(OngekiBase):
     def handle_cm_get_user_character_api_request(self, data: Dict) -> Dict:
         user_characters = self.data.item.get_characters(data["userId"])
         if user_characters is None:
-            return {}
+            return {
+                "userId": data["userId"],
+                "length": 0,
+                "nextIndex": 0,
+                "userCharacterList": []
+            }

         max_ct = data["maxCount"]
         next_idx = data["nextIndex"]
@@ -543,7 +548,7 @@ class OngekiBright(OngekiBase):
             "returnCode": 1,
             "orderId": 0,
             "serialId": "11111111111111111111",
-            "apiName": "CMUpsertUserPrintPlaylogApi",
+            "apiName": "CMUpsertUserPrintPlaylogApi"
         }

     def handle_cm_upsert_user_printlog_api_request(self, data: Dict) -> Dict:
@@ -551,7 +556,7 @@ class OngekiBright(OngekiBase):
             "returnCode": 1,
             "orderId": 0,
             "serialId": "11111111111111111111",
-            "apiName": "CMUpsertUserPrintlogApi",
+            "apiName": "CMUpsertUserPrintlogApi"
         }

     def handle_cm_upsert_user_print_api_request(self, data: Dict) -> Dict:
@@ -142,8 +142,8 @@ class OngekiBrightMemory(OngekiBright):
         user_data = super().handle_cm_get_user_data_api_request(data)

         # hardcode Card Maker version for now
-        # Card Maker 1.34.00 = 1.30.01
-        # Card Maker 1.36.00 = 1.35.04
-        user_data["userData"]["compatibleCmVersion"] = "1.35.04"
+        # Card Maker 1.34 = 1.30.01
+        # Card Maker 1.35 = 1.35.03
+        user_data["userData"]["compatibleCmVersion"] = "1.35.03"

         return user_data
@@ -3,7 +3,8 @@ import json
 import inflection
 import yaml
 import string
-import logging, coloredlogs
+import logging
+import coloredlogs
 import zlib
 from logging.handlers import TimedRotatingFileHandler
 from os import path
@@ -93,7 +94,7 @@ class OngekiServlet:
         )

     def render_POST(self, request: Request, version: int, url_path: str) -> bytes:
-        if url_path.lower() == "/ping":
+        if url_path.lower() == "ping":
             return zlib.compress(b'{"returnCode": 1}')

         req_raw = request.content.getvalue()
@@ -706,7 +706,7 @@ class OngekiItemData(BaseData):
         )

         conflict = sql.on_duplicate_key_update(
-            user=aime_id, serialId=serial_id, **user_print_data
+            user=aime_id, **user_print_data
         )
         result = self.execute(conflict)

@@ -1,6 +1,6 @@
 from datetime import datetime, timedelta
-import json
-from typing import Any
+import json, logging
+from typing import Any, Dict

 from core.config import CoreConfig
 from titles.pokken.config import PokkenConfig
@@ -12,6 +12,7 @@ class PokkenBase:
         self.core_cfg = core_cfg
         self.game_cfg = game_cfg
         self.version = 0
+        self.logger = logging.getLogger("pokken")

     def handle_noop(self, request: Any) -> bytes:
         res = jackal_pb2.Response()
@@ -20,25 +21,26 @@ class PokkenBase:

         return res.SerializeToString()

-    def handle_ping(self, request: jackal_pb2.PingRequestData) -> bytes:
+    def handle_ping(self, request: jackal_pb2.Request) -> bytes:
         res = jackal_pb2.Response()
         res.result = 1
         res.type = jackal_pb2.MessageType.PING

         return res.SerializeToString()

-    def handle_register_pcb(self, request: jackal_pb2.RegisterPcbRequestData) -> bytes:
+    def handle_register_pcb(self, request: jackal_pb2.Request) -> bytes:
         res = jackal_pb2.Response()
         res.result = 1
         res.type = jackal_pb2.MessageType.REGISTER_PCB
+        self.logger.info(f"Register PCB {request.register_pcb.pcb_id}")

         regist_pcb = jackal_pb2.RegisterPcbResponseData()
-        regist_pcb.server_time = int(datetime.now().timestamp() / 1000)
+        regist_pcb.server_time = int(datetime.now().timestamp())
         biwa_setting = {
             "MatchingServer": {
                 "host": f"https://{self.game_cfg.server.hostname}",
-                "port": self.game_cfg.server.port_matching,
-                "url": "/matching",
+                "port": self.game_cfg.server.port,
+                "url": "/SDAK/100/matching",
             },
             "StunServer": {
                 "addr": self.game_cfg.server.hostname,
@@ -60,7 +62,7 @@ class PokkenBase:

         return res.SerializeToString()

-    def handle_save_ads(self, request: jackal_pb2.SaveAdsRequestData) -> bytes:
+    def handle_save_ads(self, request: jackal_pb2.Request) -> bytes:
         res = jackal_pb2.Response()
         res.result = 1
         res.type = jackal_pb2.MessageType.SAVE_ADS
@@ -68,7 +70,7 @@ class PokkenBase:
         return res.SerializeToString()

     def handle_save_client_log(
-        self, request: jackal_pb2.SaveClientLogRequestData
+        self, request: jackal_pb2.Request
     ) -> bytes:
         res = jackal_pb2.Response()
         res.result = 1
@@ -77,7 +79,7 @@ class PokkenBase:
         return res.SerializeToString()

     def handle_check_diagnosis(
-        self, request: jackal_pb2.CheckDiagnosisRequestData
+        self, request: jackal_pb2.Request
     ) -> bytes:
         res = jackal_pb2.Response()
         res.result = 1
@@ -86,7 +88,7 @@ class PokkenBase:
         return res.SerializeToString()

     def handle_load_client_settings(
-        self, request: jackal_pb2.CheckDiagnosisRequestData
+        self, request: jackal_pb2.Request
     ) -> bytes:
         res = jackal_pb2.Response()
         res.result = 1
@@ -108,3 +110,36 @@ class PokkenBase:
         res.load_client_settings.CopyFrom(settings)

         return res.SerializeToString()
+
+    def handle_load_ranking(self, request: jackal_pb2.Request) -> bytes:
+        res = jackal_pb2.Response()
+        res.result = 1
+        res.type = jackal_pb2.MessageType.LOAD_RANKING
+        ranking = jackal_pb2.LoadRankingResponseData()
+
+        ranking.ranking_id = 1
+        ranking.ranking_start = 0
+        ranking.ranking_end = 1
+        ranking.event_end = True
+        ranking.modify_date = int(datetime.now().timestamp() / 1000)
+        res.load_ranking.CopyFrom(ranking)
+
+    def handle_matching_noop(self, data: Dict = {}, client_ip: str = "127.0.0.1") -> Dict:
+        return {}
+
+    def handle_matching_start_matching(self, data: Dict = {}, client_ip: str = "127.0.0.1") -> Dict:
+        return {}
+
+    def handle_matching_is_matching(self, data: Dict = {}, client_ip: str = "127.0.0.1") -> Dict:
+        """
+        "sessionId":"12345678",
+        "A":{
+            "pcb_id": data["data"]["must"]["pcb_id"],
+            "gip": client_ip
+        },
+        "list":[]
+        """
+        return {}
+
+    def handle_matching_stop_matching(self, data: Dict = {}, client_ip: str = "127.0.0.1") -> Dict:
+        return {}
@@ -31,43 +31,24 @@ class PokkenServerConfig:
             self.__config, "pokken", "server", "port", default=9000
         )

-    @property
-    def port_matching(self) -> int:
-        return CoreConfig.get_config_field(
-            self.__config, "pokken", "server", "port_matching", default=9001
-        )
-
     @property
     def port_stun(self) -> int:
         return CoreConfig.get_config_field(
-            self.__config, "pokken", "server", "port_stun", default=9002
+            self.__config, "pokken", "server", "port_stun", default=9001
         )

     @property
     def port_turn(self) -> int:
         return CoreConfig.get_config_field(
-            self.__config, "pokken", "server", "port_turn", default=9003
+            self.__config, "pokken", "server", "port_turn", default=9002
         )

     @property
     def port_admission(self) -> int:
         return CoreConfig.get_config_field(
-            self.__config, "pokken", "server", "port_admission", default=9004
+            self.__config, "pokken", "server", "port_admission", default=9003
         )

-    @property
-    def ssl_cert(self) -> str:
-        return CoreConfig.get_config_field(
-            self.__config, "pokken", "server", "ssl_cert", default="cert/pokken.crt"
-        )
-
-    @property
-    def ssl_key(self) -> str:
-        return CoreConfig.get_config_field(
-            self.__config, "pokken", "server", "ssl_key", default="cert/pokken.key"
-        )
-

 class PokkenConfig(dict):
     def __init__(self) -> None:
         self.server = PokkenServerConfig(self)
@@ -1,18 +1,20 @@
 from typing import Tuple
 from twisted.web.http import Request
-from twisted.web import resource, server
-from twisted.internet import reactor, endpoints
+from twisted.web import resource
+import json, ast
+from datetime import datetime
 import yaml
 import logging, coloredlogs
 from logging.handlers import TimedRotatingFileHandler
-from titles.pokken.proto import jackal_pb2
+import inflection
 from os import path
 from google.protobuf.message import DecodeError

-from core.config import CoreConfig
+from core import CoreConfig, Utils
 from titles.pokken.config import PokkenConfig
 from titles.pokken.base import PokkenBase
 from titles.pokken.const import PokkenConstants
+from titles.pokken.proto import jackal_pb2


 class PokkenServlet(resource.Resource):
@@ -65,16 +67,9 @@ class PokkenServlet(resource.Resource):
         if not game_cfg.server.enable:
             return (False, "", "")

-        # if core_cfg.server.is_develop:
-        #     return (
-        #         True,
-        #         f"https://{game_cfg.server.hostname}:{game_cfg.server.port}/{game_code}/$v/",
-        #         f"{game_cfg.server.hostname}:{game_cfg.server.port}/",
-        #     )
-
         return (
             True,
-            f"https://{game_cfg.server.hostname}:443/{game_code}/$v/",
+            f"https://{game_cfg.server.hostname}:{game_cfg.server.port}/{game_code}/$v/",
             f"{game_cfg.server.hostname}/SDAK/$v/",
         )

@@ -94,42 +89,15 @@ class PokkenServlet(resource.Resource):

         return (True, "PKF2")

-    def setup(self):
-        """
-        There's currently no point in having this server on because Twisted
-        won't play ball with both the fact that it's TLSv1.1, and because the
-        types of certs that pokken will accept are too flimsy for Twisted
-        so it will throw a fit. Currently leaving this here in case a bypass
-        is discovered in the future, but it's unlikly. For now, just use NGINX.
-        """
-        if self.game_cfg.server.enable and self.core_cfg.server.is_develop:
-            key_exists = path.exists(self.game_cfg.server.ssl_key)
-            cert_exists = path.exists(self.game_cfg.server.ssl_cert)
-
-            if key_exists and cert_exists:
-                endpoints.serverFromString(
-                    reactor,
-                    f"ssl:{self.game_cfg.server.port}"
-                    f":interface={self.core_cfg.server.listen_address}:privateKey={self.game_cfg.server.ssl_key}:"
-                    f"certKey={self.game_cfg.server.ssl_cert}",
-                ).listen(server.Site(self))
-
-                self.logger.info(
-                    f"Pokken title server ready on port {self.game_cfg.server.port}"
-                )
-
-            else:
-                self.logger.error(
-                    f"Could not find cert at {self.game_cfg.server.ssl_key} or key at {self.game_cfg.server.ssl_cert}, Pokken not running."
-                )
+    def setup(self) -> None:
+        # TODO: Setup stun, turn (UDP) and admission (WSS) servers
+        pass

     def render_POST(
         self, request: Request, version: int = 0, endpoints: str = ""
     ) -> bytes:
-        if endpoints == "":
-            endpoints = request.uri.decode()
-        if endpoints.startswith("/matching"):
-            self.logger.info("Matching request")
+        if endpoints == "matching":
+            return self.handle_matching(request)

         content = request.content.getvalue()
         if content == b"":
@@ -147,10 +115,46 @@ class PokkenServlet(resource.Resource):
             pokken_request.type
         ].name.lower()

-        self.logger.info(f"{endpoint} request")
-
         handler = getattr(self.base, f"handle_{endpoint}", None)
         if handler is None:
             self.logger.warn(f"No handler found for message type {endpoint}")
             return self.base.handle_noop(pokken_request)
-        return handler(pokken_request)
+
+        self.logger.info(f"{endpoint} request from {Utils.get_ip_addr(request)}")
+        self.logger.debug(pokken_request)
+
+        ret = handler(pokken_request)
+        self.logger.debug(f"Response: {ret}")
+        return ret
+
+    def handle_matching(self, request: Request) -> bytes:
+        content = request.content.getvalue()
+        client_ip = Utils.get_ip_addr(request)
+
+        if content is None or content == b"":
+            self.logger.info("Empty matching request")
+            return json.dumps(self.base.handle_matching_noop()).encode()
+
+        json_content = ast.literal_eval(content.decode().replace('null', 'None').replace('true', 'True').replace('false', 'False'))
+        self.logger.info(f"Matching {json_content['call']} request")
+        self.logger.debug(json_content)
+
+        handler = getattr(self.base, f"handle_matching_{inflection.underscore(json_content['call'])}", None)
+        if handler is None:
+            self.logger.warn(f"No handler found for message type {json_content['call']}")
+            return json.dumps(self.base.handle_matching_noop()).encode()
+
+        ret = handler(json_content, client_ip)
+
+        if ret is None:
+            ret = {}
+        if "result" not in ret:
+            ret["result"] = "true"
+        if "data" not in ret:
+            ret["data"] = {}
+        if "timestamp" not in ret:
+            ret["timestamp"] = int(datetime.now().timestamp() * 1000)
+
+        self.logger.debug(f"Response {ret}")
+
+        return json.dumps(ret).encode()
@@ -635,14 +635,14 @@ class WaccaBase:
             new_tickets.append([ticket["id"], ticket["ticket_id"], 9999999999])

         for item in req.itemsUsed:
-            if item.itemType == WaccaConstants.ITEM_TYPES["wp"]:
+            if item.itemType == WaccaConstants.ITEM_TYPES["wp"] and not self.game_config.mods.infinite_wp:
                 if current_wp >= item.quantity:
                     current_wp -= item.quantity
                     self.data.profile.spend_wp(req.profileId, item.quantity)
                 else:
                     return BaseResponse().make()

-            elif item.itemType == WaccaConstants.ITEM_TYPES["ticket"]:
+            elif item.itemType == WaccaConstants.ITEM_TYPES["ticket"] and not self.game_config.mods.infinite_tickets:
                 for x in range(len(new_tickets)):
                     if new_tickets[x][1] == item.itemId:
                         self.data.item.spend_ticket(new_tickets[x][0])
@@ -836,7 +836,7 @@ class WaccaBase:
             resp.songDetail.grades = SongDetailGradeCountsV2(counts=grades)
         else:
             resp.songDetail.grades = SongDetailGradeCountsV1(counts=grades)
+        resp.songDetail.lock_state = 1
         return resp.make()

     # TODO: Coop and vs data
@@ -880,7 +880,7 @@ class WaccaBase:
         user_id = profile["user"]
         resp.currentWp = profile["wp"]

-        if req.purchaseType == PurchaseType.PurchaseTypeWP:
+        if req.purchaseType == PurchaseType.PurchaseTypeWP and not self.game_config.mods.infinite_wp:
             resp.currentWp -= req.cost
             self.data.profile.spend_wp(req.profileId, req.cost)

@@ -1070,19 +1070,12 @@ class WaccaBase:
         ):
             if item.quantity > WaccaConstants.Difficulty.HARD.value:
                 old_score = self.data.score.get_best_score(
                     user_id, item.itemId, item.quantity
+                )
+                if not old_score:
+                    self.data.score.put_best_score(
+                        user_id, item.itemId, item.quantity, 0, [0] * 5, [0] * 13, 0, 0
                 )
-                if not old_score:
-                    self.data.score.put_best_score(
-                        user_id,
-                        item.itemId,
-                        item.quantity,
-                        0,
-                        [0] * 5,
-                        [0] * 13,
-                        0,
-                        0,
-                    )

             if item.quantity == 0:
                 item.quantity = WaccaConstants.Difficulty.HARD.value
@@ -1,6 +1,7 @@
 import yaml
 import jinja2
 from twisted.web.http import Request
+from os import path

 from core.frontend import FE_Base
 from core.config import CoreConfig
@@ -16,7 +17,10 @@ class WaccaFrontend(FE_Base):
         super().__init__(cfg, environment)
         self.data = WaccaData(cfg)
         self.game_cfg = WaccaConfig()
-        self.game_cfg.update(yaml.safe_load(open(f"{cfg_dir}/wacca.yaml")))
+        if path.exists(f"{cfg_dir}/{WaccaConstants.CONFIG_NAME}"):
+            self.game_cfg.update(
+                yaml.safe_load(open(f"{cfg_dir}/{WaccaConstants.CONFIG_NAME}"))
+            )
         self.nav_name = "Wacca"

     def render_GET(self, request: Request) -> bytes:
@@ -577,7 +577,21 @@ class SongDetailGradeCountsV2(SongDetailGradeCountsV1):
         self.ssspCt = counts[12]

     def make(self) -> List:
-        return super().make() + [self.spCt, self.sspCt, self.ssspCt]
+        return [
+            self.dCt,
+            self.cCt,
+            self.bCt,
+            self.aCt,
+            self.aaCt,
+            self.aaaCt,
+            self.sCt,
+            self.spCt,
+            self.ssCt,
+            self.sspCt,
+            self.sssCt,
+            self.ssspCt,
+            self.masterCt,
+        ]


 class BestScoreDetailV1:
@@ -928,7 +942,7 @@ class MusicUpdateDetailV1:
         self.score = 0
         self.lowestMissCount = 0
         self.maxSkillPts = 0
-        self.locked = 0
+        self.lock_state = 0

     def make(self) -> List:
         return [
@@ -940,7 +954,7 @@ class MusicUpdateDetailV1:
             self.score,
             self.lowestMissCount,
             self.maxSkillPts,
-            self.locked,
+            self.lock_state,
         ]

@@ -12,8 +12,8 @@ class UserInfoUpdateRequest(BaseRequest):
         self.optsUpdated: List[UserOption] = []
         self.unknown2: List = self.params[2]
         self.datesUpdated: List[DateUpdate] = []
-        self.favoritesAdded: List[int] = self.params[4]
-        self.favoritesRemoved: List[int] = self.params[5]
+        self.favoritesRemoved: List[int] = self.params[4]
+        self.favoritesAdded: List[int] = self.params[5]

         for x in self.params[1]:
             self.optsUpdated.append(UserOption(x[0], x[1]))
@@ -8,7 +8,7 @@ from twisted.web.http import Request
 from typing import Dict, Tuple
 from os import path

-from core.config import CoreConfig
+from core import CoreConfig, Utils
 from titles.wacca.config import WaccaConfig
 from titles.wacca.config import WaccaConfig
 from titles.wacca.const import WaccaConstants
@@ -89,6 +89,7 @@ class WaccaServlet:
             request.responseHeaders.addRawHeader(b"X-Wacca-Hash", hash.hex().encode())
             return json.dumps(resp).encode()

+        client_ip = Utils.get_ip_addr(request)
         try:
             req_json = json.loads(request.content.getvalue())
             version_full = Version(req_json["appVersion"])
@@ -101,7 +102,7 @@ class WaccaServlet:
             resp.message = "不正なリクエスト エラーです"
             return end(resp.make())

-        if "/api/" in url_path:
+        if "api/" in url_path:
            func_to_find = (
                "handle_" + url_path.partition("api/")[2].replace("/", "_") + "_request"
            )
@@ -140,7 +141,7 @@ class WaccaServlet:
            return end(resp.make())

        self.logger.info(
-            f"v{req_json['appVersion']} {url_path} request from {request.getClientAddress().host} with chipId {req_json['chipId']}"
+            f"v{req_json['appVersion']} {url_path} request from {client_ip} with chipId {req_json['chipId']}"
        )
        self.logger.debug(req_json)
