forked from Hay1tsme/artemis
Merge branch 'develop'
commit 5f586379ca
Dockerfile (new file, 21 lines)
@ -0,0 +1,21 @@
FROM python:3.9.15-slim-bullseye

RUN apt update && apt install default-libmysqlclient-dev build-essential libtk nodejs npm -y

WORKDIR /app
COPY requirements.txt requirements.txt
RUN pip3 install -r requirements.txt
RUN npm i -g nodemon

COPY entrypoint.sh entrypoint.sh
RUN chmod +x entrypoint.sh

COPY index.py index.py
COPY dbutils.py dbutils.py
ADD core core
ADD titles titles
ADD config config
ADD log log
ADD cert cert

ENTRYPOINT [ "/app/entrypoint.sh" ]
changelog.md (50 lines changed)
@ -1,6 +1,56 @@
# Changelog
Documenting updates to ARTEMiS, to be updated every time the master branch is pushed to.

## 20230716
### General
+ Docker files added (#19)
+ Added support for threading
  + This comes with the caveat that enabling it will not allow you to use Ctrl + C to stop the server.

### Webui
+ Small improvements
+ Add card display

### Allnet
+ Billing format validation
+ Fix naomitest.html endpoint
+ Add event logging for auths and billing
+ LoaderStateRecorder endpoint handler added

### Mucha
+ Fixed log level always being "Info"
+ Add stub handler for DownloadState

### Sword Art Online
+ Support added

### Crossbeats
+ Added threading to profile loading
  + This should cause a noticeable speed-up

### Card Maker
+ DX Passes fixed
+ Various improvements

### Diva
+ Added clear status calculation
+ Various minor fixes and improvements

### Maimai
+ Added support for memorial photo uploads
+ Added support for the following versions
  + Festival
  + FiNALE
+ Various bug fixes and improvements

### Wacca
+ Fixed an error that sometimes occurred when trying to unlock songs (#22)

### Pokken
+ Profile saving added (loading TBA)
+ Use external STUN server for matching by default
  + Matching still not working

## 2023042300
### Wacca
+ Time free now works properly
@ -63,7 +63,7 @@ class AimedbProtocol(Protocol):
|
||||
|
||||
try:
|
||||
decrypted = cipher.decrypt(data)
|
||||
except:
|
||||
except Exception:
|
||||
self.logger.error(f"Failed to decrypt {data.hex()}")
|
||||
return None
|
||||
|
||||
|
core/allnet.py (202 lines changed)
@ -1,4 +1,4 @@
|
||||
from typing import Dict, List, Any, Optional, Tuple
|
||||
from typing import Dict, List, Any, Optional, Tuple, Union
|
||||
import logging, coloredlogs
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from twisted.web.http import Request
|
||||
@ -11,6 +11,7 @@ from Crypto.Hash import SHA
|
||||
from Crypto.Signature import PKCS1_v1_5
|
||||
from time import strptime
|
||||
from os import path
|
||||
import urllib.parse
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.utils import Utils
|
||||
@ -79,7 +80,7 @@ class AllnetServlet:
|
||||
req = AllnetPowerOnRequest(req_dict[0])
|
||||
# Validate the request. Currently we only validate the fields we plan on using
|
||||
|
||||
if not req.game_id or not req.ver or not req.serial or not req.ip:
|
||||
if not req.game_id or not req.ver or not req.serial or not req.ip or not req.firm_ver or not req.boot_ver:
|
||||
raise AllnetRequestException(
|
||||
f"Bad auth request params from {request_ip} - {vars(req)}"
|
||||
)
|
||||
@ -89,10 +90,12 @@ class AllnetServlet:
|
||||
self.logger.error(e)
|
||||
return b""
|
||||
|
||||
if req.format_ver == "3":
|
||||
if req.format_ver == 3:
|
||||
resp = AllnetPowerOnResponse3(req.token)
|
||||
else:
|
||||
elif req.format_ver == 2:
|
||||
resp = AllnetPowerOnResponse2()
|
||||
else:
|
||||
resp = AllnetPowerOnResponse()
|
||||
|
||||
self.logger.debug(f"Allnet request: {vars(req)}")
|
||||
if req.game_id not in self.uri_registry:
|
||||
@ -103,8 +106,9 @@ class AllnetServlet:
|
||||
)
|
||||
self.logger.warn(msg)
|
||||
|
||||
resp.stat = 0
|
||||
return self.dict_to_http_form_string([vars(resp)])
|
||||
resp.stat = -1
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
@ -112,7 +116,12 @@ class AllnetServlet:
|
||||
)
|
||||
resp.uri = f"http://{self.config.title.hostname}:{self.config.title.port}/{req.game_id}/{req.ver.replace('.', '')}/"
|
||||
resp.host = f"{self.config.title.hostname}:{self.config.title.port}"
|
||||
return self.dict_to_http_form_string([vars(resp)])
|
||||
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
resp_str = urllib.parse.unquote(urllib.parse.urlencode(resp_dict))
|
||||
|
||||
self.logger.debug(f"Allnet response: {resp_str}")
|
||||
return (resp_str + "\n").encode("utf-8")
|
||||
|
||||
resp.uri, resp.host = self.uri_registry[req.game_id]
|
||||
|
||||
@ -124,8 +133,9 @@ class AllnetServlet:
|
||||
)
|
||||
self.logger.warn(msg)
|
||||
|
||||
resp.stat = 0
|
||||
return self.dict_to_http_form_string([vars(resp)])
|
||||
resp.stat = -2
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
|
||||
|
||||
if machine is not None:
|
||||
arcade = self.data.arcade.get_arcade(machine["arcade"])
|
||||
@ -167,9 +177,13 @@ class AllnetServlet:
|
||||
msg = f"{req.serial} authenticated from {request_ip}: {req.game_id} v{req.ver}"
|
||||
self.data.base.log_event("allnet", "ALLNET_AUTH_SUCCESS", logging.INFO, msg)
|
||||
self.logger.info(msg)
|
||||
self.logger.debug(f"Allnet response: {vars(resp)}")
|
||||
|
||||
return self.dict_to_http_form_string([vars(resp)]).encode("utf-8")
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
resp_str = urllib.parse.unquote(urllib.parse.urlencode(resp_dict))
|
||||
self.logger.debug(f"Allnet response: {resp_dict}")
|
||||
resp_str += "\n"
|
||||
|
||||
return resp_str.encode("utf-8")
|
||||
|
||||
def handle_dlorder(self, request: Request, _: Dict):
|
||||
request_ip = Utils.get_ip_addr(request)
|
||||
@ -194,27 +208,29 @@ class AllnetServlet:
|
||||
self.logger.info(
|
||||
f"DownloadOrder from {request_ip} -> {req.game_id} v{req.ver} serial {req.serial}"
|
||||
)
|
||||
resp = AllnetDownloadOrderResponse()
|
||||
resp = AllnetDownloadOrderResponse(serial=req.serial)
|
||||
|
||||
if (
|
||||
not self.config.allnet.allow_online_updates
|
||||
or not self.config.allnet.update_cfg_folder
|
||||
):
|
||||
return self.dict_to_http_form_string([vars(resp)])
|
||||
return urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n"
|
||||
|
||||
else: # TODO: Keychip check
|
||||
if path.exists(
|
||||
f"{self.config.allnet.update_cfg_folder}/{req.game_id}-{req.ver}-app.ini"
|
||||
f"{self.config.allnet.update_cfg_folder}/{req.game_id}-{req.ver.replace('.', '')}-app.ini"
|
||||
):
|
||||
resp.uri = f"http://{self.config.title.hostname}:{self.config.title.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-app.ini"
|
||||
|
||||
if path.exists(
|
||||
f"{self.config.allnet.update_cfg_folder}/{req.game_id}-{req.ver}-opt.ini"
|
||||
f"{self.config.allnet.update_cfg_folder}/{req.game_id}-{req.ver.replace('.', '')}-opt.ini"
|
||||
):
|
||||
resp.uri += f"|http://{self.config.title.hostname}:{self.config.title.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-opt.ini"
|
||||
|
||||
self.logger.debug(f"Sending download uri {resp.uri}")
|
||||
return self.dict_to_http_form_string([vars(resp)])
|
||||
self.data.base.log_event("allnet", "DLORDER_REQ_SUCCESS", logging.INFO, f"{Utils.get_ip_addr(request)} requested DL Order for {req.serial} {req.game_id} v{req.ver}")
|
||||
|
||||
return urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n"
|
||||
|
||||
def handle_dlorder_ini(self, request: Request, match: Dict) -> bytes:
|
||||
if "file" not in match:
|
||||
@ -223,6 +239,8 @@ class AllnetServlet:
|
||||
req_file = match["file"].replace("%0A", "")
|
||||
|
||||
if path.exists(f"{self.config.allnet.update_cfg_folder}/{req_file}"):
|
||||
self.logger.info(f"Request for DL INI file {req_file} from {Utils.get_ip_addr(request)} successful")
|
||||
self.data.base.log_event("allnet", "DLORDER_INI_SENT", logging.INFO, f"{Utils.get_ip_addr(request)} successfully recieved {req_file}")
|
||||
return open(
|
||||
f"{self.config.allnet.update_cfg_folder}/{req_file}", "rb"
|
||||
).read()
|
||||
@ -236,6 +254,27 @@ class AllnetServlet:
|
||||
)
|
||||
return b""
|
||||
|
||||
def handle_loaderstaterecorder(self, request: Request, match: Dict) -> bytes:
|
||||
req_data = request.content.getvalue()
|
||||
sections = req_data.decode("utf-8").split("\r\n")
|
||||
|
||||
req_dict = dict(urllib.parse.parse_qsl(sections[0]))
|
||||
|
||||
serial: Union[str, None] = req_dict.get("serial", None)
|
||||
num_files_to_dl: Union[str, None] = req_dict.get("nb_ftd", None)
|
||||
num_files_dld: Union[str, None] = req_dict.get("nb_dld", None)
|
||||
dl_state: Union[str, None] = req_dict.get("dld_st", None)
|
||||
ip = Utils.get_ip_addr(request)
|
||||
|
||||
if serial is None or num_files_dld is None or num_files_to_dl is None or dl_state is None:
|
||||
return "NG".encode()
|
||||
|
||||
self.logger.info(f"LoaderStateRecorder Request from {ip} {serial}: {num_files_dld}/{num_files_to_dl} Files download (State: {dl_state})")
|
||||
return "OK".encode()
|
||||
|
||||
def handle_alive(self, request: Request, match: Dict) -> bytes:
|
||||
return "OK".encode()
|
||||
|
||||
def handle_billing_request(self, request: Request, _: Dict):
|
||||
req_dict = self.billing_req_to_dict(request.content.getvalue())
|
||||
request_ip = Utils.get_ip_addr(request)
|
||||
@ -249,6 +288,7 @@ class AllnetServlet:
|
||||
signer = PKCS1_v1_5.new(rsa)
|
||||
digest = SHA.new()
|
||||
|
||||
try:
|
||||
kc_playlimit = int(req_dict[0]["playlimit"])
|
||||
kc_nearfull = int(req_dict[0]["nearfull"])
|
||||
kc_billigtype = int(req_dict[0]["billingtype"])
|
||||
@ -258,6 +298,9 @@ class AllnetServlet:
|
||||
kc_date = strptime(req_dict[0]["date"], "%Y%m%d%H%M%S")
|
||||
kc_serial_bytes = kc_serial.encode()
|
||||
|
||||
except KeyError as e:
|
||||
return f"result=5&linelimit=&message={e} field is missing".encode()
|
||||
|
||||
machine = self.data.arcade.get_machine(kc_serial)
|
||||
if machine is None and not self.config.server.allow_unregistered_serials:
|
||||
msg = f"Unrecognised serial {kc_serial} attempted billing checkin from {request_ip} for game {kc_game}."
|
||||
@ -295,7 +338,7 @@ class AllnetServlet:
|
||||
|
||||
resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig)
|
||||
|
||||
resp_str = self.dict_to_http_form_string([vars(resp)], True)
|
||||
resp_str = self.dict_to_http_form_string([vars(resp)])
|
||||
if resp_str is None:
|
||||
self.logger.error(f"Failed to parse response {vars(resp)}")
|
||||
|
||||
@ -306,21 +349,6 @@ class AllnetServlet:
|
||||
self.logger.info(f"Ping from {Utils.get_ip_addr(request)}")
|
||||
return b"naomi ok"
|
||||
|
||||
def kvp_to_dict(self, kvp: List[str]) -> List[Dict[str, Any]]:
|
||||
ret: List[Dict[str, Any]] = []
|
||||
for x in kvp:
|
||||
items = x.split("&")
|
||||
tmp = {}
|
||||
|
||||
for item in items:
|
||||
kvp = item.split("=")
|
||||
if len(kvp) == 2:
|
||||
tmp[kvp[0]] = kvp[1]
|
||||
|
||||
ret.append(tmp)
|
||||
|
||||
return ret
|
||||
|
||||
def billing_req_to_dict(self, data: bytes):
|
||||
"""
|
||||
Parses a billing request string into a Python dictionary
|
||||
@ -330,7 +358,10 @@ class AllnetServlet:
|
||||
unzipped = decomp.decompress(data)
|
||||
sections = unzipped.decode("ascii").split("\r\n")
|
||||
|
||||
return self.kvp_to_dict(sections)
|
||||
ret = []
|
||||
for x in sections:
|
||||
ret.append(dict(urllib.parse.parse_qsl(x)))
|
||||
return ret
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"billing_req_to_dict: {e} while parsing {data}")
|
||||
@ -345,7 +376,10 @@ class AllnetServlet:
|
||||
unzipped = zlib.decompress(zipped)
|
||||
sections = unzipped.decode("utf-8").split("\r\n")
|
||||
|
||||
return self.kvp_to_dict(sections)
|
||||
ret = []
|
||||
for x in sections:
|
||||
ret.append(dict(urllib.parse.parse_qsl(x)))
|
||||
return ret
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"allnet_req_to_dict: {e} while parsing {data}")
|
||||
@ -354,7 +388,7 @@ class AllnetServlet:
|
||||
def dict_to_http_form_string(
|
||||
self,
|
||||
data: List[Dict[str, Any]],
|
||||
crlf: bool = False,
|
||||
crlf: bool = True,
|
||||
trailing_newline: bool = True,
|
||||
) -> Optional[str]:
|
||||
"""
|
||||
@ -364,21 +398,19 @@ class AllnetServlet:
|
||||
urlencode = ""
|
||||
for item in data:
|
||||
for k, v in item.items():
|
||||
if k is None or v is None:
|
||||
continue
|
||||
urlencode += f"{k}={v}&"
|
||||
|
||||
if crlf:
|
||||
urlencode = urlencode[:-1] + "\r\n"
|
||||
else:
|
||||
urlencode = urlencode[:-1] + "\n"
|
||||
|
||||
if not trailing_newline:
|
||||
if crlf:
|
||||
urlencode = urlencode[:-2]
|
||||
else:
|
||||
urlencode = urlencode[:-1]
|
||||
|
||||
return urlencode
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"dict_to_http_form_string: {e} while parsing {data}")
|
||||
return None
|
||||
@ -388,64 +420,68 @@ class AllnetPowerOnRequest:
|
||||
def __init__(self, req: Dict) -> None:
|
||||
if req is None:
|
||||
raise AllnetRequestException("Request processing failed")
|
||||
self.game_id: str = req.get("game_id", "")
|
||||
self.ver: str = req.get("ver", "")
|
||||
self.serial: str = req.get("serial", "")
|
||||
self.ip: str = req.get("ip", "")
|
||||
self.firm_ver: str = req.get("firm_ver", "")
|
||||
self.boot_ver: str = req.get("boot_ver", "")
|
||||
self.encode: str = req.get("encode", "")
|
||||
self.hops = int(req.get("hops", "0"))
|
||||
self.format_ver = req.get("format_ver", "2")
|
||||
self.token = int(req.get("token", "0"))
|
||||
self.game_id: str = req.get("game_id", None)
|
||||
self.ver: str = req.get("ver", None)
|
||||
self.serial: str = req.get("serial", None)
|
||||
self.ip: str = req.get("ip", None)
|
||||
self.firm_ver: str = req.get("firm_ver", None)
|
||||
self.boot_ver: str = req.get("boot_ver", None)
|
||||
self.encode: str = req.get("encode", "EUC-JP")
|
||||
self.hops = int(req.get("hops", "-1"))
|
||||
self.format_ver = float(req.get("format_ver", "1.00"))
|
||||
self.token: str = req.get("token", "0")
|
||||
|
||||
|
||||
class AllnetPowerOnResponse3:
|
||||
def __init__(self, token) -> None:
|
||||
self.stat = 1
|
||||
self.uri = ""
|
||||
self.host = ""
|
||||
self.place_id = "123"
|
||||
self.name = ""
|
||||
self.nickname = ""
|
||||
self.region0 = "1"
|
||||
self.region_name0 = "W"
|
||||
self.region_name1 = ""
|
||||
self.region_name2 = ""
|
||||
self.region_name3 = ""
|
||||
self.country = "JPN"
|
||||
self.allnet_id = "123"
|
||||
self.client_timezone = "+0900"
|
||||
self.utc_time = datetime.now(tz=pytz.timezone("UTC")).strftime(
|
||||
"%Y-%m-%dT%H:%M:%SZ"
|
||||
)
|
||||
self.setting = "1"
|
||||
self.res_ver = "3"
|
||||
self.token = str(token)
|
||||
|
||||
|
||||
class AllnetPowerOnResponse2:
|
||||
class AllnetPowerOnResponse:
|
||||
def __init__(self) -> None:
|
||||
self.stat = 1
|
||||
self.uri = ""
|
||||
self.host = ""
|
||||
self.place_id = "123"
|
||||
self.name = "Test"
|
||||
self.nickname = "Test123"
|
||||
self.name = "ARTEMiS"
|
||||
self.nickname = "ARTEMiS"
|
||||
self.region0 = "1"
|
||||
self.region_name0 = "W"
|
||||
self.region_name1 = "X"
|
||||
self.region_name2 = "Y"
|
||||
self.region_name3 = "Z"
|
||||
self.country = "JPN"
|
||||
self.region_name1 = ""
|
||||
self.region_name2 = ""
|
||||
self.region_name3 = ""
|
||||
self.setting = "1"
|
||||
self.year = datetime.now().year
|
||||
self.month = datetime.now().month
|
||||
self.day = datetime.now().day
|
||||
self.hour = datetime.now().hour
|
||||
self.minute = datetime.now().minute
|
||||
self.second = datetime.now().second
|
||||
self.setting = "1"
|
||||
self.timezone = "+0900"
|
||||
|
||||
class AllnetPowerOnResponse3(AllnetPowerOnResponse):
|
||||
def __init__(self, token) -> None:
|
||||
super().__init__()
|
||||
|
||||
# Added in v3
|
||||
self.country = "JPN"
|
||||
self.allnet_id = "123"
|
||||
self.client_timezone = "+0900"
|
||||
self.utc_time = datetime.now(tz=pytz.timezone("UTC")).strftime(
|
||||
"%Y-%m-%dT%H:%M:%SZ"
|
||||
)
|
||||
self.res_ver = "3"
|
||||
self.token = token
|
||||
|
||||
# Removed in v3
|
||||
self.year = None
|
||||
self.month = None
|
||||
self.day = None
|
||||
self.hour = None
|
||||
self.minute = None
|
||||
self.second = None
|
||||
|
||||
|
||||
class AllnetPowerOnResponse2(AllnetPowerOnResponse):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
|
||||
# Added in v2
|
||||
self.country = "JPN"
|
||||
self.timezone = "+09:00"
|
||||
self.res_class = "PowerOnResponseV2"
|
||||
|
||||
|
||||
|
@ -36,6 +36,12 @@ class ServerConfig:
|
||||
self.__config, "core", "server", "is_develop", default=True
|
||||
)
|
||||
|
||||
@property
|
||||
def threading(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "threading", default=False
|
||||
)
|
||||
|
||||
@property
|
||||
def log_dir(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
|
@ -15,31 +15,48 @@ from core.utils import Utils
|
||||
|
||||
|
||||
class Data:
|
||||
current_schema_version = 4
|
||||
engine = None
|
||||
session = None
|
||||
user = None
|
||||
arcade = None
|
||||
card = None
|
||||
base = None
|
||||
def __init__(self, cfg: CoreConfig) -> None:
|
||||
self.config = cfg
|
||||
|
||||
if self.config.database.sha2_password:
|
||||
passwd = sha256(self.config.database.password.encode()).digest()
|
||||
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{passwd.hex()}@{self.config.database.host}/{self.config.database.name}?charset=utf8mb4"
|
||||
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{passwd.hex()}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}?charset=utf8mb4"
|
||||
else:
|
||||
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{self.config.database.password}@{self.config.database.host}/{self.config.database.name}?charset=utf8mb4"
|
||||
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{self.config.database.password}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}?charset=utf8mb4"
|
||||
|
||||
self.__engine = create_engine(self.__url, pool_recycle=3600)
|
||||
session = sessionmaker(bind=self.__engine, autoflush=True, autocommit=True)
|
||||
self.session = scoped_session(session)
|
||||
if Data.engine is None:
|
||||
Data.engine = create_engine(self.__url, pool_recycle=3600)
|
||||
self.__engine = Data.engine
|
||||
|
||||
self.user = UserData(self.config, self.session)
|
||||
self.arcade = ArcadeData(self.config, self.session)
|
||||
self.card = CardData(self.config, self.session)
|
||||
self.base = BaseData(self.config, self.session)
|
||||
self.current_schema_version = 4
|
||||
if Data.session is None:
|
||||
s = sessionmaker(bind=Data.engine, autoflush=True, autocommit=True)
|
||||
Data.session = scoped_session(s)
|
||||
|
||||
if Data.user is None:
|
||||
Data.user = UserData(self.config, self.session)
|
||||
|
||||
if Data.arcade is None:
|
||||
Data.arcade = ArcadeData(self.config, self.session)
|
||||
|
||||
if Data.card is None:
|
||||
Data.card = CardData(self.config, self.session)
|
||||
|
||||
if Data.base is None:
|
||||
Data.base = BaseData(self.config, self.session)
|
||||
|
||||
log_fmt_str = "[%(asctime)s] %(levelname)s | Database | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
self.logger = logging.getLogger("database")
|
||||
|
||||
# Prevent the logger from adding handlers multiple times
|
||||
if not getattr(self.logger, "handler_set", None):
|
||||
log_fmt_str = "[%(asctime)s] %(levelname)s | Database | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(self.config.server.log_dir, "db"),
|
||||
encoding="utf-8",
|
||||
@ -77,7 +94,7 @@ class Data:
|
||||
game_mod.database(self.config)
|
||||
metadata.create_all(self.__engine.connect())
|
||||
|
||||
self.base.set_schema_ver(
|
||||
self.base.touch_schema_ver(
|
||||
game_mod.current_schema_version, game_mod.game_codes[0]
|
||||
)
|
||||
|
||||
@ -333,3 +350,8 @@ class Data:
|
||||
|
||||
if not failed:
|
||||
self.base.set_schema_ver(latest_ver, game)
|
||||
|
||||
def show_versions(self) -> None:
|
||||
all_game_versions = self.base.get_all_schema_vers()
|
||||
for ver in all_game_versions:
|
||||
self.logger.info(f"{ver['game']} -> v{ver['version']}")
|
||||
|
@ -58,7 +58,7 @@ class BaseData:
|
||||
self.logger.error(f"UnicodeEncodeError error {e}")
|
||||
return None
|
||||
|
||||
except:
|
||||
except Exception:
|
||||
try:
|
||||
res = self.conn.execute(sql, opts)
|
||||
|
||||
@ -70,7 +70,7 @@ class BaseData:
|
||||
self.logger.error(f"UnicodeEncodeError error {e}")
|
||||
return None
|
||||
|
||||
except:
|
||||
except Exception:
|
||||
self.logger.error(f"Unknown error")
|
||||
raise
|
||||
|
||||
@ -103,6 +103,18 @@ class BaseData:
|
||||
|
||||
return row["version"]
|
||||
|
||||
def touch_schema_ver(self, ver: int, game: str = "CORE") -> Optional[int]:
|
||||
sql = insert(schema_ver).values(game=game, version=ver)
|
||||
conflict = sql.on_duplicate_key_update(version=schema_ver.c.version)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"Failed to update schema version for game {game} (v{ver})"
|
||||
)
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def set_schema_ver(self, ver: int, game: str = "CORE") -> Optional[int]:
|
||||
sql = insert(schema_ver).values(game=game, version=ver)
|
||||
conflict = sql.on_duplicate_key_update(version=ver)
|
||||
|
@ -80,6 +80,9 @@ class UserData(BaseData):
|
||||
if usr["password"] is None:
|
||||
return False
|
||||
|
||||
if passwd is None or not passwd:
|
||||
return False
|
||||
|
||||
return bcrypt.checkpw(passwd, usr["password"].encode())
|
||||
|
||||
def reset_autoincrement(self, ai_value: int) -> None:
|
||||
|
core/data/schema/versions/SBZV_4_rollback.sql (new file, 9 lines)
@ -0,0 +1,9 @@
|
||||
ALTER TABLE diva_profile
|
||||
DROP cnp_cid,
|
||||
DROP cnp_val,
|
||||
DROP cnp_rr,
|
||||
DROP cnp_sp,
|
||||
DROP btn_se_eqp,
|
||||
DROP sld_se_eqp,
|
||||
DROP chn_sld_se_eqp,
|
||||
DROP sldr_tch_se_eqp;
|
core/data/schema/versions/SBZV_5_upgrade.sql (new file, 9 lines)
@ -0,0 +1,9 @@
|
||||
ALTER TABLE diva_profile
|
||||
ADD cnp_cid INT NOT NULL DEFAULT -1,
|
||||
ADD cnp_val INT NOT NULL DEFAULT -1,
|
||||
ADD cnp_rr INT NOT NULL DEFAULT -1,
|
||||
ADD cnp_sp VARCHAR(255) NOT NULL DEFAULT "",
|
||||
ADD btn_se_eqp INT NOT NULL DEFAULT -1,
|
||||
ADD sld_se_eqp INT NOT NULL DEFAULT -1,
|
||||
ADD chn_sld_se_eqp INT NOT NULL DEFAULT -1,
|
||||
ADD sldr_tch_se_eqp INT NOT NULL DEFAULT -1;
|
core/data/schema/versions/SDBT_3_rollback.sql (new file, 30 lines)
@ -0,0 +1,30 @@
|
||||
SET FOREIGN_KEY_CHECKS = 0;
|
||||
|
||||
ALTER TABLE chuni_score_playlog
|
||||
DROP COLUMN regionId,
|
||||
DROP COLUMN machineType;
|
||||
|
||||
ALTER TABLE chuni_static_events
|
||||
DROP COLUMN startDate;
|
||||
|
||||
ALTER TABLE chuni_profile_data
|
||||
DROP COLUMN rankUpChallengeResults;
|
||||
|
||||
ALTER TABLE chuni_static_login_bonus
|
||||
DROP FOREIGN KEY chuni_static_login_bonus_ibfk_1;
|
||||
|
||||
ALTER TABLE chuni_static_login_bonus_preset
|
||||
DROP PRIMARY KEY;
|
||||
|
||||
ALTER TABLE chuni_static_login_bonus_preset
|
||||
CHANGE COLUMN presetId id INT NOT NULL;
|
||||
ALTER TABLE chuni_static_login_bonus_preset
|
||||
ADD PRIMARY KEY(id);
|
||||
ALTER TABLE chuni_static_login_bonus_preset
|
||||
ADD CONSTRAINT chuni_static_login_bonus_preset_uk UNIQUE(id, version);
|
||||
|
||||
ALTER TABLE chuni_static_login_bonus
|
||||
ADD CONSTRAINT chuni_static_login_bonus_ibfk_1 FOREIGN KEY(presetId)
|
||||
REFERENCES chuni_static_login_bonus_preset(id) ON UPDATE CASCADE ON DELETE CASCADE;
|
||||
|
||||
SET FOREIGN_KEY_CHECKS = 1;
|
core/data/schema/versions/SDBT_4_upgrade.sql (new file, 29 lines)
@ -0,0 +1,29 @@
|
||||
SET FOREIGN_KEY_CHECKS = 0;
|
||||
|
||||
ALTER TABLE chuni_score_playlog
|
||||
ADD COLUMN regionId INT,
|
||||
ADD COLUMN machineType INT;
|
||||
|
||||
ALTER TABLE chuni_static_events
|
||||
ADD COLUMN startDate TIMESTAMP NOT NULL DEFAULT current_timestamp();
|
||||
|
||||
ALTER TABLE chuni_profile_data
|
||||
ADD COLUMN rankUpChallengeResults JSON;
|
||||
|
||||
ALTER TABLE chuni_static_login_bonus
|
||||
DROP FOREIGN KEY chuni_static_login_bonus_ibfk_1;
|
||||
|
||||
ALTER TABLE chuni_static_login_bonus_preset
|
||||
CHANGE COLUMN id presetId INT NOT NULL;
|
||||
ALTER TABLE chuni_static_login_bonus_preset
|
||||
DROP PRIMARY KEY;
|
||||
ALTER TABLE chuni_static_login_bonus_preset
|
||||
DROP INDEX chuni_static_login_bonus_preset_uk;
|
||||
ALTER TABLE chuni_static_login_bonus_preset
|
||||
ADD CONSTRAINT chuni_static_login_bonus_preset_pk PRIMARY KEY (presetId, version);
|
||||
|
||||
ALTER TABLE chuni_static_login_bonus
|
||||
ADD CONSTRAINT chuni_static_login_bonus_ibfk_1 FOREIGN KEY (presetId, version)
|
||||
REFERENCES chuni_static_login_bonus_preset(presetId, version) ON UPDATE CASCADE ON DELETE CASCADE;
|
||||
|
||||
SET FOREIGN_KEY_CHECKS = 1;
|
core/data/schema/versions/SDDT_4_rollback.sql (new file, 2 lines)
@ -0,0 +1,2 @@
|
||||
ALTER TABLE ongeki_static_events
|
||||
DROP COLUMN startDate;
|
core/data/schema/versions/SDDT_5_upgrade.sql (new file, 2 lines)
@ -0,0 +1,2 @@
|
||||
ALTER TABLE ongeki_static_events
|
||||
ADD COLUMN startDate TIMESTAMP NOT NULL DEFAULT current_timestamp();
|
core/data/schema/versions/SDEZ_4_rollback.sql (new file, 3 lines)
@ -0,0 +1,3 @@
|
||||
ALTER TABLE mai2_item_card
|
||||
CHANGE COLUMN startDate startDate TIMESTAMP DEFAULT "2018-01-01 00:00:00.0",
|
||||
CHANGE COLUMN endDate endDate TIMESTAMP DEFAULT "2038-01-01 00:00:00.0";
|
core/data/schema/versions/SDEZ_5_rollback.sql (new file, 78 lines)
@ -0,0 +1,78 @@
|
||||
DELETE FROM mai2_static_event WHERE version < 13;
|
||||
UPDATE mai2_static_event SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_static_music WHERE version < 13;
|
||||
UPDATE mai2_static_music SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_static_ticket WHERE version < 13;
|
||||
UPDATE mai2_static_ticket SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_static_cards WHERE version < 13;
|
||||
UPDATE mai2_static_cards SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_profile_detail WHERE version < 13;
|
||||
UPDATE mai2_profile_detail SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_profile_extend WHERE version < 13;
|
||||
UPDATE mai2_profile_extend SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_profile_option WHERE version < 13;
|
||||
UPDATE mai2_profile_option SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_profile_ghost WHERE version < 13;
|
||||
UPDATE mai2_profile_ghost SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_profile_rating WHERE version < 13;
|
||||
UPDATE mai2_profile_rating SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DROP TABLE maimai_score_best;
|
||||
DROP TABLE maimai_playlog;
|
||||
DROP TABLE maimai_profile_detail;
|
||||
DROP TABLE maimai_profile_option;
|
||||
DROP TABLE maimai_profile_web_option;
|
||||
DROP TABLE maimai_profile_grade_status;
|
||||
|
||||
ALTER TABLE mai2_item_character DROP COLUMN point;
|
||||
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN cardId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN cardTypeId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN charaId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN mapId int(11) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN characterId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN level int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN awakening int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN useCount int(11) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_charge MODIFY COLUMN chargeId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_charge MODIFY COLUMN stock int(11) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_favorite MODIFY COLUMN itemKind int(11) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN seasonId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN point int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN `rank` int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN rewardGet tinyint(1) NOT NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN userName varchar(8) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN itemId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN itemKind int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN stock int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN isValid tinyint(1) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN bonusId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN point int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN isCurrent tinyint(1) NOT NULL;
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN isComplete tinyint(1) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN mapId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN distance int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN isLock tinyint(1) NOT NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN isClear tinyint(1) NOT NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN isComplete tinyint(1) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN printDate timestamp DEFAULT current_timestamp() NOT NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN serialId varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN placeId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN clientId varchar(11) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN printerSerialId varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL;
|
core/data/schema/versions/SDEZ_5_upgrade.sql (new file, 3 lines)
@ -0,0 +1,3 @@
|
||||
ALTER TABLE mai2_item_card
|
||||
CHANGE COLUMN startDate startDate TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
CHANGE COLUMN endDate endDate TIMESTAMP NOT NULL;
|
core/data/schema/versions/SDEZ_6_rollback.sql (new file, 1 line)
@ -0,0 +1 @@
|
||||
DROP TABLE aime.mai2_profile_consec_logins;
|
core/data/schema/versions/SDEZ_6_upgrade.sql (new file, 62 lines)
@ -0,0 +1,62 @@
|
||||
UPDATE mai2_static_event SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_static_music SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_static_ticket SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_static_cards SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_profile_detail SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_profile_extend SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_profile_option SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_profile_ghost SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_profile_rating SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
ALTER TABLE mai2_item_character ADD point int(11) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN cardId int(11) NULL;
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN cardTypeId int(11) NULL;
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN charaId int(11) NULL;
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN mapId int(11) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN characterId int(11) NULL;
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN level int(11) NULL;
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN awakening int(11) NULL;
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN useCount int(11) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_charge MODIFY COLUMN chargeId int(11) NULL;
|
||||
ALTER TABLE mai2_item_charge MODIFY COLUMN stock int(11) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_favorite MODIFY COLUMN itemKind int(11) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN seasonId int(11) NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN point int(11) NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN `rank` int(11) NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN rewardGet tinyint(1) NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN userName varchar(8) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN itemId int(11) NULL;
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN itemKind int(11) NULL;
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN stock int(11) NULL;
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN isValid tinyint(1) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN bonusId int(11) NULL;
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN point int(11) NULL;
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN isCurrent tinyint(1) NULL;
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN isComplete tinyint(1) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN mapId int(11) NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN distance int(11) NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN isLock tinyint(1) NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN isClear tinyint(1) NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN isComplete tinyint(1) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN printDate timestamp DEFAULT current_timestamp() NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN serialId varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN placeId int(11) NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN clientId varchar(11) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN printerSerialId varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL;
|
core/data/schema/versions/SDEZ_7_upgrade.sql (new file, 9 lines)
@ -0,0 +1,9 @@
|
||||
CREATE TABLE `mai2_profile_consec_logins` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`user` int(11) NOT NULL,
|
||||
`version` int(11) NOT NULL,
|
||||
`logins` int(11) DEFAULT NULL,
|
||||
PRIMARY KEY (`id`),
|
||||
UNIQUE KEY `mai2_profile_consec_logins_uk` (`user`,`version`),
|
||||
CONSTRAINT `mai2_profile_consec_logins_ibfk_1` FOREIGN KEY (`user`) REFERENCES `aime_user` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
|
@ -130,7 +130,7 @@ class FE_Gate(FE_Base):
|
||||
if b"e" in request.args:
|
||||
try:
|
||||
err = int(request.args[b"e"][0].decode())
|
||||
except:
|
||||
except Exception:
|
||||
err = 0
|
||||
|
||||
else:
|
||||
@ -182,7 +182,7 @@ class FE_Gate(FE_Base):
|
||||
access_code: str = request.args[b"access_code"][0].decode()
|
||||
username: str = request.args[b"username"][0]
|
||||
email: str = request.args[b"email"][0].decode()
|
||||
passwd: str = request.args[b"passwd"][0]
|
||||
passwd: bytes = request.args[b"passwd"][0]
|
||||
|
||||
uid = self.data.card.get_user_id_from_card(access_code)
|
||||
if uid is None:
|
||||
@ -197,7 +197,7 @@ class FE_Gate(FE_Base):
|
||||
if result is None:
|
||||
return redirectTo(b"/gate?e=3", request)
|
||||
|
||||
if not self.data.user.check_password(uid, passwd.encode()):
|
||||
if not self.data.user.check_password(uid, passwd):
|
||||
return redirectTo(b"/gate", request)
|
||||
|
||||
return redirectTo(b"/user", request)
|
||||
@ -228,8 +228,21 @@ class FE_User(FE_Base):
|
||||
if usr_sesh.userId == 0:
|
||||
return redirectTo(b"/gate", request)
|
||||
|
||||
cards = self.data.card.get_user_cards(usr_sesh.userId)
|
||||
user = self.data.user.get_user(usr_sesh.userId)
|
||||
card_data = []
|
||||
for c in cards:
|
||||
if c['is_locked']:
|
||||
status = 'Locked'
|
||||
elif c['is_banned']:
|
||||
status = 'Banned'
|
||||
else:
|
||||
status = 'Active'
|
||||
|
||||
card_data.append({'access_code': c['access_code'], 'status': status})
|
||||
|
||||
return template.render(
|
||||
title=f"{self.core_config.server.name} | Account", sesh=vars(usr_sesh)
|
||||
title=f"{self.core_config.server.name} | Account", sesh=vars(usr_sesh), cards=card_data, username=user['username']
|
||||
).encode("utf-16")
|
||||
|
||||
|
||||
|
@ -1,4 +1,31 @@
|
||||
{% extends "core/frontend/index.jinja" %}
|
||||
{% block content %}
|
||||
<h1>testing</h1>
|
||||
<h1>Management for {{ username }}</h1>
|
||||
<h2>Cards <button class="btn btn-success" data-bs-toggle="modal" data-bs-target="#card_add">Add</button></h2>
|
||||
<ul>
|
||||
{% for c in cards %}
|
||||
<li>{{ c.access_code }}: {{ c.status }} <button class="btn-danger btn">Delete</button></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
<div class="modal fade" id="card_add" tabindex="-1" aria-labelledby="card_add_label" aria-hidden="true">
|
||||
<div class="modal-dialog">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<h1 class="modal-title fs-5" id="card_add_label">Add Card</h1>
|
||||
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
HOW TO:<br>
|
||||
Scan your card on any networked game, press the "View Access Code" button (varies by game), and enter the 20-digit code below.<br>
!!FOR AMUSEMENT IC CARDS: DO NOT ENTER THE CODE SHOWN ON THE BACK OF THE CARD ITSELF OR IT WILL NOT WORK!!
|
||||
<p /><label for="card_add_frm_access_code">Access Code: </label><input id="card_add_frm_access_code" maxlength="20" type="text" required>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-primary">Add</button>
|
||||
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endblock content %}
|
@ -4,7 +4,7 @@
|
||||
<div style="background: #333; color: #f9f9f9; width: 80%; height: 50px; line-height: 50px; padding-left: 10px; float: left;">
|
||||
<a href=/><button class="btn btn-primary">Home</button></a>
|
||||
{% for game in game_list %}
|
||||
<a href=game/{{ game.url }}><button class="btn btn-success">{{ game.name }}</button></a>
|
||||
<a href=/game/{{ game.url }}><button class="btn btn-success">{{ game.name }}</button></a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
|
@ -4,6 +4,7 @@ from logging.handlers import TimedRotatingFileHandler
|
||||
from twisted.web import resource
|
||||
from twisted.web.http import Request
|
||||
from datetime import datetime
|
||||
from Crypto.Cipher import Blowfish
|
||||
import pytz
|
||||
|
||||
from core import CoreConfig
|
||||
@ -33,8 +34,8 @@ class MuchaServlet:
|
||||
self.logger.addHandler(fileHandler)
|
||||
self.logger.addHandler(consoleHandler)
|
||||
|
||||
self.logger.setLevel(logging.INFO)
|
||||
coloredlogs.install(level=logging.INFO, logger=self.logger, fmt=log_fmt_str)
|
||||
self.logger.setLevel(cfg.mucha.loglevel)
|
||||
coloredlogs.install(level=cfg.mucha.loglevel, logger=self.logger, fmt=log_fmt_str)
|
||||
|
||||
all_titles = Utils.get_all_titles()
|
||||
|
||||
@ -56,17 +57,24 @@ class MuchaServlet:
|
||||
self.logger.error(
|
||||
f"Error processing mucha request {request.content.getvalue()}"
|
||||
)
|
||||
return b""
|
||||
return b"RESULTS=000"
|
||||
|
||||
req = MuchaAuthRequest(req_dict)
|
||||
self.logger.debug(f"Mucha request {vars(req)}")
|
||||
self.logger.info(f"Boardauth request from {client_ip} for {req.gameVer}")
|
||||
self.logger.debug(f"Mucha request {vars(req)}")
|
||||
|
||||
if req.gameCd not in self.mucha_registry:
|
||||
self.logger.warn(f"Unknown gameCd {req.gameCd}")
|
||||
return b""
|
||||
return b"RESULTS=000"
|
||||
|
||||
# TODO: Decrypt S/N
|
||||
b_key = b""
|
||||
for x in range(8):
|
||||
b_key += req.sendDate[(x - 1) & 7].encode()
|
||||
|
||||
cipher = Blowfish.new(b_key, Blowfish.MODE_ECB)
|
||||
sn_decrypt = cipher.decrypt(bytes.fromhex(req.serialNum))
|
||||
self.logger.debug(f"Decrypt SN to {sn_decrypt.hex()}")
|
||||
|
||||
resp = MuchaAuthResponse(
|
||||
f"{self.config.mucha.hostname}{':' + str(self.config.allnet.port) if self.config.server.is_develop else ''}"
|
||||
@ -84,22 +92,37 @@ class MuchaServlet:
|
||||
self.logger.error(
|
||||
f"Error processing mucha request {request.content.getvalue()}"
|
||||
)
|
||||
return b""
|
||||
return b"RESULTS=000"
|
||||
|
||||
req = MuchaUpdateRequest(req_dict)
|
||||
self.logger.debug(f"Mucha request {vars(req)}")
|
||||
self.logger.info(f"Updatecheck request from {client_ip} for {req.gameVer}")
|
||||
self.logger.debug(f"Mucha request {vars(req)}")
|
||||
|
||||
if req.gameCd not in self.mucha_registry:
|
||||
self.logger.warn(f"Unknown gameCd {req.gameCd}")
|
||||
return b""
|
||||
return b"RESULTS=000"
|
||||
|
||||
resp = MuchaUpdateResponseStub(req.gameVer)
|
||||
resp = MuchaUpdateResponse(req.gameVer, f"{self.config.mucha.hostname}{':' + str(self.config.allnet.port) if self.config.server.is_develop else ''}")
|
||||
|
||||
self.logger.debug(f"Mucha response {vars(resp)}")
|
||||
|
||||
return self.mucha_postprocess(vars(resp))
|
||||
|
||||
def handle_dlstate(self, request: Request, _: Dict) -> bytes:
|
||||
req_dict = self.mucha_preprocess(request.content.getvalue())
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
if req_dict is None:
|
||||
self.logger.error(
|
||||
f"Error processing mucha request {request.content.getvalue()}"
|
||||
)
|
||||
return b""
|
||||
|
||||
req = MuchaDownloadStateRequest(req_dict)
|
||||
self.logger.info(f"DownloadState request from {client_ip} for {req.gameCd} -> {req.updateVer}")
|
||||
self.logger.debug(f"request {vars(req)}")
|
||||
return b"RESULTS=001"
|
||||
|
||||
def mucha_preprocess(self, data: bytes) -> Optional[Dict]:
|
||||
try:
|
||||
ret: Dict[str, Any] = {}
|
||||
@ -111,7 +134,7 @@ class MuchaServlet:
|
||||
|
||||
return ret
|
||||
|
||||
except:
|
||||
except Exception:
|
||||
self.logger.error(f"Error processing mucha request {data}")
|
||||
return None
|
||||
|
||||
@ -123,7 +146,7 @@ class MuchaServlet:
|
||||
|
||||
return urlencode.encode()
|
||||
|
||||
except:
|
||||
except Exception:
|
||||
self.logger.error("Error processing mucha response")
|
||||
return None
|
||||
|
||||
@ -203,21 +226,56 @@ class MuchaUpdateRequest:
|
||||
class MuchaUpdateResponse:
|
||||
def __init__(self, game_ver: str, mucha_url: str) -> None:
|
||||
self.RESULTS = "001"
|
||||
self.EXE_VER = game_ver
|
||||
|
||||
self.UPDATE_VER_1 = game_ver
|
||||
self.UPDATE_URL_1 = f"https://{mucha_url}/updUrl1/"
|
||||
self.UPDATE_SIZE_1 = "0"
|
||||
self.UPDATE_CRC_1 = "0000000000000000"
|
||||
self.CHECK_URL_1 = f"https://{mucha_url}/checkUrl/"
|
||||
self.EXE_VER_1 = game_ver
|
||||
self.UPDATE_URL_1 = f"http://{mucha_url}/updUrl1/"
|
||||
self.UPDATE_SIZE_1 = "20"
|
||||
|
||||
self.CHECK_CRC_1 = "0000000000000000"
|
||||
self.CHECK_URL_1 = f"http://{mucha_url}/checkUrl/"
|
||||
self.CHECK_SIZE_1 = "20"
|
||||
|
||||
self.INFO_SIZE_1 = "0"
|
||||
self.COM_SIZE_1 = "0"
|
||||
self.COM_TIME_1 = "0"
|
||||
self.LAN_INFO_SIZE_1 = "0"
|
||||
|
||||
self.USER_ID = ""
|
||||
self.PASSWORD = ""
|
||||
|
||||
"""
|
||||
RESULTS
|
||||
EXE_VER
|
||||
|
||||
UPDATE_VER_%d
|
||||
UPDATE_URL_%d
|
||||
UPDATE_SIZE_%d
|
||||
|
||||
CHECK_CRC_%d
|
||||
CHECK_URL_%d
|
||||
CHECK_SIZE_%d
|
||||
|
||||
INFO_SIZE_1
|
||||
COM_SIZE_1
|
||||
COM_TIME_1
|
||||
LAN_INFO_SIZE_1
|
||||
|
||||
USER_ID
|
||||
PASSWORD
|
||||
"""
|
||||
class MuchaUpdateResponseStub:
|
||||
def __init__(self, game_ver: str) -> None:
|
||||
self.RESULTS = "001"
|
||||
self.UPDATE_VER_1 = game_ver
|
||||
|
||||
class MuchaDownloadStateRequest:
|
||||
def __init__(self, request: Dict) -> None:
|
||||
self.gameCd = request.get("gameCd", "")
|
||||
self.updateVer = request.get("updateVer", "")
|
||||
self.serialNum = request.get("serialNum", "")
|
||||
self.fileSize = request.get("fileSize", "")
|
||||
self.compFileSize = request.get("compFileSize", "")
|
||||
self.boardId = request.get("boardId", "")
|
||||
self.placeId = request.get("placeId", "")
|
||||
self.storeRouterIp = request.get("storeRouterIp", "")
|
||||
|
@ -84,7 +84,7 @@ class TitleServlet:
|
||||
request.setResponseCode(405)
|
||||
return b""
|
||||
|
||||
return index.render_GET(request, endpoints["version"], endpoints["endpoint"])
|
||||
return index.render_GET(request, int(endpoints["version"]), endpoints["endpoint"])
|
||||
|
||||
def render_POST(self, request: Request, endpoints: dict) -> bytes:
|
||||
code = endpoints["game"]
|
||||
|
@ -85,4 +85,7 @@ if __name__ == "__main__":
|
||||
elif args.action == "cleanup":
|
||||
data.delete_hanging_users()
|
||||
|
||||
elif args.action == "version":
|
||||
data.show_versions()
|
||||
|
||||
data.logger.info("Done")
|
||||
|
docker-compose.yml (new file, 57 lines)
@ -0,0 +1,57 @@
|
||||
version: "3.9"
|
||||
services:
|
||||
app:
|
||||
hostname: ma.app
|
||||
build: .
|
||||
volumes:
|
||||
- ./aime:/app/aime
|
||||
|
||||
environment:
|
||||
CFG_DEV: 1
|
||||
CFG_CORE_SERVER_HOSTNAME: 0.0.0.0
|
||||
CFG_CORE_DATABASE_HOST: ma.db
|
||||
CFG_CORE_MEMCACHED_HOSTNAME: ma.memcached
|
||||
CFG_CORE_AIMEDB_KEY: keyhere
|
||||
CFG_CHUNI_SERVER_LOGLEVEL: debug
|
||||
|
||||
ports:
|
||||
- "80:80"
|
||||
- "8443:8443"
|
||||
- "22345:22345"
|
||||
|
||||
- "8080:8080"
|
||||
- "8090:8090"
|
||||
|
||||
depends_on:
|
||||
db:
|
||||
condition: service_healthy
|
||||
|
||||
db:
|
||||
hostname: ma.db
|
||||
image: mysql:8.0.31-debian
|
||||
environment:
|
||||
MYSQL_DATABASE: aime
|
||||
MYSQL_USER: aime
|
||||
MYSQL_PASSWORD: aime
|
||||
MYSQL_ROOT_PASSWORD: AimeRootPassword
|
||||
healthcheck:
|
||||
test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"]
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
|
||||
memcached:
|
||||
hostname: ma.memcached
|
||||
image: memcached:1.6.17-bullseye
|
||||
|
||||
phpmyadmin:
|
||||
hostname: ma.phpmyadmin
|
||||
image: phpmyadmin:latest
|
||||
environment:
|
||||
PMA_HOSTS: ma.db
|
||||
PMA_USER: root
|
||||
PMA_PASSWORD: AimeRootPassword
|
||||
APACHE_PORT: 8080
|
||||
ports:
|
||||
- "8080:8080"
|
||||
|
@ -5,6 +5,7 @@
|
||||
- `allow_unregistered_serials`: Allows games that do not have registered keychips to connect and authenticate. Disable to restrict who can connect to your server. Recommended to disable for production setups. Default `True`
- `name`: Name for the server, used by some games in their default MOTDs. Default `ARTEMiS`
- `is_develop`: Flags that the server is a development instance without a proxy standing in front of it. Setting this to `False` tells the server not to listen for SSL (among other things), because the proxy should handle everything SSL-related. Default `True`
- `threading`: Flags that `reactor.run` should be called via the `Thread` standard library. May provide a speed boost, but removes the ability to kill the server via `Ctrl + C`. Default: `False` (see the sketch after this list)
- `log_dir`: Directory to store logs. The server MUST have read and write permissions to this directory or you will have issues. Default `logs`
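As a minimal sketch of what the `threading` option amounts to (the function name and flag plumbing here are assumptions, not the actual ARTEMiS startup code):

```python
# Sketch only: how a core/server "threading" flag could wrap reactor.run.
# Names are illustrative; this is not ARTEMiS's index.py.
from threading import Thread
from twisted.internet import reactor

def run_reactor(threading_enabled: bool) -> None:
    if threading_enabled:
        # Signal handlers can only be installed on the main thread, so they
        # are skipped here; this is why Ctrl + C no longer stops the server.
        Thread(target=reactor.run, kwargs={"installSignalHandlers": False}).start()
    else:
        reactor.run()
```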
|
||||
## Title
|
||||
- `loglevel`: Logging level for the title server. Default `info`
|
||||
|
@ -9,42 +9,44 @@ using the megaime database. Clean installations always create the latest databas
|
||||
# Table of content
|
||||
|
||||
- [Supported Games](#supported-games)
|
||||
- [Chunithm](#chunithm)
|
||||
- [CHUNITHM](#chunithm)
|
||||
- [crossbeats REV.](#crossbeats-rev)
|
||||
- [maimai DX](#maimai-dx)
|
||||
- [O.N.G.E.K.I.](#o-n-g-e-k-i)
|
||||
- [Card Maker](#card-maker)
|
||||
- [WACCA](#wacca)
|
||||
- [Sword Art Online Arcade](#sao)
|
||||
|
||||
|
||||
# Supported Games
|
||||
|
||||
Games listed below have been tested and confirmed working.
|
||||
|
||||
## Chunithm
|
||||
## CHUNITHM
|
||||
|
||||
### SDBT
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|--------------------|
|
||||
| 0 | Chunithm |
|
||||
| 1 | Chunithm+ |
|
||||
| 2 | Chunithm Air |
|
||||
| 3 | Chunithm Air + |
|
||||
| 4 | Chunithm Star |
|
||||
| 5 | Chunithm Star + |
|
||||
| 6 | Chunithm Amazon |
|
||||
| 7 | Chunithm Amazon + |
|
||||
| 8 | Chunithm Crystal |
|
||||
| 9 | Chunithm Crystal + |
|
||||
| 10 | Chunithm Paradise |
|
||||
|------------|-----------------------|
|
||||
| 0 | CHUNITHM |
|
||||
| 1 | CHUNITHM PLUS |
|
||||
| 2 | CHUNITHM AIR |
|
||||
| 3 | CHUNITHM AIR PLUS |
|
||||
| 4 | CHUNITHM STAR |
|
||||
| 5 | CHUNITHM STAR PLUS |
|
||||
| 6 | CHUNITHM AMAZON |
|
||||
| 7 | CHUNITHM AMAZON PLUS |
|
||||
| 8 | CHUNITHM CRYSTAL |
|
||||
| 9 | CHUNITHM CRYSTAL PLUS |
|
||||
| 10 | CHUNITHM PARADISE |
|
||||
|
||||
### SDHD/SDBT
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|-----------------|
|
||||
| 11 | Chunithm New!! |
|
||||
| 12 | Chunithm New!!+ |
|
||||
|------------|---------------------|
|
||||
| 11 | CHUNITHM NEW!! |
|
||||
| 12 | CHUNITHM NEW PLUS!! |
|
||||
| 13 | CHUNITHM SUN |
|
||||
|
||||
|
||||
### Importer
|
||||
@ -60,13 +62,33 @@ The importer for Chunithm will import: Events, Music, Charge Items and Avatar Ac
|
||||
### Database upgrade
|
||||
|
||||
Always make sure your database (tables) are up-to-date, to do so go to the `core/data/schema/versions` folder and see
|
||||
which version is the latest, f.e. `SDBT_3_upgrade.sql`. In order to upgrade to version 3 in this case you need to
|
||||
which version is the latest, f.e. `SDBT_4_upgrade.sql`. In order to upgrade to version 4 in this case you need to
|
||||
perform all previous updates as well:
|
||||
|
||||
```shell
|
||||
python dbutils.py --game SDBT upgrade
|
||||
```
|
||||
|
||||
### Online Battle
|
||||
|
||||
**Only matchmaking (with your imaginary friends) is supported! Online Battle does not (yet?) work!**

The first person to start an Online Battle (the host) creates a "matching room" with a given `roomId`; up to 3 other players can then join that room.
Unused slots are filled with CPUs once the matchmaking timer runs out, and the timer jumps back to 60 seconds every time a new member joins.
Sending the 4 matchmaking messages to all other users also works properly.
To use Online Battle, every user needs the same ICF, the same rom version and the same data version!
If a room is full, a new room is created when another user starts an Online Battle.
After a failed Online Battle the room is deleted. The host drives the timer countdown, so if the connection to the host fails the timer stops and the lobby can end up in a "frozen" state.
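Purely as an illustrative sketch of the room rules described above (4 slots, the timer resetting to 60 seconds on each join, CPU fill on timeout); the class and method names are assumptions, not ARTEMiS code:

```python
# Illustrative sketch of the matchmaking-room behaviour described above.
# Not taken from the ARTEMiS Chunithm handlers.
from dataclasses import dataclass, field
from typing import List

MATCHING_TIMEOUT_SECS = 60
MAX_PLAYERS = 4

@dataclass
class MatchingRoom:
    room_id: int
    members: List[str] = field(default_factory=list)
    seconds_left: int = MATCHING_TIMEOUT_SECS

    def join(self, user: str) -> bool:
        if len(self.members) >= MAX_PLAYERS:
            return False  # caller would create a new room instead
        self.members.append(user)
        self.seconds_left = MATCHING_TIMEOUT_SECS  # timer jumps back to 60s
        return True

    def fill_with_cpus(self) -> None:
        # Called when the countdown expires with open slots.
        while len(self.members) < MAX_PLAYERS:
            self.members.append("CPU")
```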
|
||||
|
||||
#### Information/Problems:
|
||||
|
||||
- Online Battle uses UDP hole punching and opens port 50201?
|
||||
- `reflectorUri` seems related to that?
|
||||
- Timer countdown should be handled globally and not by one user
|
||||
- Game can freeze or can crash if someone (especially the host) leaves the matchmaking
|
||||
|
||||
|
||||
## crossbeats REV.
|
||||
|
||||
### SDCA
|
||||
@ -105,28 +127,50 @@ Config file is located in `config/cxb.yaml`.
|
||||
|
||||
### SDEZ
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|-------------------------|
|
||||
| 0 | maimai DX |
|
||||
| 1 | maimai DX PLUS |
|
||||
| 2 | maimai DX Splash |
|
||||
| 3 | maimai DX Splash PLUS |
|
||||
| 4 | maimai DX Universe |
|
||||
| 5 | maimai DX Universe PLUS |
|
||||
| 6 | maimai DX Festival |
|
||||
| Game Code | Version ID | Version Name |
|
||||
|-----------|------------|-------------------------|
|
||||
|
||||
|
||||
For versions pre-dx
|
||||
| Game Code | Version ID | Version Name |
|
||||
|-----------|------------|-------------------------|
|
||||
| SBXL | 0 | maimai |
|
||||
| SBXL | 1 | maimai PLUS |
|
||||
| SBZF | 2 | maimai GreeN |
|
||||
| SBZF | 3 | maimai GreeN PLUS |
|
||||
| SDBM | 4 | maimai ORANGE |
|
||||
| SDBM | 5 | maimai ORANGE PLUS |
|
||||
| SDCQ | 6 | maimai PiNK |
|
||||
| SDCQ | 7 | maimai PiNK PLUS |
|
||||
| SDDK | 8 | maimai MURASAKI |
|
||||
| SDDK | 9 | maimai MURASAKI PLUS |
|
||||
| SDDZ | 10 | maimai MILK |
|
||||
| SDDZ | 11 | maimai MILK PLUS |
|
||||
| SDEY | 12 | maimai FiNALE |
|
||||
| SDEZ | 13 | maimai DX |
|
||||
| SDEZ | 14 | maimai DX PLUS |
|
||||
| SDEZ | 15 | maimai DX Splash |
|
||||
| SDEZ | 16 | maimai DX Splash PLUS |
|
||||
| SDEZ | 17 | maimai DX Universe |
|
||||
| SDEZ | 18 | maimai DX Universe PLUS |
|
||||
| SDEZ | 19 | maimai DX Festival |
|
||||
|
||||
### Importer
|
||||
|
||||
In order to use the importer locate your game installation folder and execute:
|
||||
|
||||
DX:
|
||||
```shell
|
||||
python read.py --series SDEZ --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
|
||||
python read.py --series <Game Code> --version <Version ID> --binfolder /path/to/StreamingAssets --optfolder /path/to/game/option/folder
|
||||
```
|
||||
Pre-DX:
|
||||
```shell
|
||||
python read.py --series <Game Code> --version <Version ID> --binfolder /path/to/data --optfolder /path/to/patch/data
|
||||
```
|
||||
|
||||
The importer for maimai DX will import Events, Music and Tickets.
|
||||
|
||||
**NOTE: It is required to use the importer because the game will
|
||||
crash without Events!**
|
||||
The importer for maimai Pre-DX will import Events and Music. Not all games have patch data. MILK through FiNALE use file encryption and need an AES key; that key is not provided by the developers. For games that use encryption, provide the key as a hex string with the `--extra` flag, e.g. `--extra 00112233445566778899AABBCCDDEEFF`.
|
||||
|
||||
**Important: It is required to use the importer because some games may not function properly or even crash without Events!**
|
||||
|
||||
### Database upgrade
|
||||
|
||||
@ -135,6 +179,7 @@ Always make sure your database (tables) are up-to-date, to do so go to the `core
|
||||
```shell
|
||||
python dbutils.py --game SDEZ upgrade
|
||||
```
|
||||
Pre-DX uses the same database as DX, so only upgrade using the SDEZ game code!
|
||||
|
||||
## Hatsune Miku Project Diva
|
||||
|
||||
@ -185,12 +230,12 @@ python dbutils.py --game SBZV upgrade
|
||||
|------------|----------------------------|
|
||||
| 0 | O.N.G.E.K.I. |
|
||||
| 1 | O.N.G.E.K.I. + |
|
||||
| 2 | O.N.G.E.K.I. Summer |
|
||||
| 3 | O.N.G.E.K.I. Summer + |
|
||||
| 4 | O.N.G.E.K.I. Red |
|
||||
| 5 | O.N.G.E.K.I. Red + |
|
||||
| 6 | O.N.G.E.K.I. Bright |
|
||||
| 7 | O.N.G.E.K.I. Bright Memory |
|
||||
| 2 | O.N.G.E.K.I. SUMMER |
|
||||
| 3 | O.N.G.E.K.I. SUMMER + |
|
||||
| 4 | O.N.G.E.K.I. R.E.D. |
|
||||
| 5 | O.N.G.E.K.I. R.E.D. + |
|
||||
| 6 | O.N.G.E.K.I. bright |
|
||||
| 7 | O.N.G.E.K.I. bright MEMORY |
|
||||
|
||||
|
||||
### Importer
|
||||
@ -231,21 +276,21 @@ python dbutils.py --game SDDT upgrade
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|-----------------|
|
||||
| 0 | Card Maker 1.34 |
|
||||
| 0 | Card Maker 1.30 |
|
||||
| 1 | Card Maker 1.35 |
|
||||
|
||||
|
||||
### Support status
|
||||
|
||||
* Card Maker 1.34:
|
||||
* Chunithm New!!: Yes
|
||||
* maimai DX Universe: Yes
|
||||
* O.N.G.E.K.I. Bright: Yes
|
||||
* Card Maker 1.30:
|
||||
* CHUNITHM NEW!!: Yes
|
||||
* maimai DX UNiVERSE: Yes
|
||||
* O.N.G.E.K.I. bright: Yes
|
||||
|
||||
* Card Maker 1.35:
|
||||
* Chunithm New!!+: Yes
|
||||
* maimai DX Universe PLUS: Yes
|
||||
* O.N.G.E.K.I. Bright Memory: Yes
|
||||
* CHUNITHM SUN: Yes (NEW PLUS!! up to A032)
|
||||
* maimai DX FESTiVAL: Yes (up to A035) (UNiVERSE PLUS up to A031)
|
||||
* O.N.G.E.K.I. bright MEMORY: Yes
|
||||
|
||||
|
||||
### Importer
|
||||
@ -263,19 +308,54 @@ python read.py --series SDED --version <version ID> --binfolder titles/cm/cm_dat
|
||||
python read.py --series SDDT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
|
||||
```
|
||||
|
||||
Also make sure to import all maimai and Chunithm data as well:
|
||||
Also make sure to import all maimai DX and CHUNITHM data as well:
|
||||
|
||||
```shell
|
||||
python read.py --series SDED --version <version ID> --binfolder /path/to/cardmaker/CardMaker_Data
|
||||
```
|
||||
|
||||
The importer for Card Maker will import all required Gachas (Banners) and cards (for maimai/Chunithm) and the hardcoded
|
||||
The importer for Card Maker will import all required Gachas (Banners) and cards (for maimai DX/CHUNITHM) and the hardcoded
|
||||
Cards for each Gacha (O.N.G.E.K.I. only).
|
||||
|
||||
**NOTE: Without executing the importer Card Maker WILL NOT work!**
|
||||
|
||||
|
||||
### O.N.G.E.K.I. Gachas
|
||||
### Config setup
|
||||
|
||||
Make sure to update your `config/cardmaker.yaml` with the correct version for each game. To find the current version required to run a specific game, go through the opt (Axxx) folders in descending order until you find all three of the following folders:
|
||||
|
||||
- `MU3`: O.N.G.E.K.I.
|
||||
- `MAI`: maimai DX
|
||||
- `CHU`: CHUNITHM
|
||||
|
||||
Inside each folder is a `DataConfig.xml` file, for example:
|
||||
|
||||
`MU3/DataConfig.xml`:
|
||||
```xml
|
||||
<cardMakerVersion>
|
||||
<major>1</major>
|
||||
<minor>35</minor>
|
||||
<release>3</release>
|
||||
</cardMakerVersion>
|
||||
```
|
||||
|
||||
Now update your `config/cardmaker.yaml` with the correct version number, for example:
|
||||
|
||||
```yaml
|
||||
version:
|
||||
1: # Card Maker 1.35
|
||||
ongeki: 1.35.03
|
||||
```
|
||||
|
||||
For now you also need to update your `config/ongeki.yaml` with the correct version number, for example:
|
||||
|
||||
```yaml
|
||||
version:
|
||||
7: # O.N.G.E.K.I. bright MEMORY
|
||||
card_maker: 1.35.03
|
||||
```
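
If you would rather script this lookup than open each `DataConfig.xml` by hand, a minimal sketch along these lines works; the XML layout is the one shown above, while the path handling and zero-padding are assumptions:

```python
import xml.etree.ElementTree as ET

def read_card_maker_version(data_config_path: str) -> str:
    """Turn <major>/<minor>/<release> from a DataConfig.xml into a string like '1.35.03'."""
    root = ET.parse(data_config_path).getroot()
    # the snippet above shows <cardMakerVersion> as the element itself; search for it if it is nested
    node = root if root.tag == "cardMakerVersion" else root.find(".//cardMakerVersion")
    major, minor, release = (int(node.findtext(tag)) for tag in ("major", "minor", "release"))
    return f"{major}.{minor:02d}.{release:02d}"

# e.g. read_card_maker_version("Axxx/MU3/DataConfig.xml") -> "1.35.03" (path is illustrative)
```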
|
||||
|
||||
### O.N.G.E.K.I.
|
||||
|
||||
Gacha "無料ガチャ" can only pull from the free cards with the following probabilities: 94%: R, 5% SR and 1% chance of
|
||||
getting an SSR card
|
||||
@ -288,20 +368,24 @@ and 3% chance of getting an SSR card
|
||||
All other (limited) gachas can pull from every card added to ongeki_static_cards but with the promoted cards
|
||||
(click on the green button under the banner) having a 10 times higher chance of being pulled.
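
As a rough illustration of the pull logic described above, here is a hypothetical sketch; the card pool and the promoted flag are made up for the example, only the 94/5/1 free-gacha split and the 10x promotion weight come from the text:

```python
import random

# hypothetical pool of (card_id, rarity, is_promoted); the real pool comes from ongeki_static_cards
CARDS = [
    (100001, "R", False),
    (100002, "R", False),
    (100003, "SR", False),
    (100004, "SSR", False),
    (100005, "SSR", True),  # the "promoted" card shown under the banner
]

FREE_GACHA_RATES = {"R": 94, "SR": 5, "SSR": 1}  # 94% R, 5% SR, 1% SSR


def pull_free_gacha(cards=CARDS):
    # pick a rarity by weight first, then a random card of that rarity
    rarity = random.choices(list(FREE_GACHA_RATES), weights=list(FREE_GACHA_RATES.values()))[0]
    return random.choice([c for c in cards if c[1] == rarity])


def pull_limited_gacha(cards=CARDS):
    # every card is eligible, but promoted cards are weighted 10 times higher
    weights = [10 if promoted else 1 for _, _, promoted in cards]
    return random.choices(cards, weights=weights)[0]
```

Calling `pull_limited_gacha()` repeatedly should surface the promoted SSR roughly ten times as often as any single non-promoted card.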
|
||||
|
||||
### Chunithm Gachas
|
||||
### CHUNITHM
|
||||
|
||||
All cards in Chunithm (basically just the characters) have the same rarity to it just pulls randomly from all cards
|
||||
All cards in CHUNITHM (basically just the characters) have the same rarity, so it just pulls randomly from all cards
|
||||
from a given gacha, but makes sure you cannot pull the same card twice within the same 5-card roll.
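
A minimal sketch of that no-duplicate behaviour, assuming the gacha's pool is just a flat list of card IDs:

```python
import random

def pull_chuni_gacha(card_ids, count=5):
    # uniform rarity: sampling without replacement yields `count` distinct cards per roll
    return random.sample(card_ids, k=min(count, len(card_ids)))
```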
|
||||
|
||||
### maimai DX
|
||||
|
||||
Printed maimai DX cards: Freedom (`cardTypeId=6`) or Gold Pass (`cardTypeId=4`) can now be selected during the login process. You can only have ONE Freedom and ONE Gold Pass active at a given time. The cards will expire after 15 days.
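
As a rough sketch of that 15-day window (the function and parameter names are illustrative, not the actual schema):

```python
from datetime import datetime, timedelta
from typing import Optional

def printed_card_is_active(printed_at: datetime, now: Optional[datetime] = None) -> bool:
    # a printed Freedom / Gold Pass stays selectable for 15 days after it was printed
    now = now or datetime.now()
    return now < printed_at + timedelta(days=15)
```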
|
||||
|
||||
Thanks GetzeAvenue for the `selectedCardList` rarity hint!
|
||||
|
||||
### Notes
|
||||
|
||||
Card Maker 1.34 will only load an O.N.G.E.K.I. Bright profile (1.30). Card Maker 1.35 will only load an O.N.G.E.K.I.
|
||||
Card Maker 1.30-1.34 will only load an O.N.G.E.K.I. Bright profile (1.30). Card Maker 1.35+ will only load an O.N.G.E.K.I.
|
||||
Bright Memory profile (1.35).
|
||||
The gachas inside the `ongeki.yaml` will make sure only the right gacha ids for the right CM version will be loaded.
|
||||
The gacha list inside `config/ongeki.yaml` makes sure only the right gacha IDs for the right CM version are loaded.
|
||||
Gacha IDs up to 1140 will be loaded for CM 1.34 and all gachas will be loaded for CM 1.35.
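
Expressed as a filter, the gating could look like this sketch (the helper name is an assumption; the real gating happens when the gacha list is loaded from the config):

```python
def gachas_for_cm(gacha_ids, cm_version: str):
    """CM 1.30-1.34 only loads gacha IDs up to 1140; CM 1.35 and later loads everything."""
    major, minor = (int(x) for x in cm_version.split(".")[:2])
    if (major, minor) >= (1, 35):
        return list(gacha_ids)
    return [g for g in gacha_ids if g <= 1140]
```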
|
||||
|
||||
**NOTE: There is currently no way to load/use the (printed) maimai DX cards!**
|
||||
|
||||
## WACCA
|
||||
|
||||
### SDFE
|
||||
@ -344,3 +428,89 @@ Always make sure your database (tables) are up-to-date, to do so go to the `core
|
||||
```shell
|
||||
python dbutils.py --game SDFE upgrade
|
||||
```
|
||||
|
||||
### VIP Rewards
|
||||
Below is a list of VIP rewards. Currently, VIP is not implemented, and thus these are not obtainable. These 23 rewards were distributed once per month for VIP users on the real network.
|
||||
|
||||
Plates:
|
||||
211004 リッチ
|
||||
211018 特盛えりざべす
|
||||
211025 イースター
|
||||
211026 特盛りりぃ
|
||||
311004 ファンシー
|
||||
311005 インカンテーション
|
||||
311014 夜明け
|
||||
311015 ネイビー
|
||||
311016 特盛るーん
|
||||
|
||||
Ring Colors:
|
||||
203002 Gold Rushイエロー
|
||||
203009 トロピカル
|
||||
303005 ネイチャー
|
||||
|
||||
Icons:
|
||||
202020 どらみんぐ
|
||||
202063 ユニコーン
|
||||
202086 ゴリラ
|
||||
302014 ローズ
|
||||
302015 ファラオ
|
||||
302045 肉球
|
||||
302046 WACCA
|
||||
302047 WACCA Lily
|
||||
302048 WACCA Reverse
|
||||
|
||||
Note Sound Effect:
|
||||
205002 テニス
|
||||
205008 シャワー
|
||||
305003 タンバリンMk-Ⅱ
|
||||
|
||||
## SAO
|
||||
|
||||
### SDEW
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|---------------|
|
||||
| 0 | SAO |
|
||||
|
||||
|
||||
### Importer
|
||||
|
||||
In order to use the importer, locate your game installation folder and execute:
|
||||
|
||||
```shell
|
||||
python read.py --series SDEW --version <version ID> --binfolder /path/to/game/extractedassets
|
||||
```
|
||||
|
||||
The importer for SAO will import all items, heroes, support skills and titles data.
|
||||
|
||||
### Config
|
||||
|
||||
Config file is located in `config/sao.yaml`.
|
||||
|
||||
| Option | Info |
|
||||
|--------------------|-----------------------------------------------------------------------------|
|
||||
| `hostname` | Changes the server listening address for Mucha |
|
||||
| `port`             | Changes the listening port                                                    |
|
||||
| `auto_register` | Allows the game to handle the automatic registration of new cards |
|
||||
|
||||
|
||||
### Database upgrade
|
||||
|
||||
Always make sure your database (tables) are up-to-date. To do so, go to the `core/data/schema/versions` folder and see which version is the latest, e.g. `SDEW_1_upgrade.sql`. In order to upgrade to version 3 in this case, you need to perform all previous updates as well:
|
||||
|
||||
```shell
|
||||
python dbutils.py --game SDEW upgrade
|
||||
```
|
||||
|
||||
### Notes
|
||||
- Defrag Match will crash while loading
|
||||
- Co-Op Online is not supported
|
||||
- Shop is not functional
|
||||
- Player title is currently static and cannot be changed in-game
|
||||
- QR Card Scanning currently only loads a static hero
|
||||
|
||||
### Credits for SAO support:
|
||||
|
||||
- Midorica - Limited Network Support
|
||||
- Dniel97 - Helping with network base
|
||||
- tungnotpunk - Source
|
11
entrypoint.sh
Normal file
11
entrypoint.sh
Normal file
@ -0,0 +1,11 @@
|
||||
#!/bin/bash
|
||||
|
||||
if [[ -z "${CFG_DEV}" ]]; then
|
||||
echo Production mode
|
||||
python3 index.py
|
||||
else
|
||||
echo Development mode
|
||||
python3 dbutils.py create
|
||||
nodemon -w aime --legacy-watch index.py
|
||||
fi
|
||||
|
@ -1,3 +1,13 @@
|
||||
server:
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
|
||||
version:
|
||||
0:
|
||||
ongeki: 1.30.01
|
||||
chuni: 2.00.00
|
||||
maimai: 1.20.00
|
||||
1:
|
||||
ongeki: 1.35.03
|
||||
chuni: 2.10.00
|
||||
maimai: 1.30.00
|
@ -15,6 +15,9 @@ version:
|
||||
12:
|
||||
rom: 2.05.00
|
||||
data: 2.05.00
|
||||
13:
|
||||
rom: 2.10.00
|
||||
data: 2.10.00
|
||||
|
||||
crypto:
|
||||
encrypted_only: False
|
@ -4,6 +4,7 @@ server:
|
||||
allow_unregistered_serials: True
|
||||
name: "ARTEMiS"
|
||||
is_develop: True
|
||||
threading: False
|
||||
log_dir: "logs"
|
||||
|
||||
title:
|
||||
|
@ -1,3 +1,14 @@
|
||||
server:
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
|
||||
deliver:
|
||||
enable: False
|
||||
udbdl_enable: False
|
||||
content_folder: ""
|
||||
|
||||
uploads:
|
||||
photos: False
|
||||
photos_dir: ""
|
||||
movies: False
|
||||
movies_dir: ""
|
||||
|
@ -29,3 +29,9 @@ gachas:
|
||||
- 1156
|
||||
- 1163
|
||||
- 1164
|
||||
|
||||
version:
|
||||
6:
|
||||
card_maker: 1.30.01
|
||||
7:
|
||||
card_maker: 1.35.03
|
||||
|
@ -2,8 +2,11 @@ server:
|
||||
hostname: "localhost"
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
port: 9000
|
||||
port_stun: 9001
|
||||
port_turn: 9002
|
||||
port_admission: 9003
|
||||
auto_register: True
|
||||
enable_matching: False
|
||||
stun_server_host: "stunserver.stunprotocol.org"
|
||||
stun_server_port: 3478
|
||||
|
||||
ports:
|
||||
game: 9000
|
||||
admission: 9001
|
||||
|
6
example_config/sao.yaml
Normal file
6
example_config/sao.yaml
Normal file
@ -0,0 +1,6 @@
|
||||
server:
|
||||
hostname: "localhost"
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
port: 9000
|
||||
auto_register: True
|
47
index.py
47
index.py
@ -11,7 +11,7 @@ from twisted.web import server, resource
|
||||
from twisted.internet import reactor, endpoints
|
||||
from twisted.web.http import Request
|
||||
from routes import Mapper
|
||||
|
||||
from threading import Thread
|
||||
|
||||
class HttpDispatcher(resource.Resource):
|
||||
def __init__(self, cfg: CoreConfig, config_dir: str):
|
||||
@ -42,7 +42,7 @@ class HttpDispatcher(resource.Resource):
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
|
||||
self.map_post.connect(
|
||||
self.map_get.connect(
|
||||
"allnet_ping",
|
||||
"/naomitest.html",
|
||||
controller="allnet",
|
||||
@ -63,6 +63,27 @@ class HttpDispatcher(resource.Resource):
|
||||
action="handle_dlorder",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_loaderstaterecorder",
|
||||
"/sys/servlet/LoaderStateRecorder",
|
||||
controller="allnet",
|
||||
action="handle_loaderstaterecorder",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_alive",
|
||||
"/sys/servlet/Alive",
|
||||
controller="allnet",
|
||||
action="handle_alive",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_get.connect(
|
||||
"allnet_alive",
|
||||
"/sys/servlet/Alive",
|
||||
controller="allnet",
|
||||
action="handle_alive",
|
||||
conditions=dict(method=["GET"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_billing",
|
||||
"/request",
|
||||
@ -92,6 +113,13 @@ class HttpDispatcher(resource.Resource):
|
||||
action="handle_updatecheck",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"mucha_dlstate",
|
||||
"/mucha/downloadstate.do",
|
||||
controller="mucha",
|
||||
action="handle_dlstate",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
|
||||
self.map_get.connect(
|
||||
"title_get",
|
||||
@ -111,7 +139,6 @@ class HttpDispatcher(resource.Resource):
|
||||
)
|
||||
|
||||
def render_GET(self, request: Request) -> bytes:
|
||||
self.logger.debug(request.uri)
|
||||
test = self.map_get.match(request.uri.decode())
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
@ -161,9 +188,16 @@ class HttpDispatcher(resource.Resource):
|
||||
|
||||
if type(ret) == str:
|
||||
return ret.encode()
|
||||
elif type(ret) == bytes:
|
||||
|
||||
elif type(ret) == bytes or type(ret) == tuple: # allow for bytes or tuple (data, response code) responses
|
||||
return ret
|
||||
|
||||
elif ret is None:
|
||||
self.logger.warn(f"None returned by controller for {request.uri.decode()} endpoint")
|
||||
return b""
|
||||
|
||||
else:
|
||||
self.logger.warn(f"Unknown data type returned by controller for {request.uri.decode()} endpoint")
|
||||
return b""
|
||||
|
||||
|
||||
@ -256,4 +290,7 @@ if __name__ == "__main__":
|
||||
server.Site(dispatcher)
|
||||
)
|
||||
|
||||
reactor.run() # type: ignore
|
||||
if cfg.server.threading:
|
||||
Thread(target=reactor.run, args=(False,)).start()
|
||||
else:
|
||||
reactor.run()
|
||||
|
26
readme.md
26
readme.md
@ -3,32 +3,36 @@ A network service emulator for games running SEGA'S ALL.NET service, and similar
|
||||
|
||||
# Supported games
|
||||
Games listed below have been tested and confirmed working. Only game versions older than the version currently active in arcades, or game versions that have not received a major update in over one year, are supported.
|
||||
+ Chunithm
|
||||
+ All versions up to New!! Plus
|
||||
|
||||
+ Crossbeats Rev
|
||||
+ CHUNITHM
|
||||
+ All versions up to SUN
|
||||
|
||||
+ crossbeats REV.
|
||||
+ All versions + omnimix
|
||||
|
||||
+ maimai DX
|
||||
+ All versions up to Festival
|
||||
+ All versions up to FESTiVAL
|
||||
|
||||
+ Hatsune Miku Arcade
|
||||
+ Hatsune Miku: Project DIVA Arcade
|
||||
+ All versions
|
||||
|
||||
+ Card Maker
|
||||
+ 1.34.xx
|
||||
+ 1.35.xx
|
||||
+ 1.30
|
||||
+ 1.35
|
||||
|
||||
+ Ongeki
|
||||
+ All versions up to Bright Memory
|
||||
+ O.N.G.E.K.I.
|
||||
+ All versions up to bright MEMORY
|
||||
|
||||
+ Wacca
|
||||
+ WACCA
|
||||
+ Lily R
|
||||
+ Reverse
|
||||
|
||||
+ Pokken
|
||||
+ POKKÉN TOURNAMENT
|
||||
+ Final Online
|
||||
|
||||
+ Sword Art Online Arcade (partial support)
|
||||
+ Final
|
||||
|
||||
## Requirements
|
||||
- python 3 (tested working with 3.9 and 3.10, other versions YMMV)
|
||||
- pip
|
||||
|
@ -16,3 +16,5 @@ Routes
|
||||
bcrypt
|
||||
jinja2
|
||||
protobuf
|
||||
autobahn
|
||||
pillow
|
||||
|
@ -7,4 +7,4 @@ index = ChuniServlet
|
||||
database = ChuniData
|
||||
reader = ChuniReader
|
||||
game_codes = [ChuniConstants.GAME_CODE, ChuniConstants.GAME_CODE_NEW]
|
||||
current_schema_version = 3
|
||||
current_schema_version = 4
|
||||
|
@ -4,7 +4,7 @@ from datetime import datetime, timedelta
|
||||
from time import strftime
|
||||
|
||||
import pytz
|
||||
from typing import Dict, Any
|
||||
from typing import Dict, Any, List
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.chuni.const import ChuniConstants
|
||||
@ -44,13 +44,15 @@ class ChuniBase:
|
||||
# check if a user already has some progress and if not add the
|
||||
# login bonus entry
|
||||
user_login_bonus = self.data.item.get_login_bonus(
|
||||
user_id, self.version, preset["id"]
|
||||
user_id, self.version, preset["presetId"]
|
||||
)
|
||||
if user_login_bonus is None:
|
||||
self.data.item.put_login_bonus(user_id, self.version, preset["id"])
|
||||
self.data.item.put_login_bonus(
|
||||
user_id, self.version, preset["presetId"]
|
||||
)
|
||||
# yeah i'm lazy
|
||||
user_login_bonus = self.data.item.get_login_bonus(
|
||||
user_id, self.version, preset["id"]
|
||||
user_id, self.version, preset["presetId"]
|
||||
)
|
||||
|
||||
# skip the login bonus entirely if its already finished
|
||||
@ -66,13 +68,13 @@ class ChuniBase:
|
||||
last_update_date = datetime.now()
|
||||
|
||||
all_login_boni = self.data.static.get_login_bonus(
|
||||
self.version, preset["id"]
|
||||
self.version, preset["presetId"]
|
||||
)
|
||||
|
||||
# skip the current bonus preset if no boni were found
|
||||
if all_login_boni is None or len(all_login_boni) < 1:
|
||||
self.logger.warn(
|
||||
f"No bonus entries found for bonus preset {preset['id']}"
|
||||
f"No bonus entries found for bonus preset {preset['presetId']}"
|
||||
)
|
||||
continue
|
||||
|
||||
@ -83,14 +85,14 @@ class ChuniBase:
|
||||
if bonus_count > max_needed_days:
|
||||
# assume that all login preset ids under 3000 needs to be
|
||||
# looped, like 30 and 40 are looped, 40 does not work?
|
||||
if preset["id"] < 3000:
|
||||
if preset["presetId"] < 3000:
|
||||
bonus_count = 1
|
||||
else:
|
||||
is_finished = True
|
||||
|
||||
# grab the item for the corresponding day
|
||||
login_item = self.data.static.get_login_bonus_by_required_days(
|
||||
self.version, preset["id"], bonus_count
|
||||
self.version, preset["presetId"], bonus_count
|
||||
)
|
||||
if login_item is not None:
|
||||
# now add the present to the database so the
|
||||
@ -108,7 +110,7 @@ class ChuniBase:
|
||||
self.data.item.put_login_bonus(
|
||||
user_id,
|
||||
self.version,
|
||||
preset["id"],
|
||||
preset["presetId"],
|
||||
bonusCount=bonus_count,
|
||||
lastUpdateDate=last_update_date,
|
||||
isWatched=False,
|
||||
@ -156,12 +158,18 @@ class ChuniBase:
|
||||
|
||||
event_list = []
|
||||
for evt_row in game_events:
|
||||
tmp = {}
|
||||
tmp["id"] = evt_row["eventId"]
|
||||
tmp["type"] = evt_row["type"]
|
||||
tmp["startDate"] = "2017-12-05 07:00:00.0"
|
||||
tmp["endDate"] = "2099-12-31 00:00:00.0"
|
||||
event_list.append(tmp)
|
||||
event_list.append(
|
||||
{
|
||||
"id": evt_row["eventId"],
|
||||
"type": evt_row["type"],
|
||||
# actually use the startDate from the import so it
|
||||
# properly shows all the events when new ones are imported
|
||||
"startDate": datetime.strftime(
|
||||
evt_row["startDate"], "%Y-%m-%d %H:%M:%S"
|
||||
),
|
||||
"endDate": "2099-12-31 00:00:00",
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
"type": data["type"],
|
||||
@ -228,29 +236,36 @@ class ChuniBase:
|
||||
def handle_get_user_character_api_request(self, data: Dict) -> Dict:
|
||||
characters = self.data.item.get_characters(data["userId"])
|
||||
if characters is None:
|
||||
return {}
|
||||
next_idx = -1
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"nextIndex": -1,
|
||||
"userCharacterList": [],
|
||||
}
|
||||
|
||||
characterList = []
|
||||
for x in range(int(data["nextIndex"]), len(characters)):
|
||||
character_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
for x in range(next_idx, len(characters)):
|
||||
tmp = characters[x]._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
characterList.append(tmp)
|
||||
character_list.append(tmp)
|
||||
|
||||
if len(characterList) >= int(data["maxCount"]):
|
||||
if len(character_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(characterList) >= int(data["maxCount"]) and len(characters) > int(
|
||||
data["maxCount"]
|
||||
) + int(data["nextIndex"]):
|
||||
next_idx = int(data["maxCount"]) + int(data["nextIndex"]) + 1
|
||||
if len(characters) >= next_idx + max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = -1
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(characterList),
|
||||
"length": len(character_list),
|
||||
"nextIndex": next_idx,
|
||||
"userCharacterList": characterList,
|
||||
"userCharacterList": character_list,
|
||||
}
|
||||
|
||||
def handle_get_user_charge_api_request(self, data: Dict) -> Dict:
|
||||
@ -292,8 +307,8 @@ class ChuniBase:
|
||||
if len(user_course_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(user_course_list) >= max_ct:
|
||||
next_idx = next_idx + max_ct
|
||||
if len(user_course_list) >= next_idx + max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = -1
|
||||
|
||||
@ -347,12 +362,23 @@ class ChuniBase:
|
||||
}
|
||||
|
||||
def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
|
||||
user_fav_item_list = []
|
||||
|
||||
# still needs to be implemented on WebUI
|
||||
# 1: Music, 3: Character
|
||||
fav_list = self.data.item.get_all_favorites(
|
||||
data["userId"], self.version, fav_kind=int(data["kind"])
|
||||
)
|
||||
if fav_list is not None:
|
||||
for fav in fav_list:
|
||||
user_fav_item_list.append({"id": fav["favId"]})
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"length": len(user_fav_item_list),
|
||||
"kind": data["kind"],
|
||||
"nextIndex": -1,
|
||||
"userFavoriteItemList": [],
|
||||
"userFavoriteItemList": user_fav_item_list,
|
||||
}
|
||||
|
||||
def handle_get_user_favorite_music_api_request(self, data: Dict) -> Dict:
|
||||
@ -375,7 +401,7 @@ class ChuniBase:
|
||||
"userItemList": [],
|
||||
}
|
||||
|
||||
items: list[Dict[str, Any]] = []
|
||||
items: List[Dict[str, Any]] = []
|
||||
for i in range(next_idx, len(user_item_list)):
|
||||
tmp = user_item_list[i]._asdict()
|
||||
tmp.pop("user")
|
||||
@ -387,13 +413,13 @@ class ChuniBase:
|
||||
xout = kind * 10000000000 + next_idx + len(items)
|
||||
|
||||
if len(items) < int(data["maxCount"]):
|
||||
nextIndex = 0
|
||||
next_idx = 0
|
||||
else:
|
||||
nextIndex = xout
|
||||
next_idx = xout
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"nextIndex": nextIndex,
|
||||
"nextIndex": next_idx,
|
||||
"itemKind": kind,
|
||||
"length": len(items),
|
||||
"userItemList": items,
|
||||
@ -452,6 +478,7 @@ class ChuniBase:
|
||||
"nextIndex": -1,
|
||||
"userMusicList": [], # 240
|
||||
}
|
||||
|
||||
song_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
@ -474,10 +501,10 @@ class ChuniBase:
|
||||
if len(song_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(song_list) >= max_ct:
|
||||
if len(song_list) >= next_idx + max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = 0
|
||||
next_idx = -1
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
@ -617,18 +644,21 @@ class ChuniBase:
|
||||
upsert["userData"][0]["userName"] = self.read_wtf8(
|
||||
upsert["userData"][0]["userName"]
|
||||
)
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
self.data.profile.put_profile_data(
|
||||
user_id, self.version, upsert["userData"][0]
|
||||
)
|
||||
|
||||
if "userDataEx" in upsert:
|
||||
self.data.profile.put_profile_data_ex(
|
||||
user_id, self.version, upsert["userDataEx"][0]
|
||||
)
|
||||
|
||||
if "userGameOption" in upsert:
|
||||
self.data.profile.put_profile_option(user_id, upsert["userGameOption"][0])
|
||||
|
||||
if "userGameOptionEx" in upsert:
|
||||
self.data.profile.put_profile_option_ex(
|
||||
user_id, upsert["userGameOptionEx"][0]
|
||||
@ -672,6 +702,10 @@ class ChuniBase:
|
||||
|
||||
if "userPlaylogList" in upsert:
|
||||
for playlog in upsert["userPlaylogList"]:
|
||||
# convert the player names to utf-8
|
||||
playlog["playedUserName1"] = self.read_wtf8(playlog["playedUserName1"])
|
||||
playlog["playedUserName2"] = self.read_wtf8(playlog["playedUserName2"])
|
||||
playlog["playedUserName3"] = self.read_wtf8(playlog["playedUserName3"])
|
||||
self.data.score.put_playlog(user_id, playlog)
|
||||
|
||||
if "userTeamPoint" in upsert:
|
||||
|
@ -17,21 +17,23 @@ class ChuniConstants:
|
||||
VER_CHUNITHM_PARADISE = 10
|
||||
VER_CHUNITHM_NEW = 11
|
||||
VER_CHUNITHM_NEW_PLUS = 12
|
||||
VER_CHUNITHM_SUN = 13
|
||||
|
||||
VERSION_NAMES = [
|
||||
"Chunithm",
|
||||
"Chunithm+",
|
||||
"Chunithm Air",
|
||||
"Chunithm Air+",
|
||||
"Chunithm Star",
|
||||
"Chunithm Star+",
|
||||
"Chunithm Amazon",
|
||||
"Chunithm Amazon+",
|
||||
"Chunithm Crystal",
|
||||
"Chunithm Crystal+",
|
||||
"Chunithm Paradise",
|
||||
"Chunithm New!!",
|
||||
"Chunithm New!!+",
|
||||
"CHUNITHM",
|
||||
"CHUNITHM PLUS",
|
||||
"CHUNITHM AIR",
|
||||
"CHUNITHM AIR PLUS",
|
||||
"CHUNITHM STAR",
|
||||
"CHUNITHM STAR PLUS",
|
||||
"CHUNITHM AMAZON",
|
||||
"CHUNITHM AMAZON PLUS",
|
||||
"CHUNITHM CRYSTAL",
|
||||
"CHUNITHM CRYSTAL PLUS",
|
||||
"CHUNITHM PARADISE",
|
||||
"CHUNITHM NEW!!",
|
||||
"CHUNITHM NEW PLUS!!",
|
||||
"CHUNITHM SUN"
|
||||
]
|
||||
|
||||
@classmethod
|
||||
|
@ -29,6 +29,7 @@ from titles.chuni.crystalplus import ChuniCrystalPlus
|
||||
from titles.chuni.paradise import ChuniParadise
|
||||
from titles.chuni.new import ChuniNew
|
||||
from titles.chuni.newplus import ChuniNewPlus
|
||||
from titles.chuni.sun import ChuniSun
|
||||
|
||||
|
||||
class ChuniServlet:
|
||||
@ -55,6 +56,7 @@ class ChuniServlet:
|
||||
ChuniParadise,
|
||||
ChuniNew,
|
||||
ChuniNewPlus,
|
||||
ChuniSun,
|
||||
]
|
||||
|
||||
self.logger = logging.getLogger("chuni")
|
||||
@ -96,15 +98,18 @@ class ChuniServlet:
|
||||
]
|
||||
for method in method_list:
|
||||
method_fixed = inflection.camelize(method)[6:-7]
|
||||
# number of iterations was changed to 70 in SUN
|
||||
iter_count = 70 if version >= ChuniConstants.VER_CHUNITHM_SUN else 44
|
||||
hash = PBKDF2(
|
||||
method_fixed,
|
||||
bytes.fromhex(keys[2]),
|
||||
128,
|
||||
count=44,
|
||||
count=iter_count,
|
||||
hmac_hash_module=SHA1,
|
||||
)
|
||||
|
||||
self.hash_table[version][hash.hex()] = method_fixed
|
||||
hashed_name = hash.hex()[:32] # truncate unused bytes like the game does
|
||||
self.hash_table[version][hashed_name] = method_fixed
|
||||
|
||||
self.logger.debug(
|
||||
f"Hashed v{version} method {method_fixed} with {bytes.fromhex(keys[2])} to get {hash.hex()}"
|
||||
@ -145,30 +150,32 @@ class ChuniServlet:
|
||||
|
||||
if version < 105: # 1.0
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM
|
||||
elif version >= 105 and version < 110: # Plus
|
||||
elif version >= 105 and version < 110: # PLUS
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_PLUS
|
||||
elif version >= 110 and version < 115: # Air
|
||||
elif version >= 110 and version < 115: # AIR
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_AIR
|
||||
elif version >= 115 and version < 120: # Air Plus
|
||||
elif version >= 115 and version < 120: # AIR PLUS
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_AIR_PLUS
|
||||
elif version >= 120 and version < 125: # Star
|
||||
elif version >= 120 and version < 125: # STAR
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_STAR
|
||||
elif version >= 125 and version < 130: # Star Plus
|
||||
elif version >= 125 and version < 130: # STAR PLUS
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_STAR_PLUS
|
||||
elif version >= 130 and version < 135: # Amazon
|
||||
elif version >= 130 and version < 135: # AMAZON
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_AMAZON
|
||||
elif version >= 135 and version < 140: # Amazon Plus
|
||||
elif version >= 135 and version < 140: # AMAZON PLUS
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_AMAZON_PLUS
|
||||
elif version >= 140 and version < 145: # Crystal
|
||||
elif version >= 140 and version < 145: # CRYSTAL
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_CRYSTAL
|
||||
elif version >= 145 and version < 150: # Crystal Plus
|
||||
elif version >= 145 and version < 150: # CRYSTAL PLUS
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_CRYSTAL_PLUS
|
||||
elif version >= 150 and version < 200: # Paradise
|
||||
elif version >= 150 and version < 200: # PARADISE
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_PARADISE
|
||||
elif version >= 200 and version < 205: # New
|
||||
elif version >= 200 and version < 205: # NEW!!
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_NEW
|
||||
elif version >= 205 and version < 210: # New Plus
|
||||
elif version >= 205 and version < 210: # NEW PLUS!!
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_NEW_PLUS
|
||||
elif version >= 210: # SUN
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_SUN
|
||||
|
||||
if all(c in string.hexdigits for c in endpoint) and len(endpoint) == 32:
|
||||
# If we get a 32 character long hex string, it's a hash and we're
|
||||
|
@ -23,41 +23,44 @@ class ChuniNew(ChuniBase):
|
||||
self.version = ChuniConstants.VER_CHUNITHM_NEW
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
# use UTC time and convert it to JST time by adding +9
|
||||
# matching therefore starts one hour before and lasts for 8 hours
|
||||
match_start = datetime.strftime(
|
||||
datetime.now() - timedelta(hours=10), self.date_time_format
|
||||
datetime.utcnow() + timedelta(hours=8), self.date_time_format
|
||||
)
|
||||
match_end = datetime.strftime(
|
||||
datetime.now() + timedelta(hours=10), self.date_time_format
|
||||
datetime.utcnow() + timedelta(hours=16), self.date_time_format
|
||||
)
|
||||
reboot_start = datetime.strftime(
|
||||
datetime.now() - timedelta(hours=11), self.date_time_format
|
||||
datetime.utcnow() + timedelta(hours=6), self.date_time_format
|
||||
)
|
||||
reboot_end = datetime.strftime(
|
||||
datetime.now() - timedelta(hours=10), self.date_time_format
|
||||
datetime.utcnow() + timedelta(hours=7), self.date_time_format
|
||||
)
|
||||
return {
|
||||
"gameSetting": {
|
||||
"isMaintenance": "false",
|
||||
"isMaintenance": False,
|
||||
"requestInterval": 10,
|
||||
"rebootStartTime": reboot_start,
|
||||
"rebootEndTime": reboot_end,
|
||||
"isBackgroundDistribute": "false",
|
||||
"isBackgroundDistribute": False,
|
||||
"maxCountCharacter": 300,
|
||||
"maxCountItem": 300,
|
||||
"maxCountMusic": 300,
|
||||
"matchStartTime": match_start,
|
||||
"matchEndTime": match_end,
|
||||
"matchTimeLimit": 99,
|
||||
"matchTimeLimit": 60,
|
||||
"matchErrorLimit": 9999,
|
||||
"romVersion": self.game_cfg.version.version(self.version)["rom"],
|
||||
"dataVersion": self.game_cfg.version.version(self.version)["data"],
|
||||
"matchingUri": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/200/ChuniServlet/",
|
||||
"matchingUriX": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/200/ChuniServlet/",
|
||||
# might be really important for online battle to connect the cabs via UDP port 50201
|
||||
"udpHolePunchUri": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/200/ChuniServlet/",
|
||||
"reflectorUri": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/200/ChuniServlet/",
|
||||
},
|
||||
"isDumpUpload": "false",
|
||||
"isAou": "false",
|
||||
"isDumpUpload": False,
|
||||
"isAou": False,
|
||||
}
|
||||
|
||||
def handle_remove_token_api_request(self, data: Dict) -> Dict:
|
||||
@ -468,3 +471,162 @@ class ChuniNew(ChuniBase):
|
||||
self.data.item.put_user_print_state(user_id, id=order_id, hasCompleted=True)
|
||||
|
||||
return {"returnCode": "1", "apiName": "CMUpsertUserPrintCancelApi"}
|
||||
|
||||
def handle_ping_request(self, data: Dict) -> Dict:
|
||||
# matchmaking ping request
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_begin_matching_api_request(self, data: Dict) -> Dict:
|
||||
room_id = 1
|
||||
# check if there is a free matching room
|
||||
matching_room = self.data.item.get_oldest_free_matching(self.version)
|
||||
|
||||
if matching_room is None:
|
||||
# grab the latest roomId and add 1 for the new room
|
||||
newest_matching = self.data.item.get_newest_matching(self.version)
|
||||
if newest_matching is not None:
|
||||
room_id = newest_matching["roomId"] + 1
|
||||
|
||||
# fix userName WTF8
|
||||
new_member = data["matchingMemberInfo"]
|
||||
new_member["userName"] = self.read_wtf8(new_member["userName"])
|
||||
|
||||
# create the new room with room_id and the current user id (host)
|
||||
# user id is required for the countdown later on
|
||||
self.data.item.put_matching(
|
||||
self.version, room_id, [new_member], user_id=new_member["userId"]
|
||||
)
|
||||
|
||||
# get the newly created matching room
|
||||
matching_room = self.data.item.get_matching(self.version, room_id)
|
||||
else:
|
||||
# a room already exists, so just add the new member to it
|
||||
matching_member_list = matching_room["matchingMemberInfoList"]
|
||||
# fix userName WTF8
|
||||
new_member = data["matchingMemberInfo"]
|
||||
new_member["userName"] = self.read_wtf8(new_member["userName"])
|
||||
matching_member_list.append(new_member)
|
||||
|
||||
# add the updated room to the database, make sure to set isFull correctly!
|
||||
self.data.item.put_matching(
|
||||
self.version,
|
||||
matching_room["roomId"],
|
||||
matching_member_list,
|
||||
user_id=matching_room["user"],
|
||||
is_full=True if len(matching_member_list) >= 4 else False,
|
||||
)
|
||||
|
||||
matching_wait = {
|
||||
"isFinish": False,
|
||||
"restMSec": matching_room["restMSec"], # in sec
|
||||
"pollingInterval": 1, # in sec
|
||||
"matchingMemberInfoList": matching_room["matchingMemberInfoList"],
|
||||
}
|
||||
|
||||
return {"roomId": 1, "matchingWaitState": matching_wait}
|
||||
|
||||
def handle_end_matching_api_request(self, data: Dict) -> Dict:
|
||||
matching_room = self.data.item.get_matching(self.version, data["roomId"])
|
||||
members = matching_room["matchingMemberInfoList"]
|
||||
|
||||
# only set the host user to role 1 every other to 0?
|
||||
role_list = [
|
||||
{"role": 1} if m["userId"] == matching_room["user"] else {"role": 0}
|
||||
for m in members
|
||||
]
|
||||
|
||||
self.data.item.put_matching(
|
||||
self.version,
|
||||
matching_room["roomId"],
|
||||
members,
|
||||
user_id=matching_room["user"],
|
||||
rest_sec=0, # make sure to always set 0
|
||||
is_full=True, # and full, so no one can join
|
||||
)
|
||||
|
||||
return {
|
||||
"matchingResult": 1, # needs to be 1 for successful matching
|
||||
"matchingMemberInfoList": members,
|
||||
# no idea, maybe to differentiate between CPUs and real players?
|
||||
"matchingMemberRoleList": role_list,
|
||||
# TCP/UDP connection?
|
||||
"reflectorUri": f"{self.core_cfg.title.hostname}",
|
||||
}
|
||||
|
||||
def handle_remove_matching_member_api_request(self, data: Dict) -> Dict:
|
||||
# get all matching rooms, because Chuni only returns the userId
|
||||
# not the actual roomId
|
||||
matching_rooms = self.data.item.get_all_matchings(self.version)
|
||||
if matching_rooms is None:
|
||||
return {"returnCode": "1"}
|
||||
|
||||
for room in matching_rooms:
|
||||
old_members = room["matchingMemberInfoList"]
|
||||
new_members = [m for m in old_members if m["userId"] != data["userId"]]
|
||||
|
||||
# if nothing changed go to the next room
|
||||
if len(old_members) == len(new_members):
|
||||
continue
|
||||
|
||||
# if the last user got removed, delete the matching room
|
||||
if len(new_members) <= 0:
|
||||
self.data.item.delete_matching(self.version, room["roomId"])
|
||||
else:
|
||||
# remove the user from the room
|
||||
self.data.item.put_matching(
|
||||
self.version,
|
||||
room["roomId"],
|
||||
new_members,
|
||||
user_id=room["user"],
|
||||
rest_sec=room["restMSec"],
|
||||
)
|
||||
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_get_matching_state_api_request(self, data: Dict) -> Dict:
|
||||
polling_interval = 1
|
||||
# get the current active room
|
||||
matching_room = self.data.item.get_matching(self.version, data["roomId"])
|
||||
members = matching_room["matchingMemberInfoList"]
|
||||
rest_sec = matching_room["restMSec"]
|
||||
|
||||
# grab the current member
|
||||
current_member = data["matchingMemberInfo"]
|
||||
|
||||
# only the host user can decrease the countdown
|
||||
if matching_room["user"] == int(current_member["userId"]):
|
||||
# cap the restMSec to 0
|
||||
if rest_sec > 0:
|
||||
rest_sec -= polling_interval
|
||||
else:
|
||||
rest_sec = 0
|
||||
|
||||
# update the members in order to receive messages
|
||||
for i, member in enumerate(members):
|
||||
if member["userId"] == current_member["userId"]:
|
||||
# replace the old user data with the current user data,
|
||||
# also parse WTF-8 everytime
|
||||
current_member["userName"] = self.read_wtf8(current_member["userName"])
|
||||
members[i] = current_member
|
||||
|
||||
self.data.item.put_matching(
|
||||
self.version,
|
||||
data["roomId"],
|
||||
members,
|
||||
rest_sec=rest_sec,
|
||||
user_id=matching_room["user"],
|
||||
)
|
||||
|
||||
# only add the other members to the list
|
||||
diff_members = [m for m in members if m["userId"] != current_member["userId"]]
|
||||
|
||||
matching_wait = {
|
||||
# makes no difference? Always use False?
|
||||
"isFinish": True if rest_sec == 0 else False,
|
||||
"restMSec": rest_sec,
|
||||
"pollingInterval": polling_interval,
|
||||
# the current user needs to be the first one?
|
||||
"matchingMemberInfoList": [current_member] + diff_members,
|
||||
}
|
||||
|
||||
return {"matchingWaitState": matching_wait}
|
||||
|
@ -36,6 +36,6 @@ class ChuniNewPlus(ChuniNew):
|
||||
def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
user_data = super().handle_cm_get_user_preview_api_request(data)
|
||||
|
||||
# hardcode lastDataVersion for CardMaker 1.35
|
||||
# hardcode lastDataVersion for CardMaker 1.35 A028
|
||||
user_data["lastDataVersion"] = "2.05.00"
|
||||
return user_data
|
||||
|
@ -1,5 +1,12 @@
|
||||
from typing import Dict, List, Optional
|
||||
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
|
||||
from sqlalchemy import (
|
||||
Table,
|
||||
Column,
|
||||
UniqueConstraint,
|
||||
PrimaryKeyConstraint,
|
||||
and_,
|
||||
delete,
|
||||
)
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
@ -203,8 +210,141 @@ login_bonus = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
favorite = Table(
|
||||
"chuni_item_favorite",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"user",
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("favId", Integer, nullable=False),
|
||||
Column("favKind", Integer, nullable=False, server_default="1"),
|
||||
UniqueConstraint("version", "user", "favId", name="chuni_item_favorite_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
matching = Table(
|
||||
"chuni_item_matching",
|
||||
metadata,
|
||||
Column("roomId", Integer, nullable=False),
|
||||
Column(
|
||||
"user",
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("restMSec", Integer, nullable=False, server_default="60"),
|
||||
Column("isFull", Boolean, nullable=False, server_default="0"),
|
||||
PrimaryKeyConstraint("roomId", "version", name="chuni_item_matching_pk"),
|
||||
Column("matchingMemberInfoList", JSON, nullable=False),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
|
||||
class ChuniItemData(BaseData):
|
||||
def get_oldest_free_matching(self, version: int) -> Optional[Row]:
|
||||
sql = matching.select(
|
||||
and_(
|
||||
matching.c.version == version,
|
||||
matching.c.isFull == False
|
||||
)
|
||||
).order_by(matching.c.roomId.asc())
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def get_newest_matching(self, version: int) -> Optional[Row]:
|
||||
sql = matching.select(
|
||||
and_(
|
||||
matching.c.version == version
|
||||
)
|
||||
).order_by(matching.c.roomId.desc())
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def get_all_matchings(self, version: int) -> Optional[List[Row]]:
|
||||
sql = matching.select(
|
||||
and_(
|
||||
matching.c.version == version
|
||||
)
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def get_matching(self, version: int, room_id: int) -> Optional[Row]:
|
||||
sql = matching.select(
|
||||
and_(matching.c.version == version, matching.c.roomId == room_id)
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_matching(
|
||||
self,
|
||||
version: int,
|
||||
room_id: int,
|
||||
matching_member_info_list: List,
|
||||
user_id: int = None,
|
||||
rest_sec: int = 60,
|
||||
is_full: bool = False
|
||||
) -> Optional[int]:
|
||||
sql = insert(matching).values(
|
||||
roomId=room_id,
|
||||
version=version,
|
||||
restMSec=rest_sec,
|
||||
user=user_id,
|
||||
isFull=is_full,
|
||||
matchingMemberInfoList=matching_member_info_list,
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
restMSec=rest_sec, matchingMemberInfoList=matching_member_info_list
|
||||
)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def delete_matching(self, version: int, room_id: int):
|
||||
sql = delete(matching).where(
|
||||
and_(matching.c.roomId == room_id, matching.c.version == version)
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_all_favorites(
|
||||
self, user_id: int, version: int, fav_kind: int = 1
|
||||
) -> Optional[List[Row]]:
|
||||
sql = favorite.select(
|
||||
and_(
|
||||
favorite.c.version == version,
|
||||
favorite.c.user == user_id,
|
||||
favorite.c.favKind == fav_kind,
|
||||
)
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def put_login_bonus(
|
||||
self, user_id: int, version: int, preset_id: int, **login_bonus_data
|
||||
) -> Optional[int]:
|
||||
|
@ -89,8 +89,6 @@ profile = Table(
|
||||
Integer,
|
||||
ForeignKey("chuni_profile_team.id", ondelete="SET NULL", onupdate="SET NULL"),
|
||||
),
|
||||
Column("avatarBack", Integer, server_default="0"),
|
||||
Column("avatarFace", Integer, server_default="0"),
|
||||
Column("eliteRankPoint", Integer, server_default="0"),
|
||||
Column("stockedGridCount", Integer, server_default="0"),
|
||||
Column("netBattleLoseCount", Integer, server_default="0"),
|
||||
@ -98,10 +96,8 @@ profile = Table(
|
||||
Column("netBattle4thCount", Integer, server_default="0"),
|
||||
Column("overPowerRate", Integer, server_default="0"),
|
||||
Column("battleRewardStatus", Integer, server_default="0"),
|
||||
Column("avatarPoint", Integer, server_default="0"),
|
||||
Column("netBattle1stCount", Integer, server_default="0"),
|
||||
Column("charaIllustId", Integer, server_default="0"),
|
||||
Column("avatarItem", Integer, server_default="0"),
|
||||
Column("userNameEx", String(8), server_default=""),
|
||||
Column("netBattleWinCount", Integer, server_default="0"),
|
||||
Column("netBattleCorrection", Integer, server_default="0"),
|
||||
@ -112,7 +108,6 @@ profile = Table(
|
||||
Column("netBattle3rdCount", Integer, server_default="0"),
|
||||
Column("netBattleConsecutiveWinCount", Integer, server_default="0"),
|
||||
Column("overPowerLowerRank", Integer, server_default="0"),
|
||||
Column("avatarWear", Integer, server_default="0"),
|
||||
Column("classEmblemBase", Integer, server_default="0"),
|
||||
Column("battleRankPoint", Integer, server_default="0"),
|
||||
Column("netBattle2ndCount", Integer, server_default="0"),
|
||||
@ -120,13 +115,19 @@ profile = Table(
|
||||
Column("skillId", Integer, server_default="0"),
|
||||
Column("lastCountryCode", String(5), server_default="JPN"),
|
||||
Column("isNetBattleHost", Boolean, server_default="0"),
|
||||
Column("avatarFront", Integer, server_default="0"),
|
||||
Column("avatarSkin", Integer, server_default="0"),
|
||||
Column("battleRewardCount", Integer, server_default="0"),
|
||||
Column("battleRewardIndex", Integer, server_default="0"),
|
||||
Column("netBattlePlayCount", Integer, server_default="0"),
|
||||
Column("exMapLoopCount", Integer, server_default="0"),
|
||||
Column("netBattleEndState", Integer, server_default="0"),
|
||||
Column("rankUpChallengeResults", JSON),
|
||||
Column("avatarBack", Integer, server_default="0"),
|
||||
Column("avatarFace", Integer, server_default="0"),
|
||||
Column("avatarPoint", Integer, server_default="0"),
|
||||
Column("avatarItem", Integer, server_default="0"),
|
||||
Column("avatarWear", Integer, server_default="0"),
|
||||
Column("avatarFront", Integer, server_default="0"),
|
||||
Column("avatarSkin", Integer, server_default="0"),
|
||||
Column("avatarHead", Integer, server_default="0"),
|
||||
UniqueConstraint("user", "version", name="chuni_profile_profile_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
@ -417,8 +418,8 @@ class ChuniProfileData(BaseData):
|
||||
sql = (
|
||||
select([profile, option])
|
||||
.join(option, profile.c.user == option.c.user)
|
||||
.filter(and_(profile.c.user == aime_id, profile.c.version == version))
|
||||
)
|
||||
.filter(and_(profile.c.user == aime_id, profile.c.version <= version))
|
||||
).order_by(profile.c.version.desc())
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
@ -429,9 +430,9 @@ class ChuniProfileData(BaseData):
|
||||
sql = select(profile).where(
|
||||
and_(
|
||||
profile.c.user == aime_id,
|
||||
profile.c.version == version,
|
||||
)
|
||||
profile.c.version <= version,
|
||||
)
|
||||
).order_by(profile.c.version.desc())
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
@ -461,9 +462,9 @@ class ChuniProfileData(BaseData):
|
||||
sql = select(profile_ex).where(
|
||||
and_(
|
||||
profile_ex.c.user == aime_id,
|
||||
profile_ex.c.version == version,
|
||||
)
|
||||
profile_ex.c.version <= version,
|
||||
)
|
||||
).order_by(profile_ex.c.version.desc())
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
|
@ -134,7 +134,9 @@ playlog = Table(
|
||||
Column("charaIllustId", Integer),
|
||||
Column("romVersion", String(255)),
|
||||
Column("judgeHeaven", Integer),
|
||||
mysql_charset="utf8mb4",
|
||||
Column("regionId", Integer),
|
||||
Column("machineType", Integer),
|
||||
mysql_charset="utf8mb4"
|
||||
)
|
||||
|
||||
|
||||
|
@ -1,11 +1,19 @@
|
||||
from typing import Dict, List, Optional
|
||||
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
|
||||
from sqlalchemy import (
|
||||
ForeignKeyConstraint,
|
||||
Table,
|
||||
Column,
|
||||
UniqueConstraint,
|
||||
PrimaryKeyConstraint,
|
||||
and_,
|
||||
)
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from datetime import datetime
|
||||
|
||||
from core.data.schema import BaseData, metadata
|
||||
|
||||
@ -17,6 +25,7 @@ events = Table(
|
||||
Column("eventId", Integer),
|
||||
Column("type", Integer),
|
||||
Column("name", String(255)),
|
||||
Column("startDate", TIMESTAMP, server_default=func.now()),
|
||||
Column("enabled", Boolean, server_default="1"),
|
||||
UniqueConstraint("version", "eventId", name="chuni_static_events_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
@ -125,11 +134,13 @@ gacha_cards = Table(
|
||||
login_bonus_preset = Table(
|
||||
"chuni_static_login_bonus_preset",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("presetId", Integer, nullable=False),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("presetName", String(255), nullable=False),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
UniqueConstraint("version", "id", name="chuni_static_login_bonus_preset_uk"),
|
||||
PrimaryKeyConstraint(
|
||||
"presetId", "version", name="chuni_static_login_bonus_preset_pk"
|
||||
),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
@ -138,15 +149,7 @@ login_bonus = Table(
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column(
|
||||
"presetId",
|
||||
ForeignKey(
|
||||
"chuni_static_login_bonus_preset.id",
|
||||
ondelete="cascade",
|
||||
onupdate="cascade",
|
||||
),
|
||||
nullable=False,
|
||||
),
|
||||
Column("presetId", Integer, nullable=False),
|
||||
Column("loginBonusId", Integer, nullable=False),
|
||||
Column("loginBonusName", String(255), nullable=False),
|
||||
Column("presentId", Integer, nullable=False),
|
||||
@ -157,6 +160,16 @@ login_bonus = Table(
|
||||
UniqueConstraint(
|
||||
"version", "presetId", "loginBonusId", name="chuni_static_login_bonus_uk"
|
||||
),
|
||||
ForeignKeyConstraint(
|
||||
["presetId", "version"],
|
||||
[
|
||||
"chuni_static_login_bonus_preset.presetId",
|
||||
"chuni_static_login_bonus_preset.version",
|
||||
],
|
||||
onupdate="CASCADE",
|
||||
ondelete="CASCADE",
|
||||
name="chuni_static_login_bonus_ibfk_1",
|
||||
),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
@ -236,7 +249,7 @@ class ChuniStaticData(BaseData):
|
||||
self, version: int, preset_id: int, preset_name: str, is_enabled: bool
|
||||
) -> Optional[int]:
|
||||
sql = insert(login_bonus_preset).values(
|
||||
id=preset_id,
|
||||
presetId=preset_id,
|
||||
version=version,
|
||||
presetName=preset_name,
|
||||
isEnabled=is_enabled,
|
||||
@ -416,6 +429,14 @@ class ChuniStaticData(BaseData):
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def get_music(self, version: int) -> Optional[List[Row]]:
|
||||
sql = music.select(music.c.version <= version)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def get_music_chart(
|
||||
self, version: int, song_id: int, chart_id: int
|
||||
) -> Optional[List[Row]]:
|
||||
|
37
titles/chuni/sun.py
Normal file
37
titles/chuni/sun.py
Normal file
@ -0,0 +1,37 @@
|
||||
from typing import Dict, Any
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.chuni.newplus import ChuniNewPlus
|
||||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.config import ChuniConfig
|
||||
|
||||
|
||||
class ChuniSun(ChuniNewPlus):
|
||||
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_SUN
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["romVersion"] = self.game_cfg.version.version(self.version)["rom"]
|
||||
ret["gameSetting"]["dataVersion"] = self.game_cfg.version.version(self.version)["data"]
|
||||
ret["gameSetting"][
|
||||
"matchingUri"
|
||||
] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/210/ChuniServlet/"
|
||||
ret["gameSetting"][
|
||||
"matchingUriX"
|
||||
] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/210/ChuniServlet/"
|
||||
ret["gameSetting"][
|
||||
"udpHolePunchUri"
|
||||
] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/210/ChuniServlet/"
|
||||
ret["gameSetting"][
|
||||
"reflectorUri"
|
||||
] = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/210/ChuniServlet/"
|
||||
return ret
|
||||
|
||||
def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
user_data = super().handle_cm_get_user_preview_api_request(data)
|
||||
|
||||
# hardcode lastDataVersion for CardMaker 1.35 A032
|
||||
user_data["lastDataVersion"] = "2.10.00"
|
||||
return user_data
|
@ -23,19 +23,40 @@ class CardMakerBase:
|
||||
self.game = CardMakerConstants.GAME_CODE
|
||||
self.version = CardMakerConstants.VER_CARD_MAKER
|
||||
|
||||
@staticmethod
|
||||
def _parse_int_ver(version: str) -> str:
|
||||
return version.replace(".", "")[:3]
|
||||
|
||||
def handle_get_game_connect_api_request(self, data: Dict) -> Dict:
|
||||
if self.core_cfg.server.is_develop:
|
||||
uri = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}"
|
||||
else:
|
||||
uri = f"http://{self.core_cfg.title.hostname}"
|
||||
|
||||
# CHUNITHM = 0, maimai = 1, ONGEKI = 2
|
||||
# grab the dict with all games version numbers from user config
|
||||
games_ver = self.game_cfg.version.version(self.version)
|
||||
|
||||
return {
|
||||
"length": 3,
|
||||
"gameConnectList": [
|
||||
{"modelKind": 0, "type": 1, "titleUri": f"{uri}/SDHD/200/"},
|
||||
{"modelKind": 1, "type": 1, "titleUri": f"{uri}/SDEZ/120/"},
|
||||
{"modelKind": 2, "type": 1, "titleUri": f"{uri}/SDDT/130/"},
|
||||
# CHUNITHM
|
||||
{
|
||||
"modelKind": 0,
|
||||
"type": 1,
|
||||
"titleUri": f"{uri}/SDHD/{self._parse_int_ver(games_ver['chuni'])}/",
|
||||
},
|
||||
# maimai DX
|
||||
{
|
||||
"modelKind": 1,
|
||||
"type": 1,
|
||||
"titleUri": f"{uri}/SDEZ/{self._parse_int_ver(games_ver['maimai'])}/",
|
||||
},
|
||||
# ONGEKI
|
||||
{
|
||||
"modelKind": 2,
|
||||
"type": 1,
|
||||
"titleUri": f"{uri}/SDDT/{self._parse_int_ver(games_ver['ongeki'])}/",
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
@ -47,12 +68,15 @@ class CardMakerBase:
|
||||
datetime.now() + timedelta(hours=4), self.date_time_format
|
||||
)
|
||||
|
||||
# grab the dict with all games version numbers from user config
|
||||
games_ver = self.game_cfg.version.version(self.version)
|
||||
|
||||
return {
|
||||
"gameSetting": {
|
||||
"dataVersion": "1.30.00",
|
||||
"ongekiCmVersion": "1.30.01",
|
||||
"chuniCmVersion": "2.00.00",
|
||||
"maimaiCmVersion": "1.20.00",
|
||||
"ongekiCmVersion": games_ver["ongeki"],
|
||||
"chuniCmVersion": games_ver["chuni"],
|
||||
"maimaiCmVersion": games_ver["maimai"],
|
||||
"requestInterval": 10,
|
||||
"rebootStartTime": reboot_start,
|
||||
"rebootEndTime": reboot_end,
|
||||
|
@ -1,8 +1,4 @@
|
||||
from datetime import date, datetime, timedelta
|
||||
from typing import Any, Dict, List
|
||||
import json
|
||||
import logging
|
||||
from enum import Enum
|
||||
from typing import Dict
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.data.cache import cached
|
||||
@ -16,23 +12,7 @@ class CardMaker135(CardMakerBase):
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = CardMakerConstants.VER_CARD_MAKER_135
|
||||
|
||||
def handle_get_game_connect_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_connect_api_request(data)
|
||||
if self.core_cfg.server.is_develop:
|
||||
uri = f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}"
|
||||
else:
|
||||
uri = f"http://{self.core_cfg.title.hostname}"
|
||||
|
||||
ret["gameConnectList"][0]["titleUri"] = f"{uri}/SDHD/205/"
|
||||
ret["gameConnectList"][1]["titleUri"] = f"{uri}/SDEZ/125/"
|
||||
ret["gameConnectList"][2]["titleUri"] = f"{uri}/SDDT/135/"
|
||||
|
||||
return ret
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.35.00"
|
||||
ret["gameSetting"]["ongekiCmVersion"] = "1.35.03"
|
||||
ret["gameSetting"]["chuniCmVersion"] = "2.05.00"
|
||||
ret["gameSetting"]["maimaiCmVersion"] = "1.25.00"
|
||||
return ret
|
||||
|
@ -1,3 +1,4 @@
|
||||
from typing import Dict
|
||||
from core.config import CoreConfig
|
||||
|
||||
|
||||
@ -20,6 +21,21 @@ class CardMakerServerConfig:
|
||||
)
|
||||
|
||||
|
||||
class CardMakerVersionConfig:
|
||||
def __init__(self, parent_config: "CardMakerConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
def version(self, version: int) -> Dict:
|
||||
"""
|
||||
in the form of:
|
||||
1: {"ongeki": 1.30.01, "chuni": 2.00.00, "maimai": 1.20.00}
|
||||
"""
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "cardmaker", "version", default={}
|
||||
)[version]
|
||||
|
||||
|
||||
class CardMakerConfig(dict):
|
||||
def __init__(self) -> None:
|
||||
self.server = CardMakerServerConfig(self)
|
||||
self.version = CardMakerVersionConfig(self)
|
||||
|
@ -6,7 +6,7 @@ class CardMakerConstants:
|
||||
VER_CARD_MAKER = 0
|
||||
VER_CARD_MAKER_135 = 1
|
||||
|
||||
VERSION_NAMES = ("Card Maker 1.34", "Card Maker 1.35")
|
||||
VERSION_NAMES = ("Card Maker 1.30", "Card Maker 1.35")
|
||||
|
||||
@classmethod
|
||||
def game_ver_to_string(cls, ver: int):
|
||||
|
@ -30,7 +30,7 @@ class CardMakerServlet:
|
||||
|
||||
self.versions = [
|
||||
CardMakerBase(core_cfg, self.game_cfg),
|
||||
CardMaker135(core_cfg, self.game_cfg),
|
||||
CardMaker135(core_cfg, self.game_cfg)
|
||||
]
|
||||
|
||||
self.logger = logging.getLogger("cardmaker")
|
||||
@ -89,7 +89,7 @@ class CardMakerServlet:
|
||||
|
||||
if version >= 130 and version < 135: # Card Maker
|
||||
internal_ver = CardMakerConstants.VER_CARD_MAKER
|
||||
elif version >= 135 and version < 136: # Card Maker 1.35
|
||||
elif version >= 135 and version < 140: # Card Maker 1.35
|
||||
internal_ver = CardMakerConstants.VER_CARD_MAKER_135
|
||||
|
||||
if all(c in string.hexdigits for c in endpoint) and len(endpoint) == 32:
|
||||
@ -129,6 +129,6 @@ class CardMakerServlet:
|
||||
if resp is None:
|
||||
resp = {"returnCode": 1}
|
||||
|
||||
self.logger.info(f"Response {resp}")
|
||||
self.logger.debug(f"Response {resp}")
|
||||
|
||||
return zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))
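Responses on this endpoint are therefore zlib-compressed, UTF-8 encoded JSON; a minimal stand-alone round trip of the line above:

import json
import zlib

resp = {"returnCode": 1}
wire = zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))
assert json.loads(zlib.decompress(wire).decode("utf-8")) == resp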
|
||||
|
@ -89,8 +89,7 @@ class CardMakerReader(BaseReader):
|
||||
version_ids = {
|
||||
"v2_00": ChuniConstants.VER_CHUNITHM_NEW,
|
||||
"v2_05": ChuniConstants.VER_CHUNITHM_NEW_PLUS,
|
||||
# Chunithm SUN, ignore for now
|
||||
"v2_10": ChuniConstants.VER_CHUNITHM_NEW_PLUS + 1,
|
||||
"v2_10": ChuniConstants.VER_CHUNITHM_SUN,
|
||||
}
|
||||
|
||||
for root, dirs, files in os.walk(base_dir):
|
||||
@ -138,8 +137,7 @@ class CardMakerReader(BaseReader):
|
||||
version_ids = {
|
||||
"v2_00": ChuniConstants.VER_CHUNITHM_NEW,
|
||||
"v2_05": ChuniConstants.VER_CHUNITHM_NEW_PLUS,
|
||||
# Chunithm SUN, ignore for now
|
||||
"v2_10": ChuniConstants.VER_CHUNITHM_NEW_PLUS + 1,
|
||||
"v2_10": ChuniConstants.VER_CHUNITHM_SUN,
|
||||
}
|
||||
|
||||
for root, dirs, files in os.walk(base_dir):
|
||||
@ -226,6 +224,12 @@ class CardMakerReader(BaseReader):
|
||||
True if troot.find("disable").text == "false" else False
|
||||
)
|
||||
|
||||
# check if a date is part of the name and disable the
|
||||
# card if it is
|
||||
enabled = (
|
||||
False if re.search(r"\d{2}/\d{2}/\d{2}", name) else enabled
|
||||
)
|
||||
|
||||
self.mai2_data.static.put_card(
|
||||
version, card_id, name, enabled=enabled
|
||||
)
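For clarity, the date check above behaves as follows (the card names are made up for illustration):

import re

# A name that embeds a date such as "22/07/15" forces the card to be disabled.
assert re.search(r"\d{2}/\d{2}/\d{2}", "Some Card 22/07/15") is not None
assert re.search(r"\d{2}/\d{2}/\d{2}", "Some Card") is None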
|
||||
|
@ -2,7 +2,7 @@ import logging
|
||||
import json
|
||||
from decimal import Decimal
|
||||
from base64 import b64encode
|
||||
from typing import Any, Dict
|
||||
from typing import Any, Dict, List
|
||||
from hashlib import md5
|
||||
from datetime import datetime
|
||||
|
||||
@ -11,6 +11,7 @@ from titles.cxb.config import CxbConfig
|
||||
from titles.cxb.const import CxbConstants
|
||||
from titles.cxb.database import CxbData
|
||||
|
||||
from threading import Thread
|
||||
|
||||
class CxbBase:
|
||||
def __init__(self, cfg: CoreConfig, game_cfg: CxbConfig) -> None:
|
||||
@ -54,6 +55,151 @@ class CxbBase:
|
||||
self.logger.warn(f"User {data['login']['authid']} does not have a profile")
|
||||
return {}
|
||||
|
||||
def task_generateCoupon(index, data1):
|
||||
# Coupons
|
||||
for i in range(500, 510):
|
||||
index.append(str(i))
|
||||
couponid = int(i) - 500
|
||||
dataValue = [
|
||||
{
|
||||
"couponId": str(couponid),
|
||||
"couponNum": "1",
|
||||
"couponLog": [],
|
||||
}
|
||||
]
|
||||
data1.append(
|
||||
b64encode(
|
||||
bytes(json.dumps(dataValue[0], separators=(",", ":")), "utf-8")
|
||||
).decode("utf-8")
|
||||
)
|
||||
|
||||
def task_generateShopListTitle(index, data1):
|
||||
# ShopList_Title
|
||||
for i in range(200000, 201451):
|
||||
index.append(str(i))
|
||||
shopid = int(i) - 200000
|
||||
dataValue = [
|
||||
{
|
||||
"shopId": shopid,
|
||||
"shopState": "2",
|
||||
"isDisable": "t",
|
||||
"isDeleted": "f",
|
||||
"isSpecialFlag": "f",
|
||||
}
|
||||
]
|
||||
data1.append(
|
||||
b64encode(
|
||||
bytes(json.dumps(dataValue[0], separators=(",", ":")), "utf-8")
|
||||
).decode("utf-8")
|
||||
)
|
||||
|
||||
def task_generateShopListIcon(index, data1):
|
||||
# ShopList_Icon
|
||||
for i in range(202000, 202264):
|
||||
index.append(str(i))
|
||||
shopid = int(i) - 200000
|
||||
dataValue = [
|
||||
{
|
||||
"shopId": shopid,
|
||||
"shopState": "2",
|
||||
"isDisable": "t",
|
||||
"isDeleted": "f",
|
||||
"isSpecialFlag": "f",
|
||||
}
|
||||
]
|
||||
data1.append(
|
||||
b64encode(
|
||||
bytes(json.dumps(dataValue[0], separators=(",", ":")), "utf-8")
|
||||
).decode("utf-8")
|
||||
)
|
||||
|
||||
def task_generateStories(index, data1):
|
||||
# Stories
|
||||
for i in range(900000, 900003):
|
||||
index.append(str(i))
|
||||
storyid = int(i) - 900000
|
||||
dataValue = [
|
||||
{
|
||||
"storyId": storyid,
|
||||
"unlockState1": ["t"] * 10,
|
||||
"unlockState2": ["t"] * 10,
|
||||
"unlockState3": ["t"] * 10,
|
||||
"unlockState4": ["t"] * 10,
|
||||
"unlockState5": ["t"] * 10,
|
||||
"unlockState6": ["t"] * 10,
|
||||
"unlockState7": ["t"] * 10,
|
||||
"unlockState8": ["t"] * 10,
|
||||
"unlockState9": ["t"] * 10,
|
||||
"unlockState10": ["t"] * 10,
|
||||
"unlockState11": ["t"] * 10,
|
||||
"unlockState12": ["t"] * 10,
|
||||
"unlockState13": ["t"] * 10,
|
||||
"unlockState14": ["t"] * 10,
|
||||
"unlockState15": ["t"] * 10,
|
||||
"unlockState16": ["t"] * 10,
|
||||
}
|
||||
]
|
||||
data1.append(
|
||||
b64encode(
|
||||
bytes(json.dumps(dataValue[0], separators=(",", ":")), "utf-8")
|
||||
).decode("utf-8")
|
||||
)
|
||||
|
||||
def task_generateScoreData(song, index, data1):
|
||||
song_data = song["data"]
|
||||
songCode = []
|
||||
|
||||
songCode.append(
|
||||
{
|
||||
"mcode": song_data["mcode"],
|
||||
"musicState": song_data["musicState"],
|
||||
"playCount": song_data["playCount"],
|
||||
"totalScore": song_data["totalScore"],
|
||||
"highScore": song_data["highScore"],
|
||||
"everHighScore": song_data["everHighScore"]
|
||||
if "everHighScore" in song_data
|
||||
else ["0", "0", "0", "0", "0"],
|
||||
"clearRate": song_data["clearRate"],
|
||||
"rankPoint": song_data["rankPoint"],
|
||||
"normalCR": song_data["normalCR"]
|
||||
if "normalCR" in song_data
|
||||
else ["0", "0", "0", "0", "0"],
|
||||
"survivalCR": song_data["survivalCR"]
|
||||
if "survivalCR" in song_data
|
||||
else ["0", "0", "0", "0", "0"],
|
||||
"ultimateCR": song_data["ultimateCR"]
|
||||
if "ultimateCR" in song_data
|
||||
else ["0", "0", "0", "0", "0"],
|
||||
"nohopeCR": song_data["nohopeCR"]
|
||||
if "nohopeCR" in song_data
|
||||
else ["0", "0", "0", "0", "0"],
|
||||
"combo": song_data["combo"],
|
||||
"coupleUserId": song_data["coupleUserId"],
|
||||
"difficulty": song_data["difficulty"],
|
||||
"isFullCombo": song_data["isFullCombo"],
|
||||
"clearGaugeType": song_data["clearGaugeType"],
|
||||
"fieldType": song_data["fieldType"],
|
||||
"gameType": song_data["gameType"],
|
||||
"grade": song_data["grade"],
|
||||
"unlockState": song_data["unlockState"],
|
||||
"extraState": song_data["extraState"],
|
||||
}
|
||||
)
|
||||
index.append(song_data["index"])
|
||||
data1.append(
|
||||
b64encode(
|
||||
bytes(json.dumps(songCode[0], separators=(",", ":")), "utf-8")
|
||||
).decode("utf-8")
|
||||
)
|
||||
|
||||
def task_generateIndexData(versionindex):
|
||||
try:
|
||||
v_profile = self.data.profile.get_profile_index(0, uid, self.version)
|
||||
v_profile_data = v_profile["data"]
|
||||
versionindex.append(int(v_profile_data["appVersion"]))
|
||||
except Exception:
|
||||
versionindex.append("10400")
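Each helper above appends one base64-encoded, minified JSON object per index entry; a stand-alone round trip of that encoding (the coupon value mirrors the loop above):

import json
from base64 import b64encode, b64decode

value = {"couponId": "0", "couponNum": "1", "couponLog": []}
blob = b64encode(json.dumps(value, separators=(",", ":")).encode("utf-8")).decode("utf-8")
assert json.loads(b64decode(blob)) == value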
|
||||
|
||||
def handle_action_loadrange_request(self, data: Dict) -> Dict:
|
||||
range_start = data["loadrange"]["range"][0]
|
||||
range_end = data["loadrange"]["range"][1]
|
||||
@ -107,146 +253,29 @@ class CxbBase:
|
||||
900000 = Stories
|
||||
"""
|
||||
|
||||
# Coupons
|
||||
for i in range(500, 510):
|
||||
index.append(str(i))
|
||||
couponid = int(i) - 500
|
||||
dataValue = [
|
||||
{
|
||||
"couponId": str(couponid),
|
||||
"couponNum": "1",
|
||||
"couponLog": [],
|
||||
}
|
||||
]
|
||||
data1.append(
|
||||
b64encode(
|
||||
bytes(json.dumps(dataValue[0], separators=(",", ":")), "utf-8")
|
||||
).decode("utf-8")
|
||||
)
|
||||
# Async threads to generate the response
|
||||
thread_Coupon = Thread(target=CxbBase.task_generateCoupon(index, data1))
|
||||
thread_ShopListTitle = Thread(target=CxbBase.task_generateShopListTitle(index, data1))
|
||||
thread_ShopListIcon = Thread(target=CxbBase.task_generateShopListIcon(index, data1))
|
||||
thread_Stories = Thread(target=CxbBase.task_generateStories(index, data1))
|
||||
|
||||
# ShopList_Title
|
||||
for i in range(200000, 201451):
|
||||
index.append(str(i))
|
||||
shopid = int(i) - 200000
|
||||
dataValue = [
|
||||
{
|
||||
"shopId": shopid,
|
||||
"shopState": "2",
|
||||
"isDisable": "t",
|
||||
"isDeleted": "f",
|
||||
"isSpecialFlag": "f",
|
||||
}
|
||||
]
|
||||
data1.append(
|
||||
b64encode(
|
||||
bytes(json.dumps(dataValue[0], separators=(",", ":")), "utf-8")
|
||||
).decode("utf-8")
|
||||
)
|
||||
thread_Coupon.start()
|
||||
thread_ShopListTitle.start()
|
||||
thread_ShopListIcon.start()
|
||||
thread_Stories.start()
|
||||
|
||||
# ShopList_Icon
|
||||
for i in range(202000, 202264):
|
||||
index.append(str(i))
|
||||
shopid = int(i) - 200000
|
||||
dataValue = [
|
||||
{
|
||||
"shopId": shopid,
|
||||
"shopState": "2",
|
||||
"isDisable": "t",
|
||||
"isDeleted": "f",
|
||||
"isSpecialFlag": "f",
|
||||
}
|
||||
]
|
||||
data1.append(
|
||||
b64encode(
|
||||
bytes(json.dumps(dataValue[0], separators=(",", ":")), "utf-8")
|
||||
).decode("utf-8")
|
||||
)
|
||||
|
||||
# Stories
|
||||
for i in range(900000, 900003):
|
||||
index.append(str(i))
|
||||
storyid = int(i) - 900000
|
||||
dataValue = [
|
||||
{
|
||||
"storyId": storyid,
|
||||
"unlockState1": ["t"] * 10,
|
||||
"unlockState2": ["t"] * 10,
|
||||
"unlockState3": ["t"] * 10,
|
||||
"unlockState4": ["t"] * 10,
|
||||
"unlockState5": ["t"] * 10,
|
||||
"unlockState6": ["t"] * 10,
|
||||
"unlockState7": ["t"] * 10,
|
||||
"unlockState8": ["t"] * 10,
|
||||
"unlockState9": ["t"] * 10,
|
||||
"unlockState10": ["t"] * 10,
|
||||
"unlockState11": ["t"] * 10,
|
||||
"unlockState12": ["t"] * 10,
|
||||
"unlockState13": ["t"] * 10,
|
||||
"unlockState14": ["t"] * 10,
|
||||
"unlockState15": ["t"] * 10,
|
||||
"unlockState16": ["t"] * 10,
|
||||
}
|
||||
]
|
||||
data1.append(
|
||||
b64encode(
|
||||
bytes(json.dumps(dataValue[0], separators=(",", ":")), "utf-8")
|
||||
).decode("utf-8")
|
||||
)
|
||||
thread_Coupon.join()
|
||||
thread_ShopListTitle.join()
|
||||
thread_ShopListIcon.join()
|
||||
thread_Stories.join()
|
||||
|
||||
for song in songs:
|
||||
song_data = song["data"]
|
||||
songCode = []
|
||||
|
||||
songCode.append(
|
||||
{
|
||||
"mcode": song_data["mcode"],
|
||||
"musicState": song_data["musicState"],
|
||||
"playCount": song_data["playCount"],
|
||||
"totalScore": song_data["totalScore"],
|
||||
"highScore": song_data["highScore"],
|
||||
"everHighScore": song_data["everHighScore"]
|
||||
if "everHighScore" in song_data
|
||||
else ["0", "0", "0", "0", "0"],
|
||||
"clearRate": song_data["clearRate"],
|
||||
"rankPoint": song_data["rankPoint"],
|
||||
"normalCR": song_data["normalCR"]
|
||||
if "normalCR" in song_data
|
||||
else ["0", "0", "0", "0", "0"],
|
||||
"survivalCR": song_data["survivalCR"]
|
||||
if "survivalCR" in song_data
|
||||
else ["0", "0", "0", "0", "0"],
|
||||
"ultimateCR": song_data["ultimateCR"]
|
||||
if "ultimateCR" in song_data
|
||||
else ["0", "0", "0", "0", "0"],
|
||||
"nohopeCR": song_data["nohopeCR"]
|
||||
if "nohopeCR" in song_data
|
||||
else ["0", "0", "0", "0", "0"],
|
||||
"combo": song_data["combo"],
|
||||
"coupleUserId": song_data["coupleUserId"],
|
||||
"difficulty": song_data["difficulty"],
|
||||
"isFullCombo": song_data["isFullCombo"],
|
||||
"clearGaugeType": song_data["clearGaugeType"],
|
||||
"fieldType": song_data["fieldType"],
|
||||
"gameType": song_data["gameType"],
|
||||
"grade": song_data["grade"],
|
||||
"unlockState": song_data["unlockState"],
|
||||
"extraState": song_data["extraState"],
|
||||
}
|
||||
)
|
||||
index.append(song_data["index"])
|
||||
data1.append(
|
||||
b64encode(
|
||||
bytes(json.dumps(songCode[0], separators=(",", ":")), "utf-8")
|
||||
).decode("utf-8")
|
||||
)
|
||||
thread_ScoreData = Thread(target=CxbBase.task_generateScoreData(song, index, data1))
|
||||
thread_ScoreData.start()
|
||||
|
||||
for v in index:
|
||||
try:
|
||||
v_profile = self.data.profile.get_profile_index(0, uid, self.version)
|
||||
v_profile_data = v_profile["data"]
|
||||
versionindex.append(int(v_profile_data["appVersion"]))
|
||||
except:
|
||||
versionindex.append("10400")
|
||||
thread_IndexData = Thread(target=CxbBase.task_generateIndexData(versionindex))
|
||||
thread_IndexData.start()
|
||||
|
||||
return {"index": index, "data": data1, "version": versionindex}
|
||||
|
||||
@ -257,7 +286,7 @@ class CxbBase:
|
||||
# REV Omnimix Version Fetcher
|
||||
gameversion = data["saveindex"]["data"][0][2]
|
||||
self.logger.warning(f"Game Version is {gameversion}")
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if "10205" in gameversion:
|
||||
@ -319,7 +348,7 @@ class CxbBase:
|
||||
# Sunrise
|
||||
try:
|
||||
profileIndex = save_data["index"].index("0")
|
||||
except:
|
||||
except Exception:
|
||||
return {"data": ""} # Maybe
|
||||
|
||||
profile = json.loads(save_data["data"][profileIndex])
|
||||
@ -416,7 +445,7 @@ class CxbBase:
|
||||
self.logger.info(f"Get best rankings for {uid}")
|
||||
p = self.data.score.get_best_rankings(uid)
|
||||
|
||||
rankList: list[Dict[str, Any]] = []
|
||||
rankList: List[Dict[str, Any]] = []
|
||||
|
||||
for rank in p:
|
||||
if rank["song_id"] is not None:
|
||||
@ -467,7 +496,7 @@ class CxbBase:
|
||||
score=int(rid["sc"][0]),
|
||||
clear=rid["clear"],
|
||||
)
|
||||
except:
|
||||
except Exception:
|
||||
self.data.score.put_ranking(
|
||||
user_id=uid,
|
||||
rev_id=int(rid["rid"]),
|
||||
@ -485,7 +514,7 @@ class CxbBase:
|
||||
score=int(rid["sc"][0]),
|
||||
clear=0,
|
||||
)
|
||||
except:
|
||||
except Exception:
|
||||
self.data.score.put_ranking(
|
||||
user_id=uid,
|
||||
rev_id=int(rid["rid"]),
|
||||
|
@ -103,7 +103,7 @@ class CxbServlet(resource.Resource):
|
||||
else:
|
||||
self.logger.info(f"Ready on port {self.game_cfg.server.port}")
|
||||
|
||||
def render_POST(self, request: Request):
|
||||
def render_POST(self, request: Request, version: int, endpoint: str):
|
||||
version = 0
|
||||
internal_ver = 0
|
||||
func_to_find = ""
|
||||
|
@ -123,5 +123,5 @@ class CxbReader(BaseReader):
|
||||
genre,
|
||||
int(row["easy"].replace("Easy ", "").replace("N/A", "0")),
|
||||
)
|
||||
except:
|
||||
except Exception:
|
||||
self.logger.warn(f"Couldn't read csv file in {self.bin_dir}, skipping")
|
||||
|
@ -7,4 +7,4 @@ index = DivaServlet
|
||||
database = DivaData
|
||||
reader = DivaReader
|
||||
game_codes = [DivaConstants.GAME_CODE]
|
||||
current_schema_version = 4
|
||||
current_schema_version = 5
|
||||
|
@ -266,16 +266,17 @@ class DivaBase:
|
||||
def handle_festa_info_request(self, data: Dict) -> Dict:
|
||||
encoded = "&"
|
||||
params = {
|
||||
"fi_id": "1,-1",
|
||||
"fi_name": f"{self.core_cfg.server.name} Opening,xxx",
|
||||
"fi_kind": "0,0",
|
||||
"fi_id": "1,2",
|
||||
"fi_name": f"{self.core_cfg.server.name} Opening,Project DIVA Festa",
|
||||
# 0=PINK, 1=GREEN
|
||||
"fi_kind": "1,0",
|
||||
"fi_difficulty": "-1,-1",
|
||||
"fi_pv_id_lst": "ALL,ALL",
|
||||
"fi_attr": "7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF,7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
|
||||
"fi_add_vp": "20,0",
|
||||
"fi_mul_vp": "1,1",
|
||||
"fi_st": "2022-06-17 17:00:00.0,2014-07-08 18:10:11.0",
|
||||
"fi_et": "2029-01-01 10:00:00.0,2014-07-08 18:10:11.0",
|
||||
"fi_add_vp": "20,5",
|
||||
"fi_mul_vp": "1,2",
|
||||
"fi_st": "2019-01-01 00:00:00.0,2019-01-01 00:00:00.0",
|
||||
"fi_et": "2029-01-01 00:00:00.0,2029-01-01 00:00:00.0",
|
||||
"fi_lut": "{self.time_lut}",
|
||||
}
|
||||
|
||||
@ -401,10 +402,10 @@ class DivaBase:
|
||||
response += f"&lv_pnt={profile['lv_pnt']}"
|
||||
response += f"&vcld_pts={profile['vcld_pts']}"
|
||||
response += f"&skn_eqp={profile['use_pv_skn_eqp']}"
|
||||
response += f"&btn_se_eqp={profile['use_pv_btn_se_eqp']}"
|
||||
response += f"&sld_se_eqp={profile['use_pv_sld_se_eqp']}"
|
||||
response += f"&chn_sld_se_eqp={profile['use_pv_chn_sld_se_eqp']}"
|
||||
response += f"&sldr_tch_se_eqp={profile['use_pv_sldr_tch_se_eqp']}"
|
||||
response += f"&btn_se_eqp={profile['btn_se_eqp']}"
|
||||
response += f"&sld_se_eqp={profile['sld_se_eqp']}"
|
||||
response += f"&chn_sld_se_eqp={profile['chn_sld_se_eqp']}"
|
||||
response += f"&sldr_tch_se_eqp={profile['sldr_tch_se_eqp']}"
|
||||
response += f"&passwd_stat={profile['passwd_stat']}"
|
||||
|
||||
# Store stuff to add to rework
|
||||
@ -478,6 +479,21 @@ class DivaBase:
|
||||
response += f"&dsp_clr_sts={profile['dsp_clr_sts']}"
|
||||
response += f"&rgo_sts={profile['rgo_sts']}"
|
||||
|
||||
# Contest progress
|
||||
response += f"&cv_cid=-1,-1,-1,-1"
|
||||
response += f"&cv_sc=-1,-1,-1,-1"
|
||||
response += f"&cv_bv=-1,-1,-1,-1"
|
||||
response += f"&cv_bv=-1,-1,-1,-1"
|
||||
response += f"&cv_bf=-1,-1,-1,-1"
|
||||
|
||||
# Contest now playing id, return -1 if no current playing contest
|
||||
response += f"&cnp_cid={profile['cnp_cid']}"
|
||||
response += f"&cnp_val={profile['cnp_val']}"
|
||||
# border can be 0=bronze, 1=silver, 2=gold
|
||||
response += f"&cnp_rr={profile['cnp_rr']}"
|
||||
# only show contest specifier if it is not empty
|
||||
response += f"&cnp_sp={profile['cnp_sp']}" if profile["cnp_sp"] != "" else ""
|
||||
|
||||
# To be fully fixed
|
||||
if "my_qst_id" not in profile:
|
||||
response += f"&my_qst_id=-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1"
|
||||
@ -488,7 +504,63 @@ class DivaBase:
|
||||
|
||||
response += f"&my_qst_prgrs=0,0,0,0,0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1"
|
||||
response += f"&my_qst_et=2022-06-19%2010%3A28%3A52.0,2022-06-19%2010%3A28%3A52.0,2022-06-19%2010%3A28%3A52.0,2100-01-01%2008%3A59%3A59.0,2100-01-01%2008%3A59%3A59.0,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx,xxx"
|
||||
response += f"&clr_sts=0,0,0,0,0,0,0,0,56,52,35,6,6,3,1,0,0,0,0,0"
|
||||
|
||||
# define a helper class to store all counts for clear, great,
|
||||
# excellent and perfect
|
||||
class ClearSet:
|
||||
def __init__(self):
|
||||
self.clear = 0
|
||||
self.great = 0
|
||||
self.excellent = 0
|
||||
self.perfect = 0
|
||||
|
||||
# create a dict to store the ClearSets per difficulty
|
||||
clear_set_dict = {
|
||||
0: ClearSet(), # easy
|
||||
1: ClearSet(), # normal
|
||||
2: ClearSet(), # hard
|
||||
3: ClearSet(), # extreme
|
||||
4: ClearSet(), # exExtreme
|
||||
}
|
||||
|
||||
# get clear status from user scores
|
||||
pv_records = self.data.score.get_best_scores(data["pd_id"])
|
||||
clear_status = "0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0"
|
||||
|
||||
if pv_records is not None:
|
||||
for score in pv_records:
|
||||
if score["edition"] == 0:
|
||||
# cheap and standard both count to "clear"
|
||||
if score["clr_kind"] in {1, 2}:
|
||||
clear_set_dict[score["difficulty"]].clear += 1
|
||||
elif score["clr_kind"] == 3:
|
||||
clear_set_dict[score["difficulty"]].great += 1
|
||||
elif score["clr_kind"] == 4:
|
||||
clear_set_dict[score["difficulty"]].excellent += 1
|
||||
elif score["clr_kind"] == 5:
|
||||
clear_set_dict[score["difficulty"]].perfect += 1
|
||||
else:
|
||||
# 4=ExExtreme
|
||||
if score["clr_kind"] in {1, 2}:
|
||||
clear_set_dict[4].clear += 1
|
||||
elif score["clr_kind"] == 3:
|
||||
clear_set_dict[4].great += 1
|
||||
elif score["clr_kind"] == 4:
|
||||
clear_set_dict[4].excellent += 1
|
||||
elif score["clr_kind"] == 5:
|
||||
clear_set_dict[4].perfect += 1
|
||||
|
||||
# now add all values to a list
|
||||
clear_list = []
|
||||
for clear_set in clear_set_dict.values():
|
||||
clear_list.append(clear_set.clear)
|
||||
clear_list.append(clear_set.great)
|
||||
clear_list.append(clear_set.excellent)
|
||||
clear_list.append(clear_set.perfect)
|
||||
|
||||
clear_status = ",".join(map(str, clear_list))
|
||||
|
||||
response += f"&clr_sts={clear_status}"
|
||||
|
||||
# Store stuff to add to rework
|
||||
response += f"&mdl_eqp_tm={self.time_lut}"
|
||||
|
@ -34,9 +34,17 @@ profile = Table(
|
||||
Column("use_pv_sld_se_eqp", Boolean, nullable=False, server_default="0"),
|
||||
Column("use_pv_chn_sld_se_eqp", Boolean, nullable=False, server_default="0"),
|
||||
Column("use_pv_sldr_tch_se_eqp", Boolean, nullable=False, server_default="0"),
|
||||
Column("btn_se_eqp", Integer, nullable=False, server_default="-1"),
|
||||
Column("sld_se_eqp", Integer, nullable=False, server_default="-1"),
|
||||
Column("chn_sld_se_eqp", Integer, nullable=False, server_default="-1"),
|
||||
Column("sldr_tch_se_eqp", Integer, nullable=False, server_default="-1"),
|
||||
Column("nxt_pv_id", Integer, nullable=False, server_default="708"),
|
||||
Column("nxt_dffclty", Integer, nullable=False, server_default="2"),
|
||||
Column("nxt_edtn", Integer, nullable=False, server_default="0"),
|
||||
Column("cnp_cid", Integer, nullable=False, server_default="-1"),
|
||||
Column("cnp_val", Integer, nullable=False, server_default="-1"),
|
||||
Column("cnp_rr", Integer, nullable=False, server_default="-1"),
|
||||
Column("cnp_sp", String(255), nullable=False, server_default=""),
|
||||
Column("dsp_clr_brdr", Integer, nullable=False, server_default="7"),
|
||||
Column("dsp_intrm_rnk", Integer, nullable=False, server_default="1"),
|
||||
Column("dsp_clr_sts", Integer, nullable=False, server_default="1"),
|
||||
|
@ -3,6 +3,7 @@ from sqlalchemy.types import Integer, String, TIMESTAMP, JSON, Boolean
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from sqlalchemy.engine import Row
|
||||
from typing import Optional, List, Dict, Any
|
||||
|
||||
from core.data.schema import BaseData, metadata
|
||||
@ -167,7 +168,7 @@ class DivaScoreData(BaseData):
|
||||
|
||||
def get_best_user_score(
|
||||
self, user_id: int, pv_id: int, difficulty: int, edition: int
|
||||
) -> Optional[Dict]:
|
||||
) -> Optional[Row]:
|
||||
sql = score.select(
|
||||
and_(
|
||||
score.c.user == user_id,
|
||||
@ -184,7 +185,7 @@ class DivaScoreData(BaseData):
|
||||
|
||||
def get_top3_scores(
|
||||
self, pv_id: int, difficulty: int, edition: int
|
||||
) -> Optional[List[Dict]]:
|
||||
) -> Optional[List[Row]]:
|
||||
sql = (
|
||||
score.select(
|
||||
and_(
|
||||
@ -204,7 +205,7 @@ class DivaScoreData(BaseData):
|
||||
|
||||
def get_global_ranking(
|
||||
self, user_id: int, pv_id: int, difficulty: int, edition: int
|
||||
) -> Optional[List]:
|
||||
) -> Optional[List[Row]]:
|
||||
# get the subquery max score of a user with pv_id, difficulty and
|
||||
# edition
|
||||
sql_sub = (
|
||||
@ -231,7 +232,7 @@ class DivaScoreData(BaseData):
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def get_best_scores(self, user_id: int) -> Optional[List]:
|
||||
def get_best_scores(self, user_id: int) -> Optional[List[Row]]:
|
||||
sql = score.select(score.c.user == user_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
|
@ -83,7 +83,13 @@ class IDZUserDBProtocol(Protocol):
|
||||
def dataReceived(self, data: bytes) -> None:
|
||||
self.logger.debug(f"Receive data {data.hex()}")
|
||||
crypt = AES.new(self.static_key, AES.MODE_ECB)
|
||||
|
||||
try:
|
||||
data_dec = crypt.decrypt(data)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to decrypt UserDB request from {self.transport.getPeer().host} because {e} - {data.hex()}")
|
||||
|
||||
self.logger.debug(f"Decrypt data {data_dec.hex()}")
|
||||
|
||||
magic = struct.unpack_from("<I", data_dec, 0)[0]
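A self-contained sketch of the decrypt-then-read-magic flow above, using pycryptodome; the key and magic value are placeholders, not the real static key or protocol constants:

import struct
from Crypto.Cipher import AES

static_key = bytes(16)                                # placeholder 128-bit key
plaintext = struct.pack("<I", 0xCAFE) + bytes(12)     # one 16-byte block, little-endian magic first
request = AES.new(static_key, AES.MODE_ECB).encrypt(plaintext)

data_dec = AES.new(static_key, AES.MODE_ECB).decrypt(request)
magic = struct.unpack_from("<I", data_dec, 0)[0]
assert magic == 0xCAFE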
|
||||
|
@ -6,5 +6,14 @@ from titles.mai2.read import Mai2Reader
|
||||
index = Mai2Servlet
|
||||
database = Mai2Data
|
||||
reader = Mai2Reader
|
||||
game_codes = [Mai2Constants.GAME_CODE]
|
||||
current_schema_version = 4
|
||||
game_codes = [
|
||||
Mai2Constants.GAME_CODE_DX,
|
||||
Mai2Constants.GAME_CODE_FINALE,
|
||||
Mai2Constants.GAME_CODE_MILK,
|
||||
Mai2Constants.GAME_CODE_MURASAKI,
|
||||
Mai2Constants.GAME_CODE_PINK,
|
||||
Mai2Constants.GAME_CODE_ORANGE,
|
||||
Mai2Constants.GAME_CODE_GREEN,
|
||||
Mai2Constants.GAME_CODE,
|
||||
]
|
||||
current_schema_version = 7
|
||||
|
@ -1,6 +1,9 @@
|
||||
from datetime import datetime, date, timedelta
|
||||
from typing import Any, Dict
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List
|
||||
import logging
|
||||
from base64 import b64decode
|
||||
from os import path, stat, remove
|
||||
from PIL import ImageFile
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.mai2.const import Mai2Constants
|
||||
@ -12,33 +15,35 @@ class Mai2Base:
|
||||
def __init__(self, cfg: CoreConfig, game_cfg: Mai2Config) -> None:
|
||||
self.core_config = cfg
|
||||
self.game_config = game_cfg
|
||||
self.game = Mai2Constants.GAME_CODE
|
||||
self.version = Mai2Constants.VER_MAIMAI_DX
|
||||
self.version = Mai2Constants.VER_MAIMAI
|
||||
self.data = Mai2Data(cfg)
|
||||
self.logger = logging.getLogger("mai2")
|
||||
self.can_deliver = False
|
||||
self.can_usbdl = False
|
||||
self.old_server = ""
|
||||
|
||||
if self.core_config.server.is_develop and self.core_config.title.port > 0:
|
||||
self.old_server = f"http://{self.core_config.title.hostname}:{self.core_config.title.port}/SDEY/197/"
|
||||
|
||||
else:
|
||||
self.old_server = f"http://{self.core_config.title.hostname}/SDEY/197/"
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict):
|
||||
reboot_start = date.strftime(
|
||||
datetime.now() + timedelta(hours=3), Mai2Constants.DATE_TIME_FORMAT
|
||||
)
|
||||
reboot_end = date.strftime(
|
||||
datetime.now() + timedelta(hours=4), Mai2Constants.DATE_TIME_FORMAT
|
||||
)
|
||||
return {
|
||||
"isDevelop": False,
|
||||
"isAouAccession": False,
|
||||
"gameSetting": {
|
||||
"isMaintenance": "false",
|
||||
"requestInterval": 10,
|
||||
"rebootStartTime": reboot_start,
|
||||
"rebootEndTime": reboot_end,
|
||||
"movieUploadLimit": 10000,
|
||||
"movieStatus": 0,
|
||||
"movieServerUri": "",
|
||||
"deliverServerUri": "",
|
||||
"oldServerUri": "",
|
||||
"usbDlServerUri": "",
|
||||
"rebootInterval": 0,
|
||||
"isMaintenance": False,
|
||||
"requestInterval": 1800,
|
||||
"rebootStartTime": "2020-01-01 07:00:00.0",
|
||||
"rebootEndTime": "2020-01-01 07:59:59.0",
|
||||
"movieUploadLimit": 100,
|
||||
"movieStatus": 1,
|
||||
"movieServerUri": self.old_server + "api/movie" if self.game_config.uploads.movies else "movie",
|
||||
"deliverServerUri": self.old_server + "deliver/" if self.can_deliver and self.game_config.deliver.enable else "",
|
||||
"oldServerUri": self.old_server + "old",
|
||||
"usbDlServerUri": self.old_server + "usbdl/" if self.can_deliver and self.game_config.deliver.udbdl_enable else "",
|
||||
},
|
||||
"isAouAccession": "true",
|
||||
}
|
||||
|
||||
def handle_get_game_ranking_api_request(self, data: Dict) -> Dict:
|
||||
@ -51,7 +56,7 @@ class Mai2Base:
|
||||
def handle_get_game_event_api_request(self, data: Dict) -> Dict:
|
||||
events = self.data.static.get_enabled_events(self.version)
|
||||
events_lst = []
|
||||
if events is None:
|
||||
if events is None or not events:
|
||||
self.logger.warn("No enabled events, did you run the reader?")
|
||||
return {"type": data["type"], "length": 0, "gameEventList": []}
|
||||
|
||||
@ -87,7 +92,7 @@ class Mai2Base:
|
||||
for i, charge in enumerate(game_charge_list):
|
||||
charge_list.append(
|
||||
{
|
||||
"orderId": i,
|
||||
"orderId": i + 1,
|
||||
"chargeId": charge["ticketId"],
|
||||
"price": charge["price"],
|
||||
"startDate": "2017-12-05 07:00:00.0",
|
||||
@ -110,43 +115,35 @@ class Mai2Base:
|
||||
return {"returnCode": 1, "apiName": "UpsertClientTestmodeApi"}
|
||||
|
||||
def handle_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_detail(data["userId"], self.version)
|
||||
o = self.data.profile.get_profile_option(data["userId"], self.version)
|
||||
if p is None or o is None:
|
||||
p = self.data.profile.get_profile_detail(data["userId"], self.version, False)
|
||||
w = self.data.profile.get_web_option(data["userId"], self.version)
|
||||
if p is None or w is None:
|
||||
return {} # Register
|
||||
profile = p._asdict()
|
||||
option = o._asdict()
|
||||
web_opt = w._asdict()
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userName": profile["userName"],
|
||||
"isLogin": False,
|
||||
"lastGameId": profile["lastGameId"],
|
||||
"lastDataVersion": profile["lastDataVersion"],
|
||||
"lastRomVersion": profile["lastRomVersion"],
|
||||
"lastLoginDate": profile["lastLoginDate"],
|
||||
"lastLoginDate": profile["lastPlayDate"],
|
||||
"lastPlayDate": profile["lastPlayDate"],
|
||||
"playerRating": profile["playerRating"],
|
||||
"nameplateId": 0, # Unused
|
||||
"iconId": profile["iconId"],
|
||||
"trophyId": 0, # Unused
|
||||
"partnerId": profile["partnerId"],
|
||||
"nameplateId": profile["nameplateId"],
|
||||
"frameId": profile["frameId"],
|
||||
"dispRate": option[
|
||||
"dispRate"
|
||||
], # 0: all/begin, 1: disprate, 2: dispDan, 3: hide, 4: end
|
||||
"totalAwake": profile["totalAwake"],
|
||||
"isNetMember": profile["isNetMember"],
|
||||
"dailyBonusDate": profile["dailyBonusDate"],
|
||||
"headPhoneVolume": option["headPhoneVolume"],
|
||||
"isInherit": False, # Not sure what this is or does??
|
||||
"banState": profile["banState"]
|
||||
if profile["banState"] is not None
|
||||
else 0, # New with uni+
|
||||
"iconId": profile["iconId"],
|
||||
"trophyId": profile["trophyId"],
|
||||
"dispRate": web_opt["dispRate"], # 0: all, 1: dispRate, 2: dispDan, 3: hide
|
||||
"dispRank": web_opt["dispRank"],
|
||||
"dispHomeRanker": web_opt["dispHomeRanker"],
|
||||
"dispTotalLv": web_opt["dispTotalLv"],
|
||||
"totalLv": profile["totalLv"],
|
||||
}
|
||||
|
||||
def handle_user_login_api_request(self, data: Dict) -> Dict:
|
||||
profile = self.data.profile.get_profile_detail(data["userId"], self.version)
|
||||
consec = self.data.profile.get_consec_login(data["userId"], self.version)
|
||||
|
||||
if profile is not None:
|
||||
lastLoginDate = profile["lastLoginDate"]
|
||||
@ -158,12 +155,31 @@ class Mai2Base:
|
||||
loginCt = 0
|
||||
lastLoginDate = "2017-12-05 07:00:00.0"
|
||||
|
||||
if consec is None or not consec:
|
||||
consec_ct = 1
|
||||
|
||||
else:
|
||||
lastlogindate_ = datetime.strptime(profile["lastLoginDate"], "%Y-%m-%d %H:%M:%S.%f").timestamp()
|
||||
today_midnight = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0).timestamp()
|
||||
yesterday_midnight = today_midnight - 86400
|
||||
|
||||
if lastlogindate_ < yesterday_midnight:
# Last login was before yesterday; the streak is broken.
consec_ct = 1
self.data.profile.reset_consec_login(data["userId"], self.version)

elif lastlogindate_ < today_midnight:
# Last login was yesterday; extend the streak.
consec_ct = consec['logins'] + 1
self.data.profile.add_consec_login(data["userId"], self.version)
|
||||
|
||||
else:
|
||||
consec_ct = consec['logins']
|
||||
|
||||
|
||||
return {
|
||||
"returnCode": 1,
|
||||
"lastLoginDate": lastLoginDate,
|
||||
"loginCount": loginCt,
|
||||
"consecutiveLoginCount": 0, # We don't really have a way to track this...
|
||||
"loginId": loginCt, # Used with the playlog!
|
||||
"consecutiveLoginCount": consec_ct, # Number of consecutive days we've logged in.
|
||||
}
|
||||
|
||||
def handle_upload_user_playlog_api_request(self, data: Dict) -> Dict:
|
||||
@ -195,10 +211,33 @@ class Mai2Base:
|
||||
upsert = data["upsertUserAll"]
|
||||
|
||||
if "userData" in upsert and len(upsert["userData"]) > 0:
|
||||
upsert["userData"][0]["isNetMember"] = 1
|
||||
upsert["userData"][0].pop("accessCode")
|
||||
upsert["userData"][0].pop("userId")
|
||||
|
||||
self.data.profile.put_profile_detail(
|
||||
user_id, self.version, upsert["userData"][0]
|
||||
user_id, self.version, upsert["userData"][0], False
|
||||
)
|
||||
|
||||
if "userWebOption" in upsert and len(upsert["userWebOption"]) > 0:
|
||||
upsert["userWebOption"][0]["isNetMember"] = True
|
||||
self.data.profile.put_web_option(
|
||||
user_id, self.version, upsert["userWebOption"][0]
|
||||
)
|
||||
|
||||
if "userGradeStatusList" in upsert and len(upsert["userGradeStatusList"]) > 0:
|
||||
self.data.profile.put_grade_status(
|
||||
user_id, upsert["userGradeStatusList"][0]
|
||||
)
|
||||
|
||||
if "userBossList" in upsert and len(upsert["userBossList"]) > 0:
|
||||
self.data.profile.put_boss_list(
|
||||
user_id, upsert["userBossList"][0]
|
||||
)
|
||||
|
||||
if "userPlaylogList" in upsert and len(upsert["userPlaylogList"]) > 0:
|
||||
for playlog in upsert["userPlaylogList"]:
|
||||
self.data.score.put_playlog(
|
||||
user_id, playlog, False
|
||||
)
|
||||
|
||||
if "userExtend" in upsert and len(upsert["userExtend"]) > 0:
|
||||
@ -208,11 +247,15 @@ class Mai2Base:
|
||||
|
||||
if "userGhost" in upsert:
|
||||
for ghost in upsert["userGhost"]:
|
||||
self.data.profile.put_profile_extend(user_id, self.version, ghost)
|
||||
self.data.profile.put_profile_ghost(user_id, self.version, ghost)
|
||||
|
||||
if "userRecentRatingList" in upsert:
|
||||
self.data.profile.put_recent_rating(user_id, upsert["userRecentRatingList"])
|
||||
|
||||
if "userOption" in upsert and len(upsert["userOption"]) > 0:
|
||||
upsert["userOption"][0].pop("userId")
|
||||
self.data.profile.put_profile_option(
|
||||
user_id, self.version, upsert["userOption"][0]
|
||||
user_id, self.version, upsert["userOption"][0], False
|
||||
)
|
||||
|
||||
if "userRatingList" in upsert and len(upsert["userRatingList"]) > 0:
|
||||
@ -221,8 +264,7 @@ class Mai2Base:
|
||||
)
|
||||
|
||||
if "userActivityList" in upsert and len(upsert["userActivityList"]) > 0:
|
||||
for k, v in upsert["userActivityList"][0].items():
|
||||
for act in v:
|
||||
for act in upsert["userActivityList"]:
|
||||
self.data.profile.put_profile_activity(user_id, act)
|
||||
|
||||
if "userChargeList" in upsert and len(upsert["userChargeList"]) > 0:
|
||||
@ -243,12 +285,9 @@ class Mai2Base:
|
||||
|
||||
if "userCharacterList" in upsert and len(upsert["userCharacterList"]) > 0:
|
||||
for char in upsert["userCharacterList"]:
|
||||
self.data.item.put_character(
|
||||
self.data.item.put_character_(
|
||||
user_id,
|
||||
char["characterId"],
|
||||
char["level"],
|
||||
char["awakening"],
|
||||
char["useCount"],
|
||||
char
|
||||
)
|
||||
|
||||
if "userItemList" in upsert and len(upsert["userItemList"]) > 0:
|
||||
@ -258,7 +297,7 @@ class Mai2Base:
|
||||
int(item["itemKind"]),
|
||||
item["itemId"],
|
||||
item["stock"],
|
||||
item["isValid"],
|
||||
True
|
||||
)
|
||||
|
||||
if "userLoginBonusList" in upsert and len(upsert["userLoginBonusList"]) > 0:
|
||||
@ -284,7 +323,7 @@ class Mai2Base:
|
||||
|
||||
if "userMusicDetailList" in upsert and len(upsert["userMusicDetailList"]) > 0:
|
||||
for music in upsert["userMusicDetailList"]:
|
||||
self.data.score.put_best_score(user_id, music)
|
||||
self.data.score.put_best_score(user_id, music, False)
|
||||
|
||||
if "userCourseList" in upsert and len(upsert["userCourseList"]) > 0:
|
||||
for course in upsert["userCourseList"]:
|
||||
@ -312,7 +351,7 @@ class Mai2Base:
|
||||
return {"returnCode": 1}
|
||||
|
||||
def handle_get_user_data_api_request(self, data: Dict) -> Dict:
|
||||
profile = self.data.profile.get_profile_detail(data["userId"], self.version)
|
||||
profile = self.data.profile.get_profile_detail(data["userId"], self.version, False)
|
||||
if profile is None:
|
||||
return
|
||||
|
||||
@ -336,7 +375,7 @@ class Mai2Base:
|
||||
return {"userId": data["userId"], "userExtend": extend_dict}
|
||||
|
||||
def handle_get_user_option_api_request(self, data: Dict) -> Dict:
|
||||
options = self.data.profile.get_profile_option(data["userId"], self.version)
|
||||
options = self.data.profile.get_profile_option(data["userId"], self.version, False)
|
||||
if options is None:
|
||||
return
|
||||
|
||||
@ -406,12 +445,31 @@ class Mai2Base:
|
||||
"userChargeList": user_charge_list,
|
||||
}
|
||||
|
||||
def handle_get_user_present_api_request(self, data: Dict) -> Dict:
|
||||
return { "userId": data.get("userId", 0), "length": 0, "userPresentList": []}
|
||||
|
||||
def handle_get_transfer_friend_api_request(self, data: Dict) -> Dict:
|
||||
return {}
|
||||
|
||||
def handle_get_user_present_event_api_request(self, data: Dict) -> Dict:
|
||||
return { "userId": data.get("userId", 0), "length": 0, "userPresentEventList": []}
|
||||
|
||||
def handle_get_user_boss_api_request(self, data: Dict) -> Dict:
|
||||
b = self.data.profile.get_boss_list(data["userId"])
|
||||
if b is None:
|
||||
return { "userId": data.get("userId", 0), "userBossData": {}}
|
||||
boss_lst = b._asdict()
|
||||
boss_lst.pop("id")
|
||||
boss_lst.pop("user")
|
||||
|
||||
return { "userId": data.get("userId", 0), "userBossData": boss_lst}
|
||||
|
||||
def handle_get_user_item_api_request(self, data: Dict) -> Dict:
|
||||
kind = int(data["nextIndex"] / 10000000000)
|
||||
next_idx = int(data["nextIndex"] % 10000000000)
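# nextIndex packs the item kind and the paging offset into a single number:
# nextIndex = kind * 10000000000 + offset, so e.g. 20000000050 decodes to
# kind 2 and next_idx 50.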
|
||||
user_item_list = self.data.item.get_items(data["userId"], kind)
|
||||
|
||||
items: list[Dict[str, Any]] = []
|
||||
items: List[Dict[str, Any]] = []
|
||||
for i in range(next_idx, len(user_item_list)):
|
||||
tmp = user_item_list[i]._asdict()
|
||||
tmp.pop("user")
|
||||
@ -442,6 +500,8 @@ class Mai2Base:
|
||||
tmp = chara._asdict()
|
||||
tmp.pop("id")
|
||||
tmp.pop("user")
|
||||
tmp.pop("awakening")
|
||||
tmp.pop("useCount")
|
||||
chara_list.append(tmp)
|
||||
|
||||
return {"userId": data["userId"], "userCharacterList": chara_list}
|
||||
@ -475,6 +535,16 @@ class Mai2Base:
|
||||
|
||||
return {"userId": data["userId"], "userGhost": ghost_dict}
|
||||
|
||||
def handle_get_user_recent_rating_api_request(self, data: Dict) -> Dict:
|
||||
rating = self.data.profile.get_recent_rating(data["userId"])
|
||||
if rating is None:
|
||||
return
|
||||
|
||||
r = rating._asdict()
|
||||
lst = r.get("userRecentRatingList", [])
|
||||
|
||||
return {"userId": data["userId"], "length": len(lst), "userRecentRatingList": lst}
|
||||
|
||||
def handle_get_user_rating_api_request(self, data: Dict) -> Dict:
|
||||
rating = self.data.profile.get_profile_rating(data["userId"], self.version)
|
||||
if rating is None:
|
||||
@ -638,24 +708,157 @@ class Mai2Base:
|
||||
def handle_get_user_region_api_request(self, data: Dict) -> Dict:
|
||||
return {"userId": data["userId"], "length": 0, "userRegionList": []}
|
||||
|
||||
def handle_get_user_music_api_request(self, data: Dict) -> Dict:
|
||||
songs = self.data.score.get_best_scores(data["userId"])
|
||||
music_detail_list = []
|
||||
next_index = 0
|
||||
def handle_get_user_web_option_api_request(self, data: Dict) -> Dict:
|
||||
w = self.data.profile.get_web_option(data["userId"], self.version)
|
||||
if w is None:
|
||||
return {"userId": data["userId"], "userWebOption": {}}
|
||||
|
||||
if songs is not None:
|
||||
for song in songs:
|
||||
tmp = song._asdict()
|
||||
web_opt = w._asdict()
|
||||
web_opt.pop("id")
|
||||
web_opt.pop("user")
|
||||
web_opt.pop("version")
|
||||
|
||||
return {"userId": data["userId"], "userWebOption": web_opt}
|
||||
|
||||
def handle_get_user_survival_api_request(self, data: Dict) -> Dict:
|
||||
return {"userId": data["userId"], "length": 0, "userSurvivalList": []}
|
||||
|
||||
def handle_get_user_grade_api_request(self, data: Dict) -> Dict:
|
||||
g = self.data.profile.get_grade_status(data["userId"])
|
||||
if g is None:
|
||||
return {"userId": data["userId"], "userGradeStatus": {}, "length": 0, "userGradeList": []}
|
||||
grade_stat = g._asdict()
|
||||
grade_stat.pop("id")
|
||||
grade_stat.pop("user")
|
||||
|
||||
return {"userId": data["userId"], "userGradeStatus": grade_stat, "length": 0, "userGradeList": []}
|
||||
|
||||
def handle_get_user_music_api_request(self, data: Dict) -> Dict:
|
||||
user_id = data.get("userId", 0)
|
||||
next_index = data.get("nextIndex", 0)
|
||||
max_ct = data.get("maxCount", 50)
|
||||
upper_lim = next_index + max_ct
|
||||
music_detail_list = []
|
||||
|
||||
if user_id <= 0:
|
||||
self.logger.warn("handle_get_user_music_api_request: Could not find userid in data, or userId is 0")
|
||||
return {}
|
||||
|
||||
songs = self.data.score.get_best_scores(user_id, is_dx=False)
|
||||
if songs is None:
|
||||
self.logger.debug("handle_get_user_music_api_request: get_best_scores returned None!")
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"nextIndex": 0,
|
||||
"userMusicList": [],
|
||||
}
|
||||
|
||||
num_user_songs = len(songs)
|
||||
|
||||
for x in range(next_index, upper_lim):
|
||||
if num_user_songs <= x:
|
||||
break
|
||||
|
||||
tmp = songs[x]._asdict()
|
||||
tmp.pop("id")
|
||||
tmp.pop("user")
|
||||
music_detail_list.append(tmp)
|
||||
|
||||
if len(music_detail_list) == data["maxCount"]:
|
||||
next_index = data["maxCount"] + data["nextIndex"]
|
||||
break
|
||||
|
||||
next_index = 0 if len(music_detail_list) < max_ct or num_user_songs == upper_lim else upper_lim
|
||||
self.logger.info(f"Send songs {next_index}-{upper_lim} ({len(music_detail_list)}) out of {num_user_songs} for user {user_id} (next idx {next_index})")
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"nextIndex": next_index,
|
||||
"userMusicList": [{"userMusicDetailList": music_detail_list}],
|
||||
}
|
||||
|
||||
def handle_upload_user_portrait_api_request(self, data: Dict) -> Dict:
|
||||
self.logger.debug(data)
|
||||
|
||||
def handle_upload_user_photo_api_request(self, data: Dict) -> Dict:
|
||||
if not self.game_config.uploads.photos or not self.game_config.uploads.photos_dir:
|
||||
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
|
||||
|
||||
photo = data.get("userPhoto", {})
|
||||
|
||||
if photo is None or not photo:
|
||||
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
|
||||
|
||||
order_id = int(photo.get("orderId", -1))
|
||||
user_id = int(photo.get("userId", -1))
|
||||
div_num = int(photo.get("divNumber", -1))
|
||||
div_len = int(photo.get("divLength", -1))
|
||||
div_data = photo.get("divData", "")
|
||||
playlog_id = int(photo.get("playlogId", -1))
|
||||
track_num = int(photo.get("trackNo", -1))
|
||||
upload_date = photo.get("uploadDate", "")
|
||||
|
||||
if order_id < 0 or user_id <= 0 or div_num < 0 or div_len <= 0 or not div_data or playlog_id < 0 or track_num <= 0 or not upload_date:
|
||||
self.logger.warn(f"Malformed photo upload request")
|
||||
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
|
||||
|
||||
if order_id == 0 and div_num > 0:
|
||||
self.logger.warn(f"Failed to set orderId properly (still 0 after first chunk)")
|
||||
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
|
||||
|
||||
if div_num == 0 and order_id > 0:
|
||||
self.logger.warn(f"First chuck re-send, Ignore")
|
||||
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
|
||||
|
||||
if div_num >= div_len:
|
||||
self.logger.warn(f"Sent extra chunks ({div_num} >= {div_len})")
|
||||
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
|
||||
|
||||
if div_len >= 100:
|
||||
self.logger.warn(f"Photo too large ({div_len} * 10240 = {div_len * 10240} bytes)")
|
||||
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
|
||||
|
||||
ret_code = order_id + 1
|
||||
photo_chunk = b64decode(div_data)
|
||||
|
||||
if len(photo_chunk) > 10240 or (len(photo_chunk) < 10240 and div_num + 1 != div_len):
|
||||
self.logger.warn(f"Incorrect data size after decoding (Expected 10240, got {len(photo_chunk)})")
|
||||
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
|
||||
|
||||
out_name = f"{self.game_config.uploads.photos_dir}/{user_id}_{playlog_id}_{track_num}"
|
||||
|
||||
if not path.exists(f"{out_name}.bin") and div_num != 0:
|
||||
self.logger.warn(f"Out of order photo upload (div_num {div_num})")
|
||||
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
|
||||
|
||||
if path.exists(f"{out_name}.bin") and div_num == 0:
|
||||
self.logger.warn(f"Duplicate file upload")
|
||||
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
|
||||
|
||||
elif path.exists(f"{out_name}.bin"):
|
||||
fstats = stat(f"{out_name}.bin")
|
||||
if fstats.st_size != 10240 * div_num:
|
||||
self.logger.warn(f"Out of order photo upload (trying to upload div {div_num}, expected div {fstats.st_size / 10240} for file sized {fstats.st_size} bytes)")
|
||||
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
|
||||
|
||||
try:
|
||||
with open(f"{out_name}.bin", "ab") as f:
|
||||
f.write(photo_chunk)
|
||||
|
||||
except Exception:
|
||||
self.logger.error(f"Failed writing to {out_name}.bin")
|
||||
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
|
||||
|
||||
if div_num + 1 == div_len and path.exists(f"{out_name}.bin"):
|
||||
try:
|
||||
p = ImageFile.Parser()
|
||||
with open(f"{out_name}.bin", "rb") as f:
|
||||
p.feed(f.read())
|
||||
|
||||
im = p.close()
|
||||
im.save(f"{out_name}.jpeg")
|
||||
except Exception:
|
||||
self.logger.error(f"File {out_name}.bin failed image validation")
|
||||
|
||||
try:
|
||||
remove(f"{out_name}.bin")
|
||||
|
||||
except Exception:
|
||||
self.logger.error(f"Failed to delete {out_name}.bin, please remove it manually")
|
||||
|
||||
return {'returnCode': ret_code, 'apiName': 'UploadUserPhotoApi'}
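A rough client-side sketch of how a finished photo could be split to satisfy the checks above; the request shape mirrors the keys the handler reads, while userId, playlogId, trackNo and uploadDate are placeholder values:

from base64 import b64encode

CHUNK = 10240  # the handler expects every chunk except the last to be exactly this size

def build_photo_requests(jpeg_bytes: bytes) -> list:
    # Hypothetical helper: split one JPEG (under 100 chunks, i.e. under ~1 MB)
    # into the chunked, base64-encoded bodies UploadUserPhotoApi expects.
    chunks = [jpeg_bytes[i:i + CHUNK] for i in range(0, len(jpeg_bytes), CHUNK)]
    requests = []
    for div_num, chunk in enumerate(chunks):
        requests.append({
            "userPhoto": {
                "orderId": div_num,  # the handler answers each accepted chunk with orderId + 1
                "userId": 10000,
                "divNumber": div_num,
                "divLength": len(chunks),
                "divData": b64encode(chunk).decode("utf-8"),
                "playlogId": 1,
                "trackNo": 1,
                "uploadDate": "2023-07-16 12:00:00.0",
            }
        })
    return requests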
|
||||
|
@ -19,7 +19,59 @@ class Mai2ServerConfig:
|
||||
)
|
||||
)
|
||||
|
||||
class Mai2DeliverConfig:
|
||||
def __init__(self, parent: "Mai2Config") -> None:
|
||||
self.__config = parent
|
||||
|
||||
@property
|
||||
def enable(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "mai2", "deliver", "enable", default=False
|
||||
)
|
||||
|
||||
@property
|
||||
def udbdl_enable(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "mai2", "deliver", "udbdl_enable", default=False
|
||||
)
|
||||
|
||||
@property
|
||||
def content_folder(self) -> int:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "mai2", "server", "content_folder", default=""
|
||||
)
|
||||
|
||||
class Mai2UploadsConfig:
|
||||
def __init__(self, parent: "Mai2Config") -> None:
|
||||
self.__config = parent
|
||||
|
||||
@property
|
||||
def photos(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "mai2", "uploads", "photos", default=False
|
||||
)
|
||||
|
||||
@property
|
||||
def photos_dir(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "mai2", "uploads", "photos_dir", default=""
|
||||
)
|
||||
|
||||
@property
|
||||
def movies(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "mai2", "uploads", "movies", default=False
|
||||
)
|
||||
|
||||
@property
|
||||
def movies_dir(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "mai2", "uploads", "movies_dir", default=""
|
||||
)
|
||||
|
||||
|
||||
class Mai2Config(dict):
|
||||
def __init__(self) -> None:
|
||||
self.server = Mai2ServerConfig(self)
|
||||
self.deliver = Mai2DeliverConfig(self)
|
||||
self.uploads = Mai2UploadsConfig(self)
|
||||
|
@ -20,26 +20,60 @@ class Mai2Constants:
|
||||
|
||||
DATE_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
|
||||
|
||||
GAME_CODE = "SDEZ"
|
||||
GAME_CODE = "SBXL"
|
||||
GAME_CODE_GREEN = "SBZF"
|
||||
GAME_CODE_ORANGE = "SDBM"
|
||||
GAME_CODE_PINK = "SDCQ"
|
||||
GAME_CODE_MURASAKI = "SDDK"
|
||||
GAME_CODE_MILK = "SDDZ"
|
||||
GAME_CODE_FINALE = "SDEY"
|
||||
GAME_CODE_DX = "SDEZ"
|
||||
|
||||
CONFIG_NAME = "mai2.yaml"
|
||||
|
||||
VER_MAIMAI_DX = 0
|
||||
VER_MAIMAI_DX_PLUS = 1
|
||||
VER_MAIMAI_DX_SPLASH = 2
|
||||
VER_MAIMAI_DX_SPLASH_PLUS = 3
|
||||
VER_MAIMAI_DX_UNIVERSE = 4
|
||||
VER_MAIMAI_DX_UNIVERSE_PLUS = 5
|
||||
VER_MAIMAI_DX_FESTIVAL = 6
|
||||
VER_MAIMAI = 0
|
||||
VER_MAIMAI_PLUS = 1
|
||||
VER_MAIMAI_GREEN = 2
|
||||
VER_MAIMAI_GREEN_PLUS = 3
|
||||
VER_MAIMAI_ORANGE = 4
|
||||
VER_MAIMAI_ORANGE_PLUS = 5
|
||||
VER_MAIMAI_PINK = 6
|
||||
VER_MAIMAI_PINK_PLUS = 7
|
||||
VER_MAIMAI_MURASAKI = 8
|
||||
VER_MAIMAI_MURASAKI_PLUS = 9
|
||||
VER_MAIMAI_MILK = 10
|
||||
VER_MAIMAI_MILK_PLUS = 11
|
||||
VER_MAIMAI_FINALE = 12
|
||||
|
||||
VER_MAIMAI_DX = 13
|
||||
VER_MAIMAI_DX_PLUS = 14
|
||||
VER_MAIMAI_DX_SPLASH = 15
|
||||
VER_MAIMAI_DX_SPLASH_PLUS = 16
|
||||
VER_MAIMAI_DX_UNIVERSE = 17
|
||||
VER_MAIMAI_DX_UNIVERSE_PLUS = 18
|
||||
VER_MAIMAI_DX_FESTIVAL = 19
|
||||
|
||||
VERSION_STRING = (
|
||||
"maimai",
|
||||
"maimai PLUS",
|
||||
"maimai GreeN",
|
||||
"maimai GreeN PLUS",
|
||||
"maimai ORANGE",
|
||||
"maimai ORANGE PLUS",
|
||||
"maimai PiNK",
|
||||
"maimai PiNK PLUS",
|
||||
"maimai MURASAKi",
|
||||
"maimai MURASAKi PLUS",
|
||||
"maimai MiLK",
|
||||
"maimai MiLK PLUS",
|
||||
"maimai FiNALE",
|
||||
"maimai DX",
|
||||
"maimai DX PLUS",
|
||||
"maimai DX Splash",
|
||||
"maimai DX Splash PLUS",
|
||||
"maimai DX Universe",
|
||||
"maimai DX Universe PLUS",
|
||||
"maimai DX Festival",
|
||||
"maimai DX UNiVERSE",
|
||||
"maimai DX UNiVERSE PLUS",
|
||||
"maimai DX FESTiVAL",
|
||||
)
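With the renumbering above, the constants index straight into VERSION_STRING; for example:

from titles.mai2.const import Mai2Constants

assert Mai2Constants.VERSION_STRING[Mai2Constants.VER_MAIMAI_FINALE] == "maimai FiNALE"
assert Mai2Constants.VERSION_STRING[Mai2Constants.VER_MAIMAI_DX] == "maimai DX"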
|
||||
|
||||
@classmethod
|
||||
|
574
titles/mai2/dx.py
Normal file
574
titles/mai2/dx.py
Normal file
@ -0,0 +1,574 @@
|
||||
from typing import Any, List, Dict
|
||||
from datetime import datetime, timedelta
|
||||
import pytz
|
||||
import json
|
||||
from random import randint
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.mai2.base import Mai2Base
|
||||
from titles.mai2.config import Mai2Config
|
||||
from titles.mai2.const import Mai2Constants
|
||||
|
||||
|
||||
class Mai2DX(Mai2Base):
|
||||
def __init__(self, cfg: CoreConfig, game_cfg: Mai2Config) -> None:
|
||||
super().__init__(cfg, game_cfg)
|
||||
self.version = Mai2Constants.VER_MAIMAI_DX
|
||||
|
||||
if self.core_config.server.is_develop and self.core_config.title.port > 0:
|
||||
self.old_server = f"http://{self.core_config.title.hostname}:{self.core_config.title.port}/SDEZ/100/"
|
||||
|
||||
else:
|
||||
self.old_server = f"http://{self.core_config.title.hostname}/SDEZ/100/"
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict):
|
||||
return {
|
||||
"gameSetting": {
|
||||
"isMaintenance": False,
|
||||
"requestInterval": 1800,
|
||||
"rebootStartTime": "2020-01-01 07:00:00.0",
|
||||
"rebootEndTime": "2020-01-01 07:59:59.0",
|
||||
"movieUploadLimit": 100,
|
||||
"movieStatus": 1,
|
||||
"movieServerUri": self.old_server + "movie/",
|
||||
"deliverServerUri": self.old_server + "deliver/" if self.can_deliver and self.game_config.deliver.enable else "",
|
||||
"oldServerUri": self.old_server + "old",
|
||||
"usbDlServerUri": self.old_server + "usbdl/" if self.can_deliver and self.game_config.deliver.udbdl_enable else "",
|
||||
"rebootInterval": 0,
|
||||
},
|
||||
"isAouAccession": False,
|
||||
}
|
||||
|
||||
def handle_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_detail(data["userId"], self.version)
|
||||
o = self.data.profile.get_profile_option(data["userId"], self.version)
|
||||
if p is None or o is None:
|
||||
return {} # Register
|
||||
profile = p._asdict()
|
||||
option = o._asdict()
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userName": profile["userName"],
|
||||
"isLogin": False,
|
||||
"lastGameId": profile["lastGameId"],
|
||||
"lastDataVersion": profile["lastDataVersion"],
|
||||
"lastRomVersion": profile["lastRomVersion"],
|
||||
"lastLoginDate": profile["lastLoginDate"],
|
||||
"lastPlayDate": profile["lastPlayDate"],
|
||||
"playerRating": profile["playerRating"],
|
||||
"nameplateId": 0, # Unused
|
||||
"iconId": profile["iconId"],
|
||||
"trophyId": 0, # Unused
|
||||
"partnerId": profile["partnerId"],
|
||||
"frameId": profile["frameId"],
|
||||
"dispRate": option[
|
||||
"dispRate"
|
||||
], # 0: all/begin, 1: disprate, 2: dispDan, 3: hide, 4: end
|
||||
"totalAwake": profile["totalAwake"],
|
||||
"isNetMember": profile["isNetMember"],
|
||||
"dailyBonusDate": profile["dailyBonusDate"],
|
||||
"headPhoneVolume": option["headPhoneVolume"],
|
||||
"isInherit": False, # Not sure what this is or does??
|
||||
"banState": profile["banState"]
|
||||
if profile["banState"] is not None
|
||||
else 0, # New with uni+
|
||||
}
|
||||
|
||||
def handle_upload_user_playlog_api_request(self, data: Dict) -> Dict:
|
||||
user_id = data["userId"]
|
||||
playlog = data["userPlaylog"]
|
||||
|
||||
self.data.score.put_playlog(user_id, playlog)
|
||||
|
||||
return {"returnCode": 1, "apiName": "UploadUserPlaylogApi"}
|
||||
|
||||
def handle_upsert_user_chargelog_api_request(self, data: Dict) -> Dict:
|
||||
user_id = data["userId"]
|
||||
charge = data["userCharge"]
|
||||
|
||||
# remove the ".0" from the date string, festival only?
|
||||
charge["purchaseDate"] = charge["purchaseDate"].replace(".0", "")
|
||||
self.data.item.put_charge(
|
||||
user_id,
|
||||
charge["chargeId"],
|
||||
charge["stock"],
|
||||
datetime.strptime(charge["purchaseDate"], Mai2Constants.DATE_TIME_FORMAT),
|
||||
datetime.strptime(charge["validDate"], Mai2Constants.DATE_TIME_FORMAT),
|
||||
)
|
||||
|
||||
return {"returnCode": 1, "apiName": "UpsertUserChargelogApi"}
|
||||
|
||||
def handle_upsert_user_all_api_request(self, data: Dict) -> Dict:
|
||||
user_id = data["userId"]
|
||||
upsert = data["upsertUserAll"]
|
||||
|
||||
if "userData" in upsert and len(upsert["userData"]) > 0:
|
||||
upsert["userData"][0]["isNetMember"] = 1
|
||||
upsert["userData"][0].pop("accessCode")
|
||||
self.data.profile.put_profile_detail(
|
||||
user_id, self.version, upsert["userData"][0]
|
||||
)
|
||||
|
||||
if "userExtend" in upsert and len(upsert["userExtend"]) > 0:
|
||||
self.data.profile.put_profile_extend(
|
||||
user_id, self.version, upsert["userExtend"][0]
|
||||
)
|
||||
|
||||
if "userGhost" in upsert:
|
||||
for ghost in upsert["userGhost"]:
|
||||
self.data.profile.put_profile_ghost(user_id, self.version, ghost)
|
||||
|
||||
if "userOption" in upsert and len(upsert["userOption"]) > 0:
|
||||
self.data.profile.put_profile_option(
|
||||
user_id, self.version, upsert["userOption"][0]
|
||||
)
|
||||
|
||||
if "userRatingList" in upsert and len(upsert["userRatingList"]) > 0:
|
||||
self.data.profile.put_profile_rating(
|
||||
user_id, self.version, upsert["userRatingList"][0]
|
||||
)
|
||||
|
||||
if "userActivityList" in upsert and len(upsert["userActivityList"]) > 0:
|
||||
for k, v in upsert["userActivityList"][0].items():
|
||||
for act in v:
|
||||
self.data.profile.put_profile_activity(user_id, act)
|
||||
|
||||
if "userChargeList" in upsert and len(upsert["userChargeList"]) > 0:
|
||||
for charge in upsert["userChargeList"]:
|
||||
# remove the ".0" from the date string, festival only?
|
||||
charge["purchaseDate"] = charge["purchaseDate"].replace(".0", "")
|
||||
self.data.item.put_charge(
|
||||
user_id,
|
||||
charge["chargeId"],
|
||||
charge["stock"],
|
||||
datetime.strptime(
|
||||
charge["purchaseDate"], Mai2Constants.DATE_TIME_FORMAT
|
||||
),
|
||||
datetime.strptime(
|
||||
charge["validDate"], Mai2Constants.DATE_TIME_FORMAT
|
||||
),
|
||||
)
|
||||
|
||||
if "userCharacterList" in upsert and len(upsert["userCharacterList"]) > 0:
|
||||
for char in upsert["userCharacterList"]:
|
||||
self.data.item.put_character(
|
||||
user_id,
|
||||
char["characterId"],
|
||||
char["level"],
|
||||
char["awakening"],
|
||||
char["useCount"],
|
||||
)
|
||||
|
||||
if "userItemList" in upsert and len(upsert["userItemList"]) > 0:
|
||||
for item in upsert["userItemList"]:
|
||||
self.data.item.put_item(
|
||||
user_id,
|
||||
int(item["itemKind"]),
|
||||
item["itemId"],
|
||||
item["stock"],
|
||||
item["isValid"],
|
||||
)
|
||||
|
||||
if "userLoginBonusList" in upsert and len(upsert["userLoginBonusList"]) > 0:
|
||||
for login_bonus in upsert["userLoginBonusList"]:
|
||||
self.data.item.put_login_bonus(
|
||||
user_id,
|
||||
login_bonus["bonusId"],
|
||||
login_bonus["point"],
|
||||
login_bonus["isCurrent"],
|
||||
login_bonus["isComplete"],
|
||||
)
|
||||
|
||||
if "userMapList" in upsert and len(upsert["userMapList"]) > 0:
|
||||
for map in upsert["userMapList"]:
|
||||
self.data.item.put_map(
|
||||
user_id,
|
||||
map["mapId"],
|
||||
map["distance"],
|
||||
map["isLock"],
|
||||
map["isClear"],
|
||||
map["isComplete"],
|
||||
)
|
||||
|
||||
if "userMusicDetailList" in upsert and len(upsert["userMusicDetailList"]) > 0:
|
||||
for music in upsert["userMusicDetailList"]:
|
||||
self.data.score.put_best_score(user_id, music)
|
||||
|
||||
if "userCourseList" in upsert and len(upsert["userCourseList"]) > 0:
|
||||
for course in upsert["userCourseList"]:
|
||||
self.data.score.put_course(user_id, course)
|
||||
|
||||
if "userFavoriteList" in upsert and len(upsert["userFavoriteList"]) > 0:
|
||||
for fav in upsert["userFavoriteList"]:
|
||||
self.data.item.put_favorite(user_id, fav["kind"], fav["itemIdList"])
|
||||
|
||||
if (
|
||||
"userFriendSeasonRankingList" in upsert
|
||||
and len(upsert["userFriendSeasonRankingList"]) > 0
|
||||
):
|
||||
for fsr in upsert["userFriendSeasonRankingList"]:
|
||||
fsr["recordDate"] = (
|
||||
datetime.strptime(
|
||||
fsr["recordDate"], f"{Mai2Constants.DATE_TIME_FORMAT}.0"
|
||||
)
|
||||
)
|
||||
self.data.item.put_friend_season_ranking(user_id, fsr)
|
||||
|
||||
return {"returnCode": 1, "apiName": "UpsertUserAllApi"}
|
||||
|
||||
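    # --- Editor's sketch (not part of this commit) --------------------------------
    # The upsert handler above parses client timestamps that arrive either as plain
    # Mai2Constants.DATE_TIME_FORMAT strings or with a trailing ".0" fractional part
    # (see the purchaseDate and recordDate handling). A small tolerant parser could
    # centralise that; the helper name below is hypothetical.
    @staticmethod
    def _parse_mai2_datetime(value: str) -> datetime:
        """Parse a maimai timestamp, accepting an optional fractional-second suffix."""
        for fmt in (Mai2Constants.DATE_TIME_FORMAT, f"{Mai2Constants.DATE_TIME_FORMAT}.%f"):
            try:
                return datetime.strptime(value, fmt)
            except ValueError:
                continue
        raise ValueError(f"unrecognised maimai timestamp: {value!r}")
    # -------------------------------------------------------------------------------
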
def handle_get_user_data_api_request(self, data: Dict) -> Dict:
|
||||
profile = self.data.profile.get_profile_detail(data["userId"], self.version)
|
||||
if profile is None:
|
||||
return
|
||||
|
||||
profile_dict = profile._asdict()
|
||||
profile_dict.pop("id")
|
||||
profile_dict.pop("user")
|
||||
profile_dict.pop("version")
|
||||
|
||||
return {"userId": data["userId"], "userData": profile_dict}
|
||||
|
||||
def handle_get_user_extend_api_request(self, data: Dict) -> Dict:
|
||||
extend = self.data.profile.get_profile_extend(data["userId"], self.version)
|
||||
if extend is None:
|
||||
return
|
||||
|
||||
extend_dict = extend._asdict()
|
||||
extend_dict.pop("id")
|
||||
extend_dict.pop("user")
|
||||
extend_dict.pop("version")
|
||||
|
||||
return {"userId": data["userId"], "userExtend": extend_dict}
|
||||
|
||||
def handle_get_user_option_api_request(self, data: Dict) -> Dict:
|
||||
options = self.data.profile.get_profile_option(data["userId"], self.version)
|
||||
if options is None:
|
||||
return
|
||||
|
||||
options_dict = options._asdict()
|
||||
options_dict.pop("id")
|
||||
options_dict.pop("user")
|
||||
options_dict.pop("version")
|
||||
|
||||
return {"userId": data["userId"], "userOption": options_dict}
|
||||
|
||||
    def handle_get_user_card_api_request(self, data: Dict) -> Dict:
        user_cards = self.data.item.get_cards(data["userId"])
        if user_cards is None:
            return {"userId": data["userId"], "nextIndex": 0, "userCardList": []}

        max_ct = data["maxCount"]
        next_idx = data["nextIndex"]
        start_idx = next_idx
        end_idx = max_ct + start_idx

        if len(user_cards[start_idx:]) > max_ct:
            next_idx += max_ct
        else:
            next_idx = 0

        card_list = []
        for card in user_cards:
            tmp = card._asdict()
            tmp.pop("id")
            tmp.pop("user")
            tmp["startDate"] = datetime.strftime(
                tmp["startDate"], Mai2Constants.DATE_TIME_FORMAT
            )
            tmp["endDate"] = datetime.strftime(
                tmp["endDate"], Mai2Constants.DATE_TIME_FORMAT
            )
            card_list.append(tmp)

        return {
            "userId": data["userId"],
            "nextIndex": next_idx,
            "userCardList": card_list[start_idx:end_idx],
        }

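    # --- Editor's sketch (not part of this commit) --------------------------------
    # Several list endpoints in this file share the same maxCount/nextIndex paging
    # contract: return at most maxCount rows starting at nextIndex, and reply with
    # the next offset, or 0 once the list is exhausted. A generic helper
    # (hypothetical name) would look like this:
    @staticmethod
    def _paginate(rows: List[Any], next_idx: int, max_ct: int) -> tuple:
        page = rows[next_idx:next_idx + max_ct]
        new_next = next_idx + max_ct if len(rows) > next_idx + max_ct else 0
        return page, new_next
    # -------------------------------------------------------------------------------
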
def handle_get_user_charge_api_request(self, data: Dict) -> Dict:
|
||||
user_charges = self.data.item.get_charges(data["userId"])
|
||||
if user_charges is None:
|
||||
return {"userId": data["userId"], "length": 0, "userChargeList": []}
|
||||
|
||||
user_charge_list = []
|
||||
for charge in user_charges:
|
||||
tmp = charge._asdict()
|
||||
tmp.pop("id")
|
||||
tmp.pop("user")
|
||||
tmp["purchaseDate"] = datetime.strftime(
|
||||
tmp["purchaseDate"], Mai2Constants.DATE_TIME_FORMAT
|
||||
)
|
||||
tmp["validDate"] = datetime.strftime(
|
||||
tmp["validDate"], Mai2Constants.DATE_TIME_FORMAT
|
||||
)
|
||||
|
||||
user_charge_list.append(tmp)
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(user_charge_list),
|
||||
"userChargeList": user_charge_list,
|
||||
}
|
||||
|
||||
    def handle_get_user_item_api_request(self, data: Dict) -> Dict:
        kind = int(data["nextIndex"] / 10000000000)
        next_idx = int(data["nextIndex"] % 10000000000)
        user_item_list = self.data.item.get_items(data["userId"], kind)

        items: List[Dict[str, Any]] = []
        for i in range(next_idx, len(user_item_list)):
            tmp = user_item_list[i]._asdict()
            tmp.pop("user")
            tmp.pop("id")
            items.append(tmp)
            if len(items) >= int(data["maxCount"]):
                break

        xout = kind * 10000000000 + next_idx + len(items)

        if len(items) < int(data["maxCount"]):
            next_idx = 0
        else:
            next_idx = xout

        return {
            "userId": data["userId"],
            "nextIndex": next_idx,
            "itemKind": kind,
            "userItemList": items,
        }

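    # --- Editor's sketch (not part of this commit) --------------------------------
    # The item endpoint above packs the item kind and the per-kind offset into a
    # single cursor: nextIndex = kind * 10_000_000_000 + offset, which the handler
    # unpacks with division and modulo. Hypothetical helpers making that explicit:
    @staticmethod
    def _pack_item_cursor(kind: int, offset: int) -> int:
        return kind * 10_000_000_000 + offset

    @staticmethod
    def _unpack_item_cursor(cursor: int) -> tuple:
        return cursor // 10_000_000_000, cursor % 10_000_000_000
    # -------------------------------------------------------------------------------
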
def handle_get_user_character_api_request(self, data: Dict) -> Dict:
|
||||
characters = self.data.item.get_characters(data["userId"])
|
||||
|
||||
chara_list = []
|
||||
for chara in characters:
|
||||
tmp = chara._asdict()
|
||||
tmp.pop("id")
|
||||
tmp.pop("user")
|
||||
chara_list.append(tmp)
|
||||
|
||||
return {"userId": data["userId"], "userCharacterList": chara_list}
|
||||
|
||||
def handle_get_user_favorite_api_request(self, data: Dict) -> Dict:
|
||||
favorites = self.data.item.get_favorites(data["userId"], data["itemKind"])
|
||||
if favorites is None:
|
||||
return
|
||||
|
||||
userFavs = []
|
||||
for fav in favorites:
|
||||
userFavs.append(
|
||||
{
|
||||
"userId": data["userId"],
|
||||
"itemKind": fav["itemKind"],
|
||||
"itemIdList": fav["itemIdList"],
|
||||
}
|
||||
)
|
||||
|
||||
return {"userId": data["userId"], "userFavoriteData": userFavs}
|
||||
|
||||
def handle_get_user_ghost_api_request(self, data: Dict) -> Dict:
|
||||
ghost = self.data.profile.get_profile_ghost(data["userId"], self.version)
|
||||
if ghost is None:
|
||||
return
|
||||
|
||||
ghost_dict = ghost._asdict()
|
||||
ghost_dict.pop("user")
|
||||
ghost_dict.pop("id")
|
||||
ghost_dict.pop("version_int")
|
||||
|
||||
return {"userId": data["userId"], "userGhost": ghost_dict}
|
||||
|
||||
def handle_get_user_rating_api_request(self, data: Dict) -> Dict:
|
||||
rating = self.data.profile.get_profile_rating(data["userId"], self.version)
|
||||
if rating is None:
|
||||
return
|
||||
|
||||
rating_dict = rating._asdict()
|
||||
rating_dict.pop("user")
|
||||
rating_dict.pop("id")
|
||||
rating_dict.pop("version")
|
||||
|
||||
return {"userId": data["userId"], "userRating": rating_dict}
|
||||
|
||||
def handle_get_user_activity_api_request(self, data: Dict) -> Dict:
|
||||
"""
|
||||
kind 1 is playlist, kind 2 is music list
|
||||
"""
|
||||
playlist = self.data.profile.get_profile_activity(data["userId"], 1)
|
||||
musiclist = self.data.profile.get_profile_activity(data["userId"], 2)
|
||||
if playlist is None or musiclist is None:
|
||||
return
|
||||
|
||||
plst = []
|
||||
mlst = []
|
||||
|
||||
for play in playlist:
|
||||
tmp = play._asdict()
|
||||
tmp["id"] = tmp["activityId"]
|
||||
tmp.pop("activityId")
|
||||
tmp.pop("user")
|
||||
plst.append(tmp)
|
||||
|
||||
for music in musiclist:
|
||||
tmp = music._asdict()
|
||||
tmp["id"] = tmp["activityId"]
|
||||
tmp.pop("activityId")
|
||||
tmp.pop("user")
|
||||
mlst.append(tmp)
|
||||
|
||||
return {"userActivity": {"playList": plst, "musicList": mlst}}
|
||||
|
||||
def handle_get_user_course_api_request(self, data: Dict) -> Dict:
|
||||
user_courses = self.data.score.get_courses(data["userId"])
|
||||
if user_courses is None:
|
||||
return {"userId": data["userId"], "nextIndex": 0, "userCourseList": []}
|
||||
|
||||
course_list = []
|
||||
for course in user_courses:
|
||||
tmp = course._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
course_list.append(tmp)
|
||||
|
||||
return {"userId": data["userId"], "nextIndex": 0, "userCourseList": course_list}
|
||||
|
||||
def handle_get_user_portrait_api_request(self, data: Dict) -> Dict:
|
||||
# No support for custom pfps
|
||||
return {"length": 0, "userPortraitList": []}
|
||||
|
||||
def handle_get_user_friend_season_ranking_api_request(self, data: Dict) -> Dict:
|
||||
friend_season_ranking = self.data.item.get_friend_season_ranking(data["userId"])
|
||||
if friend_season_ranking is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"nextIndex": 0,
|
||||
"userFriendSeasonRankingList": [],
|
||||
}
|
||||
|
||||
friend_season_ranking_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
for x in range(next_idx, len(friend_season_ranking)):
|
||||
tmp = friend_season_ranking[x]._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
tmp["recordDate"] = datetime.strftime(
|
||||
tmp["recordDate"], f"{Mai2Constants.DATE_TIME_FORMAT}.0"
|
||||
)
|
||||
friend_season_ranking_list.append(tmp)
|
||||
|
||||
if len(friend_season_ranking_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(friend_season_ranking) >= next_idx + max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = 0
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"nextIndex": next_idx,
|
||||
"userFriendSeasonRankingList": friend_season_ranking_list,
|
||||
}
|
||||
|
||||
def handle_get_user_map_api_request(self, data: Dict) -> Dict:
|
||||
maps = self.data.item.get_maps(data["userId"])
|
||||
if maps is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"nextIndex": 0,
|
||||
"userMapList": [],
|
||||
}
|
||||
|
||||
map_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
for x in range(next_idx, len(maps)):
|
||||
tmp = maps[x]._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
map_list.append(tmp)
|
||||
|
||||
if len(map_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(maps) >= next_idx + max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = 0
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"nextIndex": next_idx,
|
||||
"userMapList": map_list,
|
||||
}
|
||||
|
||||
def handle_get_user_login_bonus_api_request(self, data: Dict) -> Dict:
|
||||
login_bonuses = self.data.item.get_login_bonuses(data["userId"])
|
||||
if login_bonuses is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"nextIndex": 0,
|
||||
"userLoginBonusList": [],
|
||||
}
|
||||
|
||||
login_bonus_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
for x in range(next_idx, len(login_bonuses)):
|
||||
tmp = login_bonuses[x]._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
login_bonus_list.append(tmp)
|
||||
|
||||
if len(login_bonus_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(login_bonuses) >= next_idx + max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = 0
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"nextIndex": next_idx,
|
||||
"userLoginBonusList": login_bonus_list,
|
||||
}
|
||||
|
||||
def handle_get_user_region_api_request(self, data: Dict) -> Dict:
|
||||
return {"userId": data["userId"], "length": 0, "userRegionList": []}
|
||||
|
||||
def handle_get_user_music_api_request(self, data: Dict) -> Dict:
|
||||
songs = self.data.score.get_best_scores(data["userId"])
|
||||
music_detail_list = []
|
||||
next_index = 0
|
||||
|
||||
if songs is not None:
|
||||
for song in songs:
|
||||
tmp = song._asdict()
|
||||
tmp.pop("id")
|
||||
tmp.pop("user")
|
||||
music_detail_list.append(tmp)
|
||||
|
||||
if len(music_detail_list) == data["maxCount"]:
|
||||
next_index = data["maxCount"] + data["nextIndex"]
|
||||
break
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"nextIndex": next_index,
|
||||
"userMusicList": [{"userMusicDetailList": music_detail_list}],
|
||||
}
|
||||
|
||||
    def handle_user_login_api_request(self, data: Dict) -> Dict:
        ret = super().handle_user_login_api_request(data)
        if ret is None or not ret:
            return ret
        ret["loginId"] = ret.get("loginCount", 0)
        return ret

@ -4,12 +4,12 @@ import pytz
|
||||
import json
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.mai2.base import Mai2Base
|
||||
from titles.mai2.dx import Mai2DX
|
||||
from titles.mai2.config import Mai2Config
|
||||
from titles.mai2.const import Mai2Constants
|
||||
|
||||
|
||||
class Mai2Plus(Mai2Base):
|
||||
class Mai2DXPlus(Mai2DX):
|
||||
def __init__(self, cfg: CoreConfig, game_cfg: Mai2Config) -> None:
|
||||
super().__init__(cfg, game_cfg)
|
||||
self.version = Mai2Constants.VER_MAIMAI_DX_PLUS
|
@ -14,7 +14,7 @@ class Mai2Festival(Mai2UniversePlus):
|
||||
def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
user_data = super().handle_cm_get_user_preview_api_request(data)
|
||||
|
||||
# hardcode lastDataVersion for CardMaker 1.36
|
||||
# hardcode lastDataVersion for CardMaker 1.35
|
||||
user_data["lastDataVersion"] = "1.30.00"
|
||||
return user_data
|
||||
|
||||
|
23
titles/mai2/finale.py
Normal file
23
titles/mai2/finale.py
Normal file
@ -0,0 +1,23 @@
|
||||
from typing import Any, List, Dict
|
||||
from datetime import datetime, timedelta
|
||||
import pytz
|
||||
import json
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.mai2.base import Mai2Base
|
||||
from titles.mai2.config import Mai2Config
|
||||
from titles.mai2.const import Mai2Constants
|
||||
|
||||
|
||||
class Mai2Finale(Mai2Base):
|
||||
def __init__(self, cfg: CoreConfig, game_cfg: Mai2Config) -> None:
|
||||
super().__init__(cfg, game_cfg)
|
||||
self.version = Mai2Constants.VER_MAIMAI_FINALE
|
||||
self.can_deliver = True
|
||||
self.can_usbdl = True
|
||||
|
||||
if self.core_config.server.is_develop and self.core_config.title.port > 0:
|
||||
self.old_server = f"http://{self.core_config.title.hostname}:{self.core_config.title.port}/SDEY/197/"
|
||||
|
||||
else:
|
||||
self.old_server = f"http://{self.core_config.title.hostname}/SDEY/197/"
|
@ -1,4 +1,5 @@
|
||||
from twisted.web.http import Request
|
||||
from twisted.web.server import NOT_DONE_YET
|
||||
import json
|
||||
import inflection
|
||||
import yaml
|
||||
@ -6,7 +7,7 @@ import string
|
||||
import logging, coloredlogs
|
||||
import zlib
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from os import path
|
||||
from os import path, mkdir
|
||||
from typing import Tuple
|
||||
|
||||
from core.config import CoreConfig
|
||||
@ -14,7 +15,9 @@ from core.utils import Utils
|
||||
from titles.mai2.config import Mai2Config
|
||||
from titles.mai2.const import Mai2Constants
|
||||
from titles.mai2.base import Mai2Base
|
||||
from titles.mai2.plus import Mai2Plus
|
||||
from titles.mai2.finale import Mai2Finale
|
||||
from titles.mai2.dx import Mai2DX
|
||||
from titles.mai2.dxplus import Mai2DXPlus
|
||||
from titles.mai2.splash import Mai2Splash
|
||||
from titles.mai2.splashplus import Mai2SplashPlus
|
||||
from titles.mai2.universe import Mai2Universe
|
||||
@ -33,7 +36,20 @@ class Mai2Servlet:
|
||||
|
||||
self.versions = [
|
||||
Mai2Base,
|
||||
Mai2Plus,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
Mai2Finale,
|
||||
Mai2DX,
|
||||
Mai2DXPlus,
|
||||
Mai2Splash,
|
||||
Mai2SplashPlus,
|
||||
Mai2Universe,
|
||||
@ -42,6 +58,7 @@ class Mai2Servlet:
|
||||
]
|
||||
|
||||
self.logger = logging.getLogger("mai2")
|
||||
if not hasattr(self.logger, "initted"):
|
||||
log_fmt_str = "[%(asctime)s] Mai2 | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
@ -63,6 +80,7 @@ class Mai2Servlet:
|
||||
coloredlogs.install(
|
||||
level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
|
||||
)
|
||||
self.logger.initted = True
|
||||
|
||||
@classmethod
|
||||
def get_allnet_info(
|
||||
@ -82,19 +100,36 @@ class Mai2Servlet:
|
||||
return (
|
||||
True,
|
||||
f"http://{core_cfg.title.hostname}:{core_cfg.title.port}/{game_code}/$v/",
|
||||
f"{core_cfg.title.hostname}:{core_cfg.title.port}/",
|
||||
f"{core_cfg.title.hostname}",
|
||||
)
|
||||
|
||||
return (
|
||||
True,
|
||||
f"http://{core_cfg.title.hostname}/{game_code}/$v/",
|
||||
f"{core_cfg.title.hostname}/",
|
||||
f"{core_cfg.title.hostname}",
|
||||
)
|
||||
|
||||
def setup(self):
|
||||
if self.game_cfg.uploads.photos and self.game_cfg.uploads.photos_dir and not path.exists(self.game_cfg.uploads.photos_dir):
|
||||
try:
|
||||
mkdir(self.game_cfg.uploads.photos_dir)
|
||||
except Exception:
|
||||
self.logger.error(f"Failed to make photo upload directory at {self.game_cfg.uploads.photos_dir}")
|
||||
|
||||
if self.game_cfg.uploads.movies and self.game_cfg.uploads.movies_dir and not path.exists(self.game_cfg.uploads.movies_dir):
|
||||
try:
|
||||
mkdir(self.game_cfg.uploads.movies_dir)
|
||||
except Exception:
|
||||
self.logger.error(f"Failed to make movie upload directory at {self.game_cfg.uploads.movies_dir}")
|
||||
|
||||
def render_POST(self, request: Request, version: int, url_path: str) -> bytes:
|
||||
if url_path.lower() == "ping":
|
||||
return zlib.compress(b'{"returnCode": "1"}')
|
||||
|
||||
elif url_path.startswith("api/movie/"):
|
||||
self.logger.info(f"Movie data: {url_path} - {request.content.getvalue()}")
|
||||
return b""
|
||||
|
||||
req_raw = request.content.getvalue()
|
||||
url = request.uri.decode()
|
||||
url_split = url_path.split("/")
|
||||
@ -102,6 +137,7 @@ class Mai2Servlet:
|
||||
endpoint = url_split[len(url_split) - 1]
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
if request.uri.startswith(b"/SDEZ"):
|
||||
if version < 105: # 1.0
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX
|
||||
elif version >= 105 and version < 110: # Plus
|
||||
@ -117,6 +153,34 @@ class Mai2Servlet:
|
||||
elif version >= 130: # Festival
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_FESTIVAL
|
||||
|
||||
else:
|
||||
if version < 110: # 1.0
|
||||
internal_ver = Mai2Constants.VER_MAIMAI
|
||||
elif version >= 110 and version < 120: # Plus
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_PLUS
|
||||
elif version >= 120 and version < 130: # Green
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_GREEN
|
||||
elif version >= 130 and version < 140: # Green Plus
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_GREEN_PLUS
|
||||
elif version >= 140 and version < 150: # Orange
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_ORANGE
|
||||
elif version >= 150 and version < 160: # Orange Plus
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_ORANGE_PLUS
|
||||
elif version >= 160 and version < 170: # Pink
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_PINK
|
||||
elif version >= 170 and version < 180: # Pink Plus
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_PINK_PLUS
|
||||
elif version >= 180 and version < 185: # Murasaki
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_MURASAKI
|
||||
elif version >= 185 and version < 190: # Murasaki Plus
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_MURASAKI_PLUS
|
||||
elif version >= 190 and version < 195: # Milk
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_MILK
|
||||
elif version >= 195 and version < 197: # Milk Plus
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_MILK_PLUS
|
||||
elif version >= 197: # Finale
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_FINALE
|
||||
|
||||
if all(c in string.hexdigits for c in endpoint) and len(endpoint) == 32:
|
||||
# If we get a 32 character long hex string, it's a hash and we're
|
||||
# doing encrypted. The likelihood of false positives is low but
|
||||
@ -159,3 +223,39 @@ class Mai2Servlet:
|
||||
self.logger.debug(f"Response {resp}")
|
||||
|
||||
return zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))
|
||||
|
||||
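    # --- Editor's sketch (not part of this commit) --------------------------------
    # The long if/elif ladder in render_POST that maps a pre-DX client version number
    # onto a Mai2Constants value could also be expressed as a sorted threshold table.
    # A hypothetical, behaviour-equivalent helper using the same cut-offs:
    @staticmethod
    def _old_internal_ver(version: int) -> int:
        thresholds = [
            (197, Mai2Constants.VER_MAIMAI_FINALE),
            (195, Mai2Constants.VER_MAIMAI_MILK_PLUS),
            (190, Mai2Constants.VER_MAIMAI_MILK),
            (185, Mai2Constants.VER_MAIMAI_MURASAKI_PLUS),
            (180, Mai2Constants.VER_MAIMAI_MURASAKI),
            (170, Mai2Constants.VER_MAIMAI_PINK_PLUS),
            (160, Mai2Constants.VER_MAIMAI_PINK),
            (150, Mai2Constants.VER_MAIMAI_ORANGE_PLUS),
            (140, Mai2Constants.VER_MAIMAI_ORANGE),
            (130, Mai2Constants.VER_MAIMAI_GREEN_PLUS),
            (120, Mai2Constants.VER_MAIMAI_GREEN),
            (110, Mai2Constants.VER_MAIMAI_PLUS),
        ]
        for floor, const in thresholds:
            if version >= floor:
                return const
        return Mai2Constants.VER_MAIMAI
    # -------------------------------------------------------------------------------
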
def render_GET(self, request: Request, version: int, url_path: str) -> bytes:
|
||||
self.logger.info(f"v{version} GET {url_path}")
|
||||
url_split = url_path.split("/")
|
||||
|
||||
if (url_split[0] == "api" and url_split[1] == "movie") or url_split[0] == "movie":
|
||||
if url_split[2] == "moviestart":
|
||||
return json.dumps({"moviestart":{"status":"OK"}}).encode()
|
||||
|
||||
if url_split[0] == "old":
|
||||
if url_split[1] == "ping":
|
||||
self.logger.info(f"v{version} old server ping")
|
||||
return zlib.compress(b"ok")
|
||||
|
||||
elif url_split[1].startswith("userdata"):
|
||||
self.logger.info(f"v{version} old server userdata inquire")
|
||||
return zlib.compress(b"{}")
|
||||
|
||||
elif url_split[1].startswith("friend"):
|
||||
self.logger.info(f"v{version} old server friend inquire")
|
||||
return zlib.compress(b"{}")
|
||||
|
||||
elif url_split[0] == "usbdl":
|
||||
if url_split[1] == "CONNECTIONTEST":
|
||||
self.logger.info(f"v{version} usbdl server test")
|
||||
return zlib.compress(b"ok")
|
||||
|
||||
elif url_split[0] == "deliver":
|
||||
file = url_split[len(url_split) - 1]
|
||||
self.logger.info(f"v{version} {file} deliver inquire")
|
||||
|
||||
if not self.game_cfg.deliver.enable or not path.exists(f"{self.game_cfg.deliver.content_folder}/{file}"):
|
||||
return zlib.compress(b"")
|
||||
|
||||
else:
|
||||
return zlib.compress(b"{}")
|
||||
|
@ -4,6 +4,9 @@ import os
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
from typing import Any, Dict, List, Optional
|
||||
from Crypto.Cipher import AES
|
||||
import zlib
|
||||
import codecs
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.data import Data
|
||||
@ -34,6 +37,7 @@ class Mai2Reader(BaseReader):
|
||||
|
||||
def read(self) -> None:
|
||||
data_dirs = []
|
||||
if self.version >= Mai2Constants.VER_MAIMAI_DX:
|
||||
if self.bin_dir is not None:
|
||||
data_dirs += self.get_data_directories(self.bin_dir)
|
||||
|
||||
@ -47,6 +51,134 @@ class Mai2Reader(BaseReader):
|
||||
self.read_music(f"{dir}/music")
|
||||
self.read_tickets(f"{dir}/ticket")
|
||||
|
||||
else:
|
||||
if not os.path.exists(f"{self.bin_dir}/tables"):
|
||||
self.logger.error(f"tables directory not found in {self.bin_dir}")
|
||||
return
|
||||
|
||||
if self.version >= Mai2Constants.VER_MAIMAI_MILK:
|
||||
if self.extra is None:
|
||||
self.logger.error("Milk - Finale requre an AES key via a hex string send as the --extra flag")
|
||||
return
|
||||
|
||||
key = bytes.fromhex(self.extra)
|
||||
|
||||
else:
|
||||
key = None
|
||||
|
||||
evt_table = self.load_table_raw(f"{self.bin_dir}/tables", "mmEvent.bin", key)
|
||||
txt_table = self.load_table_raw(f"{self.bin_dir}/tables", "mmtextout_jp.bin", key)
|
||||
score_table = self.load_table_raw(f"{self.bin_dir}/tables", "mmScore.bin", key)
|
||||
|
||||
self.read_old_events(evt_table)
|
||||
self.read_old_music(score_table, txt_table)
|
||||
|
||||
if self.opt_dir is not None:
|
||||
evt_table = self.load_table_raw(f"{self.opt_dir}/tables", "mmEvent.bin", key)
|
||||
txt_table = self.load_table_raw(f"{self.opt_dir}/tables", "mmtextout_jp.bin", key)
|
||||
score_table = self.load_table_raw(f"{self.opt_dir}/tables", "mmScore.bin", key)
|
||||
|
||||
self.read_old_events(evt_table)
|
||||
self.read_old_music(score_table, txt_table)
|
||||
|
||||
return
|
||||
|
||||
    def load_table_raw(self, dir: str, file: str, key: Optional[bytes]) -> Optional[List[Dict[str, str]]]:
        if not os.path.exists(f"{dir}/{file}"):
            self.logger.warn(f"file {file} does not exist in directory {dir}, skipping")
            return

        self.logger.info(f"Load table {file} from {dir}")
        if key is not None:
            cipher = AES.new(key, AES.MODE_CBC)
            with open(f"{dir}/{file}", "rb") as f:
                f_encrypted = f.read()
                f_data = cipher.decrypt(f_encrypted)[0x10:]

        else:
            with open(f"{dir}/{file}", "rb") as f:
                f_data = f.read()[0x10:]

        if f_data is None or not f_data:
            self.logger.warn(f"file {dir}/{file} could not be read, skipping")
            return

        f_data_deflate = zlib.decompress(f_data, wbits=zlib.MAX_WBITS | 16)[0x12:]  # lop off the junk at the beginning
        f_decoded = codecs.utf_16_le_decode(f_data_deflate)[0]
        f_split = f_decoded.splitlines()

has_struct_def = "struct " in f_decoded
|
||||
is_struct = False
|
||||
struct_def = []
|
||||
tbl_content = []
|
||||
|
||||
if has_struct_def:
|
||||
for x in f_split:
|
||||
if x.startswith("struct "):
|
||||
is_struct = True
|
||||
struct_name = x[7:-1]
|
||||
continue
|
||||
|
||||
if x.startswith("};"):
|
||||
is_struct = False
|
||||
break
|
||||
|
||||
if is_struct:
|
||||
try:
|
||||
struct_def.append(x[x.rindex(" ") + 2: -1])
|
||||
except ValueError:
|
||||
self.logger.warn(f"rindex failed on line {x}")
|
||||
|
||||
if is_struct:
|
||||
self.logger.warn("Struct not formatted properly")
|
||||
|
||||
if not struct_def:
|
||||
self.logger.warn("Struct def not found")
|
||||
|
||||
name = file[:file.index(".")]
|
||||
if "_" in name:
|
||||
name = name[:file.index("_")]
|
||||
|
||||
for x in f_split:
|
||||
if not x.startswith(name.upper()):
|
||||
continue
|
||||
|
||||
line_match = re.match(r"(\w+)\((.*?)\)([ ]+\/{3}<[ ]+(.*))?", x)
|
||||
if line_match is None:
|
||||
continue
|
||||
|
||||
if not line_match.group(1) == name.upper():
|
||||
self.logger.warn(f"Strange regex match for line {x} -> {line_match}")
|
||||
continue
|
||||
|
||||
vals = line_match.group(2)
|
||||
comment = line_match.group(4)
|
||||
line_dict = {}
|
||||
|
||||
vals_split = vals.split(",")
|
||||
for y in range(len(vals_split)):
|
||||
stripped = vals_split[y].strip().lstrip("L\"").lstrip("\"").rstrip("\"")
|
||||
if not stripped or stripped is None:
|
||||
continue
|
||||
|
||||
if has_struct_def and len(struct_def) > y:
|
||||
line_dict[struct_def[y]] = stripped
|
||||
|
||||
else:
|
||||
line_dict[f'item_{y}'] = stripped
|
||||
|
||||
if comment:
|
||||
line_dict['comment'] = comment
|
||||
|
||||
tbl_content.append(line_dict)
|
||||
|
||||
if tbl_content:
|
||||
return tbl_content
|
||||
|
||||
else:
|
||||
self.logger.warning("Failed load table content, skipping")
|
||||
return
|
||||
|
||||
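    # --- Editor's sketch (not part of this commit) --------------------------------
    # load_table_raw above reads a classic-maimai table as: optional AES-CBC decrypt,
    # drop the first 0x10 bytes (so the IV fed to the cipher never matters, since a
    # wrong IV only corrupts the first block), gunzip, drop what appears to be an
    # 0x12-byte header/BOM, then decode UTF-16 LE. The same pipeline as a standalone
    # helper (hypothetical name, same assumptions):
    @staticmethod
    def _dump_old_table(path: str, key: Optional[bytes] = None) -> str:
        """Decrypt/inflate/decode one old-format table file and return its text."""
        with open(path, "rb") as f:
            raw = f.read()
        if key is not None:
            # CBC with an all-zero IV: only the first block depends on the IV, and
            # that block is discarded below, matching load_table_raw's behaviour.
            raw = AES.new(key, AES.MODE_CBC, iv=bytes(16)).decrypt(raw)
        payload = raw[0x10:]
        inflated = zlib.decompress(payload, wbits=zlib.MAX_WBITS | 16)
        return codecs.utf_16_le_decode(inflated[0x12:])[0]
    # -------------------------------------------------------------------------------
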
def get_events(self, base_dir: str) -> None:
|
||||
self.logger.info(f"Reading events from {base_dir}...")
|
||||
|
||||
@ -188,3 +320,24 @@ class Mai2Reader(BaseReader):
|
||||
self.version, id, ticket_type, price, name
|
||||
)
|
||||
self.logger.info(f"Added ticket {id}...")
|
||||
|
||||
def read_old_events(self, events: Optional[List[Dict[str, str]]]) -> None:
|
||||
if events is None:
|
||||
return
|
||||
|
||||
for event in events:
|
||||
evt_id = int(event.get('イベントID', '0'))
|
||||
evt_expire_time = float(event.get('オフ時強制時期', '0.0'))
|
||||
is_exp = bool(int(event.get('海外許可', '0')))
|
||||
is_aou = bool(int(event.get('AOU許可', '0')))
|
||||
name = event.get('comment', f'evt_{evt_id}')
|
||||
|
||||
self.data.static.put_game_event(self.version, 0, evt_id, name)
|
||||
|
||||
if not (is_exp or is_aou):
|
||||
self.data.static.toggle_game_event(self.version, evt_id, False)
|
||||
|
||||
def read_old_music(self, scores: Optional[List[Dict[str, str]]], text: Optional[List[Dict[str, str]]]) -> None:
|
||||
if scores is None or text is None:
|
||||
return
|
||||
# TODO
|
||||
|
@ -18,10 +18,11 @@ character = Table(
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("characterId", Integer, nullable=False),
|
||||
Column("level", Integer, nullable=False, server_default="1"),
|
||||
Column("awakening", Integer, nullable=False, server_default="0"),
|
||||
Column("useCount", Integer, nullable=False, server_default="0"),
|
||||
Column("characterId", Integer),
|
||||
Column("level", Integer),
|
||||
Column("awakening", Integer),
|
||||
Column("useCount", Integer),
|
||||
Column("point", Integer),
|
||||
UniqueConstraint("user", "characterId", name="mai2_item_character_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@ -35,12 +36,12 @@ card = Table(
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("cardId", Integer, nullable=False),
|
||||
Column("cardTypeId", Integer, nullable=False),
|
||||
Column("charaId", Integer, nullable=False),
|
||||
Column("mapId", Integer, nullable=False),
|
||||
Column("startDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
|
||||
Column("endDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
|
||||
Column("cardId", Integer),
|
||||
Column("cardTypeId", Integer),
|
||||
Column("charaId", Integer),
|
||||
Column("mapId", Integer),
|
||||
Column("startDate", TIMESTAMP, server_default=func.now()),
|
||||
Column("endDate", TIMESTAMP),
|
||||
UniqueConstraint("user", "cardId", "cardTypeId", name="mai2_item_card_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@ -54,10 +55,10 @@ item = Table(
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("itemId", Integer, nullable=False),
|
||||
Column("itemKind", Integer, nullable=False),
|
||||
Column("stock", Integer, nullable=False, server_default="1"),
|
||||
Column("isValid", Boolean, nullable=False, server_default="1"),
|
||||
Column("itemId", Integer),
|
||||
Column("itemKind", Integer),
|
||||
Column("stock", Integer),
|
||||
Column("isValid", Boolean),
|
||||
UniqueConstraint("user", "itemId", "itemKind", name="mai2_item_item_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@ -71,11 +72,11 @@ map = Table(
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("mapId", Integer, nullable=False),
|
||||
Column("distance", Integer, nullable=False),
|
||||
Column("isLock", Boolean, nullable=False, server_default="0"),
|
||||
Column("isClear", Boolean, nullable=False, server_default="0"),
|
||||
Column("isComplete", Boolean, nullable=False, server_default="0"),
|
||||
Column("mapId", Integer),
|
||||
Column("distance", Integer),
|
||||
Column("isLock", Boolean),
|
||||
Column("isClear", Boolean),
|
||||
Column("isComplete", Boolean),
|
||||
UniqueConstraint("user", "mapId", name="mai2_item_map_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@ -89,10 +90,10 @@ login_bonus = Table(
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("bonusId", Integer, nullable=False),
|
||||
Column("point", Integer, nullable=False),
|
||||
Column("isCurrent", Boolean, nullable=False, server_default="0"),
|
||||
Column("isComplete", Boolean, nullable=False, server_default="0"),
|
||||
Column("bonusId", Integer),
|
||||
Column("point", Integer),
|
||||
Column("isCurrent", Boolean),
|
||||
Column("isComplete", Boolean),
|
||||
UniqueConstraint("user", "bonusId", name="mai2_item_login_bonus_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@ -106,12 +107,12 @@ friend_season_ranking = Table(
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("seasonId", Integer, nullable=False),
|
||||
Column("point", Integer, nullable=False),
|
||||
Column("rank", Integer, nullable=False),
|
||||
Column("rewardGet", Boolean, nullable=False),
|
||||
Column("userName", String(8), nullable=False),
|
||||
Column("recordDate", TIMESTAMP, nullable=False),
|
||||
Column("seasonId", Integer),
|
||||
Column("point", Integer),
|
||||
Column("rank", Integer),
|
||||
Column("rewardGet", Boolean),
|
||||
Column("userName", String(8)),
|
||||
Column("recordDate", TIMESTAMP),
|
||||
UniqueConstraint(
|
||||
"user", "seasonId", "userName", name="mai2_item_friend_season_ranking_uk"
|
||||
),
|
||||
@ -127,7 +128,7 @@ favorite = Table(
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("itemKind", Integer, nullable=False),
|
||||
Column("itemKind", Integer),
|
||||
Column("itemIdList", JSON),
|
||||
UniqueConstraint("user", "itemKind", name="mai2_item_favorite_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
@ -142,10 +143,10 @@ charge = Table(
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("chargeId", Integer, nullable=False),
|
||||
Column("stock", Integer, nullable=False),
|
||||
Column("purchaseDate", String(255), nullable=False),
|
||||
Column("validDate", String(255), nullable=False),
|
||||
Column("chargeId", Integer),
|
||||
Column("stock", Integer),
|
||||
Column("purchaseDate", String(255)),
|
||||
Column("validDate", String(255)),
|
||||
UniqueConstraint("user", "chargeId", name="mai2_item_charge_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@ -161,11 +162,11 @@ print_detail = Table(
|
||||
),
|
||||
Column("orderId", Integer),
|
||||
Column("printNumber", Integer),
|
||||
Column("printDate", TIMESTAMP, nullable=False, server_default=func.now()),
|
||||
Column("serialId", String(20), nullable=False),
|
||||
Column("placeId", Integer, nullable=False),
|
||||
Column("clientId", String(11), nullable=False),
|
||||
Column("printerSerialId", String(20), nullable=False),
|
||||
Column("printDate", TIMESTAMP, server_default=func.now()),
|
||||
Column("serialId", String(20)),
|
||||
Column("placeId", Integer),
|
||||
Column("clientId", String(11)),
|
||||
Column("printerSerialId", String(20)),
|
||||
Column("cardRomVersion", Integer),
|
||||
Column("isHolograph", Boolean, server_default="1"),
|
||||
Column("printOption1", Boolean, server_default="0"),
|
||||
@ -333,6 +334,19 @@ class Mai2ItemData(BaseData):
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_character_(self, user_id: int, char_data: Dict) -> Optional[int]:
|
||||
char_data["user"] = user_id
|
||||
sql = insert(character).values(**char_data)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(**char_data)
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.warn(
|
||||
f"put_character_: failed to insert item! user_id: {user_id}"
|
||||
)
|
||||
return None
|
||||
return result.lastrowid
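    # --- Editor's sketch (not part of this commit) --------------------------------
    # put_character_ above upserts a whole character dict as-is, unlike the older
    # positional put_character below. Example call using the columns defined in the
    # character table of this schema (values illustrative):
    #
    #     item_data.put_character_(user_id, {
    #         "characterId": char["characterId"],
    #         "level": char["level"],
    #         "awakening": char["awakening"],
    #         "useCount": char["useCount"],
    #         "point": char.get("point", 0),
    #     })
    # -------------------------------------------------------------------------------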
|
||||
|
||||
def put_character(
|
||||
self,
|
||||
user_id: int,
|
||||
@ -444,6 +458,8 @@ class Mai2ItemData(BaseData):
|
||||
card_kind: int,
|
||||
chara_id: int,
|
||||
map_id: int,
|
||||
start_date: datetime,
|
||||
end_date: datetime,
|
||||
) -> Optional[Row]:
|
||||
sql = insert(card).values(
|
||||
user=user_id,
|
||||
@ -451,9 +467,13 @@ class Mai2ItemData(BaseData):
|
||||
cardTypeId=card_kind,
|
||||
charaId=chara_id,
|
||||
mapId=map_id,
|
||||
startDate=start_date,
|
||||
endDate=end_date,
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(charaId=chara_id, mapId=map_id)
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
charaId=chara_id, mapId=map_id, startDate=start_date, endDate=end_date
|
||||
)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
|
@ -99,6 +99,68 @@ detail = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
detail_old = Table(
|
||||
"maimai_profile_detail",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"user",
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("lastDataVersion", Integer),
|
||||
Column("userName", String(8)),
|
||||
Column("point", Integer),
|
||||
Column("totalPoint", Integer),
|
||||
Column("iconId", Integer),
|
||||
Column("nameplateId", Integer),
|
||||
Column("frameId", Integer),
|
||||
Column("trophyId", Integer),
|
||||
Column("playCount", Integer),
|
||||
Column("playVsCount", Integer),
|
||||
Column("playSyncCount", Integer),
|
||||
Column("winCount", Integer),
|
||||
Column("helpCount", Integer),
|
||||
Column("comboCount", Integer),
|
||||
Column("feverCount", Integer),
|
||||
Column("totalHiScore", Integer),
|
||||
Column("totalEasyHighScore", Integer),
|
||||
Column("totalBasicHighScore", Integer),
|
||||
Column("totalAdvancedHighScore", Integer),
|
||||
Column("totalExpertHighScore", Integer),
|
||||
Column("totalMasterHighScore", Integer),
|
||||
Column("totalReMasterHighScore", Integer),
|
||||
Column("totalHighSync", Integer),
|
||||
Column("totalEasySync", Integer),
|
||||
Column("totalBasicSync", Integer),
|
||||
Column("totalAdvancedSync", Integer),
|
||||
Column("totalExpertSync", Integer),
|
||||
Column("totalMasterSync", Integer),
|
||||
Column("totalReMasterSync", Integer),
|
||||
Column("playerRating", Integer),
|
||||
Column("highestRating", Integer),
|
||||
Column("rankAuthTailId", Integer),
|
||||
Column("eventWatchedDate", String(255)),
|
||||
Column("webLimitDate", String(255)),
|
||||
Column("challengeTrackPhase", Integer),
|
||||
Column("firstPlayBits", Integer),
|
||||
Column("lastPlayDate", String(255)),
|
||||
Column("lastPlaceId", Integer),
|
||||
Column("lastPlaceName", String(255)),
|
||||
Column("lastRegionId", Integer),
|
||||
Column("lastRegionName", String(255)),
|
||||
Column("lastClientId", String(255)),
|
||||
Column("lastCountryCode", String(255)),
|
||||
Column("eventPoint", Integer),
|
||||
Column("totalLv", Integer),
|
||||
Column("lastLoginBonusDay", Integer),
|
||||
Column("lastSurvivalBonusDay", Integer),
|
||||
Column("loginBonusLv", Integer),
|
||||
UniqueConstraint("user", "version", name="maimai_profile_detail_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
ghost = Table(
|
||||
"mai2_profile_ghost",
|
||||
metadata,
|
||||
@ -223,6 +285,99 @@ option = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
option_old = Table(
|
||||
"maimai_profile_option",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"user",
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("soudEffect", Integer),
|
||||
Column("mirrorMode", Integer),
|
||||
Column("guideSpeed", Integer),
|
||||
Column("bgInfo", Integer),
|
||||
Column("brightness", Integer),
|
||||
Column("isStarRot", Integer),
|
||||
Column("breakSe", Integer),
|
||||
Column("slideSe", Integer),
|
||||
Column("hardJudge", Integer),
|
||||
Column("isTagJump", Integer),
|
||||
Column("breakSeVol", Integer),
|
||||
Column("slideSeVol", Integer),
|
||||
Column("isUpperDisp", Integer),
|
||||
Column("trackSkip", Integer),
|
||||
Column("optionMode", Integer),
|
||||
Column("simpleOptionParam", Integer),
|
||||
Column("adjustTiming", Integer),
|
||||
Column("dispTiming", Integer),
|
||||
Column("timingPos", Integer),
|
||||
Column("ansVol", Integer),
|
||||
Column("noteVol", Integer),
|
||||
Column("dmgVol", Integer),
|
||||
Column("appealFlame", Integer),
|
||||
Column("isFeverDisp", Integer),
|
||||
Column("dispJudge", Integer),
|
||||
Column("judgePos", Integer),
|
||||
Column("ratingGuard", Integer),
|
||||
Column("selectChara", Integer),
|
||||
Column("sortType", Integer),
|
||||
Column("filterGenre", Integer),
|
||||
Column("filterLevel", Integer),
|
||||
Column("filterRank", Integer),
|
||||
Column("filterVersion", Integer),
|
||||
Column("filterRec", Integer),
|
||||
Column("filterFullCombo", Integer),
|
||||
Column("filterAllPerfect", Integer),
|
||||
Column("filterDifficulty", Integer),
|
||||
Column("filterFullSync", Integer),
|
||||
Column("filterReMaster", Integer),
|
||||
Column("filterMaxFever", Integer),
|
||||
Column("finalSelectId", Integer),
|
||||
Column("finalSelectCategory", Integer),
|
||||
UniqueConstraint("user", "version", name="maimai_profile_option_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
web_opt = Table(
|
||||
"maimai_profile_web_option",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"user",
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("isNetMember", Boolean),
|
||||
Column("dispRate", Integer),
|
||||
Column("dispJudgeStyle", Integer),
|
||||
Column("dispRank", Integer),
|
||||
Column("dispHomeRanker", Integer),
|
||||
Column("dispTotalLv", Integer),
|
||||
UniqueConstraint("user", "version", name="maimai_profile_web_option_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
grade_status = Table(
|
||||
"maimai_profile_grade_status",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"user",
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("gradeVersion", Integer),
|
||||
Column("gradeLevel", Integer),
|
||||
Column("gradeSubLevel", Integer),
|
||||
Column("gradeMaxId", Integer),
|
||||
UniqueConstraint("user", "gradeVersion", name="maimai_profile_grade_status_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
rating = Table(
|
||||
"mai2_profile_rating",
|
||||
metadata,
|
||||
@ -268,43 +423,92 @@ activity = Table(
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("kind", Integer, nullable=False),
|
||||
Column("activityId", Integer, nullable=False),
|
||||
Column("param1", Integer, nullable=False),
|
||||
Column("param2", Integer, nullable=False),
|
||||
Column("param3", Integer, nullable=False),
|
||||
Column("param4", Integer, nullable=False),
|
||||
Column("sortNumber", Integer, nullable=False),
|
||||
Column("kind", Integer),
|
||||
Column("activityId", Integer),
|
||||
Column("param1", Integer),
|
||||
Column("param2", Integer),
|
||||
Column("param3", Integer),
|
||||
Column("param4", Integer),
|
||||
Column("sortNumber", Integer),
|
||||
UniqueConstraint("user", "kind", "activityId", name="mai2_profile_activity_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
boss = Table(
|
||||
"maimai_profile_boss",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("pandoraFlagList0", Integer),
|
||||
Column("pandoraFlagList1", Integer),
|
||||
Column("pandoraFlagList2", Integer),
|
||||
Column("pandoraFlagList3", Integer),
|
||||
Column("pandoraFlagList4", Integer),
|
||||
Column("pandoraFlagList5", Integer),
|
||||
Column("pandoraFlagList6", Integer),
|
||||
Column("emblemFlagList", Integer),
|
||||
UniqueConstraint("user", name="mai2_profile_boss_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
recent_rating = Table(
|
||||
"maimai_profile_recent_rating",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("userRecentRatingList", JSON),
|
||||
UniqueConstraint("user", name="mai2_profile_recent_rating_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
consec_logins = Table(
|
||||
"mai2_profile_consec_logins",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("logins", Integer),
|
||||
UniqueConstraint("user", "version", name="mai2_profile_consec_logins_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
class Mai2ProfileData(BaseData):
|
||||
def put_profile_detail(
|
||||
self, user_id: int, version: int, detail_data: Dict
|
||||
self, user_id: int, version: int, detail_data: Dict, is_dx: bool = True
|
||||
) -> Optional[Row]:
|
||||
detail_data["user"] = user_id
|
||||
detail_data["version"] = version
|
||||
|
||||
if is_dx:
|
||||
sql = insert(detail).values(**detail_data)
|
||||
else:
|
||||
sql = insert(detail_old).values(**detail_data)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(**detail_data)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.warn(
|
||||
f"put_profile: Failed to create profile! user_id {user_id}"
|
||||
f"put_profile: Failed to create profile! user_id {user_id} is_dx {is_dx}"
|
||||
)
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_profile_detail(self, user_id: int, version: int) -> Optional[Row]:
|
||||
def get_profile_detail(self, user_id: int, version: int, is_dx: bool = True) -> Optional[Row]:
|
||||
if is_dx:
|
||||
sql = (
|
||||
select(detail)
|
||||
.where(and_(detail.c.user == user_id, detail.c.version <= version))
|
||||
.order_by(detail.c.version.desc())
|
||||
)
|
||||
|
||||
else:
|
||||
sql = (
|
||||
select(detail_old)
|
||||
.where(and_(detail_old.c.user == user_id, detail_old.c.version <= version))
|
||||
.order_by(detail_old.c.version.desc())
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
@ -365,26 +569,36 @@ class Mai2ProfileData(BaseData):
|
||||
return result.fetchone()
|
||||
|
||||
def put_profile_option(
|
||||
self, user_id: int, version: int, option_data: Dict
|
||||
self, user_id: int, version: int, option_data: Dict, is_dx: bool = True
|
||||
) -> Optional[int]:
|
||||
option_data["user"] = user_id
|
||||
option_data["version"] = version
|
||||
|
||||
if is_dx:
|
||||
sql = insert(option).values(**option_data)
|
||||
else:
|
||||
sql = insert(option_old).values(**option_data)
|
||||
conflict = sql.on_duplicate_key_update(**option_data)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.warn(f"put_profile_option: failed to update! {user_id}")
|
||||
self.logger.warn(f"put_profile_option: failed to update! {user_id} is_dx {is_dx}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_profile_option(self, user_id: int, version: int) -> Optional[Row]:
|
||||
def get_profile_option(self, user_id: int, version: int, is_dx: bool = True) -> Optional[Row]:
|
||||
if is_dx:
|
||||
sql = (
|
||||
select(option)
|
||||
.where(and_(option.c.user == user_id, option.c.version <= version))
|
||||
.order_by(option.c.version.desc())
|
||||
)
|
||||
else:
|
||||
sql = (
|
||||
select(option_old)
|
||||
.where(and_(option_old.c.user == user_id, option_old.c.version <= version))
|
||||
.order_by(option_old.c.version.desc())
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
@ -474,3 +688,130 @@ class Mai2ProfileData(BaseData):
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def put_web_option(self, user_id: int, version: int, web_opts: Dict) -> Optional[int]:
|
||||
web_opts["user"] = user_id
|
||||
web_opts["version"] = version
|
||||
sql = insert(web_opt).values(**web_opts)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(**web_opts)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.warn(
|
||||
f"put_web_option: failed to update! user_id: {user_id}"
|
||||
)
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_web_option(self, user_id: int, version: int) -> Optional[Row]:
|
||||
sql = web_opt.select(and_(web_opt.c.user == user_id, web_opt.c.version == version))
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_grade_status(self, user_id: int, grade_stat: Dict) -> Optional[int]:
|
||||
grade_stat["user"] = user_id
|
||||
sql = insert(grade_status).values(**grade_stat)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(**grade_stat)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.warn(
|
||||
f"put_grade_status: failed to update! user_id: {user_id}"
|
||||
)
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_grade_status(self, user_id: int) -> Optional[Row]:
|
||||
sql = grade_status.select(grade_status.c.user == user_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_boss_list(self, user_id: int, boss_stat: Dict) -> Optional[int]:
|
||||
boss_stat["user"] = user_id
|
||||
sql = insert(boss).values(**boss_stat)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(**boss_stat)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.warn(
|
||||
f"put_boss_list: failed to update! user_id: {user_id}"
|
||||
)
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_boss_list(self, user_id: int) -> Optional[Row]:
|
||||
sql = boss.select(boss.c.user == user_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_recent_rating(self, user_id: int, rr: Dict) -> Optional[int]:
|
||||
sql = insert(recent_rating).values(user=user_id, userRecentRatingList=rr)
|
||||
|
||||
conflict = sql.on_duplicate_key_update({'userRecentRatingList': rr})
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.warn(
|
||||
f"put_recent_rating: failed to update! user_id: {user_id}"
|
||||
)
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_recent_rating(self, user_id: int) -> Optional[Row]:
|
||||
sql = recent_rating.select(recent_rating.c.user == user_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
    def add_consec_login(self, user_id: int, version: int) -> None:
        sql = insert(consec_logins).values(
            user=user_id,
            version=version,
            logins=1
        )

        conflict = sql.on_duplicate_key_update(
            logins=consec_logins.c.logins + 1
        )

        result = self.execute(conflict)
        if result is None:
            self.logger.error(f"Failed to update consecutive login count for user {user_id} version {version}")

    def get_consec_login(self, user_id: int, version: int) -> Optional[Row]:
        sql = select(consec_logins).where(and_(
            consec_logins.c.user == user_id,
            consec_logins.c.version == version,
        ))

        result = self.execute(sql)
        if result is None:
            return None
        return result.fetchone()

    def reset_consec_login(self, user_id: int, version: int) -> Optional[Row]:
        sql = consec_logins.update(and_(
            consec_logins.c.user == user_id,
            consec_logins.c.version == version,
        )).values(
            logins=1
        )

        result = self.execute(sql)
        if result is None:
            return None
        return result.fetchone()
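
    # --- Editor's sketch (not part of this commit) --------------------------------
    # One plausible way a title handler could drive the consecutive-login helpers
    # above; the wrapper name and the streak policy are assumptions, not part of
    # the original code base.
    def bump_consec_login(self, user_id: int, version: int, streak_broken: bool) -> int:
        """Return the player's consecutive-login count after this login."""
        if streak_broken and self.get_consec_login(user_id, version) is not None:
            self.reset_consec_login(user_id, version)   # streak lost: back to 1
        else:
            self.add_consec_login(user_id, version)     # upsert: insert 1 or +1
        row = self.get_consec_login(user_id, version)
        return row["logins"] if row is not None else 1
    # -------------------------------------------------------------------------------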
|
||||
|
@ -7,6 +7,7 @@ from sqlalchemy.engine import Row
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
|
||||
from core.data.schema import BaseData, metadata
|
||||
from core.data import cached
|
||||
|
||||
best_score = Table(
|
||||
"mai2_score_best",
|
||||
@ -174,29 +175,137 @@ course = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
playlog_old = Table(
|
||||
"maimai_playlog",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"user",
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("version", Integer),
|
||||
# Pop access code
|
||||
Column("orderId", Integer),
|
||||
Column("sortNumber", Integer),
|
||||
Column("placeId", Integer),
|
||||
Column("placeName", String(255)),
|
||||
Column("country", String(255)),
|
||||
Column("regionId", Integer),
|
||||
Column("playDate", String(255)),
|
||||
Column("userPlayDate", String(255)),
|
||||
Column("musicId", Integer),
|
||||
Column("level", Integer),
|
||||
Column("gameMode", Integer),
|
||||
Column("rivalNum", Integer),
|
||||
Column("track", Integer),
|
||||
Column("eventId", Integer),
|
||||
Column("isFreeToPlay", Boolean),
|
||||
Column("playerRating", Integer),
|
||||
Column("playedUserId1", Integer),
|
||||
Column("playedUserId2", Integer),
|
||||
Column("playedUserId3", Integer),
|
||||
Column("playedUserName1", String(255)),
|
||||
Column("playedUserName2", String(255)),
|
||||
Column("playedUserName3", String(255)),
|
||||
Column("playedMusicLevel1", Integer),
|
||||
Column("playedMusicLevel2", Integer),
|
||||
Column("playedMusicLevel3", Integer),
|
||||
Column("achievement", Integer),
|
||||
Column("score", Integer),
|
||||
Column("tapScore", Integer),
|
||||
Column("holdScore", Integer),
|
||||
Column("slideScore", Integer),
|
||||
Column("breakScore", Integer),
|
||||
Column("syncRate", Integer),
|
||||
Column("vsWin", Integer),
|
||||
Column("isAllPerfect", Boolean),
|
||||
Column("fullCombo", Integer),
|
||||
Column("maxFever", Integer),
|
||||
Column("maxCombo", Integer),
|
||||
Column("tapPerfect", Integer),
|
||||
Column("tapGreat", Integer),
|
||||
Column("tapGood", Integer),
|
||||
Column("tapBad", Integer),
|
||||
Column("holdPerfect", Integer),
|
||||
Column("holdGreat", Integer),
|
||||
Column("holdGood", Integer),
|
||||
Column("holdBad", Integer),
|
||||
Column("slidePerfect", Integer),
|
||||
Column("slideGreat", Integer),
|
||||
Column("slideGood", Integer),
|
||||
Column("slideBad", Integer),
|
||||
Column("breakPerfect", Integer),
|
||||
Column("breakGreat", Integer),
|
||||
Column("breakGood", Integer),
|
||||
Column("breakBad", Integer),
|
||||
Column("judgeStyle", Integer),
|
||||
Column("isTrackSkip", Boolean),
|
||||
Column("isHighScore", Boolean),
|
||||
Column("isChallengeTrack", Boolean),
|
||||
Column("challengeLife", Integer),
|
||||
Column("challengeRemain", Integer),
|
||||
Column("isAllPerfectPlus", Integer),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
best_score_old = Table(
|
||||
"maimai_score_best",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"user",
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("musicId", Integer),
|
||||
Column("level", Integer),
|
||||
Column("playCount", Integer),
|
||||
Column("achievement", Integer),
|
||||
Column("scoreMax", Integer),
|
||||
Column("syncRateMax", Integer),
|
||||
Column("isAllPerfect", Boolean),
|
||||
Column("isAllPerfectPlus", Integer),
|
||||
Column("fullCombo", Integer),
|
||||
Column("maxFever", Integer),
|
||||
UniqueConstraint("user", "musicId", "level", name="maimai_score_best_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
class Mai2ScoreData(BaseData):
|
||||
def put_best_score(self, user_id: int, score_data: Dict) -> Optional[int]:
|
||||
def put_best_score(self, user_id: int, score_data: Dict, is_dx: bool = True) -> Optional[int]:
|
||||
score_data["user"] = user_id
|
||||
sql = insert(best_score).values(**score_data)
|
||||
|
||||
if is_dx:
|
||||
sql = insert(best_score).values(**score_data)
|
||||
else:
|
||||
sql = insert(best_score_old).values(**score_data)
|
||||
conflict = sql.on_duplicate_key_update(**score_data)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"put_best_score: Failed to insert best score! user_id {user_id}"
|
||||
f"put_best_score: Failed to insert best score! user_id {user_id} is_dx {is_dx}"
|
||||
)
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_best_scores(self, user_id: int, song_id: int = None) -> Optional[List[Row]]:
|
||||
@cached(2)
|
||||
def get_best_scores(self, user_id: int, song_id: int = None, is_dx: bool = True) -> Optional[List[Row]]:
|
||||
if is_dx:
|
||||
sql = best_score.select(
|
||||
and_(
|
||||
best_score.c.user == user_id,
|
||||
(best_score.c.song_id == song_id) if song_id is not None else True,
|
||||
)
|
||||
)
|
||||
else:
|
||||
sql = best_score_old.select(
|
||||
and_(
|
||||
best_score_old.c.user == user_id,
|
||||
(best_score_old.c.song_id == song_id) if song_id is not None else True,
|
||||
)
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
@ -219,15 +328,19 @@ class Mai2ScoreData(BaseData):
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_playlog(self, user_id: int, playlog_data: Dict) -> Optional[int]:
|
||||
def put_playlog(self, user_id: int, playlog_data: Dict, is_dx: bool = True) -> Optional[int]:
|
||||
playlog_data["user"] = user_id
|
||||
|
||||
if is_dx:
|
||||
sql = insert(playlog).values(**playlog_data)
|
||||
else:
|
||||
sql = insert(playlog_old).values(**playlog_data)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(**playlog_data)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.error(f"put_playlog: Failed to insert! user_id {user_id}")
|
||||
self.logger.error(f"put_playlog: Failed to insert! user_id {user_id} is_dx {is_dx}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
@ -249,4 +362,4 @@ class Mai2ScoreData(BaseData):
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
return result.fetchall()
|
||||
|
@ -4,12 +4,12 @@ import pytz
|
||||
import json
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.mai2.base import Mai2Base
|
||||
from titles.mai2.dxplus import Mai2DXPlus
|
||||
from titles.mai2.config import Mai2Config
|
||||
from titles.mai2.const import Mai2Constants
|
||||
|
||||
|
||||
class Mai2Splash(Mai2Base):
|
||||
class Mai2Splash(Mai2DXPlus):
|
||||
def __init__(self, cfg: CoreConfig, game_cfg: Mai2Config) -> None:
|
||||
super().__init__(cfg, game_cfg)
|
||||
self.version = Mai2Constants.VER_MAIMAI_DX_SPLASH
|
||||
|
@ -4,12 +4,12 @@ import pytz
|
||||
import json
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.mai2.base import Mai2Base
|
||||
from titles.mai2.splash import Mai2Splash
|
||||
from titles.mai2.config import Mai2Config
|
||||
from titles.mai2.const import Mai2Constants
|
||||
|
||||
|
||||
class Mai2SplashPlus(Mai2Base):
|
||||
class Mai2SplashPlus(Mai2Splash):
|
||||
def __init__(self, cfg: CoreConfig, game_cfg: Mai2Config) -> None:
|
||||
super().__init__(cfg, game_cfg)
|
||||
self.version = Mai2Constants.VER_MAIMAI_DX_SPLASH_PLUS
|
||||
|
@ -5,12 +5,12 @@ import pytz
|
||||
import json
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.mai2.base import Mai2Base
|
||||
from titles.mai2.splashplus import Mai2SplashPlus
|
||||
from titles.mai2.const import Mai2Constants
|
||||
from titles.mai2.config import Mai2Config
|
||||
|
||||
|
||||
class Mai2Universe(Mai2Base):
|
||||
class Mai2Universe(Mai2SplashPlus):
|
||||
def __init__(self, cfg: CoreConfig, game_cfg: Mai2Config) -> None:
|
||||
super().__init__(cfg, game_cfg)
|
||||
self.version = Mai2Constants.VER_MAIMAI_DX_UNIVERSE
|
||||
@ -70,13 +70,13 @@ class Mai2Universe(Mai2Base):
|
||||
tmp.pop("cardName")
|
||||
tmp.pop("enabled")
|
||||
|
||||
tmp["startDate"] = datetime.strftime(tmp["startDate"], "%Y-%m-%d %H:%M:%S")
|
||||
tmp["endDate"] = datetime.strftime(tmp["endDate"], "%Y-%m-%d %H:%M:%S")
|
||||
tmp["startDate"] = datetime.strftime(tmp["startDate"], Mai2Constants.DATE_TIME_FORMAT)
|
||||
tmp["endDate"] = datetime.strftime(tmp["endDate"], Mai2Constants.DATE_TIME_FORMAT)
|
||||
tmp["noticeStartDate"] = datetime.strftime(
|
||||
tmp["noticeStartDate"], "%Y-%m-%d %H:%M:%S"
|
||||
tmp["noticeStartDate"], Mai2Constants.DATE_TIME_FORMAT
|
||||
)
|
||||
tmp["noticeEndDate"] = datetime.strftime(
|
||||
tmp["noticeEndDate"], "%Y-%m-%d %H:%M:%S"
|
||||
tmp["noticeEndDate"], Mai2Constants.DATE_TIME_FORMAT
|
||||
)
|
||||
|
||||
selling_card_list.append(tmp)
|
||||
@ -104,8 +104,12 @@ class Mai2Universe(Mai2Base):
|
||||
tmp.pop("id")
|
||||
tmp.pop("user")
|
||||
|
||||
tmp["startDate"] = datetime.strftime(tmp["startDate"], "%Y-%m-%d %H:%M:%S")
|
||||
tmp["endDate"] = datetime.strftime(tmp["endDate"], "%Y-%m-%d %H:%M:%S")
|
||||
tmp["startDate"] = datetime.strftime(
|
||||
tmp["startDate"], Mai2Constants.DATE_TIME_FORMAT
|
||||
)
|
||||
tmp["endDate"] = datetime.strftime(
|
||||
tmp["endDate"], Mai2Constants.DATE_TIME_FORMAT
|
||||
)
|
||||
card_list.append(tmp)
|
||||
|
||||
return {
|
||||
@ -154,6 +158,10 @@ class Mai2Universe(Mai2Base):
|
||||
# set a random card serial number
|
||||
serial_id = "".join([str(randint(0, 9)) for _ in range(20)])
|
||||
|
||||
# calculate start and end date of the card
|
||||
start_date = datetime.utcnow()
|
||||
end_date = datetime.utcnow() + timedelta(days=15)
|
||||
|
||||
user_card = upsert["userCard"]
|
||||
self.data.item.put_card(
|
||||
user_id,
|
||||
@ -161,8 +169,26 @@ class Mai2Universe(Mai2Base):
|
||||
user_card["cardTypeId"],
|
||||
user_card["charaId"],
|
||||
user_card["mapId"],
|
||||
# add the correct start date and also the end date in 15 days
|
||||
start_date,
|
||||
end_date,
|
||||
)
|
||||
|
||||
# get the profile extend to save the new bought card
|
||||
extend = self.data.profile.get_profile_extend(user_id, self.version)
|
||||
if extend:
|
||||
extend = extend._asdict()
|
||||
# parse the selectedCardList
|
||||
# 6 = Freedom Pass, 4 = Gold Pass (cardTypeId)
|
||||
selected_cards: List = extend["selectedCardList"]
|
||||
|
||||
# if no pass is already added, add the corresponding pass
|
||||
if not user_card["cardTypeId"] in selected_cards:
|
||||
selected_cards.insert(0, user_card["cardTypeId"])
|
||||
|
||||
extend["selectedCardList"] = selected_cards
|
||||
self.data.profile.put_profile_extend(user_id, self.version, extend)
|
||||
|
||||
# properly format userPrintDetail for the database
|
||||
upsert.pop("userCard")
|
||||
upsert.pop("serialId")
|
||||
@ -174,8 +200,8 @@ class Mai2Universe(Mai2Base):
|
||||
"returnCode": 1,
|
||||
"orderId": 0,
|
||||
"serialId": serial_id,
|
||||
"startDate": "2018-01-01 00:00:00",
|
||||
"endDate": "2038-01-01 00:00:00",
|
||||
"startDate": datetime.strftime(start_date, Mai2Constants.DATE_TIME_FORMAT),
|
||||
"endDate": datetime.strftime(end_date, Mai2Constants.DATE_TIME_FORMAT),
|
||||
}
|
||||
|
||||
def handle_cm_upsert_user_printlog_api_request(self, data: Dict) -> Dict:
|
||||
|
@ -7,4 +7,4 @@ index = OngekiServlet
|
||||
database = OngekiData
|
||||
reader = OngekiReader
|
||||
game_codes = [OngekiConstants.GAME_CODE]
|
||||
current_schema_version = 4
|
||||
current_schema_version = 5
|
||||
|
@ -142,7 +142,7 @@ class OngekiBase:
|
||||
|
||||
def handle_get_game_point_api_request(self, data: Dict) -> Dict:
|
||||
"""
|
||||
Sets the GP ammount for A and B sets for 1 - 3 crdits
|
||||
Sets the GP amount for A and B sets for 1 - 3 credits
|
||||
"""
|
||||
return {
|
||||
"length": 6,
|
||||
@ -155,13 +155,13 @@ class OngekiBase:
|
||||
},
|
||||
{
|
||||
"type": 1,
|
||||
"cost": 200,
|
||||
"cost": 230,
|
||||
"startDate": "2000-01-01 05:00:00.0",
|
||||
"endDate": "2099-01-01 05:00:00.0",
|
||||
},
|
||||
{
|
||||
"type": 2,
|
||||
"cost": 300,
|
||||
"cost": 370,
|
||||
"startDate": "2000-01-01 05:00:00.0",
|
||||
"endDate": "2099-01-01 05:00:00.0",
|
||||
},
|
||||
@ -256,7 +256,11 @@ class OngekiBase:
|
||||
{
|
||||
"type": event["type"],
|
||||
"id": event["eventId"],
|
||||
"startDate": "2017-12-05 07:00:00.0",
|
||||
# actually use the startDate from the import so it
|
||||
# properly shows all the events when new ones are imported
|
||||
"startDate": datetime.strftime(
|
||||
event["startDate"], "%Y-%m-%d %H:%M:%S.0"
|
||||
),
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
}
|
||||
)
|
||||
@ -268,7 +272,7 @@ class OngekiBase:
|
||||
}
|
||||
|
||||
def handle_get_game_id_list_api_request(self, data: Dict) -> Dict:
|
||||
game_idlist: list[str, Any] = [] # 1 to 230 & 8000 to 8050
|
||||
game_idlist: List[str, Any] = [] # 1 to 230 & 8000 to 8050
|
||||
|
||||
if data["type"] == 1:
|
||||
for i in range(1, 231):
|
||||
@ -443,7 +447,7 @@ class OngekiBase:
|
||||
"userItemList": [],
|
||||
}
|
||||
|
||||
items: list[Dict[str, Any]] = []
|
||||
items: List[Dict[str, Any]] = []
|
||||
for i in range(data["nextIndex"] % 10000000000, len(p)):
|
||||
if len(items) > data["maxCount"]:
|
||||
break
|
||||
@ -560,7 +564,11 @@ class OngekiBase:
|
||||
def handle_get_user_recent_rating_api_request(self, data: Dict) -> Dict:
|
||||
recent_rating = self.data.profile.get_profile_recent_rating(data["userId"])
|
||||
if recent_rating is None:
|
||||
return {}
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"userRecentRatingList": [],
|
||||
}
|
||||
|
||||
userRecentRatingList = recent_rating["recentRating"]
|
||||
|
||||
|
@ -43,15 +43,15 @@ class OngekiBright(OngekiBase):
|
||||
user_data.pop("user")
|
||||
user_data.pop("version")
|
||||
|
||||
# TODO: replace datetime objects with strings
|
||||
|
||||
# add access code that we don't store
|
||||
user_data["accessCode"] = cards[0]["access_code"]
|
||||
|
||||
# hardcode Card Maker version for now
|
||||
# Card Maker 1.34.00 = 1.30.01
|
||||
# Card Maker 1.36.00 = 1.35.04
|
||||
user_data["compatibleCmVersion"] = "1.30.01"
|
||||
# add the compatible card maker version from config
|
||||
card_maker_ver = self.game_cfg.version.version(self.version)
|
||||
if card_maker_ver and card_maker_ver.get("card_maker"):
|
||||
# Card Maker 1.30 = 1.30.01+
|
||||
# Card Maker 1.35 = 1.35.03+
|
||||
user_data["compatibleCmVersion"] = card_maker_ver.get("card_maker")
|
||||
|
||||
return {"userId": data["userId"], "userData": user_data}
|
||||
|
||||
@ -333,6 +333,8 @@ class OngekiBright(OngekiBase):
|
||||
select_point = data["selectPoint"]
|
||||
|
||||
total_gacha_count, ceiling_gacha_count = 0, 0
|
||||
# 0 = can still use Gacha Select, 1 = already used Gacha Select
|
||||
use_select_point = 0
|
||||
daily_gacha_cnt, five_gacha_cnt, eleven_gacha_cnt = 0, 0, 0
|
||||
daily_gacha_date = datetime.strptime("2000-01-01", "%Y-%m-%d")
|
||||
|
||||
@ -344,6 +346,9 @@ class OngekiBright(OngekiBase):
|
||||
daily_gacha_cnt = user_gacha["dailyGachaCnt"]
|
||||
five_gacha_cnt = user_gacha["fiveGachaCnt"]
|
||||
eleven_gacha_cnt = user_gacha["elevenGachaCnt"]
|
||||
# if the Gacha Select has been used, make sure to keep it
|
||||
if user_gacha["useSelectPoint"] == 1:
|
||||
use_select_point = 1
|
||||
# parse just the year, month and date
|
||||
daily_gacha_date = user_gacha["dailyGachaDate"]
|
||||
|
||||
@ -359,7 +364,7 @@ class OngekiBright(OngekiBase):
|
||||
totalGachaCnt=total_gacha_count + gacha_count,
|
||||
ceilingGachaCnt=ceiling_gacha_count + gacha_count,
|
||||
selectPoint=select_point,
|
||||
useSelectPoint=0,
|
||||
useSelectPoint=use_select_point,
|
||||
dailyGachaCnt=daily_gacha_cnt + gacha_count,
|
||||
fiveGachaCnt=five_gacha_cnt + 1 if gacha_count == 5 else five_gacha_cnt,
|
||||
elevenGachaCnt=eleven_gacha_cnt + 1
|
||||
|
@ -136,14 +136,3 @@ class OngekiBrightMemory(OngekiBright):
|
||||
|
||||
def handle_get_game_music_release_state_api_request(self, data: Dict) -> Dict:
|
||||
return {"techScore": 0, "cardNum": 0}
|
||||
|
||||
def handle_cm_get_user_data_api_request(self, data: Dict) -> Dict:
|
||||
# check for a bright memory profile
|
||||
user_data = super().handle_cm_get_user_data_api_request(data)
|
||||
|
||||
# hardcode Card Maker version for now
|
||||
# Card Maker 1.34 = 1.30.01
|
||||
# Card Maker 1.35 = 1.35.03
|
||||
user_data["userData"]["compatibleCmVersion"] = "1.35.03"
|
||||
|
||||
return user_data
|
||||
|
@ -1,3 +1,4 @@
|
||||
from ast import Dict
|
||||
from typing import List
|
||||
|
||||
from core.config import CoreConfig
|
||||
@@ -33,7 +34,23 @@ class OngekiGachaConfig:
        )


class OngekiCardMakerVersionConfig:
    def __init__(self, parent_config: "OngekiConfig") -> None:
        self.__config = parent_config

    def version(self, version: int) -> Dict:
        """
        in the form of:
        <ongeki version>: {"card_maker": <compatible card maker version>}
        6: {"card_maker": 1.30.01}
        """
        return CoreConfig.get_config_field(
            self.__config, "ongeki", "version", default={}
        ).get(version)


class OngekiConfig(dict):
    def __init__(self) -> None:
        self.server = OngekiServerConfig(self)
        self.gachas = OngekiGachaConfig(self)
        self.version = OngekiCardMakerVersionConfig(self)
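The mapping the docstring above describes can be sketched as follows (a hedged example: the internal version numbers and Card Maker build strings are illustrative, taken from the comments elsewhere in this diff rather than from any real config):

```python
# Hedged sketch of the per-version mapping OngekiCardMakerVersionConfig.version() expects.
# Keys are O.N.G.E.K.I. internal versions; values name the compatible Card Maker build.
ongeki_version_mapping = {
    6: {"card_maker": "1.30.01"},  # e.g. ONGEKI bright
    7: {"card_maker": "1.35.03"},  # e.g. ONGEKI bright MEMORY
}

# version(6) would then resolve to {"card_maker": "1.30.01"}, which
# handle_cm_get_user_data_api_request copies into compatibleCmVersion.
compatible_cm = ongeki_version_mapping.get(6, {}).get("card_maker")  # "1.30.01"
```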
@ -67,12 +67,12 @@ class OngekiConstants:
|
||||
VERSION_NAMES = (
|
||||
"ONGEKI",
|
||||
"ONGEKI +",
|
||||
"ONGEKI Summer",
|
||||
"ONGEKI Summer+",
|
||||
"ONGEKI Red",
|
||||
"ONGEKI Red+",
|
||||
"ONGEKI Bright",
|
||||
"ONGEKI Bright Memory",
|
||||
"ONGEKI SUMMER",
|
||||
"ONGEKI SUMMER +",
|
||||
"ONGEKI R.E.D.",
|
||||
"ONGEKI R.E.D. +",
|
||||
"ONGEKI bright",
|
||||
"ONGEKI bright MEMORY",
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
@ -11,6 +11,7 @@ from os import path
|
||||
from typing import Tuple
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.utils import Utils
|
||||
from titles.ongeki.config import OngekiConfig
|
||||
from titles.ongeki.const import OngekiConstants
|
||||
from titles.ongeki.base import OngekiBase
|
||||
@ -101,6 +102,7 @@ class OngekiServlet:
|
||||
url_split = url_path.split("/")
|
||||
internal_ver = 0
|
||||
endpoint = url_split[len(url_split) - 1]
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
if version < 105: # 1.0
|
||||
internal_ver = OngekiConstants.VER_ONGEKI
|
||||
@ -137,7 +139,10 @@ class OngekiServlet:
|
||||
|
||||
req_data = json.loads(unzip)
|
||||
|
||||
self.logger.info(f"v{version} {endpoint} request - {req_data}")
|
||||
self.logger.info(
|
||||
f"v{version} {endpoint} request from {client_ip}"
|
||||
)
|
||||
self.logger.debug(req_data)
|
||||
|
||||
func_to_find = "handle_" + inflection.underscore(endpoint) + "_request"
|
||||
|
||||
@ -156,6 +161,6 @@ class OngekiServlet:
|
||||
if resp == None:
|
||||
resp = {"returnCode": 1}
|
||||
|
||||
self.logger.info(f"Response {resp}")
|
||||
self.logger.debug(f"Response {resp}")
|
||||
|
||||
return zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))
|
||||
|
@ -316,7 +316,7 @@ class OngekiProfileData(BaseData):
|
||||
return result.fetchone()
|
||||
|
||||
def get_profile_rating_log(self, aime_id: int) -> Optional[List[Row]]:
|
||||
sql = select(rating_log).where(recent_rating.c.user == aime_id)
|
||||
sql = select(rating_log).where(rating_log.c.user == aime_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
|
@ -16,6 +16,7 @@ events = Table(
|
||||
Column("eventId", Integer),
|
||||
Column("type", Integer),
|
||||
Column("name", String(255)),
|
||||
Column("startDate", TIMESTAMP, server_default=func.now()),
|
||||
Column("enabled", Boolean, server_default="1"),
|
||||
UniqueConstraint("version", "eventId", "type", name="ongeki_static_events_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
|
@ -1,6 +1,6 @@
|
||||
from datetime import datetime, timedelta
|
||||
import json, logging
|
||||
from typing import Any, Dict
|
||||
from typing import Any, Dict, List
|
||||
import random
|
||||
|
||||
from core.data import Data
|
||||
@ -8,6 +8,7 @@ from core import CoreConfig
|
||||
from .config import PokkenConfig
|
||||
from .proto import jackal_pb2
|
||||
from .database import PokkenData
|
||||
from .const import PokkenConstants
|
||||
|
||||
|
||||
class PokkenBase:
|
||||
@ -44,19 +45,19 @@ class PokkenBase:
|
||||
biwa_setting = {
|
||||
"MatchingServer": {
|
||||
"host": f"https://{self.game_cfg.server.hostname}",
|
||||
"port": self.game_cfg.server.port,
|
||||
"port": self.game_cfg.ports.game,
|
||||
"url": "/SDAK/100/matching",
|
||||
},
|
||||
"StunServer": {
|
||||
"addr": self.game_cfg.server.hostname,
|
||||
"port": self.game_cfg.server.port_stun,
|
||||
"addr": self.game_cfg.server.stun_server_host,
|
||||
"port": self.game_cfg.server.stun_server_port,
|
||||
},
|
||||
"TurnServer": {
|
||||
"addr": self.game_cfg.server.hostname,
|
||||
"port": self.game_cfg.server.port_turn,
|
||||
"addr": self.game_cfg.server.stun_server_host,
|
||||
"port": self.game_cfg.server.stun_server_port,
|
||||
},
|
||||
"AdmissionUrl": f"ws://{self.game_cfg.server.hostname}:{self.game_cfg.server.port_admission}",
|
||||
"locationId": 123,
|
||||
"AdmissionUrl": f"ws://{self.game_cfg.server.hostname}:{self.game_cfg.ports.admission}",
|
||||
"locationId": 123, # FIXME: Get arcade's ID from the database
|
||||
"logfilename": "JackalMatchingLibrary.log",
|
||||
"biwalogfilename": "./biwa.log",
|
||||
}
|
||||
@ -94,6 +95,7 @@ class PokkenBase:
|
||||
res.type = jackal_pb2.MessageType.LOAD_CLIENT_SETTINGS
|
||||
settings = jackal_pb2.LoadClientSettingsResponseData()
|
||||
|
||||
# TODO: Make configurable
|
||||
settings.money_magnification = 1
|
||||
settings.continue_bonus_exp = 100
|
||||
settings.continue_fight_money = 100
|
||||
@ -274,6 +276,100 @@ class PokkenBase:
|
||||
res.result = 1
|
||||
res.type = jackal_pb2.MessageType.SAVE_USER
|
||||
|
||||
req = request.save_user
|
||||
user_id = req.banapass_id
|
||||
|
||||
tut_flgs: List[int] = []
|
||||
ach_flgs: List[int] = []
|
||||
evt_flgs: List[int] = []
|
||||
evt_params: List[int] = []
|
||||
|
||||
get_rank_pts: int = req.get_trainer_rank_point if req.get_trainer_rank_point else 0
|
||||
get_money: int = req.get_money
|
||||
get_score_pts: int = req.get_score_point if req.get_score_point else 0
|
||||
grade_max: int = req.grade_max_num
|
||||
extra_counter: int = req.extra_counter
|
||||
evt_reward_get_flg: int = req.event_reward_get_flag
|
||||
num_continues: int = req.continue_num
|
||||
total_play_days: int = req.total_play_days
|
||||
awake_num: int = req.awake_num # ?
|
||||
use_support_ct: int = req.use_support_num
|
||||
beat_num: int = req.beat_num # ?
|
||||
evt_state: int = req.event_state
|
||||
aid_skill: int = req.aid_skill
|
||||
last_evt: int = req.last_play_event_id
|
||||
|
||||
battle = req.battle_data
|
||||
mon = req.pokemon_data
|
||||
|
||||
p = self.data.profile.touch_profile(user_id)
|
||||
if p is None or not p:
|
||||
self.data.profile.create_profile(user_id)
|
||||
|
||||
if req.trainer_name_pending is not None and req.trainer_name_pending: # we're saving for the first time
|
||||
self.data.profile.set_profile_name(user_id, req.trainer_name_pending, req.avatar_gender if req.avatar_gender else None)
|
||||
|
||||
for tut_flg in req.tutorial_progress_flag:
|
||||
tut_flgs.append(tut_flg)
|
||||
|
||||
self.data.profile.update_profile_tutorial_flags(user_id, tut_flgs)
|
||||
|
||||
for ach_flg in req.achievement_flag:
|
||||
ach_flgs.append(ach_flg)
|
||||
|
||||
self.data.profile.update_profile_achievement_flags(user_id, ach_flgs)
|
||||
|
||||
for evt_flg in req.event_achievement_flag:
|
||||
evt_flgs.append(evt_flg)
|
||||
|
||||
for evt_param in req.event_achievement_param:
|
||||
evt_params.append(evt_param)
|
||||
|
||||
self.data.profile.update_profile_event(user_id, evt_state, evt_flgs, evt_params)
|
||||
|
||||
for reward in req.reward_data:
|
||||
self.data.item.add_reward(user_id, reward.get_category_id, reward.get_content_id, reward.get_type_id)
|
||||
|
||||
self.data.profile.add_profile_points(user_id, get_rank_pts, get_money, get_score_pts, grade_max)
|
||||
|
||||
self.data.profile.update_support_team(user_id, 1, req.support_set_1[0], req.support_set_1[1])
|
||||
self.data.profile.update_support_team(user_id, 2, req.support_set_2[0], req.support_set_2[1])
|
||||
self.data.profile.update_support_team(user_id, 3, req.support_set_3[0], req.support_set_3[1])
|
||||
|
||||
self.data.profile.put_pokemon(user_id, mon.char_id, mon.illustration_book_no, mon.bp_point_atk, mon.bp_point_res, mon.bp_point_def, mon.bp_point_sp)
|
||||
self.data.profile.add_pokemon_xp(user_id, mon.char_id, mon.get_pokemon_exp)
|
||||
|
||||
for x in range(len(battle.play_mode)):
|
||||
self.data.profile.put_pokemon_battle_result(
|
||||
user_id,
|
||||
mon.char_id,
|
||||
PokkenConstants.BATTLE_TYPE(battle.play_mode[x]),
|
||||
PokkenConstants.BATTLE_RESULT(battle.result[x])
|
||||
)
|
||||
|
||||
self.data.profile.put_stats(
|
||||
user_id,
|
||||
battle.ex_ko_num,
|
||||
battle.wko_num,
|
||||
battle.timeup_win_num,
|
||||
battle.cool_ko_num,
|
||||
battle.perfect_ko_num,
|
||||
num_continues
|
||||
)
|
||||
|
||||
self.data.profile.put_extra(
|
||||
user_id,
|
||||
extra_counter,
|
||||
evt_reward_get_flg,
|
||||
total_play_days,
|
||||
awake_num,
|
||||
use_support_ct,
|
||||
beat_num,
|
||||
aid_skill,
|
||||
last_evt
|
||||
)
|
||||
|
||||
|
||||
return res.SerializeToString()
|
||||
|
||||
def handle_save_ingame_log(self, data: jackal_pb2.Request) -> bytes:
|
||||
@ -307,11 +403,30 @@ class PokkenBase:
|
||||
"pcb_id": data["data"]["must"]["pcb_id"],
|
||||
"gip": client_ip
|
||||
},
|
||||
"list":[]
|
||||
"""
|
||||
return {}
|
||||
return {
|
||||
"data": {
|
||||
"sessionId":"12345678",
|
||||
"A":{
|
||||
"pcb_id": data["data"]["must"]["pcb_id"],
|
||||
"gip": client_ip
|
||||
},
|
||||
"list":[]
|
||||
}
|
||||
}
|
||||
|
||||
def handle_matching_stop_matching(
|
||||
self, data: Dict = {}, client_ip: str = "127.0.0.1"
|
||||
) -> Dict:
|
||||
return {}
|
||||
|
||||
def handle_admission_noop(self, data: Dict, req_ip: str = "127.0.0.1") -> Dict:
|
||||
return {}
|
||||
|
||||
def handle_admission_joinsession(self, data: Dict, req_ip: str = "127.0.0.1") -> Dict:
|
||||
self.logger.info(f"Admission: JoinSession from {req_ip}")
|
||||
return {
|
||||
'data': {
|
||||
"id": 12345678
|
||||
}
|
||||
}
|
||||
|
@ -25,30 +25,6 @@ class PokkenServerConfig:
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "pokken", "server", "port", default=9000
|
||||
)
|
||||
|
||||
@property
|
||||
def port_stun(self) -> int:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "pokken", "server", "port_stun", default=9001
|
||||
)
|
||||
|
||||
@property
|
||||
def port_turn(self) -> int:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "pokken", "server", "port_turn", default=9002
|
||||
)
|
||||
|
||||
@property
|
||||
def port_admission(self) -> int:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "pokken", "server", "port_admission", default=9003
|
||||
)
|
||||
|
||||
@property
|
||||
def auto_register(self) -> bool:
|
||||
"""
|
||||
@ -59,7 +35,51 @@ class PokkenServerConfig:
|
||||
self.__config, "pokken", "server", "auto_register", default=True
|
||||
)
|
||||
|
||||
    @property
    def enable_matching(self) -> bool:
        """
        If global matching should happen
        """
        return CoreConfig.get_config_field(
            self.__config, "pokken", "server", "enable_matching", default=False
        )

    @property
    def stun_server_host(self) -> str:
        """
        Hostname of the EXTERNAL stun server the game should connect to. This is not handled by artemis.
        """
        return CoreConfig.get_config_field(
            self.__config, "pokken", "server", "stun_server_host", default="stunserver.stunprotocol.org"
        )

    @property
    def stun_server_port(self) -> int:
        """
        Port of the EXTERNAL stun server the game should connect to. This is not handled by artemis.
        """
        return CoreConfig.get_config_field(
            self.__config, "pokken", "server", "stun_server_port", default=3478
        )


class PokkenPortsConfig:
    def __init__(self, parent_config: "PokkenConfig"):
        self.__config = parent_config

    @property
    def game(self) -> int:
        return CoreConfig.get_config_field(
            self.__config, "pokken", "ports", "game", default=9000
        )

    @property
    def admission(self) -> int:
        return CoreConfig.get_config_field(
            self.__config, "pokken", "ports", "admission", default=9001
        )


class PokkenConfig(dict):
    def __init__(self) -> None:
        self.server = PokkenServerConfig(self)
        self.ports = PokkenPortsConfig(self)
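Put together, the settings these accessors read could look roughly like the following (a hedged sketch expressed as a Python mapping; the hostname is a placeholder and the numeric values simply mirror the defaults above):

```python
# Hedged sketch of the Pokken configuration shape implied by the accessors above.
pokken_config_sketch = {
    "server": {
        "hostname": "localhost",                             # placeholder
        "enable_matching": False,                            # leave off unless the admission server runs
        "stun_server_host": "stunserver.stunprotocol.org",   # external STUN, not handled by artemis
        "stun_server_port": 3478,
    },
    "ports": {
        "game": 9000,       # HTTPS title server port (used when building the title URL)
        "admission": 9001,  # WebSocket admission server (ws://<hostname>:9001)
    },
}
```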
@ -11,14 +11,14 @@ class PokkenConstants:
|
||||
VERSION_NAMES = "Pokken Tournament"
|
||||
|
||||
class BATTLE_TYPE(Enum):
|
||||
BATTLE_TYPE_TUTORIAL = 1
|
||||
BATTLE_TYPE_AI = 2
|
||||
BATTLE_TYPE_LAN = 3
|
||||
BATTLE_TYPE_WAN = 4
|
||||
TUTORIAL = 1
|
||||
AI = 2
|
||||
LAN = 3
|
||||
WAN = 4
|
||||
|
||||
class BATTLE_RESULT(Enum):
|
||||
BATTLE_RESULT_WIN = 1
|
||||
BATTLE_RESULT_LOSS = 2
|
||||
WIN = 1
|
||||
LOSS = 2
|
||||
|
||||
@classmethod
|
||||
def game_ver_to_string(cls, ver: int):
|
||||
|
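The renamed members are consumed by put_pokemon_battle_result further down; a minimal sketch of the conversion handle_save_user performs per play (the enum values come from the definitions above, the commented call is illustrative):

```python
# Minimal sketch: map the raw integers from the save request onto the renamed enums.
from titles.pokken.const import PokkenConstants

battle_type = PokkenConstants.BATTLE_TYPE(4)      # BATTLE_TYPE.WAN
battle_result = PokkenConstants.BATTLE_RESULT(1)  # BATTLE_RESULT.WIN
# profile_db.put_pokemon_battle_result(user_id, char_id, battle_type, battle_result)
```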
@ -2,8 +2,9 @@ import yaml
|
||||
import jinja2
|
||||
from twisted.web.http import Request
|
||||
from os import path
|
||||
from twisted.web.server import Session
|
||||
|
||||
from core.frontend import FE_Base
|
||||
from core.frontend import FE_Base, IUserSession
|
||||
from core.config import CoreConfig
|
||||
from .database import PokkenData
|
||||
from .config import PokkenConfig
|
||||
@ -27,7 +28,12 @@ class PokkenFrontend(FE_Base):
|
||||
template = self.environment.get_template(
|
||||
"titles/pokken/frontend/pokken_index.jinja"
|
||||
)
|
||||
|
||||
sesh: Session = request.getSession()
|
||||
usr_sesh = IUserSession(sesh)
|
||||
|
||||
return template.render(
|
||||
title=f"{self.core_config.server.name} | {self.nav_name}",
|
||||
game_list=self.environment.globals["game_list"],
|
||||
sesh=vars(usr_sesh)
|
||||
).encode("utf-16")
|
||||
|
@ -1,6 +1,7 @@
|
||||
from typing import Tuple
|
||||
from twisted.web.http import Request
|
||||
from twisted.web import resource
|
||||
from twisted.internet import reactor
|
||||
import json, ast
|
||||
from datetime import datetime
|
||||
import yaml
|
||||
@ -11,10 +12,11 @@ from os import path
|
||||
from google.protobuf.message import DecodeError
|
||||
|
||||
from core import CoreConfig, Utils
|
||||
from titles.pokken.config import PokkenConfig
|
||||
from titles.pokken.base import PokkenBase
|
||||
from titles.pokken.const import PokkenConstants
|
||||
from titles.pokken.proto import jackal_pb2
|
||||
from .config import PokkenConfig
|
||||
from .base import PokkenBase
|
||||
from .const import PokkenConstants
|
||||
from .proto import jackal_pb2
|
||||
from .services import PokkenAdmissionFactory
|
||||
|
||||
|
||||
class PokkenServlet(resource.Resource):
|
||||
@ -69,7 +71,7 @@ class PokkenServlet(resource.Resource):
|
||||
|
||||
return (
|
||||
True,
|
||||
f"https://{game_cfg.server.hostname}:{game_cfg.server.port}/{game_code}/$v/",
|
||||
f"https://{game_cfg.server.hostname}:{game_cfg.ports.game}/{game_code}/$v/",
|
||||
f"{game_cfg.server.hostname}/SDAK/$v/",
|
||||
)
|
||||
|
||||
@ -90,8 +92,10 @@ class PokkenServlet(resource.Resource):
|
||||
return (True, "PKF1")
|
||||
|
||||
def setup(self) -> None:
|
||||
# TODO: Setup stun, turn (UDP) and admission (WSS) servers
|
||||
pass
|
||||
if self.game_cfg.server.enable_matching:
|
||||
reactor.listenTCP(
|
||||
self.game_cfg.ports.admission, PokkenAdmissionFactory(self.core_cfg, self.game_cfg)
|
||||
)
|
||||
|
||||
def render_POST(
|
||||
self, request: Request, version: int = 0, endpoints: str = ""
|
||||
@ -128,6 +132,9 @@ class PokkenServlet(resource.Resource):
|
||||
return ret
|
||||
|
||||
def handle_matching(self, request: Request) -> bytes:
|
||||
if not self.game_cfg.server.enable_matching:
|
||||
return b""
|
||||
|
||||
content = request.content.getvalue()
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
|
@ -31,4 +31,16 @@ class PokkenItemData(BaseData):
|
||||
Items obtained as rewards
|
||||
"""
|
||||
|
||||
pass
|
||||
def add_reward(self, user_id: int, category: int, content: int, item_type: int) -> Optional[int]:
|
||||
sql = insert(item).values(
|
||||
user=user_id,
|
||||
category=category,
|
||||
content=content,
|
||||
type=item_type,
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.warn(f"Failed to insert reward for user {user_id}: {category}-{content}-{item_type}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
@ -1,4 +1,4 @@
|
||||
from typing import Optional, Dict, List
|
||||
from typing import Optional, Dict, List, Union
|
||||
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_, case
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
@ -125,7 +125,7 @@ pokemon_data = Table(
|
||||
Column("win_vs_lan", Integer),
|
||||
Column("battle_num_vs_cpu", Integer), # 2
|
||||
Column("win_cpu", Integer),
|
||||
Column("battle_all_num_tutorial", Integer),
|
||||
Column("battle_all_num_tutorial", Integer), # ???
|
||||
Column("battle_num_tutorial", Integer), # 1?
|
||||
Column("bp_point_atk", Integer),
|
||||
Column("bp_point_res", Integer),
|
||||
@ -137,6 +137,14 @@ pokemon_data = Table(
|
||||
|
||||
|
||||
class PokkenProfileData(BaseData):
|
||||
def touch_profile(self, user_id: int) -> Optional[int]:
|
||||
sql = select([profile.c.id]).where(profile.c.user == user_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
row = result.fetchone()
return row['id'] if row is not None else None
|
||||
|
||||
def create_profile(self, user_id: int) -> Optional[int]:
|
||||
sql = insert(profile).values(user=user_id)
|
||||
conflict = sql.on_duplicate_key_update(user=user_id)
|
||||
@ -147,11 +155,10 @@ class PokkenProfileData(BaseData):
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def set_profile_name(self, user_id: int, new_name: str) -> None:
|
||||
sql = (
|
||||
update(profile)
|
||||
.where(profile.c.user == user_id)
|
||||
.values(trainer_name=new_name)
|
||||
def set_profile_name(self, user_id: int, new_name: str, gender: Union[int, None] = None) -> None:
|
||||
sql = update(profile).where(profile.c.user == user_id).values(
|
||||
trainer_name=new_name,
|
||||
avatar_gender=gender if gender is not None else profile.c.avatar_gender
|
||||
)
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
@ -159,13 +166,75 @@ class PokkenProfileData(BaseData):
|
||||
f"Failed to update pokken profile name for user {user_id}!"
|
||||
)
|
||||
|
||||
def update_profile_tutorial_flags(self, user_id: int, tutorial_flags: Dict) -> None:
|
||||
pass
|
||||
def put_extra(
|
||||
self,
|
||||
user_id: int,
|
||||
extra_counter: int,
|
||||
evt_reward_get_flg: int,
|
||||
total_play_days: int,
|
||||
awake_num: int,
|
||||
use_support_ct: int,
|
||||
beat_num: int,
|
||||
aid_skill: int,
|
||||
last_evt: int
|
||||
) -> None:
|
||||
sql = update(profile).where(profile.c.user == user_id).values(
|
||||
extra_counter=extra_counter,
|
||||
event_reward_get_flag=evt_reward_get_flg,
|
||||
total_play_days=total_play_days,
|
||||
awake_num=awake_num,
|
||||
use_support_num=use_support_ct,
|
||||
beat_num=beat_num,
|
||||
aid_skill=aid_skill,
|
||||
last_play_event_id=last_evt
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to put extra data for user {user_id}")
|
||||
|
||||
def update_profile_tutorial_flags(self, user_id: int, tutorial_flags: List) -> None:
|
||||
sql = update(profile).where(profile.c.user == user_id).values(
|
||||
tutorial_progress_flag=tutorial_flags,
|
||||
)
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"Failed to update pokken profile tutorial flags for user {user_id}!"
|
||||
)
|
||||
|
||||
def update_profile_achievement_flags(self, user_id: int, achievement_flags: List) -> None:
|
||||
sql = update(profile).where(profile.c.user == user_id).values(
|
||||
achievement_flag=achievement_flags,
|
||||
)
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"Failed to update pokken profile achievement flags for user {user_id}!"
|
||||
)
|
||||
|
||||
def update_profile_event(self, user_id: int, event_state: List, event_flags: List[int], event_param: List[int], last_evt: int = None) -> None:
|
||||
sql = update(profile).where(profile.c.user == user_id).values(
|
||||
event_state=event_state,
|
||||
event_achievement_flag=event_flags,
|
||||
event_achievement_param=event_param,
|
||||
last_play_event_id=last_evt if last_evt is not None else profile.c.last_play_event_id,
|
||||
)
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"Failed to update pokken profile event state for user {user_id}!"
|
||||
)
|
||||
|
||||
def add_profile_points(
|
||||
self, user_id: int, rank_pts: int, money: int, score_pts: int
|
||||
self, user_id: int, rank_pts: int, money: int, score_pts: int, grade_max: int
|
||||
) -> None:
|
||||
pass
|
||||
sql = update(profile).where(profile.c.user == user_id).values(
|
||||
trainer_rank_point = profile.c.trainer_rank_point + rank_pts,
|
||||
fight_money = profile.c.fight_money + money,
|
||||
score_point = profile.c.score_point + score_pts,
|
||||
grade_max_num = grade_max
|
||||
)
|
||||
|
||||
def get_profile(self, user_id: int) -> Optional[Row]:
|
||||
sql = profile.select(profile.c.user == user_id)
|
||||
@ -174,18 +243,53 @@ class PokkenProfileData(BaseData):
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_pokemon_data(
|
||||
def put_pokemon(
|
||||
self,
|
||||
user_id: int,
|
||||
pokemon_id: int,
|
||||
illust_no: int,
|
||||
get_exp: int,
|
||||
atk: int,
|
||||
res: int,
|
||||
defe: int,
|
||||
sp: int,
|
||||
sp: int
|
||||
) -> Optional[int]:
|
||||
pass
|
||||
sql = insert(pokemon_data).values(
|
||||
user=user_id,
|
||||
char_id=pokemon_id,
|
||||
illustration_book_no=illust_no,
|
||||
bp_point_atk=atk,
|
||||
bp_point_res=res,
|
||||
bp_point_defe=defe,
|
||||
bp_point_sp=sp,
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
illustration_book_no=illust_no,
|
||||
bp_point_atk=atk,
|
||||
bp_point_res=res,
|
||||
bp_point_defe=defe,
|
||||
bp_point_sp=sp,
|
||||
)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.warn(f"Failed to insert pokemon ID {pokemon_id} for user {user_id}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def add_pokemon_xp(
|
||||
self,
|
||||
user_id: int,
|
||||
pokemon_id: int,
|
||||
xp: int
|
||||
) -> None:
|
||||
sql = update(pokemon_data).where(and_(pokemon_data.c.user==user_id, pokemon_data.c.char_id==pokemon_id)).values(
|
||||
pokemon_exp=pokemon_data.c.pokemon_exp + xp
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.warn(f"Failed to add {xp} XP to pokemon ID {pokemon_id} for user {user_id}")
|
||||
|
||||
def get_pokemon_data(self, user_id: int, pokemon_id: int) -> Optional[Row]:
|
||||
pass
|
||||
@ -193,13 +297,29 @@ class PokkenProfileData(BaseData):
|
||||
def get_all_pokemon_data(self, user_id: int) -> Optional[List[Row]]:
|
||||
pass
|
||||
|
||||
def put_results(
|
||||
self, user_id: int, pokemon_id: int, match_type: int, match_result: int
|
||||
def put_pokemon_battle_result(
|
||||
self, user_id: int, pokemon_id: int, match_type: PokkenConstants.BATTLE_TYPE, match_result: PokkenConstants.BATTLE_RESULT
|
||||
) -> None:
|
||||
"""
|
||||
Records the match stats (type and win/loss) for the pokemon and profile
|
||||
"""
|
||||
pass
|
||||
sql = update(pokemon_data).where(and_(pokemon_data.c.user==user_id, pokemon_data.c.char_id==pokemon_id)).values(
|
||||
battle_num_tutorial=pokemon_data.c.battle_num_tutorial + 1 if match_type==PokkenConstants.BATTLE_TYPE.TUTORIAL else pokemon_data.c.battle_num_tutorial,
|
||||
battle_all_num_tutorial=pokemon_data.c.battle_all_num_tutorial + 1 if match_type==PokkenConstants.BATTLE_TYPE.TUTORIAL else pokemon_data.c.battle_all_num_tutorial,
|
||||
|
||||
battle_num_vs_cpu=pokemon_data.c.battle_num_vs_cpu + 1 if match_type==PokkenConstants.BATTLE_TYPE.AI else pokemon_data.c.battle_num_vs_cpu,
|
||||
win_cpu=pokemon_data.c.win_cpu + 1 if match_type==PokkenConstants.BATTLE_TYPE.AI and match_result==PokkenConstants.BATTLE_RESULT.WIN else pokemon_data.c.win_cpu,
|
||||
|
||||
battle_num_vs_lan=pokemon_data.c.battle_num_vs_lan + 1 if match_type==PokkenConstants.BATTLE_TYPE.LAN else pokemon_data.c.battle_num_vs_lan,
|
||||
win_vs_lan=pokemon_data.c.win_vs_lan + 1 if match_type==PokkenConstants.BATTLE_TYPE.LAN and match_result==PokkenConstants.BATTLE_RESULT.WIN else pokemon_data.c.win_vs_lan,
|
||||
|
||||
battle_num_vs_wan=pokemon_data.c.battle_num_vs_wan + 1 if match_type==PokkenConstants.BATTLE_TYPE.WAN else pokemon_data.c.battle_num_vs_wan,
|
||||
win_vs_wan=pokemon_data.c.win_vs_wan + 1 if match_type==PokkenConstants.BATTLE_TYPE.WAN and match_result==PokkenConstants.BATTLE_RESULT.WIN else pokemon_data.c.win_vs_wan,
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.warn(f"Failed to record match stats for user {user_id}'s pokemon {pokemon_id} (type {match_type.name} | result {match_result.name})")
|
||||
|
||||
def put_stats(
|
||||
self,
|
||||
@ -214,4 +334,29 @@ class PokkenProfileData(BaseData):
|
||||
"""
|
||||
Records profile stats
|
||||
"""
|
||||
pass
|
||||
sql = update(profile).where(profile.c.user==user_id).values(
|
||||
ex_ko_num=profile.c.ex_ko_num + exkos,
|
||||
wko_num=profile.c.wko_num + wkos,
|
||||
timeup_win_num=profile.c.timeup_win_num + timeout_wins,
|
||||
cool_ko_num=profile.c.cool_ko_num + cool_kos,
|
||||
perfect_ko_num=profile.c.perfect_ko_num + perfects,
|
||||
continue_num=continues,
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.warn(f"Failed to update stats for user {user_id}")
|
||||
|
||||
def update_support_team(self, user_id: int, support_id: int, support1: int = 4294967295, support2: int = 4294967295) -> None:
|
||||
sql = update(profile).where(profile.c.user==user_id).values(
|
||||
support_set_1_1=support1 if support_id == 1 else profile.c.support_set_1_1,
|
||||
support_set_1_2=support2 if support_id == 1 else profile.c.support_set_1_2,
|
||||
support_set_2_1=support1 if support_id == 2 else profile.c.support_set_2_1,
|
||||
support_set_2_2=support2 if support_id == 2 else profile.c.support_set_2_2,
|
||||
support_set_3_1=support1 if support_id == 3 else profile.c.support_set_3_1,
|
||||
support_set_3_2=support2 if support_id == 3 else profile.c.support_set_3_2,
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.warn(f"Failed to update support team {support_id} for user {user_id}")
|
||||
|
titles/pokken/services.py (new file, 66 lines)
@@ -0,0 +1,66 @@
from twisted.internet.interfaces import IAddress
from twisted.internet.protocol import Protocol
from autobahn.twisted.websocket import WebSocketServerProtocol, WebSocketServerFactory
from autobahn.websocket.types import ConnectionRequest
from typing import Dict
import logging
import json

from core.config import CoreConfig
from .config import PokkenConfig
from .base import PokkenBase

class PokkenAdmissionProtocol(WebSocketServerProtocol):
    def __init__(self, cfg: CoreConfig, game_cfg: PokkenConfig):
        super().__init__()
        self.core_config = cfg
        self.game_config = game_cfg
        self.logger = logging.getLogger("pokken")

        self.base = PokkenBase(cfg, game_cfg)

    def onConnect(self, request: ConnectionRequest) -> None:
        self.logger.debug(f"Admission: Connection from {request.peer}")

    def onClose(self, wasClean: bool, code: int, reason: str) -> None:
        self.logger.debug(f"Admission: Connection with {self.transport.getPeer().host} closed {'cleanly ' if wasClean else ''}with code {code} - {reason}")

    def onMessage(self, payload, isBinary: bool) -> None:
        msg: Dict = json.loads(payload)
        self.logger.debug(f"Admission: Message from {self.transport.getPeer().host}:{self.transport.getPeer().port} - {msg}")

        api = msg.get("api", "noop")
        handler = getattr(self.base, f"handle_admission_{api.lower()}")
        resp = handler(msg, self.transport.getPeer().host)

        if resp is None:
            resp = {}

        if "type" not in resp:
            resp['type'] = "res"
        if "data" not in resp:
            resp['data'] = {}
        if "api" not in resp:
            resp['api'] = api
        if "result" not in resp:
            resp['result'] = 'true'

        self.logger.debug(f"Websocket response: {resp}")
        self.sendMessage(json.dumps(resp).encode(), isBinary)

class PokkenAdmissionFactory(WebSocketServerFactory):
    protocol = PokkenAdmissionProtocol

    def __init__(
        self,
        cfg: CoreConfig,
        game_cfg: PokkenConfig
    ) -> None:
        self.core_config = cfg
        self.game_config = game_cfg
        super().__init__(f"ws://{self.game_config.server.hostname}:{self.game_config.ports.admission}")

    def buildProtocol(self, addr: IAddress) -> Protocol:
        p = self.protocol(self.core_config, self.game_config)
        p.factory = self
        return p
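For a quick sanity check of the admission endpoint above, a client exchange might look like this (a hedged sketch, not part of this commit: it assumes the third-party `websockets` package, the default admission port of 9001, and uses the `joinsession` API handled by handle_admission_joinsession in base.py):

```python
# Hedged sketch of poking the admission WebSocket from a test client.
import asyncio
import json

import websockets  # assumption: pip install websockets


async def probe_admission(host: str = "localhost", port: int = 9001) -> None:
    async with websockets.connect(f"ws://{host}:{port}") as ws:
        await ws.send(json.dumps({"api": "joinsession"}))
        reply = json.loads(await ws.recv())
        # PokkenAdmissionProtocol fills in the default fields, so the reply is roughly:
        # {"data": {"id": 12345678}, "type": "res", "api": "joinsession", "result": "true"}
        print(reply)


if __name__ == "__main__":
    asyncio.run(probe_admission())
```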
titles/sao/__init__.py (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
from .index import SaoServlet
|
||||
from .const import SaoConstants
|
||||
from .database import SaoData
|
||||
from .read import SaoReader
|
||||
|
||||
index = SaoServlet
|
||||
database = SaoData
|
||||
reader = SaoReader
|
||||
game_codes = [SaoConstants.GAME_CODE]
|
||||
current_schema_version = 1
|
titles/sao/base.py (new file, 1244 lines)
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.