94 Commits

Author SHA1 Message Date
132106499a Add map conditions for LUMINOUS ep.III 2024-07-22 12:00:29 +07:00
9df834c540 is this used anywhere? 2024-07-11 23:23:21 +07:00
5e690cb28c feat(cm.read): Add support for reading packed files 2024-07-08 22:55:42 +07:00
4dcaf93dc7 fix(chuni.read): Actually set something 2024-07-08 02:08:58 +07:00
e6153b022c fix(ongeki): actually tell the game the profile is locked 2024-07-07 21:13:37 +07:00
fb8d1deb53 fix typing 2024-07-06 07:38:11 +07:00
cd33076d0e log responses at info 2024-07-06 06:58:48 +07:00
5894831154 things 2024-07-06 06:57:59 +07:00
6f12f9988e act2 is apparently not actually a thing 2024-07-06 06:16:37 +07:00
7738b8a56f ongeki: act2/act3 bits i forgot 2024-07-06 00:22:19 +07:00
dd8143a9f1 ongeki: Treat Act2/Act3 as separate versions 2024-07-06 00:19:46 +07:00
043bd83285 unindent 2024-07-05 23:37:07 +07:00
f234b7aa18 support SDDT 1.45 2024-07-05 22:42:33 +07:00
e03243c485 i am stupid 2024-06-29 03:31:50 +07:00
d565d0860b oops 2024-06-29 01:02:35 +07:00
e1429a0d7e extend the lock when upserting 2024-06-29 01:01:31 +07:00
93b4052727 feat(chuni.read): Only enable announcement events of the latest imported option 2024-06-27 11:06:47 +07:00
549fd88b32 is this why you're supposed to quote conditions? 2024-06-24 09:49:31 +07:00
e4ad50fb0c i am stupid (2) 2024-06-23 15:07:00 +07:00
b6e5cc3065 i am stupid 2024-06-23 12:58:51 +07:00
6f864d1d7a add login state at preview 2024-06-23 12:56:08 +07:00
d4c149d5a2 feat(chuni): Improved logging 2024-06-23 12:38:53 +07:00
63987f2095 feat(chuni): Implement GameLoginApi/GameLogoutApi 2024-06-23 12:30:30 +07:00
79d27b0582 feat(chuni): Also check the machine serial in the title server 2024-06-23 11:55:06 +07:00
725b5df6e8 fix luminous ep. I event ID 2024-06-23 04:20:51 +07:00
a90e6b60b0 properly log database exceptions 2024-06-23 04:08:20 +07:00
f9bc8cedfc put the upsert into transactions, show all purchasable tickets 2024-06-23 04:08:07 +07:00
f1b02a3bc6 use sql pagination (offset/limit) 2024-06-23 03:19:35 +07:00
2abda8a1f0 use the map flag event, not the announcement event 2024-06-23 01:34:16 +07:00
ea63df23d6 fix logging in pokken 2024-06-23 01:17:59 +07:00
6dd9707440 fix net battle migration 2024-06-23 01:06:29 +07:00
e89a7b0cae chuni: add missing net battle funcs 2024-06-23 00:55:37 +07:00
6f7f08d019 [chuni] Improve GetGameMapAreaConditionApi 2024-06-23 00:49:31 +07:00
b89dbb89cc add idm and chip_id fields to card table 2024-06-23 00:48:12 +07:00
e5463d61a5 chuni: fix reader issue caused by encoding weirdness (#55) 2024-06-23 00:48:04 +07:00
a992c1851c Diva front end (#150)
Basic diva frontend implemented. Changing the name and level string works, and there is a basic playlog page that still needs more data.

Co-authored-by: = <=>
Reviewed-on: #150
Co-authored-by: ThatzOkay <thatzokay@noreply.gitea.tendokyu.moe>
Co-committed-by: ThatzOkay <thatzokay@noreply.gitea.tendokyu.moe>
2024-06-23 00:47:47 +07:00
b5b9d702ac wacca: fix vs song updates 2024-06-23 00:47:10 +07:00
eedae234c5 fix up revision tree to match cozynet's early support of luminous 2024-06-23 00:37:27 +07:00
08b6eec08a ongeki: I forgot json load 2024-06-23 00:29:54 +07:00
2acd6c5204 ongeki: fix base version title work 2024-06-23 00:29:53 +07:00
36b71255d1 pokken: fix default title plate id 2024-06-23 00:29:33 +07:00
e69601210d fix event log ordering, paging, add timestamp 2024-06-23 00:29:33 +07:00
f8e0933bbc pokken: fix pokemon_data uk 2024-06-23 00:29:33 +07:00
8ee4b5daf9 pokken: fix some image links 2024-06-23 00:29:33 +07:00
32913fe069 pokken: finish filling out support list 2024-06-23 00:29:33 +07:00
8c28a6de3a pokken: readd mysql_charset to profile and pokemon data 2024-06-23 00:29:33 +07:00
6ec7bd3b5d pokken: fix profile loading fail 2024-06-23 00:29:33 +07:00
4bf0362ff4 pokken: add additional logging 2024-06-23 00:29:33 +07:00
cc3aa68003 pokken: fix bnp_baseuri 2024-06-23 00:29:33 +07:00
05a8db2c25 pokken: backport changes from Diana 2024-06-23 00:29:31 +07:00
58e4f1f3da pokken: fix achievement flags 2024-06-23 00:29:09 +07:00
346ea41db4 ongeki: fix frontend versions 2024-06-23 00:29:06 +07:00
560de25d49 mai2: add basic webui 2024-06-23 00:28:36 +07:00
c8719386e8 mai2: oops, forgot version 2024-06-23 00:28:10 +07:00
c505c2eaad dx: fix GetUserRivalMusicApi list index out of range 2024-06-23 00:28:10 +07:00
e82ab8009b mai2: fix rival data load failing due to inheritance 2024-06-23 00:28:10 +07:00
7f3a6c463b mai2: add rivals support 2024-06-23 00:28:10 +07:00
946a4b8298 mai2: fix aggressive find and replace 2024-06-23 00:28:10 +07:00
58df65bd3e frontend: add username login 2024-06-23 00:28:10 +07:00
2b8bfb8f40 chuni: fix frontend if no chunithm profiles are loaded 2024-06-23 00:28:08 +07:00
6c0cec4904 mai2: add tables for rivals and favorite music 2024-06-23 00:27:49 +07:00
6ca16b28d5 Fix the diva reader when reading modded content: if a number can't be parsed, log a friendly error and continue instead of crashing 2024-06-23 00:26:35 +07:00
fb56c3fea0 ongeki: clearstatus db migration 2024-06-23 00:26:20 +07:00
34111a0bee ongeki: fix clearstatus type 2024-06-23 00:26:12 +07:00
10251d6c2b cxb: add grade to playlog 2024-06-23 00:26:05 +07:00
eb42caf29d add no logs message to event viewer 2024-06-23 00:25:57 +07:00
cf3f728995 Fix missing await when starting diva profile 2024-06-23 00:25:08 +07:00
c24e3f2901 diva: fix handle_start_request 2024-06-23 00:25:00 +07:00
d68aa5c20a DIVA: Fixed binary handler & render_POST errors 2024-06-23 00:24:53 +07:00
3c18a1b147 add basic event log viewer 2024-06-23 00:24:46 +07:00
da0e186bed allnet: enhance logging 2024-06-23 00:24:34 +07:00
200ee03373 add additional details to event_log 2024-06-23 00:24:24 +07:00
1f489bce51 chuni: Fix endpoint for older version of SDGS (#141)
Reviewed-on: #141
Co-authored-by: roaz <roaz@noreply.gitea.tendokyu.moe>
Co-committed-by: roaz <roaz@noreply.gitea.tendokyu.moe>
2024-06-23 00:23:37 +07:00
004e0bd377 fix(aimedb): Ignore requests with invalid header 2024-05-17 09:40:04 +07:00
07f25b0703 mai2: some improvements for earlier DX versions, and return the game code in the URI (#125)
Attention: this all applies to maimai DX and newer versions, not Pre-DX.

DX and newer versions request these URLs but never use them; they only exist in the game code. `oldServerUrl` was only found to be used in SDEZ 1.00. This should also stop SDGA and SDGB from trying to visit `ServerUrl + movieServerUrl`, although that is purely down to SEGA's sloppy code.
Tested and working.

![image](/attachments/f2c79134-4651-4976-8278-bbcf268f424a)

Reviewed-on: #125
Co-authored-by: zaphkito <zaphkito@noreply.gitea.tendokyu.moe>
Co-committed-by: zaphkito <zaphkito@noreply.gitea.tendokyu.moe>
2024-05-07 12:42:10 +07:00
b97764a127 [mai2] Support encryption (#130)
Similar to O.N.G.E.K.I. and CHUNITHM, with the caveat that the obfuscated endpoint is created using `md5(endpoint + salt)` instead of using PBKDF2 like other games.

Tested and confirmed working on FESTiVAL+.

The current implementation is also affected by #129, so I'm open to ideas.

Reviewed-on: #130
Co-authored-by: beerpsi <beerpsi@duck.com>
Co-committed-by: beerpsi <beerpsi@duck.com>
2024-05-02 00:31:35 +07:00
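A minimal sketch of the endpoint obfuscation described in the commit above: maimai DX derives the obfuscated endpoint with a plain `md5(endpoint + salt)`, while other titles reportedly use PBKDF2. The salt value, string encoding, digest formatting, and PBKDF2 parameters below are illustrative assumptions, not the repository's actual implementation.

```python
# Sketch only; salt handling, encodings, and PBKDF2 parameters are assumptions.
import hashlib

def mai2_obfuscated_endpoint(endpoint: str, salt: str) -> str:
    # maimai DX: plain md5 over endpoint + salt, per the commit message
    return hashlib.md5((endpoint + salt).encode()).hexdigest()

def pbkdf2_obfuscated_endpoint(endpoint: str, salt: bytes, iterations: int = 70) -> str:
    # Other titles (O.N.G.E.K.I./CHUNITHM) use PBKDF2 instead; these parameters are illustrative
    return hashlib.pbkdf2_hmac("sha1", endpoint.encode(), salt, iterations).hex()
```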
c8f3fe0b15 Ongeki: fixed missing await under get_tech_count 2024-05-02 00:27:13 +07:00
e3dc282bd8 [allnet] Enable DFI-encoded responses 2024-04-30 22:41:13 +07:00
8d7ef7471e fix: relax keychip format 2024-04-23 14:57:36 +00:00
19732c1a02 fix: ignore guest plays correctly 2024-04-23 14:48:40 +00:00
2537822ec3 fix: proper guest detection maybe 2024-04-21 19:49:57 +00:00
7a489ce6e7 Downgrade compression errors to warnings 2024-04-21 18:57:54 +00:00
1717435f9d fix allnet requests not being received if billing is not standalone
https://github.com/encode/starlette/issues/380#issuecomment-462257965
2024-04-21 07:54:18 +00:00
2d0c26c6ca DIANA_CFG_DIR -> ARTEMIS_CFG_DIR 2024-04-21 04:57:58 +00:00
752d27b30a Reuse app_allnet and app_billing 2024-04-21 04:53:32 +00:00
91c8547af2 fix: Store reference to async tasks
https://docs.astral.sh/ruff/rules/asyncio-dangling-task/
2024-04-21 04:39:31 +00:00
2773f9a2f8 Don't emit errors on malformed AimeDB requests 2024-04-21 00:39:19 +00:00
f613a32d12 Also check if discord_webhook is an empty string 2024-04-20 14:27:11 +00:00
d8e8db3872 add notice about fork 2024-04-20 14:24:10 +00:00
f067514f71 add aiohttp to requirements (for webhooks) 2024-04-20 14:14:54 +00:00
a2a1a578a4 Support CHUNITHM LUMINOUS 2024-04-20 14:07:37 +00:00
f4211e741a chuni: Ignore guest plays in upserts 2024-04-20 13:54:27 +00:00
fed5f9395a Add ability to send logs to Discord 2024-04-20 13:49:23 +00:00
994fa0d41e Centralized logging
Allows us to extend the logging infrastructure, e.g. by adding Loki/Discord webhooks to the mix.
2024-04-20 12:11:37 +00:00
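A hedged usage sketch of the centralized logging helpers, mirroring the calls that appear in the diffs below (`core.logger.init_root_logger(cfg)` and `core.logger.create_logger(name, loglevel)`); anything beyond those two call shapes is an assumption.

```python
# Sketch based on the calls visible in the diffs; not a definitive API description.
import yaml
import core.logger
from core.config import CoreConfig

cfg = CoreConfig()
cfg.update(yaml.safe_load(open("config/core.yaml")))

core.logger.init_root_logger(cfg)  # set up the root handlers once, at startup
logger = core.logger.create_logger("Allnet", cfg.allnet.loglevel)  # per-component logger
logger.info("Ready")
```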
187 changed files with 6192 additions and 1804 deletions

View File

@ -1,6 +1,30 @@
# Changelog
Documenting updates to ARTEMiS, to be updated every time the master branch is pushed to.
## 20240620
### CHUNITHM
+ CHUNITHM LUMINOUS support
## 20240616
### DIVA
+ Working frontend with name and level string editing and playlog
## 20240530
### DIVA
+ Fix reader for when difficulty is not an int
## 20240526
### DIVA
+ Fixed missing awaits causing coroutine error
## 20240524
### DIVA
+ Fixed new profile start request causing coroutine error
## 20240523
### DIVA
+ Fixed binary handler & render_POST errors
## 20240408
### System
+ Modified the game specific documentation

View File

@ -1,4 +1,4 @@
from construct import Struct, Int16ul, Padding, Bytes, Int32ul, Int32sl
from construct import Struct, Padding, Bytes, Int32ul, Int32sl
from .base import *

View File

@ -1,4 +1,4 @@
from construct import Struct, Int32sl, Padding, Int8ub, Int16sl
from construct import Struct, Int32sl, Padding, Int8ub
from typing import Union
from .base import *
@ -10,13 +10,14 @@ class ADBFelicaLookupRequest(ADBBaseRequest):
self.pmm = hex(pmm)[2:].upper()
class ADBFelicaLookupResponse(ADBBaseResponse):
def __init__(self, access_code: str = None, game_id: str = "SXXX", store_id: int = 1, keychip_id: str = "A69E01A8888", code: int = 0x03, length: int = 0x30, status: int = 1) -> None:
def __init__(self, access_code: str = None, idx: int = 0, game_id: str = "SXXX", store_id: int = 1, keychip_id: str = "A69E01A8888", code: int = 0x03, length: int = 0x30, status: int = 1) -> None:
super().__init__(code, length, status, game_id, store_id, keychip_id)
self.access_code = access_code if access_code is not None else "00000000000000000000"
self.idx = idx
@classmethod
def from_req(cls, req: ADBHeader, access_code: str = None) -> "ADBFelicaLookupResponse":
c = cls(access_code, req.game_id, req.store_id, req.keychip_id)
def from_req(cls, req: ADBHeader, access_code: str = None, idx: int = 0) -> "ADBFelicaLookupResponse":
c = cls(access_code, idx, req.game_id, req.store_id, req.keychip_id)
c.head.protocol_ver = req.protocol_ver
return c
@ -26,7 +27,7 @@ class ADBFelicaLookupResponse(ADBBaseResponse):
"access_code" / Int8ub[10],
Padding(2)
).build(dict(
felica_idx = 0,
felica_idx = self.idx,
access_code = bytes.fromhex(self.access_code)
))

View File

@ -1,9 +1,8 @@
import logging, coloredlogs
from Crypto.Cipher import AES
from typing import Dict, Tuple, Callable, Union, Optional
import asyncio
from logging.handlers import TimedRotatingFileHandler
import core.logger
from core.config import CoreConfig
from core.utils import create_sega_auth_key
from core.data import Data
@ -14,30 +13,7 @@ class AimedbServlette():
def __init__(self, core_cfg: CoreConfig) -> None:
self.config = core_cfg
self.data = Data(core_cfg)
self.logger = logging.getLogger("aimedb")
if not hasattr(self.logger, "initted"):
log_fmt_str = "[%(asctime)s] Aimedb | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.config.server.log_dir, "aimedb"),
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(self.config.aimedb.loglevel)
coloredlogs.install(
level=core_cfg.aimedb.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger.initted = True
self.logger = core.logger.create_logger("AimeDB", core_cfg.aimedb.loglevel)
if not core_cfg.aimedb.key:
self.logger.error("!!!KEY NOT SET!!!")
@ -91,16 +67,21 @@ class AimedbServlette():
decrypted = cipher.decrypt(data)
except Exception as e:
self.logger.error(f"Failed to decrypt {data.hex()} because {e}")
self.logger.debug("Failed to decrypt %s because %s", data.hex(), e)
return
self.logger.debug(f"{addr} wrote {decrypted.hex()}")
self.logger.debug("%s wrote %s", addr, decrypted.hex())
if decrypted[0] != 0x3E or decrypted[1] != 0xA1:
self.logger.debug("Ignoring request with invalid magic bytes")
return
try:
head = ADBHeader.from_data(decrypted)
except ADBHeaderException as e:
self.logger.error(f"Error parsing ADB header: {e}")
self.logger.warning("Error parsing ADB header: %s", e)
try:
encrypted = cipher.encrypt(ADBBaseResponse().make())
writer.write(encrypted)
@ -194,6 +175,9 @@ class AimedbServlette():
if user_id and user_id > 0:
await self.data.card.update_card_last_login(req.access_code)
if req.access_code.startswith("010") or req.access_code.startswith("3"):
await self.data.card.set_chip_id_by_access_code(req.access_code, req.serial_number)
self.logger.info(f"Attempt to set chip id to {req.serial_number} for access code {req.access_code}")
return ret
async def handle_lookup_ex(self, data: bytes, resp_code: int) -> ADBBaseResponse:
@ -229,15 +213,24 @@ class AimedbServlette():
async def handle_felica_lookup(self, data: bytes, resp_code: int) -> bytes:
"""
On official, I think a card has to be registered for this to actually work, but
I'm making the executive decision to not implement that and just kick back our
faux generated access code. The real felica IDm -> access code conversion is done
on the ADB server, which we do not and will not ever have access to. Because we can
assure that all IDms will be unique, this basic 0-padded hex -> int conversion will
be fine.
On official, the IDm is used as a key to look up the stored access code in a large
database. We do not have access to that database so we have to make due with what we got.
Interestingly, namco games are able to read S_PAD0 and send the server the correct access
code, but aimedb doesn't. Until somebody either enters the correct code manually, or scans
on a game that reads it correctly from the card, this will have to do. It's the same conversion
used on the big boy networks.
"""
req = ADBFelicaLookupRequest(data)
ac = self.data.card.to_access_code(req.idm)
card = await self.data.card.get_card_by_idm(req.idm)
if not card:
ac = self.data.card.to_access_code(req.idm)
test = await self.data.card.get_card_by_access_code(ac)
if test:
await self.data.card.set_idm_by_access_code(ac, req.idm)
else:
ac = card['access_code']
self.logger.info(
f"idm {req.idm} ipm {req.pmm} -> access_code {ac}"
)
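For reference, the "0-padded hex -> int conversion" the old docstring mentions could look like the sketch below. The 20-digit access code and 16-character IDm widths are inferred from values seen elsewhere in this file; this is not the exact `CardData` implementation.

```python
# Rough sketch of the IDm <-> access code conversion referenced in the docstring;
# field widths and zero-padding are assumptions based on values in this diff.
def to_access_code(idm: str) -> str:
    return f"{int(idm, base=16):020}"

def to_idm(access_code: str) -> str:
    return f"{int(access_code):016X}"
```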
@ -245,7 +238,8 @@ class AimedbServlette():
async def handle_felica_register(self, data: bytes, resp_code: int) -> bytes:
"""
I've never seen this used.
Used to register felica moble access codes. Will never be used on our network
because we don't implement felica_lookup properly.
"""
req = ADBFelicaLookupRequest(data)
ac = self.data.card.to_access_code(req.idm)
@ -279,8 +273,18 @@ class AimedbServlette():
async def handle_felica_lookup_ex(self, data: bytes, resp_code: int) -> bytes:
req = ADBFelicaLookup2Request(data)
access_code = self.data.card.to_access_code(req.idm)
user_id = await self.data.card.get_user_id_from_card(access_code=access_code)
user_id = None
card = await self.data.card.get_card_by_idm(req.idm)
if not card:
access_code = self.data.card.to_access_code(req.idm)
card = await self.data.card.get_card_by_access_code(access_code)
if card:
user_id = card['user']
await self.data.card.set_idm_by_access_code(access_code, req.idm)
else:
user_id = card['user']
access_code = card['access_code']
if user_id is None:
user_id = -1
@ -290,6 +294,14 @@ class AimedbServlette():
)
resp = ADBFelicaLookup2Response.from_req(req.head, user_id, access_code)
if user_id > 0:
if card['is_banned'] and card['is_locked']:
resp.head.status = ADBStatus.BAN_SYS_USER
elif card['is_banned']:
resp.head.status = ADBStatus.BAN_SYS
elif card['is_locked']:
resp.head.status = ADBStatus.LOCK_USER
if user_id and user_id > 0 and self.config.aimedb.id_secret:
auth_key = create_sega_auth_key(user_id, req.head.game_id, req.head.store_id, req.head.keychip_id, self.config.aimedb.id_secret, self.config.aimedb.id_lifetime_seconds)
@ -337,6 +349,16 @@ class AimedbServlette():
self.logger.info(
f"Registration blocked!: access code {req.access_code}"
)
if user_id > 0:
if req.access_code.startswith("010") or req.access_code.startswith("3"):
await self.data.card.set_chip_id_by_access_code(req.access_code, req.serial_number)
self.logger.info(f"Attempt to set chip id to {req.serial_number} for access code {req.access_code}")
elif req.access_code.startswith("0008"):
idm = self.data.card.to_idm(req.access_code)
await self.data.card.set_idm_by_access_code(req.access_code, idm)
self.logger.info(f"Attempt to set IDm to {idm} for access code {req.access_code}")
resp = ADBLookupResponse.from_req(req.head, user_id)
if resp.user_id <= 0:

View File

@ -4,11 +4,9 @@ import zlib
import json
import yaml
import logging
import coloredlogs
import urllib.parse
import math
from typing import Dict, List, Any, Optional, Union, Final
from logging.handlers import TimedRotatingFileHandler
from starlette.requests import Request
from starlette.responses import PlainTextResponse
from starlette.applications import Starlette
@ -20,6 +18,7 @@ from Crypto.Hash import SHA
from Crypto.Signature import PKCS1_v1_5
from os import path, environ, mkdir, access, W_OK
import core.logger
from .config import CoreConfig
from .utils import Utils
from .data import Data
@ -99,30 +98,10 @@ class AllnetServlet:
self.config = core_cfg
self.config_folder = cfg_folder
self.data = Data(core_cfg)
self.logger = logging.getLogger("allnet")
if not hasattr(self.logger, "initialized"):
log_fmt_str = "[%(asctime)s] Allnet | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.config.server.log_dir, "allnet"),
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(core_cfg.allnet.loglevel)
coloredlogs.install(
level=core_cfg.allnet.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger.initialized = True
self.logger = core.logger.create_logger(
"Allnet",
core_cfg.allnet.loglevel,
)
def startup(self) -> None:
self.logger.info(f"Ready on port {self.config.allnet.port if self.config.allnet.standalone else self.config.server.port}")
@ -132,7 +111,7 @@ class AllnetServlet:
async def handle_poweron(self, request: Request):
request_ip = Utils.get_ip_addr(request)
pragma_header = request.headers.get('Pragma', "")
is_dfi = pragma_header is not None and pragma_header == "DFI"
is_dfi = pragma_header == "DFI"
data = await request.body()
try:
@ -171,7 +150,7 @@ class AllnetServlet:
if machine is None and not self.config.server.allow_unregistered_serials:
msg = f"Unrecognised serial {req.serial} attempted allnet auth from {request_ip}."
await self.data.base.log_event(
"allnet", "ALLNET_AUTH_UNKNOWN_SERIAL", logging.WARN, msg
"allnet", "ALLNET_AUTH_UNKNOWN_SERIAL", logging.WARN, msg, {"serial": req.serial}, None, None, None, request_ip, req.game_id, req.ver
)
self.logger.warning(msg)
@ -183,9 +162,9 @@ class AllnetServlet:
arcade = await self.data.arcade.get_arcade(machine["arcade"])
if self.config.server.check_arcade_ip:
if arcade["ip"] and arcade["ip"] is not None and arcade["ip"] != req.ip:
msg = f"Serial {req.serial} attempted allnet auth from bad IP {req.ip} (expected {arcade['ip']})."
msg = f"{req.serial} attempted allnet auth from bad IP {req.ip} (expected {arcade['ip']})."
await self.data.base.log_event(
"allnet", "ALLNET_AUTH_BAD_IP", logging.ERROR, msg
"allnet", "ALLNET_AUTH_BAD_IP", logging.ERROR, msg, {}, None, arcade['id'], machine['id'], request_ip, req.game_id, req.ver
)
self.logger.warning(msg)
@ -194,9 +173,9 @@ class AllnetServlet:
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
elif (not arcade["ip"] or arcade["ip"] is None) and self.config.server.strict_ip_checking:
msg = f"Serial {req.serial} attempted allnet auth from bad IP {req.ip}, but arcade {arcade['id']} has no IP set! (strict checking enabled)."
msg = f"{req.serial} attempted allnet auth from bad IP {req.ip}, but arcade {arcade['id']} has no IP set! (strict checking enabled)."
await self.data.base.log_event(
"allnet", "ALLNET_AUTH_NO_SHOP_IP", logging.ERROR, msg
"allnet", "ALLNET_AUTH_NO_SHOP_IP", logging.ERROR, msg, {}, None, arcade['id'], machine['id'], request_ip, req.game_id, req.ver
)
self.logger.warning(msg)
@ -204,7 +183,17 @@ class AllnetServlet:
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
if machine['game'] and machine['game'] != req.game_id:
msg = f"{req.serial} attempted allnet auth with bad game ID {req.game_id} (expected {machine['game']})."
await self.data.base.log_event(
"allnet", "ALLNET_AUTH_BAD_GAME", logging.ERROR, msg, {}, None, arcade['id'], machine['id'], request_ip, req.game_id, req.ver
)
self.logger.warning(msg)
resp.stat = ALLNET_STAT.bad_game.value
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
country = (
arcade["country"] if machine["country"] is None else machine["country"]
)
@ -236,11 +225,14 @@ class AllnetServlet:
arcade["timezone"] if arcade["timezone"] is not None else "+0900" if req.format_ver == 3 else "+09:00"
)
else:
arcade = None
if req.game_id not in TitleServlet.title_registry:
if not self.config.server.is_develop:
msg = f"Unrecognised game {req.game_id} attempted allnet auth from {request_ip}."
await self.data.base.log_event(
"allnet", "ALLNET_AUTH_UNKNOWN_GAME", logging.WARN, msg
"allnet", "ALLNET_AUTH_UNKNOWN_GAME", logging.WARN, msg, {}, None, arcade['id'] if arcade else None, machine['id'] if machine else None, request_ip, req.game_id, req.ver
)
self.logger.warning(msg)
@ -271,25 +263,37 @@ class AllnetServlet:
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
msg = f"{req.serial} authenticated from {request_ip}: {req.game_id} v{req.ver}"
await self.data.base.log_event("allnet", "ALLNET_AUTH_SUCCESS", logging.INFO, msg)
if machine and arcade:
msg = f"{req.serial} authenticated from {request_ip}: {req.game_id} v{req.ver}"
await self.data.base.log_event(
"allnet", "ALLNET_AUTH_SUCCESS", logging.INFO, msg, {}, None, arcade['id'], machine['id'], request_ip, req.game_id, req.ver
)
else:
msg = f"Allow unregistered serial {req.serial} to authenticate from {request_ip}: {req.game_id} v{req.ver}"
await self.data.base.log_event(
"allnet", "ALLNET_AUTH_SUCCESS_UNREG", logging.INFO, msg, {"serial": req.serial}, None, None, None, request_ip, req.game_id, req.ver
)
self.logger.info(msg)
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
resp_str = urllib.parse.unquote(urllib.parse.urlencode(resp_dict))
resp_str = urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n"
self.logger.debug(f"Allnet response: {resp_dict}")
resp_str += "\n"
"""if is_dfi:
request.responseHeaders.addRawHeader('Pragma', 'DFI')
return self.to_dfi(resp_str)"""
if is_dfi:
return PlainTextResponse(
content=self.to_dfi(resp_str) + b"\r\n",
headers={
"Pragma": "DFI",
},
)
return PlainTextResponse(resp_str)
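One plausible shape for the `to_dfi()` helper used above is to zlib-compress the plain-text body and base64-encode it before returning it with the `Pragma: DFI` header. This is an assumption for illustration; the repository's actual codec may differ.

```python
# Assumed DFI encoder sketch: deflate then base64. Not necessarily the repo's implementation.
import base64
import zlib

def to_dfi(data: str) -> bytes:
    return base64.b64encode(zlib.compress(data.encode("utf-8")))
```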
async def handle_dlorder(self, request: Request):
request_ip = Utils.get_ip_addr(request)
pragma_header = request.headers.get('Pragma', "")
is_dfi = pragma_header is not None and pragma_header == "DFI"
is_dfi = pragma_header == "DFI"
data = await request.body()
try:
@ -326,7 +330,11 @@ class AllnetServlet:
):
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n")
else: # TODO: Keychip check
else:
machine = await self.data.arcade.get_machine(req.serial)
if not machine or not machine['ota_enable'] or not machine['is_cab'] or machine['is_blacklisted']:
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n")
if path.exists(
f"{self.config.allnet.update_cfg_folder}/{req.game_id}-{req.ver.replace('.', '')}-app.ini"
):
@ -337,25 +345,38 @@ class AllnetServlet:
):
resp.uri += f"|http://{self.config.server.hostname}:{self.config.server.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-opt.ini"
self.logger.debug(f"Sending download uri {resp.uri}")
await self.data.base.log_event("allnet", "DLORDER_REQ_SUCCESS", logging.INFO, f"{Utils.get_ip_addr(request)} requested DL Order for {req.serial} {req.game_id} v{req.ver}")
if resp.uri:
self.logger.info(f"Sending download uri {resp.uri}")
await self.data.base.log_event(
"allnet", "DLORDER_REQ_SUCCESS", logging.INFO, f"Send download URI to {req.serial} for {req.game_id} v{req.ver} from {Utils.get_ip_addr(request)}", {"uri": resp.uri}, None,
machine['arcade'], machine['id'], request_ip, req.game_id, req.ver
)
# Maybe add a log event for checkin but no url sent?
res_str = urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n"
"""if is_dfi:
request.responseHeaders.addRawHeader('Pragma', 'DFI')
return self.to_dfi(res_str)"""
if is_dfi:
return PlainTextResponse(
content=self.to_dfi(res_str) + b"\r\n",
headers={
"Pragma": "DFI",
},
)
return PlainTextResponse(res_str)
async def handle_dlorder_ini(self, request: Request) -> bytes:
req_file = request.path_params.get("file", "").replace("%0A", "").replace("\n", "")
request_ip = Utils.get_ip_addr(request)
if not req_file:
return PlainTextResponse(status_code=404)
if path.exists(f"{self.config.allnet.update_cfg_folder}/{req_file}"):
self.logger.info(f"Request for DL INI file {req_file} from {Utils.get_ip_addr(request)} successful")
await self.data.base.log_event("allnet", "DLORDER_INI_SENT", logging.INFO, f"{Utils.get_ip_addr(request)} successfully recieved {req_file}")
self.logger.info(f"Request for DL INI file {req_file} from {request_ip} successful")
await self.data.base.log_event(
"allnet", "DLORDER_INI_SENT", logging.INFO, f"{request_ip} successfully recieved {req_file}", {"file": req_file}, ip=request_ip
)
return PlainTextResponse(open(
f"{self.config.allnet.update_cfg_folder}/{req_file}", "r", encoding="utf-8"
@ -393,7 +414,13 @@ class AllnetServlet:
msg = f"{rep.serial} @ {client_ip} reported {rep.rep_type.name} download state {rep.rf_state.name} for {rep.gd} v{rep.dav}:"\
f" {rep.tdsc}/{rep.tsc} segments downloaded for working files {rep.wfl} with {rep.dfl if rep.dfl else 'none'} complete."
await self.data.base.log_event("allnet", "DL_REPORT", logging.INFO, msg, dl_data)
machine = await self.data.arcade.get_machine(rep.serial)
if machine:
await self.data.base.log_event("allnet", "DL_REPORT", logging.INFO, msg, dl_data, None, machine['arcade'], machine['id'], client_ip, rep.gd, rep.dav)
else:
msg = "Unknown serial " + msg
await self.data.base.log_event("allnet", "DL_REPORT_UNREG", logging.INFO, msg, dl_data, None, None, None, client_ip, rep.gd, rep.dav)
self.logger.info(msg)
return PlainTextResponse("OK")
@ -413,14 +440,24 @@ class AllnetServlet:
if serial is None or num_files_dld is None or num_files_to_dl is None or dl_state is None:
return PlainTextResponse("NG")
self.logger.info(f"LoaderStateRecorder Request from {ip} {serial}: {num_files_dld}/{num_files_to_dl} Files download (State: {dl_state})")
msg = f"LoaderStateRecorder Request from {ip} {serial}: {num_files_dld}/{num_files_to_dl} Files download (State: {dl_state})"
machine = await self.data.arcade.get_machine(serial)
if machine:
await self.data.base.log_event("allnet", "LSR_REPORT", logging.INFO, msg, req_dict, None, machine['arcade'], machine['id'], ip)
else:
msg = "Unregistered " + msg
await self.data.base.log_event("allnet", "LSR_REPORT_UNREG", logging.INFO, msg, req_dict, None, None, None, ip)
self.logger.info(msg)
return PlainTextResponse("OK")
async def handle_alive(self, request: Request) -> bytes:
return PlainTextResponse("OK")
async def handle_naomitest(self, request: Request) -> bytes:
self.logger.info(f"Ping from {Utils.get_ip_addr(request)}")
# This could be spam-able, removing
#self.logger.info(f"Ping from {Utils.get_ip_addr(request)}")
return PlainTextResponse("naomi ok")
def allnet_req_to_dict(self, data: str) -> Optional[List[Dict[str, Any]]]:
@ -454,30 +491,7 @@ class BillingServlet:
self.config = core_cfg
self.config_folder = cfg_folder
self.data = Data(core_cfg)
self.logger = logging.getLogger("billing")
if not hasattr(self.logger, "initialized"):
log_fmt_str = "[%(asctime)s] Billing | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.config.server.log_dir, "billing"),
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(core_cfg.allnet.loglevel)
coloredlogs.install(
level=core_cfg.billing.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger.initialized = True
self.logger = core.logger.create_logger("Billing", core_cfg.billing.loglevel)
def startup(self) -> None:
self.logger.info(f"Ready on port {self.config.billing.port if self.config.billing.standalone else self.config.server.port}")
@ -550,18 +564,35 @@ class BillingServlet:
if machine is None and not self.config.server.allow_unregistered_serials:
msg = f"Unrecognised serial {req.keychipid} attempted billing checkin from {request_ip} for {req.gameid} v{req.gamever}."
await self.data.base.log_event(
"allnet", "BILLING_CHECKIN_NG_SERIAL", logging.WARN, msg
"allnet", "BILLING_CHECKIN_NG_SERIAL", logging.WARN, msg, ip=request_ip, game=req.gameid, version=req.gamever
)
self.logger.warning(msg)
return PlainTextResponse(f"result=1&requestno={req.requestno}&message=Keychip Serial bad\r\n")
log_details = {
"playcount": req.playcnt,
"billing_type": req.billingtype.name,
"nearfull": req.nearfull,
"playlimit": req.playlimit,
}
if machine is not None:
await self.data.base.log_event("billing", "BILLING_CHECKIN_OK", logging.INFO, "", log_details, None, machine['arcade'], machine['id'], request_ip, req.gameid, req.gamever)
self.logger.info(
f"Unregistered Billing checkin from {request_ip}: game {req.gameid} ver {req.gamever} keychip {req.keychipid} playcount "
f"{req.playcnt} billing_type {req.billingtype.name} nearfull {req.nearfull} playlimit {req.playlimit}"
)
else:
log_details['serial'] = req.keychipid
await self.data.base.log_event("billing", "BILLING_CHECKIN_OK_UNREG", logging.INFO, "", log_details, None, None, None, request_ip, req.gameid, req.gamever)
self.logger.info(
f"Unregistered Billing checkin from {request_ip}: game {req.gameid} ver {req.gamever} keychip {req.keychipid} playcount "
f"{req.playcnt} billing_type {req.billingtype.name} nearfull {req.nearfull} playlimit {req.playlimit}"
)
msg = (
f"Billing checkin from {request_ip}: game {req.gameid} ver {req.gamever} keychip {req.keychipid} playcount "
f"{req.playcnt} billing_type {req.billingtype.name} nearfull {req.nearfull} playlimit {req.playlimit}"
)
self.logger.info(msg)
await self.data.base.log_event("billing", "BILLING_CHECKIN_OK", logging.INFO, msg)
if req.traceleft > 0:
self.logger.warn(f"{req.traceleft} unsent tracelogs")
kc_playlimit = req.playlimit
@ -885,19 +916,14 @@ class DLReport:
return True
cfg_dir = environ.get("DIANA_CFG_DIR", "config")
cfg_dir = environ.get("ARTEMIS_CFG_DIR", "config")
cfg: CoreConfig = CoreConfig()
if path.exists(f"{cfg_dir}/core.yaml"):
cfg.update(yaml.safe_load(open(f"{cfg_dir}/core.yaml")))
if not path.exists(cfg.server.log_dir):
mkdir(cfg.server.log_dir)
if not access(cfg.server.log_dir, W_OK):
print(
f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
)
exit(1)
# HACK: Turns out the best place to initialize the root logger is here
# because of core module getting imported and stuff...
core.logger.init_root_logger(cfg)
billing = BillingServlet(cfg, cfg_dir)
app_billing = Starlette(

View File

@ -1,15 +1,14 @@
import yaml
import logging
import coloredlogs
from logging.handlers import TimedRotatingFileHandler
from starlette.routing import Route
from starlette.routing import Mount, Route
from starlette.requests import Request
from starlette.applications import Starlette
from starlette.responses import PlainTextResponse
from os import environ, path, mkdir, W_OK, access
from os import environ, path
from typing import List
from core import CoreConfig, TitleServlet, MuchaServlet, AllnetServlet, BillingServlet, AimedbServlette
from core import CoreConfig, TitleServlet, MuchaServlet, logger
from core.allnet import app_allnet, app_billing
from core.frontend import FrontendServlet
async def dummy_rt(request: Request):
@ -20,33 +19,10 @@ cfg: CoreConfig = CoreConfig()
if path.exists(f"{cfg_dir}/core.yaml"):
cfg.update(yaml.safe_load(open(f"{cfg_dir}/core.yaml")))
if not path.exists(cfg.server.log_dir):
mkdir(cfg.server.log_dir)
if not access(cfg.server.log_dir, W_OK):
print(
f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
)
exit(1)
logger = logging.getLogger("core")
log_fmt_str = "[%(asctime)s] Core | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(cfg.server.log_dir, "core"), when="d", backupCount=10
logger = logger.create_logger(
"Core",
logging.DEBUG if cfg.server.is_develop else logging.INFO,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
logger.addHandler(fileHandler)
logger.addHandler(consoleHandler)
log_lv = logging.DEBUG if cfg.server.is_develop else logging.INFO
logger.setLevel(log_lv)
coloredlogs.install(level=log_lv, logger=logger, fmt=log_fmt_str)
logger.info(f"Artemis starting in {'develop' if cfg.server.is_develop else 'production'} mode")
@ -64,27 +40,10 @@ route_lst: List[Route] = [
]
if not cfg.billing.standalone:
billing = BillingServlet(cfg, cfg_dir)
route_lst += [
Route("/request", billing.handle_billing_request, methods=["POST"]),
Route("/request/", billing.handle_billing_request, methods=["POST"]),
]
route_lst += app_billing.router.routes
if not cfg.allnet.standalone:
allnet = AllnetServlet(cfg, cfg_dir)
route_lst += [
Route("/sys/servlet/PowerOn", allnet.handle_poweron, methods=["GET", "POST"]),
Route("/sys/servlet/DownloadOrder", allnet.handle_dlorder, methods=["GET", "POST"]),
Route("/sys/servlet/LoaderStateRecorder", allnet.handle_loaderstaterecorder, methods=["GET", "POST"]),
Route("/sys/servlet/Alive", allnet.handle_alive, methods=["GET", "POST"]),
Route("/naomitest.html", allnet.handle_naomitest),
]
if cfg.allnet.allow_online_updates:
route_lst += [
Route("/report-api/Report", allnet.handle_dlorder_report, methods=["POST"]),
Route("/dl/ini/{file:str}", allnet.handle_dlorder_ini),
]
if not cfg.allnet.standalone:
route_lst += app_allnet.router.routes
for code, game in title.title_registry.items():
route_lst += game.get_routes()

View File

@ -1,5 +1,6 @@
import logging, os
from typing import Any
import logging
import os
from typing import Any, Optional
class ServerConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
@ -94,12 +95,6 @@ class ServerConfig:
self.__config, "core", "server", "proxy_port_ssl", default=0
)
@property
def log_dir(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "server", "log_dir", default="logs"
)
@property
def check_arcade_ip(self) -> bool:
return CoreConfig.get_config_field(
@ -112,6 +107,37 @@ class ServerConfig:
self.__config, "core", "server", "strict_ip_checking", default=False
)
class LoggingConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
self.__config = parent_config
@property
def log_dir(self) -> str:
new_log_dir = CoreConfig.get_config_field(
self.__config, "core", "logging", "log_dir", default=None
)
if new_log_dir is None:
return CoreConfig.get_config_field(
self.__config, "core", "server", "log_dir", default="logs"
)
return new_log_dir
@property
def discord_webhook(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "logging", "discord_webhook", default=None
)
@property
def discord_who_to_ping(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "logging", "discord_who_to_ping", default=[]
)
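A quick illustration of the `log_dir` fallback implemented above; the nested dict mirrors `core.yaml` keys and the directory values are hypothetical. It assumes `CoreConfig.get_config_field` walks nested keys as the surrounding config code suggests.

```python
# Sketch of the new core.logging.log_dir key falling back to the legacy core.server.log_dir.
from core.config import CoreConfig

cfg = CoreConfig()
cfg.update({"core": {"server": {"log_dir": "logs"}}})
print(cfg.logging.log_dir)  # -> "logs" (falls back to core.server.log_dir)

cfg.update({"core": {"logging": {"log_dir": "/var/log/artemis"}}})
print(cfg.logging.log_dir)  # -> "/var/log/artemis" (the new key wins when present)
```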
class TitleConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
self.__config = parent_config
@ -373,6 +399,7 @@ class MuchaConfig:
class CoreConfig(dict):
def __init__(self) -> None:
self.server = ServerConfig(self)
self.logging = LoggingConfig(self)
self.title = TitleConfig(self)
self.database = DatabaseConfig(self)
self.frontend = FrontendConfig(self)

View File

@ -0,0 +1,27 @@
"""chuni_add_net_battle_uk
Revision ID: 1e150d16ab6b
Revises: b23f985100ba
Create Date: 2024-06-21 22:57:18.418488
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '1e150d16ab6b'
down_revision = '3657efefc5a4'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_unique_constraint(None, 'chuni_profile_net_battle', ['user'])
# ### end Alembic commands ###
def downgrade():
op.drop_constraint(None, 'chuni_profile_net_battle', type_='unique')
# ### end Alembic commands ###

View File

@ -0,0 +1,36 @@
"""Implement GameLoginApi/GameLogoutApi
Revision ID: 263169f3a129
Revises: 48f4acc43a7e
Create Date: 2024-06-23 12:16:57.881129
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '263169f3a129'
down_revision = '48f4acc43a7e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('aime_user_game_locks',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('user', sa.Integer(), nullable=False),
sa.Column('game', sa.String(length=4), nullable=False),
sa.Column('expires_at', sa.TIMESTAMP(), server_default=sa.text('date_add(now(), INTERVAL 15 MINUTE)'), nullable=True),
sa.Column('extra', sa.JSON(), nullable=True),
sa.ForeignKeyConstraint(['user'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user', 'game', name='aime_user_title_locks'),
mysql_charset='utf8mb4',
)
# ### end Alembic commands ###
def downgrade():
op.drop_table("aime_user_game_locks")

View File

@ -0,0 +1,48 @@
"""add_event_log_info
Revision ID: 2bf9f38d9444
Revises: 81e44dd6047a
Create Date: 2024-05-21 23:00:17.468407
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '2bf9f38d9444'
down_revision = '81e44dd6047a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('event_log', sa.Column('user', sa.INTEGER(), nullable=True))
op.add_column('event_log', sa.Column('arcade', sa.INTEGER(), nullable=True))
op.add_column('event_log', sa.Column('machine', sa.INTEGER(), nullable=True))
op.add_column('event_log', sa.Column('ip', sa.TEXT(length=39), nullable=True))
op.alter_column('event_log', 'when_logged',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('now()'),
existing_nullable=False)
op.create_foreign_key(None, 'event_log', 'machine', ['machine'], ['id'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'event_log', 'arcade', ['arcade'], ['id'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'event_log', 'aime_user', ['user'], ['id'], onupdate='cascade', ondelete='cascade')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'event_log', type_='foreignkey')
op.drop_constraint(None, 'event_log', type_='foreignkey')
op.drop_constraint(None, 'event_log', type_='foreignkey')
op.alter_column('event_log', 'when_logged',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('current_timestamp()'),
existing_nullable=False)
op.drop_column('event_log', 'ip')
op.drop_column('event_log', 'machine')
op.drop_column('event_log', 'arcade')
op.drop_column('event_log', 'user')
# ### end Alembic commands ###

View File

@ -0,0 +1,46 @@
"""add_event_log_game_version
Revision ID: 2d024cf145a1
Revises: 2bf9f38d9444
Create Date: 2024-05-21 23:41:31.445331
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '2d024cf145a1'
down_revision = '2bf9f38d9444'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('event_log', sa.Column('game', sa.TEXT(length=4), nullable=True))
op.add_column('event_log', sa.Column('version', sa.TEXT(length=24), nullable=True))
op.alter_column('event_log', 'ip',
existing_type=mysql.TINYTEXT(),
type_=sa.TEXT(length=39),
existing_nullable=True)
op.alter_column('event_log', 'when_logged',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('now()'),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('event_log', 'when_logged',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('current_timestamp()'),
existing_nullable=False)
op.alter_column('event_log', 'ip',
existing_type=sa.TEXT(length=39),
type_=mysql.TINYTEXT(),
existing_nullable=True)
op.drop_column('event_log', 'version')
op.drop_column('event_log', 'game')
# ### end Alembic commands ###

View File

@ -0,0 +1,54 @@
"""pokken_fix_pokemon_uk
Revision ID: 3657efefc5a4
Revises: 4a02e623e5e6
Create Date: 2024-06-13 23:50:57.611998
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '3657efefc5a4'
down_revision = '4a02e623e5e6'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('pokken_pokemon_data', 'char_id',
existing_type=mysql.INTEGER(display_width=11),
nullable=True)
op.alter_column('pokken_pokemon_data', 'illustration_book_no',
existing_type=mysql.INTEGER(display_width=11),
nullable=False)
op.drop_constraint('pokken_pokemon_data_ibfk_1', table_name='pokken_pokemon_data', type_='foreignkey')
op.drop_index('pokken_pokemon_data_uk', table_name='pokken_pokemon_data')
op.create_unique_constraint('pokken_pokemon_uk', 'pokken_pokemon_data', ['user', 'illustration_book_no'])
op.create_foreign_key("pokken_pokemon_data_ibfk_1", "pokken_pokemon_data", "aime_user", ['user'], ['id'])
op.alter_column('pokken_profile', 'trainer_name',
existing_type=mysql.VARCHAR(length=16),
type_=sa.String(length=14),
existing_nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('pokken_profile', 'trainer_name',
existing_type=sa.String(length=14),
type_=mysql.VARCHAR(length=16),
existing_nullable=True)
op.drop_constraint('pokken_pokemon_data_ibfk_1', table_name='pokken_pokemon_data', type_='foreignkey')
op.drop_constraint('pokken_pokemon_uk', 'pokken_pokemon_data', type_='unique')
op.create_index('pokken_pokemon_data_uk', 'pokken_pokemon_data', ['user', 'char_id'], unique=True)
op.create_foreign_key("pokken_pokemon_data_ibfk_1", "pokken_pokemon_data", "aime_user", ['user'], ['id'])
op.alter_column('pokken_pokemon_data', 'illustration_book_no',
existing_type=mysql.INTEGER(display_width=11),
nullable=True)
op.alter_column('pokken_pokemon_data', 'char_id',
existing_type=mysql.INTEGER(display_width=11),
nullable=False)
# ### end Alembic commands ###

View File

@ -0,0 +1,50 @@
"""card_add_idm_chip_id
Revision ID: 48f4acc43a7e
Revises: 1e150d16ab6b
Create Date: 2024-06-21 23:53:34.369134
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '48f4acc43a7e'
down_revision = '1e150d16ab6b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('aime_card', sa.Column('idm', sa.String(length=16), nullable=True))
op.add_column('aime_card', sa.Column('chip_id', sa.BIGINT(), nullable=True))
op.alter_column('aime_card', 'access_code',
existing_type=mysql.VARCHAR(length=20),
nullable=False)
op.alter_column('aime_card', 'created_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('now()'),
existing_nullable=True)
op.create_unique_constraint(None, 'aime_card', ['chip_id'])
op.create_unique_constraint(None, 'aime_card', ['idm'])
op.create_unique_constraint(None, 'aime_card', ['access_code'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'aime_card', type_='unique')
op.drop_constraint(None, 'aime_card', type_='unique')
op.drop_constraint(None, 'aime_card', type_='unique')
op.alter_column('aime_card', 'created_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('CURRENT_TIMESTAMP'),
existing_nullable=True)
op.alter_column('aime_card', 'access_code',
existing_type=mysql.VARCHAR(length=20),
nullable=True)
op.drop_column('aime_card', 'chip_id')
op.drop_column('aime_card', 'idm')
# ### end Alembic commands ###

View File

@ -0,0 +1,48 @@
"""mai2_add_favs_rivals
Revision ID: 4a02e623e5e6
Revises: 8ad40a6e7be2
Create Date: 2024-06-08 19:02:43.856395
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '4a02e623e5e6'
down_revision = '8ad40a6e7be2'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('mai2_item_favorite_music',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user', sa.Integer(), nullable=False),
sa.Column('musicId', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['user'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user', 'musicId', name='mai2_item_favorite_music_uk'),
mysql_charset='utf8mb4'
)
op.create_table('mai2_user_rival',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user', sa.Integer(), nullable=False),
sa.Column('rival', sa.Integer(), nullable=False),
sa.Column('show', sa.Boolean(), server_default='0', nullable=False),
sa.ForeignKeyConstraint(['rival'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
sa.ForeignKeyConstraint(['user'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user', 'rival', name='mai2_user_rival_uk'),
mysql_charset='utf8mb4'
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('mai2_user_rival')
op.drop_table('mai2_item_favorite_music')
# ### end Alembic commands ###

View File

@ -0,0 +1,28 @@
"""cxb_add_playlog_grade
Revision ID: 7dc13e364e53
Revises: 2d024cf145a1
Create Date: 2024-05-28 22:31:22.264926
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '7dc13e364e53'
down_revision = '2d024cf145a1'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('cxb_playlog', sa.Column('grade', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('cxb_playlog', 'grade')
# ### end Alembic commands ###

View File

@ -1,18 +1,17 @@
"""mai2_buddies_support
Revision ID: 81e44dd6047a
Revises: d8950c7ce2fc
Revises: 6a7e8277763b
Create Date: 2024-03-12 19:10:37.063907
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "81e44dd6047a"
down_revision = "6a7e8277763b"
down_revision = "c143b80bd966"
branch_labels = None
depends_on = None

View File

@ -5,8 +5,6 @@ Revises:
Create Date: 2024-01-09 13:06:10.787432
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.

View File

@ -0,0 +1,30 @@
"""ongeki: fix clearStatus
Revision ID: 8ad40a6e7be2
Revises: 7dc13e364e53
Create Date: 2024-05-29 19:03:30.062157
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '8ad40a6e7be2'
down_revision = '7dc13e364e53'
branch_labels = None
depends_on = None
def upgrade():
op.alter_column('ongeki_score_best', 'clearStatus',
existing_type=mysql.TINYINT(display_width=1),
type_=sa.Integer(),
existing_nullable=False)
def downgrade():
op.alter_column('ongeki_score_best', 'clearStatus',
existing_type=sa.Integer(),
type_=mysql.TINYINT(display_width=1),
existing_nullable=False)

View File

@ -0,0 +1,89 @@
"""chunithm luminous
Revision ID: c143b80bd966
Revises: 6a7e8277763b
Create Date: 2024-04-20 14:06:54.630558
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy import Column, UniqueConstraint
from sqlalchemy.types import Integer, Boolean
# revision identifiers, used by Alembic.
revision = 'c143b80bd966'
down_revision = '6a7e8277763b'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
"chuni_profile_net_battle",
Column("id", Integer, primary_key=True, nullable=False),
Column("user", Integer, nullable=False),
Column("isRankUpChallengeFailed", Boolean),
Column("highestBattleRankId", Integer),
Column("battleIconId", Integer),
Column("battleIconNum", Integer),
Column("avatarEffectPoint", Integer),
mysql_charset="utf8mb4",
)
op.create_foreign_key(
None,
"chuni_profile_net_battle",
"aime_user",
["user"],
["id"],
ondelete="cascade",
onupdate="cascade",
)
op.create_table(
"chuni_item_cmission",
Column("id", Integer, primary_key=True, nullable=False),
Column("user", Integer, nullable=False),
Column("missionId", Integer, nullable=False),
Column("point", Integer),
UniqueConstraint("user", "missionId", name="chuni_item_cmission_uk"),
mysql_charset="utf8mb4",
)
op.create_foreign_key(
None,
"chuni_item_cmission",
"aime_user",
["user"],
["id"],
ondelete="cascade",
onupdate="cascade",
)
op.create_table(
"chuni_item_cmission_progress",
Column("id", Integer, primary_key=True, nullable=False),
Column("user", Integer, nullable=False),
Column("missionId", Integer, nullable=False),
Column("order", Integer),
Column("stage", Integer),
Column("progress", Integer),
UniqueConstraint(
"user", "missionId", "order", name="chuni_item_cmission_progress_uk"
),
mysql_charset="utf8mb4",
)
op.create_foreign_key(
None,
"chuni_item_cmission_progress",
"aime_user",
["user"],
["id"],
ondelete="cascade",
onupdate="cascade",
)
def downgrade():
op.drop_table("chuni_profile_net_battle")
op.drop_table("chuni_item_cmission")
op.drop_table("chuni_item_cmission_progress")

View File

@ -2,7 +2,8 @@ from typing import Any, Callable
from functools import wraps
import hashlib
import pickle
import logging
import core.logger
from core.config import CoreConfig
cfg: CoreConfig = None # type: ignore
@ -39,11 +40,11 @@ def cached(lifetime: int = 10, extra_key: Any = None) -> Callable:
try:
result = memcache.get(cache_key)
except pylibmc.Error as e:
logging.getLogger("database").error(f"Memcache failed: {e}")
core.logger.create_logger("Database").error(f"Memcache failed: {e}")
result = None
if result is not None:
logging.getLogger("database").debug(f"Cache hit: {result}")
core.logger.create_logger("Database").debug(f"Cache hit: {result}")
return result
# Generate output
@ -51,7 +52,7 @@ def cached(lifetime: int = 10, extra_key: Any = None) -> Callable:
# Cache output if allowed
if lifetime is not None and result is not None:
logging.getLogger("database").debug(f"Setting cache: {result}")
core.logger.create_logger("Database").debug(f"Setting cache: {result}")
memcache.set(cache_key, result, lifetime)
return result

View File

@ -1,15 +1,13 @@
import logging, coloredlogs
from typing import Optional
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy import create_engine
from logging.handlers import TimedRotatingFileHandler
import os
import secrets, string
import bcrypt
from hashlib import sha256
import alembic.config
import glob
import core.logger
from core.config import CoreConfig
from core.data.schema import *
from core.utils import Utils
@ -51,31 +49,7 @@ class Data:
if Data.base is None:
Data.base = BaseData(self.config, self.session)
self.logger = logging.getLogger("database")
# Prevent the logger from adding handlers multiple times
if not getattr(self.logger, "handler_set", None):
log_fmt_str = "[%(asctime)s] %(levelname)s | Database | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.config.server.log_dir, "db"),
encoding="utf-8",
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(self.config.database.loglevel)
coloredlogs.install(
cfg.database.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger.handler_set = True # type: ignore
self.logger = core.logger.create_logger("Database", cfg.database.loglevel)
def __alembic_cmd(self, command: str, *args: str) -> None:
old_dir = os.path.abspath(os.path.curdir)

View File

@ -1,8 +1,8 @@
from typing import Optional, Dict, List
from typing import Optional, List
from sqlalchemy import Table, Column, and_, or_
from sqlalchemy.sql.schema import ForeignKey, PrimaryKeyConstraint
from sqlalchemy.types import Integer, String, Boolean, JSON
from sqlalchemy.sql import func, select
from sqlalchemy.sql import select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
import re
@ -212,7 +212,7 @@ class ArcadeData(BaseData):
return f"{platform_code}{platform_rev:02d}A{serial_num:04d}{append:04d}" # 0x41 = A, 0x52 = R
def validate_keychip_format(self, serial: str) -> bool:
if re.fullmatch(r"^A[0-9]{2}[E|X][-]?[0-9]{2}[A-HJ-NP-Z][0-9]{4}([0-9]{4})?$", serial) is None:
if re.fullmatch(r"^A[0-9]{2}[E|X|C][-]?[0-9]{2}[A-HJ-NP-Z][0-9]{4}([0-9]{4})?$", serial) is None:
return False
return True
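An illustration of the relaxed keychip pattern above, which now also accepts C-region serials. `A69E01A8888` is the default keychip ID seen elsewhere in this changeset; the other serials are made up.

```python
# Examples against the relaxed pattern; only A69E01A8888 comes from this changeset.
import re

KEYCHIP_RE = r"^A[0-9]{2}[E|X|C][-]?[0-9]{2}[A-HJ-NP-Z][0-9]{4}([0-9]{4})?$"

for serial in ("A69E01A8888", "A63X-01A12345678", "A12C01B0001", "B69E01A8888"):
    print(serial, bool(re.fullmatch(KEYCHIP_RE, serial)))
# -> True, True, True, False (last one fails: must start with 'A')
```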

View File

@ -1,16 +1,17 @@
import json
import logging
from random import randrange
from typing import Any, Optional, Dict, List
from sqlalchemy.engine import Row
from sqlalchemy.engine.cursor import CursorResult
from sqlalchemy.engine.base import Connection
from sqlalchemy.sql import text, func, select
from sqlalchemy.sql import text, func
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy import MetaData, Table, Column
from sqlalchemy.types import Integer, String, TIMESTAMP, JSON
from sqlalchemy.engine import Row
from sqlalchemy.types import Integer, String, TIMESTAMP, JSON, INTEGER, TEXT
from sqlalchemy.schema import ForeignKey
from sqlalchemy.dialects.mysql import insert
import core.logger
from core.config import CoreConfig
metadata = MetaData()
@ -22,6 +23,12 @@ event_log = Table(
Column("system", String(255), nullable=False),
Column("type", String(255), nullable=False),
Column("severity", Integer, nullable=False),
Column("user", INTEGER, ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade")),
Column("arcade", INTEGER, ForeignKey("arcade.id", ondelete="cascade", onupdate="cascade")),
Column("machine", INTEGER, ForeignKey("machine.id", ondelete="cascade", onupdate="cascade")),
Column("ip", TEXT(39)),
Column("game", TEXT(4)),
Column("version", TEXT(24)),
Column("message", String(1000), nullable=False),
Column("details", JSON, nullable=False),
Column("when_logged", TIMESTAMP, nullable=False, server_default=func.now()),
@ -33,7 +40,7 @@ class BaseData:
def __init__(self, cfg: CoreConfig, conn: Connection) -> None:
self.config = cfg
self.conn = conn
self.logger = logging.getLogger("database")
self.logger = core.logger.create_logger("Database", cfg.database.loglevel)
async def execute(self, sql: str, opts: Dict[str, Any] = {}) -> Optional[CursorResult]:
res = None
@ -43,11 +50,11 @@ class BaseData:
res = self.conn.execute(text(sql), opts)
except SQLAlchemyError as e:
self.logger.error(f"SQLAlchemy error {e}")
self.logger.exception("SQLAlchemy error", exc_info=e)
return None
except UnicodeEncodeError as e:
self.logger.error(f"UnicodeEncodeError error {e}")
self.logger.exception("UnicodeEncodeError error", exc_info=e)
return None
except Exception:
@ -55,15 +62,15 @@ class BaseData:
res = self.conn.execute(sql, opts)
except SQLAlchemyError as e:
self.logger.error(f"SQLAlchemy error {e}")
self.logger.exception("SQLAlchemy error", exc_info=e)
return None
except UnicodeEncodeError as e:
self.logger.error(f"UnicodeEncodeError error {e}")
self.logger.exception("UnicodeEncodeError error", exc_info=e)
return None
except Exception:
self.logger.error(f"Unknown error")
except Exception as e:
self.logger.error("Unknown error", exc_info=e)
raise
return res
@ -75,12 +82,19 @@ class BaseData:
return randrange(10000, 9999999)
async def log_event(
self, system: str, type: str, severity: int, message: str, details: Dict = {}
self, system: str, type: str, severity: int, message: str, details: Dict = {}, user: int = None,
arcade: int = None, machine: int = None, ip: str = None, game: str = None, version: str = None
) -> Optional[int]:
sql = event_log.insert().values(
system=system,
type=type,
severity=severity,
user=user,
arcade=arcade,
machine=machine,
ip=ip,
game=game,
version=version,
message=message,
details=json.dumps(details),
)
@ -94,8 +108,8 @@ class BaseData:
return result.lastrowid
async def get_event_log(self, entries: int = 100) -> Optional[List[Dict]]:
sql = event_log.select().limit(entries).all()
async def get_event_log(self, entries: int = 100) -> Optional[List[Row]]:
sql = event_log.select().order_by(event_log.c.id.desc()).limit(entries)
result = await self.execute(sql)
if result is None:

View File

@ -1,6 +1,6 @@
from typing import Dict, List, Optional
from typing import List, Optional
from sqlalchemy import Table, Column, UniqueConstraint
from sqlalchemy.types import Integer, String, Boolean, TIMESTAMP
from sqlalchemy.types import Integer, String, Boolean, TIMESTAMP, BIGINT
from sqlalchemy.sql.schema import ForeignKey
from sqlalchemy.sql import func
from sqlalchemy.engine import Row
@ -11,12 +11,10 @@ aime_card = Table(
"aime_card",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column(
"user",
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
nullable=False,
),
Column("access_code", String(20)),
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
Column("access_code", String(20), nullable=False, unique=True),
Column("idm", String(16), unique=True),
Column("chip_id", BIGINT, unique=True),
Column("created_date", TIMESTAMP, server_default=func.now()),
Column("last_login_date", TIMESTAMP, onupdate=func.now()),
Column("is_locked", Boolean, server_default="0"),
@ -121,7 +119,27 @@ class CardData(BaseData):
result = await self.execute(sql)
if result is None:
self.logger.warn(f"Failed to update last login time for {access_code}")
async def get_card_by_idm(self, idm: str) -> Optional[Row]:
result = await self.execute(aime_card.select(aime_card.c.idm == idm))
if result:
return result.fetchone()
async def get_card_by_chip_id(self, chip_id: int) -> Optional[Row]:
result = await self.execute(aime_card.select(aime_card.c.chip_id == chip_id))
if result:
return result.fetchone()
async def set_chip_id_by_access_code(self, access_code: str, chip_id: int) -> Optional[Row]:
result = await self.execute(aime_card.update(aime_card.c.access_code == access_code).values(chip_id=chip_id))
if not result:
self.logger.error(f"Failed to update chip ID to {chip_id} for {access_code}")
async def set_idm_by_access_code(self, access_code: str, idm: str) -> Optional[Row]:
result = await self.execute(aime_card.update(aime_card.c.access_code == access_code).values(idm=idm))
if not result:
self.logger.error(f"Failed to update IDm to {idm} for {access_code}")
def to_access_code(self, luid: str) -> str:
"""
Given a FeliCa card's internal 16 hex character luid, convert it to a 0-padded 20 digit access code as a string
@ -132,4 +150,4 @@ class CardData(BaseData):
"""
Given a 20 digit access code as a string, return the 16 hex character luid
"""
return f"{int(access_code):0{16}x}"
return f"{int(access_code):0{16}X}"

View File

@ -1,7 +1,9 @@
from typing import Optional, List
from sqlalchemy import Table, Column
from sqlalchemy.types import Integer, String, TIMESTAMP
from sqlalchemy import Table, Column, text, UniqueConstraint
from sqlalchemy.dialects import mysql
from sqlalchemy.types import Integer, String, TIMESTAMP, JSON
from sqlalchemy.sql import func
from sqlalchemy.sql.schema import ForeignKey
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql import func, select
from sqlalchemy.engine import Row
@ -23,6 +25,18 @@ aime_user = Table(
mysql_charset="utf8mb4",
)
game_locks = Table(
"aime_user_game_locks",
metadata,
Column("id", Integer, nullable=False, primary_key=True, autoincrement=True),
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
Column("game", String(4), nullable=False),
Column("expires_at", TIMESTAMP, server_default=func.date_add(func.now(), text("INTERVAL 15 MINUTE"))),
Column("extra", JSON),
UniqueConstraint("user", "game", name="aime_user_title_locks"),
mysql_charset="utf8mb4",
)
class UserData(BaseData):
async def create_user(
self,
@ -120,3 +134,55 @@ class UserData(BaseData):
result = await self.execute(sql)
return result is not None
async def get_user_by_username(self, username: str) -> Optional[Row]:
result = await self.execute(aime_user.select(aime_user.c.username == username))
if result:
return result.fetchone()
async def check_lock_for_game(self, user_id: int, game: str):
sql = game_locks.select(
(game_locks.c.user == user_id)
& (game_locks.c.game == game)
& (func.timestampdiff(text("SECOND"), func.now(), game_locks.c.expires_at) > 0))
result = await self.execute(sql)
if result:
return result.fetchone()
return None
async def acquire_lock_for_game(self, user_id: int, game: str, extra: dict | None = None):
result = await self.check_lock_for_game(user_id, game)
if result is not None:
return result
sql = (
insert(game_locks)
.values(user=user_id, game=game, extra=extra)
.on_duplicate_key_update(
expires_at=func.date_add(func.now(), text("INTERVAL 15 MINUTE")),
extra=extra,
)
)
await self.execute(sql)
return None
async def release_lock_for_game(self, user_id: int, game: str):
sql = game_locks.delete((game_locks.c.user == user_id) & (game_locks.c.game == game))
await self.execute(sql)
async def extend_lock_for_game(self, user_id: int, game: str, extra: dict | None = None):
sql = (
insert(game_locks)
.values(user=user_id, game=game, extra=extra or {})
.on_duplicate_key_update(
expires_at=func.date_add(func.now(), text("INTERVAL 15 MINUTE")),
)
)
await self.execute(sql)
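For context, a minimal sketch of how a title handler is expected to use these lock helpers, paraphrasing the CHUNITHM login/upsert/logout changes later in this diff; the user_db instance and game code are placeholders:

# Sketch only; assumes an initialized UserData instance `user_db` and a 4-letter game code.
async def play_credit(user_db, user_id: int, game: str) -> bool:
    if await user_db.acquire_lock_for_game(user_id, game) is not None:
        return False  # an unexpired lock already exists, refuse the login
    await user_db.extend_lock_for_game(user_id, game)   # refresh the 15 minute expiry while upserting
    await user_db.release_lock_for_game(user_id, game)  # drop the lock at logout
    return True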

View File

@ -1,10 +1,9 @@
import logging, coloredlogs
import logging
from typing import Any, Dict, List, Union, Optional
from starlette.requests import Request
from starlette.routing import Route, Mount
from starlette.responses import Response, PlainTextResponse, RedirectResponse
from starlette.applications import Starlette
from logging.handlers import TimedRotatingFileHandler
import jinja2
import bcrypt
import re
@ -18,7 +17,7 @@ from enum import Enum
from datetime import datetime, timezone
from os import path, environ, mkdir, W_OK, access
from core import CoreConfig, Utils
from core import CoreConfig, Utils, logger
from core.data import Data
class PermissionOffset(Enum):
@ -44,42 +43,21 @@ class ShopOwner():
self.permissions = perms
class UserSession():
def __init__(self, usr_id: int = 0, ip: str = "", perms: int = 0, ongeki_ver: int = 7):
def __init__(self, usr_id: int = 0, ip: str = "", perms: int = 0, ongeki_ver: int = 7, chunithm_ver: int = -1, maimai_version: int = -1):
self.user_id = usr_id
self.current_ip = ip
self.permissions = perms
self.ongeki_version = ongeki_ver
self.chunithm_version = chunithm_ver
self.maimai_version = maimai_version
class FrontendServlet():
def __init__(self, cfg: CoreConfig, config_dir: str) -> None:
self.config = cfg
log_fmt_str = "[%(asctime)s] Frontend | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
self.environment = jinja2.Environment(loader=jinja2.FileSystemLoader("."))
self.game_list: Dict[str, Dict[str, Any]] = {}
self.sn_cvt: Dict[str, str] = {}
self.logger = logging.getLogger("frontend")
if not hasattr(self.logger, "inited"):
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.config.server.log_dir, "frontend"),
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(cfg.frontend.loglevel)
coloredlogs.install(
level=cfg.frontend.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger.inited = True
self.sn_cvt: Dict[str, str] = {}
self.logger = logger.create_logger("Frontend", cfg.frontend.loglevel)
games = Utils.get_all_titles()
for game_dir, game_mod in games.items():
@ -132,6 +110,7 @@ class FrontendServlet():
]),
Mount("/sys", routes=[
Route("/", self.system.render_GET, methods=['GET']),
Route("/logs", self.system.render_logs, methods=['GET']),
Route("/lookup.user", self.system.lookup_user, methods=['GET']),
Route("/lookup.shop", self.system.lookup_shop, methods=['GET']),
Route("/add.user", self.system.add_user, methods=['POST']),
@ -174,7 +153,7 @@ class FE_Base():
def __init__(self, cfg: CoreConfig, environment: jinja2.Environment) -> None:
self.core_config = cfg
self.data = Data(cfg)
self.logger = logging.getLogger("frontend")
self.logger = logger.create_logger("Frontend", cfg.frontend.loglevel)
self.environment = environment
self.nav_name = "index"
@ -190,7 +169,7 @@ class FE_Base():
), media_type="text/html; charset=utf-8")
if sesh is None:
resp.delete_cookie("DIANA_SESH")
resp.delete_cookie("ARTEMIS_SESH")
return resp
def get_routes(self) -> List[Route]:
@ -213,7 +192,10 @@ class FE_Base():
sesh.user_id = tk['user_id']
sesh.current_ip = tk['current_ip']
sesh.permissions = tk['permissions']
sesh.chunithm_version = tk['chunithm_version']
sesh.maimai_version = tk['maimai_version']
sesh.ongeki_version = tk['ongeki_version']
if sesh.user_id <= 0:
self.logger.error("User session failed to validate due to an invalid ID!")
return UserSession()
@ -238,7 +220,7 @@ class FE_Base():
return UserSession()
def validate_session(self, request: Request) -> Optional[UserSession]:
sesh = request.cookies.get('DIANA_SESH', "")
sesh = request.cookies.get('ARTEMIS_SESH', "")
if not sesh:
return None
@ -257,7 +239,17 @@ class FE_Base():
def encode_session(self, sesh: UserSession, exp_seconds: int = 86400) -> str:
try:
return jwt.encode({ "user_id": sesh.user_id, "current_ip": sesh.current_ip, "permissions": sesh.permissions, "ongeki_version": sesh.ongeki_version, "exp": int(datetime.now(tz=timezone.utc).timestamp()) + exp_seconds }, b64decode(self.core_config.frontend.secret), algorithm="HS256")
return jwt.encode({
"user_id": sesh.user_id,
"current_ip": sesh.current_ip,
"permissions": sesh.permissions,
"ongeki_version": sesh.ongeki_version,
"chunithm_version": sesh.chunithm_version,
"maimai_version": sesh.maimai_version,
"exp": int(datetime.now(tz=timezone.utc).timestamp()) + exp_seconds },
b64decode(self.core_config.frontend.secret),
algorithm="HS256"
)
except jwt.InvalidKeyError:
self.logger.error("Failed to encode User session because the secret is invalid!")
return ""
@ -289,7 +281,7 @@ class FE_Gate(FE_Base):
error=err,
sesh=vars(UserSession()),
), media_type="text/html; charset=utf-8")
resp.delete_cookie("DIANA_SESH")
resp.delete_cookie("ARTEMIS_SESH")
return resp
async def render_login(self, request: Request):
@ -305,8 +297,12 @@ class FE_Gate(FE_Base):
uid = await self.data.card.get_user_id_from_card(access_code)
if uid is None:
self.logger.debug(f"Failed to find user for card {access_code}")
return RedirectResponse("/gate/?e=1", 303)
user = await self.data.user.get_user_by_username(access_code) # Lookup as username
if not user:
self.logger.debug(f"Failed to find user for card/username {access_code}")
return RedirectResponse("/gate/?e=1", 303)
uid = user['id']
user = await self.data.user.get_user(uid)
if user is None:
@ -335,7 +331,7 @@ class FE_Gate(FE_Base):
usr_sesh = self.encode_session(sesh)
self.logger.debug(f"Created session with JWT {usr_sesh}")
resp = RedirectResponse("/user/", 303)
resp.set_cookie("DIANA_SESH", usr_sesh)
resp.set_cookie("ARTEMIS_SESH", usr_sesh)
return resp
@ -374,7 +370,7 @@ class FE_Gate(FE_Base):
usr_sesh = self.encode_session(sesh)
self.logger.debug(f"Created session with JWT {usr_sesh}")
resp = RedirectResponse("/user/", 303)
resp.set_cookie("DIANA_SESH", usr_sesh)
resp.set_cookie("ARTEMIS_SESH", usr_sesh)
return resp
@ -492,7 +488,7 @@ class FE_User(FE_Base):
async def render_logout(self, request: Request):
resp = RedirectResponse("/gate/", 303)
resp.delete_cookie("DIANA_SESH")
resp.delete_cookie("ARTEMIS_SESH")
return resp
async def edit_card(self, request: Request) -> RedirectResponse:
@ -781,6 +777,35 @@ class FE_System(FE_Base):
cabadd={"id": cab_id, "serial": serial},
), media_type="text/html; charset=utf-8")
async def render_logs(self, request: Request):
template = self.environment.get_template("core/templates/sys/logs.jinja")
events = []
usr_sesh = self.validate_session(request)
if not usr_sesh or not self.test_perm(usr_sesh.permissions, PermissionOffset.SYSADMIN):
return RedirectResponse("/sys/?e=11", 303)
logs = await self.data.base.get_event_log()
if not logs:
logs = []
for log in logs:
evt = log._asdict()
if not evt['user']: evt["user"] = "NONE"
if not evt['arcade']: evt["arcade"] = "NONE"
if not evt['machine']: evt["machine"] = "NONE"
if not evt['ip']: evt["ip"] = "NONE"
if not evt['game']: evt["game"] = "NONE"
if not evt['version']: evt["version"] = "NONE"
evt['when_logged'] = evt['when_logged'].strftime("%x %X")
events.append(evt)
return Response(template.render(
title=f"{self.core_config.server.name} | Event Logs",
sesh=vars(usr_sesh),
events=events
), media_type="text/html; charset=utf-8")
class FE_Arcade(FE_Base):
async def render_GET(self, request: Request):
template = self.environment.get_template("core/templates/arcade/index.jinja")
@ -847,19 +872,10 @@ class FE_Machine(FE_Base):
arcade={}
), media_type="text/html; charset=utf-8")
cfg_dir = environ.get("DIANA_CFG_DIR", "config")
cfg_dir = environ.get("ARTEMIS_CFG_DIR", "config")
cfg: CoreConfig = CoreConfig()
if path.exists(f"{cfg_dir}/core.yaml"):
cfg.update(yaml.safe_load(open(f"{cfg_dir}/core.yaml")))
if not path.exists(cfg.server.log_dir):
mkdir(cfg.server.log_dir)
if not access(cfg.server.log_dir, W_OK):
print(
f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
)
exit(1)
fe = FrontendServlet(cfg, cfg_dir)
app = Starlette(cfg.server.is_develop, fe.get_routes(), on_startup=[fe.startup])

core/logger/__init__.py Normal file
View File

@ -0,0 +1,76 @@
import logging
import logging.handlers
import os
from typing import TYPE_CHECKING, Optional
from .adapter import ExtraFormatter, MergedLoggerAdapter
from .discord import DiscordLogHandler
if TYPE_CHECKING:
from core.config import CoreConfig
__all__ = (
"init_root_logger",
"create_logger",
)
core_cfg: Optional["CoreConfig"] = None
artemis_logger = logging.getLogger("artemis")
log_fmt_str = "[%(asctime)s] %(title)s | %(levelname)s | %(message)s"
log_fmt = ExtraFormatter(log_fmt_str)
def init_root_logger(cfg: "CoreConfig"):
global core_cfg
core_cfg = cfg
if not os.path.exists(cfg.logging.log_dir):
os.mkdir(cfg.logging.log_dir)
if not os.access(cfg.logging.log_dir, os.W_OK):
print(
f"Log directory {cfg.logging.log_dir} NOT writable, please check permissions"
)
exit(1)
console_handler = logging.StreamHandler()
console_handler.setFormatter(log_fmt)
artemis_logger.addHandler(console_handler)
if cfg.logging.discord_webhook:
artemis_logger.addHandler(DiscordLogHandler(cfg.logging.discord_webhook, cfg.logging.discord_who_to_ping))
def create_logger(
title: str,
level: Optional["logging._Level"] = None,
*,
logger_name: Optional[str] = None,
):
logger_name = logger_name or title.lower().replace(" ", "_")
logger = artemis_logger.getChild(logger_name)
if getattr(logger, "initialized", False):
logger = MergedLoggerAdapter(logger, {"title": title})
logger.setLevel(level or logging.INFO)
return logger
if core_cfg is None:
raise RuntimeError("Root logger has not been initialized.")
file_handler = logging.handlers.TimedRotatingFileHandler(
os.path.join(core_cfg.logging.log_dir, f"{logger_name}.log"),
when="d",
backupCount=10,
)
file_handler.setFormatter(log_fmt)
logger.addHandler(file_handler)
logger.setLevel(level or logging.INFO)
logger.initialized = True
return MergedLoggerAdapter(logger, {"title": title})

core/logger/adapter.py Normal file
View File

@ -0,0 +1,46 @@
from logging import Formatter, LogRecord, LoggerAdapter
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Any, MutableMapping
# skip natural LogRecord attributes
# http://docs.python.org/library/logging.html#logrecord-attributes
RESERVED_ATTRS: set[str] = {
'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
'funcName', 'levelname', 'levelno', 'lineno', 'module',
'msecs', 'message', 'msg', 'name', 'pathname', 'process',
'processName', 'relativeCreated', 'stack_info', 'thread', 'threadName'}
class MergedLoggerAdapter(LoggerAdapter):
def process(self, msg: "Any", kwargs: "MutableMapping[str, Any]") -> tuple["Any", "MutableMapping[str, Any]"]:
kwargs["extra"] = {**self.extra, **kwargs.get("extra", {})}
return msg, kwargs
class ExtraFormatter(Formatter):
def format(self, record: LogRecord):
s = super().format(record)
extras = {}
for key, value in record.__dict__.items():
if key in {"title", "taskName"} or key in RESERVED_ATTRS or (hasattr(key, "startswith") and key.startswith("_")):
continue
extras[key] = value
if len(extras) == 0:
return s
str_extras = str(extras)
if len(str_extras) >= 300:
str_extras = str_extras[:297] + "..."
s += " " + str_extras
return s
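A tiny sketch of what ExtraFormatter does with non-reserved extras; the logger name and field values below are invented for illustration:

import logging
from core.logger.adapter import ExtraFormatter

handler = logging.StreamHandler()
handler.setFormatter(ExtraFormatter("[%(asctime)s] %(title)s | %(levelname)s | %(message)s"))
log = logging.getLogger("demo")
log.addHandler(handler)
log.warning("profile locked", extra={"title": "Chunithm", "userId": 1234})
# prints roughly: [2024-07-22 12:00:29] Chunithm | WARNING | profile locked {'userId': 1234}
# "title" feeds the format string; "userId" is not a reserved attribute, so it gets appended.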

core/logger/discord.py Normal file
View File

@ -0,0 +1,166 @@
import asyncio
import logging
import time
from datetime import datetime
from http import HTTPStatus
from typing import TYPE_CHECKING, Optional, Union
import aiohttp
if TYPE_CHECKING:
from logging import _Level, LogRecord
_log_level_to_discord_colors = {
"CRITICAL": 0xFF0000,
"ERROR": 0xCC0000,
"WARNING": 0xFFCC00,
logging.CRITICAL: 0xFF0000,
logging.ERROR: 0xCC0000,
logging.WARNING: 0xFFCC00,
}
class DiscordLogHandler(logging.Handler):
"""
A handler for Python's logging infrastructure that batches log reports
and sends error/critical logs directly.
"""
def __init__(self, webhook: str, who_to_ping: Optional[list[Union[str, int]]] = None) -> None:
super().__init__(logging.WARNING)
self._webhook = webhook
self._who_to_ping = who_to_ping or []
self._client = None
self._is_bucketing = False
self._bucket_start = datetime.now()
self._bucket_data: dict[int, list["LogRecord"]] = {
logging.WARNING: [],
}
self._tasks = set()
def emit(self, record: "LogRecord") -> None:
if record.levelno in {logging.CRITICAL, logging.ERROR}:
return self._send_log_to_discord(record)
if record.levelno not in self._bucket_data:
return
self._bucket_data[record.levelno].append(record)
if not self._is_bucketing:
self._is_bucketing = True
self._bucket_start = datetime.now()
async def schedule():
await asyncio.sleep(60)
self._send_bucket_data()
t = asyncio.ensure_future(schedule())
self._tasks.add(t)
t.add_done_callback(self._tasks.discard)
def _send_bucket_data(self):
loglevels = sorted(self._bucket_data.keys(), reverse=True)
log_lines = []
log_counts: dict[int, int] = {}
for level in loglevels:
log_counts[level] = 0
for record in self._bucket_data[level]:
record_title = record.__dict__.get("title", record.name)
log_counts[level] += 1
log_lines.append(f"{record_title} | {record.levelname} | {record.message}")
log_summary = "\n".join(log_lines)
if len(log_summary) > 1994:
log_summary = f"{log_summary[:1991]}..."
body = {
"content": f"```{log_summary}```",
"embeds": [
{
"title": "ARTEMiS log summary",
"fields": [
{"name": logging.getLevelName(level), "value": str(count), "inline": True}
for level, count in log_counts.items()
],
"description": f"Log summary for <t:{int(self._bucket_start.timestamp())}:f> to <t:{int(time.time())}:f>",
"color": _log_level_to_discord_colors.get(loglevels[0], 0x000000),
"timestamp": datetime.now().isoformat(),
}
]
}
self._post_data(body)
self._is_bucketing = False
for level in loglevels:
self._bucket_data[level] = []
def _send_log_to_discord(self, record: "LogRecord"):
message = record.message
if len(message) > 4090:
message = record.message[:4087] + "..."
body = {
"content": "",
"embeds": [
{
"title": f"[{record.levelname}] {record.__dict__.get('title', record.name)}",
"description": f"```{message}```",
"color": _log_level_to_discord_colors.get(record.levelname, 0x000000),
"timestamp": datetime.fromtimestamp(record.created).isoformat(),
},
],
}
if record.levelno == logging.CRITICAL:
body["content"] = f"CRITICAL ERROR: {self._get_pings()}"
if record.levelno == logging.ERROR:
body["content"] = f"ERROR: {self._get_pings()}"
self._post_data(body)
def _get_pings(self) -> str:
pings = []
for ping in self._who_to_ping:
if isinstance(ping, int):
pings.append(f"<@{ping}>")
else:
pings.append(str(ping))
return " ".join(pings)
def _post_data(self, body: dict):
t = asyncio.ensure_future(self._post_data_inner(body))
self._tasks.add(t)
t.add_done_callback(self._tasks.discard)
async def _post_data_inner(self, body: dict):
scale_retry_debounce = 2
if self._client is None:
self._client = aiohttp.ClientSession()
while True:
response = await self._client.post(self._webhook, json=body)
if response.status == HTTPStatus.TOO_MANY_REQUESTS:
data = await response.json()
retry_after = data.get("retry_after")
if isinstance(retry_after, (int, float)):
await asyncio.sleep(retry_after * scale_retry_debounce)
scale_retry_debounce = scale_retry_debounce ** 2
continue
elif not response.ok:
print(f"[ERROR] Failed to send log to Discord: {response.status_code} {response.text}")
break
else:
break
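To show how the handler above is meant to be driven: a sketch only, where the webhook URL and Discord user ID are placeholders, and everything runs inside an asyncio event loop because emit() schedules tasks on it:

import asyncio
import logging
from core.logger.discord import DiscordLogHandler

async def main():
    log = logging.getLogger("artemis")
    log.addHandler(logging.StreamHandler())  # a console handler first, as init_root_logger does
    log.addHandler(DiscordLogHandler("https://discord.com/api/webhooks/...", who_to_ping=[123456789012345678]))
    log.error("Database connection lost")  # ERROR/CRITICAL are posted to the webhook immediately
    log.warning("Slow query detected")     # WARNINGs are batched and flushed roughly once a minute
    await asyncio.sleep(65)                # keep the loop alive long enough for the batch task to fire

asyncio.run(main())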

View File

@ -1,12 +1,11 @@
from typing import Dict, Any, Optional
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
from starlette.requests import Request
from starlette.responses import PlainTextResponse
from datetime import datetime
from Crypto.Cipher import Blowfish
import pytz
import core.logger
from .config import CoreConfig
from .utils import Utils
from .title import TitleServlet
@ -18,27 +17,7 @@ class MuchaServlet:
def __init__(self, cfg: CoreConfig, cfg_dir: str) -> None:
self.config = cfg
self.config_dir = cfg_dir
self.logger = logging.getLogger("mucha")
log_fmt_str = "[%(asctime)s] Mucha | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.config.server.log_dir, "mucha"),
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(cfg.mucha.loglevel)
coloredlogs.install(level=cfg.mucha.loglevel, logger=self.logger, fmt=log_fmt_str)
self.logger = core.logger.create_logger("Mucha", cfg.mucha.loglevel)
self.data = Data(cfg)
for _, mod in TitleServlet.title_registry.items():

View File

@ -15,18 +15,18 @@
-moz-appearance: textfield;
}
</style>
<form id="login" style="max-width: 240px; min-width: 10%;" action="/gate/gate.login" method="post">
<form id="login" style="max-width: 240px; min-width: 15%;" action="/gate/gate.login" method="post">
<div class="form-group row">
<label for="access_code">Card Access Code</label><br>
<input form="login" class="form-control" name="access_code" id="access_code" type="number" placeholder="00000000000000000000" maxlength="20" required>
<label for="access_code">Access Code or Username</label><br>
<input form="login" class="form-control" name="access_code" id="access_code" placeholder="00000000000000000000" maxlength="20" required aria-describedby="access_code_help">
<div id="access_code_help" class="form-text">20 Digit access code from a card registered to your account, or your account username. (NOT your username from a game!)</div>
</div>
<div class="form-group row">
<label for="passwd">Password</label><br>
<input id="passwd" class="form-control" name="passwd" type="password" placeholder="password">
<input id="passwd" class="form-control" name="passwd" type="password" placeholder="password" aria-describedby="passwd_help">
<div id="passwd_help" class="form-text">Leave blank if registering for the webui. Your card must have been used on a game connected to this server to register.</div>
</div>
<p></p>
<input id="submit" class="btn btn-primary" style="display: block; margin: 0 auto;" form="login" type="submit" value="Login">
</form>
<h6>*To register for the webui, type in the access code of your card, as shown in a game, and leave the password field blank.</h6>
<h6>*If you have not registered a card with this server, you cannot create a webui account.</h6>
{% endblock content %}

View File

@ -51,6 +51,9 @@
<button type="submit" class="btn btn-primary">Search</button>
</form>
</div>
<div class="col-sm-6" style="max-width: 25%;">
<a href="/sys/logs"><button class="btn btn-primary">Event Logs</button></a>
</div>
{% endif %}
</div>
<div class="row" id="rowResult" style="margin: 10px;">

View File

@ -0,0 +1,198 @@
{% extends "core/templates/index.jinja" %}
{% block content %}
<h1>Event Logs</h1>
<table class="table table-dark table-striped-columns" id="tbl_events">
<caption>Viewing last 100 logs</caption>
<thead>
<tr>
<th>Severity</th>
<th>Timestamp</th>
<th>System</th>
<th>Name</th>
<th>User</th>
<th>Arcade</th>
<th>Machine</th>
<th>Game</th>
<th>Version</th>
<th>Message</th>
<th>Params</th>
</tr>
</thead>
{% if events is not defined or events|length == 0 %}
<tr>
<td colspan="11" style="text-align:center"><i>No Events</i></td>
</tr>
{% endif %}
</table>
<div id="div_tbl_ctrl">
<select id="sel_per_page" onchange="update_tbl()">
<option value="10" selected>10</option>
<option value="25">25</option>
<option value="50">50</option>
<option value="100">100</option>
</select>
&nbsp;
<button class="btn btn-primary" id="btn_prev" disabled onclick="chg_page(-1)"><<</button>
<button class="btn btn-primary" id="btn_next" onclick="chg_page(1)">>></button>
</div>
<script type="text/javascript">
{% if events is defined %}
const TBL_DATA = {{events}};
{% else %}
const TBL_DATA = [];
{% endif %}
var per_page = 0;
var page = 0;
function update_tbl() {
if (TBL_DATA.length == 0) { return; }
var tbl = document.getElementById("tbl_events");
for (var i = 0; i < per_page; i++) {
try{
tbl.deleteRow(1);
} catch {
break;
}
}
per_page = document.getElementById("sel_per_page").value;
if (per_page >= TBL_DATA.length) {
page = 0;
document.getElementById("btn_next").disabled = true;
document.getElementById("btn_prev").disabled = true;
}
for (var i = 0; i < per_page; i++) {
let off = (page * per_page) + i;
if (off >= TBL_DATA.length) {
if (page != 0) {
document.getElementById("btn_next").disabled = true;
document.getElementById("btn_prev").disabled = false;
}
break;
}
var data = TBL_DATA[off];
var row = tbl.insertRow(i + 1);
var cell_severity = row.insertCell(0);
switch (data.severity) {
case 10:
cell_severity.innerHTML = "DEBUG";
row.classList.add("table-success");
break;
case 20:
cell_severity.innerHTML = "INFO";
row.classList.add("table-info");
break;
case 30:
cell_severity.innerHTML = "WARN";
row.classList.add("table-warning");
break;
case 40:
cell_severity.innerHTML = "ERROR";
row.classList.add("table-danger");
break;
case 50:
cell_severity.innerHTML = "CRITICAL";
row.classList.add("table-danger");
break;
default:
cell_severity.innerHTML = "---";
row.classList.add("table-primary");
break;
}
var cell_ts = row.insertCell(1);
cell_ts.innerHTML = data.when_logged;
var cell_mod = row.insertCell(2);
cell_mod.innerHTML = data.system;
var cell_name = row.insertCell(3);
cell_name.innerHTML = data.type;
var cell_usr = row.insertCell(4);
if (data.user == 'NONE') {
cell_usr.innerHTML = "---";
} else {
cell_usr.innerHTML = "<a href=\"/user/" + data.user + "\">" + data.user + "</a>";
}
var cell_arcade = row.insertCell(5);
if (data.arcade == 'NONE') {
cell_arcade.innerHTML = "---";
} else {
cell_arcade.innerHTML = "<a href=\"/shop/" + data.arcade + "\">" + data.arcade + "</a>";
}
var cell_machine = row.insertCell(6);
if (data.machine == 'NONE') {
cell_machine.innerHTML = "---";
} else {
cell_machine.innerHTML = "<a href=\"/cab/" + data.machine + "\">" + data.machine + "</a>";
}
var cell_game = row.insertCell(7);
if (data.game == 'NONE') {
cell_game.innerHTML = "---";
} else {
cell_game.innerHTML = data.game;
}
var cell_version = row.insertCell(8);
if (data.version == 'NONE') {
cell_version.innerHTML = "---";
} else {
cell_version.innerHTML = data.version;
}
var cell_msg = row.insertCell(9);
if (data.message == '') {
cell_msg.innerHTML = "---";
} else {
cell_msg.innerHTML = data.message;
}
var cell_deets = row.insertCell(10);
if (data.details == '{}') {
cell_deets.innerHTML = "---";
} else {
cell_deets.innerHTML = data.details;
}
}
}
function chg_page(num) {
var max_page = TBL_DATA.length / per_page;
console.log(max_page);
page = page + num;
if (page > max_page && max_page >= 1) {
page = max_page;
document.getElementById("btn_next").disabled = true;
document.getElementById("btn_prev").disabled = false;
return;
} else if (page < 0) {
page = 0;
document.getElementById("btn_next").disabled = false;
document.getElementById("btn_prev").disabled = true;
return;
} else if (page == 0) {
document.getElementById("btn_next").disabled = false;
document.getElementById("btn_prev").disabled = true;
} else {
document.getElementById("btn_next").disabled = false;
document.getElementById("btn_prev").disabled = false;
}
update_tbl();
}
update_tbl();
</script>
{% endblock content %}

View File

@ -21,6 +21,8 @@ New Nickname too long
You must be logged in to perform this action
{% elif error == 10 %}
Invalid serial number
{% elif error == 11 %}
Access Denied
{% else %}
An unknown error occurred
{% endif %}

View File

@ -1,11 +1,10 @@
from typing import Dict, List, Tuple, Any
import json
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
from starlette.requests import Request
from starlette.responses import Response
from starlette.routing import Route
import core.logger
from core.config import CoreConfig
from core.data import Data
from core.utils import Utils
@ -23,7 +22,7 @@ class BaseServlet:
def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
self.core_cfg = core_cfg
self.game_cfg = None
self.logger = logging.getLogger("title")
self.logger = core.logger.create_logger("Title", core_cfg.title.loglevel)
@classmethod
def is_game_enabled(cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str) -> bool:
@ -99,30 +98,7 @@ class TitleServlet:
self.config = core_cfg
self.config_folder = cfg_folder
self.data = Data(core_cfg)
self.logger = logging.getLogger("title")
if not hasattr(self.logger, "initialized"):
log_fmt_str = "[%(asctime)s] Title | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.config.server.log_dir, "title"),
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(core_cfg.title.loglevel)
coloredlogs.install(
level=core_cfg.title.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger.initialized = True
self.logger = core.logger.create_logger("Title", core_cfg.title.loglevel)
plugins = Utils.get_all_titles()

View File

@ -8,6 +8,7 @@ import jwt
from base64 import b64decode
from datetime import datetime, timezone
import core.logger
from .config import CoreConfig
class Utils:
@ -28,7 +29,7 @@ class Utils:
ret[dir] = mod
except ImportError as e:
logging.getLogger("core").error(f"get_all_titles: {dir} - {e}")
core.logger.create_logger("Core").error(f"get_all_titles: {dir} - {e}")
raise
return ret
@ -49,7 +50,7 @@ class Utils:
def get_title_port_ssl(cls, cfg: CoreConfig):
if cls.real_title_port_ssl is not None: return cls.real_title_port_ssl
cls.real_title_port_ssl = cfg.server.proxy_port_ssl if cfg.server.is_using_proxy and cfg.server.proxy_port_ssl else Utils.get_title_port(cfg)
cls.real_title_port_ssl = cfg.server.proxy_port_ssl if cfg.server.is_using_proxy and cfg.server.proxy_port_ssl else 443
return cls.real_title_port_ssl

View File

@ -1,10 +1,11 @@
#!/usr/bin/env python3
import argparse
import logging
from os import mkdir, path, access, W_OK
import os
from os import path
import yaml
import asyncio
import core.logger
from core.data import Data
from core.config import CoreConfig
@ -30,17 +31,13 @@ if __name__ == "__main__":
cfg_dict = yaml.safe_load(open(f"{args.config}/core.yaml"))
cfg_dict.get("database", {})["loglevel"] = "info"
cfg.update(cfg_dict)
os.environ["ARTEMIS_CFG_DIR"] = args.config
if not path.exists(cfg.server.log_dir):
mkdir(cfg.server.log_dir)
if not access(cfg.server.log_dir, W_OK):
print(
f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
)
exit(1)
core.logger.init_root_logger(cfg)
data = Data(cfg)
database_logger = core.logger.create_logger("Database", cfg.database.loglevel)
if args.action == "create":
data.create_database()
@ -50,7 +47,7 @@ if __name__ == "__main__":
elif args.action == "downgrade":
if not args.version:
logging.getLogger("database").error(f"Version argument required for downgrade")
database_logger.error("Version argument required for downgrade")
exit(1)
data.schema_downgrade(args.version)
@ -71,4 +68,4 @@ if __name__ == "__main__":
loop.run_until_complete(data.create_revision_auto(args.message))
else:
logging.getLogger("database").info(f"Unknown action {args.action}")
database_logger.info(f"Unknown action {args.action}")

View File

@ -22,6 +22,9 @@ version:
14:
rom: 2.15.00
data: 2.15.00
15:
rom: 2.20.00
data: 2.20.00
crypto:
encrypted_only: False

View File

@ -11,10 +11,14 @@ server:
is_using_proxy: False
proxy_port: 0
proxy_port_ssl: 0
log_dir: "logs"
check_arcade_ip: False
strict_ip_checking: False
logging:
log_dir: "logs"
discord_webhook: ""
discord_who_to_ping: []
title:
loglevel: "info"
reboot_start_time: "04:00"

View File

@ -12,3 +12,6 @@ uploads:
photos_dir: ""
movies: False
movies_dir: ""
crypto:
encrypted_only: False

View File

@ -3,10 +3,11 @@ import argparse
import yaml
from os import path, environ
import uvicorn
import logging
import asyncio
from core import CoreConfig, AimedbServlette
import core.logger
from core.aimedb import AimedbServlette
from core.config import CoreConfig
async def launch_main(cfg: CoreConfig, ssl: bool) -> None:
if ssl:
@ -86,7 +87,7 @@ async def launcher(cfg: CoreConfig, ssl: bool) -> None:
return_when=asyncio.FIRST_COMPLETED,
)
logging.getLogger("core").info("Shutdown")
core.logger.create_logger("Core").info("Shutdown")
for pending_task in pending:
pending_task.cancel("Another service died, server is shutting down")

mypy.ini Normal file
View File

@ -0,0 +1,2 @@
[mypy]
plugins = sqlmypy

read.py
View File

@ -5,13 +5,11 @@ import os
import yaml
from os import path
import logging
import coloredlogs
import asyncio
from logging.handlers import TimedRotatingFileHandler
from typing import List, Optional
from core import CoreConfig, Utils
from core import CoreConfig, Utils, logger
class BaseReader:
@ -23,8 +21,8 @@ class BaseReader:
opt_dir: Optional[str],
extra: Optional[str],
) -> None:
self.logger = logging.getLogger("reader")
self.config = config
self.logger = logger.create_logger("Reader")
self.bin_dir = bin_dir
self.opt_dir = opt_dir
self.version = version
@ -93,25 +91,13 @@ if __name__ == "__main__":
config = CoreConfig()
if path.exists(f"{args.config}/core.yaml"):
config.update(yaml.safe_load(open(f"{args.config}/core.yaml")))
os.environ["ARTEMIS_CFG_DIR"] = args.config
log_fmt_str = "[%(asctime)s] Reader | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
logger = logging.getLogger("reader")
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(config.server.log_dir, "reader"), when="d", backupCount=10
logger = logger.create_logger(
"Reader",
logging.DEBUG if config.server.is_develop else logging.INFO
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
logger.addHandler(fileHandler)
logger.addHandler(consoleHandler)
log_lv = logging.DEBUG if config.server.is_develop else logging.INFO
logger.setLevel(log_lv)
coloredlogs.install(level=log_lv, logger=logger, fmt=log_fmt_str)
if args.game is None or args.version is None:
logger.error("Game or version not specified")

View File

@ -1,3 +1,13 @@
This is a fork of ARTEMiS specifically tailored to the needs of CozyNet, an invite-only arcade network
(mostly CHUNITHM).
Changes will be upstreamed when we believe they are helpful and upstream accepts them (so current-version
(n-0) support will not be upstreamed).
NO SUPPORT IS AVAILABLE. Do not use this fork if you don't know what you're doing. Good luck.
----
# ARTEMiS
A network service emulator for games running SEGA'S ALL.NET service, and similar.
@ -10,6 +20,7 @@ Games listed below have been tested and confirmed working. Only game versions ol
+ CHUNITHM INTL
+ SUPERSTAR
+ SUPERSTAR PLUS
+ NEW
+ NEW PLUS
+ SUN

View File

@ -21,4 +21,5 @@ starlette
asyncio
uvicorn
alembic
python-multipart
python-multipart
aiohttp

shell.nix Normal file
View File

@ -0,0 +1,38 @@
let
pkgs = import <nixpkgs> { };
pythonPackages = pkgs.python39Packages;
in
pkgs.mkShell {
name = "artemis";
venvDir = "./.venv";
NIX_LD = pkgs.lib.fileContents "${pkgs.stdenv.cc}/nix-support/dynamic-linker";
NIX_LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath [
pkgs.stdenv.cc.cc
pkgs.zlib
];
packages = with pkgs; [
ruff
];
buildInputs = with pkgs; [
pythonPackages.python
pythonPackages.venvShellHook
zlib
];
nativeBuildInputs = with pkgs; [
libmysqlclient
pkg-config
];
postVenvCreation = ''
unset SOURCE_DATE_EPOCH
pip install -r requirements.txt
'';
postShellHook = ''
export LD_LIBRARY_PATH=$NIX_LD_LIBRARY_PATH
'';
}

View File

@ -1,6 +1,4 @@
from datetime import datetime, timedelta
from typing import Dict, Any
import pytz
from typing import Dict
from core.config import CoreConfig
from titles.chuni.base import ChuniBase

View File

@ -1,6 +1,4 @@
from datetime import datetime, timedelta
from typing import Dict, Any
import pytz
from typing import Dict
from core.config import CoreConfig
from titles.chuni.base import ChuniBase

View File

@ -1,11 +1,10 @@
import logging
import json
import itertools
from datetime import datetime, timedelta
from time import strftime
import pytz
from typing import Dict, Any, List
import core.logger
from core.config import CoreConfig
from titles.chuni.const import ChuniConstants
from titles.chuni.database import ChuniData
@ -18,7 +17,7 @@ class ChuniBase:
self.game_cfg = game_cfg
self.data = ChuniData(core_cfg)
self.date_time_format = "%Y-%m-%d %H:%M:%S"
self.logger = logging.getLogger("chuni")
self.logger = core.logger.create_logger("Chunithm", logger_name="chuni")
self.game = ChuniConstants.GAME_CODE
self.version = ChuniConstants.VER_CHUNITHM
@ -32,12 +31,21 @@ class ChuniBase:
loginBonus 30 gets looped, only show the login banner every 24 hours,
adds the bonus to items (itemKind 6)
"""
user_id = int(data["userId"])
lock_result = await self.data.user.acquire_lock_for_game(user_id, self.game, data)
if lock_result is not None:
self.logger.warn(
"User ID %d attempted to log in while having a profile lock expiring on %s.",
user_id, lock_result["expires_at"],
extra=lock_result["extra"] or {}
)
return {"returnCode": 0}
# ignore the login bonus if disabled in config
if not self.game_cfg.mods.use_login_bonus:
return {"returnCode": 1}
user_id = data["userId"]
login_bonus_presets = await self.data.static.get_login_bonus_presets(self.version)
for preset in login_bonus_presets:
@ -121,6 +129,9 @@ class ChuniBase:
async def handle_game_logout_api_request(self, data: Dict) -> Dict:
# self.data.base.log_event("chuni", "logout", logging.INFO, {"version": self.version, "user": data["userId"]})
user_id = int(data["userId"])
await self.data.user.release_lock_for_game(user_id, self.game)
return {"returnCode": 1}
async def handle_get_game_charge_api_request(self, data: Dict) -> Dict:
@ -130,19 +141,22 @@ class ChuniBase:
return {"length": 0, "gameChargeList": []}
charges = []
for x in range(len(game_charge_list)):
charges.append(
{
"orderId": x,
"chargeId": game_charge_list[x]["chargeId"],
"price": 1,
"startDate": "2017-12-05 07:00:00.0",
"endDate": "2099-12-31 00:00:00.0",
"salePrice": 1,
"saleStartDate": "2017-12-05 07:00:00.0",
"saleEndDate": "2099-12-31 00:00:00.0",
}
)
for _, charge_list in itertools.groupby(game_charge_list, key=lambda x: x["chargeId"] // 1000):
for price, charge in enumerate(charge_list):
charges.append(
{
"orderId": len(charges),
"chargeId": charge["chargeId"],
"price": price + 1,
"startDate": "2017-12-05 07:00:00.0",
"endDate": "2099-12-31 00:00:00.0",
"salePrice": 1,
"saleStartDate": "2099-12-31 07:00:00.0",
"saleEndDate": "2099-12-31 00:00:00.0",
}
)
return {"length": len(charges), "gameChargeList": charges}
async def handle_get_game_event_api_request(self, data: Dict) -> Dict:
@ -201,6 +215,41 @@ class ChuniBase:
return {"type": data["type"], "length": 0, "gameSaleList": []}
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
place_id = int(data["placeId"])
client_id = data["clientId"]
if not self.core_cfg.server.allow_unregistered_serials:
machine = await self.data.arcade.get_machine(client_id)
if machine is None:
self.logger.warning(
"Unrecognized serial %s attempted to reach %s title server.",
client_id, self.game
)
return {"returnCode": 0}
if machine["game"] is not None and machine["game"] != self.game:
self.logger.warning(
"Serial %s attempted to reach the title server for a different game.",
client_id,
extra={
"requestedGame": self.game,
"expectedGame": machine["game"],
}
)
return {"returnCode": 0}
if place_id != machine["arcade"]:
self.logger.warning(
"Serial %s attempted to reach %s title server from a different arcade.",
client_id, self.game,
extra={
"requestedPlaceId": place_id,
"expectedPlaceId": machine["arcade"]
}
)
return {"returnCode": 0}
# if reboot start/end time is not defined use the default behavior of being a few hours ago
if self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "":
reboot_start = datetime.strftime(
@ -233,9 +282,9 @@ class ChuniBase:
"rebootStartTime": reboot_start,
"rebootEndTime": reboot_end,
"isBackgroundDistribute": "false",
"maxCountCharacter": 300,
"maxCountItem": 300,
"maxCountMusic": 300,
"maxCountCharacter": 700,
"maxCountItem": 2200,
"maxCountMusic": 100,
},
"isDumpUpload": "false",
"isAou": "false",
@ -262,7 +311,11 @@ class ChuniBase:
}
async def handle_get_user_character_api_request(self, data: Dict) -> Dict:
characters = await self.data.item.get_characters(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
characters = await self.data.item.get_characters(data["userId"], next_idx, max_ct)
if characters is None:
return {
"userId": data["userId"],
@ -272,19 +325,15 @@ class ChuniBase:
}
character_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
for x in range(next_idx, len(characters)):
tmp = characters[x]._asdict()
for character in characters:
tmp = character._asdict()
tmp.pop("user")
tmp.pop("id")
character_list.append(tmp)
if len(character_list) >= max_ct:
break
if len(characters) >= next_idx + max_ct:
if len(characters) == max_ct:
next_idx += max_ct
else:
next_idx = -1
@ -320,7 +369,11 @@ class ChuniBase:
}
async def handle_get_user_course_api_request(self, data: Dict) -> Dict:
user_course_list = await self.data.score.get_courses(data["userId"])
next_idx = int(data.get("nextIndex", 0))
max_ct = int(data.get("maxCount", 300))
user_course_list = await self.data.score.get_courses(data["userId"], next_idx, max_ct)
if user_course_list is None:
return {
"userId": data["userId"],
@ -330,19 +383,15 @@ class ChuniBase:
}
course_list = []
next_idx = int(data.get("nextIndex", 0))
max_ct = int(data.get("maxCount", 300))
for x in range(next_idx, len(user_course_list)):
tmp = user_course_list[x]._asdict()
for user_course in user_course_list:
tmp = user_course._asdict()
tmp.pop("user")
tmp.pop("id")
course_list.append(tmp)
if len(user_course_list) >= max_ct:
break
if len(user_course_list) >= next_idx + max_ct:
if len(user_course_list) == max_ct:
next_idx += max_ct
else:
next_idx = -1
@ -398,8 +447,10 @@ class ChuniBase:
async def handle_get_user_rival_data_api_request(self, data: Dict) -> Dict:
p = await self.data.profile.get_rival(data["rivalId"])
if p is None:
return {}
userRivalData = {
"rivalId": p.user,
"rivalName": p.userName
@ -410,63 +461,51 @@ class ChuniBase:
}
async def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict:
rival_id = data["rivalId"]
rival_id = int(data["rivalId"])
next_index = int(data["nextIndex"])
max_count = int(data["maxCount"])
levels = [int(x["level"]) for x in data["userRivalMusicLevelList"]]
user_rival_music_list = []
# Fetch all the rival music entries for the user
all_entries = await self.data.score.get_rival_music(rival_id)
entries = await self.data.score.get_scores(rival_id, next_index, max_count, levels)
# Process the entries based on max_count and nextIndex
for music in all_entries:
music_id = music["musicId"]
level = music["level"]
score = music["scoreMax"]
rank = music["scoreRank"]
for music_id, pbs in itertools.groupby([x._asdict() for x in entries], key=lambda x: x["musicId"]):
detail_list = [
{
"level": pb["level"],
"scoreMax": pb["scoreMax"],
"scoreRank": pb["scoreRank"],
}
for pb in pbs
]
# Create a music entry for the current music_id if it's unique
music_entry = next((entry for entry in user_rival_music_list if entry["musicId"] == music_id), None)
if music_entry is None:
music_entry = {
user_rival_music_list.append(
{
"musicId": music_id,
"length": 0,
"userRivalMusicDetailList": []
"length": len(detail_list),
"userRivalMusicDetailList": detail_list,
}
user_rival_music_list.append(music_entry)
# Create a level entry for the current level if it's unique or has a higher score
level_entry = next((entry for entry in music_entry["userRivalMusicDetailList"] if entry["level"] == level), None)
if level_entry is None:
level_entry = {
"level": level,
"scoreMax": score,
"scoreRank": rank
}
music_entry["userRivalMusicDetailList"].append(level_entry)
elif score > level_entry["scoreMax"]:
level_entry["scoreMax"] = score
level_entry["scoreRank"] = rank
# Calculate the length for each "musicId" by counting the unique levels
for music_entry in user_rival_music_list:
music_entry["length"] = len(music_entry["userRivalMusicDetailList"])
)
if len(entries) >= max_count:
next_index += max_count
else:
next_index = -1
# Prepare the result dictionary with user rival music data
result = {
return {
"userId": data["userId"],
"rivalId": data["rivalId"],
"nextIndex": str(next_index + len(user_rival_music_list[next_index: next_index + max_count]) if max_count <= len(user_rival_music_list[next_index: next_index + max_count]) else -1),
"userRivalMusicList": user_rival_music_list[next_index: next_index + max_count]
"nextIndex": next_index,
"userRivalMusicList": user_rival_music_list,
}
return result
async def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
user_fav_item_list = []
# still needs to be implemented on WebUI
# 1: Music, 2: User, 3: Character
# 1: Music, 2: Rival, 3: Character, 4: ???
fav_list = await self.data.item.get_all_favorites(
data["userId"], self.version, fav_kind=int(data["kind"])
)
@ -490,9 +529,11 @@ class ChuniBase:
return {"userId": data["userId"], "length": 0, "userFavoriteMusicList": []}
async def handle_get_user_item_api_request(self, data: Dict) -> Dict:
kind = int(int(data["nextIndex"]) / 10000000000)
next_idx = int(int(data["nextIndex"]) % 10000000000)
user_item_list = await self.data.item.get_items(data["userId"], kind)
next_idx = int(data["nextIndex"])
max_count = int(data["maxCount"])
kind = next_idx // 10000000000
next_idx = next_idx % 10000000000
user_item_list = await self.data.item.get_items(data["userId"], kind, next_idx, max_count)
if user_item_list is None or len(user_item_list) == 0:
return {
@ -503,20 +544,18 @@ class ChuniBase:
}
items: List[Dict[str, Any]] = []
for i in range(next_idx, len(user_item_list)):
tmp = user_item_list[i]._asdict()
for user_item in user_item_list:
tmp = user_item._asdict()
tmp.pop("user")
tmp.pop("id")
items.append(tmp)
if len(items) >= int(data["maxCount"]):
break
xout = kind * 10000000000 + next_idx + len(items)
if len(items) < int(data["maxCount"]):
if len(items) < max_count:
next_idx = 0
else:
next_idx = xout
next_idx = kind * 10000000000 + next_idx + len(items)
return {
"userId": data["userId"],
@ -571,7 +610,10 @@ class ChuniBase:
}
async def handle_get_user_music_api_request(self, data: Dict) -> Dict:
music_detail = await self.data.score.get_scores(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
music_detail = await self.data.score.get_scores(data["userId"], next_idx, max_ct)
if music_detail is None:
return {
"userId": data["userId"],
@ -580,53 +622,29 @@ class ChuniBase:
"userMusicList": [], # 240
}
song_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
music_list = []
for x in range(next_idx, len(music_detail)):
found = False
tmp = music_detail[x]._asdict()
tmp.pop("user")
tmp.pop("id")
for _, group in itertools.groupby([x._asdict() for x in music_detail], key=lambda x: x["musicId"]):
detail_list = []
for song in song_list:
score_buf = SCORE_BUFFER.get(str(data["userId"])) or []
if song["userMusicDetailList"][0]["musicId"] == tmp["musicId"]:
found = True
song["userMusicDetailList"].append(tmp)
song["length"] = len(song["userMusicDetailList"])
score_buf.append(tmp["musicId"])
SCORE_BUFFER[str(data["userId"])] = score_buf
for detail in group:
detail.pop("id")
detail.pop("user")
score_buf = SCORE_BUFFER.get(str(data["userId"])) or []
if not found and tmp["musicId"] not in score_buf:
song_list.append({"length": 1, "userMusicDetailList": [tmp]})
score_buf.append(tmp["musicId"])
SCORE_BUFFER[str(data["userId"])] = score_buf
detail_list.append(detail)
if len(song_list) >= max_ct:
break
music_list.append({"length": len(detail_list), "userMusicDetailList": detail_list})
for songIdx in range(len(song_list)):
for recordIdx in range(x+1, len(music_detail)):
if song_list[songIdx]["userMusicDetailList"][0]["musicId"] == music_detail[recordIdx]["musicId"]:
music = music_detail[recordIdx]._asdict()
music.pop("user")
music.pop("id")
song_list[songIdx]["userMusicDetailList"].append(music)
song_list[songIdx]["length"] += 1
if len(song_list) >= max_ct:
next_idx += len(song_list)
if len(music_detail) == max_ct:
next_idx += len(music_detail)
else:
next_idx = -1
SCORE_BUFFER[str(data["userId"])] = []
return {
"userId": data["userId"],
"length": len(song_list),
"length": len(music_list),
"nextIndex": next_idx,
"userMusicList": song_list, # 240
"userMusicList": music_list, # 240
}
async def handle_get_user_option_api_request(self, data: Dict) -> Dict:
@ -651,9 +669,12 @@ class ChuniBase:
return bytes([ord(c) for c in src]).decode("utf-8")
async def handle_get_user_preview_api_request(self, data: Dict) -> Dict:
profile = await self.data.profile.get_profile_preview(data["userId"], self.version)
user_id = int(data["userId"])
profile = await self.data.profile.get_profile_preview(user_id, self.version)
if profile is None:
return None
profile_character = await self.data.item.get_character(
data["userId"], profile["characterId"]
)
@ -665,10 +686,12 @@ class ChuniBase:
chara.pop("id")
chara.pop("user")
lock_result = await self.data.user.check_lock_for_game(user_id, self.game)
return {
"userId": data["userId"],
# Current Login State
"isLogin": False,
"isLogin": lock_result is not None,
"lastLoginDate": profile["lastPlayDate"],
# User Profile
"userName": profile["userName"],
@ -811,130 +834,165 @@ class ChuniBase:
upsert = data["upsertUserAll"]
user_id = data["userId"]
if "userData" in upsert:
try:
upsert["userData"][0]["userName"] = self.read_wtf8(
upsert["userData"][0]["userName"]
)
except Exception:
pass
await self.data.profile.put_profile_data(
user_id, self.version, upsert["userData"][0]
)
if "userDataEx" in upsert:
await self.data.profile.put_profile_data_ex(
user_id, self.version, upsert["userDataEx"][0]
)
if "userGameOption" in upsert:
await self.data.profile.put_profile_option(user_id, upsert["userGameOption"][0])
if "userGameOptionEx" in upsert:
await self.data.profile.put_profile_option_ex(
user_id, upsert["userGameOptionEx"][0]
)
if "userRecentRatingList" in upsert:
await self.data.profile.put_profile_recent_rating(
user_id, upsert["userRecentRatingList"]
)
if "userCharacterList" in upsert:
for character in upsert["userCharacterList"]:
await self.data.item.put_character(user_id, character)
if "userMapList" in upsert:
for map in upsert["userMapList"]:
await self.data.item.put_map(user_id, map)
if "userCourseList" in upsert:
for course in upsert["userCourseList"]:
await self.data.score.put_course(user_id, course)
if "userDuelList" in upsert:
for duel in upsert["userDuelList"]:
await self.data.item.put_duel(user_id, duel)
if "userItemList" in upsert:
for item in upsert["userItemList"]:
await self.data.item.put_item(user_id, item)
if "userActivityList" in upsert:
for activity in upsert["userActivityList"]:
await self.data.profile.put_profile_activity(user_id, activity)
if "userChargeList" in upsert:
for charge in upsert["userChargeList"]:
await self.data.profile.put_profile_charge(user_id, charge)
if "userMusicDetailList" in upsert:
for song in upsert["userMusicDetailList"]:
await self.data.score.put_score(user_id, song)
if "userPlaylogList" in upsert:
for playlog in upsert["userPlaylogList"]:
# convert the player names to utf-8
if playlog["playedUserName1"] is not None:
playlog["playedUserName1"] = self.read_wtf8(playlog["playedUserName1"])
if playlog["playedUserName2"] is not None:
playlog["playedUserName2"] = self.read_wtf8(playlog["playedUserName2"])
if playlog["playedUserName3"] is not None:
playlog["playedUserName3"] = self.read_wtf8(playlog["playedUserName3"])
await self.data.score.put_playlog(user_id, playlog, self.version)
if "userTeamPoint" in upsert:
team_points = upsert["userTeamPoint"]
try:
for tp in team_points:
if tp["teamId"] != '65535':
# Fetch the current team data
current_team = await self.data.profile.get_team_by_id(tp["teamId"])
# Calculate the new teamPoint
new_team_point = int(tp["teamPoint"]) + current_team["teamPoint"]
# Prepare the data to update
team_data = {
"teamPoint": new_team_point
}
# Update the team data
await self.data.profile.update_team(tp["teamId"], team_data)
except:
pass # Probably a better way to catch if the team is not set yet (new profiles), but let's just pass
if "userMapAreaList" in upsert:
for map_area in upsert["userMapAreaList"]:
await self.data.item.put_map_area(user_id, map_area)
if "userOverPowerList" in upsert:
for overpower in upsert["userOverPowerList"]:
await self.data.profile.put_profile_overpower(user_id, overpower)
if "userEmoneyList" in upsert:
for emoney in upsert["userEmoneyList"]:
await self.data.profile.put_profile_emoney(user_id, emoney)
if "userLoginBonusList" in upsert:
for login in upsert["userLoginBonusList"]:
await self.data.item.put_login_bonus(
user_id, self.version, login["presetId"], isWatched=True
)
if "userRecentPlayerList" in upsert: # TODO: Seen in Air, maybe implement sometime
for rp in upsert["userRecentPlayerList"]:
pass
for rating_type in {"userRatingBaseList", "userRatingBaseHotList", "userRatingBaseNextList"}:
if rating_type not in upsert:
continue
if int(user_id) & 0x1000000000001 == 0x1000000000001:
place_id = (int(user_id) & 0xFFFC00000000) >> 34
await self.data.profile.put_profile_rating(
user_id,
self.version,
rating_type,
upsert[rating_type],
)
self.logger.info("Guest play from place ID %d, ignoring.", place_id)
return {"returnCode": "1"}
await self.data.user.extend_lock_for_game(int(user_id), self.game)
with self.data.session.begin():
if "userData" in upsert:
try:
upsert["userData"][0]["userName"] = self.read_wtf8(
upsert["userData"][0]["userName"]
)
except Exception:
pass
await self.data.profile.put_profile_data(
user_id, self.version, upsert["userData"][0]
)
if "userDataEx" in upsert:
await self.data.profile.put_profile_data_ex(
user_id, self.version, upsert["userDataEx"][0]
)
if "userGameOption" in upsert:
await self.data.profile.put_profile_option(user_id, upsert["userGameOption"][0])
if "userGameOptionEx" in upsert:
await self.data.profile.put_profile_option_ex(
user_id, upsert["userGameOptionEx"][0]
)
if "userRecentRatingList" in upsert:
await self.data.profile.put_profile_recent_rating(
user_id, upsert["userRecentRatingList"]
)
if "userCharacterList" in upsert:
for character in upsert["userCharacterList"]:
await self.data.item.put_character(user_id, character)
if "userMapList" in upsert:
for map in upsert["userMapList"]:
await self.data.item.put_map(user_id, map)
if "userCourseList" in upsert:
for course in upsert["userCourseList"]:
await self.data.score.put_course(user_id, course)
if "userDuelList" in upsert:
for duel in upsert["userDuelList"]:
await self.data.item.put_duel(user_id, duel)
if "userItemList" in upsert:
for item in upsert["userItemList"]:
await self.data.item.put_item(user_id, item)
if "userActivityList" in upsert:
for activity in upsert["userActivityList"]:
await self.data.profile.put_profile_activity(user_id, activity)
if "userChargeList" in upsert:
for charge in upsert["userChargeList"]:
await self.data.profile.put_profile_charge(user_id, charge)
if "userMusicDetailList" in upsert:
for song in upsert["userMusicDetailList"]:
await self.data.score.put_score(user_id, song)
if "userTeamPoint" in upsert:
team_points = upsert["userTeamPoint"]
try:
for tp in team_points:
if tp["teamId"] != '65535':
# Fetch the current team data
current_team = await self.data.profile.get_team_by_id(tp["teamId"])
# Calculate the new teamPoint
new_team_point = int(tp["teamPoint"]) + current_team["teamPoint"]
# Prepare the data to update
team_data = {
"teamPoint": new_team_point
}
# Update the team data
await self.data.profile.update_team(tp["teamId"], team_data)
except Exception:
    # The team may not exist yet (e.g. a brand-new profile); skip the team point update in that case.
    pass
if "userMapAreaList" in upsert:
for map_area in upsert["userMapAreaList"]:
await self.data.item.put_map_area(user_id, map_area)
if "userOverPowerList" in upsert:
for overpower in upsert["userOverPowerList"]:
await self.data.profile.put_profile_overpower(user_id, overpower)
if "userEmoneyList" in upsert:
for emoney in upsert["userEmoneyList"]:
await self.data.profile.put_profile_emoney(user_id, emoney)
if "userLoginBonusList" in upsert:
for login in upsert["userLoginBonusList"]:
await self.data.item.put_login_bonus(
user_id, self.version, login["presetId"], isWatched=True
)
if "userRecentPlayerList" in upsert: # TODO: Seen in Air, maybe implement sometime
for rp in upsert["userRecentPlayerList"]:
pass
# added in LUMINOUS
if "userCMissionList" in upsert:
for cmission in upsert["userCMissionList"]:
mission_id = cmission["missionId"]
await self.data.item.put_cmission(
user_id,
{
"missionId": mission_id,
"point": cmission["point"],
},
)
for progress in cmission["userCMissionProgressList"]:
await self.data.item.put_cmission_progress(user_id, mission_id, progress)
if "userNetBattleData" in upsert:
net_battle = upsert["userNetBattleData"][0]
# fix the boolean
net_battle["isRankUpChallengeFailed"] = (
False if net_battle["isRankUpChallengeFailed"] == "false" else True
)
await self.data.profile.put_net_battle(user_id, net_battle)
with self.data.session.begin():
for rating_type in {"userRatingBaseList", "userRatingBaseHotList", "userRatingBaseNextList"}:
if rating_type not in upsert:
continue
await self.data.profile.put_profile_rating(
user_id,
self.version,
rating_type,
upsert[rating_type],
)
if "userPlaylogList" in upsert:
for playlog in upsert["userPlaylogList"]:
# convert the player names to utf-8
if playlog["playedUserName1"] is not None:
playlog["playedUserName1"] = self.read_wtf8(playlog["playedUserName1"])
if playlog["playedUserName2"] is not None:
playlog["playedUserName2"] = self.read_wtf8(playlog["playedUserName2"])
if playlog["playedUserName3"] is not None:
playlog["playedUserName3"] = self.read_wtf8(playlog["playedUserName3"])
await self.data.score.put_playlog(user_id, playlog, self.version)
return {"returnCode": "1"}
@ -963,4 +1021,4 @@ class ChuniBase:
return {
"userId": data["userId"],
"userNetBattleData": {"recentNBSelectMusicList": []},
}

View File

@ -1,3 +1,6 @@
from enum import Enum
class ChuniConstants:
GAME_CODE = "SDBT"
GAME_CODE_NEW = "SDHD"
@ -20,6 +23,7 @@ class ChuniConstants:
VER_CHUNITHM_NEW_PLUS = 12
VER_CHUNITHM_SUN = 13
VER_CHUNITHM_SUN_PLUS = 14
VER_CHUNITHM_LUMINOUS = 15
VERSION_NAMES = [
"CHUNITHM",
"CHUNITHM PLUS",
@ -35,9 +39,22 @@ class ChuniConstants:
"CHUNITHM NEW!!",
"CHUNITHM NEW PLUS!!",
"CHUNITHM SUN",
"CHUNITHM SUN PLUS"
"CHUNITHM SUN PLUS",
"CHUNITHM_LUMINOUS",
]
@classmethod
def game_ver_to_string(cls, ver: int):
return cls.VERSION_NAMES[ver]
class MapAreaConditionType(Enum):
UNLOCKED = 0
MAP_CLEARED = 1
MAP_AREA_CLEARED = 2
TROPHY_OBTAINED = 3
class MapAreaConditionLogicalOperator(Enum):
AND = 1
OR = 2
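A minimal sketch of how these two enums are combined into a single entry of a mapAreaConditionList, mirroring the usage in the LUMINOUS handler further down (the IDs and dates below are placeholders, not real game data):

from titles.chuni.const import MapAreaConditionLogicalOperator, MapAreaConditionType

example_condition = {
    "mapAreaId": 1234567,  # placeholder map area ID
    "length": 1,
    "mapAreaConditionList": [
        {
            "type": MapAreaConditionType.TROPHY_OBTAINED.value,
            "conditionId": 9999,  # placeholder trophy ID
            "logicalOpe": MapAreaConditionLogicalOperator.AND.value,
            "startDate": "2024-01-01 00:00:00.0",
            "endDate": "2099-12-31 00:00:00.0",
        }
    ],
}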

View File

@ -1,6 +1,4 @@
from datetime import datetime, timedelta
from typing import Dict, Any
import pytz
from typing import Dict
from core.config import CoreConfig
from titles.chuni.base import ChuniBase

View File

@ -1,6 +1,4 @@
from datetime import datetime, timedelta
from typing import Dict, Any
import pytz
from typing import Dict
from core.config import CoreConfig
from titles.chuni.base import ChuniBase

View File

@ -39,12 +39,149 @@ class ChuniFrontend(FE_Base):
usr_sesh = self.validate_session(request)
if not usr_sesh:
usr_sesh = UserSession()
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh)
), media_type="text/html; charset=utf-8")
if usr_sesh.user_id > 0:
versions = await self.data.profile.get_all_profile_versions(usr_sesh.user_id)
profile = []
if versions:
# A chunithm_version of -1 means the session has not been initialized yet; pick a default from the user's existing versions.
if usr_sesh.chunithm_version < 0:
usr_sesh.chunithm_version = versions[0]
profile = await self.data.profile.get_profile_data(usr_sesh.user_id, usr_sesh.chunithm_version)
resp = Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
user_id=usr_sesh.user_id,
profile=profile,
version_list=ChuniConstants.VERSION_NAMES,
versions=versions,
cur_version=usr_sesh.chunithm_version
), media_type="text/html; charset=utf-8")
if usr_sesh.chunithm_version >= 0:
encoded_sesh = self.encode_session(usr_sesh)
resp.set_cookie("ARTEMIS_SESH", encoded_sesh)
return resp
else:
return RedirectResponse("/gate/", 303)
async def render_GET_rating(self, request: Request) -> bytes:
template = self.environment.get_template(
"titles/chuni/templates/chuni_rating.jinja"
)
usr_sesh = self.validate_session(request)
if not usr_sesh:
usr_sesh = UserSession()
if usr_sesh.user_id > 0:
if usr_sesh.chunithm_version < 0:
return RedirectResponse("/game/chuni/", 303)
profile = await self.data.profile.get_profile_data(usr_sesh.user_id, usr_sesh.chunithm_version)
rating = await self.data.profile.get_profile_rating(usr_sesh.user_id, usr_sesh.chunithm_version)
hot_list=[]
base_list=[]
if profile and rating:
song_records = []
for song in rating:
music_chart = await self.data.static.get_music_chart(usr_sesh.chunithm_version, song.musicId, song.difficultId)
if music_chart:
if (song.score < 800000):
song_rating = 0
elif (song.score >= 800000 and song.score < 900000):
song_rating = music_chart.level / 2 - 5
elif (song.score >= 900000 and song.score < 925000):
song_rating = music_chart.level - 5
elif (song.score >= 925000 and song.score < 975000):
song_rating = music_chart.level - 3
elif (song.score >= 975000 and song.score < 1000000):
song_rating = (song.score - 975000) / 2500 * 0.1 + music_chart.level
elif (song.score >= 1000000 and song.score < 1005000):
song_rating = (song.score - 1000000) / 1000 * 0.1 + 1 + music_chart.level
elif (song.score >= 1005000 and song.score < 1007500):
song_rating = (song.score - 1005000) / 500 * 0.1 + 1.5 + music_chart.level
elif (song.score >= 1007500 and song.score < 1009000):
song_rating = (song.score - 1007500) / 100 * 0.01 + 2 + music_chart.level
elif (song.score >= 1009000):
song_rating = 2.15 + music_chart.level
song_rating = int(song_rating * 10 ** 2) / 10 ** 2
song_records.append({
"difficultId": song.difficultId,
"musicId": song.musicId,
"title": music_chart.title,
"level": music_chart.level,
"score": song.score,
"type": song.type,
"song_rating": song_rating,
})
hot_list = [obj for obj in song_records if obj["type"] == "userRatingBaseHotList"]
base_list = [obj for obj in song_records if obj["type"] == "userRatingBaseList"]
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
profile=profile,
hot_list=hot_list,
base_list=base_list,
), media_type="text/html; charset=utf-8")
else:
return RedirectResponse("/gate/", 303)
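The score-to-rating brackets above can be pulled out into a small helper, which makes the cutoffs easier to read and test in isolation; a minimal sketch using the same breakpoints as the handler (the function name is illustrative, and level is the chart's internal constant, e.g. 13.7):

def single_song_rating(level: float, score: int) -> float:
    """Play rating for one score, truncated to two decimals (same brackets as the handler)."""
    if score < 800_000:
        rating = 0.0
    elif score < 900_000:
        rating = level / 2 - 5
    elif score < 925_000:
        rating = level - 5
    elif score < 975_000:
        rating = level - 3
    elif score < 1_000_000:
        rating = (score - 975_000) / 2500 * 0.1 + level
    elif score < 1_005_000:
        rating = (score - 1_000_000) / 1000 * 0.1 + 1 + level
    elif score < 1_007_500:
        rating = (score - 1_005_000) / 500 * 0.1 + 1.5 + level
    elif score < 1_009_000:
        rating = (score - 1_007_500) / 100 * 0.01 + 2 + level
    else:
        rating = 2.15 + level
    return int(rating * 100) / 100

# e.g. a 950,000 on a 14.0 chart is worth 11.00
assert single_song_rating(14.0, 950_000) == 11.0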
async def render_GET_playlog(self, request: Request) -> bytes:
template = self.environment.get_template(
"titles/chuni/templates/chuni_playlog.jinja"
)
usr_sesh = self.validate_session(request)
if not usr_sesh:
usr_sesh = UserSession()
if usr_sesh.user_id > 0:
if usr_sesh.chunithm_version < 0:
return RedirectResponse("/game/chuni/", 303)
path_index = request.path_params.get('index')
if not path_index or int(path_index) < 1:
index = 0
else:
index = int(path_index) - 1 # 0 and 1 are 1st page
user_id = usr_sesh.user_id
playlog_count = await self.data.score.get_user_playlogs_count(user_id)
if playlog_count < index * 20 :
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
playlog_count=0
), media_type="text/html; charset=utf-8")
playlog = await self.data.score.get_playlogs_limited(user_id, index, 20)
playlog_with_title = []
for record in playlog:
music_chart = await self.data.static.get_music_chart(usr_sesh.chunithm_version, record.musicId, record.level)
if music_chart:
difficultyNum=music_chart.level
artist=music_chart.artist
title=music_chart.title
else:
difficultyNum=0
artist="unknown"
title="musicid: " + str(record.musicId)
playlog_with_title.append({
"raw": record,
"title": title,
"difficultyNum": difficultyNum,
"artist": artist,
})
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
user_id=usr_sesh.user_id,
playlog=playlog_with_title,
playlog_count=playlog_count
), media_type="text/html; charset=utf-8")
else:
return RedirectResponse("/gate/", 303)
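A minimal sketch of the page-index arithmetic used above: the /playlog/{index} path parameter is 1-based (0 and 1 both mean the first page) and each page holds 20 records, so requesting page 3 skips the first 40 playlogs (the names below are illustrative):

PAGE_SIZE = 20

def page_to_offset(path_index: str | None) -> int:
    """Translate the 1-based playlog page parameter into a row offset."""
    if not path_index or int(path_index) < 1:
        return 0
    return (int(path_index) - 1) * PAGE_SIZE

assert page_to_offset(None) == 0
assert page_to_offset("1") == 0
assert page_to_offset("3") == 40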
async def update_name(self, request: Request) -> bytes:
usr_sesh = self.validate_session(request)
@ -80,4 +217,23 @@ class ChuniFrontend(FE_Base):
if not await self.data.profile.update_name(usr_sesh, new_name_full):
return RedirectResponse("/gate/?e=999", 303)
return RedirectResponse("/gate/?s=1", 303)
return RedirectResponse("/game/chuni/?s=1", 303)
async def version_change(self, request: Request):
usr_sesh = self.validate_session(request)
if not usr_sesh:
usr_sesh = UserSession()
if usr_sesh.user_id > 0:
form_data = await request.form()
chunithm_version = form_data.get("version")
self.logger.info(f"version change to: {chunithm_version}")
if chunithm_version is not None and chunithm_version.isdigit():
    usr_sesh.chunithm_version = int(chunithm_version)
encoded_sesh = self.encode_session(usr_sesh)
self.logger.info(f"Created session with JWT {encoded_sesh}")
resp = RedirectResponse("/game/chuni/", 303)
resp.set_cookie("ARTEMIS_SESH", encoded_sesh)
return resp
else:
return RedirectResponse("/gate/", 303)

View File

@ -1,8 +1,6 @@
from starlette.requests import Request
from starlette.routing import Route
from starlette.responses import Response
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
from starlette.responses import PlainTextResponse, Response
import zlib
import yaml
import json
@ -15,7 +13,7 @@ from Crypto.Hash import SHA1
from os import path
from typing import Tuple, Dict, List
from core import CoreConfig, Utils
from core import CoreConfig, Utils, logger
from core.title import BaseServlet
from .config import ChuniConfig
from .const import ChuniConstants
@ -34,12 +32,14 @@ from .new import ChuniNew
from .newplus import ChuniNewPlus
from .sun import ChuniSun
from .sunplus import ChuniSunPlus
from .luminous import ChuniLuminous
class ChuniServlet(BaseServlet):
def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
super().__init__(core_cfg, cfg_dir)
self.game_cfg = ChuniConfig()
self.hash_table: Dict[Dict[str, str]] = {}
if path.exists(f"{cfg_dir}/{ChuniConstants.CONFIG_NAME}"):
self.game_cfg.update(
yaml.safe_load(open(f"{cfg_dir}/{ChuniConstants.CONFIG_NAME}"))
@ -61,33 +61,16 @@ class ChuniServlet(BaseServlet):
ChuniNewPlus,
ChuniSun,
ChuniSunPlus,
ChuniLuminous,
]
self.logger = logging.getLogger("chuni")
self.logger = logger.create_logger(
"Chunithm",
self.game_cfg.server.loglevel,
logger_name="chuni",
)
if not hasattr(self.logger, "inited"):
log_fmt_str = "[%(asctime)s] Chunithm | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.core_cfg.server.log_dir, "chuni"),
encoding="utf8",
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(self.game_cfg.server.loglevel)
coloredlogs.install(
level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger.inited = True
self.hash_table: Dict[int, Dict[str, str]] = {}
for version, keys in self.game_cfg.crypto.keys.items():
if len(keys) < 3:
@ -98,31 +81,46 @@ class ChuniServlet(BaseServlet):
method_list = [
method
for method in dir(self.versions[version])
if not method.startswith("__")
if method.startswith("handle_") and method.endswith("_request")
]
for method in method_list:
method_fixed = inflection.camelize(method)[6:-7]
# The PBKDF2 iteration count changes per version: 70 in SUN, 36 in SUN PLUS, and 8 in LUMINOUS.
if version == ChuniConstants.VER_CHUNITHM_SUN_PLUS:
if version == ChuniConstants.VER_CHUNITHM_LUMINOUS:
iter_count = 8
elif version == ChuniConstants.VER_CHUNITHM_SUN_PLUS:
iter_count = 36
elif version == ChuniConstants.VER_CHUNITHM_SUN:
iter_count = 70
else:
elif version == ChuniConstants.VER_CHUNITHM_NEW_PLUS:
iter_count = 25
elif version == ChuniConstants.VER_CHUNITHM_NEW:
iter_count = 54
elif version == ChuniConstants.VER_CHUNITHM_PARADISE:
iter_count = 44
elif version == ChuniConstants.VER_CHUNITHM_CRYSTAL_PLUS:
iter_count = 67
else:
self.logger.warning("v%d doesn't support encryption, or the iteration count is not known.", version)
continue
hash = PBKDF2(
method_fixed,
bytes.fromhex(keys[2]),
128,
16,
count=iter_count,
hmac_hash_module=SHA1,
)
hashed_name = hash.hex()[:32] # truncate unused bytes like the game does
hashed_name = hash.hex()
self.hash_table[version][hashed_name] = method_fixed
self.logger.debug(
f"Hashed v{version} method {method_fixed} with {bytes.fromhex(keys[2])} to get {hash.hex()}"
"Hashed v%d method %s with %s to get %s",
version,
method_fixed,
keys[2],
hashed_name,
)
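For debugging, the same digest can be reproduced outside the servlet to figure out which obfuscated endpoint a client is calling; a minimal sketch assuming pycryptodome, with a placeholder salt standing in for the per-version key material (keys[2] in the config):

from Crypto.Hash import SHA1
from Crypto.Protocol.KDF import PBKDF2

def hash_endpoint(method_name: str, salt_hex: str, iter_count: int) -> str:
    """Derive the hashed endpoint name a client sends for an obfuscated title API (sketch)."""
    digest = PBKDF2(
        method_name,
        bytes.fromhex(salt_hex),
        16,  # 16 bytes -> 32 hex characters, matching the endpoint length check below
        count=iter_count,
        hmac_hash_module=SHA1,
    )
    return digest.hex()

# e.g. LUMINOUS (iteration count 8) with a placeholder salt:
print(hash_endpoint("GetUserDataApi", "00" * 16, 8))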
@classmethod
@ -162,7 +160,7 @@ class ChuniServlet(BaseServlet):
req_raw = await request.body()
encrtped = False
encrypted = False
internal_ver = 0
client_ip = Utils.get_ip_addr(request)
@ -195,39 +193,43 @@ class ChuniServlet(BaseServlet):
internal_ver = ChuniConstants.VER_CHUNITHM_NEW_PLUS
elif version >= 210 and version < 215: # SUN
internal_ver = ChuniConstants.VER_CHUNITHM_SUN
elif version >= 215: # SUN
elif version >= 215 and version < 220: # SUN PLUS
internal_ver = ChuniConstants.VER_CHUNITHM_SUN_PLUS
elif version >= 220:
internal_ver = ChuniConstants.VER_CHUNITHM_LUMINOUS
elif game_code == "SDGS": # Int
if version < 110: # SUPERSTAR
internal_ver = ChuniConstants.VER_CHUNITHM_PARADISE # FIXME: Not sure what was intended to go here? was just "PARADISE"
if version < 110: # SUPERSTAR / SUPERSTAR PLUS
internal_ver = ChuniConstants.VER_CHUNITHM_PARADISE # SUPERSTAR / SUPERSTAR PLUS worked fine with it
elif version >= 110 and version < 115: # NEW
internal_ver = ChuniConstants.VER_CHUNITHM_NEW
elif version >= 115 and version < 120: # NEW PLUS!!
internal_ver = ChuniConstants.VER_CHUNITHM_NEW_PLUS
elif version >= 120 and version < 125: # SUN
internal_ver = ChuniConstants.VER_CHUNITHM_SUN
elif version >= 125: # SUN PLUS
elif version >= 125 and version < 130: # SUN PLUS
internal_ver = ChuniConstants.VER_CHUNITHM_SUN_PLUS
elif version >= 130:
internal_ver = ChuniConstants.VER_CHUNITHM_LUMINOUS
if all(c in string.hexdigits for c in endpoint) and len(endpoint) == 32:
# If we get a 32-character hex string, it's a hashed endpoint name and the
# request is encrypted. The likelihood of false positives is low, but
# technically not zero.
if (chuni_encoding := request.headers.get("chuni-encoding")) is not None:
# CRYSTAL PLUS/PARADISE forgot to hash the endpoint name in the User-Agent.
if internal_ver < ChuniConstants.VER_CHUNITHM_NEW:
endpoint = request.headers.get("User-Agent").split("#")[0]
else:
if internal_ver not in self.hash_table:
self.logger.error(
f"v{version} does not support encryption or no keys entered"
"v%d does not support encryption or no keys entered",
version,
extra={"chuni-encoding": chuni_encoding, "internal_ver": internal_ver},
)
return Response(zlib.compress(b'{"stat": "0"}'))
return Response(zlib.compress(b'{"returnCode": "0"}'))
elif endpoint.lower() not in self.hash_table[internal_ver]:
self.logger.error(
f"No hash found for v{version} endpoint {endpoint}"
f"No hash found for v{version} endpoint {endpoint}",
extra={"chuni-encoding": chuni_encoding, "internal_ver": internal_ver},
)
return Response(zlib.compress(b'{"stat": "0"}'))
return Response(zlib.compress(b'{"returnCode": "0"}'))
endpoint = self.hash_table[internal_ver][endpoint.lower()]
@ -241,62 +243,84 @@ class ChuniServlet(BaseServlet):
req_raw = crypt.decrypt(req_raw)
except Exception as e:
self.logger.error(
f"Failed to decrypt v{version} request to {endpoint} -> {e}"
self.logger.exception(
f"Failed to decrypt v{version} request to {endpoint}",
exc_info=e,
)
return Response(zlib.compress(b'{"stat": "0"}'))
return Response(zlib.compress(b'{"returnCode": "0"}'))
encrtped = True
encrypted = True
if (
not encrtped
not encrypted
and self.game_cfg.crypto.encrypted_only
and internal_ver >= ChuniConstants.VER_CHUNITHM_CRYSTAL_PLUS
):
self.logger.error(
f"Unencrypted v{version} {endpoint} request, but config is set to encrypted only: {req_raw}"
f"Unencrypted v{version} {endpoint} request, but config is set to encrypted only: {req_raw}",
extra={"internal_ver": internal_ver},
)
return Response(zlib.compress(b'{"stat": "0"}'))
return Response(zlib.compress(b'{"returnCode": "0"}'))
try:
unzip = zlib.decompress(req_raw)
except zlib.error as e:
self.logger.error(
f"Failed to decompress v{version} {endpoint} request -> {e}"
self.logger.warning(
f"Failed to decompress v{version} {endpoint} request",
exc_info=e,
)
return Response(zlib.compress(b'{"stat": "0"}'))
return Response(zlib.compress(b'{"returnCode": "0"}'))
req_data = json.loads(unzip)
self.logger.info(f"v{version} {endpoint} request from {client_ip}")
self.logger.debug(req_data)
self.logger.debug(
"Received request v%d %s from %s.",
version, endpoint, client_ip,
extra={
"body": req_data,
},
)
if game_code == "SDGS" and version >= 110:
endpoint = endpoint.replace("C3Exp", "")
elif game_code == "SDGS" and version < 110:
endpoint = endpoint.replace("Exp", "")
else:
endpoint = endpoint
endpoint = endpoint.replace("C3Exp", "") if game_code == "SDGS" else endpoint
func_to_find = "handle_" + inflection.underscore(endpoint) + "_request"
handler_cls = self.versions[internal_ver](self.core_cfg, self.game_cfg)
handler = getattr(handler_cls, func_to_find, None)
if not hasattr(handler_cls, func_to_find):
self.logger.warning(f"Unhandled v{version} request {endpoint}")
resp = {"returnCode": 1}
if handler is None:
if self.core_cfg.server.is_develop:
self.logger.warning(f"Unhandled v{version} request {endpoint}, returning stub response")
resp = {"returnCode": 1}
else:
self.logger.debug("(v%d %s) Returned 404.", version, endpoint)
return PlainTextResponse("Not Found", status_code=404)
else:
try:
handler = getattr(handler_cls, func_to_find)
resp = await handler(req_data)
except Exception as e:
self.logger.error(f"Error handling v{version} method {endpoint} - {e}")
return Response(zlib.compress(b'{"stat": "0"}'))
self.logger.exception("Error handling v%d method %s", version, endpoint, exc_info=e)
if resp is None:
if not self.core_cfg.server.is_develop:
self.logger.debug("(v%d %s) Returned 500.", version, endpoint)
return PlainTextResponse("Internal Server Error", status_code=500)
resp = {"returnCode": 0}
if resp is None:
resp = {"returnCode": 1}
self.logger.debug(f"Response {resp}")
self.logger.info("(v%d %s) Returned 200.", version, endpoint, extra=resp)
zipped = zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))
if not encrtped:
if not encrypted:
return Response(zipped)
padded = pad(zipped, 16)
@ -307,4 +331,4 @@ class ChuniServlet(BaseServlet):
bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][1]),
)
return Response(crypt.encrypt(padded))
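The response path above compresses the JSON, pads it to the 16-byte block size and encrypts it with the per-version key material from the config; a minimal sketch of the matching client-side decode, assuming AES-CBC with the key and IV taken from the configured key list (only the second index is visible in this hunk, so treat the exact indices and the cipher mode as assumptions):

import json
import zlib

from Crypto.Cipher import AES
from Crypto.Util.Padding import unpad

def decode_title_response(body: bytes, key_hex: str, iv_hex: str) -> dict:
    """Decrypt, decompress and parse an encrypted title-server response (sketch)."""
    cipher = AES.new(bytes.fromhex(key_hex), AES.MODE_CBC, bytes.fromhex(iv_hex))
    zipped = unpad(cipher.decrypt(body), 16)
    return json.loads(zlib.decompress(zipped))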

titles/chuni/luminous.py Normal file
View File

@ -0,0 +1,404 @@
from datetime import timedelta
from typing import Dict
from core.config import CoreConfig
from core.utils import Utils
from titles.chuni.sunplus import ChuniSunPlus
from titles.chuni.const import ChuniConstants, MapAreaConditionLogicalOperator, MapAreaConditionType
from titles.chuni.config import ChuniConfig
class ChuniLuminous(ChuniSunPlus):
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
super().__init__(core_cfg, game_cfg)
self.version = ChuniConstants.VER_CHUNITHM_LUMINOUS
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
ret = await super().handle_get_game_setting_api_request(data)
ret["gameSetting"]["romVersion"] = self.game_cfg.version.version(self.version)[
"rom"
]
ret["gameSetting"]["dataVersion"] = self.game_cfg.version.version(self.version)[
"data"
]
t_port = ""
if (
not self.core_cfg.server.is_using_proxy
and Utils.get_title_port(self.core_cfg) != 80
):
t_port = f":{self.core_cfg.server.port}"
ret["gameSetting"][
"matchingUri"
] = f"http://{self.core_cfg.server.hostname}{t_port}/SDHD/220/ChuniServlet/"
ret["gameSetting"][
"matchingUriX"
] = f"http://{self.core_cfg.server.hostname}{t_port}/SDHD/220/ChuniServlet/"
ret["gameSetting"][
"udpHolePunchUri"
] = f"http://{self.core_cfg.server.hostname}{t_port}/SDHD/220/ChuniServlet/"
ret["gameSetting"][
"reflectorUri"
] = f"http://{self.core_cfg.server.hostname}{t_port}/SDHD/220/ChuniServlet/"
return ret
async def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
user_data = await super().handle_cm_get_user_preview_api_request(data)
# It's unclear whether lastDataVersion matters here; CardMaker 1.35 probably doesn't work this far up anyway.
user_data["lastDataVersion"] = "2.20.00"
return user_data
async def handle_get_user_c_mission_api_request(self, data: Dict) -> Dict:
user_id = data["userId"]
mission_id = data["missionId"]
progress_list = []
point = 0
mission_data = await self.data.item.get_cmission(user_id, mission_id)
progress_data = await self.data.item.get_cmission_progress(user_id, mission_id)
if mission_data and progress_data:
point = mission_data["point"]
for progress in progress_data:
progress_list.append(
{
"order": progress["order"],
"stage": progress["stage"],
"progress": progress["progress"],
}
)
return {
"userId": user_id,
"missionId": mission_id,
"point": point,
"userCMissionProgressList": progress_list,
}
async def handle_get_user_net_battle_ranking_info_api_request(self, data: Dict) -> Dict:
user_id = data["userId"]
net_battle = {}
net_battle_data = await self.data.profile.get_net_battle(user_id)
if net_battle_data:
net_battle = {
"isRankUpChallengeFailed": net_battle_data["isRankUpChallengeFailed"],
"highestBattleRankId": net_battle_data["highestBattleRankId"],
"battleIconId": net_battle_data["battleIconId"],
"battleIconNum": net_battle_data["battleIconNum"],
"avatarEffectPoint": net_battle_data["avatarEffectPoint"],
}
return {
"userId": user_id,
"userNetBattleData": net_battle,
}
async def handle_get_game_map_area_condition_api_request(self, data: Dict) -> Dict:
# There is no game data for this; everything is server-side.
# However, we can selectively show/hide events as data is imported into the server.
events = await self.data.static.get_enabled_events(self.version)
event_by_id = {evt["eventId"]: evt for evt in events}
conditions = []
# The Mystic Rainbow of LUMINOUS map unlocks when any of the mainline LUMINOUS maps
# (ep. I, ep. II or ep. III) is completed.
mystic_area_1_conditions = {
"mapAreaId": 3229301, # Mystic Rainbow of LUMINOUS Area 1
"length": 0,
"mapAreaConditionList": [],
}
mystic_area_1_added = False
# Secret AREA: MUSIC GAME
if 14029 in event_by_id:
start_date = event_by_id[14029]["startDate"].strftime(self.date_time_format)
mission_in_progress_end_date = "2099-12-31 00:00:00.0"
# The "MISSION in progress" trophy required to trigger the secret area
# is only available in the first CHUNITHM mission. If the second mission
# (event ID 14214) was imported into ARTEMiS, we disable the requirement
# for this trophy.
if 14214 in event_by_id:
mission_in_progress_end_date = (event_by_id[14214]["startDate"] - timedelta(hours=2)).strftime(self.date_time_format)
conditions.extend([
{
"mapAreaId": 2206201, # BlythE ULTIMA
"length": 1,
# Obtain the trophy "MISSION in progress".
"mapAreaConditionList": [
{
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
"conditionId": 6832,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": mission_in_progress_end_date,
}
],
},
{
"mapAreaId": 2206202, # PRIVATE SERVICE ULTIMA
"length": 1,
# Obtain the trophy "MISSION in progress".
"mapAreaConditionList": [
{
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
"conditionId": 6832,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": mission_in_progress_end_date,
}
],
},
{
"mapAreaId": 2206203, # New York Back Raise
"length": 1,
# Get an SS on NightTheater's EXPERT chart and obtain the title
# "今宵、劇場に映し出される景色とは――――。"
"mapAreaConditionList": [
{
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
"conditionId": 6833,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
},
],
},
{
"mapAreaId": 2206204, # Spasmodic
"length": 2,
# - Get 1 miss on Random (any difficulty) and get the title "当たり待ち"
# - Get 1 miss on 花たちに希望を (any difficulty) and get the title "花たちに希望を"
"mapAreaConditionList": [
{
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
"conditionId": 6834,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
},
{
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
"conditionId": 6835,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
},
],
},
{
"mapAreaId": 2206205, # ΩΩPARTS
"length": 2,
# - Get an S on Sage's EXPERT chart to get the title "マターリ進行キボンヌ"
# - Equip this title and play cab-to-cab with another person who also has it
#   to get "マターリしようよ". This is disabled because cab-to-cab play is difficult
#   on data setups; a network operator may re-enable it by uncommenting
#   the second condition below.
"mapAreaConditionList": [
{
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
"conditionId": 6836,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
},
# {
# "type": MapAreaConditionType.TROPHY_OBTAINED.value,
# "conditionId": 6837,
# "logicalOpe": MapAreaConditionLogicalOperator.AND.value,
# "startDate": start_date,
# "endDate": "2099-12-31 00:00:00.0",
# },
],
},
{
"mapAreaId": 2206206, # Blow My Mind
"length": 1,
# Get an SS on the EXPERT charts of CHAOS, Hydra, Surive and Jakarta PROGRESSION
# to get the title "Can you hear me?"
"mapAreaConditionList": [
{
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
"conditionId": 6838,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
},
],
},
{
"mapAreaId": 2206207, # VALLIS-NERIA
"length": 6,
# Finish the 6 other areas
"mapAreaConditionList": [
{
"type": MapAreaConditionType.MAP_AREA_CLEARED.value,
"conditionId": x,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
}
for x in range(2206201, 2206207)
],
},
])
# LUMINOUS ep. I
if 14005 in event_by_id:
start_date = event_by_id[14005]["startDate"].strftime(self.date_time_format)
if not mystic_area_1_added:
conditions.append(mystic_area_1_conditions)
mystic_area_1_added = True
mystic_area_1_conditions["length"] += 1
mystic_area_1_conditions["mapAreaConditionList"].append(
{
"type": MapAreaConditionType.MAP_CLEARED.value,
"conditionId": 3020701,
"logicalOpe": MapAreaConditionLogicalOperator.OR.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
}
)
conditions.append(
{
"mapAreaId": 3229302, # Mystic Rainbow of LUMINOUS Area 2,
"length": 1,
# Unlocks when LUMINOUS ep. I is completed.
"mapAreaConditionList": [
{
"type": MapAreaConditionType.MAP_CLEARED.value,
"conditionId": 3020701,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
},
],
}
)
# LUMINOUS ep. II
if 14251 in event_by_id:
start_date = event_by_id[14251]["startDate"].strftime(self.date_time_format)
if not mystic_area_1_added:
conditions.append(mystic_area_1_conditions)
mystic_area_1_added = True
mystic_area_1_conditions["length"] += 1
mystic_area_1_conditions["mapAreaConditionList"].append(
{
"type": MapAreaConditionType.MAP_CLEARED.value,
"conditionId": 3020702,
"logicalOpe": MapAreaConditionLogicalOperator.OR.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
}
)
conditions.append(
{
"mapAreaId": 3229303, # Mystic Rainbow of LUMINOUS Area 3,
"length": 1,
# Unlocks when LUMINOUS ep. II is completed.
"mapAreaConditionList": [
{
"type": MapAreaConditionType.MAP_CLEARED.value,
"conditionId": 3020702,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
},
],
}
)
# LUMINOUS ep. III
if 14481 in event_by_id:
start_date = event_by_id[14481]["startDate"].strftime(self.date_time_format)
if not mystic_area_1_added:
conditions.append(mystic_area_1_conditions)
mystic_area_1_added = True
mystic_area_1_conditions["length"] += 1
mystic_area_1_conditions["mapAreaConditionList"].append(
{
"type": MapAreaConditionType.MAP_CLEARED.value,
"conditionId": 3020703,
"logicalOpe": MapAreaConditionLogicalOperator.OR.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
}
)
conditions.append(
{
"mapAreaId": 3229304, # Mystic Rainbow of LUMINOUS Area 4,
"length": 1,
# Unlocks when LUMINOUS ep. III is completed.
"mapAreaConditionList": [
{
"type": MapAreaConditionType.MAP_CLEARED.value,
"conditionId": 3020703,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
},
],
}
)
# 1UM1N0U5 ep. 111
if 14483 in event_by_id:
start_date = event_by_id[14483]["startDate"].strftime(self.date_time_format)
# Finish LUMINOUS ep.III to unlock the six titles in 1UM1N0U5 ep.111.
for i in range(3229201, 3229207):
conditions.append(
{
"mapAreaId": i,
"length": 1,
"mapAreaConditionList": [
{
"type": MapAreaConditionType.MAP_CLEARED.value,
"conditionId": 3020703,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
}
],
},
)
# Finish the 6 title map areas to unlock Ultimate Force.
conditions.append(
{
"mapAreaId": 3229207, # Ultimate Force
"length": 6,
# Finish the 6 other areas
"mapAreaConditionList": [
{
"type": MapAreaConditionType.MAP_AREA_CLEARED.value,
"conditionId": x,
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
"startDate": start_date,
"endDate": "2099-12-31 00:00:00.0",
}
for x in range(3229201, 3229207)
],
},
)
return {
"length": len(conditions),
"gameMapAreaConditionList": conditions,
}

View File

@ -1,9 +1,9 @@
import logging
from datetime import datetime, timedelta
from random import randint
from typing import Dict
import pytz
import core.logger
from core.config import CoreConfig
from core.utils import Utils
from titles.chuni.const import ChuniConstants
@ -19,8 +19,8 @@ class ChuniNew(ChuniBase):
self.game_cfg = game_cfg
self.data = ChuniData(core_cfg)
self.date_time_format = "%Y-%m-%d %H:%M:%S"
self.logger = logging.getLogger("chuni")
self.game = ChuniConstants.GAME_CODE
self.logger = core.logger.create_logger("Chunithm", logger_name="chuni")
self.game = ChuniConstants.GAME_CODE_NEW
self.version = ChuniConstants.VER_CHUNITHM_NEW
def _interal_ver_to_intver(self) -> str:
@ -32,8 +32,45 @@ class ChuniNew(ChuniBase):
return "210"
if self.version == ChuniConstants.VER_CHUNITHM_SUN_PLUS:
return "215"
if self.version == ChuniConstants.VER_CHUNITHM_LUMINOUS:
return "220"
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
place_id = int(data["placeId"])
client_id = data["clientId"]
if not self.core_cfg.server.allow_unregistered_serials:
machine = await self.data.arcade.get_machine(client_id)
if machine is None:
self.logger.warning(
"Unrecognized serial %s attempted to reach %s title server.",
client_id, self.game
)
return {"returnCode": 0}
if machine["game"] is not None and machine["game"] != self.game:
self.logger.warning(
"Serial %s attempted to reach the title server for a different game.",
client_id,
extra={
"requestedGame": self.game,
"expectedGame": machine["game"],
}
)
return {"returnCode": 0}
if place_id != machine["arcade"]:
self.logger.warning(
"Serial %s attempted to reach %s title server from a different arcade.",
client_id, self.game,
extra={
"requestedPlaceId": place_id,
"expectedPlaceId": machine["arcade"]
}
)
return {"returnCode": 0}
# use UTC time and convert it to JST time by adding +9
# matching therefore starts one hour before and lasts for 8 hours
match_start = datetime.strftime(
@ -73,9 +110,9 @@ class ChuniNew(ChuniBase):
"rebootStartTime": reboot_start,
"rebootEndTime": reboot_end,
"isBackgroundDistribute": False,
"maxCountCharacter": 300,
"maxCountItem": 300,
"maxCountMusic": 300,
"maxCountCharacter": 700,
"maxCountItem": 2200,
"maxCountMusic": 100,
"matchStartTime": match_start,
"matchEndTime": match_end,
"matchTimeLimit": self.game_cfg.matching.match_time_limit,
@ -117,9 +154,12 @@ class ChuniNew(ChuniBase):
return {"userId": data["userId"], "symbolCharInfoList": []}
async def handle_get_user_preview_api_request(self, data: Dict) -> Dict:
profile = await self.data.profile.get_profile_preview(data["userId"], self.version)
user_id = int(data["userId"])
profile = await self.data.profile.get_profile_preview(user_id, self.version)
if profile is None:
return None
profile_character = await self.data.item.get_character(
data["userId"], profile["characterId"]
)
@ -130,11 +170,13 @@ class ChuniNew(ChuniBase):
chara = profile_character._asdict()
chara.pop("id")
chara.pop("user")
lock_result = await self.data.user.check_lock_for_game(user_id, self.game)
data1 = {
"userId": data["userId"],
# Current Login State
"isLogin": False,
"isLogin": lock_result is not None,
"lastLoginDate": profile["lastPlayDate"],
# User Profile
"userName": profile["userName"],
@ -282,9 +324,12 @@ class ChuniNew(ChuniBase):
}
async def handle_get_user_printed_card_api_request(self, data: Dict) -> Dict:
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
user_print_list = await self.data.item.get_user_print_states(
data["userId"], has_completed=True
data["userId"], has_completed=True, offset=next_idx, limit=max_ct
)
if user_print_list is None:
return {
"userId": data["userId"],
@ -294,18 +339,13 @@ class ChuniNew(ChuniBase):
}
print_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
for x in range(next_idx, len(user_print_list)):
tmp = user_print_list[x]._asdict()
for x in user_print_list:
tmp = x._asdict()
print_list.append(tmp["cardId"])
if len(print_list) >= max_ct:
break
if len(print_list) >= max_ct:
next_idx = next_idx + max_ct
next_idx += max_ct
else:
next_idx = -1
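The nextIndex contract implemented above (and by the other paged GetUserXXX handlers) boils down to: return up to maxCount rows starting at nextIndex, then report nextIndex + maxCount if a full page came back, or -1 once the list is exhausted. A minimal sketch with hypothetical names:

def advance_next_index(next_idx: int, max_ct: int, rows_returned: int) -> int:
    """-1 tells the game it has reached the end of the list."""
    return next_idx + max_ct if rows_returned >= max_ct else -1

# e.g. 45 printed cards with maxCount 20: 0 -> 20 -> 40 -> -1
assert advance_next_index(0, 20, 20) == 20
assert advance_next_index(20, 20, 20) == 40
assert advance_next_index(40, 20, 5) == -1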

View File

@ -1,6 +1,5 @@
from typing import Dict, Any
from typing import Dict
from core.utils import Utils
from core.config import CoreConfig
from titles.chuni.new import ChuniNew
from titles.chuni.const import ChuniConstants

View File

@ -1,6 +1,4 @@
from datetime import datetime, timedelta
from typing import Dict, Any
import pytz
from typing import Dict
from core.config import CoreConfig
from titles.chuni.base import ChuniBase

View File

@ -48,9 +48,8 @@ class ChuniReader(BaseReader):
for root, dirs, files in walk(f"{root_dir}loginBonusPreset"):
for dir in dirs:
if path.exists(f"{root}/{dir}/LoginBonusPreset.xml"):
with open(f"{root}/{dir}/LoginBonusPreset.xml", "rb") as fp:
bytedata = fp.read()
strdata = bytedata.decode("UTF-8")
with open(f"{root}/{dir}/LoginBonusPreset.xml", "r", encoding="utf-8") as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
@ -118,12 +117,16 @@ class ChuniReader(BaseReader):
)
async def read_events(self, evt_dir: str) -> None:
# Disable all previous announcement events
# All new announcement events will be enabled, so we don't end up with banner spam
# every time we card in.
await self.data.static.disable_all_events(self.version, 1)
for root, dirs, files in walk(evt_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/Event.xml"):
with open(f"{root}/{dir}/Event.xml", "rb") as fp:
bytedata = fp.read()
strdata = bytedata.decode("UTF-8")
with open(f"{root}/{dir}/Event.xml", "r", encoding="utf-8") as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
@ -144,9 +147,8 @@ class ChuniReader(BaseReader):
for root, dirs, files in walk(music_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/Music.xml"):
with open(f"{root}/{dir}/Music.xml", "rb") as fp:
bytedata = fp.read()
strdata = bytedata.decode("UTF-8")
with open(f"{root}/{dir}/Music.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
@ -210,9 +212,8 @@ class ChuniReader(BaseReader):
for root, dirs, files in walk(charge_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/ChargeItem.xml"):
with open(f"{root}/{dir}/ChargeItem.xml", "rb") as fp:
bytedata = fp.read()
strdata = bytedata.decode("UTF-8")
with open(f"{root}/{dir}/ChargeItem.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
@ -240,9 +241,8 @@ class ChuniReader(BaseReader):
for root, dirs, files in walk(avatar_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/AvatarAccessory.xml"):
with open(f"{root}/{dir}/AvatarAccessory.xml", "rb") as fp:
bytedata = fp.read()
strdata = bytedata.decode("UTF-8")
with open(f"{root}/{dir}/AvatarAccessory.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):

View File

@ -8,7 +8,6 @@ from sqlalchemy import (
delete,
)
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
from sqlalchemy.engine.base import Connection
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
@ -243,13 +242,43 @@ matching = Table(
mysql_charset="utf8mb4",
)
cmission = Table(
"chuni_item_cmission",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column(
"user",
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
nullable=False,
),
Column("missionId", Integer, nullable=False),
Column("point", Integer),
UniqueConstraint("user", "missionId", name="chuni_item_cmission_uk"),
mysql_charset="utf8mb4",
)
cmission_progress = Table(
"chuni_item_cmission_progress",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
Column("missionId", Integer, nullable=False),
Column("order", Integer),
Column("stage", Integer),
Column("progress", Integer),
UniqueConstraint(
"user", "missionId", "order", name="chuni_item_cmission_progress_uk"
),
mysql_charset="utf8mb4",
)
class ChuniItemData(BaseData):
async def get_oldest_free_matching(self, version: int) -> Optional[Row]:
sql = matching.select(
and_(
matching.c.version == version,
matching.c.isFull == False
matching.c.isFull == False # noqa: E712
)
).order_by(matching.c.roomId.asc())
@ -414,12 +443,19 @@ class ChuniItemData(BaseData):
return None
return result.fetchone()
async def get_characters(self, user_id: int) -> Optional[List[Row]]:
async def get_characters(self, user_id: int, offset: int | None = None, limit: int | None = None) -> Optional[List[Row]]:
sql = select(character).where(character.c.user == user_id)
if offset is not None:
sql = sql.offset(offset)
if limit is not None:
sql = sql.limit(limit)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_item(self, user_id: int, item_data: Dict) -> Optional[int]:
@ -435,7 +471,7 @@ class ChuniItemData(BaseData):
return None
return result.lastrowid
async def get_items(self, user_id: int, kind: int = None) -> Optional[List[Row]]:
async def get_items(self, user_id: int, kind: int = None, offset: int | None = None, limit: int | None = None) -> Optional[List[Row]]:
if kind is None:
sql = select(item).where(item.c.user == user_id)
else:
@ -443,6 +479,11 @@ class ChuniItemData(BaseData):
and_(item.c.user == user_id, item.c.itemKind == kind)
)
if offset is not None:
sql = sql.offset(offset)
if limit is not None:
sql = sql.limit(limit)
result = await self.execute(sql)
if result is None:
return None
@ -535,7 +576,7 @@ class ChuniItemData(BaseData):
return result.lastrowid
async def get_user_print_states(
self, aime_id: int, has_completed: bool = False
self, aime_id: int, has_completed: bool = False, offset: int | None = None, limit: int | None = None,
) -> Optional[List[Row]]:
sql = print_state.select(
and_(
@ -544,6 +585,11 @@ class ChuniItemData(BaseData):
)
)
if offset is not None:
sql = sql.offset(offset)
if limit is not None:
sql = sql.limit(limit)
result = await self.execute(sql)
if result is None:
return None
@ -594,3 +640,61 @@ class ChuniItemData(BaseData):
)
return None
return result.lastrowid
async def put_cmission_progress(
self, user_id: int, mission_id: int, progress_data: Dict
) -> Optional[int]:
progress_data["user"] = user_id
progress_data["missionId"] = mission_id
sql = insert(cmission_progress).values(**progress_data)
conflict = sql.on_duplicate_key_update(**progress_data)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def get_cmission_progress(
self, user_id: int, mission_id: int
) -> Optional[List[Row]]:
sql = cmission_progress.select(
and_(
cmission_progress.c.user == user_id,
cmission_progress.c.missionId == mission_id,
)
).order_by(cmission_progress.c.order.asc())
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def get_cmission(self, user_id: int, mission_id: int) -> Optional[Row]:
sql = cmission.select(
and_(cmission.c.user == user_id, cmission.c.missionId == mission_id)
)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
async def put_cmission(self, user_id: int, mission_data: Dict) -> Optional[int]:
mission_data["user"] = user_id
sql = insert(cmission).values(**mission_data)
conflict = sql.on_duplicate_key_update(**mission_data)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def get_cmissions(self, user_id: int) -> Optional[List[Row]]:
sql = cmission.select(cmission.c.user == user_id)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
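A minimal usage sketch of the cmission methods above, mirroring how handle_upsert_user_all feeds them from userCMissionList (the import path assumes the usual titles.chuni.schema layout; mission ID, point and stage values are placeholders):

from titles.chuni.schema.item import ChuniItemData

async def record_example_mission(item_data: ChuniItemData, user_id: int) -> None:
    # One row per mission, then one row per (missionId, order) progress entry.
    await item_data.put_cmission(user_id, {"missionId": 14001, "point": 150})
    await item_data.put_cmission_progress(user_id, 14001, {"order": 1, "stage": 1, "progress": 100})
    await item_data.put_cmission_progress(user_id, 14001, {"order": 2, "stage": 2, "progress": 50})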

View File

@ -3,7 +3,7 @@ from sqlalchemy import Table, Column, UniqueConstraint, and_
from sqlalchemy.types import Integer, String, Boolean, JSON, BigInteger
from sqlalchemy.schema import ForeignKey
from sqlalchemy.engine import Row
from sqlalchemy.sql import select, delete
from sqlalchemy.sql import select
from sqlalchemy.dialects.mysql import insert
from core.data.schema import BaseData, metadata
@ -412,6 +412,19 @@ rating = Table(
mysql_charset="utf8mb4",
)
net_battle = Table(
"chuni_profile_net_battle",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("user", Integer, ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False, unique=True),
Column("isRankUpChallengeFailed", Boolean),
Column("highestBattleRankId", Integer),
Column("battleIconId", Integer),
Column("battleIconNum", Integer),
Column("avatarEffectPoint", Integer),
mysql_charset="utf8mb4",
)
class ChuniProfileData(BaseData):
async def update_name(self, user_id: int, new_name: str) -> bool:
@ -723,13 +736,13 @@ class ChuniProfileData(BaseData):
if playcount_sql is None:
self.logger.warn(
f"get_overview: Couldn't pull playcounts"
"get_overview: Couldn't pull playcounts"
)
return 0
total_play_count = 0
for row in playcount_sql:
total_play_count += row[0]
total_play_count = row[0]
return {
"total_play_count": total_play_count
}
@ -757,3 +770,36 @@ class ChuniProfileData(BaseData):
return
return result.lastrowid
async def put_net_battle(self, user_id: int, net_battle_data: Dict) -> Optional[int]:
sql = insert(net_battle).values(
user=user_id,
isRankUpChallengeFailed=net_battle_data['isRankUpChallengeFailed'],
highestBattleRankId=net_battle_data['highestBattleRankId'],
battleIconId=net_battle_data['battleIconId'],
battleIconNum=net_battle_data['battleIconNum'],
avatarEffectPoint=net_battle_data['avatarEffectPoint'],
)
conflict = sql.on_duplicate_key_update(
isRankUpChallengeFailed=net_battle_data['isRankUpChallengeFailed'],
highestBattleRankId=net_battle_data['highestBattleRankId'],
battleIconId=net_battle_data['battleIconId'],
battleIconNum=net_battle_data['battleIconNum'],
avatarEffectPoint=net_battle_data['avatarEffectPoint'],
)
result = await self.execute(conflict)
if result:
return result.inserted_primary_key['id']
self.logger.error(f"Failed to put net battle data for user {user_id}")
async def get_net_battle(self, user_id: int) -> Optional[Row]:
result = await self.execute(net_battle.select(net_battle.c.user == user_id))
if result:
return result.fetchone()
return None

View File

@ -1,12 +1,10 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BigInteger
from sqlalchemy.engine.base import Connection
from sqlalchemy import Table, Column, UniqueConstraint
from sqlalchemy.types import Integer, String, Boolean
from sqlalchemy.schema import ForeignKey
from sqlalchemy.engine import Row
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql.expression import exists
from core.data.schema import BaseData, metadata
course = Table(
@ -142,12 +140,19 @@ playlog = Table(
class ChuniScoreData(BaseData):
async def get_courses(self, aime_id: int) -> Optional[Row]:
async def get_courses(self, aime_id: int, offset: int | None = None, limit: int | None = None) -> Optional[Row]:
sql = select(course).where(course.c.user == aime_id)
if offset is not None:
sql = sql.offset(offset)
if limit is not None:
sql = sql.limit(limit)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_course(self, aime_id: int, course_data: Dict) -> Optional[int]:
@ -162,12 +167,30 @@ class ChuniScoreData(BaseData):
return None
return result.lastrowid
async def get_scores(self, aime_id: int) -> Optional[Row]:
sql = select(best_score).where(best_score.c.user == aime_id)
async def get_scores(
self,
aime_id: int,
offset: int | None = None,
limit: int | None = None,
levels: list[int] = None,
) -> Optional[Row]:
condition = best_score.c.user == aime_id
if levels is not None:
condition = condition & best_score.c.level.in_(levels)
sql = select(best_score).where(condition)
if offset is not None:
sql = sql.offset(offset)
if limit is not None:
sql = sql.limit(limit)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_score(self, aime_id: int, score_data: Dict) -> Optional[int]:

View File

@ -7,13 +7,10 @@ from sqlalchemy import (
PrimaryKeyConstraint,
and_,
)
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
from sqlalchemy.engine.base import Connection
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, Float
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from datetime import datetime
from core.data.schema import BaseData, metadata
@ -342,6 +339,10 @@ class ChuniStaticData(BaseData):
if result is None:
return None
return result.fetchall()
async def disable_all_events(self, version: int, type: int):
sql = events.update().values(enabled=False).where((events.c.version == version) & (events.c.type == type))
await self.execute(sql)
async def put_music(
self,
@ -596,4 +597,4 @@ class ChuniStaticData(BaseData):
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()

View File

@ -1,4 +1,4 @@
from typing import Dict, Any
from typing import Dict
from core.config import CoreConfig
from titles.chuni.newplus import ChuniNewPlus

View File

@ -1,4 +1,4 @@
from typing import Dict, Any
from typing import Dict
from core.config import CoreConfig
from titles.chuni.sun import ChuniSun

View File

@ -1,20 +1,150 @@
{% extends "core/templates/index.jinja" %}
{% block content %}
<h1>Chunithm</h1>
{% if profile is defined and profile is not none and profile.id > 0 %}
<script type="text/javascript">
function toggle_new_name_form() {
let frm = document.getElementById("new_name_form");
let btn = document.getElementById("btn_toggle_form");
if (frm.style['display'] != "") {
frm.style['display'] = "";
frm.style['max-height'] = "";
btn.innerText = "Cancel";
} else {
frm.style['display'] = "none";
frm.style['max-height'] = "0px";
btn.innerText = "Edit";
<style>
{% include 'titles/chuni/templates/css/chuni_style.css' %}
</style>
<div class="container">
{% include 'titles/chuni/templates/chuni_header.jinja' %}
{% if profile is defined and profile is not none and profile|length > 0 %}
<div class="row">
<div class="col-lg-8 m-auto mt-3">
<div class="card bg-card rounded">
<table class="table-large table-rowdistinct">
<caption align="top">OVERVIEW</caption>
<tr>
<th>{{ profile.userName }}</th>
<th>
<button type="button" class="btn btn-primary btn-sm" data-bs-toggle="modal" data-bs-target="#name_change">Edit</button>
</th>
</tr>
<tr>
<td>version:</td>
<td>
<select name="version" id="version" onChange="changeVersion(this)">
{% for ver in versions %}
{% if ver == cur_version %}
<option value="{{ ver }}" selected>{{ version_list[ver] }}</option>
{% else %}
<option value="{{ ver }}">{{ version_list[ver] }}</option>
{% endif %}
{% endfor %}
</select>
{% if versions | length > 1 %}
<p style="margin-block-end: 0;">You have {{ versions | length }} versions.</p>
{% endif %}
</td>
</tr>
<tr>
<td>Level:</td>
<td>{{ profile.level }}</td>
</tr>
<tr>
<td>Rating:</td>
<td>
<span class="{% if profile.playerRating >= 1600 %}rainbow{% elif profile.playerRating < 1600 and profile.playerRating >= 1525 %}platinum{% elif profile.playerRating < 1525 and profile.playerRating >=1500 %}platinum{% endif %}">
{{ profile.playerRating|float/100 }}
</span>
<span>
(highest: {{ profile.highestRating|float/100 }})
</span>
</td>
</tr>
<tr>
<td>Over Power:</td>
<td>{{ profile.overPowerPoint|float/100 }}({{ profile.overPowerRate|float/100 }})</td>
</tr>
<tr>
<td>Current Point:</td>
<td>{{ profile.point }}</td>
</tr>
<tr>
<td>Total Point:</td>
<td>{{ profile.totalPoint }}</td>
</tr>
<tr>
<td>Play Counts:</td>
<td>{{ profile.playCount }}</td>
</tr>
<tr>
<td>Last Play Date:</td>
<td>{{ profile.lastPlayDate }}</td>
</tr>
</table>
</div>
</div>
<div class="col-lg-8 m-auto mt-3">
<div class="card bg-card rounded">
<table class="table-large table-rowdistinct">
<caption align="top">SCORE</caption>
<tr>
<td>Total High Score:</td>
<td>{{ profile.totalHiScore }}</td>
</tr>
<tr>
<td>Total Basic High Score:</td>
<td>{{ profile.totalBasicHighScore }}</td>
</tr>
<tr>
<td>Total Advanced High Score:</td>
<td>{{ profile.totalAdvancedHighScore }}</td>
</tr>
<tr>
<td>Total Expert High Score:</td>
<td>{{ profile.totalExpertHighScore }}</td>
</tr>
<tr>
<td>Total Master High Score:</td>
<td>{{ profile.totalMasterHighScore }}</td>
</tr>
<tr>
<td>Total Ultima High Score:</td>
<td>{{ profile.totalUltimaHighScore }}</td>
</tr>
</table>
</div>
</div>
</div>
{% if error is defined %}
{% include "core/templates/widgets/err_banner.jinja" %}
{% endif %}
{% elif sesh is defined and sesh is not none and sesh.user_id > 0 %}
No profile information found for this account.
{% else %}
Login to view profile information.
{% endif %}
</div>
<div class="modal fade" id="name_change" tabindex="-1" aria-labelledby="name_change_label" data-bs-theme="dark"
aria-hidden="true">
<div class="modal-dialog modal-dialog-centered">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Name change</h5>
</div>
<div class="modal-body">
<form id="new_name_form" action="/game/chuni/update.name" method="post" style="outline: 0;">
<label class="form-label" for="new_name">new name:</label>
<input class="form-control" aria-describedby="newNameHelp" form="new_name_form" id="new_name"
name="new_name" maxlength="14" type="text" required>
<div id="newNameHelp" class="form-text">name must be full-width character string.
</div>
</form>
</div>
<div class="modal-footer">
<input type="submit" class="btn btn-primary" form="new_name_form" value="Submit">
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<script>
function changeVersion(sel) {
$.post("/game/chuni/version.change", { version: sel.value })
.done(function (data) {
location.reload();
})
.fail(function () {
alert("Failed to update version.");
});
}
}
</script>
@ -40,4 +170,4 @@ No profile information found for this account.
{% else %}
Login to view profile information.
{% endif %}
{% endblock content %}

View File

@ -1,13 +1,10 @@
from datetime import date, datetime, timedelta
from typing import Any, Dict, List
import json
import logging
from enum import Enum
from datetime import datetime, timedelta
from typing import Dict
import pytz
import core.logger
from core.config import CoreConfig
from core.utils import Utils
from core.data.cache import cached
from titles.cm.const import CardMakerConstants
from titles.cm.config import CardMakerConfig
@ -21,7 +18,7 @@ class CardMakerBase:
"%Y-%m-%d %H:%M:%S.%f" # needs to be lopped off at [:-5]
)
self.date_time_format_short = "%Y-%m-%d"
self.logger = logging.getLogger("cardmaker")
self.logger = core.logger.create_logger("Card Maker", game_cfg.server.loglevel, logger_name="cardmaker")
self.game = CardMakerConstants.GAME_CODE
self.version = CardMakerConstants.VER_CARD_MAKER
@ -51,7 +48,7 @@ class CardMakerBase:
{
"modelKind": 1,
"type": 1,
"titleUri": f"{uri}/{self._parse_int_ver(games_ver['maimai'])}/Maimai2Servlet/",
"titleUri": f"{uri}/SDEZ/{self._parse_int_ver(games_ver['maimai'])}/Maimai2Servlet/",
},
# ONGEKI
{

View File

@ -1,7 +1,6 @@
from typing import Dict
from core.config import CoreConfig
from core.data.cache import cached
from titles.cm.base import CardMakerBase
from titles.cm.const import CardMakerConstants
from titles.cm.config import CardMakerConfig

View File

@ -2,16 +2,14 @@ import json
import inflection
import yaml
import string
import logging
import coloredlogs
import zlib
from starlette.routing import Route
from starlette.responses import Response
from starlette.requests import Request
from os import path
from typing import List
from logging.handlers import TimedRotatingFileHandler
import core.logger
from core.config import CoreConfig
from core.utils import Utils
from core.title import BaseServlet
@ -34,27 +32,10 @@ class CardMakerServlet(BaseServlet):
CardMaker135(core_cfg, self.game_cfg)
]
self.logger = logging.getLogger("cardmaker")
log_fmt_str = "[%(asctime)s] Card Maker | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.core_cfg.server.log_dir, "cardmaker"),
encoding="utf8",
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(self.game_cfg.server.loglevel)
coloredlogs.install(
level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
self.logger = core.logger.create_logger(
"Card Maker",
self.game_cfg.server.loglevel,
logger_name="cardmaker",
)
@classmethod
@ -99,7 +80,7 @@ class CardMakerServlet(BaseServlet):
unzip = zlib.decompress(req_raw)
except zlib.error as e:
self.logger.error(
self.logger.warning(
f"Failed to decompress v{version} {endpoint} request -> {e}"
)
return Response(zlib.compress(b'{"stat": "0"}'))

View File

@ -1,17 +1,16 @@
from decimal import Decimal
import logging
from io import IOBase
import os
import re
import csv
import xml.etree.ElementTree as ET
from typing import Any, Dict, List, Optional
from typing import Optional
from construct import Array, Int32sl, Int64sl, Struct, Int32ul, Const
from read import BaseReader
from core.config import CoreConfig
from titles.ongeki.database import OngekiData
from titles.cm.const import CardMakerConstants
from titles.ongeki.const import OngekiConstants
from titles.ongeki.config import OngekiConfig
from titles.mai2.database import Mai2Data
from titles.mai2.const import Mai2Constants
from titles.chuni.database import ChuniData
@ -63,6 +62,9 @@ class CardMakerReader(BaseReader):
await self.read_chuni_gacha(f"{data_dir}/CHU/Data/A000/gacha")
await self.read_mai2_card(f"{data_dir}/MAI/Data/A000/card")
await self.read_ongeki_pack(f"{data_dir}/MU3/Data/A000.pac")
for file, func in static_datas.items():
if os.path.exists(f"{self.bin_dir}/MU3/{file}"):
read_csv = getattr(CardMakerReader, func)
@ -75,8 +77,6 @@ class CardMakerReader(BaseReader):
if self.opt_dir is not None:
data_dirs = self.get_data_directories(self.opt_dir)
# ONGEKI (MU3) cannot easily access the bin data (A000.pac)
# so only opt_dir will work for now
for dir in data_dirs:
await self.read_chuni_card(f"{dir}/CHU/card")
await self.read_chuni_gacha(f"{dir}/CHU/gacha")
@ -87,6 +87,7 @@ class CardMakerReader(BaseReader):
self.logger.info(f"Reading cards from {base_dir}...")
version_ids = {
"v1_50": ChuniConstants.VER_CHUNITHM_PARADISE,
"v2_00": ChuniConstants.VER_CHUNITHM_NEW,
"v2_05": ChuniConstants.VER_CHUNITHM_NEW_PLUS,
"v2_10": ChuniConstants.VER_CHUNITHM_SUN,
@ -135,6 +136,7 @@ class CardMakerReader(BaseReader):
self.logger.info(f"Reading gachas from {base_dir}...")
version_ids = {
"v1_50": ChuniConstants.VER_CHUNITHM_PARADISE,
"v2_00": ChuniConstants.VER_CHUNITHM_NEW,
"v2_05": ChuniConstants.VER_CHUNITHM_NEW_PLUS,
"v2_10": ChuniConstants.VER_CHUNITHM_SUN,
@ -217,6 +219,11 @@ class CardMakerReader(BaseReader):
name = troot.find("name").find("str").text
card_id = int(troot.find("name").find("id").text)
version = troot.find("enableVersion").find("str").text
if version not in version_ids:
self.logger.warning(f"Card {card_id} is on an unsupported version {version}.")
continue
version = version_ids[
troot.find("enableVersion").find("str").text
@ -266,9 +273,7 @@ class CardMakerReader(BaseReader):
self.logger.info(f"Added ongeki card {row['cardId']} to gacha")
async def read_ongeki_gacha(self, base_dir: str) -> None:
self.logger.info(f"Reading gachas from {base_dir}...")
async def read_ongeki_gacha_xml(self, troot: ET.Element):
# assuming some GachaKinds based on the GachaType
type_to_kind = {
"Normal": "Normal",
@ -278,49 +283,161 @@ class CardMakerReader(BaseReader):
"FreeSR": "Free",
}
for root, dirs, files in os.walk(base_dir):
name = troot.find("Name").find("str").text
gacha_id = int(troot.find("Name").find("id").text)
# skip already existing gachas
if (
await self.ongeki_data.static.get_gacha(
OngekiConstants.VER_ONGEKI_BRIGHT_MEMORY, gacha_id
)
is not None
):
self.logger.info(f"Gacha {gacha_id} already added, skipping")
return
# 1140 is the first bright memory gacha
if gacha_id < 1140:
version = OngekiConstants.VER_ONGEKI_BRIGHT
else:
version = OngekiConstants.VER_ONGEKI_BRIGHT_MEMORY
gacha_kind = OngekiConstants.CM_GACHA_KINDS[
type_to_kind[troot.find("Type").text]
].value
# hardcode which gachas get "Select Gacha" with 33 points
is_ceiling, max_select_point = 0, 0
if gacha_id in {1163, 1164, 1165, 1166, 1167, 1168}:
is_ceiling = 1
max_select_point = 33
await self.ongeki_data.static.put_gacha(
version,
gacha_id,
name,
gacha_kind,
isCeiling=is_ceiling,
maxSelectPoint=max_select_point,
)
self.logger.info(f"Added ongeki gacha {gacha_id}")
async def read_ongeki_gacha(self, base_dir: str) -> None:
self.logger.info(f"Reading gachas from {base_dir}...")
for root, dirs, _ in os.walk(base_dir):
for dir in dirs:
if os.path.exists(f"{root}/{dir}/Gacha.xml"):
with open(f"{root}/{dir}/Gacha.xml", "r", encoding="utf-8") as f:
troot = ET.fromstring(f.read())
await self.read_ongeki_gacha_xml(troot)
name = troot.find("Name").find("str").text
gacha_id = int(troot.find("Name").find("id").text)
async def read_ongeki_pack(self, packfile: str):
if not os.path.exists(packfile):
self.logger.warning(f"O.N.G.E.K.I. VFS file {packfile} not found.")
return
# skip already existing gachas
if (
await self.ongeki_data.static.get_gacha(
OngekiConstants.VER_ONGEKI_BRIGHT_MEMORY, gacha_id
)
is not None
):
self.logger.info(
f"Gacha {gacha_id} already added, skipping"
)
continue
with open(packfile, "rb") as f:
# First, read the header to know how much data is metadata
header = VFSHeader.parse(f.read(VFSHeader.sizeof()))
meta_size = header.num_entries * VFSFileData.sizeof() + VFSHeader.sizeof()
# 1140 is the first bright memory gacha
if gacha_id < 1140:
version = OngekiConstants.VER_ONGEKI_BRIGHT
else:
version = OngekiConstants.VER_ONGEKI_BRIGHT_MEMORY
# Now that we know, read the entire metadata in.
f.seek(0, os.SEEK_SET)
metadata = VFSMetadata.parse(f.read(meta_size))
gacha_kind = OngekiConstants.CM_GACHA_KINDS[
type_to_kind[troot.find("Type").text]
].value
# Read the string table, which contains the file names.
f.seek(header.offset_string, os.SEEK_SET)
string_table = f.read(header.offset_data - header.offset_string).decode(
"utf-16-le"
)
# hardcode which gachas get "Select Gacha" with 33 points
is_ceiling, max_select_point = 0, 0
if gacha_id in {1163, 1164, 1165, 1166, 1167, 1168}:
is_ceiling = 1
max_select_point = 33
# Now that we have everything, time to build the VFS tree. The first item in the tree should be the root.
tree = VFSDirectory(metadata, string_table, 0)
gachas = tree.directories["gacha"]
await self.ongeki_data.static.put_gacha(
version,
gacha_id,
name,
gacha_kind,
isCeiling=is_ceiling,
maxSelectPoint=max_select_point,
)
self.logger.info(f"Added ongeki gacha {gacha_id}")
for gacha in gachas.directories.values():
troot = ET.fromstring(
gacha.files["Gacha.xml"].read_from_file(f).decode("utf-8")
)
await self.read_ongeki_gacha_xml(troot)
# For whatever reason, CardMaker uses a packed filesystem for O.N.G.E.K.I. A000,
# whose data structures are the 3 structs below.
VFSHeader = Struct(
"magic" / Const(0x4B434150, Int32ul),  # b"PACK" read as a little-endian uint32
"reserved" / Int32ul,
"num_entries" / Int64sl,
"offset_string" / Int64sl * "offset for the string table which contains all filenames",
"offset_data" / Int64sl * "offset of actual file data",
)
VFSFileData = Struct(
"type" / Int32sl * "0 for file, 1 for directory",
"flags" / Int32sl * "doesn't seem to be used",
"name_offset" / Int32sl * "offset of the filename in the string table",
"name_length" / Int32sl * "length of the filename",
"data_length" / Int32sl * "length of file data",
"data_offset" / Int64sl * "offset of file data starting from offset_data",
)
VFSMetadata = Struct(
"header" / VFSHeader,
"files" / Array(lambda ctx: ctx.header.num_entries, VFSFileData),
)
class VFSFile:
# vfs is an instance of VFSMetadata, Python type system sucks.
def __init__(self, vfs, string_table: str, file_id: int) -> None:
metadata = vfs.files[file_id]
if metadata.type != 0:
raise ValueError(
f"File ID {file_id} is not a file (found type {metadata.type})"
)
self.name = string_table[
metadata.name_offset : metadata.name_offset + metadata.name_length
]
self.offset = vfs.header.offset_data + metadata.data_offset
self.length = metadata.data_length
def read_from_file(self, f: IOBase):
if not f.seekable():
raise ValueError("Received an IO object that could not seek.")
f.seek(self.offset, os.SEEK_SET)
return f.read(self.length)
class VFSDirectory:
def __init__(self, vfs, string_table: str, file_id: int) -> None:
metadata = vfs.files[file_id]
if metadata.type != 1:
raise ValueError(
f"File ID {file_id} is not a directory (found type {metadata.type})"
)
self.name = string_table[
metadata.name_offset : metadata.name_offset + metadata.name_length
]
self.files: dict[str, VFSFile] = {}
self.directories: dict[str, VFSDirectory] = {}
for i in range(metadata.data_length):
child_id = metadata.data_offset + i
child_metadata = vfs.files[child_id]
if child_metadata.type == 0:
f = VFSFile(vfs, string_table, child_id)
self.files[f.name] = f
elif child_metadata.type == 1:
d = VFSDirectory(vfs, string_table, child_id)
self.directories[d.name] = d
else:
raise ValueError(
f"File ID {child_id} has invalid file type {child_metadata.type} (expected 0 or 1)"
)
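read_ongeki_pack above already walks A000.pac end to end; as a compact reference for how the three structs and two classes fit together, here is a minimal standalone sketch of the same flow (header, then metadata, then string table, then tree). The function name and pack path are placeholders, and the sketch reuses the VFS definitions and the os import from this file.

# Minimal sketch reusing the VFS definitions above: list every Gacha.xml packed
# inside a Card Maker A000.pac. "dump_gacha_entries" is a placeholder name.
def dump_gacha_entries(packfile: str) -> None:
    with open(packfile, "rb") as f:
        # The header says how many entries exist and where the tables live.
        header = VFSHeader.parse(f.read(VFSHeader.sizeof()))
        meta_size = header.num_entries * VFSFileData.sizeof() + VFSHeader.sizeof()

        # Re-read the whole metadata block, then the UTF-16 string table.
        f.seek(0, os.SEEK_SET)
        metadata = VFSMetadata.parse(f.read(meta_size))
        f.seek(header.offset_string, os.SEEK_SET)
        string_table = f.read(header.offset_data - header.offset_string).decode("utf-16-le")

        # Entry 0 is the root directory; gacha XMLs sit under gacha/<id>/Gacha.xml.
        root = VFSDirectory(metadata, string_table, 0)
        for name, gacha_dir in root.directories["gacha"].directories.items():
            xml_bytes = gacha_dir.files["Gacha.xml"].read_from_file(f)
            print(name, len(xml_bytes), "bytes")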

View File

@ -1,10 +1,9 @@
import logging
import json
from decimal import Decimal
from base64 import b64encode
from typing import Any, Dict, List
from os import path
import core.logger
from core.config import CoreConfig
from .config import CxbConfig
from .const import CxbConstants
@ -18,7 +17,7 @@ class CxbBase:
self.game_config = game_cfg
self.data = CxbData(cfg) # Database
self.game = CxbConstants.GAME_CODE
self.logger = logging.getLogger("cxb")
self.logger = self.logger = core.logger.create_logger("CXB")
self.version = CxbConstants.VER_CROSSBEATS_REV
def _get_data_contents(self, folder: str, filetype: str, encoding: str = None, subfolder: str = "") -> List[str]:

View File

@ -7,11 +7,11 @@ import yaml
import json
import re
import inflection
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
import logging
from typing import Dict, Tuple, List
from os import path
import core.logger
from core.config import CoreConfig
from core.title import BaseServlet, JSONResponseNoASCII
from core.utils import Utils
@ -33,31 +33,7 @@ class CxbServlet(BaseServlet):
yaml.safe_load(open(f"{cfg_dir}/{CxbConstants.CONFIG_NAME}"))
)
self.logger = logging.getLogger("cxb")
if not hasattr(self.logger, "inited"):
log_fmt_str = "[%(asctime)s] CXB | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"),
encoding="utf8",
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(self.game_cfg.server.loglevel)
coloredlogs.install(
level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger.inited = True
self.logger = core.logger.create_logger("CXB", self.game_cfg.server.loglevel)
self.versions = [
CxbRev(core_cfg, self.game_cfg),
CxbRevSunriseS1(core_cfg, self.game_cfg),
@ -196,7 +172,7 @@ class CxbServlet(BaseServlet):
if self.logger.level == logging.DEBUG:
tp, val, tb = sys.exc_info()
traceback.print_exception(tp, val, tb, limit=1)
with open("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), "a") as f:
with open("{0}/{1}.log".format(self.core_cfg.logging.log_dir, "cxb"), "a") as f:
traceback.print_exception(tp, val, tb, limit=1, file=f)
return Response()
@ -225,7 +201,7 @@ class CxbServlet(BaseServlet):
if self.logger.level == logging.DEBUG:
tp, val, tb = sys.exc_info()
traceback.print_exception(tp, val, tb, limit=1)
with open("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), "a") as f:
with open("{0}/{1}.log".format(self.core_cfg.logging.log_dir, "cxb"), "a") as f:
traceback.print_exception(tp, val, tb, limit=1, file=f)
return Response()
@ -254,7 +230,7 @@ class CxbServlet(BaseServlet):
if self.logger.level == logging.DEBUG:
tp, val, tb = sys.exc_info()
traceback.print_exception(tp, val, tb, limit=1)
with open("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), "a") as f:
with open("{0}/{1}.log".format(self.core_cfg.logging.log_dir, "cxb"), "a") as f:
traceback.print_exception(tp, val, tb, limit=1, file=f)
return Response()

View File

@ -1,12 +1,9 @@
import json
from decimal import Decimal
from base64 import b64encode
from typing import Any, Dict
from hashlib import md5
from datetime import datetime
from typing import Dict
from core.config import CoreConfig
from core.data import Data, cached
from core.data import cached
from .config import CxbConfig
from .base import CxbBase
from .const import CxbConstants
@ -40,6 +37,7 @@ class CxbRev(CxbBase):
score_data["slow2"],
score_data["fail"],
score_data["combo"],
score_data["grade"],
)
return {"data": True}
return {"data": True}

View File

@ -1,12 +1,7 @@
import json
from decimal import Decimal
from base64 import b64encode
from typing import Any, Dict
from hashlib import md5
from datetime import datetime
from typing import Dict
from core.config import CoreConfig
from core.data import Data, cached
from core.data import cached
from .config import CxbConfig
from .base import CxbBase
from .const import CxbConstants

View File

@ -1,12 +1,7 @@
import json
from decimal import Decimal
from base64 import b64encode
from typing import Any, Dict
from hashlib import md5
from datetime import datetime
from typing import Dict
from core.config import CoreConfig
from core.data import Data, cached
from core.data import cached
from .config import CxbConfig
from .base import CxbBase
from .const import CxbConstants

View File

@ -1,8 +1,7 @@
from typing import Optional, Dict, List
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_, case
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean
from typing import Optional, Dict
from sqlalchemy import Table, Column, UniqueConstraint, and_
from sqlalchemy.types import Integer
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func
from sqlalchemy.dialects.mysql import insert
from core.data.schema import BaseData, metadata

View File

@ -1,8 +1,7 @@
from typing import Optional, Dict, List
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
from sqlalchemy import Table, Column, UniqueConstraint, and_
from sqlalchemy.types import Integer, JSON
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from core.data.schema import BaseData, metadata

View File

@ -1,12 +1,11 @@
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, JSON, Boolean
from sqlalchemy import Table, Column, UniqueConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, JSON
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func
from sqlalchemy.dialects.mysql import insert
from typing import Optional, List, Dict, Any
from typing import Optional, List, Dict
from core.data.schema import BaseData, metadata
from core.data import cached
score = Table(
"cxb_score",
@ -39,6 +38,7 @@ playlog = Table(
Column("slow2", Integer),
Column("fail", Integer),
Column("combo", Integer),
Column("grade", Integer),
Column("date_scored", TIMESTAMP, server_default=func.now()),
mysql_charset="utf8mb4",
)
@ -104,6 +104,7 @@ class CxbScoreData(BaseData):
this_slow2: int,
fail: int,
combo: int,
grade: int,
) -> Optional[int]:
"""
Add an entry to the user's play log
@ -123,6 +124,7 @@ class CxbScoreData(BaseData):
slow2=this_slow2,
fail=fail,
combo=combo,
grade=grade,
)
result = await self.execute(sql)

View File

@ -1,10 +1,8 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
from sqlalchemy.engine.base import Connection
from typing import List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, and_
from sqlalchemy.types import Integer, String, Float
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.sql import select
from sqlalchemy.dialects.mysql import insert
from core.data.schema import BaseData, metadata

View File

@ -2,8 +2,10 @@ from titles.diva.index import DivaServlet
from titles.diva.const import DivaConstants
from titles.diva.database import DivaData
from titles.diva.read import DivaReader
from .frontend import DivaFrontend
index = DivaServlet
database = DivaData
reader = DivaReader
frontend = DivaFrontend
game_codes = [DivaConstants.GAME_CODE]

View File

@ -1,9 +1,9 @@
import datetime
from typing import Dict
import logging
import urllib.parse
from threading import Thread
import core.logger
from core.config import CoreConfig
from titles.diva.config import DivaConfig
from titles.diva.const import DivaConstants
@ -16,7 +16,7 @@ class DivaBase:
self.game_config = game_cfg
self.data = DivaData(cfg) # Database
self.date_time_format = "%Y-%m-%d %H:%M:%S"
self.logger = logging.getLogger("diva")
self.logger = core.logger.create_logger("Diva", game_cfg.server.loglevel)
self.game = DivaConstants.GAME_CODE
self.version = DivaConstants.VER_PROJECT_DIVA_ARCADE_FUTURE_TONE
@ -431,7 +431,7 @@ class DivaBase:
profile = await self.data.profile.get_profile(data["pd_id"], self.version)
profile_shop = await self.data.item.get_shop(data["pd_id"], self.version)
if profile is None:
return
return {}
mdl_have = "F" * 250
# generate the mdl_have string if "unlock_all_modules" is disabled

titles/diva/frontend.py (new file, 182 lines)
View File

@ -0,0 +1,182 @@
from typing import List
from starlette.routing import Route, Mount
from starlette.requests import Request
from starlette.responses import Response, RedirectResponse
from os import path
import yaml
import jinja2
from core.frontend import FE_Base, UserSession
from core.config import CoreConfig
from .database import DivaData
from .config import DivaConfig
from .const import DivaConstants
class DivaFrontend(FE_Base):
def __init__(
self, cfg: CoreConfig, environment: jinja2.Environment, cfg_dir: str
) -> None:
super().__init__(cfg, environment)
self.data = DivaData(cfg)
self.game_cfg = DivaConfig()
if path.exists(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}"):
self.game_cfg.update(
yaml.safe_load(open(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}"))
)
self.nav_name = "diva"
def get_routes(self) -> List[Route]:
return [
Route("/", self.render_GET, methods=['GET']),
Mount("/playlog", routes=[
Route("/", self.render_GET_playlog, methods=['GET']),
Route("/{index}", self.render_GET_playlog, methods=['GET']),
]),
Route("/update.name", self.update_name, methods=['POST']),
Route("/update.lv", self.update_lv, methods=['POST']),
]
async def render_GET(self, request: Request) -> bytes:
template = self.environment.get_template(
"titles/diva/templates/diva_index.jinja"
)
usr_sesh = self.validate_session(request)
if not usr_sesh:
usr_sesh = UserSession()
if usr_sesh.user_id > 0:
profile = await self.data.profile.get_profile(usr_sesh.user_id, 1)
resp = Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
user_id=usr_sesh.user_id,
profile=profile
), media_type="text/html; charset=utf-8")
return resp
else:
return RedirectResponse("/gate")
async def render_GET_playlog(self, request: Request) -> bytes:
template = self.environment.get_template(
"titles/diva/templates/diva_playlog.jinja"
)
usr_sesh = self.validate_session(request)
if not usr_sesh:
usr_sesh = UserSession()
if usr_sesh.user_id > 0:
path_index = request.path_params.get("index")
if not path_index or int(path_index) < 1:
index = 0
else:
index = int(path_index) - 1 # 0 and 1 are 1st page
user_id = usr_sesh.user_id
playlog_count = await self.data.score.get_user_playlogs_count(user_id)
if playlog_count < index * 20:
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
score_count=0
), media_type="text/html; charset=utf-8")
playlog = await self.data.score.get_playlogs(user_id, index, 20) #Maybe change to the playlog instead of direct scores
playlog_with_title = []
for record in playlog:
song = await self.data.static.get_music_chart(record[2], record[3], record[4])
if song:
title = song.title
vocaloid_arranger = song.vocaloid_arranger
else:
title = "Unknown"
vocaloid_arranger = "Unknown"
playlog_with_title.append({
"raw": record,
"title": title,
"vocaloid_arranger": vocaloid_arranger
})
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
user_id=usr_sesh.user_id,
playlog=playlog_with_title,
playlog_count=playlog_count
), media_type="text/html; charset=utf-8")
else:
return RedirectResponse("/gate/", 300)
async def update_name(self, request: Request) -> Response:
usr_sesh = self.validate_session(request)
if not usr_sesh:
return RedirectResponse("/gate")
form_data = await request.form()
new_name: str = form_data.get("new_name")
new_name_full = ""
if not new_name:
return RedirectResponse("/gate/?e=4", 303)
if len(new_name) > 8:
return RedirectResponse("/gate/?e=8", 303)
for x in new_name: # FIXME: This will let some invalid characters through atm
o = ord(x)
try:
if o == 0x20:
new_name_full += chr(0x3000)
elif o < 0x7F and o > 0x20:
new_name_full += chr(o + 0xFEE0)
elif o <= 0x7F:
self.logger.warn(f"Invalid ascii character {o:02X}")
return RedirectResponse("/gate/?e=4", 303)
else:
new_name_full += x
except Exception as e:
self.logger.error(f"Something went wrong parsing character {o:04X} - {e}")
return RedirectResponse("/gate/?e=4", 303)
if not await self.data.profile.update_profile(usr_sesh.user_id, player_name=new_name_full):
return RedirectResponse("/gate/?e=999", 303)
return RedirectResponse("/game/diva", 303)
async def update_lv(self, request: Request) -> Response:
usr_sesh = self.validate_session(request)
if not usr_sesh:
return RedirectResponse("/gate")
form_data = await request.form()
new_lv: str = form_data.get("new_lv")
new_lv_full = ""
if not new_lv:
return RedirectResponse("/gate/?e=4", 303)
if len(new_lv) > 8:
return RedirectResponse("/gate/?e=8", 303)
for x in new_lv: # FIXME: This will let some invalid characters through atm
o = ord(x)
try:
if o == 0x20:
new_lv_full += chr(0x3000)
elif o < 0x7F and o > 0x20:
new_lv_full += chr(o + 0xFEE0)
elif o <= 0x7F:
self.logger.warn(f"Invalid ascii character {o:02X}")
return RedirectResponse("/gate/?e=4", 303)
else:
new_lv_full += x
except Exception as e:
self.logger.error(f"Something went wrong parsing character {o:04X} - {e}")
return RedirectResponse("/gate/?e=4", 303)
if not await self.data.profile.update_profile(usr_sesh.user_id, lv_str=new_lv_full):
return RedirectResponse("/gate/?e=999", 303)
return RedirectResponse("/game/diva", 303)

View File

@ -2,8 +2,6 @@ from starlette.requests import Request
from starlette.responses import PlainTextResponse
from starlette.routing import Route
import yaml
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
import zlib
import json
import urllib.parse
@ -11,6 +9,7 @@ import base64
from os import path
from typing import Tuple, Dict, List
import core.logger
from core.config import CoreConfig
from core.title import BaseServlet
from core.utils import Utils
@ -29,29 +28,7 @@ class DivaServlet(BaseServlet):
)
self.base = DivaBase(core_cfg, self.game_cfg)
self.logger = logging.getLogger("diva")
log_fmt_str = "[%(asctime)s] Diva | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.core_cfg.server.log_dir, "diva"),
encoding="utf8",
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(self.game_cfg.server.loglevel)
coloredlogs.install(
level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger = core.logger.create_logger("Diva", self.game_cfg.server.loglevel)
def get_routes(self) -> List[Route]:
return [
@ -79,7 +56,7 @@ class DivaServlet(BaseServlet):
return True
async def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes:
async def render_POST(self, request: Request) -> bytes:
req_raw = await request.body()
url_header = request.headers
@ -98,8 +75,17 @@ class DivaServlet(BaseServlet):
self.logger.info(f"Binary {bin_req_data['cmd']} Request")
self.logger.debug(bin_req_data)
handler = getattr(self.base, f"handle_{bin_req_data['cmd']}_request")
resp = handler(bin_req_data)
try:
handler = getattr(self.base, f"handle_{bin_req_data['cmd']}_request")
resp = handler(bin_req_data)
except AttributeError as e:
self.logger.warning(f"Unhandled {bin_req_data['cmd']} request {e}")
return PlainTextResponse(f"cmd={bin_req_data['cmd']}&req_id={bin_req_data['req_id']}&stat=ok")
except Exception as e:
self.logger.error(f"Error handling method {e}")
return PlainTextResponse(f"cmd={bin_req_data['cmd']}&req_id={bin_req_data['req_id']}&stat=ok")
self.logger.debug(
f"Response cmd={bin_req_data['cmd']}&req_id={bin_req_data['req_id']}&stat=ok{resp}"

View File

@ -183,7 +183,11 @@ class DivaReader(BaseReader):
pv_list[pv_id] = self.add_branch(pv_list[pv_id], key_args, val)
for pv_id, pv_data in pv_list.items():
song_id = int(pv_id.split("_")[1])
try:
song_id = int(pv_id.split("_")[1])
except ValueError:
self.logger.error(f"Invalid song ID format: {pv_id}")
continue
if "songinfo" not in pv_data:
continue
if "illustrator" not in pv_data["songinfo"]:

View File

@ -54,7 +54,7 @@ class DivaCustomizeItemData(BaseData):
Given a game version and an aime id, return the cstmz_itm_have hex string
required for diva directly
"""
items_list = self.get_customize_items(aime_id, version)
items_list = await self.get_customize_items(aime_id, version)
if items_list is None:
items_list = []
item_have = 0

View File

@ -1,8 +1,7 @@
from typing import Optional, Dict, List
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_, case
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean
from sqlalchemy import Table, Column, UniqueConstraint, and_
from sqlalchemy.types import Integer, String
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func
from sqlalchemy.dialects.mysql import insert
from core.data.schema import BaseData, metadata

View File

@ -50,7 +50,7 @@ class DivaModuleData(BaseData):
Given a game version and an aime id, return the mdl_have hex string
required for diva directly
"""
module_list = self.get_modules(aime_id, version)
module_list = await self.get_modules(aime_id, version)
if module_list is None:
module_list = []
module_have = 0

View File

@ -1,8 +1,7 @@
from typing import Optional, Dict, List
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
from sqlalchemy import Table, Column, UniqueConstraint, and_
from sqlalchemy.types import Integer, String, Boolean
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from core.data.schema import BaseData, metadata
@ -90,7 +89,7 @@ class DivaProfileData(BaseData):
return None
return result.lastrowid
async def update_profile(self, aime_id: int, **profile_args) -> None:
async def update_profile(self, aime_id: int, **profile_args) -> bool:
"""
Given an aime_id update the profile corresponding to the arguments
which are the diva_profile Columns
@ -102,7 +101,9 @@ class DivaProfileData(BaseData):
self.logger.error(
f"update_profile: failed to update profile! profile: {aime_id}"
)
return None
return False
return True
async def get_profile(self, aime_id: int, version: int) -> Optional[List[Dict]]:
"""

View File

@ -1,13 +1,12 @@
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, JSON, Boolean
from sqlalchemy import Table, Column, UniqueConstraint, and_
from sqlalchemy.types import Integer, TIMESTAMP
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
from typing import Optional, List, Dict, Any
from typing import Optional, List
from core.data.schema import BaseData, metadata
from core.data import cached
score = Table(
"diva_score",
@ -239,3 +238,23 @@ class DivaScoreData(BaseData):
if result is None:
return None
return result.fetchall()
async def get_playlogs(self, aime_id: int, idx: int = 0, limit: int = 0) -> Optional[List[Row]]:
sql = select(playlog).where(playlog.c.user == aime_id).order_by(playlog.c.date_scored.desc())
if limit:
sql = sql.limit(limit)
if idx:
sql = sql.offset(idx)
result = await self.execute(sql)
if result:
return result.fetchall()
async def get_user_playlogs_count(self, aime_id: int) -> Optional[int]:
sql = select(func.count()).where(playlog.c.user == aime_id)
result = await self.execute(sql)
if result is None:
self.logger.warning(f"aimu_id {aime_id} has no scores ")
return None
return result.scalar()
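get_playlogs applies its idx argument directly as a SQL OFFSET and limit as a LIMIT, so a caller that wants fixed-size pages has to turn the page number into a row offset itself. A minimal sketch of that call pattern follows; the helper name and the 20-rows-per-page size are assumptions taken from the playlog frontend above.

# Sketch: fetch one 20-row page of a user's play log (page is 1-based).
PAGE_SIZE = 20

async def fetch_playlog_page(score_data, aime_id: int, page: int):
    total = await score_data.get_user_playlogs_count(aime_id) or 0
    offset = (page - 1) * PAGE_SIZE
    if offset >= total:
        return [], total
    rows = await score_data.get_playlogs(aime_id, offset, PAGE_SIZE)
    return rows or [], total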

View File

@ -1,10 +1,8 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
from sqlalchemy.engine.base import Connection
from typing import List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, and_
from sqlalchemy.types import Integer, String, Boolean, Float
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.sql import select
from sqlalchemy.dialects.mysql import insert
from core.data.schema import BaseData, metadata

View File

@ -0,0 +1,195 @@
.diva-header {
text-align: center;
}
ul.diva-navi {
list-style-type: none;
padding: 0;
overflow: hidden;
background-color: #333;
text-align: center;
display: inline-block;
}
ul.diva-navi li {
display: inline-block;
}
ul.diva-navi li a {
display: block;
color: white;
text-align: center;
padding: 14px 16px;
text-decoration: none;
}
ul.diva-navi li a:hover:not(.active) {
background-color: #111;
}
ul.diva-navi li a.active {
background-color: #4CAF50;
}
ul.diva-navi li.right {
float: right;
}
@media screen and (max-width: 600px) {
ul.diva-navi li.right,
ul.diva-navi li {
float: none;
display: block;
text-align: center;
}
}
table {
border-spacing: 0;
border-collapse: separate;
overflow: hidden;
background-color: #555555;
}
th, td {
text-align: left;
border: none;
}
th {
color: white;
}
.table-rowdistinct tr:nth-child(even) {
background-color: #303030;
}
.table-rowdistinct tr:nth-child(odd) {
background-color: #555555;
}
caption {
text-align: center;
color: white;
font-size: 18px;
font-weight: bold;
}
.table-large {
margin: 16px;
}
.table-large th,
.table-large td {
padding: 8px;
}
.table-small {
width: 100%;
margin: 4px;
}
.table-small th,
.table-small td {
padding: 2px;
}
.bg-card {
background-color: #555555;
}
.card-hover {
transition: all 0.2s ease-in-out;
}
.card-hover:hover {
transform: scale(1.02);
}
.basic {
color: #28a745;
font-weight: bold;
}
.hard {
color: #ffc107;
font-weight: bold;
}
.expert {
color: #dc3545;
font-weight: bold;
}
.master {
color: #dd09e8;
font-weight: bold;
}
.ultimate {
color: #000000;
font-weight: bold;
}
.score {
color: #ffffff;
font-weight: bold;
}
.rainbow {
background: linear-gradient(to right, red, yellow, lime, aqua, blue, fuchsia) 0 / 5em;
background-clip: text;
-webkit-background-clip: text;
-webkit-text-fill-color: transparent;
font-weight: bold;
}
.platinum {
color: #FFFF00;
font-weight: bold;
}
.gold {
color: #FFFF00;
font-weight: bold;
}
.scrolling-text {
overflow: hidden;
}
.scrolling-text p {
white-space: nowrap;
display: inline-block;
}
.scrolling-text h6 {
white-space: nowrap;
display: inline-block;
}
.scrolling-text h5 {
white-space: nowrap;
display: inline-block;
}
.scrolling {
animation: scroll 10s linear infinite;
}
@keyframes scroll {
0% {
transform: translateX(100%);
}
100% {
transform: translateX(-100%);
}
}

View File

@ -0,0 +1,17 @@
<div class="diva-header">
<h1>diva</h1>
<ul class="diva-navi">
<li><a class="nav-link" href="/game/diva/">PROFILE</a></li>
<li><a class="nav-link" href="/game/diva/playlog/">RECORD</a></li>
</ul>
</div>
<script>
$(document).ready(function () {
var currentPath = window.location.pathname;
if (currentPath === '/game/diva/') {
$('.nav-link[href="/game/diva/"]').addClass('active');
} else if (currentPath.startsWith('/game/diva/playlog/')) {
$('.nav-link[href="/game/diva/playlog/"]').addClass('active');
}
});
</script>

View File

@ -0,0 +1,111 @@
{% extends "core/templates/index.jinja" %}
{% block content %}
<style>
{% include 'titles/diva/templates/css/diva_style.css' %}
</style>
<div class="container">
{% include 'titles/diva/templates/diva_header.jinja' %}
{% if profile is defined and profile is not none and profile|length > 0 %}
<div class="row">
<div class="col-lg-8 m-auto mt-3">
<div class="card bg-card rounded">
<table class="table-large table-rowdistinct">
<caption align="top" class="text-center">OVERVIEW</caption>
<tr>
<th>Player name:</th>
<th>{{ profile[3] }}</th>
<th>
<button type="button" class="btn btn-primary btn-sm" data-bs-toggle="modal"
data-bs-target="#name_change">Edit</button>
</th>
<th>Level string:</th>
<th>{{ profile[4] }}</th>
<th>
<button type="button" class="btn btn-primary btn-sm" data-bs-toggle="modal"
data-bs-target="#lv_change">Edit</button>
</th>
</tr>
<tr>
<td>Lvl:</td>
<td>{{ profile[5] }}</td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr>
<td>Lvl points:</td>
<td>{{ profile[6] }}</td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr>
<td>Vocaloid points:</td>
<td>{{ profile[7] }}</td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
</table>
</div>
</div>
</div>
{% if error is defined %}
{% include "core/templates/widgets/err_banner.jinja" %}
{% endif %}
{% elif sesh is defined and sesh is not none and sesh.user_id > 0 %}
No profile information found for this account.
{% else %}
Login to view profile information.
{% endif %}
</div>
<div class="modal fade" id="name_change" tabindex="-1" aria-labelledby="name_change_label" data-bs-theme="dark"
aria-hidden="true">
<div class="modal-dialog modal-dialog-centered">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Name change</h5>
</div>
<div class="modal-body">
<form id="new_name_form" action="/game/diva/update.name" method="post" style="outline: 0;">
<label class="form-label" for="new_name">new name:</label>
<input class="form-control" aria-describedby="newNameHelp" form="new_name_form" id="new_name"
name="new_name" maxlength="14" type="text" required>
<div id="newNameHelp" class="form-text">name must be full-width character string.
</div>
</form>
</div>
<div class="modal-footer">
<input type="submit" class="btn btn-primary" form="new_name_form">
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<div class="modal fade" id="lv_change" tabindex="-1" aria-labelledby="lv_change_label" data-bs-theme="dark"
aria-hidden="true">
<div class="modal-dialog modal-dialog-centered">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Level string change</h5>
</div>
<div class="modal-body">
<form id="new_lv_form" action="/game/diva/update.lv" method="post" style="outline: 0;">
<label class="form-label" for="new_lv">new level string:</label>
<input class="form-control" aria-describedby="newLvHelp" form="new_lv_form" id="new_lv" name="new_lv"
maxlength="14" type="text" required>
<div id="newLvHelp" class="form-text">level string must be full-width character string.
</div>
</form>
<div class="modal-footer">
<input type="submit" class="btn btn-primary" form="new_lv_form">
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
</div>
{% endblock %}

View File

@ -0,0 +1,169 @@
{% extends "core/templates/index.jinja" %}
{% block content %}
<style>
{% include 'titles/diva/templates/css/diva_style.css' %}
</style>
<div class="container">
{% include 'titles/diva/templates/diva_header.jinja' %}
{% if playlog is defined and playlog is not none %}
<div class="row">
<h4 style="text-align: center;">Score counts: {{ playlog_count }}</h4>
{% set difficultyName = ['easy', 'normal', 'hard', 'extreme', 'extra extreme'] %}
{% set clearState = ['MISSxTAKE', 'STANDARD', 'GREAT', 'EXCELLENT', 'PERFECT'] %}
{% for record in playlog %}
<div class="col-lg-6 mt-3">
<div class="card bg-card rounded card-hover">
<div class="card bg-card rounded card-hover">
<div class="card-header row">
<div class="col-8 scrolling-text">
<h5 class="card-text">{{ record.title }}</h5>
<br>
<h6 class="card-text">{{ record.vocaloid_arranger }}</h6>
</div>
<div class="col-4">
<h6 class="card-text">{{record.raw.date_scored}}</h6>
</div>
</div>
<div class="card-body row">
<div class="col-3" style="text-align: center;">
<h4 class="card-text">{{ record.raw.score }}</h4>
<h2>{{ record.raw.atn_pnt / 100 }}%</h2>
<h6>{{ difficultyName[record.raw.difficulty] }}</h6>
</div>
<div class="col-6" style="text-align: center;">
<table class="table-small table-rowdistinc">
<tr>
<td>COOL</td>
<td>{{ record.raw.cool }}</td>
</tr>
<tr>
<td>FINE</td>
<td>{{ record.raw.fine }}</td>
</tr>
<tr>
<td>SAFE</td>
<td>{{ record.raw.safe }}</td>
</tr>
<tr>
<td>SAD</td>
<td>{{ record.raw.sad }}</td>
</tr>
<tr>
<td>WORST</td>
<td>{{ record.raw.worst }}</td>
</tr>
</table>
</div>
<div class="col-3" style="text-align: center;">
<h6>{{ record.raw.max_combo }}</h6>
{% if record.raw.clr_kind == -1 %}
<h6>{{ clearState[0] }}</h6>
{% elif record.raw.clr_kind == 2 %}
<h6>{{ clearState[1] }}</h6>
{% elif record.raw.clr_kind == 3 %}
<h6>{{ clearState[2] }}</h6>
{% elif record.raw.clr_kind == 4 %}
<h6>{{ clearState[3] }}</h6>
{% elif record.raw.clr_kind == 5 %}
<h6>{{ clearState[4] }}</h6>
{% endif %}
{% if record.raw.clr_kind == -1 %}
<h6>NOT CLEAR</h6>
{% else %}
<h6>CLEAR</h6>
{% endif %}
</div>
</div>
</div>
</div>
</div>
{% endfor %}
</div>
{% set playlog_pages = playlog_count // 20 + 1 %}
{% elif sesh is defined and sesh is not none and sesh.user_id > 0 %}
No Score information found for this account.
{% else %}
Login to view profile information.
{% endif %}
</div>
<footer class="navbar-fixed-bottom">
<nav aria-label="Score page navication">
<ul class="pagination justify-content-center mt-3">
<li class="page-item"><a id="prev_page" class="page-link" href="#">Previous</a></li>
<li class="page-item"><a id="first_page" class="page-link" href="/game/diva/playlog/">1</a></li>
<li class="page-item"><a id="prev_3_page" class="page-link" href="">...</a></li>
<li class="page-item"><a id="front_page" class="page-link" href="">2</a></li>
<li class="page-item"><a id="cur_page" class="page-link active" href="">3</a></li>
<li class="page-item"><a id="back_page" class="page-link" href="">4</a></li>
<li class="page-item"><a id="next_3_page" class="page-link" href="">...</a></li>
<li class="page-item"><a id="last_page" class="page-link" href="/game/diva/playlog/{{ playlog_pages }}">{{
playlog_pages }}</a></li>
<li class="page-item"><a id="next_page" class="page-link" href="#">Next</a></li>
&nbsp;
</ul>
</nav>
</footer>
<script>
$(document).ready(function () {
$('.scrolling-text p, .scrolling-text h1, .scrolling-text h2, .scrolling-text h3, .scrolling-text h4, .scrolling-text h5, .scrolling-text h6').each(function () {
var parentWidth = $(this).parent().width();
var elementWidth = $(this).outerWidth();
var elementWidthWithPadding = $(this).outerWidth(true);
if (elementWidthWithPadding > parentWidth) {
$(this).addClass('scrolling');
}
});
var currentUrl = window.location.pathname;
var currentPage = parseInt(currentUrl.split('/').pop());
var rootUrl = '/game/diva/playlog/';
var scorePages = {{ playlog_pages }};
if (Number.isNaN(currentPage)) {
currentPage = 1;
}
$('#cur_page').text(currentPage);
$('#prev_page').attr('href', rootUrl + (currentPage - 1))
$('#next_page').attr('href', rootUrl + (currentPage + 1))
$('#front_page').attr('href', rootUrl + (currentPage - 1))
$('#front_page').text(currentPage - 1);
$('#back_page').attr('href', rootUrl + (currentPage + 1))
$('#back_page').text(currentPage + 1);
$('#prev_3_page').attr('href', rootUrl + (currentPage - 3))
$('#next_3_page').attr('href', rootUrl + (currentPage + 3))
if ((currentPage - 1) < 3) {
$('#prev_3_page').hide();
if ((currentPage - 1) < 2) {
$('#front_page').hide();
if (currentPage === 1) {
$('#first_page').hide();
$('#prev_page').addClass('disabled');
}
}
}
if ((scorePages - currentPage) < 3) {
$('#next_3_page').hide();
if ((scorePages - currentPage) < 2) {
$('#back_page').hide();
if (currentPage === scorePages) {
$('#last_page').hide();
$('#next_page').addClass('disabled');
}
}
}
$('#go_button').click(function () {
var pageNumber = parseInt($('#page_input').val());
if (!Number.isNaN(pageNumber) && pageNumber <= scorePages && pageNumber >= 0) {
var url = '/game/diva/playlog/' + pageNumber;
window.location.href = url;
} else {
$('#page_input').val('');
$('#page_input').attr('placeholder', 'invalid input!');
}
});
});
</script>
{% endblock content %}

View File

@ -1,5 +1,5 @@
import logging
import core.logger
from core.config import CoreConfig
from titles.idac.config import IDACConfig
from titles.idac.const import IDACConstants
@ -13,4 +13,4 @@ class IDACBase:
self.game = IDACConstants.GAME_CODE
self.version = IDACConstants.VER_IDAC_SEASON_1
self.data = IDACData(core_cfg)
self.logger = logging.getLogger("idac")
self.logger = self.logger = core.logger.create_logger("IDAC", game_cfg.server.loglevel)

View File

@ -4,6 +4,7 @@ import socket
from socketserver import BaseRequestHandler, TCPServer
from typing import Tuple
import core.logger
from core.config import CoreConfig
from titles.idac.config import IDACConfig
from titles.idac.database import IDACData
@ -25,7 +26,7 @@ class IDACEchoTCP(BaseRequestHandler):
) -> None:
self.core_config = cfg
self.game_config = game_cfg
self.logger = logging.getLogger("idac")
self.logger = core.logger.create_logger("IDAC")
self.data = IDACData(cfg)
super().__init__(request, client_address, server)

View File

@ -4,13 +4,11 @@ from starlette.routing import Route
from starlette.requests import Request
from starlette.responses import JSONResponse
import yaml
import logging
import coloredlogs
from os import path
from typing import Dict, List, Tuple
from logging.handlers import TimedRotatingFileHandler
import asyncio
import core.logger
from core.config import CoreConfig
from core.title import BaseServlet, JSONResponseNoASCII
from core.utils import Utils
@ -33,29 +31,7 @@ class IDACServlet(BaseServlet):
IDACBase(core_cfg, self.game_cfg),
IDACSeason2(core_cfg, self.game_cfg)
]
self.logger = logging.getLogger("idac")
log_fmt_str = "[%(asctime)s] IDAC | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.core_cfg.server.log_dir, "idac"),
encoding="utf8",
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(self.game_cfg.server.loglevel)
coloredlogs.install(
level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger = core.logger.create_logger("IDAC", self.game_cfg.server.loglevel)
@classmethod
def is_game_enabled(cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str) -> bool:

View File

@ -1,7 +1,6 @@
import json
import logging
import os
from typing import Any, Dict, List, Optional
from typing import Any, Dict, Optional
from read import BaseReader
from core.data import Data

View File

@ -3,11 +3,10 @@ from sqlalchemy import (
Table,
Column,
UniqueConstraint,
PrimaryKeyConstraint,
and_,
update,
)
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
from sqlalchemy.types import Integer, String, TIMESTAMP, JSON
from sqlalchemy.schema import ForeignKey
from sqlalchemy.engine import Row
from sqlalchemy.sql import func, select

View File

@ -1,6 +1,6 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BigInteger
from typing import Dict, Optional
from sqlalchemy import Table, Column, UniqueConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP
from sqlalchemy.engine.base import Connection
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select

Some files were not shown because too many files have changed in this diff.