forked from Dniel97/artemis
Compare commits
339 Commits
master
...
mai2_fixes
Author | SHA1 | Date |
---|---|---|
Dniel97 | f39317301b | |
Dniel97 | 389784ce82 | |
Hay1tsme | 2f13596885 | |
Hay1tsme | 85b73e634d | |
Midorica | 09c4f8cda4 | |
Dniel97 | 36d338e618 | |
Hay1tsme | 03cf535ff6 | |
Hay1tsme | 6c155a5e48 | |
Midorica | 737312ca3d | |
Hay1tsme | 1edec7dba2 | |
Hay1tsme | d60f827000 | |
Midorica | da422e602b | |
Midorica | d276ac8598 | |
Midorica | 84e880e94f | |
Hay1tsme | 432177957a | |
Hay1tsme | a89247cdd6 | |
Hay1tsme | f279adb894 | |
Hay1tsme | a680699939 | |
Hay1tsme | d204954447 | |
Hay1tsme | 042440c76e | |
Hay1tsme | c4c0566cd5 | |
Hay1tsme | 3e9cec3a20 | |
Hay1tsme | 8f9584c3d2 | |
Hay1tsme | b29cb0fbaa | |
Hay1tsme | d9a92f5865 | |
Hay1tsme | 9859ab4fdb | |
Hay1tsme | d89eb61e62 | |
Hay1tsme | dc8c27046e | |
Hay1tsme | 3e461f4d71 | |
Hay1tsme | 2c6902a546 | |
Hay1tsme | 318b73dd57 | |
Hay1tsme | 9d33091bb8 | |
Hay1tsme | 8b43d554fc | |
Hay1tsme | 610ef70bad | |
Hay1tsme | 60b3bc7750 | |
Hay1tsme | 4ea83f6025 | |
Midorica | 20389011e9 | |
Midorica | e446816b9a | |
Midorica | 9dd2b4d524 | |
Midorica | b60cf6258d | |
Hay1tsme | 127e6f8aa8 | |
Midorica | 5155353360 | |
Hay1tsme | e3d38dacde | |
Hay1tsme | 0c6d9a36ce | |
Hay1tsme | b1968fe320 | |
Midorica | 03f91d18c9 | |
Midorica | 17508f09b2 | |
Midorica | aae4afe7b8 | |
Hay1tsme | 514f786e2d | |
Midorica | ec9ad1ebb0 | |
Midorica | 08ebb5c907 | |
Midorica | 571b92d0cd | |
Midorica | 01b5282899 | |
Midorica | 391edd3354 | |
Midorica | d5bff0e891 | |
Hay1tsme | 402e753469 | |
Hay1tsme | 154ccbdae5 | |
Hay1tsme | 858b101a36 | |
Midorica | 1d10e798a5 | |
Hay1tsme | 3c385f505b | |
Hay1tsme | b12938bcd8 | |
Hay1tsme | 1b2f5e3709 | |
Hay1tsme | 65686fb615 | |
Hay1tsme | f56332141e | |
Hay1tsme | 5a35b1c823 | |
Hay1tsme | 5ca16f2067 | |
Midorica | a0b25e2b7b | |
Midorica | 84fc002cdb | |
Midorica | 3bd03c592e | |
Midorica | cf6cfdbd3b | |
Hay1tsme | db77e61b79 | |
Hay1tsme | ac9e71ee2f | |
Hay1tsme | 20865dc495 | |
Hay1tsme | 37d24b3b4d | |
Hay1tsme | 2418abacce | |
Hay1tsme | 5c3f812caf | |
Midorica | 4854bcfcad | |
Midorica | bf6d126f8a | |
Midorica | e466ddce55 | |
Dniel97 | ad820ed091 | |
Dniel97 | 960a0e3fd9 | |
Midorica | a2fe11d654 | |
Midorica | 2b4ac06389 | |
Midorica | d8af7be4a4 | |
Midorica | 84cb786bde | |
Hay1tsme | 05dee87a9a | |
Midorica | 049dc40a8b | |
Midorica | cab1d6814a | |
Midorica | 72594fef31 | |
Hay1tsme | 7ed294e9f7 | |
Hay1tsme | b9fd4f294d | |
Hay1tsme | 5ddfb88182 | |
Hay1tsme | 4da8622977 | |
Hay1tsme | 97892d6a7d | |
Hay1tsme | 02078080a8 | |
Hay1tsme | 61e3a2c930 | |
Hay1tsme | 8ae0aba89c | |
Hay1tsme | 49166c1a7b | |
Midorica | 013e83420b | |
Dniel97 | 0dce7e7849 | |
Raymonf | f959236af0 | |
Dniel97 | b85a65204f | |
Dniel97 | 0ab539173a | |
Hay1tsme | 42ed222095 | |
Hay1tsme | d172e5582b | |
Hay1tsme | 9766e3ab78 | |
Hay1tsme | b34b441ba8 | |
Hay1tsme | 8149f09a40 | |
Hay1tsme | cad523dfce | |
Hay1tsme | 8b9771b5af | |
Hay1tsme | 989c080657 | |
Hay1tsme | dcff8adbab | |
Hay1tsme | e3b1addce6 | |
Hay1tsme | b6f43d887a | |
Hay1tsme | efd8f86e48 | |
Hay1tsme | d0242b456d | |
Hay1tsme | 7bb8c2c80c | |
Hay1tsme | 6d1855a6bc | |
Hay1tsme | 8d94d25893 | |
Hay1tsme | ae6dcb68df | |
Hay1tsme | 3b6fc6618c | |
Hay1tsme | deeac1d8db | |
Midorica | 6ad5194bb8 | |
Dniel97 | a0793aa13a | |
Dniel97 | 7364181de1 | |
Hay1tsme | 9d8762d3da | |
Hay1tsme | 238d437519 | |
Hay1tsme | 9d23d59e43 | |
Hay1tsme | f4ee4238d9 | |
Hay1tsme | b498e82bf8 | |
Hay1tsme | 0668488ccf | |
Hay1tsme | 47f4aaddf8 | |
Hay1tsme | 26c4bcb466 | |
Hay1tsme | d8c3ed5c01 | |
Hay1tsme | 58a088b9a4 | |
Hay1tsme | 190c41e03e | |
Hay1tsme | a30967e8d7 | |
Hay1tsme | 00b127361b | |
Hay1tsme | 241f29e29c | |
Hay1tsme | 4d6afd757f | |
Hay1tsme | 68b0894e47 | |
Midorica | 017ef1e224 | |
Dniel97 | 958471b8eb | |
Hay1tsme | 15433b681c | |
Hay1tsme | b0042bc776 | |
Hay1tsme | 469ead7a84 | |
Hay1tsme | 0dc96f33e1 | |
Hay1tsme | 4102ba21fc | |
Hay1tsme | 83d2151b6b | |
Hay1tsme | 9895068125 | |
Hay1tsme | 4419310086 | |
Hay1tsme | a416fb09e1 | |
Hay1tsme | baa885f674 | |
Hay1tsme | 0d5567c990 | |
Hay1tsme | b1f9be0121 | |
Hay1tsme | dc3e3e1fb3 | |
Dniel97 | 97e3f1af01 | |
Hay1tsme | 71c43a4a57 | |
Hay1tsme | bd356af272 | |
Dniel97 | 28c06335b6 | |
Hay1tsme | 68e25b9c5e | |
Dniel97 | f63dd07937 | |
Dniel97 | 7fdb3e8222 | |
Hay1tsme | bf6c7d39f5 | |
Hay1tsme | 5ec280ab8c | |
Hay1tsme | de5f61b0de | |
Hay1tsme | 0f642629a2 | |
Midorica | 979bd7d718 | |
Dniel97 | a60d52b742 | |
Dniel97 | 571a691e0e | |
Dniel97 | 1aa92458f4 | |
Dniel97 | 541fe76a7c | |
Hay1tsme | 6489e3ca21 | |
Dniel97 | 2a290f2a3d | |
Dniel97 | b21ddb92ce | |
Hay1tsme | ac8a660e13 | |
Hay1tsme | 12fd663eb7 | |
Hay1tsme | dfd3877889 | |
Hay1tsme | 62b62db5b5 | |
Hay1tsme | 188be2dfc1 | |
Hay1tsme | 6ff8c4d931 | |
Hay1tsme | 401623f20b | |
Hay1tsme | 5a388e2a24 | |
Hay1tsme | 6965132e5b | |
Hay1tsme | 7ca4e6adb9 | |
Hay1tsme | 4bd1dea6bf | |
Hay1tsme | 8c5c7f31b6 | |
Hay1tsme | a7db5eed77 | |
Hay1tsme | a6e9e80bc7 | |
Hay1tsme | 71eec6e34d | |
Hay1tsme | 8b718c601f | |
Dniel97 | 2af7751504 | |
Hay1tsme | a791142f95 | |
Hay1tsme | 346e898983 | |
Hay1tsme | fddf2e448a | |
Hay1tsme | 65e9ecd58c | |
Hay1tsme | 6fa0175baa | |
Hay1tsme | a97509bb43 | |
Hay1tsme | 18a95f5213 | |
Hay1tsme | ea14f105d5 | |
Hay1tsme | e4b7809e34 | |
Hay1tsme | eb51fc315c | |
Hay1tsme | a9f49e8d5d | |
Hay1tsme | edb9ec1971 | |
Dniel97 | 3a234244d4 | |
Hay1tsme | 2dd84bbe3e | |
Hay1tsme | f283dd10a9 | |
Midorica | 57ecff641a | |
Midorica | a088dd82de | |
Midorica | 9295299dca | |
Raymonf | b076a9a9df | |
Hay1tsme | 2033bc897f | |
Hay1tsme | e9ffd95435 | |
Hay1tsme | dafc030050 | |
Hay1tsme | a76bb94eb1 | |
Hay1tsme | fa7206848c | |
Hay1tsme | 6761915a3f | |
Hay1tsme | c8d4bc6109 | |
Hay1tsme | 6dcd7b67ef | |
Hay1tsme | 2f1728b64d | |
Midorica | fb6a026b84 | |
Midorica | 4c64305f15 | |
Dniel97 | 44c75d0156 | |
Dniel97 | 78b2a81c79 | |
Dniel97 | 6609732546 | |
Dniel97 | 74f3ab7c3f | |
Hay1tsme | 36054ebb66 | |
Hay1tsme | 82b159e5b1 | |
Hay1tsme | 59b2401a67 | |
Hay1tsme | f25152a6bf | |
Hay1tsme | ff6ef16b89 | |
Hay1tsme | a7a830c6b7 | |
Hay1tsme | b12f61198f | |
Hay1tsme | bfe5294d51 | |
Hay1tsme | b2b28850dd | |
Hay1tsme | e0fdd937e6 | |
Hay1tsme | b8fd0baee5 | |
Hay1tsme | a340bcf1dd | |
Dniel97 | fe8f40c627 | |
Hay1tsme | f5d4f519d3 | |
Hay1tsme | 3181e1f4f8 | |
Hay1tsme | 279f48dc0c | |
Hay1tsme | dc5e5c1440 | |
Hay1tsme | 4f3d3d8395 | |
Dniel97 | 3acc2dc197 | |
Dniel97 | 8fe0acae93 | |
Hay1tsme | 02e1838d95 | |
Hay1tsme | 102bf3b5a4 | |
Hay1tsme | 2a6842db24 | |
Hay1tsme | c26f6b7b1d | |
Hay1tsme | 9ad724d64b | |
Hay1tsme | f24d554a44 | |
Hay1tsme | 34e2c50fb5 | |
Hay1tsme | b35e7d6983 | |
Hay1tsme | f6cfb9e36d | |
Hay1tsme | 101b966e3a | |
Hay1tsme | fae6b77403 | |
Midorica | 4c64554383 | |
Hay1tsme | 45fedd8425 | |
Hay1tsme | 2da12e515e | |
Hay1tsme | cd78ecd7ea | |
Hay1tsme | 7953519e68 | |
Hay1tsme | 524f99879f | |
Hay1tsme | 4626ec36cd | |
Midorica | 3791b2b238 | |
Midorica | 937dba20ca | |
Hay1tsme | c5fc879af6 | |
Hay1tsme | e205777693 | |
Hay1tsme | 4d9ae19cb2 | |
Hay1tsme | 99881ea220 | |
Hay1tsme | d5a7247a7f | |
Hay1tsme | 846a556c5b | |
Hay1tsme | 7071ab0bd9 | |
Hay1tsme | 5965362a0f | |
Hay1tsme | 6b0838062e | |
Hay1tsme | 1567ec23ab | |
Hay1tsme | a0739436cc | |
Hay1tsme | e961c1dfb3 | |
Hay1tsme | e46c8e7dbd | |
Raymonf | 379388c749 | |
Hay1tsme | 078059f54e | |
Hay1tsme | 382e36e60f | |
Hay1tsme | 88f6eba30b | |
Midorica | 447743da4c | |
Raymonf | b0bf151c9f | |
Raymonf | 97aeba20e5 | |
Dniel97 | a65055fc8c | |
Midorica | e98a7c8ae0 | |
Dniel97 | 842e3a313e | |
Dniel97 | b81767af8a | |
Dniel97 | 435a098fe0 | |
Hay1tsme | 0e3265a162 | |
Hay1tsme | 0284885926 | |
Hay1tsme | 066f92d94b | |
Hay1tsme | abe1fa7853 | |
Hay1tsme | 0da3053454 | |
Dniel97 | d3862b7483 | |
Hay1tsme | 1f2d12f318 | |
Dniel97 | b30e9570e7 | |
Hay1tsme | 806dd717e6 | |
Hay1tsme | b31e739ecd | |
Hay1tsme | cb227f9cf4 | |
Hay1tsme | bd1665a849 | |
Hay1tsme | c213926893 | |
Hay1tsme | 2bd980165e | |
Hay1tsme | e7d73dd257 | |
Hay1tsme | 6b265ea866 | |
Midorica | b105418431 | |
Midorica | e8e6414b66 | |
Hay1tsme | 7df998a51a | |
Hay1tsme | c3aac4c38e | |
Midorica | 026fcc5182 | |
Midorica | b300bb302b | |
God601 | fff7eb4666 | |
Hay1tsme | 9c62ea24be | |
Hay1tsme | 3f40e083ce | |
Hay1tsme | b343228072 | |
Dniel97 | 0b76c61059 | |
Dniel97 | 18a1923f6a | |
Dniel97 | a7821fade8 | |
Hay1tsme | db6b950c29 | |
Hay1tsme | 97d16365df | |
Dniel97 | 285bb966a5 | |
Dniel97 | d1535d7be1 | |
Dniel97 | 8bdc2071da | |
Hay1tsme | df4efa1fda | |
Hay1tsme | a57d2cf71c | |
Hay1tsme | d434bf084d | |
Hay1tsme | f5d9bd8003 | |
Hay1tsme | ed479866cc | |
Hay1tsme | a3c689cd09 | |
Hay1tsme | a843e3d3ac | |
Hay1tsme | f42c2d7785 | |
Hay1tsme | b09d2326c2 | |
Hay1tsme | 68b9c64f71 | |
Hay1tsme | 655d9dc530 | |
Hay1tsme | 9a43303880 | |
Hay1tsme | edddb2e9d4 | |
Dniel97 | c99bfda015 |
|
@ -158,5 +158,6 @@ cert/*
|
|||
!cert/server.pem
|
||||
config/*
|
||||
deliver/*
|
||||
*.gz
|
||||
|
||||
dbdump-*.json
|
|
@ -0,0 +1,21 @@
|
|||
FROM python:3.9.15-slim-bullseye
|
||||
|
||||
RUN apt update && apt install default-libmysqlclient-dev build-essential libtk nodejs npm -y
|
||||
|
||||
WORKDIR /app
|
||||
COPY requirements.txt requirements.txt
|
||||
RUN pip3 install -r requirements.txt
|
||||
RUN npm i -g nodemon
|
||||
|
||||
COPY entrypoint.sh entrypoint.sh
|
||||
RUN chmod +x entrypoint.sh
|
||||
|
||||
COPY index.py index.py
|
||||
COPY dbutils.py dbutils.py
|
||||
ADD core core
|
||||
ADD titles titles
|
||||
ADD config config
|
||||
ADD log log
|
||||
ADD cert cert
|
||||
|
||||
ENTRYPOINT [ "/app/entrypoint.sh" ]
|
|
@ -0,0 +1,49 @@
|
|||
# Changelog
|
||||
Documenting updates to ARTEMiS, to be updated every time the master branch is pushed to.
|
||||
|
||||
## 2023042300
|
||||
### Wacca
|
||||
+ Time free now works properly
|
||||
+ Fix reverse gate mission causing a fatal error
|
||||
+ Other misc. fixes
|
||||
+ Latest DB: 5
|
||||
|
||||
### Pokken
|
||||
+ Added preliminary support
|
||||
+ Nothing saves currently, but the game will boot and function properly.
|
||||
|
||||
### Initial D Zero
|
||||
+ Added preliminary support
|
||||
+ Nothing saves currently, but the game will boot and function for the most part.
|
||||
|
||||
### Mai2
|
||||
+ Added support for Festival
|
||||
+ Lasted DB Version: 4
|
||||
|
||||
### Ongeki
|
||||
+ Misc fixes
|
||||
+ Lasted DB Version: 4
|
||||
|
||||
### Diva
|
||||
+ Misc fixes
|
||||
+ Lasted DB Version: 4
|
||||
|
||||
### Chuni
|
||||
+ Fix network encryption
|
||||
+ Add `handle_remove_token_api_request` for event mode
|
||||
|
||||
### Allnet
|
||||
+ Added download order support
|
||||
+ It is up to the sysop to provide the INI file, and host the files.
|
||||
+ ONLY for use with cabs. It's not checked currently, which it's why it's default disabled
|
||||
+ YMMV, use at your own risk
|
||||
+ When running develop mode, games that are not recognised will still be able to authenticate.
|
||||
|
||||
### Database
|
||||
+ Add autoupgrade command
|
||||
+ Invoke to automatically upgrade all schemas to their latest versions
|
||||
|
||||
+ `version` arg no longer required, leave it blank to update the game schema to latest if it isn't already
|
||||
|
||||
### Misc
|
||||
+ Update example nginx config file
|
|
@ -0,0 +1,8 @@
|
|||
# Contributing to ARTEMiS
|
||||
If you would like to contribute to artemis, either by adding features, games, or fixing bugs, you can do so by forking the repo and submitting a pull request [here](https://gitea.tendokyu.moe/Hay1tsme/artemis/pulls). Please make sure, if you're submitting a PR for a game or game version, that you're following the n-0/y-1 guidelines, or it will be rejected.
|
||||
|
||||
## Adding games
|
||||
Guide WIP
|
||||
|
||||
## Adding game versions
|
||||
Guide WIP
|
|
@ -4,3 +4,4 @@ from core.aimedb import AimedbFactory
|
|||
from core.title import TitleServlet
|
||||
from core.utils import Utils
|
||||
from core.mucha import MuchaServlet
|
||||
from core.frontend import FrontendServlet
|
||||
|
|
169
core/aimedb.py
169
core/aimedb.py
|
@ -8,17 +8,18 @@ from logging.handlers import TimedRotatingFileHandler
|
|||
from core.config import CoreConfig
|
||||
from core.data import Data
|
||||
|
||||
|
||||
class AimedbProtocol(Protocol):
|
||||
AIMEDB_RESPONSE_CODES = {
|
||||
"felica_lookup": 0x03,
|
||||
"lookup": 0x06,
|
||||
"log": 0x0a,
|
||||
"campaign": 0x0c,
|
||||
"touch": 0x0e,
|
||||
"log": 0x0A,
|
||||
"campaign": 0x0C,
|
||||
"touch": 0x0E,
|
||||
"lookup2": 0x10,
|
||||
"felica_lookup2": 0x12,
|
||||
"log2": 0x14,
|
||||
"hello": 0x65
|
||||
"hello": 0x65,
|
||||
}
|
||||
|
||||
request_list: Dict[int, Any] = {}
|
||||
|
@ -30,14 +31,14 @@ class AimedbProtocol(Protocol):
|
|||
if core_cfg.aimedb.key == "":
|
||||
self.logger.error("!!!KEY NOT SET!!!")
|
||||
exit(1)
|
||||
|
||||
|
||||
self.request_list[0x01] = self.handle_felica_lookup
|
||||
self.request_list[0x04] = self.handle_lookup
|
||||
self.request_list[0x05] = self.handle_register
|
||||
self.request_list[0x09] = self.handle_log
|
||||
self.request_list[0x0b] = self.handle_campaign
|
||||
self.request_list[0x0d] = self.handle_touch
|
||||
self.request_list[0x0f] = self.handle_lookup2
|
||||
self.request_list[0x09] = self.handle_log
|
||||
self.request_list[0x0B] = self.handle_campaign
|
||||
self.request_list[0x0D] = self.handle_touch
|
||||
self.request_list[0x0F] = self.handle_lookup2
|
||||
self.request_list[0x11] = self.handle_felica_lookup2
|
||||
self.request_list[0x13] = self.handle_log2
|
||||
self.request_list[0x64] = self.handle_hello
|
||||
|
@ -53,8 +54,10 @@ class AimedbProtocol(Protocol):
|
|||
self.logger.debug(f"{self.transport.getPeer().host} Connected")
|
||||
|
||||
def connectionLost(self, reason) -> None:
|
||||
self.logger.debug(f"{self.transport.getPeer().host} Disconnected - {reason.value}")
|
||||
|
||||
self.logger.debug(
|
||||
f"{self.transport.getPeer().host} Disconnected - {reason.value}"
|
||||
)
|
||||
|
||||
def dataReceived(self, data: bytes) -> None:
|
||||
cipher = AES.new(self.config.aimedb.key.encode(), AES.MODE_ECB)
|
||||
|
||||
|
@ -66,7 +69,7 @@ class AimedbProtocol(Protocol):
|
|||
|
||||
self.logger.debug(f"{self.transport.getPeer().host} wrote {decrypted.hex()}")
|
||||
|
||||
if not decrypted[1] == 0xa1 and not decrypted[0] == 0x3e:
|
||||
if not decrypted[1] == 0xA1 and not decrypted[0] == 0x3E:
|
||||
self.logger.error(f"Bad magic")
|
||||
return None
|
||||
|
||||
|
@ -90,30 +93,46 @@ class AimedbProtocol(Protocol):
|
|||
except ValueError as e:
|
||||
self.logger.error(f"Failed to encrypt {resp.hex()} because {e}")
|
||||
return None
|
||||
|
||||
|
||||
def handle_campaign(self, data: bytes) -> bytes:
|
||||
self.logger.info(f"campaign from {self.transport.getPeer().host}")
|
||||
ret = struct.pack("<5H", 0xa13e, 0x3087, self.AIMEDB_RESPONSE_CODES["campaign"], 0x0200, 0x0001)
|
||||
ret = struct.pack(
|
||||
"<5H",
|
||||
0xA13E,
|
||||
0x3087,
|
||||
self.AIMEDB_RESPONSE_CODES["campaign"],
|
||||
0x0200,
|
||||
0x0001,
|
||||
)
|
||||
return self.append_padding(ret)
|
||||
|
||||
|
||||
def handle_hello(self, data: bytes) -> bytes:
|
||||
self.logger.info(f"hello from {self.transport.getPeer().host}")
|
||||
ret = struct.pack("<5H", 0xa13e, 0x3087, self.AIMEDB_RESPONSE_CODES["hello"], 0x0020, 0x0001)
|
||||
ret = struct.pack(
|
||||
"<5H", 0xA13E, 0x3087, self.AIMEDB_RESPONSE_CODES["hello"], 0x0020, 0x0001
|
||||
)
|
||||
return self.append_padding(ret)
|
||||
|
||||
def handle_lookup(self, data: bytes) -> bytes:
|
||||
luid = data[0x20: 0x2a].hex()
|
||||
luid = data[0x20:0x2A].hex()
|
||||
user_id = self.data.card.get_user_id_from_card(access_code=luid)
|
||||
|
||||
if user_id is None: user_id = -1
|
||||
if user_id is None:
|
||||
user_id = -1
|
||||
|
||||
self.logger.info(f"lookup from {self.transport.getPeer().host}: luid {luid} -> user_id {user_id}")
|
||||
self.logger.info(
|
||||
f"lookup from {self.transport.getPeer().host}: luid {luid} -> user_id {user_id}"
|
||||
)
|
||||
|
||||
ret = struct.pack("<5H", 0xa13e, 0x3087, self.AIMEDB_RESPONSE_CODES["lookup"], 0x0130, 0x0001)
|
||||
ret = struct.pack(
|
||||
"<5H", 0xA13E, 0x3087, self.AIMEDB_RESPONSE_CODES["lookup"], 0x0130, 0x0001
|
||||
)
|
||||
ret += bytes(0x20 - len(ret))
|
||||
|
||||
if user_id is None: ret += struct.pack("<iH", -1, 0)
|
||||
else: ret += struct.pack("<l", user_id)
|
||||
if user_id is None:
|
||||
ret += struct.pack("<iH", -1, 0)
|
||||
else:
|
||||
ret += struct.pack("<l", user_id)
|
||||
return self.append_padding(ret)
|
||||
|
||||
def handle_lookup2(self, data: bytes) -> bytes:
|
||||
|
@ -125,66 +144,98 @@ class AimedbProtocol(Protocol):
|
|||
return bytes(ret)
|
||||
|
||||
def handle_felica_lookup(self, data: bytes) -> bytes:
|
||||
idm = data[0x20: 0x28].hex()
|
||||
pmm = data[0x28: 0x30].hex()
|
||||
idm = data[0x20:0x28].hex()
|
||||
pmm = data[0x28:0x30].hex()
|
||||
access_code = self.data.card.to_access_code(idm)
|
||||
self.logger.info(f"felica_lookup from {self.transport.getPeer().host}: idm {idm} pmm {pmm} -> access_code {access_code}")
|
||||
self.logger.info(
|
||||
f"felica_lookup from {self.transport.getPeer().host}: idm {idm} pmm {pmm} -> access_code {access_code}"
|
||||
)
|
||||
|
||||
ret = struct.pack("<5H", 0xa13e, 0x3087, self.AIMEDB_RESPONSE_CODES["felica_lookup"], 0x0030, 0x0001)
|
||||
ret = struct.pack(
|
||||
"<5H",
|
||||
0xA13E,
|
||||
0x3087,
|
||||
self.AIMEDB_RESPONSE_CODES["felica_lookup"],
|
||||
0x0030,
|
||||
0x0001,
|
||||
)
|
||||
ret += bytes(26)
|
||||
ret += bytes.fromhex(access_code)
|
||||
|
||||
return self.append_padding(ret)
|
||||
|
||||
def handle_felica_lookup2(self, data: bytes) -> bytes:
|
||||
idm = data[0x30: 0x38].hex()
|
||||
pmm = data[0x38: 0x40].hex()
|
||||
idm = data[0x30:0x38].hex()
|
||||
pmm = data[0x38:0x40].hex()
|
||||
access_code = self.data.card.to_access_code(idm)
|
||||
user_id = self.data.card.get_user_id_from_card(access_code=access_code)
|
||||
|
||||
if user_id is None: user_id = -1
|
||||
if user_id is None:
|
||||
user_id = -1
|
||||
|
||||
self.logger.info(f"felica_lookup2 from {self.transport.getPeer().host}: idm {idm} ipm {pmm} -> access_code {access_code} user_id {user_id}")
|
||||
self.logger.info(
|
||||
f"felica_lookup2 from {self.transport.getPeer().host}: idm {idm} ipm {pmm} -> access_code {access_code} user_id {user_id}"
|
||||
)
|
||||
|
||||
ret = struct.pack("<5H", 0xa13e, 0x3087, self.AIMEDB_RESPONSE_CODES["felica_lookup2"], 0x0140, 0x0001)
|
||||
ret = struct.pack(
|
||||
"<5H",
|
||||
0xA13E,
|
||||
0x3087,
|
||||
self.AIMEDB_RESPONSE_CODES["felica_lookup2"],
|
||||
0x0140,
|
||||
0x0001,
|
||||
)
|
||||
ret += bytes(22)
|
||||
ret += struct.pack("<lq", user_id, -1) # first -1 is ext_id, 3rd is access code
|
||||
ret += struct.pack("<lq", user_id, -1) # first -1 is ext_id, 3rd is access code
|
||||
ret += bytes.fromhex(access_code)
|
||||
ret += struct.pack("<l", 1)
|
||||
|
||||
|
||||
return self.append_padding(ret)
|
||||
|
||||
|
||||
def handle_touch(self, data: bytes) -> bytes:
|
||||
self.logger.info(f"touch from {self.transport.getPeer().host}")
|
||||
ret = struct.pack("<5H", 0xa13e, 0x3087, self.AIMEDB_RESPONSE_CODES["touch"], 0x0050, 0x0001)
|
||||
ret = struct.pack(
|
||||
"<5H", 0xA13E, 0x3087, self.AIMEDB_RESPONSE_CODES["touch"], 0x0050, 0x0001
|
||||
)
|
||||
ret += bytes(5)
|
||||
ret += struct.pack("<3H", 0x6f, 0, 1)
|
||||
ret += struct.pack("<3H", 0x6F, 0, 1)
|
||||
|
||||
return self.append_padding(ret)
|
||||
|
||||
def handle_register(self, data: bytes) -> bytes:
|
||||
luid = data[0x20: 0x2a].hex()
|
||||
if self.config.server.allow_registration:
|
||||
def handle_register(self, data: bytes) -> bytes:
|
||||
luid = data[0x20:0x2A].hex()
|
||||
if self.config.server.allow_user_registration:
|
||||
user_id = self.data.user.create_user()
|
||||
|
||||
if user_id is None:
|
||||
if user_id is None:
|
||||
user_id = -1
|
||||
self.logger.error("Failed to register user!")
|
||||
|
||||
else:
|
||||
card_id = self.data.card.create_card(user_id, luid)
|
||||
|
||||
if card_id is None:
|
||||
if card_id is None:
|
||||
user_id = -1
|
||||
self.logger.error("Failed to register card!")
|
||||
|
||||
self.logger.info(f"register from {self.transport.getPeer().host}: luid {luid} -> user_id {user_id}")
|
||||
|
||||
self.logger.info(
|
||||
f"register from {self.transport.getPeer().host}: luid {luid} -> user_id {user_id}"
|
||||
)
|
||||
|
||||
else:
|
||||
self.logger.info(f"register from {self.transport.getPeer().host} blocked!: luid {luid}")
|
||||
self.logger.info(
|
||||
f"register from {self.transport.getPeer().host} blocked!: luid {luid}"
|
||||
)
|
||||
user_id = -1
|
||||
|
||||
ret = struct.pack("<5H", 0xa13e, 0x3087, self.AIMEDB_RESPONSE_CODES["lookup"], 0x0030, 0x0001 if user_id > -1 else 0)
|
||||
ret = struct.pack(
|
||||
"<5H",
|
||||
0xA13E,
|
||||
0x3087,
|
||||
self.AIMEDB_RESPONSE_CODES["lookup"],
|
||||
0x0030,
|
||||
0x0001 if user_id > -1 else 0,
|
||||
)
|
||||
ret += bytes(0x20 - len(ret))
|
||||
ret += struct.pack("<l", user_id)
|
||||
|
||||
|
@ -193,42 +244,54 @@ class AimedbProtocol(Protocol):
|
|||
def handle_log(self, data: bytes) -> bytes:
|
||||
# TODO: Save aimedb logs
|
||||
self.logger.info(f"log from {self.transport.getPeer().host}")
|
||||
ret = struct.pack("<5H", 0xa13e, 0x3087, self.AIMEDB_RESPONSE_CODES["log"], 0x0020, 0x0001)
|
||||
ret = struct.pack(
|
||||
"<5H", 0xA13E, 0x3087, self.AIMEDB_RESPONSE_CODES["log"], 0x0020, 0x0001
|
||||
)
|
||||
return self.append_padding(ret)
|
||||
|
||||
def handle_log2(self, data: bytes) -> bytes:
|
||||
self.logger.info(f"log2 from {self.transport.getPeer().host}")
|
||||
ret = struct.pack("<5H", 0xa13e, 0x3087, self.AIMEDB_RESPONSE_CODES["log2"], 0x0040, 0x0001)
|
||||
ret = struct.pack(
|
||||
"<5H", 0xA13E, 0x3087, self.AIMEDB_RESPONSE_CODES["log2"], 0x0040, 0x0001
|
||||
)
|
||||
ret += bytes(22)
|
||||
ret += struct.pack("H", 1)
|
||||
|
||||
return self.append_padding(ret)
|
||||
|
||||
|
||||
class AimedbFactory(Factory):
|
||||
protocol = AimedbProtocol
|
||||
|
||||
def __init__(self, cfg: CoreConfig) -> None:
|
||||
self.config = cfg
|
||||
log_fmt_str = "[%(asctime)s] Aimedb | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
self.logger = logging.getLogger("aimedb")
|
||||
|
||||
fileHandler = TimedRotatingFileHandler("{0}/{1}.log".format(self.config.server.log_dir, "aimedb"), when="d", backupCount=10)
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(self.config.server.log_dir, "aimedb"),
|
||||
when="d",
|
||||
backupCount=10,
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
self.logger.addHandler(fileHandler)
|
||||
self.logger.addHandler(consoleHandler)
|
||||
|
||||
|
||||
self.logger.setLevel(self.config.aimedb.loglevel)
|
||||
coloredlogs.install(level=cfg.aimedb.loglevel, logger=self.logger, fmt=log_fmt_str)
|
||||
|
||||
coloredlogs.install(
|
||||
level=cfg.aimedb.loglevel, logger=self.logger, fmt=log_fmt_str
|
||||
)
|
||||
|
||||
if self.config.aimedb.key == "":
|
||||
self.logger.error("Please set 'key' field in your config file.")
|
||||
exit(1)
|
||||
|
||||
self.logger.info(f"Ready on port {self.config.aimedb.port}")
|
||||
|
||||
|
||||
def buildProtocol(self, addr):
|
||||
return AimedbProtocol(self.config)
|
||||
|
|
533
core/allnet.py
533
core/allnet.py
|
@ -1,4 +1,4 @@
|
|||
from typing import Dict, List, Any, Optional, Tuple
|
||||
from typing import Dict, List, Any, Optional, Tuple, Union
|
||||
import logging, coloredlogs
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from twisted.web.http import Request
|
||||
|
@ -10,13 +10,17 @@ from Crypto.PublicKey import RSA
|
|||
from Crypto.Hash import SHA
|
||||
from Crypto.Signature import PKCS1_v1_5
|
||||
from time import strptime
|
||||
from os import path
|
||||
import urllib.parse
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.data import Data
|
||||
from core.utils import Utils
|
||||
from core.data import Data
|
||||
from core.const import *
|
||||
|
||||
|
||||
class AllnetServlet:
|
||||
def __init__(self, core_cfg: CoreConfig, cfg_folder: str):
|
||||
def __init__(self, core_cfg: CoreConfig, cfg_folder: str):
|
||||
super().__init__()
|
||||
self.config = core_cfg
|
||||
self.config_folder = cfg_folder
|
||||
|
@ -26,197 +30,308 @@ class AllnetServlet:
|
|||
self.logger = logging.getLogger("allnet")
|
||||
if not hasattr(self.logger, "initialized"):
|
||||
log_fmt_str = "[%(asctime)s] Allnet | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
|
||||
fileHandler = TimedRotatingFileHandler("{0}/{1}.log".format(self.config.server.log_dir, "allnet"), when="d", backupCount=10)
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(self.config.server.log_dir, "allnet"),
|
||||
when="d",
|
||||
backupCount=10,
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
self.logger.addHandler(fileHandler)
|
||||
self.logger.addHandler(consoleHandler)
|
||||
|
||||
|
||||
self.logger.setLevel(core_cfg.allnet.loglevel)
|
||||
coloredlogs.install(level=core_cfg.allnet.loglevel, logger=self.logger, fmt=log_fmt_str)
|
||||
coloredlogs.install(
|
||||
level=core_cfg.allnet.loglevel, logger=self.logger, fmt=log_fmt_str
|
||||
)
|
||||
self.logger.initialized = True
|
||||
|
||||
plugins = Utils.get_all_titles()
|
||||
|
||||
if len(plugins) == 0:
|
||||
self.logger.error("No games detected!")
|
||||
|
||||
|
||||
for _, mod in plugins.items():
|
||||
for code in mod.game_codes:
|
||||
if hasattr(mod, "use_default_title") and mod.use_default_title:
|
||||
if hasattr(mod, "include_protocol") and mod.include_protocol:
|
||||
if hasattr(mod, "title_secure") and mod.title_secure:
|
||||
uri = "https://"
|
||||
|
||||
else:
|
||||
uri = "http://"
|
||||
if hasattr(mod, "index") and hasattr(mod.index, "get_allnet_info"):
|
||||
for code in mod.game_codes:
|
||||
enabled, uri, host = mod.index.get_allnet_info(
|
||||
code, self.config, self.config_folder
|
||||
)
|
||||
|
||||
else:
|
||||
uri = ""
|
||||
|
||||
if core_cfg.server.is_develop:
|
||||
uri += f"{core_cfg.title.hostname}:{core_cfg.title.port}"
|
||||
|
||||
else:
|
||||
uri += f"{core_cfg.title.hostname}"
|
||||
|
||||
uri += f"/{code}/$v"
|
||||
if enabled:
|
||||
self.uri_registry[code] = (uri, host)
|
||||
|
||||
if hasattr(mod, "trailing_slash") and mod.trailing_slash:
|
||||
uri += "/"
|
||||
|
||||
else:
|
||||
if hasattr(mod, "uri"):
|
||||
uri = mod.uri
|
||||
else:
|
||||
uri = ""
|
||||
self.logger.info(
|
||||
f"Serving {len(self.uri_registry)} game codes port {core_cfg.allnet.port}"
|
||||
)
|
||||
|
||||
if hasattr(mod, "host"):
|
||||
host = mod.host
|
||||
|
||||
elif hasattr(mod, "use_default_host") and mod.use_default_host:
|
||||
if core_cfg.server.is_develop:
|
||||
host = f"{core_cfg.title.hostname}:{core_cfg.title.port}"
|
||||
|
||||
else:
|
||||
host = f"{core_cfg.title.hostname}"
|
||||
|
||||
else:
|
||||
host = ""
|
||||
|
||||
self.uri_registry[code] = (uri, host)
|
||||
self.logger.info(f"Allnet serving {len(self.uri_registry)} games on port {core_cfg.allnet.port}")
|
||||
|
||||
def handle_poweron(self, request: Request):
|
||||
request_ip = request.getClientAddress().host
|
||||
def handle_poweron(self, request: Request, _: Dict):
|
||||
request_ip = Utils.get_ip_addr(request)
|
||||
try:
|
||||
req = AllnetPowerOnRequest(self.allnet_req_to_dict(request.content.getvalue()))
|
||||
req_dict = self.allnet_req_to_dict(request.content.getvalue())
|
||||
if req_dict is None:
|
||||
raise AllnetRequestException()
|
||||
|
||||
req = AllnetPowerOnRequest(req_dict[0])
|
||||
# Validate the request. Currently we only validate the fields we plan on using
|
||||
|
||||
if not req.game_id or not req.ver or not req.token or not req.serial or not req.ip:
|
||||
raise AllnetRequestException(f"Bad auth request params from {request_ip} - {vars(req)}")
|
||||
|
||||
if not req.game_id or not req.ver or not req.serial or not req.ip or not req.firm_ver or not req.boot_ver:
|
||||
raise AllnetRequestException(
|
||||
f"Bad auth request params from {request_ip} - {vars(req)}"
|
||||
)
|
||||
|
||||
except AllnetRequestException as e:
|
||||
self.logger.error(e)
|
||||
if e.message != "":
|
||||
self.logger.error(e)
|
||||
return b""
|
||||
|
||||
|
||||
if req.format_ver == 3:
|
||||
resp = AllnetPowerOnResponse3(req.token)
|
||||
else:
|
||||
elif req.format_ver == 2:
|
||||
resp = AllnetPowerOnResponse2()
|
||||
else:
|
||||
resp = AllnetPowerOnResponse()
|
||||
|
||||
self.logger.debug(f"Allnet request: {vars(req)}")
|
||||
if req.game_id not in self.uri_registry:
|
||||
msg = f"Unrecognised game {req.game_id} attempted allnet auth from {request_ip}."
|
||||
self.data.base.log_event("allnet", "ALLNET_AUTH_UNKNOWN_GAME", logging.WARN, msg)
|
||||
self.logger.warn(msg)
|
||||
if not self.config.server.is_develop:
|
||||
msg = f"Unrecognised game {req.game_id} attempted allnet auth from {request_ip}."
|
||||
self.data.base.log_event(
|
||||
"allnet", "ALLNET_AUTH_UNKNOWN_GAME", logging.WARN, msg
|
||||
)
|
||||
self.logger.warn(msg)
|
||||
|
||||
resp.stat = -1
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
f"Allowed unknown game {req.game_id} v{req.ver} to authenticate from {request_ip} due to 'is_develop' being enabled. S/N: {req.serial}"
|
||||
)
|
||||
resp.uri = f"http://{self.config.title.hostname}:{self.config.title.port}/{req.game_id}/{req.ver.replace('.', '')}/"
|
||||
resp.host = f"{self.config.title.hostname}:{self.config.title.port}"
|
||||
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
resp_str = urllib.parse.unquote(urllib.parse.urlencode(resp_dict))
|
||||
|
||||
self.logger.debug(f"Allnet response: {resp_str}")
|
||||
return (resp_str + "\n").encode("utf-8")
|
||||
|
||||
resp.stat = 0
|
||||
return self.dict_to_http_form_string([vars(resp)])
|
||||
|
||||
resp.uri, resp.host = self.uri_registry[req.game_id]
|
||||
|
||||
machine = self.data.arcade.get_machine(req.serial)
|
||||
machine = self.data.arcade.get_machine(req.serial)
|
||||
if machine is None and not self.config.server.allow_unregistered_serials:
|
||||
msg = f"Unrecognised serial {req.serial} attempted allnet auth from {request_ip}."
|
||||
self.data.base.log_event("allnet", "ALLNET_AUTH_UNKNOWN_SERIAL", logging.WARN, msg)
|
||||
self.data.base.log_event(
|
||||
"allnet", "ALLNET_AUTH_UNKNOWN_SERIAL", logging.WARN, msg
|
||||
)
|
||||
self.logger.warn(msg)
|
||||
|
||||
resp.stat = 0
|
||||
return self.dict_to_http_form_string([vars(resp)])
|
||||
|
||||
resp.stat = -2
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
|
||||
|
||||
if machine is not None:
|
||||
arcade = self.data.arcade.get_arcade(machine["arcade"])
|
||||
req.country = arcade["country"] if machine["country"] is None else machine["country"]
|
||||
req.place_id = arcade["id"]
|
||||
req.allnet_id = machine["id"]
|
||||
req.name = arcade["name"]
|
||||
req.nickname = arcade["nickname"]
|
||||
req.region0 = arcade["region_id"]
|
||||
req.region_name0 = arcade["country"]
|
||||
req.region_name1 = arcade["state"]
|
||||
req.region_name2 = arcade["city"]
|
||||
req.client_timezone = arcade["timezone"] if arcade["timezone"] is not None else "+0900"
|
||||
|
||||
country = (
|
||||
arcade["country"] if machine["country"] is None else machine["country"]
|
||||
)
|
||||
if country is None:
|
||||
country = AllnetCountryCode.JAPAN.value
|
||||
|
||||
resp.country = country
|
||||
resp.place_id = arcade["id"]
|
||||
resp.allnet_id = machine["id"]
|
||||
resp.name = arcade["name"] if arcade["name"] is not None else ""
|
||||
resp.nickname = arcade["nickname"] if arcade["nickname"] is not None else ""
|
||||
resp.region0 = (
|
||||
arcade["region_id"]
|
||||
if arcade["region_id"] is not None
|
||||
else AllnetJapanRegionId.AICHI.value
|
||||
)
|
||||
resp.region_name0 = (
|
||||
arcade["country"]
|
||||
if arcade["country"] is not None
|
||||
else AllnetCountryCode.JAPAN.value
|
||||
)
|
||||
resp.region_name1 = (
|
||||
arcade["state"]
|
||||
if arcade["state"] is not None
|
||||
else AllnetJapanRegionId.AICHI.name
|
||||
)
|
||||
resp.region_name2 = arcade["city"] if arcade["city"] is not None else ""
|
||||
resp.client_timezone = (
|
||||
arcade["timezone"] if arcade["timezone"] is not None else "+0900"
|
||||
)
|
||||
|
||||
int_ver = req.ver.replace(".", "")
|
||||
resp.uri = resp.uri.replace("$v", int_ver)
|
||||
resp.host = resp.host.replace("$v", int_ver)
|
||||
|
||||
msg = f"{req.serial} authenticated from {request_ip}: {req.game_id} v{req.ver}"
|
||||
self.data.base.log_event("allnet", "ALLNET_AUTH_SUCCESS", logging.INFO, msg)
|
||||
self.logger.info(msg)
|
||||
|
||||
return self.dict_to_http_form_string([vars(resp)])
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
resp_str = urllib.parse.unquote(urllib.parse.urlencode(resp_dict))
|
||||
self.logger.debug(f"Allnet response: {resp_dict}")
|
||||
resp_str += "\n"
|
||||
|
||||
def handle_dlorder(self, request: Request):
|
||||
request_ip = request.getClientAddress().host
|
||||
return resp_str.encode("utf-8")
|
||||
|
||||
def handle_dlorder(self, request: Request, _: Dict):
|
||||
request_ip = Utils.get_ip_addr(request)
|
||||
try:
|
||||
req = AllnetDownloadOrderRequest(self.allnet_req_to_dict(request.content.getvalue()))
|
||||
req_dict = self.allnet_req_to_dict(request.content.getvalue())
|
||||
if req_dict is None:
|
||||
raise AllnetRequestException()
|
||||
|
||||
req = AllnetDownloadOrderRequest(req_dict[0])
|
||||
# Validate the request. Currently we only validate the fields we plan on using
|
||||
|
||||
if not req.game_id or not req.ver or not req.token or not req.serial or not req.ip:
|
||||
raise AllnetRequestException(f"Bad auth request params from {request_ip} - {vars(req)}")
|
||||
|
||||
if not req.game_id or not req.ver or not req.serial:
|
||||
raise AllnetRequestException(
|
||||
f"Bad download request params from {request_ip} - {vars(req)}"
|
||||
)
|
||||
|
||||
except AllnetRequestException as e:
|
||||
self.logger.error(e)
|
||||
if e.message != "":
|
||||
self.logger.error(e)
|
||||
return b""
|
||||
|
||||
resp = AllnetDownloadOrderResponse()
|
||||
if not self.config.allnet.allow_online_updates:
|
||||
return self.dict_to_http_form_string(vars(resp))
|
||||
|
||||
else: # TODO: Actual dlorder response
|
||||
return self.dict_to_http_form_string(vars(resp))
|
||||
self.logger.info(
|
||||
f"DownloadOrder from {request_ip} -> {req.game_id} v{req.ver} serial {req.serial}"
|
||||
)
|
||||
resp = AllnetDownloadOrderResponse(serial=req.serial)
|
||||
|
||||
def handle_billing_request(self, request: Request):
|
||||
if (
|
||||
not self.config.allnet.allow_online_updates
|
||||
or not self.config.allnet.update_cfg_folder
|
||||
):
|
||||
return urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n"
|
||||
|
||||
else: # TODO: Keychip check
|
||||
if path.exists(
|
||||
f"{self.config.allnet.update_cfg_folder}/{req.game_id}-{req.ver.replace('.', '')}-app.ini"
|
||||
):
|
||||
resp.uri = f"http://{self.config.title.hostname}:{self.config.title.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-app.ini"
|
||||
|
||||
if path.exists(
|
||||
f"{self.config.allnet.update_cfg_folder}/{req.game_id}-{req.ver.replace('.', '')}-opt.ini"
|
||||
):
|
||||
resp.uri += f"|http://{self.config.title.hostname}:{self.config.title.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-opt.ini"
|
||||
|
||||
self.logger.debug(f"Sending download uri {resp.uri}")
|
||||
self.data.base.log_event("allnet", "DLORDER_REQ_SUCCESS", logging.INFO, f"{Utils.get_ip_addr(request)} requested DL Order for {req.serial} {req.game_id} v{req.ver}")
|
||||
|
||||
return urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n"
|
||||
|
||||
def handle_dlorder_ini(self, request: Request, match: Dict) -> bytes:
|
||||
if "file" not in match:
|
||||
return b""
|
||||
|
||||
req_file = match["file"].replace("%0A", "")
|
||||
|
||||
if path.exists(f"{self.config.allnet.update_cfg_folder}/{req_file}"):
|
||||
self.logger.info(f"Request for DL INI file {req_file} from {Utils.get_ip_addr(request)} successful")
|
||||
self.data.base.log_event("allnet", "DLORDER_INI_SENT", logging.INFO, f"{Utils.get_ip_addr(request)} successfully recieved {req_file}")
|
||||
return open(
|
||||
f"{self.config.allnet.update_cfg_folder}/{req_file}", "rb"
|
||||
).read()
|
||||
|
||||
self.logger.info(f"DL INI File {req_file} not found")
|
||||
return b""
|
||||
|
||||
def handle_dlorder_report(self, request: Request, match: Dict) -> bytes:
|
||||
self.logger.info(
|
||||
f"DLI Report from {Utils.get_ip_addr(request)}: {request.content.getvalue()}"
|
||||
)
|
||||
return b""
|
||||
|
||||
def handle_loaderstaterecorder(self, request: Request, match: Dict) -> bytes:
|
||||
req_data = request.content.getvalue()
|
||||
sections = req_data.decode("utf-8").split("\r\n")
|
||||
|
||||
req_dict = dict(urllib.parse.parse_qsl(sections[0]))
|
||||
|
||||
serial: Union[str, None] = req_dict.get("serial", None)
|
||||
num_files_to_dl: Union[str, None] = req_dict.get("nb_ftd", None)
|
||||
num_files_dld: Union[str, None] = req_dict.get("nb_dld", None)
|
||||
dl_state: Union[str, None] = req_dict.get("dld_st", None)
|
||||
ip = Utils.get_ip_addr(request)
|
||||
|
||||
if serial is None or num_files_dld is None or num_files_to_dl is None or dl_state is None:
|
||||
return "NG".encode()
|
||||
|
||||
self.logger.info(f"LoaderStateRecorder Request from {ip} {serial}: {num_files_dld}/{num_files_to_dl} Files download (State: {dl_state})")
|
||||
return "OK".encode()
|
||||
|
||||
def handle_alive(self, request: Request, match: Dict) -> bytes:
|
||||
return "OK".encode()
|
||||
|
||||
def handle_billing_request(self, request: Request, _: Dict):
|
||||
req_dict = self.billing_req_to_dict(request.content.getvalue())
|
||||
request_ip = request.getClientAddress()
|
||||
request_ip = Utils.get_ip_addr(request)
|
||||
if req_dict is None:
|
||||
self.logger.error(f"Failed to parse request {request.content.getvalue()}")
|
||||
return b""
|
||||
|
||||
|
||||
self.logger.debug(f"request {req_dict}")
|
||||
|
||||
rsa = RSA.import_key(open(self.config.billing.sign_key, 'rb').read())
|
||||
rsa = RSA.import_key(open(self.config.billing.signing_key, "rb").read())
|
||||
signer = PKCS1_v1_5.new(rsa)
|
||||
digest = SHA.new()
|
||||
|
||||
kc_playlimit = int(req_dict[0]["playlimit"])
|
||||
kc_nearfull = int(req_dict[0]["nearfull"])
|
||||
kc_billigtype = int(req_dict[0]["billingtype"])
|
||||
kc_playcount = int(req_dict[0]["playcnt"])
|
||||
kc_serial: str = req_dict[0]["keychipid"]
|
||||
kc_game: str = req_dict[0]["gameid"]
|
||||
kc_date = strptime(req_dict[0]["date"], "%Y%m%d%H%M%S")
|
||||
kc_serial_bytes = kc_serial.encode()
|
||||
try:
|
||||
kc_playlimit = int(req_dict[0]["playlimit"])
|
||||
kc_nearfull = int(req_dict[0]["nearfull"])
|
||||
kc_billigtype = int(req_dict[0]["billingtype"])
|
||||
kc_playcount = int(req_dict[0]["playcnt"])
|
||||
kc_serial: str = req_dict[0]["keychipid"]
|
||||
kc_game: str = req_dict[0]["gameid"]
|
||||
kc_date = strptime(req_dict[0]["date"], "%Y%m%d%H%M%S")
|
||||
kc_serial_bytes = kc_serial.encode()
|
||||
|
||||
except KeyError as e:
|
||||
return f"result=5&linelimit=&message={e} field is missing".encode()
|
||||
|
||||
machine = self.data.arcade.get_machine(kc_serial)
|
||||
if machine is None and not self.config.server.allow_unregistered_serials:
|
||||
msg = f"Unrecognised serial {kc_serial} attempted billing checkin from {request_ip} for game {kc_game}."
|
||||
self.data.base.log_event("allnet", "BILLING_CHECKIN_NG_SERIAL", logging.WARN, msg)
|
||||
self.data.base.log_event(
|
||||
"allnet", "BILLING_CHECKIN_NG_SERIAL", logging.WARN, msg
|
||||
)
|
||||
self.logger.warn(msg)
|
||||
|
||||
resp = BillingResponse("", "", "", "")
|
||||
resp.result = "1"
|
||||
return self.dict_to_http_form_string([vars(resp)])
|
||||
|
||||
msg = f"Billing checkin from {request.getClientIP()}: game {kc_game} keychip {kc_serial} playcount " \
|
||||
msg = (
|
||||
f"Billing checkin from {request_ip}: game {kc_game} keychip {kc_serial} playcount "
|
||||
f"{kc_playcount} billing_type {kc_billigtype} nearfull {kc_nearfull} playlimit {kc_playlimit}"
|
||||
)
|
||||
self.logger.info(msg)
|
||||
self.data.base.log_event('billing', 'BILLING_CHECKIN_OK', logging.INFO, msg)
|
||||
self.data.base.log_event("billing", "BILLING_CHECKIN_OK", logging.INFO, msg)
|
||||
|
||||
while kc_playcount > kc_playlimit:
|
||||
kc_playlimit += 1024
|
||||
kc_nearfull += 1024
|
||||
|
||||
|
||||
playlimit = kc_playlimit
|
||||
nearfull = kc_nearfull + (kc_billigtype * 0x00010000)
|
||||
|
||||
digest.update(playlimit.to_bytes(4, 'little') + kc_serial_bytes)
|
||||
digest.update(playlimit.to_bytes(4, "little") + kc_serial_bytes)
|
||||
playlimit_sig = signer.sign(digest).hex()
|
||||
|
||||
digest = SHA.new()
|
||||
digest.update(nearfull.to_bytes(4, 'little') + kc_serial_bytes)
|
||||
digest.update(nearfull.to_bytes(4, "little") + kc_serial_bytes)
|
||||
nearfull_sig = signer.sign(digest).hex()
|
||||
|
||||
# TODO: playhistory
|
||||
|
@ -230,157 +345,170 @@ class AllnetServlet:
|
|||
self.logger.debug(f"response {vars(resp)}")
|
||||
return resp_str.encode("utf-8")
|
||||
|
||||
def kvp_to_dict(self, *kvp: str) -> List[Dict[str, Any]]:
|
||||
ret: List[Dict[str, Any]] = []
|
||||
for x in kvp:
|
||||
items = x.split('&')
|
||||
tmp = {}
|
||||
def handle_naomitest(self, request: Request, _: Dict) -> bytes:
|
||||
self.logger.info(f"Ping from {Utils.get_ip_addr(request)}")
|
||||
return b"naomi ok"
|
||||
|
||||
for item in items:
|
||||
kvp = item.split('=')
|
||||
if len(kvp) == 2:
|
||||
tmp[kvp[0]] = kvp[1]
|
||||
|
||||
ret.append(tmp)
|
||||
|
||||
def allnet_req_to_dict(self, data: bytes):
|
||||
def billing_req_to_dict(self, data: bytes):
|
||||
"""
|
||||
Parses an billing request string into a python dictionary
|
||||
"""
|
||||
try:
|
||||
decomp = zlib.decompressobj(-zlib.MAX_WBITS)
|
||||
unzipped = decomp.decompress(data)
|
||||
sections = unzipped.decode('ascii').split('\r\n')
|
||||
|
||||
return Utils.kvp_to_dict(sections)
|
||||
sections = unzipped.decode("ascii").split("\r\n")
|
||||
|
||||
ret = []
|
||||
for x in sections:
|
||||
ret.append(dict(urllib.parse.parse_qsl(x)))
|
||||
return ret
|
||||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
self.logger.error(f"billing_req_to_dict: {e} while parsing {data}")
|
||||
return None
|
||||
|
||||
def billing_req_to_dict(self, data: str) -> Optional[List[Dict[str, Any]]]:
|
||||
def allnet_req_to_dict(self, data: str) -> Optional[List[Dict[str, Any]]]:
|
||||
"""
|
||||
Parses an allnet request string into a python dictionary
|
||||
"""
|
||||
"""
|
||||
try:
|
||||
zipped = base64.b64decode(data)
|
||||
unzipped = zlib.decompress(zipped)
|
||||
sections = unzipped.decode('utf-8').split('\r\n')
|
||||
|
||||
return Utils.kvp_to_dict(sections)
|
||||
sections = unzipped.decode("utf-8").split("\r\n")
|
||||
|
||||
ret = []
|
||||
for x in sections:
|
||||
ret.append(dict(urllib.parse.parse_qsl(x)))
|
||||
return ret
|
||||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
self.logger.error(f"allnet_req_to_dict: {e} while parsing {data}")
|
||||
return None
|
||||
|
||||
def dict_to_http_form_string(self, data:List[Dict[str, Any]], crlf: bool = False, trailing_newline: bool = True) -> Optional[str]:
|
||||
def dict_to_http_form_string(
|
||||
self,
|
||||
data: List[Dict[str, Any]],
|
||||
crlf: bool = True,
|
||||
trailing_newline: bool = True,
|
||||
) -> Optional[str]:
|
||||
"""
|
||||
Takes a python dictionary and parses it into an allnet response string
|
||||
"""
|
||||
try:
|
||||
urlencode = ""
|
||||
for item in data:
|
||||
for k,v in item.items():
|
||||
for k, v in item.items():
|
||||
if k is None or v is None:
|
||||
continue
|
||||
urlencode += f"{k}={v}&"
|
||||
|
||||
if crlf:
|
||||
urlencode = urlencode[:-1] + "\r\n"
|
||||
else:
|
||||
urlencode = urlencode[:-1] + "\n"
|
||||
|
||||
if not trailing_newline:
|
||||
if crlf:
|
||||
urlencode = urlencode[:-2]
|
||||
else:
|
||||
urlencode = urlencode[:-1]
|
||||
|
||||
return urlencode
|
||||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
self.logger.error(f"dict_to_http_form_string: {e} while parsing {data}")
|
||||
return None
|
||||
|
||||
class AllnetPowerOnRequest():
|
||||
|
||||
class AllnetPowerOnRequest:
|
||||
def __init__(self, req: Dict) -> None:
|
||||
if req is None:
|
||||
raise AllnetRequestException("Request processing failed")
|
||||
self.game_id: str = req["game_id"] if "game_id" in req else ""
|
||||
self.ver: str = req["ver"] if "ver" in req else ""
|
||||
self.serial: str = req["serial"] if "serial" in req else ""
|
||||
self.ip: str = req["ip"] if "ip" in req else ""
|
||||
self.firm_ver: str = req["firm_ver"] if "firm_ver" in req else ""
|
||||
self.boot_ver: str = req["boot_ver"] if "boot_ver" in req else ""
|
||||
self.encode: str = req["encode"] if "encode" in req else ""
|
||||
|
||||
try:
|
||||
self.hops = int(req["hops"]) if "hops" in req else 0
|
||||
self.format_ver = int(req["format_ver"]) if "format_ver" in req else 2
|
||||
self.token = int(req["token"]) if "token" in req else 0
|
||||
except ValueError as e:
|
||||
raise AllnetRequestException(f"Failed to parse int: {e}")
|
||||
self.game_id: str = req.get("game_id", None)
|
||||
self.ver: str = req.get("ver", None)
|
||||
self.serial: str = req.get("serial", None)
|
||||
self.ip: str = req.get("ip", None)
|
||||
self.firm_ver: str = req.get("firm_ver", None)
|
||||
self.boot_ver: str = req.get("boot_ver", None)
|
||||
self.encode: str = req.get("encode", "EUC-JP")
|
||||
self.hops = int(req.get("hops", "-1"))
|
||||
self.format_ver = float(req.get("format_ver", "1.00"))
|
||||
self.token: str = req.get("token", "0")
|
||||
|
||||
class AllnetPowerOnResponse3():
|
||||
def __init__(self, token) -> None:
|
||||
self.stat = 1
|
||||
self.uri = ""
|
||||
self.host = ""
|
||||
self.place_id = "123"
|
||||
self.name = ""
|
||||
self.nickname = ""
|
||||
self.region0 = "1"
|
||||
self.region_name0 = "W"
|
||||
self.region_name1 = ""
|
||||
self.region_name2 = ""
|
||||
self.region_name3 = ""
|
||||
self.country = "JPN"
|
||||
self.allnet_id = "123"
|
||||
self.client_timezone = "+0900"
|
||||
self.utc_time = datetime.now(tz=pytz.timezone('UTC')).strftime("%Y-%m-%dT%H:%M:%SZ")
|
||||
self.setting = ""
|
||||
self.res_ver = "3"
|
||||
self.token = str(token)
|
||||
|
||||
class AllnetPowerOnResponse2():
|
||||
class AllnetPowerOnResponse:
|
||||
def __init__(self) -> None:
|
||||
self.stat = 1
|
||||
self.uri = ""
|
||||
self.host = ""
|
||||
self.place_id = "123"
|
||||
self.name = "Test"
|
||||
self.nickname = "Test123"
|
||||
self.name = "ARTEMiS"
|
||||
self.nickname = "ARTEMiS"
|
||||
self.region0 = "1"
|
||||
self.region_name0 = "W"
|
||||
self.region_name1 = "X"
|
||||
self.region_name2 = "Y"
|
||||
self.region_name3 = "Z"
|
||||
self.country = "JPN"
|
||||
self.region_name1 = ""
|
||||
self.region_name2 = ""
|
||||
self.region_name3 = ""
|
||||
self.setting = "1"
|
||||
self.year = datetime.now().year
|
||||
self.month = datetime.now().month
|
||||
self.day = datetime.now().day
|
||||
self.hour = datetime.now().hour
|
||||
self.minute = datetime.now().minute
|
||||
self.second = datetime.now().second
|
||||
self.setting = "1"
|
||||
self.timezone = "+0900"
|
||||
|
||||
class AllnetPowerOnResponse3(AllnetPowerOnResponse):
|
||||
def __init__(self, token) -> None:
|
||||
super().__init__()
|
||||
|
||||
# Added in v3
|
||||
self.country = "JPN"
|
||||
self.allnet_id = "123"
|
||||
self.client_timezone = "+0900"
|
||||
self.utc_time = datetime.now(tz=pytz.timezone("UTC")).strftime(
|
||||
"%Y-%m-%dT%H:%M:%SZ"
|
||||
)
|
||||
self.res_ver = "3"
|
||||
self.token = token
|
||||
|
||||
# Removed in v3
|
||||
self.year = None
|
||||
self.month = None
|
||||
self.day = None
|
||||
self.hour = None
|
||||
self.minute = None
|
||||
self.second = None
|
||||
|
||||
|
||||
class AllnetPowerOnResponse2(AllnetPowerOnResponse):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
|
||||
# Added in v2
|
||||
self.country = "JPN"
|
||||
self.timezone = "+09:00"
|
||||
self.res_class = "PowerOnResponseV2"
|
||||
|
||||
class AllnetDownloadOrderRequest():
|
||||
def __init__(self, req: Dict) -> None:
|
||||
self.game_id = req["game_id"] if "game_id" in req else ""
|
||||
self.ver = req["ver"] if "ver" in req else ""
|
||||
self.serial = req["serial"] if "serial" in req else ""
|
||||
self.encode = req["encode"] if "encode" in req else ""
|
||||
|
||||
class AllnetDownloadOrderResponse():
|
||||
def __init__(self, stat: int = 1, serial: str = "", uri: str = "null") -> None:
|
||||
class AllnetDownloadOrderRequest:
|
||||
def __init__(self, req: Dict) -> None:
|
||||
self.game_id = req.get("game_id", "")
|
||||
self.ver = req.get("ver", "")
|
||||
self.serial = req.get("serial", "")
|
||||
self.encode = req.get("encode", "")
|
||||
|
||||
|
||||
class AllnetDownloadOrderResponse:
|
||||
def __init__(self, stat: int = 1, serial: str = "", uri: str = "") -> None:
|
||||
self.stat = stat
|
||||
self.serial = serial
|
||||
self.uri = uri
|
||||
|
||||
class BillingResponse():
|
||||
def __init__(self, playlimit: str = "", playlimit_sig: str = "", nearfull: str = "", nearfull_sig: str = "",
|
||||
playhistory: str = "000000/0:000000/0:000000/0") -> None:
|
||||
|
||||
class BillingResponse:
|
||||
def __init__(
|
||||
self,
|
||||
playlimit: str = "",
|
||||
playlimit_sig: str = "",
|
||||
nearfull: str = "",
|
||||
nearfull_sig: str = "",
|
||||
playhistory: str = "000000/0:000000/0:000000/0",
|
||||
) -> None:
|
||||
self.result = "0"
|
||||
self.waitime = "100"
|
||||
self.linelimit = "1"
|
||||
|
@ -392,11 +520,12 @@ class BillingResponse():
|
|||
self.nearfullsig = nearfull_sig
|
||||
self.fixlogincnt = "0"
|
||||
self.fixinterval = "5"
|
||||
self.playhistory = playhistory
|
||||
self.playhistory = playhistory
|
||||
# playhistory -> YYYYMM/C:...
|
||||
# YYYY -> 4 digit year, MM -> 2 digit month, C -> Playcount during that period
|
||||
|
||||
|
||||
class AllnetRequestException(Exception):
|
||||
def __init__(self, message="Allnet Request Error") -> None:
|
||||
def __init__(self, message="") -> None:
|
||||
self.message = message
|
||||
super().__init__(self.message)
|
||||
|
|
226
core/config.py
226
core/config.py
|
@ -1,33 +1,53 @@
|
|||
import logging, os
|
||||
from typing import Any
|
||||
|
||||
|
||||
class ServerConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
@property
|
||||
def listen_address(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'server', 'listen_address', default='127.0.0.1')
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "listen_address", default="127.0.0.1"
|
||||
)
|
||||
|
||||
@property
|
||||
def allow_user_registration(self) -> bool:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'server', 'allow_user_registration', default=True)
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "allow_user_registration", default=True
|
||||
)
|
||||
|
||||
@property
|
||||
def allow_unregistered_serials(self) -> bool:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'server', 'allow_unregistered_serials', default=True)
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "allow_unregistered_serials", default=True
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'server', 'name', default="ARTEMiS")
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "name", default="ARTEMiS"
|
||||
)
|
||||
|
||||
@property
|
||||
def is_develop(self) -> bool:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'server', 'is_develop', default=True)
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "is_develop", default=True
|
||||
)
|
||||
|
||||
@property
|
||||
def threading(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "threading", default=False
|
||||
)
|
||||
|
||||
@property
|
||||
def log_dir(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'server', 'log_dir', default='logs')
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "log_dir", default="logs"
|
||||
)
|
||||
|
||||
|
||||
class TitleConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
|
@ -35,15 +55,24 @@ class TitleConfig:
|
|||
|
||||
@property
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(CoreConfig.get_config_field(self.__config, 'core', 'title', 'loglevel', default="info"))
|
||||
return CoreConfig.str_to_loglevel(
|
||||
CoreConfig.get_config_field(
|
||||
self.__config, "core", "title", "loglevel", default="info"
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def hostname(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'title', 'hostname', default="localhost")
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "title", "hostname", default="localhost"
|
||||
)
|
||||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'title', 'port', default=8080)
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "title", "port", default=8080
|
||||
)
|
||||
|
||||
|
||||
class DatabaseConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
|
@ -51,43 +80,70 @@ class DatabaseConfig:
|
|||
|
||||
@property
|
||||
def host(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'database', 'host', default="localhost")
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "host", default="localhost"
|
||||
)
|
||||
|
||||
@property
|
||||
def username(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'database', 'username', default='aime')
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "username", default="aime"
|
||||
)
|
||||
|
||||
@property
|
||||
def password(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'database', 'password', default='aime')
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "password", default="aime"
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'database', 'name', default='aime')
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "name", default="aime"
|
||||
)
|
||||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'database', 'port', default=3306)
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "port", default=3306
|
||||
)
|
||||
|
||||
@property
|
||||
def protocol(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'database', 'type', default="mysql")
|
||||
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "type", default="mysql"
|
||||
)
|
||||
|
||||
@property
|
||||
def sha2_password(self) -> bool:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'database', 'sha2_password', default=False)
|
||||
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "sha2_password", default=False
|
||||
)
|
||||
|
||||
@property
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(CoreConfig.get_config_field(self.__config, 'core', 'database', 'loglevel', default="info"))
|
||||
return CoreConfig.str_to_loglevel(
|
||||
CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "loglevel", default="info"
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def user_table_autoincrement_start(self) -> int:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'database', 'user_table_autoincrement_start', default=10000)
|
||||
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config,
|
||||
"core",
|
||||
"database",
|
||||
"user_table_autoincrement_start",
|
||||
default=10000,
|
||||
)
|
||||
|
||||
@property
|
||||
def memcached_host(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'database', 'memcached_host', default="localhost")
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "memcached_host", default="localhost"
|
||||
)
|
||||
|
||||
|
||||
class FrontendConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
|
@ -95,15 +151,24 @@ class FrontendConfig:
|
|||
|
||||
@property
|
||||
def enable(self) -> int:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'frontend', 'enable', default=False)
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "frontend", "enable", default=False
|
||||
)
|
||||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'frontend', 'port', default=8090)
|
||||
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "frontend", "port", default=8090
|
||||
)
|
||||
|
||||
@property
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(CoreConfig.get_config_field(self.__config, 'core', 'frontend', 'loglevel', default="info"))
|
||||
return CoreConfig.str_to_loglevel(
|
||||
CoreConfig.get_config_field(
|
||||
self.__config, "core", "frontend", "loglevel", default="info"
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class AllnetConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
|
@ -111,15 +176,30 @@ class AllnetConfig:
|
|||
|
||||
@property
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(CoreConfig.get_config_field(self.__config, 'core', 'allnet', 'loglevel', default="info"))
|
||||
return CoreConfig.str_to_loglevel(
|
||||
CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "loglevel", default="info"
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'allnet', 'port', default=80)
|
||||
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "port", default=80
|
||||
)
|
||||
|
||||
@property
|
||||
def allow_online_updates(self) -> int:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'allnet', 'allow_online_updates', default=False)
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "allow_online_updates", default=False
|
||||
)
|
||||
|
||||
@property
|
||||
def update_cfg_folder(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "update_cfg_folder", default=""
|
||||
)
|
||||
|
||||
|
||||
class BillingConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
|
@ -127,35 +207,53 @@ class BillingConfig:
|
|||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'billing', 'port', default=8443)
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "billing", "port", default=8443
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_key(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'billing', 'ssl_key', default="cert/server.key")
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "billing", "ssl_key", default="cert/server.key"
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_cert(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'billing', 'ssl_cert', default="cert/server.pem")
|
||||
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "billing", "ssl_cert", default="cert/server.pem"
|
||||
)
|
||||
|
||||
@property
|
||||
def signing_key(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'billing', 'signing_key', default="cert/billing.key")
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "billing", "signing_key", default="cert/billing.key"
|
||||
)
|
||||
|
||||
|
||||
class AimedbConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
|
||||
@property
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(CoreConfig.get_config_field(self.__config, 'core', 'aimedb', 'loglevel', default="info"))
|
||||
return CoreConfig.str_to_loglevel(
|
||||
CoreConfig.get_config_field(
|
||||
self.__config, "core", "aimedb", "loglevel", default="info"
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'aimedb', 'port', default=22345)
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "aimedb", "port", default=22345
|
||||
)
|
||||
|
||||
@property
|
||||
def key(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'aimedb', 'key', default="")
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "aimedb", "key", default=""
|
||||
)
|
||||
|
||||
|
||||
class MuchaConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
|
@ -163,27 +261,24 @@ class MuchaConfig:
|
|||
|
||||
@property
|
||||
def enable(self) -> int:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'mucha', 'enable', default=False)
|
||||
|
||||
@property
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(CoreConfig.get_config_field(self.__config, 'core', 'mucha', 'loglevel', default="info"))
|
||||
|
||||
@property
|
||||
def hostname(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'mucha', 'hostname', default="localhost")
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "mucha", "enable", default=False
|
||||
)
|
||||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'mucha', 'port', default=8444)
|
||||
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(
|
||||
CoreConfig.get_config_field(
|
||||
self.__config, "core", "mucha", "loglevel", default="info"
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_cert(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'mucha', 'ssl_cert', default="cert/server.pem")
|
||||
|
||||
@property
|
||||
def signing_key(self) -> str:
|
||||
return CoreConfig.get_config_field(self.__config, 'core', 'mucha', 'signing_key', default="cert/billing.key")
|
||||
def hostname(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "mucha", "hostname", default="localhost"
|
||||
)
|
||||
|
||||
|
||||
class CoreConfig(dict):
|
||||
def __init__(self) -> None:
|
||||
|
@ -194,25 +289,28 @@ class CoreConfig(dict):
|
|||
self.allnet = AllnetConfig(self)
|
||||
self.billing = BillingConfig(self)
|
||||
self.aimedb = AimedbConfig(self)
|
||||
self.mucha = MuchaConfig(self)
|
||||
|
||||
@classmethod
|
||||
def str_to_loglevel(cls, level_str: str):
|
||||
if level_str.lower() == "error":
|
||||
return logging.ERROR
|
||||
elif level_str.lower().startswith("warn"): # Fits warn or warning
|
||||
elif level_str.lower().startswith("warn"): # Fits warn or warning
|
||||
return logging.WARN
|
||||
elif level_str.lower() == "debug":
|
||||
return logging.DEBUG
|
||||
else:
|
||||
return logging.INFO
|
||||
return logging.INFO
|
||||
|
||||
@classmethod
|
||||
def get_config_field(cls, __config: dict, module, *path: str, default: Any = "") -> Any:
|
||||
envKey = f'CFG_{module}_'
|
||||
def get_config_field(
|
||||
cls, __config: dict, module, *path: str, default: Any = ""
|
||||
) -> Any:
|
||||
envKey = f"CFG_{module}_"
|
||||
for arg in path:
|
||||
envKey += arg + '_'
|
||||
|
||||
if envKey.endswith('_'):
|
||||
envKey += arg + "_"
|
||||
|
||||
if envKey.endswith("_"):
|
||||
envKey = envKey[:-1]
|
||||
|
||||
if envKey in os.environ:
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
from enum import Enum
|
||||
|
||||
class MainboardPlatformCodes():
|
||||
|
||||
class MainboardPlatformCodes:
|
||||
RINGEDGE = "AALE"
|
||||
RINGWIDE = "AAML"
|
||||
NU = "AAVE"
|
||||
|
@ -8,7 +9,8 @@ class MainboardPlatformCodes():
|
|||
ALLS_UX = "ACAE"
|
||||
ALLS_HX = "ACAX"
|
||||
|
||||
class MainboardRevisions():
|
||||
|
||||
class MainboardRevisions:
|
||||
RINGEDGE = 1
|
||||
RINGEDGE2 = 2
|
||||
|
||||
|
@ -26,11 +28,70 @@ class MainboardRevisions():
|
|||
ALLS_UX2 = 2
|
||||
ALLS_HX2 = 12
|
||||
|
||||
class KeychipPlatformsCodes():
|
||||
|
||||
class KeychipPlatformsCodes:
|
||||
RING = "A72E"
|
||||
NU = ("A60E", "A60E", "A60E")
|
||||
NUSX = ("A61X", "A69X")
|
||||
ALLS = "A63E"
|
||||
|
||||
class RegionIDs(Enum):
|
||||
pass
|
||||
|
||||
|
||||
class AllnetCountryCode(Enum):
|
||||
JAPAN = "JPN"
|
||||
UNITED_STATES = "USA"
|
||||
HONG_KONG = "HKG"
|
||||
SINGAPORE = "SGP"
|
||||
SOUTH_KOREA = "KOR"
|
||||
TAIWAN = "TWN"
|
||||
CHINA = "CHN"
|
||||
|
||||
|
||||
class AllnetJapanRegionId(Enum):
|
||||
NONE = 0
|
||||
AICHI = 1
|
||||
AOMORI = 2
|
||||
AKITA = 3
|
||||
ISHIKAWA = 4
|
||||
IBARAKI = 5
|
||||
IWATE = 6
|
||||
EHIME = 7
|
||||
OITA = 8
|
||||
OSAKA = 9
|
||||
OKAYAMA = 10
|
||||
OKINAWA = 11
|
||||
KAGAWA = 12
|
||||
KAGOSHIMA = 13
|
||||
KANAGAWA = 14
|
||||
GIFU = 15
|
||||
KYOTO = 16
|
||||
KUMAMOTO = 17
|
||||
GUNMA = 18
|
||||
KOCHI = 19
|
||||
SAITAMA = 20
|
||||
SAGA = 21
|
||||
SHIGA = 22
|
||||
SHIZUOKA = 23
|
||||
SHIMANE = 24
|
||||
CHIBA = 25
|
||||
TOKYO = 26
|
||||
TOKUSHIMA = 27
|
||||
TOCHIGI = 28
|
||||
TOTTORI = 29
|
||||
TOYAMA = 30
|
||||
NAGASAKI = 31
|
||||
NAGANO = 32
|
||||
NARA = 33
|
||||
NIIGATA = 34
|
||||
HYOGO = 35
|
||||
HIROSHIMA = 36
|
||||
FUKUI = 37
|
||||
FUKUOKA = 38
|
||||
FUKUSHIMA = 39
|
||||
HOKKAIDO = 40
|
||||
MIE = 41
|
||||
MIYAGI = 42
|
||||
MIYAZAKI = 43
|
||||
YAMAGATA = 44
|
||||
YAMAGUCHI = 45
|
||||
YAMANASHI = 46
|
||||
WAKAYAMA = 47
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
from core.data.database import Data
|
||||
from core.data.cache import cached
|
||||
from core.data.cache import cached
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
|
||||
from typing import Any, Callable
|
||||
from functools import wraps
|
||||
import hashlib
|
||||
|
@ -6,15 +5,17 @@ import pickle
|
|||
import logging
|
||||
from core.config import CoreConfig
|
||||
|
||||
cfg:CoreConfig = None # type: ignore
|
||||
cfg: CoreConfig = None # type: ignore
|
||||
# Make memcache optional
|
||||
try:
|
||||
import pylibmc # type: ignore
|
||||
|
||||
has_mc = True
|
||||
except ModuleNotFoundError:
|
||||
has_mc = False
|
||||
|
||||
def cached(lifetime: int=10, extra_key: Any=None) -> Callable:
|
||||
|
||||
def cached(lifetime: int = 10, extra_key: Any = None) -> Callable:
|
||||
def _cached(func: Callable) -> Callable:
|
||||
if has_mc:
|
||||
hostname = "127.0.0.1"
|
||||
|
@ -22,11 +23,10 @@ def cached(lifetime: int=10, extra_key: Any=None) -> Callable:
|
|||
hostname = cfg.database.memcached_host
|
||||
memcache = pylibmc.Client([hostname], binary=True)
|
||||
memcache.behaviors = {"tcp_nodelay": True, "ketama": True}
|
||||
|
||||
|
||||
@wraps(func)
|
||||
def wrapper(*args: Any, **kwargs: Any) -> Any:
|
||||
if lifetime is not None:
|
||||
|
||||
# Hash function args
|
||||
items = kwargs.items()
|
||||
hashable_args = (args[1:], sorted(list(items)))
|
||||
|
@ -41,7 +41,7 @@ def cached(lifetime: int=10, extra_key: Any=None) -> Callable:
|
|||
except pylibmc.Error as e:
|
||||
logging.getLogger("database").error(f"Memcache failed: {e}")
|
||||
result = None
|
||||
|
||||
|
||||
if result is not None:
|
||||
logging.getLogger("database").debug(f"Cache hit: {result}")
|
||||
return result
|
||||
|
@ -55,7 +55,9 @@ def cached(lifetime: int=10, extra_key: Any=None) -> Callable:
|
|||
memcache.set(cache_key, result, lifetime)
|
||||
|
||||
return result
|
||||
|
||||
else:
|
||||
|
||||
@wraps(func)
|
||||
def wrapper(*args: Any, **kwargs: Any) -> Any:
|
||||
return func(*args, **kwargs)
|
||||
|
|
|
@ -1,45 +1,70 @@
|
|||
import logging, coloredlogs
|
||||
from typing import Any, Dict, List
|
||||
from typing import Optional, Dict, List
|
||||
from sqlalchemy.orm import scoped_session, sessionmaker
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy import create_engine
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
|
||||
import importlib, os
|
||||
import secrets, string
|
||||
import bcrypt
|
||||
from hashlib import sha256
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.data.schema import *
|
||||
from core.utils import Utils
|
||||
|
||||
|
||||
class Data:
|
||||
current_schema_version = 4
|
||||
engine = None
|
||||
session = None
|
||||
user = None
|
||||
arcade = None
|
||||
card = None
|
||||
base = None
|
||||
def __init__(self, cfg: CoreConfig) -> None:
|
||||
self.config = cfg
|
||||
|
||||
if self.config.database.sha2_password:
|
||||
passwd = sha256(self.config.database.password.encode()).digest()
|
||||
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{passwd.hex()}@{self.config.database.host}/{self.config.database.name}?charset=utf8mb4"
|
||||
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{passwd.hex()}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}?charset=utf8mb4"
|
||||
else:
|
||||
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{self.config.database.password}@{self.config.database.host}/{self.config.database.name}?charset=utf8mb4"
|
||||
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{self.config.database.password}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}?charset=utf8mb4"
|
||||
|
||||
if Data.engine is None:
|
||||
Data.engine = create_engine(self.__url, pool_recycle=3600)
|
||||
self.__engine = Data.engine
|
||||
|
||||
if Data.session is None:
|
||||
s = sessionmaker(bind=Data.engine, autoflush=True, autocommit=True)
|
||||
Data.session = scoped_session(s)
|
||||
|
||||
if Data.user is None:
|
||||
Data.user = UserData(self.config, self.session)
|
||||
|
||||
self.__engine = create_engine(self.__url, pool_recycle=3600)
|
||||
session = sessionmaker(bind=self.__engine, autoflush=True, autocommit=True)
|
||||
self.session = scoped_session(session)
|
||||
if Data.arcade is None:
|
||||
Data.arcade = ArcadeData(self.config, self.session)
|
||||
|
||||
if Data.card is None:
|
||||
Data.card = CardData(self.config, self.session)
|
||||
|
||||
if Data.base is None:
|
||||
Data.base = BaseData(self.config, self.session)
|
||||
|
||||
self.user = UserData(self.config, self.session)
|
||||
self.arcade = ArcadeData(self.config, self.session)
|
||||
self.card = CardData(self.config, self.session)
|
||||
self.base = BaseData(self.config, self.session)
|
||||
self.schema_ver_latest = 1
|
||||
|
||||
log_fmt_str = "[%(asctime)s] %(levelname)s | Database | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
self.logger = logging.getLogger("database")
|
||||
|
||||
# Prevent the logger from adding handlers multiple times
|
||||
if not getattr(self.logger, 'handler_set', None):
|
||||
fileHandler = TimedRotatingFileHandler("{0}/{1}.log".format(self.config.server.log_dir, "db"), encoding="utf-8",
|
||||
when="d", backupCount=10)
|
||||
if not getattr(self.logger, "handler_set", None):
|
||||
log_fmt_str = "[%(asctime)s] %(levelname)s | Database | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(self.config.server.log_dir, "db"),
|
||||
encoding="utf-8",
|
||||
when="d",
|
||||
backupCount=10,
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
|
@ -47,7 +72,286 @@ class Data:
|
|||
self.logger.addHandler(consoleHandler)
|
||||
|
||||
self.logger.setLevel(self.config.database.loglevel)
|
||||
coloredlogs.install(cfg.database.loglevel, logger=self.logger, fmt=log_fmt_str)
|
||||
self.logger.handler_set = True # type: ignore
|
||||
coloredlogs.install(
|
||||
cfg.database.loglevel, logger=self.logger, fmt=log_fmt_str
|
||||
)
|
||||
self.logger.handler_set = True # type: ignore
|
||||
|
||||
|
||||
def create_database(self):
|
||||
self.logger.info("Creating databases...")
|
||||
try:
|
||||
metadata.create_all(self.__engine.connect())
|
||||
except SQLAlchemyError as e:
|
||||
self.logger.error(f"Failed to create databases! {e}")
|
||||
return
|
||||
|
||||
games = Utils.get_all_titles()
|
||||
for game_dir, game_mod in games.items():
|
||||
try:
|
||||
if hasattr(game_mod, "database") and hasattr(
|
||||
game_mod, "current_schema_version"
|
||||
):
|
||||
game_mod.database(self.config)
|
||||
metadata.create_all(self.__engine.connect())
|
||||
|
||||
self.base.touch_schema_ver(
|
||||
game_mod.current_schema_version, game_mod.game_codes[0]
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
f"Could not load database schema from {game_dir} - {e}"
|
||||
)
|
||||
|
||||
self.logger.info(f"Setting base_schema_ver to {self.current_schema_version}")
|
||||
self.base.set_schema_ver(self.current_schema_version)
|
||||
|
||||
self.logger.info(
|
||||
f"Setting user auto_incrememnt to {self.config.database.user_table_autoincrement_start}"
|
||||
)
|
||||
self.user.reset_autoincrement(
|
||||
self.config.database.user_table_autoincrement_start
|
||||
)
|
||||
|
||||
def recreate_database(self):
|
||||
self.logger.info("Dropping all databases...")
|
||||
self.base.execute("SET FOREIGN_KEY_CHECKS=0")
|
||||
try:
|
||||
metadata.drop_all(self.__engine.connect())
|
||||
except SQLAlchemyError as e:
|
||||
self.logger.error(f"Failed to drop databases! {e}")
|
||||
return
|
||||
|
||||
for root, dirs, files in os.walk("./titles"):
|
||||
for dir in dirs:
|
||||
if not dir.startswith("__"):
|
||||
try:
|
||||
mod = importlib.import_module(f"titles.{dir}")
|
||||
|
||||
try:
|
||||
if hasattr(mod, "database"):
|
||||
mod.database(self.config)
|
||||
metadata.drop_all(self.__engine.connect())
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
f"Could not load database schema from {dir} - {e}"
|
||||
)
|
||||
|
||||
except ImportError as e:
|
||||
self.logger.warning(
|
||||
f"Failed to load database schema dir {dir} - {e}"
|
||||
)
|
||||
break
|
||||
|
||||
self.base.execute("SET FOREIGN_KEY_CHECKS=1")
|
||||
|
||||
self.create_database()
|
||||
|
||||
def migrate_database(self, game: str, version: Optional[int], action: str) -> None:
|
||||
old_ver = self.base.get_schema_ver(game)
|
||||
sql = ""
|
||||
if version is None:
|
||||
if not game == "CORE":
|
||||
titles = Utils.get_all_titles()
|
||||
|
||||
for folder, mod in titles.items():
|
||||
if not mod.game_codes[0] == game:
|
||||
continue
|
||||
|
||||
if hasattr(mod, "current_schema_version"):
|
||||
version = mod.current_schema_version
|
||||
|
||||
else:
|
||||
self.logger.warn(
|
||||
f"current_schema_version not found for {folder}"
|
||||
)
|
||||
|
||||
else:
|
||||
version = self.current_schema_version
|
||||
|
||||
if version is None:
|
||||
self.logger.warn(
|
||||
f"Could not determine latest version for {game}, please specify --version"
|
||||
)
|
||||
|
||||
if old_ver is None:
|
||||
self.logger.error(
|
||||
f"Schema for game {game} does not exist, did you run the creation script?"
|
||||
)
|
||||
return
|
||||
|
||||
if old_ver == version:
|
||||
self.logger.info(
|
||||
f"Schema for game {game} is already version {old_ver}, nothing to do"
|
||||
)
|
||||
return
|
||||
|
||||
if action == "upgrade":
|
||||
for x in range(old_ver, version):
|
||||
if not os.path.exists(
|
||||
f"core/data/schema/versions/{game.upper()}_{x + 1}_{action}.sql"
|
||||
):
|
||||
self.logger.error(
|
||||
f"Could not find {action} script {game.upper()}_{x + 1}_{action}.sql in core/data/schema/versions folder"
|
||||
)
|
||||
return
|
||||
|
||||
with open(
|
||||
f"core/data/schema/versions/{game.upper()}_{x + 1}_{action}.sql",
|
||||
"r",
|
||||
encoding="utf-8",
|
||||
) as f:
|
||||
sql = f.read()
|
||||
|
||||
result = self.base.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error("Error execuing sql script!")
|
||||
return None
|
||||
|
||||
else:
|
||||
for x in range(old_ver, version, -1):
|
||||
if not os.path.exists(
|
||||
f"core/data/schema/versions/{game.upper()}_{x - 1}_{action}.sql"
|
||||
):
|
||||
self.logger.error(
|
||||
f"Could not find {action} script {game.upper()}_{x - 1}_{action}.sql in core/data/schema/versions folder"
|
||||
)
|
||||
return
|
||||
|
||||
with open(
|
||||
f"core/data/schema/versions/{game.upper()}_{x - 1}_{action}.sql",
|
||||
"r",
|
||||
encoding="utf-8",
|
||||
) as f:
|
||||
sql = f.read()
|
||||
|
||||
result = self.base.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error("Error execuing sql script!")
|
||||
return None
|
||||
|
||||
result = self.base.set_schema_ver(version, game)
|
||||
if result is None:
|
||||
self.logger.error("Error setting version in schema_version table!")
|
||||
return None
|
||||
|
||||
self.logger.info(f"Successfully migrated {game} to schema version {version}")
|
||||
|
||||
def create_owner(self, email: Optional[str] = None) -> None:
|
||||
pw = "".join(
|
||||
secrets.choice(string.ascii_letters + string.digits) for i in range(20)
|
||||
)
|
||||
hash = bcrypt.hashpw(pw.encode(), bcrypt.gensalt())
|
||||
|
||||
user_id = self.user.create_user(email=email, permission=255, password=hash)
|
||||
if user_id is None:
|
||||
self.logger.error(f"Failed to create owner with email {email}")
|
||||
return
|
||||
|
||||
card_id = self.card.create_card(user_id, "00000000000000000000")
|
||||
if card_id is None:
|
||||
self.logger.error(f"Failed to create card for owner with id {user_id}")
|
||||
return
|
||||
|
||||
self.logger.warn(
|
||||
f"Successfully created owner with email {email}, access code 00000000000000000000, and password {pw} Make sure to change this password and assign a real card ASAP!"
|
||||
)
|
||||
|
||||
def migrate_card(self, old_ac: str, new_ac: str, should_force: bool) -> None:
|
||||
if old_ac == new_ac:
|
||||
self.logger.error("Both access codes are the same!")
|
||||
return
|
||||
|
||||
new_card = self.card.get_card_by_access_code(new_ac)
|
||||
if new_card is None:
|
||||
self.card.update_access_code(old_ac, new_ac)
|
||||
return
|
||||
|
||||
if not should_force:
|
||||
self.logger.warn(
|
||||
f"Card already exists for access code {new_ac} (id {new_card['id']}). If you wish to continue, rerun with the '--force' flag."
|
||||
f" All exiting data on the target card {new_ac} will be perminently erased and replaced with data from card {old_ac}."
|
||||
)
|
||||
return
|
||||
|
||||
self.logger.info(
|
||||
f"All exiting data on the target card {new_ac} will be perminently erased and replaced with data from card {old_ac}."
|
||||
)
|
||||
self.card.delete_card(new_card["id"])
|
||||
self.card.update_access_code(old_ac, new_ac)
|
||||
|
||||
hanging_user = self.user.get_user(new_card["user"])
|
||||
if hanging_user["password"] is None:
|
||||
self.logger.info(f"Delete hanging user {hanging_user['id']}")
|
||||
self.user.delete_user(hanging_user["id"])
|
||||
|
||||
def delete_hanging_users(self) -> None:
|
||||
"""
|
||||
Finds and deletes users that have not registered for the webui that have no cards assocated with them.
|
||||
"""
|
||||
unreg_users = self.user.get_unregistered_users()
|
||||
if unreg_users is None:
|
||||
self.logger.error("Error occoured finding unregistered users")
|
||||
|
||||
for user in unreg_users:
|
||||
cards = self.card.get_user_cards(user["id"])
|
||||
if cards is None:
|
||||
self.logger.error(f"Error getting cards for user {user['id']}")
|
||||
continue
|
||||
|
||||
if not cards:
|
||||
self.logger.info(f"Delete hanging user {user['id']}")
|
||||
self.user.delete_user(user["id"])
|
||||
|
||||
def autoupgrade(self) -> None:
|
||||
all_game_versions = self.base.get_all_schema_vers()
|
||||
if all_game_versions is None:
|
||||
self.logger.warn("Failed to get schema versions")
|
||||
return
|
||||
|
||||
all_games = Utils.get_all_titles()
|
||||
all_games_list: Dict[str, int] = {}
|
||||
for _, mod in all_games.items():
|
||||
if hasattr(mod, "current_schema_version"):
|
||||
all_games_list[mod.game_codes[0]] = mod.current_schema_version
|
||||
|
||||
for x in all_game_versions:
|
||||
failed = False
|
||||
game = x["game"].upper()
|
||||
update_ver = int(x["version"])
|
||||
latest_ver = all_games_list.get(game, 1)
|
||||
if game == "CORE":
|
||||
latest_ver = self.current_schema_version
|
||||
|
||||
if update_ver == latest_ver:
|
||||
self.logger.info(f"{game} is already latest version")
|
||||
continue
|
||||
|
||||
for y in range(update_ver + 1, latest_ver + 1):
|
||||
if os.path.exists(f"core/data/schema/versions/{game}_{y}_upgrade.sql"):
|
||||
with open(
|
||||
f"core/data/schema/versions/{game}_{y}_upgrade.sql",
|
||||
"r",
|
||||
encoding="utf-8",
|
||||
) as f:
|
||||
sql = f.read()
|
||||
|
||||
result = self.base.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"Error execuing sql script for game {game} v{y}!"
|
||||
)
|
||||
failed = True
|
||||
break
|
||||
else:
|
||||
self.logger.warning(f"Could not find script {game}_{y}_upgrade.sql")
|
||||
failed = True
|
||||
|
||||
if not failed:
|
||||
self.base.set_schema_ver(latest_ver, game)
|
||||
|
||||
def show_versions(self) -> None:
|
||||
all_game_versions = self.base.get_all_schema_vers()
|
||||
for ver in all_game_versions:
|
||||
self.logger.info(f"{ver['game']} -> v{ver['version']}")
|
||||
|
|
|
@ -3,4 +3,4 @@ from core.data.schema.card import CardData
|
|||
from core.data.schema.base import BaseData, metadata
|
||||
from core.data.schema.arcade import ArcadeData
|
||||
|
||||
__all__ = ["UserData", "CardData", "BaseData", "metadata", "ArcadeData"]
|
||||
__all__ = ["UserData", "CardData", "BaseData", "metadata", "ArcadeData"]
|
||||
|
|
|
@ -4,110 +4,216 @@ from sqlalchemy.sql.schema import ForeignKey, PrimaryKeyConstraint
|
|||
from sqlalchemy.types import Integer, String, Boolean
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
import re
|
||||
|
||||
from core.data.schema.base import BaseData, metadata
|
||||
from core.const import *
|
||||
|
||||
arcade = Table(
|
||||
"arcade",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("name", String(255)),
|
||||
Column("nickname", String(255)),
|
||||
Column("nickname", String(255)),
|
||||
Column("country", String(3)),
|
||||
Column("country_id", Integer),
|
||||
Column("state", String(255)),
|
||||
Column("city", String(255)),
|
||||
Column("region_id", Integer),
|
||||
Column("timezone", String(255)),
|
||||
mysql_charset='utf8mb4'
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
machine = Table(
|
||||
"machine",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("arcade", ForeignKey("arcade.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column(
|
||||
"arcade",
|
||||
ForeignKey("arcade.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("serial", String(15), nullable=False),
|
||||
Column("board", String(15)),
|
||||
Column("game", String(4)),
|
||||
Column("country", String(3)), # overwrites if not null
|
||||
Column("country", String(3)), # overwrites if not null
|
||||
Column("timezone", String(255)),
|
||||
Column("ota_enable", Boolean),
|
||||
Column("is_cab", Boolean),
|
||||
mysql_charset='utf8mb4'
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
arcade_owner = Table(
|
||||
'arcade_owner',
|
||||
"arcade_owner",
|
||||
metadata,
|
||||
Column('user', Integer, ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column('arcade', Integer, ForeignKey("arcade.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column('permissions', Integer, nullable=False),
|
||||
PrimaryKeyConstraint('user', 'arcade', name='arcade_owner_pk'),
|
||||
mysql_charset='utf8mb4'
|
||||
Column(
|
||||
"user",
|
||||
Integer,
|
||||
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column(
|
||||
"arcade",
|
||||
Integer,
|
||||
ForeignKey("arcade.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("permissions", Integer, nullable=False),
|
||||
PrimaryKeyConstraint("user", "arcade", name="arcade_owner_pk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
|
||||
class ArcadeData(BaseData):
|
||||
def get_machine(self, serial: str = None, id: int = None) -> Optional[Dict]:
|
||||
if serial is not None:
|
||||
sql = machine.select(machine.c.serial == serial)
|
||||
serial = serial.replace("-", "")
|
||||
if len(serial) == 11:
|
||||
sql = machine.select(machine.c.serial.like(f"{serial}%"))
|
||||
|
||||
elif len(serial) == 15:
|
||||
sql = machine.select(machine.c.serial == serial)
|
||||
|
||||
else:
|
||||
self.logger.error(f"{__name__ }: Malformed serial {serial}")
|
||||
return None
|
||||
|
||||
elif id is not None:
|
||||
sql = machine.select(machine.c.id == id)
|
||||
else:
|
||||
|
||||
else:
|
||||
self.logger.error(f"{__name__ }: Need either serial or ID to look up!")
|
||||
return None
|
||||
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_machine(self, arcade_id: int, serial: str = None, board: str = None, game: str = None, is_cab: bool = False) -> Optional[int]:
|
||||
|
||||
def put_machine(
|
||||
self,
|
||||
arcade_id: int,
|
||||
serial: str = "",
|
||||
board: str = None,
|
||||
game: str = None,
|
||||
is_cab: bool = False,
|
||||
) -> Optional[int]:
|
||||
if arcade_id:
|
||||
self.logger.error(f"{__name__ }: Need arcade id!")
|
||||
return None
|
||||
|
||||
if serial is None:
|
||||
pass
|
||||
|
||||
sql = machine.insert().values(arcade = arcade_id, keychip = serial, board = board, game = game, is_cab = is_cab)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_arcade(self, id: int) -> Optional[Dict]:
|
||||
sql = arcade.select(arcade.c.id == id)
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_arcade(self, name: str, nickname: str = None, country: str = "JPN", country_id: int = 1,
|
||||
state: str = "", city: str = "", regional_id: int = 1) -> Optional[int]:
|
||||
if nickname is None: nickname = name
|
||||
|
||||
sql = arcade.insert().values(name = name, nickname = nickname, country = country, country_id = country_id,
|
||||
state = state, city = city, regional_id = regional_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_arcade_owners(self, arcade_id: int) -> Optional[Dict]:
|
||||
sql = select(arcade_owner).where(arcade_owner.c.arcade==arcade_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchall()
|
||||
|
||||
def add_arcade_owner(self, arcade_id: int, user_id: int) -> None:
|
||||
sql = insert(arcade_owner).values(
|
||||
arcade=arcade_id,
|
||||
user=user_id
|
||||
sql = machine.insert().values(
|
||||
arcade=arcade_id, keychip=serial, board=board, game=game, is_cab=is_cab
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def generate_keychip_serial(self, platform_id: int) -> str:
|
||||
pass
|
||||
def set_machine_serial(self, machine_id: int, serial: str) -> None:
|
||||
result = self.execute(
|
||||
machine.update(machine.c.id == machine_id).values(keychip=serial)
|
||||
)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"Failed to update serial for machine {machine_id} -> {serial}"
|
||||
)
|
||||
return result.lastrowid
|
||||
|
||||
def set_machine_boardid(self, machine_id: int, boardid: str) -> None:
|
||||
result = self.execute(
|
||||
machine.update(machine.c.id == machine_id).values(board=boardid)
|
||||
)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"Failed to update board id for machine {machine_id} -> {boardid}"
|
||||
)
|
||||
|
||||
def get_arcade(self, id: int) -> Optional[Dict]:
|
||||
sql = arcade.select(arcade.c.id == id)
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_arcade(
|
||||
self,
|
||||
name: str,
|
||||
nickname: str = None,
|
||||
country: str = "JPN",
|
||||
country_id: int = 1,
|
||||
state: str = "",
|
||||
city: str = "",
|
||||
regional_id: int = 1,
|
||||
) -> Optional[int]:
|
||||
if nickname is None:
|
||||
nickname = name
|
||||
|
||||
sql = arcade.insert().values(
|
||||
name=name,
|
||||
nickname=nickname,
|
||||
country=country,
|
||||
country_id=country_id,
|
||||
state=state,
|
||||
city=city,
|
||||
regional_id=regional_id,
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_arcade_owners(self, arcade_id: int) -> Optional[Dict]:
|
||||
sql = select(arcade_owner).where(arcade_owner.c.arcade == arcade_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def add_arcade_owner(self, arcade_id: int, user_id: int) -> None:
|
||||
sql = insert(arcade_owner).values(arcade=arcade_id, user=user_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def format_serial(
|
||||
self, platform_code: str, platform_rev: int, serial_num: int, append: int = 4152
|
||||
) -> str:
|
||||
return f"{platform_code}{platform_rev:02d}A{serial_num:04d}{append:04d}" # 0x41 = A, 0x52 = R
|
||||
|
||||
def validate_keychip_format(self, serial: str) -> bool:
|
||||
serial = serial.replace("-", "")
|
||||
if len(serial) != 11 or len(serial) != 15:
|
||||
self.logger.error(
|
||||
f"Serial validate failed: Incorrect length for {serial} (len {len(serial)})"
|
||||
)
|
||||
return False
|
||||
|
||||
platform_code = serial[:4]
|
||||
platform_rev = serial[4:6]
|
||||
const_a = serial[6]
|
||||
num = serial[7:11]
|
||||
append = serial[11:15]
|
||||
|
||||
if re.match("A[7|6]\d[E|X][0|1][0|1|2]A\d{4,8}", serial) is None:
|
||||
self.logger.error(f"Serial validate failed: {serial} failed regex")
|
||||
return False
|
||||
|
||||
if len(append) != 0 or len(append) != 4:
|
||||
self.logger.error(
|
||||
f"Serial validate failed: {serial} had malformed append {append}"
|
||||
)
|
||||
return False
|
||||
|
||||
if len(num) != 4:
|
||||
self.logger.error(
|
||||
f"Serial validate failed: {serial} had malformed number {num}"
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
|
|
@ -2,6 +2,7 @@ import json
|
|||
import logging
|
||||
from random import randrange
|
||||
from typing import Any, Optional, Dict, List
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.engine.cursor import CursorResult
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.sql import text, func, select
|
||||
|
@ -19,7 +20,7 @@ schema_ver = Table(
|
|||
metadata,
|
||||
Column("game", String(4), primary_key=True, nullable=False),
|
||||
Column("version", Integer, nullable=False, server_default="1"),
|
||||
mysql_charset='utf8mb4'
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
event_log = Table(
|
||||
|
@ -29,28 +30,30 @@ event_log = Table(
|
|||
Column("system", String(255), nullable=False),
|
||||
Column("type", String(255), nullable=False),
|
||||
Column("severity", Integer, nullable=False),
|
||||
Column("message", String(1000), nullable=False),
|
||||
Column("details", JSON, nullable=False),
|
||||
Column("when_logged", TIMESTAMP, nullable=False, server_default=func.now()),
|
||||
mysql_charset='utf8mb4'
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
class BaseData():
|
||||
|
||||
class BaseData:
|
||||
def __init__(self, cfg: CoreConfig, conn: Connection) -> None:
|
||||
self.config = cfg
|
||||
self.conn = conn
|
||||
self.logger = logging.getLogger("database")
|
||||
|
||||
def execute(self, sql: str, opts: Dict[str, Any]={}) -> Optional[CursorResult]:
|
||||
|
||||
def execute(self, sql: str, opts: Dict[str, Any] = {}) -> Optional[CursorResult]:
|
||||
res = None
|
||||
|
||||
try:
|
||||
self.logger.info(f"SQL Execute: {''.join(str(sql).splitlines())} || {opts}")
|
||||
self.logger.info(f"SQL Execute: {''.join(str(sql).splitlines())}")
|
||||
res = self.conn.execute(text(sql), opts)
|
||||
|
||||
except SQLAlchemyError as e:
|
||||
self.logger.error(f"SQLAlchemy error {e}")
|
||||
return None
|
||||
|
||||
|
||||
except UnicodeEncodeError as e:
|
||||
self.logger.error(f"UnicodeEncodeError error {e}")
|
||||
return None
|
||||
|
@ -62,7 +65,7 @@ class BaseData():
|
|||
except SQLAlchemyError as e:
|
||||
self.logger.error(f"SQLAlchemy error {e}")
|
||||
return None
|
||||
|
||||
|
||||
except UnicodeEncodeError as e:
|
||||
self.logger.error(f"UnicodeEncodeError error {e}")
|
||||
return None
|
||||
|
@ -72,53 +75,91 @@ class BaseData():
|
|||
raise
|
||||
|
||||
return res
|
||||
|
||||
|
||||
def generate_id(self) -> int:
|
||||
"""
|
||||
Generate a random 5-7 digit id
|
||||
"""
|
||||
return randrange(10000, 9999999)
|
||||
|
||||
|
||||
def get_all_schema_vers(self) -> Optional[List[Row]]:
|
||||
sql = select(schema_ver)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def get_schema_ver(self, game: str) -> Optional[int]:
|
||||
sql = select(schema_ver).where(schema_ver.c.game == game)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()["version"]
|
||||
|
||||
row = result.fetchone()
|
||||
if row is None:
|
||||
return None
|
||||
|
||||
return row["version"]
|
||||
|
||||
def set_schema_ver(self, ver: int, game: str = "CORE") -> Optional[int]:
|
||||
sql = insert(schema_ver).values(game = game, version = ver)
|
||||
conflict = sql.on_duplicate_key_update(version = ver)
|
||||
|
||||
def touch_schema_ver(self, ver: int, game: str = "CORE") -> Optional[int]:
|
||||
sql = insert(schema_ver).values(game=game, version=ver)
|
||||
conflict = sql.on_duplicate_key_update(version=schema_ver.c.version)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update schema version for game {game} (v{ver})")
|
||||
self.logger.error(
|
||||
f"Failed to update schema version for game {game} (v{ver})"
|
||||
)
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def log_event(self, system: str, type: str, severity: int, details: Dict) -> Optional[int]:
|
||||
sql = event_log.insert().values(system = system, type = type, severity = severity, details = json.dumps(details))
|
||||
def set_schema_ver(self, ver: int, game: str = "CORE") -> Optional[int]:
|
||||
sql = insert(schema_ver).values(game=game, version=ver)
|
||||
conflict = sql.on_duplicate_key_update(version=ver)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"Failed to update schema version for game {game} (v{ver})"
|
||||
)
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def log_event(
|
||||
self, system: str, type: str, severity: int, message: str, details: Dict = {}
|
||||
) -> Optional[int]:
|
||||
sql = event_log.insert().values(
|
||||
system=system,
|
||||
type=type,
|
||||
severity=severity,
|
||||
message=message,
|
||||
details=json.dumps(details),
|
||||
)
|
||||
result = self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
self.logger.error(f"{__name__}: Failed to insert event into event log! system = {system}, type = {type}, severity = {severity}, details = {details}")
|
||||
self.logger.error(
|
||||
f"{__name__}: Failed to insert event into event log! system = {system}, type = {type}, severity = {severity}, message = {message}"
|
||||
)
|
||||
return None
|
||||
|
||||
return result.lastrowid
|
||||
|
||||
|
||||
def get_event_log(self, entries: int = 100) -> Optional[List[Dict]]:
    """Return up to `entries` rows from the event log, or None on failure."""
    # NOTE(fix): the original chained `.all()` onto the Core Select
    # (`event_log.select().limit(entries).all()`). `.all()` is an ORM
    # Query method, not a Select method, so the call raised
    # AttributeError before the query ever ran. Rows are fetched from
    # the result object instead.
    sql = event_log.select().limit(entries)
    result = self.execute(sql)

    if result is None:
        return None
    return result.fetchall()
|
||||
|
||||
|
||||
def fix_bools(self, data: Dict) -> Dict:
    """Convert "true"/"false" strings (any casing) in `data` to real bools.

    Mutates `data` in place and returns it. Non-string values and any
    other string values are left untouched; nested containers are not
    visited.
    """
    for key, value in data.items():
        # isinstance is the idiomatic type check (also covers str subclasses,
        # unlike the original `type(v) == str`); lower() computed once.
        if isinstance(value, str):
            lowered = value.lower()
            if lowered == "true":
                data[key] = True
            elif lowered == "false":
                data[key] = False

    return data
|
||||
|
|
|
@ -3,55 +3,92 @@ from sqlalchemy import Table, Column, UniqueConstraint
|
|||
from sqlalchemy.types import Integer, String, Boolean, TIMESTAMP
|
||||
from sqlalchemy.sql.schema import ForeignKey
|
||||
from sqlalchemy.sql import func
|
||||
from sqlalchemy.engine import Row
|
||||
|
||||
from core.data.schema.base import BaseData, metadata
|
||||
|
||||
# Registered Aime cards; each row ties one physical card to an aime_user.
aime_card = Table(
    "aime_card",
    metadata,
    Column("id", Integer, primary_key=True, nullable=False),
    # Owning user; cascades keep card rows consistent with user changes.
    Column(
        "user",
        ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
        nullable=False,
    ),
    # 20-digit access code encoded on the card.
    Column("access_code", String(20)),
    Column("created_date", TIMESTAMP, server_default=func.now()),
    Column("last_login_date", TIMESTAMP, onupdate=func.now()),
    Column("is_locked", Boolean, server_default="0"),
    Column("is_banned", Boolean, server_default="0"),
    # A user may not register the same access code twice.
    UniqueConstraint("user", "access_code", name="aime_card_uk"),
    mysql_charset="utf8mb4",
)
|
||||
|
||||
|
||||
class CardData(BaseData):
|
||||
def get_card_by_access_code(self, access_code: str) -> Optional[Row]:
    """Look up a single card row by its 20-digit access code."""
    query = aime_card.select(aime_card.c.access_code == access_code)

    res = self.execute(query)
    return None if res is None else res.fetchone()
|
||||
|
||||
def get_card_by_id(self, card_id: int) -> Optional[Row]:
    """Look up a single card row by its primary key."""
    query = aime_card.select(aime_card.c.id == card_id)

    res = self.execute(query)
    return None if res is None else res.fetchone()
|
||||
|
||||
def update_access_code(self, old_ac: str, new_ac: str) -> None:
    """Re-point a card from access code `old_ac` to `new_ac`; logs on failure."""
    query = aime_card.update(aime_card.c.access_code == old_ac).values(
        access_code=new_ac
    )

    if self.execute(query) is None:
        self.logger.error(
            f"Failed to change card access code from {old_ac} to {new_ac}"
        )
|
||||
|
||||
def get_user_id_from_card(self, access_code: str) -> Optional[int]:
    """Resolve a 20-digit access code to the owning user's id, if any."""
    card = self.get_card_by_access_code(access_code)
    return None if card is None else int(card["user"])
|
||||
|
||||
def get_user_cards(self, aime_id: int) -> Optional[List[Dict]]:
|
||||
def delete_card(self, card_id: int) -> None:
    """Remove the card row with the given id; logs on failure."""
    query = aime_card.delete(aime_card.c.id == card_id)

    if self.execute(query) is None:
        self.logger.error(f"Failed to delete card with id {card_id}")
|
||||
|
||||
def get_user_cards(self, aime_id: int) -> Optional[List[Row]]:
    """
    Returns all cards owned by a user
    """
    res = self.execute(aime_card.select(aime_card.c.user == aime_id))
    return None if res is None else res.fetchall()
|
||||
|
||||
|
||||
def create_card(self, user_id: int, access_code: str) -> Optional[int]:
    """
    Given a aime_user id and a 20 digit access code as a string, create a card and return the ID if successful
    """
    res = self.execute(
        aime_card.insert().values(user=user_id, access_code=access_code)
    )
    return None if res is None else res.lastrowid
|
||||
|
||||
def to_access_code(self, luid: str) -> str:
|
||||
|
@ -64,4 +101,4 @@ class CardData(BaseData):
|
|||
"""
|
||||
Given a 20 digit access code as a string, return the 16 hex character luid
|
||||
"""
|
||||
return f'{int(access_code):0{16}x}'
|
||||
return f"{int(access_code):0{16}x}"
|
||||
|
|
|
@ -1,9 +1,12 @@
|
|||
from enum import Enum
|
||||
from typing import Dict, Optional
|
||||
from typing import Optional, List
|
||||
from sqlalchemy import Table, Column
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP
|
||||
from sqlalchemy.sql.schema import ForeignKey
|
||||
from sqlalchemy.sql import func
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.engine import Row
|
||||
import bcrypt
|
||||
|
||||
from core.data.schema.base import BaseData, metadata
|
||||
|
||||
|
@ -14,44 +17,93 @@ aime_user = Table(
|
|||
Column("username", String(25), unique=True),
|
||||
Column("email", String(255), unique=True),
|
||||
Column("password", String(255)),
|
||||
Column("permissions", Integer),
|
||||
Column("permissions", Integer),
|
||||
Column("created_date", TIMESTAMP, server_default=func.now()),
|
||||
Column("last_login_date", TIMESTAMP, onupdate=func.now()),
|
||||
Column("suspend_expire_time", TIMESTAMP),
|
||||
mysql_charset='utf8mb4'
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
# Active web-UI login sessions, keyed by a random 32-character cookie value.
frontend_session = Table(
    "frontend_session",
    metadata,
    Column("id", Integer, primary_key=True, unique=True),
    Column(
        "user",
        ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
        nullable=False,
    ),
    Column("session_cookie", String(32), nullable=False, unique=True),
    Column("expires", TIMESTAMP, nullable=False),
    mysql_charset="utf8mb4",
)
|
||||
|
||||
class PermissionBits(Enum):
    """Bit flags describing a user's permission level on the web UI."""

    PermUser = 1  # ordinary player account
    PermMod = 2  # moderator
    PermSysAdmin = 4  # full administrator
|
||||
|
||||
|
||||
class UserData(BaseData):
|
||||
def create_user(
    self,
    id: int = None,  # shadows builtin `id`, but the name is part of the public API
    username: str = None,
    email: str = None,
    password: str = None,
    permission: int = 1,
) -> Optional[int]:
    """Create (or upsert) an aime_user row and return its id.

    When `id` is None the database assigns one; otherwise the row is
    created with that explicit id. On a duplicate key the username,
    email, password and permissions columns are overwritten with the
    supplied values. Returns None if the query failed.
    """
    row = dict(
        username=username,
        email=email,
        password=password,
        permissions=permission,
    )
    if id is not None:
        row["id"] = id
    stmt = insert(aime_user).values(**row)

    upsert = stmt.on_duplicate_key_update(
        username=username, email=email, password=password, permissions=permission
    )

    res = self.execute(upsert)
    return None if res is None else res.lastrowid
|
||||
|
||||
|
||||
def get_user(self, user_id: int) -> Optional[Row]:
    """Fetch the aime_user row with the given id.

    Returns the row, or None if the query failed or no such user exists.
    """
    sql = select(aime_user).where(aime_user.c.id == user_id)
    result = self.execute(sql)
    if result is None:
        # NOTE(fix): previously returned False here, contradicting the
        # Optional[Row] annotation and breaking `usr is None` checks in
        # callers such as check_password.
        return None
    return result.fetchone()
|
||||
|
||||
def check_password(self, user_id: int, passwd: bytes = None) -> bool:
    """Verify `passwd` against the stored bcrypt hash for `user_id`.

    Returns False when the user cannot be fetched, has no password set,
    or no candidate password was supplied.
    """
    user = self.get_user(user_id)
    if user is None:
        return False

    stored = user["password"]
    if stored is None:
        return False

    # `not passwd` already covers None as well as b"".
    if not passwd:
        return False

    return bcrypt.checkpw(passwd, stored.encode())
|
||||
|
||||
def reset_autoincrement(self, ai_value: int) -> None:
    """Reset the aime_user AUTO_INCREMENT counter to `ai_value`.

    SQLAlchemy has no portable ALTER TABLE support, so the raw MySQL
    statement is issued directly.
    """
    self.execute(f"ALTER TABLE aime_user AUTO_INCREMENT={ai_value}")
|
||||
|
||||
def delete_user(self, user_id: int) -> None:
    """Delete the aime_user row with the given id; logs on failure."""
    query = aime_user.delete(aime_user.c.id == user_id)

    if self.execute(query) is None:
        self.logger.error(f"Failed to delete user with id {user_id}")
|
||||
|
||||
def get_unregistered_users(self) -> List[Row]:
    """
    Returns a list of users who have not registered with the webui. They may or may not have cards.
    """
    # `== None` (not `is None`) is deliberate: SQLAlchemy overloads ==
    # on columns to emit an "IS NULL" comparison in the generated SQL.
    sql = select(aime_user).where(aime_user.c.password == None)

    res = self.execute(sql)
    return None if res is None else res.fetchall()
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
ALTER TABLE `frontend_session`
|
||||
DROP COLUMN `ip`;
|
|
@ -0,0 +1 @@
|
|||
ALTER TABLE `event_log` DROP COLUMN `message`;
|
|
@ -0,0 +1,2 @@
|
|||
-- Restore the session IP column (rollback of its removal).
-- NOTE(review): CHAR(15) only fits dotted-quad IPv4; IPv6 text form
-- needs up to 39 (45 with IPv4-mapped) characters — confirm intent.
ALTER TABLE `frontend_session`
    ADD `ip` CHAR(15);
|
|
@ -0,0 +1,12 @@
|
|||
CREATE TABLE `frontend_session` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`user` int(11) NOT NULL,
|
||||
`ip` varchar(15) DEFAULT NULL,
|
||||
`session_cookie` varchar(32) NOT NULL,
|
||||
`expires` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp(),
|
||||
PRIMARY KEY (`id`),
|
||||
UNIQUE KEY `id` (`id`),
|
||||
UNIQUE KEY `session_cookie` (`session_cookie`),
|
||||
KEY `user` (`user`),
|
||||
CONSTRAINT `frontend_session_ibfk_1` FOREIGN KEY (`user`) REFERENCES `aime_user` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
|
||||
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4;
|
|
@ -0,0 +1 @@
|
|||
ALTER TABLE `event_log` ADD COLUMN `message` VARCHAR(1000) NOT NULL AFTER `severity`;
|
|
@ -0,0 +1 @@
|
|||
DROP TABLE `frontend_session`;
|
|
@ -0,0 +1,9 @@
|
|||
SET FOREIGN_KEY_CHECKS=0;
|
||||
ALTER TABLE diva_score DROP FOREIGN KEY diva_score_ibfk_1;
|
||||
ALTER TABLE diva_score DROP CONSTRAINT diva_score_uk;
|
||||
ALTER TABLE diva_score ADD CONSTRAINT diva_score_uk UNIQUE (user, pv_id, difficulty);
|
||||
ALTER TABLE diva_score ADD CONSTRAINT diva_score_ibfk_1 FOREIGN KEY (user) REFERENCES aime_user(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE diva_score DROP COLUMN edition;
|
||||
ALTER TABLE diva_playlog DROP COLUMN edition;
|
||||
SET FOREIGN_KEY_CHECKS=1;
|
|
@ -0,0 +1,17 @@
|
|||
ALTER TABLE diva_profile_shop DROP COLUMN c_itm_eqp_ary;
|
||||
ALTER TABLE diva_profile_shop DROP COLUMN ms_itm_flg_ary;
|
||||
|
||||
ALTER TABLE diva_profile DROP COLUMN use_pv_mdl_eqp;
|
||||
ALTER TABLE diva_profile DROP COLUMN use_mdl_pri;
|
||||
ALTER TABLE diva_profile DROP COLUMN use_pv_skn_eqp;
|
||||
ALTER TABLE diva_profile DROP COLUMN use_pv_btn_se_eqp;
|
||||
ALTER TABLE diva_profile DROP COLUMN use_pv_sld_se_eqp;
|
||||
ALTER TABLE diva_profile DROP COLUMN use_pv_chn_sld_se_eqp;
|
||||
ALTER TABLE diva_profile DROP COLUMN use_pv_sldr_tch_se_eqp;
|
||||
ALTER TABLE diva_profile ADD COLUMN use_pv_mdl_eqp VARCHAR(8) NOT NULL DEFAULT "true" AFTER sort_kind;
|
||||
ALTER TABLE diva_profile ADD COLUMN use_pv_btn_se_eqp VARCHAR(8) NOT NULL DEFAULT "true" AFTER use_pv_mdl_eqp;
|
||||
ALTER TABLE diva_profile ADD COLUMN use_pv_sld_se_eqp VARCHAR(8) NOT NULL DEFAULT "false" AFTER use_pv_btn_se_eqp;
|
||||
ALTER TABLE diva_profile ADD COLUMN use_pv_chn_sld_se_eqp VARCHAR(8) NOT NULL DEFAULT "false" AFTER use_pv_sld_se_eqp;
|
||||
ALTER TABLE diva_profile ADD COLUMN use_pv_sldr_tch_se_eqp VARCHAR(8) NOT NULL DEFAULT "false" AFTER use_pv_chn_sld_se_eqp;
|
||||
|
||||
DROP TABLE IF EXISTS `diva_profile_pv_customize`;
|
|
@ -0,0 +1,9 @@
|
|||
SET FOREIGN_KEY_CHECKS=0;
|
||||
ALTER TABLE diva_score ADD COLUMN edition int(11) DEFAULT 0 AFTER difficulty;
|
||||
ALTER TABLE diva_playlog ADD COLUMN edition int(11) DEFAULT 0 AFTER difficulty;
|
||||
|
||||
ALTER TABLE diva_score DROP FOREIGN KEY diva_score_ibfk_1;
|
||||
ALTER TABLE diva_score DROP CONSTRAINT diva_score_uk;
|
||||
ALTER TABLE diva_score ADD CONSTRAINT diva_score_uk UNIQUE (user, pv_id, difficulty, edition);
|
||||
ALTER TABLE diva_score ADD CONSTRAINT diva_score_ibfk_1 FOREIGN KEY (user) REFERENCES aime_user(id) ON DELETE CASCADE;
|
||||
SET FOREIGN_KEY_CHECKS=1;
|
|
@ -0,0 +1,3 @@
|
|||
ALTER TABLE diva_profile DROP COLUMN passwd_stat;
|
||||
ALTER TABLE diva_profile DROP COLUMN passwd;
|
||||
ALTER TABLE diva_profile MODIFY player_name VARCHAR(8);
|
|
@ -0,0 +1,33 @@
|
|||
ALTER TABLE diva_profile_shop ADD COLUMN c_itm_eqp_ary varchar(59) DEFAULT "-999,-999,-999,-999,-999,-999,-999,-999,-999,-999,-999,-999";
|
||||
ALTER TABLE diva_profile_shop ADD COLUMN ms_itm_flg_ary varchar(59) DEFAULT "-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1";
|
||||
|
||||
ALTER TABLE diva_profile DROP COLUMN use_pv_mdl_eqp;
|
||||
ALTER TABLE diva_profile DROP COLUMN use_pv_btn_se_eqp;
|
||||
ALTER TABLE diva_profile DROP COLUMN use_pv_sld_se_eqp;
|
||||
ALTER TABLE diva_profile DROP COLUMN use_pv_chn_sld_se_eqp;
|
||||
ALTER TABLE diva_profile DROP COLUMN use_pv_sldr_tch_se_eqp;
|
||||
ALTER TABLE diva_profile ADD COLUMN use_pv_mdl_eqp BOOLEAN NOT NULL DEFAULT true AFTER sort_kind;
|
||||
ALTER TABLE diva_profile ADD COLUMN use_mdl_pri BOOLEAN NOT NULL DEFAULT false AFTER use_pv_mdl_eqp;
|
||||
ALTER TABLE diva_profile ADD COLUMN use_pv_skn_eqp BOOLEAN NOT NULL DEFAULT false AFTER use_mdl_pri;
|
||||
ALTER TABLE diva_profile ADD COLUMN use_pv_btn_se_eqp BOOLEAN NOT NULL DEFAULT true AFTER use_pv_skn_eqp;
|
||||
ALTER TABLE diva_profile ADD COLUMN use_pv_sld_se_eqp BOOLEAN NOT NULL DEFAULT false AFTER use_pv_btn_se_eqp;
|
||||
ALTER TABLE diva_profile ADD COLUMN use_pv_chn_sld_se_eqp BOOLEAN NOT NULL DEFAULT false AFTER use_pv_sld_se_eqp;
|
||||
ALTER TABLE diva_profile ADD COLUMN use_pv_sldr_tch_se_eqp BOOLEAN NOT NULL DEFAULT false AFTER use_pv_chn_sld_se_eqp;
|
||||
|
||||
|
||||
CREATE TABLE diva_profile_pv_customize (
|
||||
id INT PRIMARY KEY NOT NULL AUTO_INCREMENT,
|
||||
user INT NOT NULL,
|
||||
version INT NOT NULL,
|
||||
pv_id INT NOT NULL,
|
||||
mdl_eqp_ary VARCHAR(14) DEFAULT '-999,-999,-999',
|
||||
c_itm_eqp_ary VARCHAR(59) DEFAULT '-999,-999,-999,-999,-999,-999,-999,-999,-999,-999,-999,-999',
|
||||
ms_itm_flg_ary VARCHAR(59) DEFAULT '-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1',
|
||||
skin INT DEFAULT '-1',
|
||||
btn_se INT DEFAULT '-1',
|
||||
sld_se INT DEFAULT '-1',
|
||||
chsld_se INT DEFAULT '-1',
|
||||
sldtch_se INT DEFAULT '-1',
|
||||
UNIQUE KEY diva_profile_pv_customize_uk (user, version, pv_id),
|
||||
CONSTRAINT diva_profile_pv_customize_ibfk_1 FOREIGN KEY (user) REFERENCES aime_user (id) ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
|
@ -0,0 +1,9 @@
|
|||
ALTER TABLE diva_profile
|
||||
DROP cnp_cid,
|
||||
DROP cnp_val,
|
||||
DROP cnp_rr,
|
||||
DROP cnp_sp,
|
||||
DROP btn_se_eqp,
|
||||
DROP sld_se_eqp,
|
||||
DROP chn_sld_se_eqp,
|
||||
DROP sldr_tch_se_eqp;
|
|
@ -0,0 +1,3 @@
|
|||
ALTER TABLE diva_profile ADD COLUMN passwd_stat INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE diva_profile ADD COLUMN passwd VARCHAR(12) NOT NULL DEFAULT "**********";
|
||||
ALTER TABLE diva_profile MODIFY player_name VARCHAR(10);
|
|
@ -0,0 +1,9 @@
|
|||
ALTER TABLE diva_profile
|
||||
ADD cnp_cid INT NOT NULL DEFAULT -1,
|
||||
ADD cnp_val INT NOT NULL DEFAULT -1,
|
||||
ADD cnp_rr INT NOT NULL DEFAULT -1,
|
||||
ADD cnp_sp VARCHAR(255) NOT NULL DEFAULT "",
|
||||
ADD btn_se_eqp INT NOT NULL DEFAULT -1,
|
||||
ADD sld_se_eqp INT NOT NULL DEFAULT -1,
|
||||
ADD chn_sld_se_eqp INT NOT NULL DEFAULT -1,
|
||||
ADD sldr_tch_se_eqp INT NOT NULL DEFAULT -1;
|
|
@ -0,0 +1 @@
|
|||
ALTER TABLE chuni_static_music CHANGE COLUMN worldsEndTag worldsEndTag VARCHAR(20) NULL DEFAULT NULL ;
|
|
@ -0,0 +1 @@
|
|||
ALTER TABLE chuni_score_course DROP COLUMN theoryCount, DROP COLUMN orderId, DROP COLUMN playerRating;
|
|
@ -0,0 +1 @@
|
|||
ALTER TABLE chuni_static_music CHANGE COLUMN worldsEndTag worldsEndTag VARCHAR(7) NULL DEFAULT NULL ;
|
|
@ -0,0 +1,30 @@
|
|||
-- Roll back CHUNITHM schema changes: drop the columns added for newer
-- versions and restore the login-bonus preset table's single-column
-- primary key (plain `id`) with a (id, version) unique constraint.
SET FOREIGN_KEY_CHECKS = 0;

ALTER TABLE chuni_score_playlog
    DROP COLUMN regionId,
    DROP COLUMN machineType;

ALTER TABLE chuni_static_events
    DROP COLUMN startDate;

ALTER TABLE chuni_profile_data
    DROP COLUMN rankUpChallengeResults;

-- The FK must be dropped before the preset table's key can change.
ALTER TABLE chuni_static_login_bonus
    DROP FOREIGN KEY chuni_static_login_bonus_ibfk_1;

ALTER TABLE chuni_static_login_bonus_preset
    DROP PRIMARY KEY;

ALTER TABLE chuni_static_login_bonus_preset
    CHANGE COLUMN presetId id INT NOT NULL;
ALTER TABLE chuni_static_login_bonus_preset
    ADD PRIMARY KEY(id);
ALTER TABLE chuni_static_login_bonus_preset
    ADD CONSTRAINT chuni_static_login_bonus_preset_uk UNIQUE(id, version);

-- Re-create the FK against the restored single-column key.
ALTER TABLE chuni_static_login_bonus
    ADD CONSTRAINT chuni_static_login_bonus_ibfk_1 FOREIGN KEY(presetId)
    REFERENCES chuni_static_login_bonus_preset(id) ON UPDATE CASCADE ON DELETE CASCADE;

SET FOREIGN_KEY_CHECKS = 1;
|
|
@ -0,0 +1 @@
|
|||
ALTER TABLE chuni_score_course ADD theoryCount int(11), ADD orderId int(11), ADD playerRating int(11);
|
|
@ -0,0 +1,29 @@
|
|||
-- Upgrade CHUNITHM schema: add the new playlog/event/profile columns
-- and move the login-bonus preset table to a composite
-- (presetId, version) primary key.
SET FOREIGN_KEY_CHECKS = 0;

ALTER TABLE chuni_score_playlog
    ADD COLUMN regionId INT,
    ADD COLUMN machineType INT;

ALTER TABLE chuni_static_events
    ADD COLUMN startDate TIMESTAMP NOT NULL DEFAULT current_timestamp();

ALTER TABLE chuni_profile_data
    ADD COLUMN rankUpChallengeResults JSON;

-- The FK must be dropped before the preset table's key can change.
ALTER TABLE chuni_static_login_bonus
    DROP FOREIGN KEY chuni_static_login_bonus_ibfk_1;

ALTER TABLE chuni_static_login_bonus_preset
    CHANGE COLUMN id presetId INT NOT NULL;
ALTER TABLE chuni_static_login_bonus_preset
    DROP PRIMARY KEY;
ALTER TABLE chuni_static_login_bonus_preset
    DROP INDEX chuni_static_login_bonus_preset_uk;
ALTER TABLE chuni_static_login_bonus_preset
    ADD CONSTRAINT chuni_static_login_bonus_preset_pk PRIMARY KEY (presetId, version);

-- Re-create the FK against the new composite key.
ALTER TABLE chuni_static_login_bonus
    ADD CONSTRAINT chuni_static_login_bonus_ibfk_1 FOREIGN KEY (presetId, version)
    REFERENCES chuni_static_login_bonus_preset(presetId, version) ON UPDATE CASCADE ON DELETE CASCADE;

SET FOREIGN_KEY_CHECKS = 1;
|
|
@ -0,0 +1,7 @@
|
|||
SET FOREIGN_KEY_CHECKS=0;
|
||||
ALTER TABLE ongeki_profile_data DROP COLUMN isDialogWatchedSuggestMemory;
|
||||
ALTER TABLE ongeki_score_best DROP COLUMN platinumScoreMax;
|
||||
ALTER TABLE ongeki_score_playlog DROP COLUMN platinumScore;
|
||||
ALTER TABLE ongeki_score_playlog DROP COLUMN platinumScoreMax;
|
||||
DROP TABLE IF EXISTS `ongeki_user_memorychapter`;
|
||||
SET FOREIGN_KEY_CHECKS=1;
|
|
@ -0,0 +1 @@
|
|||
ALTER TABLE ongeki_profile_data DROP COLUMN lastEmoneyCredit;
|
|
@ -0,0 +1,27 @@
|
|||
SET FOREIGN_KEY_CHECKS=0;
|
||||
|
||||
ALTER TABLE ongeki_profile_data ADD COLUMN isDialogWatchedSuggestMemory BOOLEAN;
|
||||
ALTER TABLE ongeki_score_best ADD COLUMN platinumScoreMax INTEGER;
|
||||
ALTER TABLE ongeki_score_playlog ADD COLUMN platinumScore INTEGER;
|
||||
ALTER TABLE ongeki_score_playlog ADD COLUMN platinumScoreMax INTEGER;
|
||||
|
||||
CREATE TABLE ongeki_user_memorychapter (
|
||||
id INT PRIMARY KEY NOT NULL AUTO_INCREMENT,
|
||||
user INT NOT NULL,
|
||||
chapterId INT NOT NULL,
|
||||
gaugeId INT NOT NULL,
|
||||
gaugeNum INT NOT NULL,
|
||||
jewelCount INT NOT NULL,
|
||||
isStoryWatched BOOLEAN NOT NULL,
|
||||
isBossWatched BOOLEAN NOT NULL,
|
||||
isDialogWatched BOOLEAN NOT NULL,
|
||||
isEndingWatched BOOLEAN NOT NULL,
|
||||
isClear BOOLEAN NOT NULL,
|
||||
lastPlayMusicId INT NOT NULL,
|
||||
lastPlayMusicLevel INT NOT NULL,
|
||||
lastPlayMusicCategory INT NOT NULL,
|
||||
UNIQUE KEY ongeki_user_memorychapter_uk (user, chapterId),
|
||||
CONSTRAINT ongeki_user_memorychapter_ibfk_1 FOREIGN KEY (user) REFERENCES aime_user (id) ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
SET FOREIGN_KEY_CHECKS=1;
|
|
@ -0,0 +1 @@
|
|||
ALTER TABLE ongeki_profile_data ADD COLUMN lastEmoneyCredit INTEGER DEFAULT 0;
|
|
@ -0,0 +1,3 @@
|
|||
SET FOREIGN_KEY_CHECKS=0;
|
||||
ALTER TABLE mai2_playlog DROP COLUMN trialPlayAchievement;
|
||||
SET FOREIGN_KEY_CHECKS=1;
|
|
@ -0,0 +1,21 @@
|
|||
ALTER TABLE mai2_item_card
|
||||
CHANGE COLUMN cardId card_id INT NOT NULL AFTER user,
|
||||
CHANGE COLUMN cardTypeId card_kind INT NOT NULL,
|
||||
CHANGE COLUMN charaId chara_id INT NOT NULL,
|
||||
CHANGE COLUMN mapId map_id INT NOT NULL,
|
||||
CHANGE COLUMN startDate start_date TIMESTAMP NULL DEFAULT '2018-01-01 00:00:00',
|
||||
CHANGE COLUMN endDate end_date TIMESTAMP NULL DEFAULT '2038-01-01 00:00:00';
|
||||
|
||||
ALTER TABLE mai2_item_item
|
||||
CHANGE COLUMN itemId item_id INT NOT NULL AFTER user,
|
||||
CHANGE COLUMN itemKind item_kind INT NOT NULL,
|
||||
CHANGE COLUMN isValid is_valid TINYINT(1) NOT NULL DEFAULT '1';
|
||||
|
||||
ALTER TABLE mai2_item_character
|
||||
CHANGE COLUMN characterId character_id INT NOT NULL,
|
||||
CHANGE COLUMN useCount use_count INT NOT NULL DEFAULT '0';
|
||||
|
||||
ALTER TABLE mai2_item_charge
|
||||
CHANGE COLUMN chargeId charge_id INT NOT NULL,
|
||||
CHANGE COLUMN purchaseDate purchase_date TIMESTAMP NOT NULL,
|
||||
CHANGE COLUMN validDate valid_date TIMESTAMP NOT NULL;
|
|
@ -0,0 +1,3 @@
|
|||
SET FOREIGN_KEY_CHECKS=0;
|
||||
ALTER TABLE mai2_playlog ADD trialPlayAchievement INT NULL;
|
||||
SET FOREIGN_KEY_CHECKS=1;
|
|
@ -0,0 +1,31 @@
|
|||
ALTER TABLE mai2_profile_option
|
||||
DROP COLUMN tapSe;
|
||||
|
||||
ALTER TABLE mai2_score_best
|
||||
DROP COLUMN extNum1;
|
||||
|
||||
ALTER TABLE mai2_profile_extend
|
||||
DROP COLUMN playStatusSetting;
|
||||
|
||||
ALTER TABLE mai2_playlog
|
||||
DROP COLUMN extNum4;
|
||||
|
||||
ALTER TABLE mai2_static_event
|
||||
DROP COLUMN startDate;
|
||||
|
||||
ALTER TABLE mai2_item_map
|
||||
CHANGE COLUMN mapId map_id INT NOT NULL,
|
||||
CHANGE COLUMN isLock is_lock BOOLEAN NOT NULL DEFAULT 0,
|
||||
CHANGE COLUMN isClear is_clear BOOLEAN NOT NULL DEFAULT 0,
|
||||
CHANGE COLUMN isComplete is_complete BOOLEAN NOT NULL DEFAULT 0;
|
||||
|
||||
ALTER TABLE mai2_item_friend_season_ranking
|
||||
CHANGE COLUMN seasonId season_id INT NOT NULL,
|
||||
CHANGE COLUMN rewardGet reward_get BOOLEAN NOT NULL,
|
||||
CHANGE COLUMN userName user_name VARCHAR(8) NOT NULL,
|
||||
CHANGE COLUMN recordDate record_date VARCHAR(255) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_login_bonus
|
||||
CHANGE COLUMN bonusId bonus_id INT NOT NULL,
|
||||
CHANGE COLUMN isCurrent is_current BOOLEAN NOT NULL DEFAULT 0,
|
||||
CHANGE COLUMN isComplete is_complete BOOLEAN NOT NULL DEFAULT 0;
|
|
@ -0,0 +1,21 @@
|
|||
ALTER TABLE mai2_item_card
|
||||
CHANGE COLUMN card_id cardId INT NOT NULL AFTER user,
|
||||
CHANGE COLUMN card_kind cardTypeId INT NOT NULL,
|
||||
CHANGE COLUMN chara_id charaId INT NOT NULL,
|
||||
CHANGE COLUMN map_id mapId INT NOT NULL,
|
||||
CHANGE COLUMN start_date startDate TIMESTAMP NULL DEFAULT '2018-01-01 00:00:00',
|
||||
CHANGE COLUMN end_date endDate TIMESTAMP NULL DEFAULT '2038-01-01 00:00:00';
|
||||
|
||||
ALTER TABLE mai2_item_item
|
||||
CHANGE COLUMN item_id itemId INT NOT NULL AFTER user,
|
||||
CHANGE COLUMN item_kind itemKind INT NOT NULL,
|
||||
CHANGE COLUMN is_valid isValid TINYINT(1) NOT NULL DEFAULT '1';
|
||||
|
||||
ALTER TABLE mai2_item_character
|
||||
CHANGE COLUMN character_id characterId INT NOT NULL,
|
||||
CHANGE COLUMN use_count useCount INT NOT NULL DEFAULT '0';
|
||||
|
||||
ALTER TABLE mai2_item_charge
|
||||
CHANGE COLUMN charge_id chargeId INT NOT NULL,
|
||||
CHANGE COLUMN purchase_date purchaseDate TIMESTAMP NOT NULL,
|
||||
CHANGE COLUMN valid_date validDate TIMESTAMP NOT NULL;
|
|
@ -0,0 +1,3 @@
|
|||
ALTER TABLE mai2_item_card
|
||||
CHANGE COLUMN startDate startDate TIMESTAMP DEFAULT "2018-01-01 00:00:00.0",
|
||||
CHANGE COLUMN endDate endDate TIMESTAMP DEFAULT "2038-01-01 00:00:00.0";
|
|
@ -0,0 +1,31 @@
|
|||
ALTER TABLE mai2_profile_option
|
||||
ADD COLUMN tapSe INT NOT NULL DEFAULT 0 AFTER tapDesign;
|
||||
|
||||
ALTER TABLE mai2_score_best
|
||||
ADD COLUMN extNum1 INT NOT NULL DEFAULT 0;
|
||||
|
||||
ALTER TABLE mai2_profile_extend
|
||||
ADD COLUMN playStatusSetting INT NOT NULL DEFAULT 0;
|
||||
|
||||
ALTER TABLE mai2_playlog
|
||||
ADD COLUMN extNum4 INT NOT NULL DEFAULT 0;
|
||||
|
||||
ALTER TABLE mai2_static_event
|
||||
ADD COLUMN startDate TIMESTAMP NOT NULL DEFAULT current_timestamp();
|
||||
|
||||
ALTER TABLE mai2_item_map
|
||||
CHANGE COLUMN map_id mapId INT NOT NULL,
|
||||
CHANGE COLUMN is_lock isLock BOOLEAN NOT NULL DEFAULT 0,
|
||||
CHANGE COLUMN is_clear isClear BOOLEAN NOT NULL DEFAULT 0,
|
||||
CHANGE COLUMN is_complete isComplete BOOLEAN NOT NULL DEFAULT 0;
|
||||
|
||||
ALTER TABLE mai2_item_friend_season_ranking
|
||||
CHANGE COLUMN season_id seasonId INT NOT NULL,
|
||||
CHANGE COLUMN reward_get rewardGet BOOLEAN NOT NULL,
|
||||
CHANGE COLUMN user_name userName VARCHAR(8) NOT NULL,
|
||||
CHANGE COLUMN record_date recordDate TIMESTAMP NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_login_bonus
|
||||
CHANGE COLUMN bonus_id bonusId INT NOT NULL,
|
||||
CHANGE COLUMN is_current isCurrent BOOLEAN NOT NULL DEFAULT 0,
|
||||
CHANGE COLUMN is_complete isComplete BOOLEAN NOT NULL DEFAULT 0;
|
|
@ -0,0 +1,78 @@
|
|||
DELETE FROM mai2_static_event WHERE version < 13;
|
||||
UPDATE mai2_static_event SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_static_music WHERE version < 13;
|
||||
UPDATE mai2_static_music SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_static_ticket WHERE version < 13;
|
||||
UPDATE mai2_static_ticket SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_static_cards WHERE version < 13;
|
||||
UPDATE mai2_static_cards SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_profile_detail WHERE version < 13;
|
||||
UPDATE mai2_profile_detail SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_profile_extend WHERE version < 13;
|
||||
UPDATE mai2_profile_extend SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_profile_option WHERE version < 13;
|
||||
UPDATE mai2_profile_option SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_profile_ghost WHERE version < 13;
|
||||
UPDATE mai2_profile_ghost SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DELETE FROM mai2_profile_rating WHERE version < 13;
|
||||
UPDATE mai2_profile_rating SET version = version - 13 WHERE version >= 13;
|
||||
|
||||
DROP TABLE maimai_score_best;
|
||||
DROP TABLE maimai_playlog;
|
||||
DROP TABLE maimai_profile_detail;
|
||||
DROP TABLE maimai_profile_option;
|
||||
DROP TABLE maimai_profile_web_option;
|
||||
DROP TABLE maimai_profile_grade_status;
|
||||
|
||||
ALTER TABLE mai2_item_character DROP COLUMN point;
|
||||
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN cardId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN cardTypeId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN charaId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN mapId int(11) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN characterId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN level int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN awakening int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN useCount int(11) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_charge MODIFY COLUMN chargeId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_charge MODIFY COLUMN stock int(11) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_favorite MODIFY COLUMN itemKind int(11) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN seasonId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN point int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN `rank` int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN rewardGet tinyint(1) NOT NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN userName varchar(8) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN itemId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN itemKind int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN stock int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN isValid tinyint(1) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN bonusId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN point int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN isCurrent tinyint(1) NOT NULL;
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN isComplete tinyint(1) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN mapId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN distance int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN isLock tinyint(1) NOT NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN isClear tinyint(1) NOT NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN isComplete tinyint(1) NOT NULL;
|
||||
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN printDate timestamp DEFAULT current_timestamp() NOT NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN serialId varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN placeId int(11) NOT NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN clientId varchar(11) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN printerSerialId varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL;
|
|
@ -0,0 +1,3 @@
|
|||
ALTER TABLE mai2_item_card
|
||||
CHANGE COLUMN startDate startDate TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
CHANGE COLUMN endDate endDate TIMESTAMP NOT NULL;
|
|
@ -0,0 +1 @@
|
|||
DROP TABLE aime.mai2_profile_consec_logins;
|
|
@ -0,0 +1,62 @@
|
|||
UPDATE mai2_static_event SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_static_music SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_static_ticket SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_static_cards SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_profile_detail SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_profile_extend SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_profile_option SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_profile_ghost SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
UPDATE mai2_profile_rating SET version = version + 13 WHERE version < 1000;
|
||||
|
||||
ALTER TABLE mai2_item_character ADD point int(11) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN cardId int(11) NULL;
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN cardTypeId int(11) NULL;
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN charaId int(11) NULL;
|
||||
ALTER TABLE mai2_item_card MODIFY COLUMN mapId int(11) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN characterId int(11) NULL;
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN level int(11) NULL;
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN awakening int(11) NULL;
|
||||
ALTER TABLE mai2_item_character MODIFY COLUMN useCount int(11) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_charge MODIFY COLUMN chargeId int(11) NULL;
|
||||
ALTER TABLE mai2_item_charge MODIFY COLUMN stock int(11) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_favorite MODIFY COLUMN itemKind int(11) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN seasonId int(11) NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN point int(11) NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN `rank` int(11) NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN rewardGet tinyint(1) NULL;
|
||||
ALTER TABLE mai2_item_friend_season_ranking MODIFY COLUMN userName varchar(8) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN itemId int(11) NULL;
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN itemKind int(11) NULL;
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN stock int(11) NULL;
|
||||
ALTER TABLE mai2_item_item MODIFY COLUMN isValid tinyint(1) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN bonusId int(11) NULL;
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN point int(11) NULL;
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN isCurrent tinyint(1) NULL;
|
||||
ALTER TABLE mai2_item_login_bonus MODIFY COLUMN isComplete tinyint(1) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN mapId int(11) NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN distance int(11) NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN isLock tinyint(1) NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN isClear tinyint(1) NULL;
|
||||
ALTER TABLE mai2_item_map MODIFY COLUMN isComplete tinyint(1) NULL;
|
||||
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN printDate timestamp DEFAULT current_timestamp() NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN serialId varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN placeId int(11) NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN clientId varchar(11) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL;
|
||||
ALTER TABLE mai2_item_print_detail MODIFY COLUMN printerSerialId varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL;
|
|
@ -0,0 +1,9 @@
|
|||
CREATE TABLE `mai2_profile_consec_logins` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`user` int(11) NOT NULL,
|
||||
`version` int(11) NOT NULL,
|
||||
`logins` int(11) DEFAULT NULL,
|
||||
PRIMARY KEY (`id`),
|
||||
UNIQUE KEY `mai2_profile_consec_logins_uk` (`user`,`version`),
|
||||
CONSTRAINT `mai2_profile_consec_logins_ibfk_1` FOREIGN KEY (`user`) REFERENCES `aime_user` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
|
|
@ -0,0 +1,2 @@
|
|||
SET FOREIGN_KEY_CHECKS=0;
|
||||
SET FOREIGN_KEY_CHECKS=1;
|
|
@ -0,0 +1 @@
|
|||
ALTER TABLE wacca_profile DROP COLUMN playcount_time_free;
|
|
@ -0,0 +1 @@
|
|||
DELETE FROM wacca_item WHERE type=17 AND item_id=312002;
|
|
@ -0,0 +1 @@
|
|||
ALTER TABLE wacca_profile ADD playcount_time_free int(11) DEFAULT 0 NULL AFTER playcount_stageup;
|
|
@ -0,0 +1,259 @@
|
|||
import logging, coloredlogs
|
||||
from typing import Any, Dict, List
|
||||
from twisted.web import resource
|
||||
from twisted.web.util import redirectTo
|
||||
from twisted.web.http import Request
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from twisted.web.server import Session
|
||||
from zope.interface import Interface, Attribute, implementer
|
||||
from twisted.python.components import registerAdapter
|
||||
import jinja2
|
||||
import bcrypt
|
||||
|
||||
from core import CoreConfig, Utils
|
||||
from core.data import Data
|
||||
|
||||
|
||||
class IUserSession(Interface):
|
||||
userId = Attribute("User's ID")
|
||||
current_ip = Attribute("User's current ip address")
|
||||
permissions = Attribute("User's permission level")
|
||||
|
||||
|
||||
@implementer(IUserSession)
|
||||
class UserSession(object):
|
||||
def __init__(self, session):
|
||||
self.userId = 0
|
||||
self.current_ip = "0.0.0.0"
|
||||
self.permissions = 0
|
||||
|
||||
|
||||
class FrontendServlet(resource.Resource):
|
||||
def getChild(self, name: bytes, request: Request):
|
||||
self.logger.debug(f"{Utils.get_ip_addr(request)} -> {name.decode()}")
|
||||
if name == b"":
|
||||
return self
|
||||
return resource.Resource.getChild(self, name, request)
|
||||
|
||||
def __init__(self, cfg: CoreConfig, config_dir: str) -> None:
|
||||
self.config = cfg
|
||||
log_fmt_str = "[%(asctime)s] Frontend | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
self.logger = logging.getLogger("frontend")
|
||||
self.environment = jinja2.Environment(loader=jinja2.FileSystemLoader("."))
|
||||
self.game_list: List[Dict[str, str]] = []
|
||||
self.children: Dict[str, Any] = {}
|
||||
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(self.config.server.log_dir, "frontend"),
|
||||
when="d",
|
||||
backupCount=10,
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
self.logger.addHandler(fileHandler)
|
||||
self.logger.addHandler(consoleHandler)
|
||||
|
||||
self.logger.setLevel(cfg.frontend.loglevel)
|
||||
coloredlogs.install(
|
||||
level=cfg.frontend.loglevel, logger=self.logger, fmt=log_fmt_str
|
||||
)
|
||||
registerAdapter(UserSession, Session, IUserSession)
|
||||
|
||||
fe_game = FE_Game(cfg, self.environment)
|
||||
games = Utils.get_all_titles()
|
||||
for game_dir, game_mod in games.items():
|
||||
if hasattr(game_mod, "frontend"):
|
||||
try:
|
||||
game_fe = game_mod.frontend(cfg, self.environment, config_dir)
|
||||
self.game_list.append({"url": game_dir, "name": game_fe.nav_name})
|
||||
fe_game.putChild(game_dir.encode(), game_fe)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
f"Failed to import frontend from {game_dir} because {e}"
|
||||
)
|
||||
|
||||
self.environment.globals["game_list"] = self.game_list
|
||||
self.putChild(b"gate", FE_Gate(cfg, self.environment))
|
||||
self.putChild(b"user", FE_User(cfg, self.environment))
|
||||
self.putChild(b"game", fe_game)
|
||||
|
||||
self.logger.info(
|
||||
f"Ready on port {self.config.frontend.port} serving {len(fe_game.children)} games"
|
||||
)
|
||||
|
||||
def render_GET(self, request):
|
||||
self.logger.debug(f"{Utils.get_ip_addr(request)} -> {request.uri.decode()}")
|
||||
template = self.environment.get_template("core/frontend/index.jinja")
|
||||
return template.render(
|
||||
server_name=self.config.server.name,
|
||||
title=self.config.server.name,
|
||||
game_list=self.game_list,
|
||||
sesh=vars(IUserSession(request.getSession())),
|
||||
).encode("utf-16")
|
||||
|
||||
|
||||
class FE_Base(resource.Resource):
|
||||
"""
|
||||
A Generic skeleton class that all frontend handlers should inherit from
|
||||
Initializes the environment, data, logger, config, and sets isLeaf to true
|
||||
It is expected that game implementations of this class overwrite many of these
|
||||
"""
|
||||
|
||||
isLeaf = True
|
||||
|
||||
def __init__(self, cfg: CoreConfig, environment: jinja2.Environment) -> None:
|
||||
self.core_config = cfg
|
||||
self.data = Data(cfg)
|
||||
self.logger = logging.getLogger("frontend")
|
||||
self.environment = environment
|
||||
self.nav_name = "nav_name"
|
||||
|
||||
|
||||
class FE_Gate(FE_Base):
|
||||
def render_GET(self, request: Request):
|
||||
self.logger.debug(f"{Utils.get_ip_addr(request)} -> {request.uri.decode()}")
|
||||
uri: str = request.uri.decode()
|
||||
|
||||
sesh = request.getSession()
|
||||
usr_sesh = IUserSession(sesh)
|
||||
if usr_sesh.userId > 0:
|
||||
return redirectTo(b"/user", request)
|
||||
|
||||
if uri.startswith("/gate/create"):
|
||||
return self.create_user(request)
|
||||
|
||||
if b"e" in request.args:
|
||||
try:
|
||||
err = int(request.args[b"e"][0].decode())
|
||||
except:
|
||||
err = 0
|
||||
|
||||
else:
|
||||
err = 0
|
||||
|
||||
template = self.environment.get_template("core/frontend/gate/gate.jinja")
|
||||
return template.render(
|
||||
title=f"{self.core_config.server.name} | Login Gate",
|
||||
error=err,
|
||||
sesh=vars(usr_sesh),
|
||||
).encode("utf-16")
|
||||
|
||||
def render_POST(self, request: Request):
|
||||
uri = request.uri.decode()
|
||||
ip = Utils.get_ip_addr(request)
|
||||
|
||||
if uri == "/gate/gate.login":
|
||||
access_code: str = request.args[b"access_code"][0].decode()
|
||||
passwd: bytes = request.args[b"passwd"][0]
|
||||
if passwd == b"":
|
||||
passwd = None
|
||||
|
||||
uid = self.data.card.get_user_id_from_card(access_code)
|
||||
if uid is None:
|
||||
return redirectTo(b"/gate?e=1", request)
|
||||
|
||||
if passwd is None:
|
||||
sesh = self.data.user.check_password(uid)
|
||||
|
||||
if sesh is not None:
|
||||
return redirectTo(
|
||||
f"/gate/create?ac={access_code}".encode(), request
|
||||
)
|
||||
return redirectTo(b"/gate?e=1", request)
|
||||
|
||||
if not self.data.user.check_password(uid, passwd):
|
||||
return redirectTo(b"/gate?e=1", request)
|
||||
|
||||
self.logger.info(f"Successful login of user {uid} at {ip}")
|
||||
|
||||
sesh = request.getSession()
|
||||
usr_sesh = IUserSession(sesh)
|
||||
usr_sesh.userId = uid
|
||||
usr_sesh.current_ip = ip
|
||||
|
||||
return redirectTo(b"/user", request)
|
||||
|
||||
elif uri == "/gate/gate.create":
|
||||
access_code: str = request.args[b"access_code"][0].decode()
|
||||
username: str = request.args[b"username"][0]
|
||||
email: str = request.args[b"email"][0].decode()
|
||||
passwd: bytes = request.args[b"passwd"][0]
|
||||
|
||||
uid = self.data.card.get_user_id_from_card(access_code)
|
||||
if uid is None:
|
||||
return redirectTo(b"/gate?e=1", request)
|
||||
|
||||
salt = bcrypt.gensalt()
|
||||
hashed = bcrypt.hashpw(passwd, salt)
|
||||
|
||||
result = self.data.user.create_user(
|
||||
uid, username, email, hashed.decode(), 1
|
||||
)
|
||||
if result is None:
|
||||
return redirectTo(b"/gate?e=3", request)
|
||||
|
||||
if not self.data.user.check_password(uid, passwd):
|
||||
return redirectTo(b"/gate", request)
|
||||
|
||||
return redirectTo(b"/user", request)
|
||||
|
||||
else:
|
||||
return b""
|
||||
|
||||
def create_user(self, request: Request):
|
||||
if b"ac" not in request.args or len(request.args[b"ac"][0].decode()) != 20:
|
||||
return redirectTo(b"/gate?e=2", request)
|
||||
|
||||
ac = request.args[b"ac"][0].decode()
|
||||
|
||||
template = self.environment.get_template("core/frontend/gate/create.jinja")
|
||||
return template.render(
|
||||
title=f"{self.core_config.server.name} | Create User",
|
||||
code=ac,
|
||||
sesh={"userId": 0},
|
||||
).encode("utf-16")
|
||||
|
||||
|
||||
class FE_User(FE_Base):
|
||||
def render_GET(self, request: Request):
|
||||
template = self.environment.get_template("core/frontend/user/index.jinja")
|
||||
|
||||
sesh: Session = request.getSession()
|
||||
usr_sesh = IUserSession(sesh)
|
||||
if usr_sesh.userId == 0:
|
||||
return redirectTo(b"/gate", request)
|
||||
|
||||
cards = self.data.card.get_user_cards(usr_sesh.userId)
|
||||
user = self.data.user.get_user(usr_sesh.userId)
|
||||
card_data = []
|
||||
for c in cards:
|
||||
if c['is_locked']:
|
||||
status = 'Locked'
|
||||
elif c['is_banned']:
|
||||
status = 'Banned'
|
||||
else:
|
||||
status = 'Active'
|
||||
|
||||
card_data.append({'access_code': c['access_code'], 'status': status})
|
||||
|
||||
return template.render(
|
||||
title=f"{self.core_config.server.name} | Account", sesh=vars(usr_sesh), cards=card_data, username=user['username']
|
||||
).encode("utf-16")
|
||||
|
||||
|
||||
class FE_Game(FE_Base):
|
||||
isLeaf = False
|
||||
children: Dict[str, Any] = {}
|
||||
|
||||
def getChild(self, name: bytes, request: Request):
|
||||
if name == b"":
|
||||
return self
|
||||
return resource.Resource.getChild(self, name, request)
|
||||
|
||||
def render_GET(self, request: Request) -> bytes:
|
||||
return redirectTo(b"/user", request)
|
|
@ -0,0 +1,24 @@
|
|||
{% extends "core/frontend/index.jinja" %}
|
||||
{% block content %}
|
||||
<h1>Create User</h1>
|
||||
<form id="create" style="max-width: 240px; min-width: 10%;" action="/gate/gate.create" method="post">
|
||||
<div class="form-group row">
|
||||
<label for="access_code">Card Access Code</label><br>
|
||||
<input class="form-control" name="access_code" id="access_code" type="text" placeholder="00000000000000000000" value={{ code }} maxlength="20" readonly>
|
||||
</div>
|
||||
<div class="form-group row">
|
||||
<label for="username">Username</label><br>
|
||||
<input id="username" class="form-control" name="username" type="text" placeholder="username">
|
||||
</div>
|
||||
<div class="form-group row">
|
||||
<label for="email">Email</label><br>
|
||||
<input id="email" class="form-control" name="email" type="email" placeholder="example@example.com">
|
||||
</div>
|
||||
<div class="form-group row">
|
||||
<label for="passwd">Password</label><br>
|
||||
<input id="passwd" class="form-control" name="passwd" type="password" placeholder="password">
|
||||
</div>
|
||||
<p></p>
|
||||
<input id="submit" class="btn btn-primary" style="display: block; margin: 0 auto;" type="submit" value="Create">
|
||||
</form>
|
||||
{% endblock content %}
|
|
@ -0,0 +1,32 @@
|
|||
{% extends "core/frontend/index.jinja" %}
|
||||
{% block content %}
|
||||
<h1>Gate</h1>
|
||||
{% include "core/frontend/widgets/err_banner.jinja" %}
|
||||
<style>
|
||||
/* Chrome, Safari, Edge, Opera */
|
||||
input::-webkit-outer-spin-button,
|
||||
input::-webkit-inner-spin-button {
|
||||
-webkit-appearance: none;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
/* Firefox */
|
||||
input[type=number] {
|
||||
-moz-appearance: textfield;
|
||||
}
|
||||
</style>
|
||||
<form id="login" style="max-width: 240px; min-width: 10%;" action="/gate/gate.login" method="post">
|
||||
<div class="form-group row">
|
||||
<label for="access_code">Card Access Code</label><br>
|
||||
<input form="login" class="form-control" name="access_code" id="access_code" type="number" placeholder="00000000000000000000" maxlength="20" required>
|
||||
</div>
|
||||
<div class="form-group row">
|
||||
<label for="passwd">Password</label><br>
|
||||
<input id="passwd" class="form-control" name="passwd" type="password" placeholder="password">
|
||||
</div>
|
||||
<p></p>
|
||||
<input id="submit" class="btn btn-primary" style="display: block; margin: 0 auto;" form="login" type="submit" value="Login">
|
||||
</form>
|
||||
<h6>*To register for the webui, type in the access code of your card, as shown in a game, and leave the password field blank.</h6>
|
||||
<h6>*If you have not registered a card with this server, you cannot create a webui account.</h6>
|
||||
{% endblock content %}
|
|
@ -0,0 +1,88 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>{{ title }}</title>
|
||||
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-rbsA2VBKQhggwzxH7pPCaAqO46MgnOM80zW1RWuH61DGLwZJEdK2Kadq2F9CUG65" crossorigin="anonymous">
|
||||
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-kenU1KFdBIe4zVF0s0G1M5b4hcpxyD9F7jL+jjXkk+Q2h455rYXK/7HAuoJl+0I4" crossorigin="anonymous"></script>
|
||||
<style>
|
||||
html {
|
||||
background-color: #181a1b !important;
|
||||
margin: 10px;
|
||||
}
|
||||
html {
|
||||
color-scheme: dark !important;
|
||||
}
|
||||
html, body, input, textarea, select, button, dialog {
|
||||
background-color: #181a1b;
|
||||
}
|
||||
html, body, input, textarea, select, button {
|
||||
border-color: #736b5e;
|
||||
color: #e8e6e3;
|
||||
}
|
||||
a {
|
||||
color: #3391ff;
|
||||
}
|
||||
table {
|
||||
border-color: #545b5e;
|
||||
}
|
||||
::placeholder {
|
||||
color: #b2aba1;
|
||||
}
|
||||
input:-webkit-autofill,
|
||||
textarea:-webkit-autofill,
|
||||
select:-webkit-autofill {
|
||||
background-color: #404400 !important;
|
||||
color: #e8e6e3 !important;
|
||||
}
|
||||
::-webkit-scrollbar {
|
||||
background-color: #202324;
|
||||
color: #aba499;
|
||||
}
|
||||
::-webkit-scrollbar-thumb {
|
||||
background-color: #454a4d;
|
||||
}
|
||||
::-webkit-scrollbar-thumb:hover {
|
||||
background-color: #575e62;
|
||||
}
|
||||
::-webkit-scrollbar-thumb:active {
|
||||
background-color: #484e51;
|
||||
}
|
||||
::-webkit-scrollbar-corner {
|
||||
background-color: #181a1b;
|
||||
}
|
||||
* {
|
||||
scrollbar-color: #454a4d #202324;
|
||||
}
|
||||
::selection {
|
||||
background-color: #004daa !important;
|
||||
color: #e8e6e3 !important;
|
||||
}
|
||||
::-moz-selection {
|
||||
background-color: #004daa !important;
|
||||
color: #e8e6e3 !important;
|
||||
}
|
||||
input[type="text"], input[type="text"]:focus, input[type="password"], input[type="password"]:focus, input[type="email"], input[type="email"]:focus {
|
||||
background-color: #202324 !important;
|
||||
color: #e8e6e3;
|
||||
}
|
||||
form {
|
||||
outline: 1px solid grey;
|
||||
padding: 20px;
|
||||
padding-top: 10px;
|
||||
padding-bottom: 10px;
|
||||
}
|
||||
.err-banner {
|
||||
background-color: #AA0000;
|
||||
padding: 20px;
|
||||
margin-bottom: 10px;
|
||||
width: 15%;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
{% include "core/frontend/widgets/topbar.jinja" %}
|
||||
{% block content %}
|
||||
<h1>{{ server_name }}</h1>
|
||||
{% endblock content %}
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,31 @@
|
|||
{% extends "core/frontend/index.jinja" %}
|
||||
{% block content %}
|
||||
<h1>Management for {{ username }}</h1>
|
||||
<h2>Cards <button class="btn btn-success" data-bs-toggle="modal" data-bs-target="#card_add">Add</button></h2>
|
||||
<ul>
|
||||
{% for c in cards %}
|
||||
<li>{{ c.access_code }}: {{ c.status }} <button class="btn-danger btn">Delete</button></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
<div class="modal fade" id="card_add" tabindex="-1" aria-labelledby="card_add_label" aria-hidden="true">
|
||||
<div class="modal-dialog">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<h1 class="modal-title fs-5" id="card_add_label">Add Card</h1>
|
||||
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
HOW TO:<br>
|
||||
Scan your card on any networked game and press the "View Access Code" button (varies by game) and enter the 20 digit code below.<br>
|
||||
!!FOR AMUSEIC CARDS: DO NOT ENTER THE CODE SHOWN ON THE BACK OF THE CARD ITSELF OR IT WILL NOT WORK!!
|
||||
<p /><label for="card_add_frm_access_code">Access Code: </label><input id="card_add_frm_access_code" maxlength="20" type="text" required>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-primary">Add</button>
|
||||
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endblock content %}
|
|
@ -0,0 +1,14 @@
|
|||
{% if error > 0 %}
|
||||
<div class="err-banner">
|
||||
<h3>Error</h3>
|
||||
{% if error == 1 %}
|
||||
Card not registered, or wrong password
|
||||
{% elif error == 2 %}
|
||||
Missing or malformed access code
|
||||
{% elif error == 3 %}
|
||||
Failed to create user
|
||||
{% else %}
|
||||
An unknown error occoured
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
|
@ -0,0 +1,18 @@
|
|||
<div style="background: #333; color: #f9f9f9; width: 10%; height: 50px; line-height: 50px; text-align: center; float: left;">
|
||||
Navigation
|
||||
</div>
|
||||
<div style="background: #333; color: #f9f9f9; width: 80%; height: 50px; line-height: 50px; padding-left: 10px; float: left;">
|
||||
<a href=/><button class="btn btn-primary">Home</button></a>
|
||||
{% for game in game_list %}
|
||||
<a href=/game/{{ game.url }}><button class="btn btn-success">{{ game.name }}</button></a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
<div style="background: #333; color: #f9f9f9; width: 10%; height: 50px; line-height: 50px; text-align: center; float: left;">
|
||||
{% if sesh is defined and sesh["userId"] > 0 %}
|
||||
<a href="/user"><button class="btn btn-primary">Account</button></a>
|
||||
{% else %}
|
||||
<a href="/gate"><button class="btn btn-primary">Gate</button></a>
|
||||
{% endif %}
|
||||
|
||||
</div>
|
247
core/mucha.py
247
core/mucha.py
|
@ -1,70 +1,136 @@
|
|||
from typing import Dict, Any, Optional
|
||||
from typing import Dict, Any, Optional, List
|
||||
import logging, coloredlogs
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from twisted.web import resource
|
||||
from twisted.web.http import Request
|
||||
from datetime import datetime
|
||||
from Crypto.Cipher import Blowfish
|
||||
import pytz
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core import CoreConfig
|
||||
from core.utils import Utils
|
||||
|
||||
|
||||
class MuchaServlet:
|
||||
def __init__(self, cfg: CoreConfig) -> None:
|
||||
def __init__(self, cfg: CoreConfig, cfg_dir: str) -> None:
|
||||
self.config = cfg
|
||||
self.config_dir = cfg_dir
|
||||
self.mucha_registry: List[str] = []
|
||||
|
||||
self.logger = logging.getLogger('mucha')
|
||||
self.logger = logging.getLogger("mucha")
|
||||
log_fmt_str = "[%(asctime)s] Mucha | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
|
||||
fileHandler = TimedRotatingFileHandler("{0}/{1}.log".format(self.config.server.log_dir, "mucha"), when="d", backupCount=10)
|
||||
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(self.config.server.log_dir, "mucha"),
|
||||
when="d",
|
||||
backupCount=10,
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
self.logger.addHandler(fileHandler)
|
||||
self.logger.addHandler(consoleHandler)
|
||||
|
||||
self.logger.setLevel(logging.INFO)
|
||||
coloredlogs.install(level=logging.INFO, logger=self.logger, fmt=log_fmt_str)
|
||||
|
||||
def handle_boardauth(self, request: Request) -> bytes:
|
||||
self.logger.setLevel(cfg.mucha.loglevel)
|
||||
coloredlogs.install(level=cfg.mucha.loglevel, logger=self.logger, fmt=log_fmt_str)
|
||||
|
||||
all_titles = Utils.get_all_titles()
|
||||
|
||||
for _, mod in all_titles.items():
|
||||
if hasattr(mod, "index") and hasattr(mod.index, "get_mucha_info"):
|
||||
enabled, game_cd = mod.index.get_mucha_info(
|
||||
self.config, self.config_dir
|
||||
)
|
||||
if enabled:
|
||||
self.mucha_registry.append(game_cd)
|
||||
|
||||
self.logger.info(f"Serving {len(self.mucha_registry)} games")
|
||||
|
||||
def handle_boardauth(self, request: Request, _: Dict) -> bytes:
|
||||
req_dict = self.mucha_preprocess(request.content.getvalue())
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
if req_dict is None:
|
||||
self.logger.error(f"Error processing mucha request {request.content.getvalue()}")
|
||||
return b""
|
||||
self.logger.error(
|
||||
f"Error processing mucha request {request.content.getvalue()}"
|
||||
)
|
||||
return b"RESULTS=000"
|
||||
|
||||
req = MuchaAuthRequest(req_dict)
|
||||
self.logger.info(f"Mucha request {vars(req)}")
|
||||
resp = MuchaAuthResponse(mucha_url=f"{self.config.mucha.hostname}:{self.config.mucha.port}")
|
||||
self.logger.info(f"Mucha response {vars(resp)}")
|
||||
self.logger.info(f"Boardauth request from {client_ip} for {req.gameVer}")
|
||||
self.logger.debug(f"Mucha request {vars(req)}")
|
||||
|
||||
if req.gameCd not in self.mucha_registry:
|
||||
self.logger.warn(f"Unknown gameCd {req.gameCd}")
|
||||
return b"RESULTS=000"
|
||||
|
||||
# TODO: Decrypt S/N
|
||||
|
||||
#cipher = Blowfish.new(req.sendDate.encode(), Blowfish.MODE_ECB)
|
||||
#sn_decrypt = cipher.decrypt(bytes.fromhex(req.serialNum))
|
||||
#self.logger.debug(f"Decrypt SN to {sn_decrypt.hex()}")
|
||||
|
||||
resp = MuchaAuthResponse(
|
||||
f"{self.config.mucha.hostname}{':' + str(self.config.allnet.port) if self.config.server.is_develop else ''}"
|
||||
)
|
||||
|
||||
self.logger.debug(f"Mucha response {vars(resp)}")
|
||||
|
||||
return self.mucha_postprocess(vars(resp))
|
||||
|
||||
def handle_updatecheck(self, request: Request) -> bytes:
|
||||
|
||||
def handle_updatecheck(self, request: Request, _: Dict) -> bytes:
|
||||
req_dict = self.mucha_preprocess(request.content.getvalue())
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
if req_dict is None:
|
||||
self.logger.error(f"Error processing mucha request {request.content.getvalue()}")
|
||||
return b""
|
||||
self.logger.error(
|
||||
f"Error processing mucha request {request.content.getvalue()}"
|
||||
)
|
||||
return b"RESULTS=000"
|
||||
|
||||
req = MuchaUpdateRequest(req_dict)
|
||||
self.logger.info(f"Mucha request {vars(req)}")
|
||||
resp = MuchaUpdateResponse(mucha_url=f"{self.config.mucha.hostname}:{self.config.mucha.port}")
|
||||
self.logger.info(f"Mucha response {vars(resp)}")
|
||||
self.logger.info(f"Updatecheck request from {client_ip} for {req.gameVer}")
|
||||
self.logger.debug(f"Mucha request {vars(req)}")
|
||||
|
||||
if req.gameCd not in self.mucha_registry:
|
||||
self.logger.warn(f"Unknown gameCd {req.gameCd}")
|
||||
return b"RESULTS=000"
|
||||
|
||||
resp = MuchaUpdateResponse(req.gameVer, f"{self.config.mucha.hostname}{':' + str(self.config.allnet.port) if self.config.server.is_develop else ''}")
|
||||
|
||||
self.logger.debug(f"Mucha response {vars(resp)}")
|
||||
|
||||
return self.mucha_postprocess(vars(resp))
|
||||
|
||||
def handle_dlstate(self, request: Request, _: Dict) -> bytes:
|
||||
req_dict = self.mucha_preprocess(request.content.getvalue())
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
if req_dict is None:
|
||||
self.logger.error(
|
||||
f"Error processing mucha request {request.content.getvalue()}"
|
||||
)
|
||||
return b""
|
||||
|
||||
req = MuchaDownloadStateRequest(req_dict)
|
||||
self.logger.info(f"DownloadState request from {client_ip} for {req.gameCd} -> {req.updateVer}")
|
||||
self.logger.debug(f"request {vars(req)}")
|
||||
return b"RESULTS=001"
|
||||
|
||||
def mucha_preprocess(self, data: bytes) -> Optional[Dict]:
|
||||
try:
|
||||
ret: Dict[str, Any] = {}
|
||||
|
||||
for x in data.decode().split('&'):
|
||||
kvp = x.split('=')
|
||||
|
||||
for x in data.decode().split("&"):
|
||||
kvp = x.split("=")
|
||||
if len(kvp) == 2:
|
||||
ret[kvp[0]] = kvp[1]
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
except:
|
||||
self.logger.error(f"Error processing mucha request {data}")
|
||||
return None
|
||||
|
@ -72,7 +138,7 @@ class MuchaServlet:
|
|||
def mucha_postprocess(self, data: dict) -> Optional[bytes]:
|
||||
try:
|
||||
urlencode = ""
|
||||
for k,v in data.items():
|
||||
for k, v in data.items():
|
||||
urlencode += f"{k}={v}&"
|
||||
|
||||
return urlencode.encode()
|
||||
|
@ -81,35 +147,39 @@ class MuchaServlet:
|
|||
self.logger.error("Error processing mucha response")
|
||||
return None
|
||||
|
||||
class MuchaAuthRequest():
|
||||
def __init__(self, request: Dict) -> None:
|
||||
self.gameVer = "" if "gameVer" not in request else request["gameVer"]
|
||||
self.sendDate = "" if "sendDate" not in request else request["sendDate"]
|
||||
self.serialNum = "" if "serialNum" not in request else request["serialNum"]
|
||||
self.gameCd = "" if "gameCd" not in request else request["gameCd"]
|
||||
self.boardType = "" if "boardType" not in request else request["boardType"]
|
||||
self.boardId = "" if "boardId" not in request else request["boardId"]
|
||||
self.placeId = "" if "placeId" not in request else request["placeId"]
|
||||
self.storeRouterIp = "" if "storeRouterIp" not in request else request["storeRouterIp"]
|
||||
self.countryCd = "" if "countryCd" not in request else request["countryCd"]
|
||||
self.useToken = "" if "useToken" not in request else request["useToken"]
|
||||
self.allToken = "" if "allToken" not in request else request["allToken"]
|
||||
|
||||
class MuchaAuthResponse():
|
||||
def __init__(self, mucha_url: str = "localhost") -> None:
|
||||
self.RESULTS = "001"
|
||||
class MuchaAuthRequest:
|
||||
def __init__(self, request: Dict) -> None:
|
||||
# gameCd + boardType + countryCd + version
|
||||
self.gameVer = request.get("gameVer", "")
|
||||
self.sendDate = request.get("sendDate", "") # %Y%m%d
|
||||
self.serialNum = request.get("serialNum", "")
|
||||
self.gameCd = request.get("gameCd", "")
|
||||
self.boardType = request.get("boardType", "")
|
||||
self.boardId = request.get("boardId", "")
|
||||
self.mac = request.get("mac", "")
|
||||
self.placeId = request.get("placeId", "")
|
||||
self.storeRouterIp = request.get("storeRouterIp", "")
|
||||
self.countryCd = request.get("countryCd", "")
|
||||
self.useToken = request.get("useToken", "")
|
||||
self.allToken = request.get("allToken", "")
|
||||
|
||||
|
||||
class MuchaAuthResponse:
|
||||
def __init__(self, mucha_url: str) -> None:
|
||||
self.RESULTS = "001"
|
||||
self.AUTH_INTERVAL = "86400"
|
||||
self.SERVER_TIME = datetime.strftime(datetime.now(), "%Y%m%d%H%M")
|
||||
self.UTC_SERVER_TIME = datetime.strftime(datetime.now(pytz.UTC), "%Y%m%d%H%M")
|
||||
|
||||
self.CHARGE_URL = f"https://{mucha_url}/charge/"
|
||||
self.CHARGE_URL = f"https://{mucha_url}/charge/"
|
||||
self.FILE_URL = f"https://{mucha_url}/file/"
|
||||
self.URL_1 = f"https://{mucha_url}/url1/"
|
||||
self.URL_2 = f"https://{mucha_url}/url2/"
|
||||
self.URL_3 = f"https://{mucha_url}/url3/"
|
||||
|
||||
self.PLACE_ID = "JPN123"
|
||||
self.COUNTRY_CD = "JPN"
|
||||
|
||||
self.PLACE_ID = "JPN123"
|
||||
self.COUNTRY_CD = "JPN"
|
||||
self.SHOP_NAME = "TestShop!"
|
||||
self.SHOP_NICKNAME = "TestShop"
|
||||
self.AREA_0 = "008"
|
||||
|
@ -120,7 +190,7 @@ class MuchaAuthResponse():
|
|||
self.AREA_FULL_1 = ""
|
||||
self.AREA_FULL_2 = ""
|
||||
self.AREA_FULL_3 = ""
|
||||
|
||||
|
||||
self.SHOP_NAME_EN = "TestShop!"
|
||||
self.SHOP_NICKNAME_EN = "TestShop"
|
||||
self.AREA_0_EN = "008"
|
||||
|
@ -132,32 +202,77 @@ class MuchaAuthResponse():
|
|||
self.AREA_FULL_2_EN = ""
|
||||
self.AREA_FULL_3_EN = ""
|
||||
|
||||
self.PREFECTURE_ID = "1"
|
||||
self.PREFECTURE_ID = "1"
|
||||
self.EXPIRATION_DATE = "null"
|
||||
self.USE_TOKEN = "0"
|
||||
self.CONSUME_TOKEN = "0"
|
||||
self.DONGLE_FLG = "1"
|
||||
self.FORCE_BOOT = "0"
|
||||
|
||||
class MuchaUpdateRequest():
|
||||
def __init__(self, request: Dict) -> None:
|
||||
self.gameVer = "" if "gameVer" not in request else request["gameVer"]
|
||||
self.gameCd = "" if "gameCd" not in request else request["gameCd"]
|
||||
self.serialNum = "" if "serialNum" not in request else request["serialNum"]
|
||||
self.countryCd = "" if "countryCd" not in request else request["countryCd"]
|
||||
self.placeId = "" if "placeId" not in request else request["placeId"]
|
||||
self.storeRouterIp = "" if "storeRouterIp" not in request else request["storeRouterIp"]
|
||||
|
||||
class MuchaUpdateResponse():
|
||||
def __init__(self, game_ver: str = "PKFN0JPN01.01", mucha_url: str = "localhost") -> None:
|
||||
self.RESULTS = "001"
|
||||
class MuchaUpdateRequest:
|
||||
def __init__(self, request: Dict) -> None:
|
||||
self.gameVer = request.get("gameVer", "")
|
||||
self.gameCd = request.get("gameCd", "")
|
||||
self.serialNum = request.get("serialNum", "")
|
||||
self.countryCd = request.get("countryCd", "")
|
||||
self.placeId = request.get("placeId", "")
|
||||
self.storeRouterIp = request.get("storeRouterIp", "")
|
||||
|
||||
|
||||
class MuchaUpdateResponse:
|
||||
def __init__(self, game_ver: str, mucha_url: str) -> None:
|
||||
self.RESULTS = "001"
|
||||
self.EXE_VER = game_ver
|
||||
|
||||
self.UPDATE_VER_1 = game_ver
|
||||
self.UPDATE_URL_1 = f"https://{mucha_url}/updUrl1/"
|
||||
self.UPDATE_SIZE_1 = "0"
|
||||
self.UPDATE_CRC_1 = "0000000000000000"
|
||||
self.CHECK_URL_1 = f"https://{mucha_url}/checkUrl/"
|
||||
self.EXE_VER_1 = game_ver
|
||||
self.UPDATE_URL_1 = f"http://{mucha_url}/updUrl1/"
|
||||
self.UPDATE_SIZE_1 = "20"
|
||||
|
||||
self.CHECK_CRC_1 = "0000000000000000"
|
||||
self.CHECK_URL_1 = f"http://{mucha_url}/checkUrl/"
|
||||
self.CHECK_SIZE_1 = "20"
|
||||
|
||||
self.INFO_SIZE_1 = "0"
|
||||
self.COM_SIZE_1 = "0"
|
||||
self.COM_TIME_1 = "0"
|
||||
self.LAN_INFO_SIZE_1 = "0"
|
||||
|
||||
self.USER_ID = ""
|
||||
self.PASSWORD = ""
|
||||
|
||||
"""
|
||||
RESULTS
|
||||
EXE_VER
|
||||
|
||||
UPDATE_VER_%d
|
||||
UPDATE_URL_%d
|
||||
UPDATE_SIZE_%d
|
||||
|
||||
CHECK_CRC_%d
|
||||
CHECK_URL_%d
|
||||
CHECK_SIZE_%d
|
||||
|
||||
INFO_SIZE_1
|
||||
COM_SIZE_1
|
||||
COM_TIME_1
|
||||
LAN_INFO_SIZE_1
|
||||
|
||||
USER_ID
|
||||
PASSWORD
|
||||
"""
|
||||
class MuchaUpdateResponseStub:
|
||||
def __init__(self, game_ver: str) -> None:
|
||||
self.RESULTS = "001"
|
||||
self.UPDATE_VER_1 = game_ver
|
||||
|
||||
class MuchaDownloadStateRequest:
|
||||
def __init__(self, request: Dict) -> None:
|
||||
self.gameCd = request.get("gameCd", "")
|
||||
self.updateVer = request.get("updateVer", "")
|
||||
self.serialNum = request.get("serialNum", "")
|
||||
self.fileSize = request.get("fileSize", "")
|
||||
self.compFileSize = request.get("compFileSize", "")
|
||||
self.boardId = request.get("boardId", "")
|
||||
self.placeId = request.get("placeId", "")
|
||||
self.storeRouterIp = request.get("storeRouterIp", "")
|
||||
|
|
|
@ -7,8 +7,9 @@ from core.config import CoreConfig
|
|||
from core.data import Data
|
||||
from core.utils import Utils
|
||||
|
||||
class TitleServlet():
|
||||
def __init__(self, core_cfg: CoreConfig, cfg_folder: str):
|
||||
|
||||
class TitleServlet:
|
||||
def __init__(self, core_cfg: CoreConfig, cfg_folder: str):
|
||||
super().__init__()
|
||||
self.config = core_cfg
|
||||
self.config_folder = cfg_folder
|
||||
|
@ -18,57 +19,86 @@ class TitleServlet():
|
|||
self.logger = logging.getLogger("title")
|
||||
if not hasattr(self.logger, "initialized"):
|
||||
log_fmt_str = "[%(asctime)s] Title | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
|
||||
fileHandler = TimedRotatingFileHandler("{0}/{1}.log".format(self.config.server.log_dir, "title"), when="d", backupCount=10)
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(self.config.server.log_dir, "title"),
|
||||
when="d",
|
||||
backupCount=10,
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
self.logger.addHandler(fileHandler)
|
||||
self.logger.addHandler(consoleHandler)
|
||||
|
||||
|
||||
self.logger.setLevel(core_cfg.title.loglevel)
|
||||
coloredlogs.install(level=core_cfg.title.loglevel, logger=self.logger, fmt=log_fmt_str)
|
||||
coloredlogs.install(
|
||||
level=core_cfg.title.loglevel, logger=self.logger, fmt=log_fmt_str
|
||||
)
|
||||
self.logger.initialized = True
|
||||
|
||||
|
||||
plugins = Utils.get_all_titles()
|
||||
|
||||
|
||||
for folder, mod in plugins.items():
|
||||
if hasattr(mod, "game_codes") and hasattr(mod, "index"):
|
||||
handler_cls = mod.index(self.config, self.config_folder)
|
||||
if hasattr(handler_cls, "setup"):
|
||||
handler_cls.setup()
|
||||
|
||||
for code in mod.game_codes:
|
||||
self.title_registry[code] = handler_cls
|
||||
|
||||
should_call_setup = True
|
||||
|
||||
if hasattr(mod.index, "get_allnet_info"):
|
||||
for code in mod.game_codes:
|
||||
enabled, _, _ = mod.index.get_allnet_info(
|
||||
code, self.config, self.config_folder
|
||||
)
|
||||
|
||||
if enabled:
|
||||
handler_cls = mod.index(self.config, self.config_folder)
|
||||
|
||||
if hasattr(handler_cls, "setup") and should_call_setup:
|
||||
handler_cls.setup()
|
||||
should_call_setup = False
|
||||
|
||||
self.title_registry[code] = handler_cls
|
||||
|
||||
else:
|
||||
self.logger.warn(f"Game {folder} has no get_allnet_info")
|
||||
|
||||
else:
|
||||
self.logger.error(f"{folder} missing game_code or index in __init__.py")
|
||||
|
||||
self.logger.info(f"Serving {len(self.title_registry)} game codes on port {core_cfg.title.port}")
|
||||
|
||||
self.logger.info(
|
||||
f"Serving {len(self.title_registry)} game codes {'on port ' + str(core_cfg.title.port) if core_cfg.title.port > 0 else ''}"
|
||||
)
|
||||
|
||||
def render_GET(self, request: Request, endpoints: dict) -> bytes:
|
||||
code = endpoints["game"]
|
||||
if code not in self.title_registry:
|
||||
self.logger.warn(f"Unknown game code {code}")
|
||||
|
||||
request.setResponseCode(404)
|
||||
return b""
|
||||
|
||||
index = self.title_registry[code]
|
||||
if not hasattr(index, "render_GET"):
|
||||
self.logger.warn(f"{code} does not dispatch GET")
|
||||
request.setResponseCode(405)
|
||||
return b""
|
||||
|
||||
return index.render_GET(request, endpoints["version"], endpoints["endpoint"])
|
||||
|
||||
return index.render_GET(request, int(endpoints["version"]), endpoints["endpoint"])
|
||||
|
||||
def render_POST(self, request: Request, endpoints: dict) -> bytes:
|
||||
code = endpoints["game"]
|
||||
if code not in self.title_registry:
|
||||
self.logger.warn(f"Unknown game code {code}")
|
||||
|
||||
request.setResponseCode(404)
|
||||
return b""
|
||||
|
||||
index = self.title_registry[code]
|
||||
if not hasattr(index, "render_POST"):
|
||||
self.logger.warn(f"{code} does not dispatch POST")
|
||||
request.setResponseCode(405)
|
||||
return b""
|
||||
|
||||
return index.render_POST(request, endpoints["version"], endpoints["endpoint"])
|
||||
return index.render_POST(
|
||||
request, int(endpoints["version"]), endpoints["endpoint"]
|
||||
)
|
||||
|
|
|
@ -1,22 +1,35 @@
|
|||
from typing import Dict, List, Any, Optional
|
||||
from typing import Dict, Any
|
||||
from types import ModuleType
|
||||
import zlib, base64
|
||||
from twisted.web.http import Request
|
||||
import logging
|
||||
import importlib
|
||||
from os import walk
|
||||
|
||||
|
||||
class Utils:
|
||||
@classmethod
|
||||
def get_all_titles(cls) -> Dict[str, ModuleType]:
|
||||
ret: Dict[str, Any] = {}
|
||||
|
||||
for root, dirs, files in walk("titles"):
|
||||
for dir in dirs:
|
||||
for dir in dirs:
|
||||
if not dir.startswith("__"):
|
||||
try:
|
||||
mod = importlib.import_module(f"titles.{dir}")
|
||||
ret[dir] = mod
|
||||
if hasattr(mod, "game_codes") and hasattr(
|
||||
mod, "index"
|
||||
): # Minimum required to function
|
||||
ret[dir] = mod
|
||||
|
||||
except ImportError as e:
|
||||
print(f"{dir} - {e}")
|
||||
logging.getLogger("core").error(f"get_all_titles: {dir} - {e}")
|
||||
raise
|
||||
return ret
|
||||
|
||||
@classmethod
|
||||
def get_ip_addr(cls, req: Request) -> str:
|
||||
return (
|
||||
req.getAllHeaders()[b"x-forwarded-for"].decode()
|
||||
if b"x-forwarded-for" in req.getAllHeaders()
|
||||
else req.getClientAddress().host
|
||||
)
|
||||
|
|
88
dbutils.py
88
dbutils.py
|
@ -1,47 +1,91 @@
|
|||
import yaml
|
||||
import argparse
|
||||
import logging
|
||||
from core.config import CoreConfig
|
||||
from core.data import Data
|
||||
from os import path, mkdir, access, W_OK
|
||||
|
||||
if __name__=='__main__':
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description="Database utilities")
|
||||
parser.add_argument("--config", "-c", type=str, help="Config folder to use", default="config")
|
||||
parser.add_argument("--version", "-v", type=str, help="Version of the database to upgrade/rollback to")
|
||||
parser.add_argument("--game", "-g", type=str, help="Game code of the game who's schema will be updated/rolled back. Ex. SDFE")
|
||||
parser.add_argument("action", type=str, help="DB Action, create, recreate, upgrade, or rollback")
|
||||
parser.add_argument(
|
||||
"--config", "-c", type=str, help="Config folder to use", default="config"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--version",
|
||||
"-v",
|
||||
type=str,
|
||||
help="Version of the database to upgrade/rollback to",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--game",
|
||||
"-g",
|
||||
type=str,
|
||||
help="Game code of the game who's schema will be updated/rolled back. Ex. SDFE",
|
||||
)
|
||||
parser.add_argument("--email", "-e", type=str, help="Email for the new user")
|
||||
parser.add_argument("--old_ac", "-o", type=str, help="Access code to transfer from")
|
||||
parser.add_argument("--new_ac", "-n", type=str, help="Access code to transfer to")
|
||||
parser.add_argument("--force", "-f", type=bool, help="Force the action to happen")
|
||||
parser.add_argument(
|
||||
"action", type=str, help="DB Action, create, recreate, upgrade, or rollback"
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
cfg = CoreConfig()
|
||||
cfg.update(yaml.safe_load(open(f"{args.config}/core.yaml")))
|
||||
if path.exists(f"{args.config}/core.yaml"):
|
||||
cfg_dict = yaml.safe_load(open(f"{args.config}/core.yaml"))
|
||||
cfg_dict.get("database", {})["loglevel"] = "info"
|
||||
cfg.update(cfg_dict)
|
||||
|
||||
if not path.exists(cfg.server.log_dir):
|
||||
mkdir(cfg.server.log_dir)
|
||||
|
||||
if not access(cfg.server.log_dir, W_OK):
|
||||
print(
|
||||
f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
|
||||
)
|
||||
exit(1)
|
||||
|
||||
data = Data(cfg)
|
||||
|
||||
if args.action == "create":
|
||||
data.create_database()
|
||||
|
||||
|
||||
elif args.action == "recreate":
|
||||
data.recreate_database()
|
||||
|
||||
elif args.action == "upgrade" or args.action == "rollback":
|
||||
if args.version is None:
|
||||
print("Must set game and version to migrate to")
|
||||
exit(0)
|
||||
data.logger.warn("No version set, upgrading to latest")
|
||||
|
||||
if args.game is None:
|
||||
print("No game set, upgrading core schema")
|
||||
data.migrate_database("CORE", int(args.version))
|
||||
data.logger.warn("No game set, upgrading core schema")
|
||||
data.migrate_database(
|
||||
"CORE",
|
||||
int(args.version) if args.version is not None else None,
|
||||
args.action,
|
||||
)
|
||||
|
||||
else:
|
||||
data.migrate_database(args.game, int(args.version), args.action)
|
||||
data.migrate_database(
|
||||
args.game,
|
||||
int(args.version) if args.version is not None else None,
|
||||
args.action,
|
||||
)
|
||||
|
||||
elif args.action == "autoupgrade":
|
||||
data.autoupgrade()
|
||||
|
||||
elif args.action == "create-owner":
|
||||
data.create_owner(args.email)
|
||||
|
||||
elif args.action == "migrate-card":
|
||||
data.migrate_card(args.old_ac, args.new_ac, args.force)
|
||||
|
||||
elif args.action == "cleanup":
|
||||
data.delete_hanging_users()
|
||||
|
||||
elif args.action == "migrate":
|
||||
print("Migrating from old schema to new schema")
|
||||
data.restore_from_old_schema()
|
||||
|
||||
elif args.action == "dump":
|
||||
print("Dumping old schema to migrate to new schema")
|
||||
data.dump_db()
|
||||
|
||||
elif args.action == "generate":
|
||||
pass
|
||||
elif args.action == "version":
|
||||
data.show_versions()
|
||||
|
||||
data.logger.info("Done")
|
||||
|
|
|
@ -0,0 +1,57 @@
|
|||
version: "3.9"
|
||||
services:
|
||||
app:
|
||||
hostname: ma.app
|
||||
build: .
|
||||
volumes:
|
||||
- ./aime:/app/aime
|
||||
|
||||
environment:
|
||||
CFG_DEV: 1
|
||||
CFG_CORE_SERVER_HOSTNAME: 0.0.0.0
|
||||
CFG_CORE_DATABASE_HOST: ma.db
|
||||
CFG_CORE_MEMCACHED_HOSTNAME: ma.memcached
|
||||
CFG_CORE_AIMEDB_KEY: keyhere
|
||||
CFG_CHUNI_SERVER_LOGLEVEL: debug
|
||||
|
||||
ports:
|
||||
- "80:80"
|
||||
- "8443:8443"
|
||||
- "22345:22345"
|
||||
|
||||
- "8080:8080"
|
||||
- "8090:8090"
|
||||
|
||||
depends_on:
|
||||
db:
|
||||
condition: service_healthy
|
||||
|
||||
db:
|
||||
hostname: ma.db
|
||||
image: mysql:8.0.31-debian
|
||||
environment:
|
||||
MYSQL_DATABASE: aime
|
||||
MYSQL_USER: aime
|
||||
MYSQL_PASSWORD: aime
|
||||
MYSQL_ROOT_PASSWORD: AimeRootPassword
|
||||
healthcheck:
|
||||
test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"]
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
|
||||
memcached:
|
||||
hostname: ma.memcached
|
||||
image: memcached:1.6.17-bullseye
|
||||
|
||||
phpmyadmin:
|
||||
hostname: ma.phpmyadmin
|
||||
image: phpmyadmin:latest
|
||||
environment:
|
||||
PMA_HOSTS: ma.db
|
||||
PMA_USER: root
|
||||
PMA_PASSWORD: AimeRootPassword
|
||||
APACHE_PORT: 8080
|
||||
ports:
|
||||
- "8080:8080"
|
||||
|
|
@ -0,0 +1,129 @@
|
|||
# ARTEMiS - Ubuntu 20.04 LTS Guide
|
||||
This step-by-step guide assumes that you are using a fresh install of Ubuntu 20.04 LTS. Some of the steps can be skipped if you already have an installation with MySQL 5.7, or if some of the required modules are already present in your environment.
|
||||
|
||||
# Setup
|
||||
## Install memcached module
|
||||
1. sudo apt-get install memcached
|
||||
2. Under the file /etc/memcached.conf, please make sure the following parameters are set:
|
||||
|
||||
```
|
||||
# Start with a cap of 64 megs of memory. It's reasonable, and the daemon default
|
||||
# Note that the daemon will grow to this size, but does not start out holding this much
|
||||
# memory
|
||||
|
||||
-I 128m
|
||||
-m 1024
|
||||
```
|
||||
|
||||
** This is mandatory to avoid memcached overload caused by Crossbeats or by massive profiles
|
||||
|
||||
3. Restart memcached using: sudo systemctl restart memcached
|
||||
|
||||
## Install MySQL 5.7
|
||||
```
|
||||
sudo apt update
|
||||
sudo apt install wget -y
|
||||
wget https://dev.mysql.com/get/mysql-apt-config_0.8.12-1_all.deb
|
||||
sudo dpkg -i mysql-apt-config_0.8.12-1_all.deb
|
||||
```
|
||||
1. During the first prompt, select Ubuntu Bionic
|
||||
2. Select the default option
|
||||
3. Select MySQL 5.7
|
||||
4. Select the last option
|
||||
```
|
||||
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 467B942D3A79BD29
|
||||
sudo apt-get update
|
||||
sudo apt-cache policy mysql-server
|
||||
sudo apt install -f mysql-client=5.7* mysql-community-server=5.7* mysql-server=5.7*
|
||||
```
|
||||
|
||||
## Default Configuration for MySQL Server
|
||||
1. sudo mysql_secure_installation
|
||||
> Make sure to follow the steps that will be prompted such as changing the mysql root password and such
|
||||
|
||||
2. Test your MySQL Server login by doing the following command :
|
||||
> mysql -u root -p
|
||||
|
||||
## Create the default ARTEMiS database and user
|
||||
1. mysql -u root -p
|
||||
2. Please change the password indicated in the next line for a custom secure one and continue with the next commands
|
||||
|
||||
```
|
||||
CREATE USER 'aime'@'localhost' IDENTIFIED BY 'MyStrongPass.';
|
||||
CREATE DATABASE aime;
|
||||
GRANT Alter,Create,Delete,Drop,Index,Insert,References,Select,Update ON aime.* TO 'aime'@'localhost';
|
||||
FLUSH PRIVILEGES;
|
||||
exit;
|
||||
```
|
||||
|
||||
3. sudo systemctl restart mysql
|
||||
|
||||
## Install Python modules
|
||||
```
|
||||
sudo apt-get install python3-dev default-libmysqlclient-dev build-essential mysql-client libmysqlclient-dev libmemcached-dev
|
||||
sudo apt install libpython3.8-dev
|
||||
sudo apt-get install python3-software-properties
|
||||
sudo apt install python3-pip
|
||||
sudo pip3 install --upgrade pip testresources
|
||||
sudo pip3 install --upgrade pip setuptools
|
||||
sudo apt-get install python3-tk
|
||||
```
|
||||
7. Change your work path to the ARTEMiS root folder using 'cd' and install the requirements:
|
||||
> sudo python3 -m pip install -r requirements.txt
|
||||
|
||||
## Copy/Rename the folder example_config to config
|
||||
|
||||
## Adjust /config/core.yaml
|
||||
1. Make sure to change the server listen_address to be set to your local machine IP (ex.: 192.168.1.xxx)
|
||||
2. Adjust the proper MySQL information you created earlier
|
||||
3. Add the AimeDB key at the bottom of the file
|
||||
|
||||
## Create the database tables for ARTEMiS
|
||||
1. sudo python3 dbutils.py create
|
||||
|
||||
2. If you get "No module named Crypto", run the following command:
|
||||
```
|
||||
sudo pip uninstall crypto
|
||||
sudo pip uninstall pycrypto
|
||||
sudo pip install pycrypto
|
||||
```
|
||||
|
||||
## Firewall Adjustments
|
||||
```
|
||||
sudo ufw allow 80
|
||||
sudo ufw allow 443
|
||||
sudo ufw allow 8443
|
||||
sudo ufw allow 22345
|
||||
sudo ufw allow 8090
|
||||
sudo ufw allow 8444
|
||||
sudo ufw allow 8080
|
||||
```
|
||||
|
||||
## Running the ARTEMiS instance
|
||||
1. sudo python3 index.py
|
||||
|
||||
# Troubleshooting
|
||||
|
||||
## Game does not connect to ARTEMiS Allnet server
|
||||
1. Double-check your core.yaml: the listen_address is most likely either not bound to the proper IP, or the port is not open
|
||||
|
||||
## Game does not connect to Title Server
|
||||
1. Verify that your core.yaml is setup properly for both the server listen_address and title hostname
|
||||
2. Boot your game and verify that an AllNet response does show and if it does, attempt to open the URI that is shown under a browser such as Edge, Chrome & Firefox.
|
||||
3. If a page is shown, the server is working properly and if it doesn't, double check your port forwarding and also that you have entered the proper local IP under the Title hostname in core.yaml.
|
||||
|
||||
## Unhandled command under AimeDB
|
||||
1. Double check your AimeDB key under core.yaml, it is incorrect.
|
||||
|
||||
## Memcache failed, error 3
|
||||
1. Make sure memcached is properly installed and running. You can check the status of the service using the following command:
|
||||
> sudo systemctl status memcached
|
||||
2. If it is failing, double check the /etc/memcached.conf file, it may have duplicated arguments like the -I and -m
|
||||
3. If it is still not working afterward, you can proceed with a workaround by manually editing the /core/data/cache.py file.
|
||||
```
|
||||
# Make memcache optional
|
||||
try:
|
||||
has_mc = False
|
||||
except ModuleNotFoundError:
|
||||
has_mc = False
|
||||
```
|
|
@ -0,0 +1,83 @@
|
|||
# ARTEMiS - Windows 10/11 Guide
|
||||
This step-by-step guide assumes that you are using a fresh install of Windows 10/11 without MySQL installed, some of the steps can be skipped if you already have an installation with MySQL 8.0 or even some of the modules already present on your environment
|
||||
|
||||
# Setup
|
||||
## Install Python Python 3.9 (recommended) or 3.10
|
||||
1. Download Python 3.9 : [Link](https://www.python.org/ftp/python/3.9.13/python-3.9.13-amd64.exe)
|
||||
2. Install python-3.9.13-amd64.exe
|
||||
1. Select Customize installation
|
||||
2. Make sure that pip, tcl/tk, and the "Install for all users" options are checked, then hit Next
|
||||
3. Make sure that you enable "Create shortcuts for installed applications" and "Add Python to environment variables" and hit Install
|
||||
|
||||
## Install MySQL 8.0
|
||||
1. Download MySQL 8.0 Server : [Link](https://cdn.mysql.com//Downloads/MySQLInstaller/mysql-installer-web-community-8.0.31.0.msi)
|
||||
2. Install mysql-installer-web-community-8.0.31.0.msi
|
||||
1. Click on "Add ..." on the side
|
||||
2. Click on the "+" next to MySQL Servers
|
||||
3. Make sure MySQL Server 8.0.29 - X64 is under the products to be installed.
|
||||
4. Hit Next and Next once installed
|
||||
5. Select the configuration type "Development Computer"
|
||||
6. Hit Next
|
||||
7. Select "Use Legacy Authentication Method (Retain MySQL 5.x compatibility)" and hit Next
|
||||
8. Enter a root password and then hit Next >
|
||||
9. Leave everything under Windows Service as default and hit Next >
|
||||
10. Click on Execute, wait for it to finish, then hit Next > and Finish
|
||||
3. Open MySQL 8.0 Command Line Client and login as your root user
|
||||
4. Type those commands to create your user and the database
|
||||
```
|
||||
CREATE USER 'aime'@'localhost' IDENTIFIED BY 'MyStrongPass.';
|
||||
CREATE DATABASE aime;
|
||||
GRANT Alter,Create,Delete,Drop,Index,Insert,References,Select,Update ON aime.* TO 'aime'@'localhost';
|
||||
FLUSH PRIVILEGES;
|
||||
exit;
|
||||
```
|
||||
|
||||
## Install Python modules
|
||||
1. Change your work path to the artemis-master folder using 'cd' and install the requirements:
|
||||
> pip install -r requirements.txt
|
||||
|
||||
## Copy/Rename the folder example_config to config
|
||||
|
||||
## Adjust /config/core.yaml
|
||||
|
||||
1. Make sure to change the server listen_address to be set to your local machine IP (ex.: 192.168.1.xxx)
|
||||
- In case you want to run this only locally, set the following values:
|
||||
```
|
||||
server:
|
||||
listen_address: 0.0.0.0
|
||||
title:
|
||||
hostname: localhost
|
||||
```
|
||||
2. Adjust the proper MySQL information you created earlier
|
||||
3. Add the AimeDB key at the bottom of the file
|
||||
4. If the webui is needed, change the flag from False to True
|
||||
|
||||
## Create the database tables for ARTEMiS
|
||||
> python dbutils.py create
|
||||
|
||||
## Firewall Adjustments
|
||||
Make sure the following ports are open both on your router and local Windows firewall in case you want to use this for public use (NOT recommended):
|
||||
> Port 80 (TCP), 443 (TCP), 8443 (TCP), 22345 (TCP), 8080 (TCP), 8090 (TCP) **webui, 8444 (TCP) **mucha
|
||||
|
||||
## Running the ARTEMiS instance
|
||||
> python index.py
|
||||
|
||||
# Troubleshooting
|
||||
|
||||
## Game does not connect to ARTEMiS Allnet server
|
||||
1. Double-check your core.yaml: the listen_address is most likely either not bound to the proper IP, or the port is not open
|
||||
|
||||
## Game does not connect to Title Server
|
||||
1. Verify that your core.yaml is setup properly for both the server listen_address and title hostname
|
||||
2. Boot your game and verify that an AllNet response does show and if it does, attempt to open the URI that is shown under a browser such as Edge, Chrome & Firefox.
|
||||
3. If a page is shown, the server is working properly and if it doesn't, double check your port forwarding and also that you have entered the proper local IP under the Title hostname in core.yaml.
|
||||
|
||||
## Unhandled command under AimeDB
|
||||
1. Double check your AimeDB key under core.yaml, it is incorrect.
|
||||
|
||||
## AttributeError: module 'collections' has no attribute 'Hashable'
|
||||
1. This means the pyYAML module is obsolete, simply rerun pip with the -U (force update) flag, as shown below.
|
||||
- Change your work path to the artemis-master (or artemis-develop) folder using 'cd' and run the following commands:
|
||||
```
|
||||
pip install -r requirements.txt -U
|
||||
```
|
|
@ -5,6 +5,7 @@
|
|||
- `allow_unregistered_serials`: Allows games that do not have registered keychips to connect and authenticate. Disable to restrict who can connect to your server. Recomended to disable for production setups. Default `True`
|
||||
- `name`: Name for the server, used by some games in their default MOTDs. Default `ARTEMiS`
|
||||
- `is_develop`: Flags that the server is a development instance without a proxy standing in front of it. Setting to `False` tells the server not to listen for SSL, because the proxy should be handling all SSL-related things, among other things. Default `True`
|
||||
- `threading`: Flags that `reactor.run` should be called via the `Thread` standard library. May provide a speed boost, but removes the ability to kill the server via `Ctrl + C`. Default: `False`
|
||||
- `log_dir`: Directory to store logs. Server MUST have read and write permissions to this directory or you will have issues. Default `logs`
|
||||
## Title
|
||||
- `loglevel`: Logging level for the title server. Default `info`
|
||||
|
|
|
@ -0,0 +1,508 @@
|
|||
# ARTEMiS Games Documentation
|
||||
|
||||
Below are all supported games with supported version ids in order to use
|
||||
the corresponding importer and database upgrades.
|
||||
|
||||
**Important: The described database upgrades are only required if you are using an old database schema, f.e. still
|
||||
using the megaime database. Clean installations always create the latest database structure!**
|
||||
|
||||
# Table of Contents
|
||||
|
||||
- [Supported Games](#supported-games)
|
||||
- [CHUNITHM](#chunithm)
|
||||
- [crossbeats REV.](#crossbeats-rev)
|
||||
- [maimai DX](#maimai-dx)
|
||||
- [O.N.G.E.K.I.](#o-n-g-e-k-i)
|
||||
- [Card Maker](#card-maker)
|
||||
- [WACCA](#wacca)
|
||||
- [Sword Art Online Arcade](#sao)
|
||||
|
||||
|
||||
# Supported Games
|
||||
|
||||
Games listed below have been tested and confirmed working.
|
||||
|
||||
## CHUNITHM
|
||||
|
||||
### SDBT
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|-----------------------|
|
||||
| 0 | CHUNITHM |
|
||||
| 1 | CHUNITHM PLUS |
|
||||
| 2 | CHUNITHM AIR |
|
||||
| 3 | CHUNITHM AIR PLUS |
|
||||
| 4 | CHUNITHM STAR |
|
||||
| 5 | CHUNITHM STAR PLUS |
|
||||
| 6 | CHUNITHM AMAZON |
|
||||
| 7 | CHUNITHM AMAZON PLUS |
|
||||
| 8 | CHUNITHM CRYSTAL |
|
||||
| 9 | CHUNITHM CRYSTAL PLUS |
|
||||
| 10 | CHUNITHM PARADISE |
|
||||
|
||||
### SDHD/SDBT
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|---------------------|
|
||||
| 11 | CHUNITHM NEW!! |
|
||||
| 12 | CHUNITHM NEW PLUS!! |
|
||||
| 13 | CHUNITHM SUN |
|
||||
|
||||
|
||||
### Importer
|
||||
|
||||
In order to use the importer locate your game installation folder and execute:
|
||||
|
||||
```shell
|
||||
python read.py --series SDBT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
|
||||
```
|
||||
|
||||
The importer for Chunithm will import: Events, Music, Charge Items and Avatar Accessories.
|
||||
|
||||
### Database upgrade
|
||||
|
||||
Always make sure your database (tables) are up-to-date, to do so go to the `core/data/schema/versions` folder and see
|
||||
which version is the latest, f.e. `SDBT_4_upgrade.sql`. In order to upgrade to version 4 in this case you need to
|
||||
perform all previous updates as well:
|
||||
|
||||
```shell
|
||||
python dbutils.py --game SDBT upgrade
|
||||
```
|
||||
|
||||
### Online Battle
|
||||
|
||||
**Only matchmaking (with your imaginary friends) is supported! Online Battle does not (yet?) work!**
|
||||
|
||||
The first person to start the Online Battle (now called host) will create a "matching room" with a given `roomId`, after that max 3 other people can join the created room.
|
||||
Non used slots during the matchmaking will be filled with CPUs after the timer runs out.
|
||||
As soon as a new member will join the room the timer will jump back to 60 secs again.
|
||||
Sending those 4 messages to all other users is also working properly.
|
||||
In order to use the Online Battle every user needs the same ICF, same rom version and same data version!
|
||||
If a room is full a new room will be created if another user starts an Online Battle.
|
||||
After a failed Online Battle the room will be deleted. The host is used for the timer countdown, so if the connection failes to the host the timer will stop and could create a "frozen" state.
|
||||
|
||||
#### Information/Problems:
|
||||
|
||||
- Online Battle uses UDP hole punching and opens port 50201?
|
||||
- `reflectorUri` seems related to that?
|
||||
- Timer countdown should be handled globally and not by one user
|
||||
- Game can freeze or can crash if someone (especially the host) leaves the matchmaking
|
||||
|
||||
|
||||
## crossbeats REV.
|
||||
|
||||
### SDCA
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|------------------------------------|
|
||||
| 0 | crossbeats REV. |
|
||||
| 1 | crossbeats REV. SUNRISE |
|
||||
| 2 | crossbeats REV. SUNRISE S2 |
|
||||
| 3 | crossbeats REV. SUNRISE S2 Omnimix |
|
||||
|
||||
### Importer
|
||||
|
||||
In order to use the importer you need to use the provided `Export.csv` file:
|
||||
|
||||
```shell
|
||||
python read.py --series SDCA --version <version ID> --binfolder titles/cxb/data
|
||||
```
|
||||
|
||||
The importer for crossbeats REV. will import Music.
|
||||
|
||||
### Config
|
||||
|
||||
Config file is located in `config/cxb.yaml`.
|
||||
|
||||
| Option | Info |
|
||||
|------------------------|------------------------------------------------------------|
|
||||
| `hostname` | Requires a proper `hostname` (not localhost!) to run |
|
||||
| `ssl_enable` | Enables/Disables the use of the `ssl_cert` and `ssl_key` |
|
||||
| `port` | Set your unsecure port number |
|
||||
| `port_secure` | Set your secure/SSL port number |
|
||||
| `ssl_cert`, `ssl_key` | Enter your SSL certificate (requires not self signed cert) |
|
||||
|
||||
|
||||
## maimai DX
|
||||
|
||||
### SDEZ
|
||||
|
||||
| Game Code | Version ID | Version Name |
|
||||
|-----------|------------|-------------------------|
|
||||
|
||||
|
||||
For versions pre-dx
|
||||
| Game Code | Version ID | Version Name |
|
||||
|-----------|------------|-------------------------|
|
||||
| SBXL | 0 | maimai |
|
||||
| SBXL | 1 | maimai PLUS |
|
||||
| SBZF | 2 | maimai GreeN |
|
||||
| SBZF | 3 | maimai GreeN PLUS |
|
||||
| SDBM | 4 | maimai ORANGE |
|
||||
| SDBM | 5 | maimai ORANGE PLUS |
|
||||
| SDCQ | 6 | maimai PiNK |
|
||||
| SDCQ | 7 | maimai PiNK PLUS |
|
||||
| SDDK | 8 | maimai MURASAKI |
|
||||
| SDDK | 9 | maimai MURASAKI PLUS |
|
||||
| SDDZ | 10 | maimai MILK |
|
||||
| SDDZ | 11 | maimai MILK PLUS |
|
||||
| SDEY | 12 | maimai FiNALE |
|
||||
| SDEZ | 13 | maimai DX |
|
||||
| SDEZ | 14 | maimai DX PLUS |
|
||||
| SDEZ | 15 | maimai DX Splash |
|
||||
| SDEZ | 16 | maimai DX Splash PLUS |
|
||||
| SDEZ | 17 | maimai DX Universe |
|
||||
| SDEZ | 18 | maimai DX Universe PLUS |
|
||||
| SDEZ | 19 | maimai DX Festival |
|
||||
|
||||
### Importer
|
||||
|
||||
In order to use the importer locate your game installation folder and execute:
|
||||
DX:
|
||||
```shell
|
||||
python read.py --series <Game Code> --version <Version ID> --binfolder /path/to/StreamingAssets --optfolder /path/to/game/option/folder
|
||||
```
|
||||
Pre-DX:
|
||||
```shell
|
||||
python read.py --series <Game Code> --version <Version ID> --binfolder /path/to/data --optfolder /path/to/patch/data
|
||||
```
|
||||
The importer for maimai DX will import Events, Music and Tickets.
|
||||
|
||||
The importer for maimai Pre-DX will import Events and Music. Not all games will have patch data. Milk - Finale have file encryption, and need an AES key. That key is not provided by the developers. For games that do use encryption, provide the key, as a hex string, with the `--extra` flag. Ex `--extra 00112233445566778899AABBCCDDEEFF`
|
||||
|
||||
**Important: It is required to use the importer because some games may not function properly or even crash without Events!**
|
||||
|
||||
### Database upgrade
|
||||
|
||||
Always make sure your database (tables) are up-to-date, to do so go to the `core/data/schema/versions` folder and see which version is the latest, f.e. `SDEZ_2_upgrade.sql`. In order to upgrade to version 2 in this case you need to perform all previous updates as well:
|
||||
|
||||
```shell
|
||||
python dbutils.py --game SDEZ upgrade
|
||||
```
|
||||
Pre-DX uses the same database as DX, so only upgrade using the SDEZ game code!
|
||||
|
||||
## Hatsune Miku Project Diva
|
||||
|
||||
### SBZV
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|---------------------------------|
|
||||
| 0 | Project Diva Arcade |
|
||||
| 1 | Project Diva Arcade Future Tone |
|
||||
|
||||
|
||||
### Importer
|
||||
|
||||
In order to use the importer locate your game installation folder and execute:
|
||||
|
||||
```shell
|
||||
python read.py --series SBZV --version <version ID> --binfolder /path/to/game/data/diva --optfolder /path/to/game/data/diva/mdata
|
||||
```
|
||||
|
||||
The importer for Project Diva Arcade will import all required data in order to use
|
||||
the Shop, Modules and Customizations.
|
||||
|
||||
### Config
|
||||
|
||||
Config file is located in `config/diva.yaml`.
|
||||
|
||||
| Option | Info |
|
||||
|----------------------|-------------------------------------------------------------------------------------------------|
|
||||
| `unlock_all_modules` | Unlocks all modules (costumes) by default, if set to `False` all modules need to be purchased |
|
||||
| `unlock_all_items` | Unlocks all items (customizations) by default, if set to `False` all items need to be purchased |
|
||||
|
||||
|
||||
### Database upgrade
|
||||
|
||||
Always make sure your database (tables) are up-to-date, to do so go to the `core/data/schema/versions` folder and see
|
||||
which version is the latest, f.e. `SBZV_4_upgrade.sql`. In order to upgrade to version 4 in this case you need to
|
||||
perform all previous updates as well:
|
||||
|
||||
```shell
|
||||
python dbutils.py --game SBZV upgrade
|
||||
```
|
||||
|
||||
## O.N.G.E.K.I.
|
||||
|
||||
### SDDT
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|----------------------------|
|
||||
| 0 | O.N.G.E.K.I. |
|
||||
| 1 | O.N.G.E.K.I. + |
|
||||
| 2 | O.N.G.E.K.I. Summer |
|
||||
| 3 | O.N.G.E.K.I. Summer + |
|
||||
| 4 | O.N.G.E.K.I. Red |
|
||||
| 5 | O.N.G.E.K.I. Red + |
|
||||
| 6 | O.N.G.E.K.I. Bright |
|
||||
| 7 | O.N.G.E.K.I. Bright Memory |
|
||||
|
||||
|
||||
### Importer
|
||||
|
||||
In order to use the importer locate your game installation folder and execute:
|
||||
|
||||
```shell
|
||||
python read.py --series SDDT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
|
||||
```
|
||||
|
||||
The importer for O.N.G.E.K.I. will import all Cards, Music and Events.
|
||||
|
||||
**NOTE: The Importer is required for Card Maker.**
|
||||
|
||||
### Config
|
||||
|
||||
Config file is located in `config/ongeki.yaml`.
|
||||
|
||||
| Option | Info |
|
||||
|------------------|----------------------------------------------------------------------------------------------------------------|
|
||||
| `enabled_gachas` | Enter all gacha IDs for Card Maker to work, other than default may not work due to missing cards added to them |
|
||||
|
||||
Note: 1149 and higher are only for Card Maker 1.35 and higher and will be ignored on lower versions.
|
||||
|
||||
### Database upgrade
|
||||
|
||||
Always make sure your database (tables) are up-to-date, to do so go to the `core/data/schema/versions` folder and see
|
||||
which version is the latest, f.e. `SDDT_4_upgrade.sql`. In order to upgrade to version 4 in this case you need to
|
||||
perform all previous updates as well:
|
||||
|
||||
```shell
|
||||
python dbutils.py --game SDDT upgrade
|
||||
```
|
||||
|
||||
## Card Maker
|
||||
|
||||
### SDED
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|-----------------|
|
||||
| 0 | Card Maker 1.30 |
|
||||
| 1 | Card Maker 1.35 |
|
||||
|
||||
|
||||
### Support status
|
||||
|
||||
* Card Maker 1.30:
|
||||
* CHUNITHM NEW!!: Yes
|
||||
* maimai DX UNiVERSE: Yes
|
||||
* O.N.G.E.K.I. Bright: Yes
|
||||
|
||||
* Card Maker 1.35:
|
||||
* CHUNITHM SUN: Yes (NEW PLUS!! up to A032)
|
||||
* maimai DX FESTiVAL: Yes (up to A035) (UNiVERSE PLUS up to A031)
|
||||
* O.N.G.E.K.I. Bright Memory: Yes
|
||||
|
||||
|
||||
### Importer
|
||||
|
||||
In order to use the importer you need to use the provided `.csv` files (which are required for O.N.G.E.K.I.) and the
|
||||
option folders:
|
||||
|
||||
```shell
|
||||
python read.py --series SDED --version <version ID> --binfolder titles/cm/cm_data --optfolder /path/to/cardmaker/option/folder
|
||||
```
|
||||
|
||||
**If you haven't already executed the O.N.G.E.K.I. importer, make sure you import all cards!**
|
||||
|
||||
```shell
|
||||
python read.py --series SDDT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
|
||||
```
|
||||
|
||||
Also make sure to import all maimai DX and CHUNITHM data as well:
|
||||
|
||||
```shell
|
||||
python read.py --series SDED --version <version ID> --binfolder /path/to/cardmaker/CardMaker_Data
|
||||
```
|
||||
|
||||
The importer for Card Maker will import all required Gachas (Banners) and cards (for maimai DX/CHUNITHM) and the hardcoded
|
||||
Cards for each Gacha (O.N.G.E.K.I. only).
|
||||
|
||||
**NOTE: Without executing the importer Card Maker WILL NOT work!**
|
||||
|
||||
|
||||
### Config setup
|
||||
|
||||
Make sure to update your `config/cardmaker.yaml` with the correct version for each game. To get the current version required to run a specific game, open every opt (Axxx) folder descending until you find all three folders:
|
||||
|
||||
- `MU3`: O.N.G.E.K.I.
|
||||
- `MAI`: maimai DX
|
||||
- `CHU`: CHUNITHM
|
||||
|
||||
Inside each folder is a `DataConfig.xml` file, for example:
|
||||
|
||||
`MU3/DataConfig.xml`:
|
||||
```xml
|
||||
<cardMakerVersion>
|
||||
<major>1</major>
|
||||
<minor>35</minor>
|
||||
<release>3</release>
|
||||
</cardMakerVersion>
|
||||
```
|
||||
|
||||
Now update your `config/cardmaker.yaml` with the correct version number, for example:
|
||||
|
||||
```yaml
|
||||
version:
|
||||
1: # Card Maker 1.35
|
||||
ongeki: 1.35.03
|
||||
```
|
||||
|
||||
### O.N.G.E.K.I.
|
||||
|
||||
Gacha "無料ガチャ" can only pull from the free cards with the following probabilities: 94%: R, 5% SR and 1% chance of
|
||||
getting an SSR card
|
||||
|
||||
Gacha "無料ガチャ(SR確定)" can only pull from free SR cards with prob: 92% SR and 8% chance of getting an SSR card
|
||||
|
||||
Gacha "レギュラーガチャ" can pull from every card added to ongeki_static_cards with the following prob: 77% R, 20% SR
|
||||
and 3% chance of getting an SSR card
|
||||
|
||||
All other (limited) gachas can pull from every card added to ongeki_static_cards but with the promoted cards
|
||||
(click on the green button under the banner) having a 10 times higher chance to get pulled
|
||||
|
||||
### CHUNITHM
|
||||
|
||||
All cards in CHUNITHM (basically just the characters) have the same rarity, so it just pulls randomly from all cards
|
||||
from a given gacha, but makes sure you cannot pull the same card twice in the same 5-pull gacha roll.
|
||||
|
||||
### maimai DX
|
||||
|
||||
Printed maimai DX cards: Freedom (`cardTypeId=6`) or Gold Pass (`cardTypeId=4`) can now be selected during the login process. You can only have ONE Freedom and ONE Gold Pass active at a given time. The cards will expire after 15 days.
|
||||
|
||||
Thanks GetzeAvenue for the `selectedCardList` rarity hint!
|
||||
|
||||
### Notes
|
||||
|
||||
Card Maker 1.30-1.34 will only load an O.N.G.E.K.I. Bright profile (1.30). Card Maker 1.35+ will only load an O.N.G.E.K.I.
|
||||
Bright Memory profile (1.35).
|
||||
The gachas inside the `config/ongeki.yaml` will make sure only the right gacha ids for the right CM version will be loaded.
|
||||
Gacha IDs up to 1140 will be loaded for CM 1.34 and all gachas will be loaded for CM 1.35.
|
||||
|
||||
## WACCA
|
||||
|
||||
### SDFE
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|---------------|
|
||||
| 0 | WACCA |
|
||||
| 1 | WACCA S |
|
||||
| 2 | WACCA Lily |
|
||||
| 3 | WACCA Lily R |
|
||||
| 4 | WACCA Reverse |
|
||||
|
||||
|
||||
### Importer
|
||||
|
||||
In order to use the importer locate your game installation folder and execute:
|
||||
|
||||
```shell
|
||||
python read.py --series SDFE --version <version ID> --binfolder /path/to/game/WindowsNoEditor/Mercury/Content
|
||||
```
|
||||
|
||||
The importer for WACCA will import all Music data.
|
||||
|
||||
### Config
|
||||
|
||||
Config file is located in `config/wacca.yaml`.
|
||||
|
||||
| Option | Info |
|
||||
|--------------------|-----------------------------------------------------------------------------|
|
||||
| `always_vip` | Enables/Disables VIP, if disabled it needs to be purchased manually in game |
|
||||
| `infinite_tickets` | Always set the "unlock expert" tickets to 5 |
|
||||
| `infinite_wp` | Sets the user WP to `999999` |
|
||||
| `enabled_gates` | Enter all gate IDs which should be enabled in game |
|
||||
|
||||
|
||||
### Database upgrade
|
||||
|
||||
Always make sure your database (tables) are up-to-date, to do so go to the `core/data/schema/versions` folder and see which version is the latest, f.e. `SDFE_3_upgrade.sql`. In order to upgrade to version 3 in this case you need to perform all previous updates as well:
|
||||
|
||||
```shell
|
||||
python dbutils.py --game SDFE upgrade
|
||||
```
|
||||
|
||||
### VIP Rewards
|
||||
Below is a list of VIP rewards. Currently, VIP is not implemented, and thus these are not obtainable. These 24 rewards were distributed once per month for VIP users on the real network.
|
||||
|
||||
Plates:
|
||||
211004 リッチ
|
||||
211018 特盛えりざべす
|
||||
211025 イースター
|
||||
211026 特盛りりぃ
|
||||
311004 ファンシー
|
||||
311005 インカンテーション
|
||||
311014 夜明け
|
||||
311015 ネイビー
|
||||
311016 特盛るーん
|
||||
|
||||
Ring Colors:
|
||||
203002 Gold Rushイエロー
|
||||
203009 トロピカル
|
||||
303005 ネイチャー
|
||||
|
||||
Icons:
|
||||
202020 どらみんぐ
|
||||
202063 ユニコーン
|
||||
202086 ゴリラ
|
||||
302014 ローズ
|
||||
302015 ファラオ
|
||||
302045 肉球
|
||||
302046 WACCA
|
||||
302047 WACCA Lily
|
||||
302048 WACCA Reverse
|
||||
|
||||
Note Sound Effect:
|
||||
205002 テニス
|
||||
205008 シャワー
|
||||
305003 タンバリンMk-Ⅱ
|
||||
|
||||
## SAO
|
||||
|
||||
### SDEW
|
||||
|
||||
| Version ID | Version Name |
|
||||
|------------|---------------|
|
||||
| 0 | SAO |
|
||||
|
||||
|
||||
### Importer
|
||||
|
||||
In order to use the importer locate your game installation folder and execute:
|
||||
|
||||
```shell
|
||||
python read.py --series SDEW --version <version ID> --binfolder /path/to/game/extractedassets
|
||||
```
|
||||
|
||||
The importer for SAO will import all items, heroes, support skills and titles data.
|
||||
|
||||
### Config
|
||||
|
||||
Config file is located in `config/sao.yaml`.
|
||||
|
||||
| Option | Info |
|
||||
|--------------------|-----------------------------------------------------------------------------|
|
||||
| `hostname` | Changes the server listening address for Mucha |
|
||||
| `port`             | Changes the listening port                                                  |
|
||||
| `auto_register` | Allows the game to handle the automatic registration of new cards |
|
||||
|
||||
|
||||
### Database upgrade
|
||||
|
||||
Always make sure your database (tables) are up-to-date, to do so go to the `core/data/schema/versions` folder and see which version is the latest, f.e. `SDEW_1_upgrade.sql`. In order to upgrade to version 1 in this case you need to perform all previous updates as well:
|
||||
|
||||
```shell
|
||||
python dbutils.py --game SDEW upgrade
|
||||
```
|
||||
|
||||
### Notes
|
||||
- Defrag Match will crash at loading
|
||||
- Co-Op Online is not supported
|
||||
- Shop is not functional
|
||||
- Player title is currently static and cannot be changed in-game
|
||||
- QR Card Scanning currently only loads a static hero
|
||||
|
||||
### Credits for SAO support:
|
||||
|
||||
- Midorica - Limited Network Support
|
||||
- Dniel97 - Helping with network base
|
||||
- tungnotpunk - Source
|
|
@ -0,0 +1,41 @@
|
|||
# ARTEMiS Production mode
|
||||
Production mode is a configuration option that changes how the server listens to be more friendly to a production environment. This mode assumes that a proxy (for this guide, nginx) is standing in front of the server to handle port mapping and TLS. In order to activate production mode, simply change `is_develop` to `False` in `core.yaml`. Next time you start the server, you should see "Starting server in production mode".
|
||||
|
||||
## Nginx Configuration
|
||||
### Port forwarding
|
||||
Artemis requires that the following ports be forwarded to allow internet traffic to access the server. This will not change regardless of what you set in the config, as many of these ports are hard-coded in the games.
|
||||
`tcp:80` all.net, non-ssl titles
|
||||
`tcp:8443` billing
|
||||
`tcp:22345` aimedb
|
||||
`tcp:443` frontend, SSL titles
|
||||
|
||||
### A note about external proxy services (cloudflare, etc)
|
||||
Due to the way that artemis functions, it is currently not possible to put the server behind something like Cloudflare. Cloudflare only proxies web traffic on the standard ports (80, 443) and, as shown above, this does not work with artemis. Server administrators should seek other means to protect their network (VPS hosting, VPN, etc)
|
||||
|
||||
### SSL Certificates
|
||||
You will need to generate SSL certificates for some games. The certificates vary in security and validity requirements. Please see the general guide below
|
||||
- General Title: The certificate for the general title server should be valid, not self-signed and match the CN that the game will be reaching out to (i.e. if your games are reaching out to titles.hostname.here, your ssl certificate should be valid for titles.hostname.here, or *.hostname.here)
|
||||
- CXB: Same requirements as the title server. It must not be self-signed, and the CN must match. Recommended to get a wildcard cert if possible, and use it for both Title and CXB
|
||||
- Pokken: Pokken can be self-signed, and the CN doesn't have to match, but it MUST use 2048-bit RSA. Due to the game's age, anything stronger than that will be rejected.
|
||||
|
||||
### Port mappings
|
||||
An example config is provided in the `config` folder called `nginx_example.conf`. It is set up for the following:
|
||||
`naominet.jp:tcp:80` -> `localhost:tcp:8000` for allnet
|
||||
`ib.naominet.jp:ssl:8443` -> `localhost:tcp:8444` for the billing server
|
||||
`your.hostname.here:ssl:443` -> `localhost:tcp:8080` for the SSL title server
|
||||
`your.hostname.here:tcp:80` -> `localhost:tcp:8080` for the non-SSL title server
|
||||
`cxb.hostname.here:ssl:443` -> `localhost:tcp:8080` for crossbeats (appends /SDCA/104/ to the request)
|
||||
`pokken.hostname.here:ssl:443` -> `localhost:tcp:8080` for pokken
|
||||
`frontend.hostname.here:ssl:443` -> `localhost:tcp:8090` for the frontend, includes https redirection
|
||||
|
||||
If you're using this as a guide, be sure to replace your.hostname.here with the hostname you specified in core.yaml under `titles->hostname`. Do *not* change naominet.jp, or allnet/billing will fail. Also remember to specify certificate paths correctly, as in the example they are simply placeholders.
|
||||
|
||||
### Multi-service ports
|
||||
It is possible to use nginx to redirect billing and title server requests to the same port that all.net uses. By setting `port` to 0 under billing and title server, you can change the nginx config to serve the following (entries not shown here should be the same)
|
||||
`ib.naominet.jp:ssl:8443` -> `localhost:tcp:8000` for the billing server
|
||||
`your.hostname.here:ssl:443` -> `localhost:tcp:8000` for the SSL title server
|
||||
`your.hostname.here:tcp:80` -> `localhost:tcp:8000` for the non-SSL title server
|
||||
`cxb.hostname.here:ssl:443` -> `localhost:tcp:8000` for crossbeats (appends /SDCA/104/ to the request)
|
||||
`pokken.hostname.here:ssl:443` -> `localhost:tcp:8000` for pokken
|
||||
|
||||
This will allow you to only use 3 ports locally, but you will still need to forward the same internet-facing ports as before.
|
|
@ -0,0 +1,11 @@
|
|||
#!/bin/bash
# Container/server entrypoint: selects run mode based on the CFG_DEV
# environment variable.

if [[ -z "${CFG_DEV}" ]]; then
    # CFG_DEV unset or empty: production — run the server directly.
    echo Production mode
    python3 index.py
else
    # CFG_DEV set: development — (re)create the database tables first,
    # then run the server under nodemon, restarting when files in the
    # "aime" folder change (--legacy-watch polls; works on bind mounts).
    echo Development mode
    python3 dbutils.py create
    nodemon -w aime --legacy-watch index.py
fi
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
server:
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
|
||||
version:
|
||||
0:
|
||||
ongeki: 1.30.01
|
||||
chuni: 2.00.00
|
||||
maimai: 1.20.00
|
||||
1:
|
||||
ongeki: 1.35.03
|
||||
chuni: 2.10.00
|
||||
maimai: 1.30.00
|
|
@ -2,5 +2,22 @@ server:
|
|||
enable: True
|
||||
loglevel: "info"
|
||||
|
||||
team:
|
||||
name: ARTEMiS
|
||||
|
||||
mods:
|
||||
use_login_bonus: True
|
||||
|
||||
version:
|
||||
11:
|
||||
rom: 2.00.00
|
||||
data: 2.00.00
|
||||
12:
|
||||
rom: 2.05.00
|
||||
data: 2.05.00
|
||||
13:
|
||||
rom: 2.10.00
|
||||
data: 2.10.00
|
||||
|
||||
crypto:
|
||||
encrypted_only: False
|
|
@ -4,6 +4,7 @@ server:
|
|||
allow_unregistered_serials: True
|
||||
name: "ARTEMiS"
|
||||
is_develop: True
|
||||
threading: False
|
||||
log_dir: "logs"
|
||||
|
||||
title:
|
||||
|
@ -32,6 +33,7 @@ allnet:
|
|||
loglevel: "info"
|
||||
port: 80
|
||||
allow_online_updates: False
|
||||
update_cfg_folder: ""
|
||||
|
||||
billing:
|
||||
port: 8443
|
||||
|
@ -48,6 +50,3 @@ mucha:
|
|||
enable: False
|
||||
hostname: "localhost"
|
||||
loglevel: "info"
|
||||
port: 8444
|
||||
ssl_key: "cert/server.key"
|
||||
ssl_cert: "cert/server.pem"
|
||||
|
|
|
@ -2,3 +2,6 @@ server:
|
|||
enable: True
|
||||
loglevel: "info"
|
||||
|
||||
mods:
|
||||
unlock_all_modules: True
|
||||
unlock_all_items: True
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
server:
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
hostname: ""
|
||||
news: ""
|
||||
aes_key: ""
|
||||
|
||||
ports:
|
||||
userdb: 10000
|
||||
match: 10010
|
||||
echo: 10020
|
|
@ -1,3 +1,8 @@
|
|||
server:
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
|
||||
deliver:
|
||||
enable: False
|
||||
udbdl_enable: False
|
||||
content_folder: ""
|
|
@ -4,6 +4,8 @@ server {
|
|||
server_name naominet.jp;
|
||||
|
||||
location / {
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass_request_headers on;
|
||||
proxy_pass http://localhost:8000/;
|
||||
}
|
||||
}
|
||||
|
@ -14,11 +16,13 @@ server {
|
|||
server_name your.hostname.here;
|
||||
|
||||
location / {
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass_request_headers on;
|
||||
proxy_pass http://localhost:8080/;
|
||||
}
|
||||
}
|
||||
|
||||
# SSL titles
|
||||
# SSL titles, comment out if you don't plan on accepting SSL titles
|
||||
server {
|
||||
listen 443 ssl default_server;
|
||||
listen [::]:443 ssl default_server;
|
||||
|
@ -57,4 +61,86 @@ server {
|
|||
location / {
|
||||
proxy_pass http://localhost:8444/;
|
||||
}
|
||||
}
|
||||
|
||||
# Pokken, comment this out if you don't plan on serving pokken.
|
||||
server {
|
||||
listen 443 ssl;
|
||||
server_name pokken.hostname.here;
|
||||
|
||||
ssl_certificate /path/to/cert/pokken.pem;
|
||||
ssl_certificate_key /path/to/cert/pokken.key;
|
||||
ssl_session_timeout 1d;
|
||||
ssl_session_cache shared:MozSSL:10m;
|
||||
ssl_session_tickets off;
|
||||
|
||||
ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
|
||||
ssl_ciphers "ALL:@SECLEVEL=1";
|
||||
ssl_prefer_server_ciphers off;
|
||||
|
||||
location / {
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass_request_headers on;
|
||||
proxy_pass http://localhost:8080/;
|
||||
}
|
||||
}
|
||||
|
||||
# CXB, comment this out if you don't plan on serving crossbeats.
|
||||
server {
|
||||
listen 443 ssl;
|
||||
server_name cxb.hostname.here;
|
||||
|
||||
ssl_certificate /path/to/cert/cxb.pem;
|
||||
ssl_certificate_key /path/to/cert/cxb.key;
|
||||
ssl_session_timeout 1d;
|
||||
ssl_session_cache shared:MozSSL:10m;
|
||||
ssl_session_tickets off;
|
||||
|
||||
ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
|
||||
ssl_ciphers "ALL:@SECLEVEL=1";
|
||||
ssl_prefer_server_ciphers off;
|
||||
|
||||
location / {
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass_request_headers on;
|
||||
proxy_pass http://localhost:8080/SDCA/104/;
|
||||
}
|
||||
}
|
||||
|
||||
# Frontend, set to redirect to HTTPS. Comment out if you don't intend to use the frontend
|
||||
server {
|
||||
listen 80;
|
||||
server_name frontend.hostname.here;
|
||||
|
||||
location / {
|
||||
return 301 https://$host$request_uri;
|
||||
# If you don't want https redirection, comment the line above and uncomment the line below
|
||||
# proxy_pass http://localhost:8090/;
|
||||
}
|
||||
}
|
||||
|
||||
# Frontend HTTPS. Comment out if you don't intend to use the frontend
|
||||
server {
|
||||
listen 443 ssl;
|
||||
server_name frontend.hostname.here;
|
||||
|
||||
ssl_certificate /path/to/cert/frontend.pem;
|
||||
ssl_certificate_key /path/to/cert/frontend.key;
|
||||
ssl_session_timeout 1d;
|
||||
ssl_session_cache shared:MozSSL:10m; # about 40000 sessions
|
||||
ssl_session_tickets off;
|
||||
|
||||
# intermediate configuration
|
||||
ssl_protocols TLSv1.2 TLSv1.3;
|
||||
ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384;
|
||||
ssl_prefer_server_ciphers off;
|
||||
|
||||
# HSTS (ngx_http_headers_module is required) (63072000 seconds)
|
||||
add_header Strict-Transport-Security "max-age=63072000" always;
|
||||
|
||||
location / {
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass_request_headers on;
|
||||
proxy_pass http://localhost:8090/;
|
||||
}
|
||||
}
|
|
@ -1,3 +1,31 @@
|
|||
server:
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
|
||||
gachas:
|
||||
enabled_gachas:
|
||||
- 1011
|
||||
- 1012
|
||||
- 1043
|
||||
- 1067
|
||||
- 1068
|
||||
- 1069
|
||||
- 1070
|
||||
- 1071
|
||||
- 1072
|
||||
- 1073
|
||||
- 1074
|
||||
- 1075
|
||||
- 1076
|
||||
- 1077
|
||||
- 1081
|
||||
- 1085
|
||||
- 1089
|
||||
- 1104
|
||||
- 1111
|
||||
- 1135
|
||||
# can be used for Card Maker 1.35 and up, else will be ignored
|
||||
- 1149
|
||||
- 1156
|
||||
- 1163
|
||||
- 1164
|
||||
|
|
|
@ -1,9 +1,12 @@
|
|||
server:
|
||||
hostname: "localhost"
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
hostname: "localhost"
|
||||
ssl_enable: False
|
||||
port: 9000
|
||||
port_matching: 9001
|
||||
ssl_cert: cert/pokken.crt
|
||||
ssl_key: cert/pokken.key
|
||||
auto_register: True
|
||||
enable_matching: False
|
||||
stun_server_host: "stunserver.stunprotocol.org"
|
||||
stun_server_port: 3478
|
||||
|
||||
ports:
|
||||
game: 9000
|
||||
admission: 9001
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
server:
|
||||
hostname: "localhost"
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
port: 9000
|
||||
auto_register: True
|
|
@ -1,6 +1,7 @@
|
|||
server:
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
prefecture_name: "Hokkaido"
|
||||
|
||||
mods:
|
||||
always_vip: True
|
||||
|
@ -28,5 +29,8 @@ gates:
|
|||
- 17
|
||||
- 18
|
||||
- 19
|
||||
- 20
|
||||
- 21
|
||||
- 22
|
||||
- 23
|
||||
- 24
|
||||
|
|
301
index.py
301
index.py
|
@ -1,5 +1,8 @@
|
|||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import logging, coloredlogs
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from typing import Dict
|
||||
import yaml
|
||||
from os import path, mkdir, access, W_OK
|
||||
from core import *
|
||||
|
@ -8,6 +11,7 @@ from twisted.web import server, resource
|
|||
from twisted.internet import reactor, endpoints
|
||||
from twisted.web.http import Request
|
||||
from routes import Mapper
|
||||
from threading import Thread
|
||||
|
||||
class HttpDispatcher(resource.Resource):
|
||||
def __init__(self, cfg: CoreConfig, config_dir: str):
|
||||
|
@ -16,118 +20,277 @@ class HttpDispatcher(resource.Resource):
|
|||
self.isLeaf = True
|
||||
self.map_get = Mapper()
|
||||
self.map_post = Mapper()
|
||||
|
||||
self.logger = logging.getLogger("core")
|
||||
|
||||
self.allnet = AllnetServlet(cfg, config_dir)
|
||||
self.title = TitleServlet(cfg, config_dir)
|
||||
self.mucha = MuchaServlet(cfg)
|
||||
self.mucha = MuchaServlet(cfg, config_dir)
|
||||
|
||||
self.map_post.connect('allnet_poweron', '/sys/servlet/PowerOn', controller="allnet", action='handle_poweron', conditions=dict(method=['POST']))
|
||||
self.map_post.connect('allnet_downloadorder', '/sys/servlet/DownloadOrder', controller="allnet", action='handle_dlorder', conditions=dict(method=['POST']))
|
||||
self.map_post.connect('allnet_billing', '/request', controller="allnet", action='handle_billing_request', conditions=dict(method=['POST']))
|
||||
self.map_get.connect(
|
||||
"allnet_downloadorder_ini",
|
||||
"/dl/ini/{file}",
|
||||
controller="allnet",
|
||||
action="handle_dlorder_ini",
|
||||
conditions=dict(method=["GET"]),
|
||||
)
|
||||
|
||||
self.map_post.connect('mucha_boardauth', '/mucha/boardauth.do', controller="mucha", action='handle_boardauth', conditions=dict(method=['POST']))
|
||||
self.map_post.connect('mucha_updatacheck', '/mucha/updatacheck.do', controller="mucha", action='handle_updatacheck', conditions=dict(method=['POST']))
|
||||
self.map_post.connect(
|
||||
"allnet_downloadorder_report",
|
||||
"/dl/report",
|
||||
controller="allnet",
|
||||
action="handle_dlorder_report",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
|
||||
self.map_get.connect("title_get", "/{game}/{version}/{endpoint:.*?}", controller="title", action="render_GET", requirements=dict(game=R"S..."))
|
||||
self.map_post.connect("title_post", "/{game}/{version}/{endpoint:.*?}", controller="title", action="render_POST", requirements=dict(game=R"S..."))
|
||||
self.map_get.connect(
|
||||
"allnet_ping",
|
||||
"/naomitest.html",
|
||||
controller="allnet",
|
||||
action="handle_naomitest",
|
||||
conditions=dict(method=["GET"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_poweron",
|
||||
"/sys/servlet/PowerOn",
|
||||
controller="allnet",
|
||||
action="handle_poweron",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_downloadorder",
|
||||
"/sys/servlet/DownloadOrder",
|
||||
controller="allnet",
|
||||
action="handle_dlorder",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_loaderstaterecorder",
|
||||
"/sys/servlet/LoaderStateRecorder",
|
||||
controller="allnet",
|
||||
action="handle_loaderstaterecorder",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_alive",
|
||||
"/sys/servlet/Alive",
|
||||
controller="allnet",
|
||||
action="handle_alive",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_get.connect(
|
||||
"allnet_alive",
|
||||
"/sys/servlet/Alive",
|
||||
controller="allnet",
|
||||
action="handle_alive",
|
||||
conditions=dict(method=["GET"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_billing",
|
||||
"/request",
|
||||
controller="allnet",
|
||||
action="handle_billing_request",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_billing",
|
||||
"/request/",
|
||||
controller="allnet",
|
||||
action="handle_billing_request",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
|
||||
def render_POST(self, request: Request) -> bytes:
|
||||
self.map_post.connect(
|
||||
"mucha_boardauth",
|
||||
"/mucha/boardauth.do",
|
||||
controller="mucha",
|
||||
action="handle_boardauth",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"mucha_updatacheck",
|
||||
"/mucha/updatacheck.do",
|
||||
controller="mucha",
|
||||
action="handle_updatecheck",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"mucha_dlstate",
|
||||
"/mucha/downloadstate.do",
|
||||
controller="mucha",
|
||||
action="handle_dlstate",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
|
||||
self.map_get.connect(
|
||||
"title_get",
|
||||
"/{game}/{version}/{endpoint:.*?}",
|
||||
controller="title",
|
||||
action="render_GET",
|
||||
conditions=dict(method=["GET"]),
|
||||
requirements=dict(game=R"S..."),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"title_post",
|
||||
"/{game}/{version}/{endpoint:.*?}",
|
||||
controller="title",
|
||||
action="render_POST",
|
||||
conditions=dict(method=["POST"]),
|
||||
requirements=dict(game=R"S..."),
|
||||
)
|
||||
|
||||
def render_GET(self, request: Request) -> bytes:
|
||||
test = self.map_get.match(request.uri.decode())
|
||||
if test is None:
|
||||
return b""
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
controller = getattr(self, test["controller"], None)
|
||||
if test is None:
|
||||
self.logger.debug(
|
||||
f"Unknown GET endpoint {request.uri.decode()} from {client_ip} to port {request.getHost().port}"
|
||||
)
|
||||
request.setResponseCode(404)
|
||||
return b"Endpoint not found."
|
||||
|
||||
return self.dispatch(test, request)
|
||||
|
||||
def render_POST(self, request: Request) -> bytes:
|
||||
test = self.map_post.match(request.uri.decode())
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
if test is None:
|
||||
self.logger.debug(
|
||||
f"Unknown POST endpoint {request.uri.decode()} from {client_ip} to port {request.getHost().port}"
|
||||
)
|
||||
request.setResponseCode(404)
|
||||
return b"Endpoint not found."
|
||||
|
||||
return self.dispatch(test, request)
|
||||
|
||||
def dispatch(self, matcher: Dict, request: Request) -> bytes:
|
||||
controller = getattr(self, matcher["controller"], None)
|
||||
if controller is None:
|
||||
return b""
|
||||
|
||||
handler = getattr(controller, test["action"], None)
|
||||
self.logger.error(
|
||||
f"Controller {matcher['controller']} not found via endpoint {request.uri.decode()}"
|
||||
)
|
||||
request.setResponseCode(404)
|
||||
return b"Endpoint not found."
|
||||
|
||||
handler = getattr(controller, matcher["action"], None)
|
||||
if handler is None:
|
||||
return b""
|
||||
|
||||
url_vars = test
|
||||
self.logger.error(
|
||||
f"Action {matcher['action']} not found in controller {matcher['controller']} via endpoint {request.uri.decode()}"
|
||||
)
|
||||
request.setResponseCode(404)
|
||||
return b"Endpoint not found."
|
||||
|
||||
url_vars = matcher
|
||||
url_vars.pop("controller")
|
||||
url_vars.pop("action")
|
||||
|
||||
if len(url_vars) > 0:
|
||||
ret = handler(request, url_vars)
|
||||
else:
|
||||
ret = handler(request)
|
||||
|
||||
if type(ret) == str:
|
||||
return ret.encode()
|
||||
elif type(ret) == bytes:
|
||||
return ret
|
||||
else:
|
||||
return b""
|
||||
|
||||
def render_POST(self, request: Request) -> bytes:
|
||||
test = self.map_post.match(request.uri.decode())
|
||||
if test is None:
|
||||
return b""
|
||||
|
||||
controller = getattr(self, test["controller"], None)
|
||||
if controller is None:
|
||||
return b""
|
||||
|
||||
handler = getattr(controller, test["action"], None)
|
||||
if handler is None:
|
||||
return b""
|
||||
|
||||
url_vars = test
|
||||
url_vars.pop("controller")
|
||||
url_vars.pop("action")
|
||||
ret = handler(request, url_vars)
|
||||
|
||||
|
||||
if type(ret) == str:
|
||||
return ret.encode()
|
||||
elif type(ret) == bytes:
|
||||
|
||||
elif type(ret) == bytes or type(ret) == tuple: # allow for bytes or tuple (data, response code) responses
|
||||
return ret
|
||||
else:
|
||||
|
||||
elif ret is None:
|
||||
self.logger.warn(f"None returned by controller for {request.uri.decode()} endpoint")
|
||||
return b""
|
||||
|
||||
else:
|
||||
self.logger.warn(f"Unknown data type returned by controller for {request.uri.decode()} endpoint")
|
||||
return b""
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description="ARTEMiS main entry point")
|
||||
parser.add_argument("--config", "-c", type=str, default="config", help="Configuration folder")
|
||||
parser.add_argument(
|
||||
"--config", "-c", type=str, default="config", help="Configuration folder"
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
if not path.exists(f"{args.config}/core.yaml"):
|
||||
print(f"The config folder you specified ({args.config}) does not exist or does not contain core.yaml.\nDid you copy the example folder?")
|
||||
print(
|
||||
f"The config folder you specified ({args.config}) does not exist or does not contain core.yaml.\nDid you copy the example folder?"
|
||||
)
|
||||
exit(1)
|
||||
|
||||
cfg: CoreConfig = CoreConfig()
|
||||
cfg.update(yaml.safe_load(open(f"{args.config}/core.yaml")))
|
||||
if path.exists(f"{args.config}/core.yaml"):
|
||||
cfg.update(yaml.safe_load(open(f"{args.config}/core.yaml")))
|
||||
|
||||
if not path.exists(cfg.server.log_dir):
|
||||
mkdir(cfg.server.log_dir)
|
||||
|
||||
|
||||
if not access(cfg.server.log_dir, W_OK):
|
||||
print(f"Log directory {cfg.server.log_dir} NOT writable, please check permissions")
|
||||
print(
|
||||
f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
|
||||
)
|
||||
exit(1)
|
||||
|
||||
logger = logging.getLogger("core")
|
||||
log_fmt_str = "[%(asctime)s] Core | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(cfg.server.log_dir, "core"), when="d", backupCount=10
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
logger.addHandler(fileHandler)
|
||||
logger.addHandler(consoleHandler)
|
||||
|
||||
log_lv = logging.DEBUG if cfg.server.is_develop else logging.INFO
|
||||
logger.setLevel(log_lv)
|
||||
coloredlogs.install(level=log_lv, logger=logger, fmt=log_fmt_str)
|
||||
|
||||
if not cfg.aimedb.key:
|
||||
print("!!AIMEDB KEY BLANK, SET KEY IN CORE.YAML!!")
|
||||
logger.error("!!AIMEDB KEY BLANK, SET KEY IN CORE.YAML!!")
|
||||
exit(1)
|
||||
|
||||
print(f"ARTEMiS starting in {'develop' if cfg.server.is_develop else 'production'} mode")
|
||||
|
||||
allnet_server_str = f"tcp:{cfg.allnet.port}:interface={cfg.server.listen_address}"
|
||||
logger.info(
|
||||
f"ARTEMiS starting in {'develop' if cfg.server.is_develop else 'production'} mode"
|
||||
)
|
||||
|
||||
allnet_server_str = f"tcp:{cfg.allnet.port}:interface={cfg.server.listen_address}"
|
||||
title_server_str = f"tcp:{cfg.title.port}:interface={cfg.server.listen_address}"
|
||||
adb_server_str = f"tcp:{cfg.aimedb.port}:interface={cfg.server.listen_address}"
|
||||
frontend_server_str = (
|
||||
f"tcp:{cfg.frontend.port}:interface={cfg.server.listen_address}"
|
||||
)
|
||||
|
||||
billing_server_str = f"tcp:{cfg.billing.port}:interface={cfg.server.listen_address}"
|
||||
if cfg.server.is_develop:
|
||||
billing_server_str = f"ssl:{cfg.billing.port}:interface={cfg.server.listen_address}"\
|
||||
billing_server_str = (
|
||||
f"ssl:{cfg.billing.port}:interface={cfg.server.listen_address}"
|
||||
f":privateKey={cfg.billing.ssl_key}:certKey={cfg.billing.ssl_cert}"
|
||||
|
||||
)
|
||||
|
||||
dispatcher = HttpDispatcher(cfg, args.config)
|
||||
|
||||
endpoints.serverFromString(reactor, allnet_server_str).listen(server.Site(dispatcher))
|
||||
endpoints.serverFromString(reactor, allnet_server_str).listen(
|
||||
server.Site(dispatcher)
|
||||
)
|
||||
endpoints.serverFromString(reactor, adb_server_str).listen(AimedbFactory(cfg))
|
||||
|
||||
if cfg.frontend.enable:
|
||||
endpoints.serverFromString(reactor, frontend_server_str).listen(
|
||||
server.Site(FrontendServlet(cfg, args.config))
|
||||
)
|
||||
|
||||
if cfg.billing.port > 0:
|
||||
endpoints.serverFromString(reactor, billing_server_str).listen(server.Site(dispatcher))
|
||||
|
||||
if cfg.title.port > 0:
|
||||
endpoints.serverFromString(reactor, title_server_str).listen(server.Site(dispatcher))
|
||||
|
||||
reactor.run() # type: ignore
|
||||
endpoints.serverFromString(reactor, billing_server_str).listen(
|
||||
server.Site(dispatcher)
|
||||
)
|
||||
|
||||
if cfg.title.port > 0:
|
||||
endpoints.serverFromString(reactor, title_server_str).listen(
|
||||
server.Site(dispatcher)
|
||||
)
|
||||
|
||||
if cfg.server.threading:
|
||||
Thread(target=reactor.run, args=(False,)).start()
|
||||
else:
|
||||
reactor.run()
|
||||
|
|
88
read.py
88
read.py
|
@ -3,65 +3,73 @@ import argparse
|
|||
import re
|
||||
import os
|
||||
import yaml
|
||||
import importlib
|
||||
import logging, coloredlogs
|
||||
from os import path
|
||||
import logging
|
||||
import coloredlogs
|
||||
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from typing import List, Optional
|
||||
|
||||
from core import CoreConfig
|
||||
from core.utils import Utils
|
||||
from core import CoreConfig, Utils
|
||||
|
||||
class BaseReader():
|
||||
def __init__(self, config: CoreConfig, version: int, bin_dir: Optional[str], opt_dir: Optional[str], extra: Optional[str]) -> None:
|
||||
|
||||
class BaseReader:
|
||||
def __init__(
|
||||
self,
|
||||
config: CoreConfig,
|
||||
version: int,
|
||||
bin_dir: Optional[str],
|
||||
opt_dir: Optional[str],
|
||||
extra: Optional[str],
|
||||
) -> None:
|
||||
self.logger = logging.getLogger("reader")
|
||||
self.config = config
|
||||
self.bin_dir = bin_dir
|
||||
self.opt_dir = opt_dir
|
||||
self.version = version
|
||||
self.extra = extra
|
||||
|
||||
|
||||
|
||||
def get_data_directories(self, directory: str) -> List[str]:
|
||||
ret: List[str] = []
|
||||
|
||||
for root, dirs, files in os.walk(directory):
|
||||
for dir in dirs:
|
||||
if re.fullmatch("[A-Z0-9]{4,4}", dir) is not None:
|
||||
ret.append(f"{root}/{dir}")
|
||||
|
||||
for dir in dirs:
|
||||
if re.fullmatch("[A-Z0-9]{4,4}", dir) is not None:
|
||||
ret.append(f"{root}/{dir}")
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description='Import Game Information')
|
||||
parser = argparse.ArgumentParser(description="Import Game Information")
|
||||
parser.add_argument(
|
||||
'--series',
|
||||
action='store',
|
||||
"--series",
|
||||
action="store",
|
||||
type=str,
|
||||
required=True,
|
||||
help='The game series we are importing.',
|
||||
help="The game series we are importing.",
|
||||
)
|
||||
parser.add_argument(
|
||||
'--version',
|
||||
dest='version',
|
||||
action='store',
|
||||
"--version",
|
||||
dest="version",
|
||||
action="store",
|
||||
type=int,
|
||||
required=True,
|
||||
help='The game version we are importing.',
|
||||
help="The game version we are importing.",
|
||||
)
|
||||
parser.add_argument(
|
||||
'--binfolder',
|
||||
dest='bin',
|
||||
action='store',
|
||||
"--binfolder",
|
||||
dest="bin",
|
||||
action="store",
|
||||
type=str,
|
||||
help='Folder containing A000 base data',
|
||||
help="Folder containing A000 base data",
|
||||
)
|
||||
parser.add_argument(
|
||||
'--optfolder',
|
||||
dest='opt',
|
||||
action='store',
|
||||
"--optfolder",
|
||||
dest="opt",
|
||||
action="store",
|
||||
type=str,
|
||||
help='Folder containing Option data folders',
|
||||
help="Folder containing Option data folders",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--config",
|
||||
|
@ -79,29 +87,33 @@ if __name__ == "__main__":
|
|||
args = parser.parse_args()
|
||||
|
||||
config = CoreConfig()
|
||||
config.update(yaml.safe_load(open(f"{args.config}/core.yaml")))
|
||||
if path.exists(f"{args.config}/core.yaml"):
|
||||
config.update(yaml.safe_load(open(f"{args.config}/core.yaml")))
|
||||
|
||||
log_fmt_str = "[%(asctime)s] Reader | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
logger = logging.getLogger("reader")
|
||||
|
||||
fileHandler = TimedRotatingFileHandler("{0}/{1}.log".format(config.server.logs, "reader"), when="d", backupCount=10)
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(config.server.log_dir, "reader"), when="d", backupCount=10
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
logger.addHandler(fileHandler)
|
||||
logger.addHandler(consoleHandler)
|
||||
|
||||
logger.setLevel(logging.INFO)
|
||||
coloredlogs.install(level=logging.INFO, logger=logger, fmt=log_fmt_str)
|
||||
|
||||
log_lv = logging.DEBUG if config.server.is_develop else logging.INFO
|
||||
logger.setLevel(log_lv)
|
||||
coloredlogs.install(level=log_lv, logger=logger, fmt=log_fmt_str)
|
||||
|
||||
if args.series is None or args.version is None:
|
||||
logger.error("Game or version not specified")
|
||||
parser.print_help()
|
||||
exit(1)
|
||||
|
||||
exit(1)
|
||||
|
||||
if args.bin is None and args.opt is None:
|
||||
logger.error("Must specify either bin or opt directory")
|
||||
parser.print_help()
|
||||
|
@ -111,7 +123,7 @@ if __name__ == "__main__":
|
|||
bin_arg = args.bin[:-1]
|
||||
else:
|
||||
bin_arg = args.bin
|
||||
|
||||
|
||||
if args.opt is not None and (args.opt.endswith("\\") or args.opt.endswith("/")):
|
||||
opt_arg = args.opt[:-1]
|
||||
else:
|
||||
|
@ -125,5 +137,5 @@ if __name__ == "__main__":
|
|||
if args.series in mod.game_codes:
|
||||
handler = mod.reader(config, args.version, bin_arg, opt_arg, args.extra)
|
||||
handler.read()
|
||||
|
||||
|
||||
logger.info("Done")
|
||||
|
|
45
readme.md
45
readme.md
|
@ -2,26 +2,36 @@
|
|||
A network service emulator for games running SEGA'S ALL.NET service, and similar.
|
||||
|
||||
# Supported games
|
||||
Games listed below have been tested and confirmed working. Only game versions older then the current one in active use in arcades (n-0) or current game versions older then a year (y-1) are supported.
|
||||
+ Chunithm
|
||||
+ All versions up to New!! Plus
|
||||
Games listed below have been tested and confirmed working. Only game versions older then the version currently active in arcades, or games versions that have not recieved a major update in over one year, are supported.
|
||||
|
||||
+ Crossbeats Rev
|
||||
+ CHUNITHM
|
||||
+ All versions up to SUN
|
||||
|
||||
+ crossbeats REV.
|
||||
+ All versions + omnimix
|
||||
|
||||
+ Maimai
|
||||
+ All versions up to Universe Plus
|
||||
+ maimai DX
|
||||
+ All versions up to FESTiVAL
|
||||
|
||||
+ Hatsune Miku Arcade
|
||||
+ Hatsune Miku: Project DIVA Arcade
|
||||
+ All versions
|
||||
|
||||
+ Ongeki
|
||||
+ All versions up to Bright
|
||||
+ Card Maker
|
||||
+ 1.30
|
||||
+ 1.35
|
||||
|
||||
+ Wacca
|
||||
+ O.N.G.E.K.I.
|
||||
+ All versions up to Bright Memory
|
||||
|
||||
+ WACCA
|
||||
+ Lily R
|
||||
+ Reverse
|
||||
|
||||
+ POKKÉN TOURNAMENT
|
||||
+ Final Online
|
||||
|
||||
+ Sword Art Online Arcade (partial support)
|
||||
+ Final
|
||||
|
||||
## Requirements
|
||||
- python 3 (tested working with 3.9 and 3.10, other versions YMMV)
|
||||
|
@ -29,10 +39,11 @@ Games listed below have been tested and confirmed working. Only game versions ol
|
|||
- memcached (for non-windows platforms)
|
||||
- mysql/mariadb server
|
||||
|
||||
## Quick start guide
|
||||
1) Clone this repository
|
||||
2) Install requirements (see the platform-specific guides for instructions)
|
||||
3) Install python libraries via `pip`
|
||||
4) Copy the example configuration files into another folder (by default the server looks for the `config` directory)
|
||||
5) Edit the newly copied configuration files to your liking, using [this](docs/config.md) doc as a guide.
|
||||
6) Run the server by invoking `index.py` ex. `python3 index.py`
|
||||
## Setup guides
|
||||
Follow the platform-specific guides for [windows](docs/INSTALL_WINDOWS.md) and [ubuntu](docs/INSTALL_UBUNTU.md) to setup and run the server.
|
||||
|
||||
## Game specific information
|
||||
Read [Games specific info](docs/game_specific_info.md) for all supported games, importer settings, configuration option and database upgrades.
|
||||
|
||||
## Production guide
|
||||
See the [production guide](docs/prod.md) for running a production server.
|
||||
|
|
|
@ -10,6 +10,10 @@ service_identity
|
|||
PyCryptodome
|
||||
inflection
|
||||
coloredlogs
|
||||
pylibmc
|
||||
pylibmc; platform_system != "Windows"
|
||||
wacky
|
||||
Routes
|
||||
bcrypt
|
||||
jinja2
|
||||
protobuf
|
||||
autobahn
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
mypy
|
||||
wheel
|
||||
twisted
|
||||
pytz
|
||||
pyyaml
|
||||
sqlalchemy==1.4.46
|
||||
mysqlclient
|
||||
pyopenssl
|
||||
service_identity
|
||||
PyCryptodome
|
||||
inflection
|
||||
coloredlogs
|
||||
wacky
|
||||
Routes
|
|
@ -6,13 +6,5 @@ from titles.chuni.read import ChuniReader
|
|||
index = ChuniServlet
|
||||
database = ChuniData
|
||||
reader = ChuniReader
|
||||
|
||||
use_default_title = True
|
||||
include_protocol = True
|
||||
title_secure = False
|
||||
game_codes = [ChuniConstants.GAME_CODE, ChuniConstants.GAME_CODE_NEW]
|
||||
trailing_slash = True
|
||||
use_default_host = False
|
||||
host = ""
|
||||
|
||||
current_schema_version = 1
|
||||
current_schema_version = 4
|
||||
|
|
|
@ -5,12 +5,13 @@ from titles.chuni.base import ChuniBase
|
|||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.config import ChuniConfig
|
||||
|
||||
|
||||
class ChuniAir(ChuniBase):
|
||||
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_AIR
|
||||
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.10.00"
|
||||
return ret
|
||||
return ret
|
||||
|
|
|
@ -5,12 +5,13 @@ from titles.chuni.base import ChuniBase
|
|||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.config import ChuniConfig
|
||||
|
||||
|
||||
class ChuniAirPlus(ChuniBase):
|
||||
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_AIR_PLUS
|
||||
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.15.00"
|
||||
return ret
|
||||
return ret
|
||||
|
|
|
@ -7,12 +7,13 @@ from titles.chuni.base import ChuniBase
|
|||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.config import ChuniConfig
|
||||
|
||||
|
||||
class ChuniAmazon(ChuniBase):
|
||||
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_AMAZON
|
||||
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.30.00"
|
||||
return ret
|
||||
|
|
|
@ -7,12 +7,13 @@ from titles.chuni.base import ChuniBase
|
|||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.config import ChuniConfig
|
||||
|
||||
|
||||
class ChuniAmazonPlus(ChuniBase):
|
||||
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_AMAZON_PLUS
|
||||
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.35.00"
|
||||
return ret
|
||||
|
|
|
@ -4,14 +4,15 @@ from datetime import datetime, timedelta
|
|||
from time import strftime
|
||||
|
||||
import pytz
|
||||
from typing import Dict, Any
|
||||
from typing import Dict, Any, List
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.database import ChuniData
|
||||
from titles.chuni.config import ChuniConfig
|
||||
|
||||
class ChuniBase():
|
||||
|
||||
class ChuniBase:
|
||||
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
||||
self.core_cfg = core_cfg
|
||||
self.game_cfg = game_cfg
|
||||
|
@ -20,68 +21,181 @@ class ChuniBase():
|
|||
self.logger = logging.getLogger("chuni")
|
||||
self.game = ChuniConstants.GAME_CODE
|
||||
self.version = ChuniConstants.VER_CHUNITHM
|
||||
|
||||
|
||||
def handle_game_login_api_request(self, data: Dict) -> Dict:
|
||||
#self.data.base.log_event("chuni", "login", logging.INFO, {"version": self.version, "user": data["userId"]})
|
||||
return { "returnCode": 1 }
|
||||
|
||||
"""
|
||||
Handles the login bonus logic, required for the game because
|
||||
getUserLoginBonus gets called after getUserItem and therefore the
|
||||
items needs to be inserted in the database before they get requested.
|
||||
|
||||
Adds a bonusCount after a user logged in after 24 hours, makes sure
|
||||
loginBonus 30 gets looped, only show the login banner every 24 hours,
|
||||
adds the bonus to items (itemKind 6)
|
||||
"""
|
||||
|
||||
# ignore the login bonus if disabled in config
|
||||
if not self.game_cfg.mods.use_login_bonus:
|
||||
return {"returnCode": 1}
|
||||
|
||||
user_id = data["userId"]
|
||||
login_bonus_presets = self.data.static.get_login_bonus_presets(self.version)
|
||||
|
||||
for preset in login_bonus_presets:
|
||||
# check if a user already has some pogress and if not add the
|
||||
# login bonus entry
|
||||
user_login_bonus = self.data.item.get_login_bonus(
|
||||
user_id, self.version, preset["presetId"]
|
||||
)
|
||||
if user_login_bonus is None:
|
||||
self.data.item.put_login_bonus(
|
||||
user_id, self.version, preset["presetId"]
|
||||
)
|
||||
# yeah i'm lazy
|
||||
user_login_bonus = self.data.item.get_login_bonus(
|
||||
user_id, self.version, preset["presetId"]
|
||||
)
|
||||
|
||||
# skip the login bonus entirely if its already finished
|
||||
if user_login_bonus["isFinished"]:
|
||||
continue
|
||||
|
||||
# make sure the last login is more than 24 hours ago
|
||||
if user_login_bonus["lastUpdateDate"] < datetime.now() - timedelta(
|
||||
hours=24
|
||||
):
|
||||
# increase the login day counter and update the last login date
|
||||
bonus_count = user_login_bonus["bonusCount"] + 1
|
||||
last_update_date = datetime.now()
|
||||
|
||||
all_login_boni = self.data.static.get_login_bonus(
|
||||
self.version, preset["presetId"]
|
||||
)
|
||||
|
||||
# skip the current bonus preset if no boni were found
|
||||
if all_login_boni is None or len(all_login_boni) < 1:
|
||||
self.logger.warn(
|
||||
f"No bonus entries found for bonus preset {preset['presetId']}"
|
||||
)
|
||||
continue
|
||||
|
||||
max_needed_days = all_login_boni[0]["needLoginDayCount"]
|
||||
|
||||
# make sure to not show login boni after all days got redeemed
|
||||
is_finished = False
|
||||
if bonus_count > max_needed_days:
|
||||
# assume that all login preset ids under 3000 needs to be
|
||||
# looped, like 30 and 40 are looped, 40 does not work?
|
||||
if preset["presetId"] < 3000:
|
||||
bonus_count = 1
|
||||
else:
|
||||
is_finished = True
|
||||
|
||||
# grab the item for the corresponding day
|
||||
login_item = self.data.static.get_login_bonus_by_required_days(
|
||||
self.version, preset["presetId"], bonus_count
|
||||
)
|
||||
if login_item is not None:
|
||||
# now add the present to the database so the
|
||||
# handle_get_user_item_api_request can grab them
|
||||
self.data.item.put_item(
|
||||
user_id,
|
||||
{
|
||||
"itemId": login_item["presentId"],
|
||||
"itemKind": 6,
|
||||
"stock": login_item["itemNum"],
|
||||
"isValid": True,
|
||||
},
|
||||
)
|
||||
|
||||
self.data.item.put_login_bonus(
|
||||
user_id,
|
||||
self.version,
|
||||
preset["presetId"],
|
||||
bonusCount=bonus_count,
|
||||
lastUpdateDate=last_update_date,
|
||||
isWatched=False,
|
||||
isFinished=is_finished,
|
||||
)
|
||||
|
||||
return {"returnCode": 1}
|
||||
|
||||
def handle_game_logout_api_request(self, data: Dict) -> Dict:
|
||||
#self.data.base.log_event("chuni", "logout", logging.INFO, {"version": self.version, "user": data["userId"]})
|
||||
return { "returnCode": 1 }
|
||||
# self.data.base.log_event("chuni", "logout", logging.INFO, {"version": self.version, "user": data["userId"]})
|
||||
return {"returnCode": 1}
|
||||
|
||||
def handle_get_game_charge_api_request(self, data: Dict) -> Dict:
|
||||
game_charge_list = self.data.static.get_enabled_charges(self.version)
|
||||
|
||||
if game_charge_list is None or len(game_charge_list) == 0:
|
||||
return {"length": 0, "gameChargeList": []}
|
||||
|
||||
charges = []
|
||||
for x in range(len(game_charge_list)):
|
||||
charges.append({
|
||||
"orderId": x,
|
||||
"chargeId": game_charge_list[x]["chargeId"],
|
||||
"price": 1,
|
||||
"startDate": "2017-12-05 07:00:00.0",
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
"salePrice": 1,
|
||||
"saleStartDate": "2017-12-05 07:00:00.0",
|
||||
"saleEndDate": "2099-12-31 00:00:00.0"
|
||||
})
|
||||
return {
|
||||
"length": len(charges),
|
||||
"gameChargeList": charges
|
||||
}
|
||||
for x in range(len(game_charge_list)):
|
||||
charges.append(
|
||||
{
|
||||
"orderId": x,
|
||||
"chargeId": game_charge_list[x]["chargeId"],
|
||||
"price": 1,
|
||||
"startDate": "2017-12-05 07:00:00.0",
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
"salePrice": 1,
|
||||
"saleStartDate": "2017-12-05 07:00:00.0",
|
||||
"saleEndDate": "2099-12-31 00:00:00.0",
|
||||
}
|
||||
)
|
||||
return {"length": len(charges), "gameChargeList": charges}
|
||||
|
||||
def handle_get_game_event_api_request(self, data: Dict) -> Dict:
|
||||
game_events = self.data.static.get_enabled_events(self.version)
|
||||
|
||||
if game_events is None or len(game_events) == 0:
|
||||
self.logger.warn("No enabled events, did you run the reader?")
|
||||
return {
|
||||
"type": data["type"],
|
||||
"length": 0,
|
||||
"gameEventList": [],
|
||||
}
|
||||
|
||||
event_list = []
|
||||
for evt_row in game_events:
|
||||
tmp = {}
|
||||
tmp["id"] = evt_row["eventId"]
|
||||
tmp["type"] = evt_row["type"]
|
||||
tmp["startDate"] = "2017-12-05 07:00:00.0"
|
||||
tmp["endDate"] = "2099-12-31 00:00:00.0"
|
||||
event_list.append(tmp)
|
||||
event_list.append(
|
||||
{
|
||||
"id": evt_row["eventId"],
|
||||
"type": evt_row["type"],
|
||||
# actually use the startDate from the import so it
|
||||
# properly shows all the events when new ones are imported
|
||||
"startDate": datetime.strftime(
|
||||
evt_row["startDate"], "%Y-%m-%d %H:%M:%S"
|
||||
),
|
||||
"endDate": "2099-12-31 00:00:00",
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
"type": data["type"],
|
||||
"length": len(event_list),
|
||||
"gameEventList": event_list
|
||||
"type": data["type"],
|
||||
"length": len(event_list),
|
||||
"gameEventList": event_list,
|
||||
}
|
||||
|
||||
def handle_get_game_idlist_api_request(self, data: Dict) -> Dict:
|
||||
return { "type": data["type"], "length": 0, "gameIdlistList": [] }
|
||||
return {"type": data["type"], "length": 0, "gameIdlistList": []}
|
||||
|
||||
def handle_get_game_message_api_request(self, data: Dict) -> Dict:
|
||||
return { "type": data["type"], "length": "0", "gameMessageList": [] }
|
||||
return {"type": data["type"], "length": "0", "gameMessageList": []}
|
||||
|
||||
def handle_get_game_ranking_api_request(self, data: Dict) -> Dict:
|
||||
return { "type": data["type"], "gameRankingList": [] }
|
||||
return {"type": data["type"], "gameRankingList": []}
|
||||
|
||||
def handle_get_game_sale_api_request(self, data: Dict) -> Dict:
|
||||
return { "type": data["type"], "length": 0, "gameSaleList": [] }
|
||||
return {"type": data["type"], "length": 0, "gameSaleList": []}
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
reboot_start = datetime.strftime(datetime.now() - timedelta(hours=4), self.date_time_format)
|
||||
reboot_end = datetime.strftime(datetime.now() - timedelta(hours=3), self.date_time_format)
|
||||
reboot_start = datetime.strftime(
|
||||
datetime.now() - timedelta(hours=4), self.date_time_format
|
||||
)
|
||||
reboot_end = datetime.strftime(
|
||||
datetime.now() - timedelta(hours=3), self.date_time_format
|
||||
)
|
||||
return {
|
||||
"gameSetting": {
|
||||
"dataVersion": "1.00.00",
|
||||
|
@ -94,15 +208,17 @@ class ChuniBase():
|
|||
"maxCountItem": 300,
|
||||
"maxCountMusic": 300,
|
||||
},
|
||||
"isDumpUpload": "false",
|
||||
"isAou": "false",
|
||||
"isDumpUpload": "false",
|
||||
"isAou": "false",
|
||||
}
|
||||
|
||||
def handle_get_user_activity_api_request(self, data: Dict) -> Dict:
|
||||
user_activity_list = self.data.profile.get_profile_activity(data["userId"], data["kind"])
|
||||
|
||||
user_activity_list = self.data.profile.get_profile_activity(
|
||||
data["userId"], data["kind"]
|
||||
)
|
||||
|
||||
activity_list = []
|
||||
|
||||
|
||||
for activity in user_activity_list:
|
||||
tmp = activity._asdict()
|
||||
tmp.pop("user")
|
||||
|
@ -111,35 +227,45 @@ class ChuniBase():
|
|||
activity_list.append(tmp)
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(activity_list),
|
||||
"kind": data["kind"],
|
||||
"userActivityList": activity_list
|
||||
"userId": data["userId"],
|
||||
"length": len(activity_list),
|
||||
"kind": int(data["kind"]),
|
||||
"userActivityList": activity_list,
|
||||
}
|
||||
|
||||
def handle_get_user_character_api_request(self, data: Dict) -> Dict:
|
||||
characters = self.data.item.get_characters(data["userId"])
|
||||
if characters is None: return {}
|
||||
next_idx = -1
|
||||
if characters is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"nextIndex": -1,
|
||||
"userCharacterList": [],
|
||||
}
|
||||
|
||||
characterList = []
|
||||
for x in range(int(data["nextIndex"]), len(characters)):
|
||||
character_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
for x in range(next_idx, len(characters)):
|
||||
tmp = characters[x]._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
characterList.append(tmp)
|
||||
character_list.append(tmp)
|
||||
|
||||
if len(characterList) >= int(data["maxCount"]):
|
||||
if len(character_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(characterList) >= int(data["maxCount"]) and len(characters) > int(data["maxCount"]) + int(data["nextIndex"]):
|
||||
next_idx = int(data["maxCount"]) + int(data["nextIndex"]) + 1
|
||||
|
||||
|
||||
if len(characters) >= next_idx + max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = -1
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(characterList),
|
||||
"nextIndex": next_idx,
|
||||
"userCharacterList": characterList
|
||||
"userId": data["userId"],
|
||||
"length": len(character_list),
|
||||
"nextIndex": next_idx,
|
||||
"userCharacterList": character_list,
|
||||
}
|
||||
|
||||
def handle_get_user_charge_api_request(self, data: Dict) -> Dict:
|
||||
|
@ -153,21 +279,21 @@ class ChuniBase():
|
|||
charge_list.append(tmp)
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": data["userId"],
|
||||
"length": len(charge_list),
|
||||
"userChargeList": charge_list
|
||||
"userChargeList": charge_list,
|
||||
}
|
||||
|
||||
def handle_get_user_course_api_request(self, data: Dict) -> Dict:
|
||||
user_course_list = self.data.score.get_courses(data["userId"])
|
||||
if user_course_list is None:
|
||||
if user_course_list is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"nextIndex": -1,
|
||||
"userCourseList": []
|
||||
"nextIndex": -1,
|
||||
"userCourseList": [],
|
||||
}
|
||||
|
||||
|
||||
course_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
@ -180,51 +306,48 @@ class ChuniBase():
|
|||
|
||||
if len(user_course_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(user_course_list) >= max_ct:
|
||||
next_idx = next_idx + max_ct
|
||||
|
||||
if len(user_course_list) >= next_idx + max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = -1
|
||||
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": data["userId"],
|
||||
"length": len(course_list),
|
||||
"nextIndex": next_idx,
|
||||
"userCourseList": course_list
|
||||
"nextIndex": next_idx,
|
||||
"userCourseList": course_list,
|
||||
}
|
||||
|
||||
def handle_get_user_data_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_data(data["userId"], self.version)
|
||||
if p is None: return {}
|
||||
if p is None:
|
||||
return {}
|
||||
|
||||
profile = p._asdict()
|
||||
profile.pop("id")
|
||||
profile.pop("user")
|
||||
profile.pop("version")
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userData": profile
|
||||
}
|
||||
return {"userId": data["userId"], "userData": profile}
|
||||
|
||||
def handle_get_user_data_ex_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_data_ex(data["userId"], self.version)
|
||||
if p is None: return {}
|
||||
if p is None:
|
||||
return {}
|
||||
|
||||
profile = p._asdict()
|
||||
profile.pop("id")
|
||||
profile.pop("user")
|
||||
profile.pop("version")
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userDataEx": profile
|
||||
}
|
||||
return {"userId": data["userId"], "userDataEx": profile}
|
||||
|
||||
def handle_get_user_duel_api_request(self, data: Dict) -> Dict:
|
||||
user_duel_list = self.data.item.get_duels(data["userId"])
|
||||
if user_duel_list is None: return {}
|
||||
|
||||
if user_duel_list is None:
|
||||
return {}
|
||||
|
||||
duel_list = []
|
||||
for duel in user_duel_list:
|
||||
tmp = duel._asdict()
|
||||
|
@ -233,18 +356,29 @@ class ChuniBase():
|
|||
duel_list.append(tmp)
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": data["userId"],
|
||||
"length": len(duel_list),
|
||||
"userDuelList": duel_list
|
||||
"userDuelList": duel_list,
|
||||
}
|
||||
|
||||
def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
|
||||
user_fav_item_list = []
|
||||
|
||||
# still needs to be implemented on WebUI
|
||||
# 1: Music, 3: Character
|
||||
fav_list = self.data.item.get_all_favorites(
|
||||
data["userId"], self.version, fav_kind=int(data["kind"])
|
||||
)
|
||||
if fav_list is not None:
|
||||
for fav in fav_list:
|
||||
user_fav_item_list.append({"id": fav["favId"]})
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"kind": data["kind"],
|
||||
"nextIndex": -1,
|
||||
"userFavoriteItemList": []
|
||||
"userId": data["userId"],
|
||||
"length": len(user_fav_item_list),
|
||||
"kind": data["kind"],
|
||||
"nextIndex": -1,
|
||||
"userFavoriteItemList": user_fav_item_list,
|
||||
}
|
||||
|
||||
def handle_get_user_favorite_music_api_request(self, data: Dict) -> Dict:
|
||||
|
@ -252,22 +386,23 @@ class ChuniBase():
|
|||
This is handled via the webui, which we don't have right now
|
||||
"""
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"userFavoriteMusicList": []
|
||||
}
|
||||
return {"userId": data["userId"], "length": 0, "userFavoriteMusicList": []}
|
||||
|
||||
def handle_get_user_item_api_request(self, data: Dict) -> Dict:
|
||||
kind = int(int(data["nextIndex"]) / 10000000000)
|
||||
next_idx = int(int(data["nextIndex"]) % 10000000000)
|
||||
user_item_list = self.data.item.get_items(data["userId"], kind)
|
||||
|
||||
if user_item_list is None or len(user_item_list) == 0:
|
||||
return {"userId": data["userId"], "nextIndex": -1, "itemKind": kind, "userItemList": []}
|
||||
if user_item_list is None or len(user_item_list) == 0:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"nextIndex": -1,
|
||||
"itemKind": kind,
|
||||
"userItemList": [],
|
||||
}
|
||||
|
||||
items: list[Dict[str, Any]] = []
|
||||
for i in range(next_idx, len(user_item_list)):
|
||||
items: List[Dict[str, Any]] = []
|
||||
for i in range(next_idx, len(user_item_list)):
|
||||
tmp = user_item_list[i]._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
|
@ -277,38 +412,50 @@ class ChuniBase():
|
|||
|
||||
xout = kind * 10000000000 + next_idx + len(items)
|
||||
|
||||
if len(items) < int(data["maxCount"]): nextIndex = 0
|
||||
else: nextIndex = xout
|
||||
if len(items) < int(data["maxCount"]):
|
||||
next_idx = 0
|
||||
else:
|
||||
next_idx = xout
|
||||
|
||||
return {"userId": data["userId"], "nextIndex": nextIndex, "itemKind": kind, "length": len(items), "userItemList": items}
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"nextIndex": next_idx,
|
||||
"itemKind": kind,
|
||||
"length": len(items),
|
||||
"userItemList": items,
|
||||
}
|
||||
|
||||
def handle_get_user_login_bonus_api_request(self, data: Dict) -> Dict:
|
||||
"""
|
||||
Unsure how to get this to trigger...
|
||||
"""
|
||||
user_id = data["userId"]
|
||||
user_login_bonus = self.data.item.get_all_login_bonus(user_id, self.version)
|
||||
# ignore the loginBonus request if its disabled in config
|
||||
if user_login_bonus is None or not self.game_cfg.mods.use_login_bonus:
|
||||
return {"userId": user_id, "length": 0, "userLoginBonusList": []}
|
||||
|
||||
user_login_list = []
|
||||
for bonus in user_login_bonus:
|
||||
user_login_list.append(
|
||||
{
|
||||
"presetId": bonus["presetId"],
|
||||
"bonusCount": bonus["bonusCount"],
|
||||
"lastUpdateDate": datetime.strftime(
|
||||
bonus["lastUpdateDate"], "%Y-%m-%d %H:%M:%S"
|
||||
),
|
||||
"isWatched": bonus["isWatched"],
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 2,
|
||||
"userLoginBonusList": [
|
||||
{
|
||||
"presetId": '10',
|
||||
"bonusCount": '0',
|
||||
"lastUpdateDate": "1970-01-01 09:00:00",
|
||||
"isWatched": "true"
|
||||
},
|
||||
{
|
||||
"presetId": '20',
|
||||
"bonusCount": '0',
|
||||
"lastUpdateDate": "1970-01-01 09:00:00",
|
||||
"isWatched": "true"
|
||||
},
|
||||
]
|
||||
"userId": user_id,
|
||||
"length": len(user_login_list),
|
||||
"userLoginBonusList": user_login_list,
|
||||
}
|
||||
|
||||
def handle_get_user_map_api_request(self, data: Dict) -> Dict:
|
||||
user_map_list = self.data.item.get_maps(data["userId"])
|
||||
if user_map_list is None: return {}
|
||||
|
||||
if user_map_list is None:
|
||||
return {}
|
||||
|
||||
map_list = []
|
||||
for map in user_map_list:
|
||||
tmp = map._asdict()
|
||||
|
@ -317,20 +464,21 @@ class ChuniBase():
|
|||
map_list.append(tmp)
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": data["userId"],
|
||||
"length": len(map_list),
|
||||
"userMapList": map_list
|
||||
"userMapList": map_list,
|
||||
}
|
||||
|
||||
def handle_get_user_music_api_request(self, data: Dict) -> Dict:
|
||||
music_detail = self.data.score.get_scores(data["userId"])
|
||||
if music_detail is None:
|
||||
if music_detail is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"nextIndex": -1,
|
||||
"userMusicList": [] #240
|
||||
"userMusicList": [], # 240
|
||||
}
|
||||
|
||||
song_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
@ -340,66 +488,60 @@ class ChuniBase():
|
|||
tmp = music_detail[x]._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
|
||||
|
||||
for song in song_list:
|
||||
if song["userMusicDetailList"][0]["musicId"] == tmp["musicId"]:
|
||||
found = True
|
||||
song["userMusicDetailList"].append(tmp)
|
||||
song["length"] = len(song["userMusicDetailList"])
|
||||
|
||||
|
||||
if not found:
|
||||
song_list.append({
|
||||
"length": 1,
|
||||
"userMusicDetailList": [tmp]
|
||||
})
|
||||
|
||||
song_list.append({"length": 1, "userMusicDetailList": [tmp]})
|
||||
|
||||
if len(song_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(song_list) >= max_ct:
|
||||
|
||||
if len(song_list) >= next_idx + max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = 0
|
||||
next_idx = -1
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(song_list),
|
||||
"userId": data["userId"],
|
||||
"length": len(song_list),
|
||||
"nextIndex": next_idx,
|
||||
"userMusicList": song_list #240
|
||||
"userMusicList": song_list, # 240
|
||||
}
|
||||
|
||||
def handle_get_user_option_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_option(data["userId"])
|
||||
|
||||
|
||||
option = p._asdict()
|
||||
option.pop("id")
|
||||
option.pop("user")
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userGameOption": option
|
||||
}
|
||||
return {"userId": data["userId"], "userGameOption": option}
|
||||
|
||||
def handle_get_user_option_ex_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_option_ex(data["userId"])
|
||||
|
||||
|
||||
option = p._asdict()
|
||||
option.pop("id")
|
||||
option.pop("user")
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userGameOptionEx": option
|
||||
}
|
||||
return {"userId": data["userId"], "userGameOptionEx": option}
|
||||
|
||||
def read_wtf8(self, src):
|
||||
return bytes([ord(c) for c in src]).decode("utf-8")
|
||||
|
||||
def handle_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
profile = self.data.profile.get_profile_preview(data["userId"], self.version)
|
||||
if profile is None: return None
|
||||
profile_character = self.data.item.get_character(data["userId"], profile["characterId"])
|
||||
|
||||
if profile is None:
|
||||
return None
|
||||
profile_character = self.data.item.get_character(
|
||||
data["userId"], profile["characterId"]
|
||||
)
|
||||
|
||||
if profile_character is None:
|
||||
chara = {}
|
||||
else:
|
||||
|
@ -408,8 +550,8 @@ class ChuniBase():
|
|||
chara.pop("user")
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
# Current Login State
|
||||
"userId": data["userId"],
|
||||
# Current Login State
|
||||
"isLogin": False,
|
||||
"lastLoginDate": profile["lastPlayDate"],
|
||||
# User Profile
|
||||
|
@ -421,22 +563,22 @@ class ChuniBase():
|
|||
"lastGameId": profile["lastGameId"],
|
||||
"lastRomVersion": profile["lastRomVersion"],
|
||||
"lastDataVersion": profile["lastDataVersion"],
|
||||
"lastPlayDate": profile["lastPlayDate"],
|
||||
"trophyId": profile["trophyId"],
|
||||
"lastPlayDate": profile["lastPlayDate"],
|
||||
"trophyId": profile["trophyId"],
|
||||
"nameplateId": profile["nameplateId"],
|
||||
# Current Selected Character
|
||||
"userCharacter": chara,
|
||||
# User Game Options
|
||||
"playerLevel": profile["playerLevel"],
|
||||
"rating": profile["rating"],
|
||||
"playerLevel": profile["playerLevel"],
|
||||
"rating": profile["rating"],
|
||||
"headphone": profile["headphone"],
|
||||
"chargeState": "1",
|
||||
"chargeState": 1,
|
||||
"userNameEx": profile["userName"],
|
||||
}
|
||||
|
||||
def handle_get_user_recent_rating_api_request(self, data: Dict) -> Dict:
|
||||
recet_rating_list = self.data.profile.get_profile_recent_rating(data["userId"])
|
||||
if recet_rating_list is None:
|
||||
recent_rating_list = self.data.profile.get_profile_recent_rating(data["userId"])
|
||||
if recent_rating_list is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
|
@ -445,8 +587,8 @@ class ChuniBase():
|
|||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(recet_rating_list["recentRating"]),
|
||||
"userRecentRatingList": recet_rating_list["recentRating"],
|
||||
"length": len(recent_rating_list["recentRating"]),
|
||||
"userRecentRatingList": recent_rating_list["recentRating"],
|
||||
}
|
||||
|
||||
def handle_get_user_region_api_request(self, data: Dict) -> Dict:
|
||||
|
@ -458,12 +600,25 @@ class ChuniBase():
|
|||
}
|
||||
|
||||
def handle_get_user_team_api_request(self, data: Dict) -> Dict:
|
||||
# TODO: Team
|
||||
# TODO: use the database "chuni_profile_team" with a GUI
|
||||
team_name = self.game_cfg.team.team_name
|
||||
if team_name == "":
|
||||
return {"userId": data["userId"], "teamId": 0}
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"teamId": 0
|
||||
"teamId": 1,
|
||||
"teamRank": 1,
|
||||
"teamName": team_name,
|
||||
"userTeamPoint": {
|
||||
"userId": data["userId"],
|
||||
"teamId": 1,
|
||||
"orderId": 1,
|
||||
"teamPoint": 1,
|
||||
"aggrDate": data["playDate"],
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def handle_get_team_course_setting_api_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
|
@ -486,19 +641,33 @@ class ChuniBase():
|
|||
|
||||
if "userData" in upsert:
|
||||
try:
|
||||
upsert["userData"][0]["userName"] = self.read_wtf8(upsert["userData"][0]["userName"])
|
||||
except: pass
|
||||
upsert["userData"][0]["userName"] = self.read_wtf8(
|
||||
upsert["userData"][0]["userName"]
|
||||
)
|
||||
except:
|
||||
pass
|
||||
|
||||
self.data.profile.put_profile_data(
|
||||
user_id, self.version, upsert["userData"][0]
|
||||
)
|
||||
|
||||
self.data.profile.put_profile_data(user_id, self.version, upsert["userData"][0])
|
||||
if "userDataEx" in upsert:
|
||||
self.data.profile.put_profile_data_ex(user_id, self.version, upsert["userDataEx"][0])
|
||||
self.data.profile.put_profile_data_ex(
|
||||
user_id, self.version, upsert["userDataEx"][0]
|
||||
)
|
||||
|
||||
if "userGameOption" in upsert:
|
||||
self.data.profile.put_profile_option(user_id, upsert["userGameOption"][0])
|
||||
|
||||
if "userGameOptionEx" in upsert:
|
||||
self.data.profile.put_profile_option_ex(user_id, upsert["userGameOptionEx"][0])
|
||||
self.data.profile.put_profile_option_ex(
|
||||
user_id, upsert["userGameOptionEx"][0]
|
||||
)
|
||||
if "userRecentRatingList" in upsert:
|
||||
self.data.profile.put_profile_recent_rating(user_id, upsert["userRecentRatingList"])
|
||||
|
||||
self.data.profile.put_profile_recent_rating(
|
||||
user_id, upsert["userRecentRatingList"]
|
||||
)
|
||||
|
||||
if "userCharacterList" in upsert:
|
||||
for character in upsert["userCharacterList"]:
|
||||
self.data.item.put_character(user_id, character)
|
||||
|
@ -514,7 +683,7 @@ class ChuniBase():
|
|||
if "userDuelList" in upsert:
|
||||
for duel in upsert["userDuelList"]:
|
||||
self.data.item.put_duel(user_id, duel)
|
||||
|
||||
|
||||
if "userItemList" in upsert:
|
||||
for item in upsert["userItemList"]:
|
||||
self.data.item.put_item(user_id, item)
|
||||
|
@ -522,23 +691,27 @@ class ChuniBase():
|
|||
if "userActivityList" in upsert:
|
||||
for activity in upsert["userActivityList"]:
|
||||
self.data.profile.put_profile_activity(user_id, activity)
|
||||
|
||||
|
||||
if "userChargeList" in upsert:
|
||||
for charge in upsert["userChargeList"]:
|
||||
self.data.profile.put_profile_charge(user_id, charge)
|
||||
|
||||
|
||||
if "userMusicDetailList" in upsert:
|
||||
for song in upsert["userMusicDetailList"]:
|
||||
self.data.score.put_score(user_id, song)
|
||||
|
||||
|
||||
if "userPlaylogList" in upsert:
|
||||
for playlog in upsert["userPlaylogList"]:
|
||||
# convert the player names to utf-8
|
||||
playlog["playedUserName1"] = self.read_wtf8(playlog["playedUserName1"])
|
||||
playlog["playedUserName2"] = self.read_wtf8(playlog["playedUserName2"])
|
||||
playlog["playedUserName3"] = self.read_wtf8(playlog["playedUserName3"])
|
||||
self.data.score.put_playlog(user_id, playlog)
|
||||
|
||||
|
||||
if "userTeamPoint" in upsert:
|
||||
# TODO: team stuff
|
||||
pass
|
||||
|
||||
|
||||
if "userMapAreaList" in upsert:
|
||||
for map_area in upsert["userMapAreaList"]:
|
||||
self.data.item.put_map_area(user_id, map_area)
|
||||
|
@ -551,22 +724,37 @@ class ChuniBase():
|
|||
for emoney in upsert["userEmoneyList"]:
|
||||
self.data.profile.put_profile_emoney(user_id, emoney)
|
||||
|
||||
return { "returnCode": "1" }
|
||||
if "userLoginBonusList" in upsert:
|
||||
for login in upsert["userLoginBonusList"]:
|
||||
self.data.item.put_login_bonus(
|
||||
user_id, self.version, login["presetId"], isWatched=True
|
||||
)
|
||||
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_upsert_user_chargelog_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
# add tickets after they got bought, this makes sure the tickets are
|
||||
# still valid after an unsuccessful logout
|
||||
self.data.profile.put_profile_charge(data["userId"], data["userCharge"])
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_upsert_client_bookkeeping_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_upsert_client_develop_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_upsert_client_error_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_upsert_client_setting_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_upsert_client_testmode_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_get_user_net_battle_data_api_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userNetBattleData": {"recentNBSelectMusicList": []},
|
||||
}
|
||||
|
|
|
@ -1,36 +1,88 @@
|
|||
from core.config import CoreConfig
|
||||
from typing import Dict
|
||||
|
||||
class ChuniServerConfig():
|
||||
|
||||
class ChuniServerConfig:
|
||||
def __init__(self, parent_config: "ChuniConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
|
||||
@property
|
||||
def enable(self) -> bool:
|
||||
return CoreConfig.get_config_field(self.__config, 'chuni', 'server', 'enable', default=True)
|
||||
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "chuni", "server", "enable", default=True
|
||||
)
|
||||
|
||||
@property
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(CoreConfig.get_config_field(self.__config, 'chuni', 'server', 'loglevel', default="info"))
|
||||
return CoreConfig.str_to_loglevel(
|
||||
CoreConfig.get_config_field(
|
||||
self.__config, "chuni", "server", "loglevel", default="info"
|
||||
)
|
||||
)
|
||||
|
||||
class ChuniCryptoConfig():
|
||||
|
||||
class ChuniTeamConfig:
|
||||
def __init__(self, parent_config: "ChuniConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
|
||||
@property
|
||||
def team_name(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "chuni", "team", "name", default=""
|
||||
)
|
||||
|
||||
|
||||
class ChuniModsConfig:
|
||||
def __init__(self, parent_config: "ChuniConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
@property
|
||||
def use_login_bonus(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "chuni", "mods", "use_login_bonus", default=True
|
||||
)
|
||||
|
||||
|
||||
class ChuniVersionConfig:
|
||||
def __init__(self, parent_config: "ChuniConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
def version(self, version: int) -> Dict:
|
||||
"""
|
||||
in the form of:
|
||||
11: {"rom": 2.00.00, "data": 2.00.00}
|
||||
"""
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "chuni", "version", default={}
|
||||
)[version]
|
||||
|
||||
|
||||
class ChuniCryptoConfig:
|
||||
def __init__(self, parent_config: "ChuniConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
@property
|
||||
def keys(self) -> Dict:
|
||||
"""
|
||||
in the form of:
|
||||
internal_version: [key, iv]
|
||||
internal_version: [key, iv]
|
||||
all values are hex strings
|
||||
"""
|
||||
return CoreConfig.get_config_field(self.__config, 'chuni', 'crypto', 'keys', default={})
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "chuni", "crypto", "keys", default={}
|
||||
)
|
||||
|
||||
@property
|
||||
def encrypted_only(self) -> bool:
|
||||
return CoreConfig.get_config_field(self.__config, 'chuni', 'crypto', 'encrypted_only', default=False)
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "chuni", "crypto", "encrypted_only", default=False
|
||||
)
|
||||
|
||||
|
||||
class ChuniConfig(dict):
|
||||
def __init__(self) -> None:
|
||||
self.server = ChuniServerConfig(self)
|
||||
self.crypto = ChuniCryptoConfig(self)
|
||||
self.team = ChuniTeamConfig(self)
|
||||
self.mods = ChuniModsConfig(self)
|
||||
self.version = ChuniVersionConfig(self)
|
||||
self.crypto = ChuniCryptoConfig(self)
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
class ChuniConstants():
|
||||
class ChuniConstants:
|
||||
GAME_CODE = "SDBT"
|
||||
GAME_CODE_NEW = "SDHD"
|
||||
|
||||
CONFIG_NAME = "chuni.yaml"
|
||||
|
||||
VER_CHUNITHM = 0
|
||||
VER_CHUNITHM_PLUS = 1
|
||||
VER_CHUNITHM_AIR = 2
|
||||
VER_CHUNITHM_AIR_PLUS = 3
|
||||
VER_CHUNITHM_STAR = 4
|
||||
VER_CHUNITHM_STAR = 4
|
||||
VER_CHUNITHM_STAR_PLUS = 5
|
||||
VER_CHUNITHM_AMAZON = 6
|
||||
VER_CHUNITHM_AMAZON_PLUS = 7
|
||||
|
@ -15,9 +17,24 @@ class ChuniConstants():
|
|||
VER_CHUNITHM_PARADISE = 10
|
||||
VER_CHUNITHM_NEW = 11
|
||||
VER_CHUNITHM_NEW_PLUS = 12
|
||||
VER_CHUNITHM_SUN = 13
|
||||
|
||||
VERSION_NAMES = ["Chunithm", "Chunithm+", "Chunithm Air", "Chunithm Air+", "Chunithm Star", "Chunithm Star+", "Chunithm Amazon",
|
||||
"Chunithm Amazon+", "Chunithm Crystal", "Chunithm Crystal+", "Chunithm Paradise", "Chunithm New!!", "Chunithm New!!+"]
|
||||
VERSION_NAMES = [
|
||||
"CHUNITHM",
|
||||
"CHUNITHM PLUS",
|
||||
"CHUNITHM AIR",
|
||||
"CHUNITHM AIR PLUS",
|
||||
"CHUNITHM STAR",
|
||||
"CHUNITHM STAR PLUS",
|
||||
"CHUNITHM AMAZON",
|
||||
"CHUNITHM AMAZON PLUS",
|
||||
"CHUNITHM CRYSTAL",
|
||||
"CHUNITHM CRYSTAL PLUS",
|
||||
"CHUNITHM PARADISE",
|
||||
"CHUNITHM NEW!!",
|
||||
"CHUNITHM NEW PLUS!!",
|
||||
"CHUNITHM SUN"
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def game_ver_to_string(cls, ver: int):
|
||||
|
|
|
@ -7,12 +7,13 @@ from titles.chuni.base import ChuniBase
|
|||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.config import ChuniConfig
|
||||
|
||||
|
||||
class ChuniCrystal(ChuniBase):
|
||||
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_CRYSTAL
|
||||
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.40.00"
|
||||
return ret
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue