Compare commits

126 Commits

Author SHA1 Message Date
Hay1tsme dde397a96c allnet: fix event type for handle_dlorder_report 2023-11-01 20:09:14 -04:00
Hay1tsme f99bf7d1ed allnet: add DLI_STATUS 2023-11-01 20:08:20 -04:00
Hay1tsme bd3e1918aa allnet: further dl report logging 2023-11-01 01:17:23 -04:00
Hay1tsme ffc0a07f6d allnet: fix dl report message typo 2023-11-01 01:02:09 -04:00
Hay1tsme ab640760a9 allnet: add event logging to download report 2023-11-01 00:53:04 -04:00
Midorica a8f06ee266 Merge pull request 'Aime Locks/Bans and Chunithm Improvements' (#47) from EmmyHeart/artemis:develop into develop
Reviewed-on: Hay1tsme/artemis#47
2023-10-27 17:25:35 +00:00
Hay1tsme 4b83d3f316 Merge pull request 'wacca: name unknown fields, rename incorrect fields.' (#56) from Yellowberry/artemis:wacca_notice_fix into develop
Reviewed-on: Hay1tsme/artemis#56
2023-10-27 02:06:38 +00:00
Midorica 1b5c335f4e Merge pull request 'Chunithm: Fix getUserMusic duplicate songs and fix missing song diffs' (#54) from DSRLIN/artemis-bugfix:develop into develop
Reviewed-on: Hay1tsme/artemis#54
2023-10-27 01:51:43 +00:00
Zsolt Zitting 680223dba2 wacca: name unknown fields, rename incorrect fields. 2023-10-26 14:25:04 -07:00
DSRLIN 3259552c29 update bug 2023-10-26 05:38:33 +00:00
DSRLIN b5ce13d1f1 fix chuni songlist duplicate songs and missing diff 2023-10-26 05:37:11 +00:00
Midorica be86448b23 Merge pull request 'Updates and Fixes for Dockerfile and docker-compose, and docker installation guide' (#51) from phantomlan/artemis:develop into develop
Reviewed-on: Hay1tsme/artemis#51
2023-10-22 14:22:41 +00:00
phantomlan 01182087e0 Merge branch 'docker-and-docs' into develop 2023-10-21 20:38:14 +02:00
phantomlan e9637c94de update dx.py 2023-10-21 20:35:46 +02:00
phantomlan 37a6502dc9 Merge branch 'docker-and-docs' into develop 2023-10-21 20:04:16 +02:00
phantomlan d1259509ad update pull request 2023-10-21 19:57:45 +02:00
phantomlan 7f1ff60b9d Update docs/INSTALL_DOCKER.md 2023-10-21 15:54:17 +00:00
phantomlan 12d0a3f927 - prettier docker guide
- update docker-compose to reflect 1 level higher config folder
2023-10-21 17:47:23 +02:00
phantomlan 5420076c8e - update docker-compose database charset and collation to utf8mb4
- update TODO list in docker guide
- update readme.md
2023-10-21 17:33:58 +02:00
phantomlan a3d2955fce - update docker-compose.yml
- update mysqldb to mariadb-alpine
- update mysql healthcheck to also include Password
- update memcached to alpine counterpart
- update phpmyadmin port to not collide with ARTEMiS main app
- add commented out options for DB Persistency

- update Dockerfile
- add read.py to Dockerfile

- add docs/INSTALL_DOCKER.md guide
2023-10-21 17:22:58 +02:00
EmmyHeart 990d60cf27 Forgot to remove rank scale option from the example config, as it does literally nothing now lol 2023-10-20 04:25:08 +00:00
EmmyHeart 21cb37001b Stubbed Team Course functions as they do not currently do anything 2023-10-20 03:31:36 +00:00
EmmyHeart 32903d979e Added code for song rankings using romVersion, and ensuring romVersion is set on playlog upsert 2023-10-20 03:29:05 +00:00
EmmyHeart 3f8c62044c Optimized rival music list, added ranking API, and began work on Team Courses (need help) 2023-10-20 03:26:51 +00:00
EmmyHeart 719ae9cfb1 Added checks for if Aime card is locked or banned, and pass that status to the game 2023-10-20 03:25:12 +00:00
EmmyHeart 8fcd227b33 Added functions for checking if Aime card is locked or banned 2023-10-20 03:24:25 +00:00
Midorica 54c77c047e fixing duplicated is_cab value from machine table 2023-10-18 18:44:03 -04:00
Midorica 93b8b86b55 Adding base handler & config for HTTPS title support 2023-10-17 13:12:08 -04:00
Midorica eaa2652647 Merge pull request 'Chunithm Fixes and Maintenance for all!' (#46) from EmmyHeart/artemis:develop into develop
Reviewed-on: Hay1tsme/artemis#46
2023-10-17 17:00:08 +00:00
Midorica 46f83b9925 Merge pull request 'maimai DX FESTiVAL PLUS support' (#44) from Dniel97/artemis:mai2_festival_plus into develop
Reviewed-on: Hay1tsme/artemis#44
2023-10-16 20:00:08 +00:00
Dniel97 862907b82a mai2: Fixed cards not showing up 2023-10-16 21:36:26 +02:00
EmmyHeart 8c114532e8 Updated config documentation with IP enforcement and reboot times 2023-10-16 13:30:10 +00:00
EmmyHeart 300cd10abf Updated example config with reboot time
You can leave this option out to disable reboot times, and the games will use the prior mechanism of maintenance always being 4~ hours before current time
2023-10-16 13:23:56 +00:00
EmmyHeart 4f0a5f60ab Added config for reboot time support 2023-10-16 13:22:18 +00:00
EmmyHeart 9cff321857 Added reboot time support 2023-10-16 13:21:07 +00:00
EmmyHeart 8d289ca066 Added reboot time support 2023-10-16 13:20:37 +00:00
EmmyHeart e18b87ee5c Added reboot time support 2023-10-16 13:20:00 +00:00
EmmyHeart 540d7fc2c2 Added reboot time support 2023-10-16 13:18:23 +00:00
EmmyHeart 3ef40fe85e Removed Team Rank Scaling and fixed RIval Music Data. Also cleaned up extraneous functions 2023-10-16 13:15:39 +00:00
EmmyHeart 35a7525f74 Removed extraneous function 2023-10-16 13:14:11 +00:00
EmmyHeart d49997f832 Team Rank Scaling is dead, long live Team Rank Scaling 2023-10-16 13:09:53 +00:00
EmmyHeart c78a62de14 Fixed Rival Music retrieval 2023-10-16 13:09:11 +00:00
EmmyHeart 480551f942 Fixed logic error leading to strict IP checking always being enabled 2023-10-16 13:07:05 +00:00
Dniel97 d6e4db48f4 Added maimai DX FESTiVAL PLUS support
- Added Card Maker support for FESTiVAL PLUS
- Bumped SDEZ database to version 8
- Updated docs for FESTiVAL PLUS
2023-10-15 19:04:15 +02:00
Dniel97 346b74e51b Merge branch 'develop' into fork_develop 2023-10-15 18:06:07 +02:00
Midorica aa9d48ccc9 fixed the diva profile again 2023-10-10 18:37:18 -04:00
Midorica dc02d60690 Merge branch 'develop' of https://gitea.tendokyu.moe/Hay1tsme/artemis into develop 2023-10-10 18:25:01 -04:00
Midorica 6d592dcbc7 fixing skin issue with diva profile 2023-10-10 18:24:54 -04:00
Midorica ab10f27cb4 Merge pull request 'Fixed typo with game_cfg/game_config' (#42) from EmmyHeart/artemis:develop into develop
Reviewed-on: Hay1tsme/artemis#42
2023-10-09 12:38:46 +00:00
EmmyHeart 0a12e93593 news_msg config option doesn't actually return anything. Fixed! 2023-10-09 06:47:55 +00:00
EmmyHeart bad106ceba Fixed typo with game_cfg/game_config
This resulted in an exception on Plost and earlier, leading to games not actually working.
2023-10-09 06:35:14 +00:00
Midorica 7fc5544c15 adding a SAO note to documentation 2023-10-08 15:52:41 -04:00
Midorica bfaadff9f6 pushing news support customization for chunithm 2023-10-07 12:31:08 -04:00
Midorica 1996f3f356 fix for chunithm song loading after 600 scores 2023-10-07 12:11:24 -04:00
Midorica 56ddd3b1cc fix yet again to the md file 2023-10-07 12:07:14 -04:00
Midorica 41fcadfd55 quick fix to markdown file 2023-10-07 12:04:33 -04:00
Midorica 06d95c8c5f Adding details under the game specific info doc 2023-10-07 11:59:01 -04:00
Hay1tsme b5ccd67940 db: add memcache toggle 2023-10-05 22:16:50 -04:00
Midorica 1f65cfd2eb Merge pull request 'Added Team and Rival support to Chunithm' (#24) from EmmyHeart/artemis:develop into develop
Reviewed-on: Hay1tsme/artemis#24
2023-10-06 01:48:50 +00:00
EmmyHeart 9681f86e33 Chunithm SQL documentation
Figured I would outline all of the neat SQL tricks you can do with this build, as well as properly document the features.
2023-10-05 07:56:46 +00:00
Hay1tsme d641705273 pokken: fix last_play_event_id 2023-10-02 15:12:29 -04:00
Hay1tsme 3d13eb1698 pokken: add coalesce 2023-10-01 23:49:47 -04:00
Hay1tsme 33e0288e5e pokken: pokemon data save/load 2023-10-01 22:38:56 -04:00
Dniel97 38c1c31cf5 Merge branch 'develop' into fork_develop 2023-09-25 22:48:53 +02:00
Midorica 91791813dc Merge pull request 'ONGEKI Rival Functionality' (#36) from 2TT/artemis:develop into develop
Reviewed-on: Hay1tsme/artemis#36
2023-09-23 16:08:15 +00:00
Hay1tsme 1497bf4a95 frontend: fixes to searching 2023-09-19 02:42:13 -04:00
Hay1tsme aa17f99252 frontend: add placeholders, arcade search by IP 2023-09-19 02:31:29 -04:00
Hay1tsme 50de0916d4 allnet: minor tweeks to mirrior realnet better 2023-09-19 02:15:22 -04:00
Midorica 2affd0aae9 Merge pull request 'Fix wrong id being used to check for Chunithm WORLD'S END charts' (#33) from ASleepyCat/artemis:fix-worlds-end-id into develop
Reviewed-on: Hay1tsme/artemis#33
2023-09-12 04:36:59 +00:00
ASleepyCat 4a916cb4d1 Fix reading WORLD'S END charts for SDBT 2023-09-10 12:29:19 +10:00
ASleepyCat 6c98f5f0a7 Fix wrong id being used to check for WORLD'S END charts 2023-09-10 12:29:19 +10:00
Midorica 090b3148d8 adding encryption support for ongeki 2023-09-09 12:03:42 -04:00
Wanich Keatkajonjumroen ed46ea33e3 Added version select in session, removed get_profile_data_ignore_version, renamed scripts file 2023-09-09 06:55:18 +09:00
Dniel97 08927db100 Merge branch 'develop' into fork_develop 2023-09-07 15:09:36 +02:00
Hay1tsme 238e39f415 adb: properly handle an incorrect adb status value 2023-09-06 22:47:37 -04:00
Hay1tsme 5499d38bb4 mucha: streamline mucha_postprocess (thanks Bottersnike!) 2023-09-05 17:32:51 -04:00
2TT d584b93ca5 Merge branch 'develop' into develop 2023-09-02 06:39:37 +00:00
Wanich Keatkajonjumroen d5d2803cc0 forgot a print command, see previous commit 2023-09-02 07:50:09 +09:00
Wanich Keatkajonjumroen 16d801aff5 Rival Delete functionality 2023-09-02 07:40:57 +09:00
Wanich Keatkajonjumroen 147d7adaaf Frontend for adding rivals, versionless backend function to support 2023-09-02 06:21:02 +09:00
Wanich Keatkajonjumroen 1f545aed41 removed print statements 2023-09-02 05:04:59 +09:00
Hay1tsme 3a6cfedcca maimai: fix net deliver paths 2023-08-31 01:24:41 -04:00
Hay1tsme 7a6272dcc5 Merge branch 'develop' of https://gitea.tendokyu.moe/Hay1tsme/artemis into develop 2023-08-30 11:19:14 -04:00
Hay1tsme 136e47d1e6 reader: series -> game 2023-08-30 11:19:13 -04:00
Wanich Keatkajonjumroen eaf9e0cf09 removed temp file 2023-08-30 13:53:55 +09:00
Wanich Keatkajonjumroen 0d7409906a fixed backend ongeki rival functionality 2023-08-30 13:53:45 +09:00
Wanich Keatkajonjumroen 5cccc9224a geki version display 2023-08-30 13:53:41 +09:00
Wanich Keatkajonjumroen 2a52e391d6 Ongeki template start 2023-08-30 13:53:31 +09:00
Hay1tsme dac655b4ae adb: add catch for uninitialized amlib requests 2023-08-27 19:18:22 -04:00
Hay1tsme 37e2da2051 maimai: fix usbdl endpoints 2023-08-27 11:53:29 -04:00
Dniel97 9d74d60c14 Merge remote-tracking branch 'origin/develop' into fork_develop 2023-08-21 10:33:25 +02:00
Hay1tsme d4ea3bc12a billing: float5 hotfix 2023-08-21 01:53:27 -04:00
Hay1tsme d8b0e2ea2a billing: add classes and validation, fix response 2023-08-21 01:50:59 -04:00
Hay1tsme 984949d902 allnet: partial DFI implementation 2023-08-21 00:10:25 -04:00
Hay1tsme 2e8d99e5fa allnet: add ip check config option 2023-08-20 23:25:50 -04:00
Hay1tsme fd6cadf2da adb: hotfix 2023-08-20 19:56:16 -04:00
Hay1tsme 71489c1272 adb: fix log_ex 2023-08-20 19:55:26 -04:00
Dniel97 3773c57de1 Merge remote-tracking branch 'origin/develop' into fork_develop 2023-08-20 14:22:58 +02:00
Hay1tsme 8ea82ffe1a adb: add from_req 2023-08-19 01:35:37 -04:00
Hay1tsme 904ea10920 cm: remove print, fix default config 2023-08-19 01:35:15 -04:00
Dniel97 8a8c0e023e Merge branch 'develop' into fork_develop 2023-08-16 11:02:22 +02:00
Hay1tsme cf7cc0997a mucha: add other request/response structures 2023-08-15 23:19:48 -04:00
Hay1tsme 92567504f4 adb: fix for felica_lookup_ex, for #32 2023-08-15 10:43:49 -04:00
Hay1tsme fd50a7ee68 aimedb_redux (#30)
Update AimeDB from new [documentation](https://minori.tendokyu.moe/docs/allnet/aimedb/) of the protocol.
Reviewed-on: Hay1tsme/artemis#30
2023-08-14 03:32:03 +00:00
Hay1tsme 9e3a51a57a allnet: add IP checking 2023-08-08 12:35:38 -04:00
Hay1tsme 4744e8cf5f add ip checking config options 2023-08-08 10:24:28 -04:00
Hay1tsme 88a1462304 logger: change from warn to warning 2023-08-08 10:17:56 -04:00
Hay1tsme 2e277e7791 add arcade ip column 2023-08-08 10:17:38 -04:00
Hay1tsme 757fdc5c57 mai2: add check for Mai-Encoding headers 2023-08-01 02:34:40 -04:00
Hay1tsme 23bcb5cc13 Merge branch 'develop' of https://gitea.tendokyu.moe/Hay1tsme/artemis into develop 2023-07-25 09:18:47 -04:00
Hay1tsme 9f0c181593 add handler for /mucha_front in addition to /mucha 2023-07-25 09:18:45 -04:00
Midorica 5a4baba102 Diva: Adding threading to score loading 2023-07-24 13:19:54 -04:00
Hay1tsme 156b4e4ede mai2: fix get user music for dx and up 2023-07-24 00:49:08 -04:00
Hay1tsme 6c89a97fe3 frontend: add management pages 2023-07-23 22:21:49 -04:00
Hay1tsme b943807904 core: add columns to machine table, bump to v5 2023-07-23 22:21:41 -04:00
Hay1tsme f417be671b pokken: fix typo 2023-07-23 12:47:37 -04:00
Hay1tsme 20335aaebe add download report api 2023-07-23 12:47:10 -04:00
Dniel97 097181008b Merge remote-tracking branch 'origin/develop' into fork_develop 2023-07-16 23:41:38 +02:00
Dniel97 859bf4bf5d Merge remote-tracking branch 'origin/develop' into fork_develop 2023-07-16 19:07:56 +02:00
EmmyHeart 3c7ceabf4e And again 2023-07-11 10:04:25 +00:00
EmmyHeart 1bc8648e35 I knew I forgot something (fixed config) 2023-07-11 09:56:09 +00:00
EmmyHeart eecd3a829d Added value for team rank scaling, and documented it a bit 2023-07-11 09:40:49 +00:00
EmmyHeart b42e8ab76c Added function for pulling a song via the DB unique ID instead of the native song ID 2023-07-11 09:16:11 +00:00
EmmyHeart 043ff17008 Add team support, rivals, and test function for getting playcounts 2023-07-11 09:14:53 +00:00
EmmyHeart c01d3f49f5 Added call for getting rival's music lists 2023-07-11 09:13:19 +00:00
EmmyHeart 75842b5a88 Add team support, implement team upsert, add rivals 2023-07-11 09:12:34 +00:00
106 changed files with 3573 additions and 793 deletions

@@ -12,10 +12,10 @@ RUN chmod +x entrypoint.sh
COPY index.py index.py
COPY dbutils.py dbutils.py
COPY read.py read.py
ADD core core
ADD titles titles
ADD config config
ADD log log
ADD cert cert
ENTRYPOINT [ "/app/entrypoint.sh" ]
ENTRYPOINT [ "/app/entrypoint.sh" ]

@@ -1,6 +1,13 @@
# Changelog
Documenting updates to ARTEMiS, to be updated every time the master branch is pushed to.
## 20231015
### maimai DX
+ Added support for FESTiVAL PLUS
### Card Maker
+ Added support for maimai DX FESTiVAL PLUS
## 20230716
### General
+ Docker files added (#19)

@@ -0,0 +1,6 @@
from .base import ADBBaseRequest, ADBBaseResponse, ADBHeader, ADBHeaderException, PortalRegStatus, LogStatus, ADBStatus
from .base import CompanyCodes, ReaderFwVer, CMD_CODE_GOODBYE, HEADER_SIZE
from .lookup import ADBLookupRequest, ADBLookupResponse, ADBLookupExResponse
from .campaign import ADBCampaignClearRequest, ADBCampaignClearResponse, ADBCampaignResponse, ADBOldCampaignRequest, ADBOldCampaignResponse
from .felica import ADBFelicaLookupRequest, ADBFelicaLookupResponse, ADBFelicaLookup2Request, ADBFelicaLookup2Response
from .log import ADBLogExRequest, ADBLogRequest, ADBStatusLogRequest, ADBLogExResponse

core/adb_handlers/base.py
@@ -0,0 +1,170 @@
import struct
from construct import Struct, Int16ul, Int32ul, PaddedString
from enum import Enum
import re
from typing import Union, Final
class LogStatus(Enum):
NONE = 0
START = 1
CONTINUE = 2
END = 3
OTHER = 4
class PortalRegStatus(Enum):
NO_REG = 0
PORTAL = 1
SEGA_ID = 2
class ADBStatus(Enum):
UNKNOWN = 0
GOOD = 1
BAD_AMIE_ID = 2
ALREADY_REG = 3
BAN_SYS_USER = 4
BAN_SYS = 5
BAN_USER = 6
BAN_GEN = 7
LOCK_SYS_USER = 8
LOCK_SYS = 9
LOCK_USER = 10
class CompanyCodes(Enum):
NONE = 0
SEGA = 1
BAMCO = 2
KONAMI = 3
TAITO = 4
class ReaderFwVer(Enum): # Newer readers use a singly byte value
NONE = 0
TN32_10 = 1
TN32_12 = 2
OTHER = 9
def __str__(self) -> str:
if self == self.TN32_10:
return "TN32MSEC003S F/W Ver1.0"
elif self == self.TN32_12:
return "TN32MSEC003S F/W Ver1.2"
elif self == self.NONE:
return "Not Specified"
elif self == self.OTHER:
return "Unknown/Other"
else:
raise ValueError(f"Bad ReaderFwVer value {self.value}")
@classmethod
def from_byte(self, byte: bytes) -> Union["ReaderFwVer", int]:
try:
i = int.from_bytes(byte, 'little')
try:
return ReaderFwVer(i)
except ValueError:
return i
except TypeError:
return 0
class ADBHeaderException(Exception):
pass
HEADER_SIZE: Final[int] = 0x20
CMD_CODE_GOODBYE: Final[int] = 0x66
# everything is LE
class ADBHeader:
def __init__(self, magic: int, protocol_ver: int, cmd: int, length: int, status: int, game_id: Union[str, bytes], store_id: int, keychip_id: Union[str, bytes]) -> None:
self.magic = magic # u16
self.protocol_ver = protocol_ver # u16
self.cmd = cmd # u16
self.length = length # u16
try:
self.status = ADBStatus(status) # u16
except ValueError as e:
raise ADBHeaderException(f"Status is incorrect! {e}")
self.game_id = game_id # 4 char + \x00
self.store_id = store_id # u32
self.keychip_id = keychip_id# 11 char + \x00
if type(self.game_id) == bytes:
self.game_id = self.game_id.decode()
if type(self.keychip_id) == bytes:
self.keychip_id = self.keychip_id.decode()
self.game_id = self.game_id.replace("\0", "")
self.keychip_id = self.keychip_id.replace("\0", "")
if self.cmd != CMD_CODE_GOODBYE: # Games for some reason send no data with goodbye
self.validate()
@classmethod
def from_data(cls, data: bytes) -> "ADBHeader":
magic, protocol_ver, cmd, length, status, game_id, store_id, keychip_id = struct.unpack_from("<5H6sI12s", data)
head = cls(magic, protocol_ver, cmd, length, status, game_id, store_id, keychip_id)
if head.length != len(data):
raise ADBHeaderException(f"Length is incorrect! Expect {head.length}, got {len(data)}")
return head
def validate(self) -> bool:
if self.magic != 0xa13e:
raise ADBHeaderException(f"Magic {self.magic} != 0xa13e")
if self.protocol_ver < 0x1000:
raise ADBHeaderException(f"Protocol version {hex(self.protocol_ver)} is invalid!")
if re.fullmatch(r"^S[0-9A-Z]{3}[P]?$", self.game_id) is None:
raise ADBHeaderException(f"Game ID {self.game_id} is invalid!")
if self.store_id == 0:
raise ADBHeaderException(f"Store ID cannot be 0!")
if re.fullmatch(r"^A[0-9]{2}[E|X][0-9]{2}[A-HJ-NP-Z][0-9]{4}$", self.keychip_id) is None:
raise ADBHeaderException(f"Keychip ID {self.keychip_id} is invalid!")
return True
def make(self) -> bytes:
resp_struct = Struct(
"magic" / Int16ul,
"unknown" / Int16ul,
"response_code" / Int16ul,
"length" / Int16ul,
"status" / Int16ul,
"game_id" / PaddedString(6, 'utf_8'),
"store_id" / Int32ul,
"keychip_id" / PaddedString(12, 'utf_8'),
)
return resp_struct.build(dict(
magic=self.magic,
unknown=self.protocol_ver,
response_code=self.cmd,
length=self.length,
status=self.status.value,
game_id = self.game_id,
store_id = self.store_id,
keychip_id = self.keychip_id,
))
class ADBBaseRequest:
def __init__(self, data: bytes) -> None:
self.head = ADBHeader.from_data(data)
class ADBBaseResponse:
def __init__(self, code: int = 0, length: int = 0x20, status: int = 1, game_id: str = "SXXX", store_id: int = 1, keychip_id: str = "A69E01A8888", protocol_ver: int = 0x3087) -> None:
self.head = ADBHeader(0xa13e, protocol_ver, code, length, status, game_id, store_id, keychip_id)
@classmethod
def from_req(cls, req: ADBHeader, cmd: int, length: int = 0x20, status: int = 1) -> "ADBBaseResponse":
return cls(cmd, length, status, req.game_id, req.store_id, req.keychip_id, req.protocol_ver)
def append_padding(self, data: bytes):
"""Appends 0s to the end of the data until it's at the correct size"""
padding_size = self.head.length - len(data)
data += bytes(padding_size)
return data
def make(self) -> bytes:
return self.head.make()

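The new base classes above can be exercised end to end without a live reader: build a raw 0x20-byte header, parse it with ADBHeader.from_data, and echo a response with ADBBaseResponse.from_req. The sketch below is only an illustration, assuming the package layout from the __init__.py shown earlier; the game ID and keychip ID are made-up values chosen to satisfy the validation regexes.

import struct
from core.adb_handlers import ADBHeader, ADBBaseResponse

# Fake "lookup" request header (cmd 0x04): magic, protocol version, command,
# length, status, game ID, store ID, keychip ID - everything little-endian.
raw = struct.pack(
    "<5H6sI12s",
    0xA13E, 0x3087, 0x04, 0x20, 1,
    b"SDBT\x00\x00", 1, b"A69E01A8888\x00",
)

head = ADBHeader.from_data(raw)                  # validates magic, IDs, length
resp = ADBBaseResponse.from_req(head, cmd=0x06)  # echoes game/store/keychip back

resp_bytes = resp.append_padding(resp.make())    # pad to head.length if short
assert len(resp_bytes) == resp.head.length == 0x20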

@@ -0,0 +1,132 @@
from construct import Struct, Int16ul, Padding, Bytes, Int32ul, Int32sl
from .base import *
class Campaign:
def __init__(self) -> None:
self.id = 0
self.name = ""
self.announce_date = 0
self.start_date = 0
self.end_date = 0
self.distrib_start_date = 0
self.distrib_end_date = 0
def make(self) -> bytes:
name_padding = bytes(128 - len(self.name))
return Struct(
"id" / Int32ul,
"name" / Bytes(128),
"announce_date" / Int32ul,
"start_date" / Int32ul,
"end_date" / Int32ul,
"distrib_start_date" / Int32ul,
"distrib_end_date" / Int32ul,
Padding(8),
).build(dict(
id = self.id,
name = self.name.encode() + name_padding,
announce_date = self.announce_date,
start_date = self.start_date,
end_date = self.end_date,
distrib_start_date = self.distrib_start_date,
distrib_end_date = self.distrib_end_date,
))
class CampaignClear:
def __init__(self) -> None:
self.id = 0
self.entry_flag = 0
self.clear_flag = 0
def make(self) -> bytes:
return Struct(
"id" / Int32ul,
"entry_flag" / Int32ul,
"clear_flag" / Int32ul,
Padding(4),
).build(dict(
id = self.id,
entry_flag = self.entry_flag,
clear_flag = self.clear_flag,
))
class ADBCampaignResponse(ADBBaseResponse):
def __init__(self, game_id: str = "SXXX", store_id: int = 1, keychip_id: str = "A69E01A8888", code: int = 0x0C, length: int = 0x200, status: int = 1) -> None:
super().__init__(code, length, status, game_id, store_id, keychip_id)
self.campaigns = [Campaign(), Campaign(), Campaign()]
@classmethod
def from_req(cls, req: ADBHeader) -> "ADBCampaignResponse":
c = cls(req.game_id, req.store_id, req.keychip_id)
c.head.protocol_ver = req.protocol_ver
return c
def make(self) -> bytes:
body = b""
for c in self.campaigns:
body += c.make()
self.head.length = HEADER_SIZE + len(body)
return self.head.make() + body
class ADBOldCampaignRequest(ADBBaseRequest):
def __init__(self, data: bytes) -> None:
super().__init__(data)
self.campaign_id = struct.unpack_from("<I", data, 0x20)
class ADBOldCampaignResponse(ADBBaseResponse):
def __init__(self, game_id: str = "SXXX", store_id: int = 1, keychip_id: str = "A69E01A8888", code: int = 0x0C, length: int = 0x30, status: int = 1) -> None:
super().__init__(code, length, status, game_id, store_id, keychip_id)
self.info0 = 0
self.info1 = 0
self.info2 = 0
self.info3 = 0
@classmethod
def from_req(cls, req: ADBHeader) -> "ADBCampaignResponse":
c = cls(req.game_id, req.store_id, req.keychip_id)
c.head.protocol_ver = req.protocol_ver
return c
def make(self) -> bytes:
resp_struct = Struct(
"info0" / Int32sl,
"info1" / Int32sl,
"info2" / Int32sl,
"info3" / Int32sl,
).build(
info0 = self.info0,
info1 = self.info1,
info2 = self.info2,
info3 = self.info3,
)
self.head.length = HEADER_SIZE + len(resp_struct)
return self.head.make() + resp_struct
class ADBCampaignClearRequest(ADBBaseRequest):
def __init__(self, data: bytes) -> None:
super().__init__(data)
self.aime_id = struct.unpack_from("<i", data, 0x20)
class ADBCampaignClearResponse(ADBBaseResponse):
def __init__(self, game_id: str = "SXXX", store_id: int = 1, keychip_id: str = "A69E01A8888", code: int = 0x0E, length: int = 0x50, status: int = 1) -> None:
super().__init__(code, length, status, game_id, store_id, keychip_id)
self.campaign_clear_status = [CampaignClear(), CampaignClear(), CampaignClear()]
@classmethod
def from_req(cls, req: ADBHeader) -> "ADBCampaignResponse":
c = cls(req.game_id, req.store_id, req.keychip_id)
c.head.protocol_ver = req.protocol_ver
return c
def make(self) -> bytes:
body = b""
for c in self.campaign_clear_status:
body += c.make()
self.head.length = HEADER_SIZE + len(body)
return self.head.make() + body

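As a quick size check on the campaign handlers above: each Campaign.make() record is 4 + 128 + 5*4 + 8 = 160 bytes, so the three stub campaigns produce a 480-byte body and ADBCampaignResponse.make() returns 0x20 + 0x1E0 = 0x200 bytes, matching the default length of 0x200. A minimal sketch, assuming the classes are importable from core.adb_handlers as exported above:

from core.adb_handlers import ADBCampaignResponse, HEADER_SIZE

resp = ADBCampaignResponse(game_id="SDBT")   # ships three empty Campaign stubs
data = resp.make()
# 3 campaign records * 160 bytes each = 480 = 0x1E0 of body after the header.
assert len(data) == HEADER_SIZE + 0x1E0 == 0x200 == resp.head.length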

@@ -0,0 +1,84 @@
from construct import Struct, Int32sl, Padding, Int8ub, Int16sl
from typing import Union
from .base import *
class ADBFelicaLookupRequest(ADBBaseRequest):
def __init__(self, data: bytes) -> None:
super().__init__(data)
idm, pmm = struct.unpack_from(">QQ", data, 0x20)
self.idm = hex(idm)[2:].upper()
self.pmm = hex(pmm)[2:].upper()
class ADBFelicaLookupResponse(ADBBaseResponse):
def __init__(self, access_code: str = None, game_id: str = "SXXX", store_id: int = 1, keychip_id: str = "A69E01A8888", code: int = 0x03, length: int = 0x30, status: int = 1) -> None:
super().__init__(code, length, status, game_id, store_id, keychip_id)
self.access_code = access_code if access_code is not None else "00000000000000000000"
@classmethod
def from_req(cls, req: ADBHeader, access_code: str = None) -> "ADBFelicaLookupResponse":
c = cls(access_code, req.game_id, req.store_id, req.keychip_id)
c.head.protocol_ver = req.protocol_ver
return c
def make(self) -> bytes:
resp_struct = Struct(
"felica_idx" / Int32ul,
"access_code" / Int8ub[10],
Padding(2)
).build(dict(
felica_idx = 0,
access_code = bytes.fromhex(self.access_code)
))
self.head.length = HEADER_SIZE + len(resp_struct)
return self.head.make() + resp_struct
class ADBFelicaLookup2Request(ADBBaseRequest):
def __init__(self, data: bytes) -> None:
super().__init__(data)
self.random = struct.unpack_from("<16s", data, 0x20)[0]
idm, pmm = struct.unpack_from(">QQ", data, 0x30)
self.card_key_ver, self.write_ct, self.maca, company, fw_ver, self.dfc = struct.unpack_from("<16s16sQccH", data, 0x40)
self.idm = hex(idm)[2:].upper()
self.pmm = hex(pmm)[2:].upper()
self.company = CompanyCodes(int.from_bytes(company, 'little'))
self.fw_ver = ReaderFwVer.from_byte(fw_ver)
class ADBFelicaLookup2Response(ADBBaseResponse):
def __init__(self, user_id: Union[int, None] = None, access_code: Union[str, None] = None, game_id: str = "SXXX", store_id: int = 1, keychip_id: str = "A69E01A8888", code: int = 0x12, length: int = 0x130, status: int = 1) -> None:
super().__init__(code, length, status, game_id, store_id, keychip_id)
self.user_id = user_id if user_id is not None else -1
self.access_code = access_code if access_code is not None else "00000000000000000000"
self.company = CompanyCodes.SEGA
self.portal_status = PortalRegStatus.NO_REG
@classmethod
def from_req(cls, req: ADBHeader, user_id: Union[int, None] = None, access_code: Union[str, None] = None) -> "ADBFelicaLookup2Response":
c = cls(user_id, access_code, req.game_id, req.store_id, req.keychip_id)
c.head.protocol_ver = req.protocol_ver
return c
def make(self) -> bytes:
resp_struct = Struct(
"user_id" / Int32sl,
"relation1" / Int32sl,
"relation2" / Int32sl,
"access_code" / Int8ub[10],
"portal_status" / Int8ub,
"company_code" / Int8ub,
Padding(8),
"auth_key" / Int8ub[256],
).build(dict(
user_id = self.user_id,
relation1 = -1, # Unsupported
relation2 = -1, # Unsupported
access_code = bytes.fromhex(self.access_code),
portal_status = self.portal_status.value,
company_code = self.company.value,
auth_key = [0] * 256 # Unsupported
))
self.head.length = HEADER_SIZE + len(resp_struct)
return self.head.make() + resp_struct

core/adb_handlers/log.py
@@ -0,0 +1,56 @@
from construct import Struct, Padding, Int8sl
from typing import Final, List
from .base import *
NUM_LOGS: Final[int] = 20
NUM_LEN_LOG_EX: Final[int] = 48
class AmLogEx:
def __init__(self, data: bytes) -> None:
self.aime_id, status, self.user_id, self.credit_ct, self.bet_ct, self.won_ct, self.local_time, \
self.tseq, self.place_id = struct.unpack("<IIQiii4xQiI", data)
self.status = LogStatus(status)
class ADBStatusLogRequest(ADBBaseRequest):
def __init__(self, data: bytes) -> None:
super().__init__(data)
self.aime_id, status = struct.unpack_from("<II", data, 0x20)
self.status = LogStatus(status)
class ADBLogRequest(ADBBaseRequest):
def __init__(self, data: bytes) -> None:
super().__init__(data)
self.aime_id, status, self.user_id, self.credit_ct, self.bet_ct, self.won_ct = struct.unpack_from("<IIQiii", data, 0x20)
self.status = LogStatus(status)
class ADBLogExRequest(ADBBaseRequest):
def __init__(self, data: bytes) -> None:
super().__init__(data)
self.logs: List[AmLogEx] = []
for x in range(NUM_LOGS):
self.logs.append(AmLogEx(data[0x20 + (NUM_LEN_LOG_EX * x): 0x50 + (NUM_LEN_LOG_EX * x)]))
self.num_logs = struct.unpack_from("<I", data, 0x03E0)[0]
class ADBLogExResponse(ADBBaseResponse):
def __init__(self, game_id: str = "SXXX", store_id: int = 1, keychip_id: str = "A69E01A8888", protocol_ver: int = 12423, code: int = 20, length: int = 64, status: int = 1) -> None:
super().__init__(code, length, status, game_id, store_id, keychip_id, protocol_ver)
@classmethod
def from_req(cls, req: ADBHeader) -> "ADBLogExResponse":
c = cls(req.game_id, req.store_id, req.keychip_id, req.protocol_ver)
return c
def make(self) -> bytes:
resp_struct = Struct(
"log_result" / Int8sl[NUM_LOGS],
Padding(12)
)
body = resp_struct.build(dict(
log_result = [1] * NUM_LOGS
))
self.head.length = HEADER_SIZE + len(body)
return self.head.make() + body

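The constants above encode the aime_log_ex layout: each AmLogEx record unpacks with "<IIQiii4xQiI", which is 4+4+8+4+4+4+4+8+4+4 = 48 bytes (NUM_LEN_LOG_EX), so twenty records occupy offsets 0x20 through 0x3E0 and the trailing num_logs u32 sits at 0x03E0, exactly where ADBLogExRequest reads it. The arithmetic can be checked standalone:

import struct

NUM_LOGS, NUM_LEN_LOG_EX = 20, 48
assert struct.calcsize("<IIQiii4xQiI") == NUM_LEN_LOG_EX   # one AmLogEx record
assert 0x20 + NUM_LOGS * NUM_LEN_LOG_EX == 0x03E0          # offset of num_logs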

@@ -0,0 +1,81 @@
from construct import Struct, Int32sl, Padding, Int8sl
from typing import Union
from .base import *
class ADBLookupException(Exception):
pass
class ADBLookupRequest(ADBBaseRequest):
def __init__(self, data: bytes) -> None:
super().__init__(data)
self.access_code = data[0x20:0x2A].hex()
company_code, fw_version, self.serial_number = struct.unpack_from("<bbI", data, 0x2A)
try:
self.company_code = CompanyCodes(company_code)
except ValueError as e:
raise ADBLookupException(f"Invalid company code - {e}")
self.fw_version = ReaderFwVer.from_byte(fw_version)
class ADBLookupResponse(ADBBaseResponse):
def __init__(self, user_id: Union[int, None], game_id: str = "SXXX", store_id: int = 1, keychip_id: str = "A69E01A8888", code: int = 0x06, length: int = 0x30, status: int = 1) -> None:
super().__init__(code, length, status, game_id, store_id, keychip_id)
self.user_id = user_id if user_id is not None else -1
self.portal_reg = PortalRegStatus.NO_REG
@classmethod
def from_req(cls, req: ADBHeader, user_id: Union[int, None]) -> "ADBLookupResponse":
c = cls(user_id, req.game_id, req.store_id, req.keychip_id)
c.head.protocol_ver = req.protocol_ver
return c
def make(self):
resp_struct = Struct(
"user_id" / Int32sl,
"portal_reg" / Int8sl,
Padding(11)
)
body = resp_struct.build(dict(
user_id = self.user_id,
portal_reg = self.portal_reg.value
))
self.head.length = HEADER_SIZE + len(body)
return self.head.make() + body
class ADBLookupExResponse(ADBBaseResponse):
def __init__(self, user_id: Union[int, None], game_id: str = "SXXX", store_id: int = 1, keychip_id: str = "A69E01A8888",
code: int = 0x10, length: int = 0x130, status: int = 1) -> None:
super().__init__(code, length, status, game_id, store_id, keychip_id)
self.user_id = user_id if user_id is not None else -1
self.portal_reg = PortalRegStatus.NO_REG
@classmethod
def from_req(cls, req: ADBHeader, user_id: Union[int, None]) -> "ADBLookupExResponse":
c = cls(user_id, req.game_id, req.store_id, req.keychip_id)
c.head.protocol_ver = req.protocol_ver
return c
def make(self):
resp_struct = Struct(
"user_id" / Int32sl,
"portal_reg" / Int8sl,
Padding(3),
"auth_key" / Int8sl[256],
"relation1" / Int32sl,
"relation2" / Int32sl,
)
body = resp_struct.build(dict(
user_id = self.user_id,
portal_reg = self.portal_reg.value,
auth_key = [0] * 256,
relation1 = -1,
relation2 = -1
))
self.head.length = HEADER_SIZE + len(body)
return self.head.make() + body

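Similarly for the extended lookup response: the body built in ADBLookupExResponse.make() is 4 (user_id) + 1 (portal_reg) + 3 (padding) + 256 (auth_key) + 4 + 4 (relations) = 272 = 0x110 bytes, so the full packet is 0x130 bytes, in line with the default length. A minimal sketch, again assuming the exports shown in __init__.py:

from core.adb_handlers import ADBLookupExResponse, HEADER_SIZE

resp = ADBLookupExResponse(user_id=None, game_id="SDBT")   # unknown card -> user_id -1
data = resp.make()
assert len(data) == HEADER_SIZE + 0x110 == 0x130 == resp.head.length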

@@ -2,27 +2,17 @@ from twisted.internet.protocol import Factory, Protocol
import logging, coloredlogs
from Crypto.Cipher import AES
import struct
from typing import Dict, Any
from typing import Dict, Tuple, Callable, Union
from typing_extensions import Final
from logging.handlers import TimedRotatingFileHandler
from core.config import CoreConfig
from core.data import Data
from .adb_handlers import *
class AimedbProtocol(Protocol):
AIMEDB_RESPONSE_CODES = {
"felica_lookup": 0x03,
"lookup": 0x06,
"log": 0x0A,
"campaign": 0x0C,
"touch": 0x0E,
"lookup2": 0x10,
"felica_lookup2": 0x12,
"log2": 0x14,
"hello": 0x65,
}
request_list: Dict[int, Any] = {}
request_list: Dict[int, Tuple[Callable[[bytes, int], Union[ADBBaseResponse, bytes]], int, str]] = {}
def __init__(self, core_cfg: CoreConfig) -> None:
self.logger = logging.getLogger("aimedb")
@@ -32,16 +22,27 @@ class AimedbProtocol(Protocol):
self.logger.error("!!!KEY NOT SET!!!")
exit(1)
self.request_list[0x01] = self.handle_felica_lookup
self.request_list[0x04] = self.handle_lookup
self.request_list[0x05] = self.handle_register
self.request_list[0x09] = self.handle_log
self.request_list[0x0B] = self.handle_campaign
self.request_list[0x0D] = self.handle_touch
self.request_list[0x0F] = self.handle_lookup2
self.request_list[0x11] = self.handle_felica_lookup2
self.request_list[0x13] = self.handle_log2
self.request_list[0x64] = self.handle_hello
self.register_handler(0x01, 0x03, self.handle_felica_lookup, 'felica_lookup')
self.register_handler(0x02, 0x03, self.handle_felica_register, 'felica_register')
self.register_handler(0x04, 0x06, self.handle_lookup, 'lookup')
self.register_handler(0x05, 0x06, self.handle_register, 'register')
self.register_handler(0x07, 0x08, self.handle_status_log, 'status_log')
self.register_handler(0x09, 0x0A, self.handle_log, 'aime_log')
self.register_handler(0x0B, 0x0C, self.handle_campaign, 'campaign')
self.register_handler(0x0D, 0x0E, self.handle_campaign_clear, 'campaign_clear')
self.register_handler(0x0F, 0x10, self.handle_lookup_ex, 'lookup_ex')
self.register_handler(0x11, 0x12, self.handle_felica_lookup_ex, 'felica_lookup_ex')
self.register_handler(0x13, 0x14, self.handle_log_ex, 'aime_log_ex')
self.register_handler(0x64, 0x65, self.handle_hello, 'hello')
self.register_handler(0x66, 0, self.handle_goodbye, 'goodbye')
def register_handler(self, cmd: int, resp:int, handler: Callable[[bytes, int], Union[ADBBaseResponse, bytes]], name: str) -> None:
self.request_list[cmd] = (handler, resp, name)
def append_padding(self, data: bytes):
"""Appends 0s to the end of the data until it's at the correct size"""
@@ -63,202 +64,250 @@ class AimedbProtocol(Protocol):
try:
decrypted = cipher.decrypt(data)
except Exception:
self.logger.error(f"Failed to decrypt {data.hex()}")
except Exception as e:
self.logger.error(f"Failed to decrypt {data.hex()} because {e}")
return None
self.logger.debug(f"{self.transport.getPeer().host} wrote {decrypted.hex()}")
if not decrypted[1] == 0xA1 and not decrypted[0] == 0x3E:
self.logger.error(f"Bad magic")
return None
try:
head = ADBHeader.from_data(decrypted)
except ADBHeaderException as e:
self.logger.error(f"Error parsing ADB header: {e}")
try:
encrypted = cipher.encrypt(ADBBaseResponse().make())
self.transport.write(encrypted)
req_code = decrypted[4]
if req_code == 0x66:
self.logger.info(f"goodbye from {self.transport.getPeer().host}")
self.transport.loseConnection()
except Exception as e:
self.logger.error(f"Failed to encrypt default response because {e}")
return
try:
resp = self.request_list[req_code](decrypted)
encrypted = cipher.encrypt(resp)
self.logger.debug(f"Response {resp.hex()}")
if head.keychip_id == "ABCD1234567" or head.store_id == 0xfff0:
self.logger.warning(f"Request from uninitialized AMLib: {vars(head)}")
handler, resp_code, name = self.request_list.get(head.cmd, (self.handle_default, None, 'default'))
if resp_code is None:
self.logger.warning(f"No handler for cmd {hex(head.cmd)}")
elif resp_code > 0:
self.logger.info(f"{name} from {head.keychip_id} ({head.game_id}) @ {self.transport.getPeer().host}")
resp = handler(decrypted, resp_code)
if type(resp) == ADBBaseResponse or issubclass(type(resp), ADBBaseResponse):
resp_bytes = resp.make()
if len(resp_bytes) != resp.head.length:
resp_bytes = self.append_padding(resp_bytes)
elif type(resp) == bytes:
resp_bytes = resp
elif resp is None: # Nothing to send, probably a goodbye
return
else:
raise TypeError(f"Unsupported type returned by ADB handler for {name}: {type(resp)}")
try:
encrypted = cipher.encrypt(resp_bytes)
self.logger.debug(f"Response {resp_bytes.hex()}")
self.transport.write(encrypted)
except KeyError:
self.logger.error(f"Unknown command code {hex(req_code)}")
return None
except Exception as e:
self.logger.error(f"Failed to encrypt {resp_bytes.hex()} because {e}")
def handle_default(self, data: bytes, resp_code: int, length: int = 0x20) -> ADBBaseResponse:
req = ADBHeader.from_data(data)
return ADBBaseResponse(resp_code, length, 1, req.game_id, req.store_id, req.keychip_id, req.protocol_ver)
except ValueError as e:
self.logger.error(f"Failed to encrypt {resp.hex()} because {e}")
return None
def handle_hello(self, data: bytes, resp_code: int) -> ADBBaseResponse:
return self.handle_default(data, resp_code)
def handle_campaign(self, data: bytes) -> bytes:
self.logger.info(f"campaign from {self.transport.getPeer().host}")
ret = struct.pack(
"<5H",
0xA13E,
0x3087,
self.AIMEDB_RESPONSE_CODES["campaign"],
0x0200,
0x0001,
)
return self.append_padding(ret)
def handle_campaign(self, data: bytes, resp_code: int) -> ADBBaseResponse:
h = ADBHeader.from_data(data)
if h.protocol_ver >= 0x3030:
req = h
resp = ADBCampaignResponse.from_req(req)
def handle_hello(self, data: bytes) -> bytes:
self.logger.info(f"hello from {self.transport.getPeer().host}")
ret = struct.pack(
"<5H", 0xA13E, 0x3087, self.AIMEDB_RESPONSE_CODES["hello"], 0x0020, 0x0001
)
return self.append_padding(ret)
def handle_lookup(self, data: bytes) -> bytes:
luid = data[0x20:0x2A].hex()
user_id = self.data.card.get_user_id_from_card(access_code=luid)
if user_id is None:
user_id = -1
self.logger.info(
f"lookup from {self.transport.getPeer().host}: luid {luid} -> user_id {user_id}"
)
ret = struct.pack(
"<5H", 0xA13E, 0x3087, self.AIMEDB_RESPONSE_CODES["lookup"], 0x0130, 0x0001
)
ret += bytes(0x20 - len(ret))
if user_id is None:
ret += struct.pack("<iH", -1, 0)
else:
ret += struct.pack("<l", user_id)
return self.append_padding(ret)
req = ADBOldCampaignRequest(data)
self.logger.info(f"Legacy campaign request for campaign {req.campaign_id} (protocol version {hex(h.protocol_ver)})")
resp = ADBOldCampaignResponse.from_req(req.head)
# We don't currently support campaigns
return resp
def handle_lookup2(self, data: bytes) -> bytes:
self.logger.info(f"lookup2")
def handle_lookup(self, data: bytes, resp_code: int) -> ADBBaseResponse:
req = ADBLookupRequest(data)
user_id = self.data.card.get_user_id_from_card(req.access_code)
is_banned = self.data.card.get_card_banned(req.access_code)
is_locked = self.data.card.get_card_locked(req.access_code)
ret = bytearray(self.handle_lookup(data))
ret[4] = self.AIMEDB_RESPONSE_CODES["lookup2"]
return bytes(ret)
def handle_felica_lookup(self, data: bytes) -> bytes:
idm = data[0x20:0x28].hex()
pmm = data[0x28:0x30].hex()
access_code = self.data.card.to_access_code(idm)
if is_banned and is_locked:
ret.head.status = ADBStatus.BAN_SYS_USER
elif is_banned:
ret.head.status = ADBStatus.BAN_SYS
elif is_locked:
ret.head.status = ADBStatus.LOCK_USER
ret = ADBLookupResponse.from_req(req.head, user_id)
self.logger.info(
f"felica_lookup from {self.transport.getPeer().host}: idm {idm} pmm {pmm} -> access_code {access_code}"
f"access_code {req.access_code} -> user_id {ret.user_id}"
)
return ret
ret = struct.pack(
"<5H",
0xA13E,
0x3087,
self.AIMEDB_RESPONSE_CODES["felica_lookup"],
0x0030,
0x0001,
def handle_lookup_ex(self, data: bytes, resp_code: int) -> ADBBaseResponse:
req = ADBLookupRequest(data)
user_id = self.data.card.get_user_id_from_card(req.access_code)
is_banned = self.data.card.get_card_banned(req.access_code)
is_locked = self.data.card.get_card_locked(req.access_code)
ret = ADBLookupExResponse.from_req(req.head, user_id)
if is_banned and is_locked:
ret.head.status = ADBStatus.BAN_SYS_USER
elif is_banned:
ret.head.status = ADBStatus.BAN_SYS
elif is_locked:
ret.head.status = ADBStatus.LOCK_USER
self.logger.info(
f"access_code {req.access_code} -> user_id {ret.user_id}"
)
ret += bytes(26)
ret += bytes.fromhex(access_code)
return ret
return self.append_padding(ret)
def handle_felica_lookup(self, data: bytes, resp_code: int) -> bytes:
"""
On official, I think a card has to be registered for this to actually work, but
I'm making the executive decision to not implement that and just kick back our
faux generated access code. The real felica IDm -> access code conversion is done
on the ADB server, which we do not and will not ever have access to. Because we can
assure that all IDms will be unique, this basic 0-padded hex -> int conversion will
be fine.
"""
req = ADBFelicaLookupRequest(data)
ac = self.data.card.to_access_code(req.idm)
self.logger.info(
f"idm {req.idm} ipm {req.pmm} -> access_code {ac}"
)
return ADBFelicaLookupResponse.from_req(req.head, ac)
def handle_felica_lookup2(self, data: bytes) -> bytes:
idm = data[0x30:0x38].hex()
pmm = data[0x38:0x40].hex()
access_code = self.data.card.to_access_code(idm)
def handle_felica_register(self, data: bytes, resp_code: int) -> bytes:
"""
I've never seen this used.
"""
req = ADBFelicaLookupRequest(data)
ac = self.data.card.to_access_code(req.idm)
if self.config.server.allow_user_registration:
user_id = self.data.user.create_user()
if user_id is None:
self.logger.error("Failed to register user!")
user_id = -1
else:
card_id = self.data.card.create_card(user_id, ac)
if card_id is None:
self.logger.error("Failed to register card!")
user_id = -1
self.logger.info(
f"Register access code {ac} (IDm: {req.idm} PMm: {req.pmm}) -> user_id {user_id}"
)
else:
self.logger.info(
f"Registration blocked!: access code {ac} (IDm: {req.idm} PMm: {req.pmm})"
)
return ADBFelicaLookupResponse.from_req(req.head, ac)
def handle_felica_lookup_ex(self, data: bytes, resp_code: int) -> bytes:
req = ADBFelicaLookup2Request(data)
access_code = self.data.card.to_access_code(req.idm)
user_id = self.data.card.get_user_id_from_card(access_code=access_code)
if user_id is None:
user_id = -1
self.logger.info(
f"felica_lookup2 from {self.transport.getPeer().host}: idm {idm} ipm {pmm} -> access_code {access_code} user_id {user_id}"
f"idm {req.idm} ipm {req.pmm} -> access_code {access_code} user_id {user_id}"
)
ret = struct.pack(
"<5H",
0xA13E,
0x3087,
self.AIMEDB_RESPONSE_CODES["felica_lookup2"],
0x0140,
0x0001,
)
ret += bytes(22)
ret += struct.pack("<lq", user_id, -1) # first -1 is ext_id, 3rd is access code
ret += bytes.fromhex(access_code)
ret += struct.pack("<l", 1)
return ADBFelicaLookup2Response.from_req(req.head, user_id, access_code)
return self.append_padding(ret)
def handle_campaign_clear(self, data: bytes, resp_code: int) -> ADBBaseResponse:
req = ADBCampaignClearRequest(data)
def handle_touch(self, data: bytes) -> bytes:
self.logger.info(f"touch from {self.transport.getPeer().host}")
ret = struct.pack(
"<5H", 0xA13E, 0x3087, self.AIMEDB_RESPONSE_CODES["touch"], 0x0050, 0x0001
)
ret += bytes(5)
ret += struct.pack("<3H", 0x6F, 0, 1)
resp = ADBCampaignClearResponse.from_req(req.head)
return self.append_padding(ret)
# We don't support campaign stuff
return resp
def handle_register(self, data: bytes, resp_code: int) -> bytes:
req = ADBLookupRequest(data)
user_id = -1
def handle_register(self, data: bytes) -> bytes:
luid = data[0x20:0x2A].hex()
if self.config.server.allow_user_registration:
user_id = self.data.user.create_user()
if user_id is None:
user_id = -1
self.logger.error("Failed to register user!")
user_id = -1
else:
card_id = self.data.card.create_card(user_id, luid)
card_id = self.data.card.create_card(user_id, req.access_code)
if card_id is None:
user_id = -1
self.logger.error("Failed to register card!")
user_id = -1
self.logger.info(
f"register from {self.transport.getPeer().host}: luid {luid} -> user_id {user_id}"
f"Register access code {req.access_code} -> user_id {user_id}"
)
else:
self.logger.info(
f"register from {self.transport.getPeer().host} blocked!: luid {luid}"
f"Registration blocked!: access code {req.access_code}"
)
user_id = -1
ret = struct.pack(
"<5H",
0xA13E,
0x3087,
self.AIMEDB_RESPONSE_CODES["lookup"],
0x0030,
0x0001 if user_id > -1 else 0,
)
ret += bytes(0x20 - len(ret))
ret += struct.pack("<l", user_id)
resp = ADBLookupResponse.from_req(req.head, user_id)
if resp.user_id <= 0:
resp.head.status = ADBStatus.BAN_SYS # Closest we can get to a "You cannot register"
return self.append_padding(ret)
return resp
def handle_log(self, data: bytes) -> bytes:
# TODO: Save aimedb logs
self.logger.info(f"log from {self.transport.getPeer().host}")
ret = struct.pack(
"<5H", 0xA13E, 0x3087, self.AIMEDB_RESPONSE_CODES["log"], 0x0020, 0x0001
)
return self.append_padding(ret)
# TODO: Save these in some capacity, as deemed relevant
def handle_status_log(self, data: bytes, resp_code: int) -> bytes:
req = ADBStatusLogRequest(data)
self.logger.info(f"User {req.aime_id} logged {req.status.name} event")
return ADBBaseResponse(resp_code, 0x20, 1, req.head.game_id, req.head.store_id, req.head.keychip_id, req.head.protocol_ver)
def handle_log2(self, data: bytes) -> bytes:
self.logger.info(f"log2 from {self.transport.getPeer().host}")
ret = struct.pack(
"<5H", 0xA13E, 0x3087, self.AIMEDB_RESPONSE_CODES["log2"], 0x0040, 0x0001
)
ret += bytes(22)
ret += struct.pack("H", 1)
def handle_log(self, data: bytes, resp_code: int) -> bytes:
req = ADBLogRequest(data)
self.logger.info(f"User {req.aime_id} logged {req.status.name} event, credit_ct: {req.credit_ct} bet_ct: {req.bet_ct} won_ct: {req.won_ct}")
return ADBBaseResponse(resp_code, 0x20, 1, req.head.game_id, req.head.store_id, req.head.keychip_id, req.head.protocol_ver)
return self.append_padding(ret)
def handle_log_ex(self, data: bytes, resp_code: int) -> bytes:
req = ADBLogExRequest(data)
strs = []
self.logger.info(f"Recieved {req.num_logs} or {len(req.logs)} logs")
for x in range(req.num_logs):
self.logger.debug(f"User {req.logs[x].aime_id} logged {req.logs[x].status.name} event, credit_ct: {req.logs[x].credit_ct} bet_ct: {req.logs[x].bet_ct} won_ct: {req.logs[x].won_ct}")
return ADBLogExResponse.from_req(req.head)
def handle_goodbye(self, data: bytes, resp_code: int) -> None:
self.logger.info(f"goodbye from {self.transport.getPeer().host}")
self.transport.loseConnection()
return
class AimedbFactory(Factory):
protocol = AimedbProtocol

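One note on handle_felica_lookup above: the docstring says the IDm is turned into an access code by a simple zero-padded hex-to-int conversion, but the actual converter (self.data.card.to_access_code) is not part of this diff. Purely as a hypothetical illustration of that description, not the project's real implementation:

# Hypothetical sketch of a "0-padded hex -> int" IDm to access code mapping;
# the real logic lives in self.data.card.to_access_code, which is not shown here.
def idm_to_access_code(idm_hex: str) -> str:
    return str(int(idm_hex, 16)).zfill(20)   # 20-digit, zero-padded decimal string

code = idm_to_access_code("012E4C6A8B2D9F01")
assert len(code) == 20 and code.isdigit()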

@@ -1,4 +1,4 @@
from typing import Dict, List, Any, Optional, Tuple, Union
from typing import Dict, List, Any, Optional, Tuple, Union, Final
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
from twisted.web.http import Request
@@ -6,18 +6,87 @@ from datetime import datetime
import pytz
import base64
import zlib
import json
from enum import Enum
from Crypto.PublicKey import RSA
from Crypto.Hash import SHA
from Crypto.Signature import PKCS1_v1_5
from time import strptime
from os import path
import urllib.parse
import math
from core.config import CoreConfig
from core.utils import Utils
from core.data import Data
from core.const import *
BILLING_DT_FORMAT: Final[str] = "%Y%m%d%H%M%S"
class DLIMG_TYPE(Enum):
app = 0
opt = 1
class ALLNET_STAT(Enum):
ok = 0
bad_game = -1
bad_machine = -2
bad_shop = -3
class DLI_STATUS(Enum):
START = 0
GET_DOWNLOAD_CONFIGURATION = 1
WAIT_DOWNLOAD = 2
DOWNLOADING = 3
NOT_SPECIFY_DLI = 100
ONLY_POST_REPORT = 101
STOPPED_BY_APP_RELEASE = 102
STOPPED_BY_OPT_RELEASE = 103
DOWNLOAD_COMPLETE_RECENTLY = 110
DOWNLOAD_COMPLETE_WAIT_RELEASE_TIME = 120
DOWNLOAD_COMPLETE_BUT_NOT_SYNC_SERVER = 121
DOWNLOAD_COMPLETE_BUT_NOT_FIRST_RESUME = 122
DOWNLOAD_COMPLETE_BUT_NOT_FIRST_LAUNCH = 123
DOWNLOAD_COMPLETE_WAIT_UPDATE = 124
DOWNLOAD_COMPLETE_AND_ALREADY_UPDATE = 130
ERROR_AUTH_FAILURE = 200
ERROR_GET_DLI_HTTP = 300
ERROR_GET_DLI = 301
ERROR_PARSE_DLI = 302
ERROR_INVALID_GAME_ID = 303
ERROR_INVALID_IMAGE_LIST = 304
ERROR_GET_DLI_APP = 305
ERROR_GET_BOOT_ID = 400
ERROR_ACCESS_SERVER = 401
ERROR_NO_IMAGE = 402
ERROR_ACCESS_IMAGE = 403
ERROR_DOWNLOAD_APP = 500
ERROR_DOWNLOAD_OPT = 501
ERROR_DISK_FULL = 600
ERROR_UNINSTALL = 601
ERROR_INSTALL_APP = 602
ERROR_INSTALL_OPT = 603
ERROR_GET_DLI_INTERNAL = 900
ERROR_ICF = 901
ERROR_CHECK_RELEASE_INTERNAL = 902
UNKNOWN = 999 # Not the actual enum val but it needs to be here as a catch-all
@classmethod
def from_int(cls, num: int) -> "DLI_STATUS":
try:
return cls(num)
except ValueError:
return cls.UNKNOWN
class AllnetServlet:
def __init__(self, core_cfg: CoreConfig, cfg_folder: str):
@@ -72,8 +141,16 @@ class AllnetServlet:
def handle_poweron(self, request: Request, _: Dict):
request_ip = Utils.get_ip_addr(request)
pragma_header = request.getHeader('Pragma')
is_dfi = pragma_header is not None and pragma_header == "DFI"
try:
req_dict = self.allnet_req_to_dict(request.content.getvalue())
if is_dfi:
req_urlencode = self.from_dfi(request.content.getvalue())
else:
req_urlencode = request.content.getvalue().decode()
req_dict = self.allnet_req_to_dict(req_urlencode)
if req_dict is None:
raise AllnetRequestException()
@@ -97,16 +174,86 @@ class AllnetServlet:
else:
resp = AllnetPowerOnResponse()
self.logger.debug(f"Allnet request: {vars(req)}")
self.logger.debug(f"Allnet request: {vars(req)}")
machine = self.data.arcade.get_machine(req.serial)
if machine is None and not self.config.server.allow_unregistered_serials:
msg = f"Unrecognised serial {req.serial} attempted allnet auth from {request_ip}."
self.data.base.log_event(
"allnet", "ALLNET_AUTH_UNKNOWN_SERIAL", logging.WARN, msg
)
self.logger.warning(msg)
resp.stat = ALLNET_STAT.bad_machine.value
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
if machine is not None:
arcade = self.data.arcade.get_arcade(machine["arcade"])
if self.config.server.check_arcade_ip:
if arcade["ip"] and arcade["ip"] is not None and arcade["ip"] != req.ip:
msg = f"Serial {req.serial} attempted allnet auth from bad IP {req.ip} (expected {arcade['ip']})."
self.data.base.log_event(
"allnet", "ALLNET_AUTH_BAD_IP", logging.ERROR, msg
)
self.logger.warning(msg)
resp.stat = ALLNET_STAT.bad_shop.value
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
elif (not arcade["ip"] or arcade["ip"] is None) and self.config.server.strict_ip_checking:
msg = f"Serial {req.serial} attempted allnet auth from bad IP {req.ip}, but arcade {arcade['id']} has no IP set! (strict checking enabled)."
self.data.base.log_event(
"allnet", "ALLNET_AUTH_NO_SHOP_IP", logging.ERROR, msg
)
self.logger.warning(msg)
resp.stat = ALLNET_STAT.bad_shop.value
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
country = (
arcade["country"] if machine["country"] is None else machine["country"]
)
if country is None:
country = AllnetCountryCode.JAPAN.value
resp.country = country
resp.place_id = arcade["id"]
resp.allnet_id = machine["id"]
resp.name = arcade["name"] if arcade["name"] is not None else ""
resp.nickname = arcade["nickname"] if arcade["nickname"] is not None else ""
resp.region0 = (
arcade["region_id"]
if arcade["region_id"] is not None
else AllnetJapanRegionId.AICHI.value
)
resp.region_name0 = (
arcade["state"]
if arcade["state"] is not None
else AllnetJapanRegionId.AICHI.name
)
resp.region_name1 = (
arcade["country"]
if arcade["country"] is not None
else AllnetCountryCode.JAPAN.value
)
resp.region_name2 = arcade["city"] if arcade["city"] is not None else ""
resp.client_timezone = ( # lmao
arcade["timezone"] if arcade["timezone"] is not None else "+0900" if req.format_ver == 3 else "+09:00"
)
if req.game_id not in self.uri_registry:
if not self.config.server.is_develop:
msg = f"Unrecognised game {req.game_id} attempted allnet auth from {request_ip}."
self.data.base.log_event(
"allnet", "ALLNET_AUTH_UNKNOWN_GAME", logging.WARN, msg
)
self.logger.warn(msg)
self.logger.warning(msg)
resp.stat = -1
resp.stat = ALLNET_STAT.bad_game.value
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
@@ -125,51 +272,6 @@ class AllnetServlet:
resp.uri, resp.host = self.uri_registry[req.game_id]
machine = self.data.arcade.get_machine(req.serial)
if machine is None and not self.config.server.allow_unregistered_serials:
msg = f"Unrecognised serial {req.serial} attempted allnet auth from {request_ip}."
self.data.base.log_event(
"allnet", "ALLNET_AUTH_UNKNOWN_SERIAL", logging.WARN, msg
)
self.logger.warn(msg)
resp.stat = -2
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
if machine is not None:
arcade = self.data.arcade.get_arcade(machine["arcade"])
country = (
arcade["country"] if machine["country"] is None else machine["country"]
)
if country is None:
country = AllnetCountryCode.JAPAN.value
resp.country = country
resp.place_id = arcade["id"]
resp.allnet_id = machine["id"]
resp.name = arcade["name"] if arcade["name"] is not None else ""
resp.nickname = arcade["nickname"] if arcade["nickname"] is not None else ""
resp.region0 = (
arcade["region_id"]
if arcade["region_id"] is not None
else AllnetJapanRegionId.AICHI.value
)
resp.region_name0 = (
arcade["country"]
if arcade["country"] is not None
else AllnetCountryCode.JAPAN.value
)
resp.region_name1 = (
arcade["state"]
if arcade["state"] is not None
else AllnetJapanRegionId.AICHI.name
)
resp.region_name2 = arcade["city"] if arcade["city"] is not None else ""
resp.client_timezone = (
arcade["timezone"] if arcade["timezone"] is not None else "+0900"
)
int_ver = req.ver.replace(".", "")
resp.uri = resp.uri.replace("$v", int_ver)
resp.host = resp.host.replace("$v", int_ver)
@@ -183,12 +285,24 @@ class AllnetServlet:
self.logger.debug(f"Allnet response: {resp_dict}")
resp_str += "\n"
"""if is_dfi:
request.responseHeaders.addRawHeader('Pragma', 'DFI')
return self.to_dfi(resp_str)"""
return resp_str.encode("utf-8")
def handle_dlorder(self, request: Request, _: Dict):
request_ip = Utils.get_ip_addr(request)
pragma_header = request.getHeader('Pragma')
is_dfi = pragma_header is not None and pragma_header == "DFI"
try:
req_dict = self.allnet_req_to_dict(request.content.getvalue())
if is_dfi:
req_urlencode = self.from_dfi(request.content.getvalue())
else:
req_urlencode = request.content.getvalue().decode()
req_dict = self.allnet_req_to_dict(req_urlencode)
if req_dict is None:
raise AllnetRequestException()
@@ -230,7 +344,12 @@ class AllnetServlet:
self.logger.debug(f"Sending download uri {resp.uri}")
self.data.base.log_event("allnet", "DLORDER_REQ_SUCCESS", logging.INFO, f"{Utils.get_ip_addr(request)} requested DL Order for {req.serial} {req.game_id} v{req.ver}")
return urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n"
res_str = urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n"
"""if is_dfi:
request.responseHeaders.addRawHeader('Pragma', 'DFI')
return self.to_dfi(res_str)"""
return res_str
def handle_dlorder_ini(self, request: Request, match: Dict) -> bytes:
if "file" not in match:
@@ -241,6 +360,7 @@ class AllnetServlet:
if path.exists(f"{self.config.allnet.update_cfg_folder}/{req_file}"):
self.logger.info(f"Request for DL INI file {req_file} from {Utils.get_ip_addr(request)} successful")
self.data.base.log_event("allnet", "DLORDER_INI_SENT", logging.INFO, f"{Utils.get_ip_addr(request)} successfully recieved {req_file}")
return open(
f"{self.config.allnet.update_cfg_folder}/{req_file}", "rb"
).read()
@@ -249,10 +369,38 @@ class AllnetServlet:
return b""
def handle_dlorder_report(self, request: Request, match: Dict) -> bytes:
self.logger.info(
f"DLI Report from {Utils.get_ip_addr(request)}: {request.content.getvalue()}"
)
return b""
req_raw = request.content.getvalue()
client_ip = Utils.get_ip_addr(request)
try:
req_dict: Dict = json.loads(req_raw)
except Exception as e:
self.logger.warning(f"Failed to parse DL Report: {e}")
return "NG"
dl_data_type = DLIMG_TYPE.app
dl_data = req_dict.get("appimage", {})
if dl_data is None or not dl_data:
dl_data_type = DLIMG_TYPE.opt
dl_data = req_dict.get("optimage", {})
if dl_data is None or not dl_data:
self.logger.warning(f"Failed to parse DL Report: Invalid format - contains neither appimage nor optimage")
return "NG"
rep = DLReport(dl_data, dl_data_type)
if not rep.validate():
self.logger.warning(f"Failed to parse DL Report: Invalid format - {rep.err}")
return "NG"
msg = f"{rep.serial} @ {client_ip} reported {rep.__type.name} download state {rep.rf_state.name} for {rep.gd} v{rep.dav}:"\
f" {rep.tdsc}/{rep.tsc} segments downloaded for working files {rep.wfl} with {rep.dfl if rep.dfl else 'none'} complete."
self.data.base.log_event("allnet", "DL_REPORT", logging.INFO, msg, dl_data)
self.logger.info(msg)
return "OK"
def handle_loaderstaterecorder(self, request: Request, match: Dict) -> bytes:
req_data = request.content.getvalue()
@@ -276,8 +424,16 @@ class AllnetServlet:
return "OK".encode()
def handle_billing_request(self, request: Request, _: Dict):
req_dict = self.billing_req_to_dict(request.content.getvalue())
req_raw = request.content.getvalue()
if request.getHeader('Content-Type') == "application/octet-stream":
req_unzip = zlib.decompressobj(-zlib.MAX_WBITS).decompress(req_raw)
else:
req_unzip = req_raw
req_dict = self.billing_req_to_dict(req_unzip)
request_ip = Utils.get_ip_addr(request)
if req_dict is None:
self.logger.error(f"Failed to parse request {request.content.getvalue()}")
return b""
@@ -287,45 +443,60 @@ class AllnetServlet:
rsa = RSA.import_key(open(self.config.billing.signing_key, "rb").read())
signer = PKCS1_v1_5.new(rsa)
digest = SHA.new()
traces: List[TraceData] = []
try:
kc_playlimit = int(req_dict[0]["playlimit"])
kc_nearfull = int(req_dict[0]["nearfull"])
kc_billigtype = int(req_dict[0]["billingtype"])
kc_playcount = int(req_dict[0]["playcnt"])
kc_serial: str = req_dict[0]["keychipid"]
kc_game: str = req_dict[0]["gameid"]
kc_date = strptime(req_dict[0]["date"], "%Y%m%d%H%M%S")
kc_serial_bytes = kc_serial.encode()
for x in range(len(req_dict)):
if not req_dict[x]:
continue
if x == 0:
req = BillingInfo(req_dict[x])
continue
tmp = TraceData(req_dict[x])
if tmp.trace_type == TraceDataType.CHARGE:
tmp = TraceDataCharge(req_dict[x])
elif tmp.trace_type == TraceDataType.EVENT:
tmp = TraceDataEvent(req_dict[x])
elif tmp.trace_type == TraceDataType.CREDIT:
tmp = TraceDataCredit(req_dict[x])
traces.append(tmp)
kc_serial_bytes = req.keychipid.encode()
except KeyError as e:
return f"result=5&linelimit=&message={e} field is missing".encode()
self.logger.error(f"Billing request failed to parse: {e}")
return f"result=5&linelimit=&message=field is missing or formatting is incorrect\r\n".encode()
machine = self.data.arcade.get_machine(kc_serial)
machine = self.data.arcade.get_machine(req.keychipid)
if machine is None and not self.config.server.allow_unregistered_serials:
msg = f"Unrecognised serial {kc_serial} attempted billing checkin from {request_ip} for game {kc_game}."
msg = f"Unrecognised serial {req.keychipid} attempted billing checkin from {request_ip} for {req.gameid} v{req.gamever}."
self.data.base.log_event(
"allnet", "BILLING_CHECKIN_NG_SERIAL", logging.WARN, msg
)
self.logger.warn(msg)
self.logger.warning(msg)
resp = BillingResponse("", "", "", "")
resp.result = "1"
return self.dict_to_http_form_string([vars(resp)])
return f"result=1&requestno={req.requestno}&message=Keychip Serial bad\r\n".encode()
msg = (
f"Billing checkin from {request_ip}: game {kc_game} keychip {kc_serial} playcount "
f"{kc_playcount} billing_type {kc_billigtype} nearfull {kc_nearfull} playlimit {kc_playlimit}"
f"Billing checkin from {request_ip}: game {req.gameid} ver {req.gamever} keychip {req.keychipid} playcount "
f"{req.playcnt} billing_type {req.billingtype.name} nearfull {req.nearfull} playlimit {req.playlimit}"
)
self.logger.info(msg)
self.data.base.log_event("billing", "BILLING_CHECKIN_OK", logging.INFO, msg)
if req.traceleft > 0:
self.logger.warn(f"{req.traceleft} unsent tracelogs")
kc_playlimit = req.playlimit
kc_nearfull = req.nearfull
while kc_playcount > kc_playlimit:
while req.playcnt > req.playlimit:
kc_playlimit += 1024
kc_nearfull += 1024
playlimit = kc_playlimit
nearfull = kc_nearfull + (kc_billigtype * 0x00010000)
nearfull = kc_nearfull + (req.billingtype.value * 0x00010000)
digest.update(playlimit.to_bytes(4, "little") + kc_serial_bytes)
playlimit_sig = signer.sign(digest).hex()
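For reference, a minimal standalone sketch of the signing step above, assuming pycryptodome and a freshly generated key standing in for `config.billing.signing_key` (the serial and limit are hypothetical):
```python
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
from Crypto.Hash import SHA

rsa = RSA.generate(2048)            # stand-in for the configured billing signing key
signer = PKCS1_v1_5.new(rsa)

playlimit = 1024
kc_serial_bytes = b"A69E01A8888"    # hypothetical keychip serial

digest = SHA.new()                  # SHA-1, as used above
digest.update(playlimit.to_bytes(4, "little") + kc_serial_bytes)
playlimit_sig = signer.sign(digest).hex()
print(playlimit_sig)
```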
@ -336,13 +507,16 @@ class AllnetServlet:
# TODO: playhistory
resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig)
#resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig)
resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig, req.requestno, req.protocolver)
resp_str = self.dict_to_http_form_string([vars(resp)])
if resp_str is None:
self.logger.error(f"Failed to parse response {vars(resp)}")
resp_str = urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\r\n"
self.logger.debug(f"response {vars(resp)}")
if req.traceleft > 0:
self.logger.info(f"Requesting 20 more of {req.traceleft} unsent tracelogs")
return f"result=6&waittime=0&linelimit=20\r\n".encode()
return resp_str.encode("utf-8")
def handle_naomitest(self, request: Request, _: Dict) -> bytes:
@ -354,9 +528,7 @@ class AllnetServlet:
Parses a billing request string into a python dictionary
"""
try:
decomp = zlib.decompressobj(-zlib.MAX_WBITS)
unzipped = decomp.decompress(data)
sections = unzipped.decode("ascii").split("\r\n")
sections = data.decode("ascii").split("\r\n")
ret = []
for x in sections:
@ -372,9 +544,7 @@ class AllnetServlet:
Parses an allnet request string into a python dictionary
"""
try:
zipped = base64.b64decode(data)
unzipped = zlib.decompress(zipped)
sections = unzipped.decode("utf-8").split("\r\n")
sections = data.split("\r\n")
ret = []
for x in sections:
@ -385,35 +555,15 @@ class AllnetServlet:
self.logger.error(f"allnet_req_to_dict: {e} while parsing {data}")
return None
def dict_to_http_form_string(
self,
data: List[Dict[str, Any]],
crlf: bool = True,
trailing_newline: bool = True,
) -> Optional[str]:
"""
Takes a python dictionary and parses it into an allnet response string
"""
try:
urlencode = ""
for item in data:
for k, v in item.items():
if k is None or v is None:
continue
urlencode += f"{k}={v}&"
if crlf:
urlencode = urlencode[:-1] + "\r\n"
else:
urlencode = urlencode[:-1] + "\n"
if not trailing_newline:
if crlf:
urlencode = urlencode[:-2]
else:
urlencode = urlencode[:-1]
return urlencode
except Exception as e:
self.logger.error(f"dict_to_http_form_string: {e} while parsing {data}")
return None
def from_dfi(self, data: bytes) -> str:
zipped = base64.b64decode(data)
unzipped = zlib.decompress(zipped)
return unzipped.decode("utf-8")
def to_dfi(self, data: str) -> bytes:
unzipped = data.encode('utf-8')
zipped = zlib.compress(unzipped)
return base64.b64encode(zipped)
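A minimal round-trip sketch of the DFI transport framing implemented above (zlib-compressed, then base64-encoded text); the payload is hypothetical:
```python
import base64
import zlib

def to_dfi(data: str) -> bytes:
    return base64.b64encode(zlib.compress(data.encode("utf-8")))

def from_dfi(data: bytes) -> str:
    return zlib.decompress(base64.b64decode(data)).decode("utf-8")

payload = "game_id=SDBT&ver=2.00&serial=A69E01A8888"  # hypothetical request body
assert from_dfi(to_dfi(payload)) == payload
```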
class AllnetPowerOnRequest:
@ -499,6 +649,114 @@ class AllnetDownloadOrderResponse:
self.serial = serial
self.uri = uri
class TraceDataType(Enum):
CHARGE = 0
EVENT = 1
CREDIT = 2
class BillingType(Enum):
A = 1
B = 0
class float5:
def __init__(self, n: str = "0") -> None:
nf = float(n)
if nf > 999.9 or nf < 0:
raise ValueError('float5 must be between 0.000 and 999.9 inclusive')
self.value = nf
@classmethod
def to_str(cls, f: float):
return f"%.{2 - int(math.log10(f))+1}f" % f
class BillingInfo:
def __init__(self, data: Dict) -> None:
try:
self.keychipid = str(data.get("keychipid", None))
self.functype = int(data.get("functype", None))
self.gameid = str(data.get("gameid", None))
self.gamever = float(data.get("gamever", None))
self.boardid = str(data.get("boardid", None))
self.tenpoip = str(data.get("tenpoip", None))
self.libalibver = float(data.get("libalibver", None))
self.datamax = int(data.get("datamax", None))
self.billingtype = BillingType(int(data.get("billingtype", None)))
self.protocolver = float(data.get("protocolver", None))
self.operatingfix = bool(data.get("operatingfix", None))
self.traceleft = int(data.get("traceleft", None))
self.requestno = int(data.get("requestno", None))
self.datesync = bool(data.get("datesync", None))
self.timezone = str(data.get("timezone", None))
self.date = datetime.strptime(data.get("date", None), BILLING_DT_FORMAT)
self.crcerrcnt = int(data.get("crcerrcnt", None))
self.memrepair = bool(data.get("memrepair", None))
self.playcnt = int(data.get("playcnt", None))
self.playlimit = int(data.get("playlimit", None))
self.nearfull = int(data.get("nearfull", None))
except Exception as e:
raise KeyError(e)
class TraceData:
def __init__(self, data: Dict) -> None:
try:
self.crc_err_flg = bool(data.get("cs", None))
self.record_number = int(data.get("rn", None))
self.seq_number = int(data.get("sn", None))
self.trace_type = TraceDataType(int(data.get("tt", None)))
self.date_sync_flg = bool(data.get("ds", None))
self.date = datetime.strptime(data.get("dt", None), BILLING_DT_FORMAT)
self.keychip = str(data.get("kn", None))
self.lib_ver = float(data.get("alib", None))
except Exception as e:
raise KeyError(e)
class TraceDataCharge(TraceData):
def __init__(self, data: Dict) -> None:
super().__init__(data)
try:
self.game_id = str(data.get("gi", None))
self.game_version = float(data.get("gv", None))
self.board_serial = str(data.get("bn", None))
self.shop_ip = str(data.get("ti", None))
self.play_count = int(data.get("pc", None))
self.play_limit = int(data.get("pl", None))
self.product_code = int(data.get("ic", None))
self.product_count = int(data.get("in", None))
self.func_type = int(data.get("kk", None))
self.player_number = int(data.get("playerno", None))
except Exception as e:
raise KeyError(e)
class TraceDataEvent(TraceData):
def __init__(self, data: Dict) -> None:
super().__init__(data)
try:
self.message = str(data.get("me", None))
except Exception as e:
raise KeyError(e)
class TraceDataCredit(TraceData):
def __init__(self, data: Dict) -> None:
super().__init__(data)
try:
self.chute_type = int(data.get("cct", None))
self.service_type = int(data.get("cst", None))
self.operation_type = int(data.get("cop", None))
self.coin_rate0 = int(data.get("cr0", None))
self.coin_rate1 = int(data.get("cr1", None))
self.bonus_addition = int(data.get("cba", None))
self.credit_rate = int(data.get("ccr", None))
self.credit0 = int(data.get("cc0", None))
self.credit1 = int(data.get("cc1", None))
self.credit2 = int(data.get("cc2", None))
self.credit3 = int(data.get("cc3", None))
self.credit4 = int(data.get("cc4", None))
self.credit5 = int(data.get("cc5", None))
self.credit6 = int(data.get("cc6", None))
self.credit7 = int(data.get("cc7", None))
except Exception as e:
raise KeyError(e)
class BillingResponse:
def __init__(
@ -507,20 +765,22 @@ class BillingResponse:
playlimit_sig: str = "",
nearfull: str = "",
nearfull_sig: str = "",
request_num: int = 1,
protocol_ver: float = 1.000,
playhistory: str = "000000/0:000000/0:000000/0",
) -> None:
self.result = "0"
self.waitime = "100"
self.linelimit = "1"
self.message = ""
self.result = 0
self.requestno = request_num
self.traceerase = 1
self.fixinterval = 120
self.fixlogcnt = 100
self.playlimit = playlimit
self.playlimitsig = playlimit_sig
self.protocolver = "1.000"
self.playhistory = playhistory
self.nearfull = nearfull
self.nearfullsig = nearfull_sig
self.fixlogincnt = "0"
self.fixinterval = "5"
self.playhistory = playhistory
self.linelimit = 100
self.protocolver = float5.to_str(protocol_ver)
# playhistory -> YYYYMM/C:...
# YYYY -> 4 digit year, MM -> 2 digit month, C -> Playcount during that period
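For illustration, a hedged sketch of assembling a string in the playhistory format described in the comment above; the period values are hypothetical:
```python
periods = [("202309", 120), ("202310", 98), ("202311", 42)]  # (YYYYMM, playcount), hypothetical
playhistory = ":".join(f"{yyyymm}/{count}" for yyyymm, count in periods)
print(playhistory)  # 202309/120:202310/98:202311/42
```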
@ -529,3 +789,66 @@ class AllnetRequestException(Exception):
def __init__(self, message="") -> None:
self.message = message
super().__init__(self.message)
class DLReport:
def __init__(self, data: Dict, report_type: DLIMG_TYPE) -> None:
self.serial = data.get("serial")
self.dfl = data.get("dfl")
self.wfl = data.get("wfl")
self.tsc = data.get("tsc")
self.tdsc = data.get("tdsc")
self.at = data.get("at")
self.ot = data.get("ot")
self.rt = data.get("rt")
self.as_ = data.get("as")
self.rf_state = DLI_STATUS.from_int(data.get("rf_state"))
self.gd = data.get("gd")
self.dav = data.get("dav")
self.wdav = data.get("wdav") # app only
self.dov = data.get("dov")
self.wdov = data.get("wdov") # app only
self.__type = report_type
self.err = ""
def validate(self) -> bool:
if self.serial is None:
self.err = "serial not provided"
return False
if self.tsc is None:
self.err = "tsc not provided"
return False
if self.tdsc is None:
self.err = "tdsc not provided"
return False
if self.as_ is None:
self.err = "as not provided"
return False
if self.rf_state is None:
self.err = "rf_state not provided"
return False
if self.gd is None:
self.err = "gd not provided"
return False
if self.dav is None:
self.err = "dav not provided"
return False
if self.dov is None:
self.err = "dov not provided"
return False
if (self.wdav is None or self.wdov is None) and self.__type == DLIMG_TYPE.app:
self.err = "wdav or wdov not provided in app image"
return False
if (self.wdav is not None or self.wdov is not None) and self.__type == DLIMG_TYPE.opt:
self.err = "wdav or wdov provided in opt image"
return False
return True
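A minimal usage sketch of the class above with a hypothetical app-image payload; the field values (including `rf_state`) are made up for illustration:
```python
payload = {
    "serial": "A69E01A8888", "tsc": 10, "tdsc": 10, "as": 1, "rf_state": 1,
    "gd": "SDBT", "dav": "2.10.00", "dov": "A032", "wdav": "2.10.00", "wdov": "A032",
}
rep = DLReport(payload, DLIMG_TYPE.app)
if rep.validate():
    print(f"{rep.serial} reported {rep.tdsc}/{rep.tsc} segments downloaded")
else:
    print(f"invalid report: {rep.err}")
```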

View File

@ -48,6 +48,18 @@ class ServerConfig:
self.__config, "core", "server", "log_dir", default="logs"
)
@property
def check_arcade_ip(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "core", "server", "check_arcade_ip", default=False
)
@property
def strict_ip_checking(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "core", "server", "strict_ip_checking", default=False
)
class TitleConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
@ -72,6 +84,36 @@ class TitleConfig:
return CoreConfig.get_config_field(
self.__config, "core", "title", "port", default=8080
)
@property
def port_ssl(self) -> int:
return CoreConfig.get_config_field(
self.__config, "core", "title", "port_ssl", default=0
)
@property
def ssl_key(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "title", "ssl_key", default="cert/title.key"
)
@property
def ssl_cert(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "title", "ssl_cert", default="cert/title.pem"
)
@property
def reboot_start_time(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "title", "reboot_start_time", default=""
)
@property
def reboot_end_time(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "title", "reboot_end_time", default=""
)
class DatabaseConfig:
@ -138,6 +180,12 @@ class DatabaseConfig:
default=10000,
)
@property
def enable_memcached(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "core", "database", "enable_memcached", default=True
)
@property
def memcached_host(self) -> str:
return CoreConfig.get_config_field(
@ -188,6 +236,12 @@ class AllnetConfig:
self.__config, "core", "allnet", "port", default=80
)
@property
def ip_check(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "core", "allnet", "ip_check", default=False
)
@property
def allow_online_updates(self) -> int:
return CoreConfig.get_config_field(

View File

@ -17,7 +17,7 @@ except ModuleNotFoundError:
def cached(lifetime: int = 10, extra_key: Any = None) -> Callable:
def _cached(func: Callable) -> Callable:
if has_mc:
if has_mc and (cfg and cfg.database.enable_memcached):
hostname = "127.0.0.1"
if cfg:
hostname = cfg.database.memcached_host

View File

@ -15,7 +15,7 @@ from core.utils import Utils
class Data:
current_schema_version = 4
current_schema_version = 6
engine = None
session = None
user = None
@ -163,7 +163,7 @@ class Data:
version = mod.current_schema_version
else:
self.logger.warn(
self.logger.warning(
f"current_schema_version not found for {folder}"
)
@ -171,7 +171,7 @@ class Data:
version = self.current_schema_version
if version is None:
self.logger.warn(
self.logger.warning(
f"Could not determine latest version for {game}, please specify --version"
)
@ -254,7 +254,7 @@ class Data:
self.logger.error(f"Failed to create card for owner with id {user_id}")
return
self.logger.warn(
self.logger.warning(
f"Successfully created owner with email {email}, access code 00000000000000000000, and password {pw} Make sure to change this password and assign a real card ASAP!"
)
@ -269,7 +269,7 @@ class Data:
return
if not should_force:
self.logger.warn(
self.logger.warning(
f"Card already exists for access code {new_ac} (id {new_card['id']}). If you wish to continue, rerun with the '--force' flag."
f" All exiting data on the target card {new_ac} will be perminently erased and replaced with data from card {old_ac}."
)
@ -307,7 +307,7 @@ class Data:
def autoupgrade(self) -> None:
all_game_versions = self.base.get_all_schema_vers()
if all_game_versions is None:
self.logger.warn("Failed to get schema versions")
self.logger.warning("Failed to get schema versions")
return
all_games = Utils.get_all_titles()

View File

@ -1,9 +1,10 @@
from typing import Optional, Dict
from sqlalchemy import Table, Column
from typing import Optional, Dict, List
from sqlalchemy import Table, Column, and_, or_
from sqlalchemy.sql.schema import ForeignKey, PrimaryKeyConstraint
from sqlalchemy.types import Integer, String, Boolean
from sqlalchemy.types import Integer, String, Boolean, JSON
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
import re
from core.data.schema.base import BaseData, metadata
@ -21,6 +22,7 @@ arcade = Table(
Column("city", String(255)),
Column("region_id", Integer),
Column("timezone", String(255)),
Column("ip", String(39)),
mysql_charset="utf8mb4",
)
@ -39,7 +41,9 @@ machine = Table(
Column("country", String(3)), # overwrites if not null
Column("timezone", String(255)),
Column("ota_enable", Boolean),
Column("memo", String(255)),
Column("is_cab", Boolean),
Column("data", JSON),
mysql_charset="utf8mb4",
)
@ -65,7 +69,7 @@ arcade_owner = Table(
class ArcadeData(BaseData):
def get_machine(self, serial: str = None, id: int = None) -> Optional[Dict]:
def get_machine(self, serial: str = None, id: int = None) -> Optional[Row]:
if serial is not None:
serial = serial.replace("-", "")
if len(serial) == 11:
@ -130,12 +134,19 @@ class ArcadeData(BaseData):
f"Failed to update board id for machine {machine_id} -> {boardid}"
)
def get_arcade(self, id: int) -> Optional[Dict]:
def get_arcade(self, id: int) -> Optional[Row]:
sql = arcade.select(arcade.c.id == id)
result = self.execute(sql)
if result is None:
return None
return result.fetchone()
def get_arcade_machines(self, id: int) -> Optional[List[Row]]:
sql = machine.select(machine.c.arcade == id)
result = self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_arcade(
self,
@ -165,7 +176,21 @@ class ArcadeData(BaseData):
return None
return result.lastrowid
def get_arcade_owners(self, arcade_id: int) -> Optional[Dict]:
def get_arcades_managed_by_user(self, user_id: int) -> Optional[List[Row]]:
sql = select(arcade).join(arcade_owner, arcade_owner.c.arcade == arcade.c.id).where(arcade_owner.c.user == user_id)
result = self.execute(sql)
if result is None:
return False
return result.fetchall()
def get_manager_permissions(self, user_id: int, arcade_id: int) -> Optional[int]:
sql = select(arcade_owner.c.permissions).where(and_(arcade_owner.c.user == user_id, arcade_owner.c.arcade == arcade_id))
result = self.execute(sql)
if result is None:
return False
return result.fetchone()
def get_arcade_owners(self, arcade_id: int) -> Optional[Row]:
sql = select(arcade_owner).where(arcade_owner.c.arcade == arcade_id)
result = self.execute(sql)
@ -187,33 +212,21 @@ class ArcadeData(BaseData):
return f"{platform_code}{platform_rev:02d}A{serial_num:04d}{append:04d}" # 0x41 = A, 0x52 = R
def validate_keychip_format(self, serial: str) -> bool:
serial = serial.replace("-", "")
if len(serial) != 11 or len(serial) != 15:
self.logger.error(
f"Serial validate failed: Incorrect length for {serial} (len {len(serial)})"
)
if re.fullmatch(r"^A[0-9]{2}[E|X][-]?[0-9]{2}[A-HJ-NP-Z][0-9]{4}([0-9]{4})?$", serial) is None:
return False
platform_code = serial[:4]
platform_rev = serial[4:6]
const_a = serial[6]
num = serial[7:11]
append = serial[11:15]
if re.match("A[7|6]\d[E|X][0|1][0|1|2]A\d{4,8}", serial) is None:
self.logger.error(f"Serial validate failed: {serial} failed regex")
return False
if len(append) != 0 or len(append) != 4:
self.logger.error(
f"Serial validate failed: {serial} had malformed append {append}"
)
return False
if len(num) != 4:
self.logger.error(
f"Serial validate failed: {serial} had malformed number {num}"
)
return False
return True
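A quick check of the keychip pattern introduced above against a few hypothetical serials, with and without the dash and the optional 4-digit append:
```python
import re

KEYCHIP_RE = r"^A[0-9]{2}[E|X][-]?[0-9]{2}[A-HJ-NP-Z][0-9]{4}([0-9]{4})?$"
for serial in ("A69E-01A8888", "A69E01A8888", "A69E01A88880000", "B69E01A8888"):
    print(serial, bool(re.fullmatch(KEYCHIP_RE, serial)))
# True, True, True, False
```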
def get_arcade_by_name(self, name: str) -> Optional[List[Row]]:
sql = arcade.select(or_(arcade.c.name.like(f"%{name}%"), arcade.c.nickname.like(f"%{name}%")))
result = self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_arcades_by_ip(self, ip: str) -> Optional[List[Row]]:
sql = arcade.select().where(arcade.c.ip == ip)
result = self.execute(sql)
if result is None:
return None
return result.fetchall()

View File

@ -64,6 +64,27 @@ class CardData(BaseData):
return int(card["user"])
def get_card_banned(self, access_code: str) -> Optional[bool]:
"""
Given a 20 digit access code as a string, check if the card is banned
"""
card = self.get_card_by_access_code(access_code)
if card is None:
return None
if card["is_banned"]:
return True
return False
def get_card_locked(self, access_code: str) -> Optional[bool]:
"""
Given a 20 digit access code as a string, check if the card is locked
"""
card = self.get_card_by_access_code(access_code)
if card is None:
return None
if card["is_locked"]:
return True
return False
def delete_card(self, card_id: int) -> None:
sql = aime_card.delete(aime_card.c.id == card_id)

View File

@ -107,3 +107,17 @@ class UserData(BaseData):
if result is None:
return None
return result.fetchall()
def find_user_by_email(self, email: str) -> Row:
sql = select(aime_user).where(aime_user.c.email == email)
result = self.execute(sql)
if result is None:
return False
return result.fetchone()
def find_user_by_username(self, username: str) -> List[Row]:
sql = aime_user.select(aime_user.c.username.like(f"%{username}%"))
result = self.execute(sql)
if result is None:
return False
return result.fetchall()

View File

@ -0,0 +1,3 @@
ALTER TABLE machine DROP COLUMN memo;
ALTER TABLE machine DROP COLUMN is_blacklisted;
ALTER TABLE machine DROP COLUMN `data`;

View File

@ -0,0 +1 @@
ALTER TABLE arcade DROP COLUMN `ip`;

View File

@ -0,0 +1,3 @@
ALTER TABLE machine ADD memo varchar(255) NULL;
ALTER TABLE machine ADD is_blacklisted tinyint(1) NULL;
ALTER TABLE machine ADD `data` longtext NULL;

View File

@ -0,0 +1 @@
ALTER TABLE arcade ADD ip varchar(39) NULL;

View File

@ -0,0 +1,2 @@
ALTER TABLE diva_profile
DROP skn_eqp;

View File

@ -0,0 +1,2 @@
ALTER TABLE diva_profile
ADD skn_eqp INT NOT NULL DEFAULT 0;

View File

@ -0,0 +1,10 @@
ALTER TABLE mai2_profile_detail
DROP COLUMN mapStock;
ALTER TABLE mai2_profile_extend
DROP COLUMN selectResultScoreViewType;
ALTER TABLE mai2_profile_option
DROP COLUMN outFrameType,
DROP COLUMN touchVolume,
DROP COLUMN breakSlideVolume;

View File

@ -0,0 +1,10 @@
ALTER TABLE mai2_profile_detail
ADD mapStock INT NULL AFTER playCount;
ALTER TABLE mai2_profile_extend
ADD selectResultScoreViewType INT NULL AFTER selectResultDetails;
ALTER TABLE mai2_profile_option
ADD outFrameType INT NULL AFTER dispCenter,
ADD touchVolume INT NULL AFTER slideVolume,
ADD breakSlideVolume INT NULL AFTER slideVolume;

View File

@ -9,6 +9,9 @@ from zope.interface import Interface, Attribute, implementer
from twisted.python.components import registerAdapter
import jinja2
import bcrypt
import re
from enum import Enum
from urllib import parse
from core import CoreConfig, Utils
from core.data import Data
@ -18,7 +21,15 @@ class IUserSession(Interface):
userId = Attribute("User's ID")
current_ip = Attribute("User's current ip address")
permissions = Attribute("User's permission level")
ongeki_version = Attribute("User's selected Ongeki Version")
class PermissionOffset(Enum):
USER = 0 # Regular user
USERMOD = 1 # Can moderate other users
ACMOD = 2 # Can add arcades and cabs
SYSADMIN = 3 # Can change settings
# 4 - 6 reserved for future use
OWNER = 7 # Can do anything
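The frontend treats `permissions` as a bitmask indexed by these offsets; below is a minimal sketch of the comparison style used in the handlers further down (the helper name is hypothetical):
```python
def has_at_least(permissions: int, offset: PermissionOffset) -> bool:
    # Mirrors the "permissions < 1 << PermissionOffset.X.value" checks used below.
    return permissions >= (1 << offset.value)

staff = (1 << PermissionOffset.USER.value) | (1 << PermissionOffset.USERMOD.value)
print(has_at_least(staff, PermissionOffset.USERMOD))  # True
print(has_at_least(staff, PermissionOffset.ACMOD))    # False
```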
@implementer(IUserSession)
class UserSession(object):
@ -26,6 +37,7 @@ class UserSession(object):
self.userId = 0
self.current_ip = "0.0.0.0"
self.permissions = 0
self.ongeki_version = 7
class FrontendServlet(resource.Resource):
@ -80,6 +92,9 @@ class FrontendServlet(resource.Resource):
self.environment.globals["game_list"] = self.game_list
self.putChild(b"gate", FE_Gate(cfg, self.environment))
self.putChild(b"user", FE_User(cfg, self.environment))
self.putChild(b"sys", FE_System(cfg, self.environment))
self.putChild(b"arcade", FE_Arcade(cfg, self.environment))
self.putChild(b"cab", FE_Machine(cfg, self.environment))
self.putChild(b"game", fe_game)
self.logger.info(
@ -154,6 +169,7 @@ class FE_Gate(FE_Base):
passwd = None
uid = self.data.card.get_user_id_from_card(access_code)
user = self.data.user.get_user(uid)
if uid is None:
return redirectTo(b"/gate?e=1", request)
@ -175,6 +191,7 @@ class FE_Gate(FE_Base):
usr_sesh = IUserSession(sesh)
usr_sesh.userId = uid
usr_sesh.current_ip = ip
usr_sesh.permissions = user['permissions']
return redirectTo(b"/user", request)
@ -192,7 +209,7 @@ class FE_Gate(FE_Base):
hashed = bcrypt.hashpw(passwd, salt)
result = self.data.user.create_user(
uid, username, email, hashed.decode(), 1
uid, username, email.lower(), hashed.decode(), 1
)
if result is None:
return redirectTo(b"/gate?e=3", request)
@ -210,17 +227,29 @@ class FE_Gate(FE_Base):
return redirectTo(b"/gate?e=2", request)
ac = request.args[b"ac"][0].decode()
card = self.data.card.get_card_by_access_code(ac)
if card is None:
return redirectTo(b"/gate?e=1", request)
user = self.data.user.get_user(card['user'])
if user is None:
self.logger.warning(f"Card {ac} exists with no/invalid associated user ID {card['user']}")
return redirectTo(b"/gate?e=0", request)
if user['password'] is not None:
return redirectTo(b"/gate?e=1", request)
template = self.environment.get_template("core/frontend/gate/create.jinja")
return template.render(
title=f"{self.core_config.server.name} | Create User",
code=ac,
sesh={"userId": 0},
sesh={"userId": 0, "permissions": 0},
).encode("utf-16")
class FE_User(FE_Base):
def render_GET(self, request: Request):
uri = request.uri.decode()
template = self.environment.get_template("core/frontend/user/index.jinja")
sesh: Session = request.getSession()
@ -228,9 +257,26 @@ class FE_User(FE_Base):
if usr_sesh.userId == 0:
return redirectTo(b"/gate", request)
cards = self.data.card.get_user_cards(usr_sesh.userId)
user = self.data.user.get_user(usr_sesh.userId)
m = re.match("\/user\/(\d*)", uri)
if m is not None:
usrid = m.group(1)
if usr_sesh.permissions < 1 << PermissionOffset.USERMOD.value or not usrid == usr_sesh.userId:
return redirectTo(b"/user", request)
else:
usrid = usr_sesh.userId
user = self.data.user.get_user(usrid)
if user is None:
return redirectTo(b"/user", request)
cards = self.data.card.get_user_cards(usrid)
arcades = self.data.arcade.get_arcades_managed_by_user(usrid)
card_data = []
arcade_data = []
for c in cards:
if c['is_locked']:
status = 'Locked'
@ -240,9 +286,113 @@ class FE_User(FE_Base):
status = 'Active'
card_data.append({'access_code': c['access_code'], 'status': status})
for a in arcades:
arcade_data.append({'id': a['id'], 'name': a['name']})
return template.render(
title=f"{self.core_config.server.name} | Account", sesh=vars(usr_sesh), cards=card_data, username=user['username']
title=f"{self.core_config.server.name} | Account",
sesh=vars(usr_sesh),
cards=card_data,
username=user['username'],
arcades=arcade_data
).encode("utf-16")
def render_POST(self, request: Request):
pass
class FE_System(FE_Base):
def render_GET(self, request: Request):
uri = request.uri.decode()
template = self.environment.get_template("core/frontend/sys/index.jinja")
usrlist: List[Dict] = []
aclist: List[Dict] = []
cablist: List[Dict] = []
sesh: Session = request.getSession()
usr_sesh = IUserSession(sesh)
if usr_sesh.userId == 0 or usr_sesh.permissions < 1 << PermissionOffset.USERMOD.value:
return redirectTo(b"/gate", request)
if uri.startswith("/sys/lookup.user?"):
uri_parse = parse.parse_qs(uri.replace("/sys/lookup.user?", "")) # lop off the first bit
uid_search = uri_parse.get("usrId")
email_search = uri_parse.get("usrEmail")
uname_search = uri_parse.get("usrName")
if uid_search is not None:
u = self.data.user.get_user(uid_search[0])
if u is not None:
usrlist.append(u._asdict())
elif email_search is not None:
u = self.data.user.find_user_by_email(email_search[0])
if u is not None:
usrlist.append(u._asdict())
elif uname_search is not None:
ul = self.data.user.find_user_by_username(uname_search[0])
for u in ul:
usrlist.append(u._asdict())
elif uri.startswith("/sys/lookup.arcade?"):
uri_parse = parse.parse_qs(uri.replace("/sys/lookup.arcade?", "")) # lop off the first bit
ac_id_search = uri_parse.get("arcadeId")
ac_name_search = uri_parse.get("arcadeName")
ac_user_search = uri_parse.get("arcadeUser")
ac_ip_search = uri_parse.get("arcadeIp")
if ac_id_search is not None:
u = self.data.arcade.get_arcade(ac_id_search[0])
if u is not None:
aclist.append(u._asdict())
elif ac_name_search is not None:
ul = self.data.arcade.get_arcade_by_name(ac_name_search[0])
if ul is not None:
for u in ul:
aclist.append(u._asdict())
elif ac_user_search is not None:
ul = self.data.arcade.get_arcades_managed_by_user(ac_user_search[0])
if ul is not None:
for u in ul:
aclist.append(u._asdict())
elif ac_ip_search is not None:
ul = self.data.arcade.get_arcades_by_ip(ac_ip_search[0])
if ul is not None:
for u in ul:
aclist.append(u._asdict())
elif uri.startswith("/sys/lookup.cab?"):
uri_parse = parse.parse_qs(uri.replace("/sys/lookup.cab?", "")) # lop off the first bit
cab_id_search = uri_parse.get("cabId")
cab_serial_search = uri_parse.get("cabSerial")
cab_acid_search = uri_parse.get("cabAcId")
if cab_id_search is not None:
u = self.data.arcade.get_machine(id=cab_id_search[0])
if u is not None:
cablist.append(u._asdict())
elif cab_serial_search is not None:
u = self.data.arcade.get_machine(serial=cab_serial_search[0])
if u is not None:
cablist.append(u._asdict())
elif cab_acid_search is not None:
ul = self.data.arcade.get_arcade_machines(cab_acid_search[0])
for u in ul:
cablist.append(u._asdict())
return template.render(
title=f"{self.core_config.server.name} | System",
sesh=vars(usr_sesh),
usrlist=usrlist,
aclist=aclist,
cablist=cablist,
).encode("utf-16")
@ -257,3 +407,54 @@ class FE_Game(FE_Base):
def render_GET(self, request: Request) -> bytes:
return redirectTo(b"/user", request)
class FE_Arcade(FE_Base):
def render_GET(self, request: Request):
uri = request.uri.decode()
template = self.environment.get_template("core/frontend/arcade/index.jinja")
managed = []
sesh: Session = request.getSession()
usr_sesh = IUserSession(sesh)
if usr_sesh.userId == 0:
return redirectTo(b"/gate", request)
m = re.match("\/arcade\/(\d*)", uri)
if m is not None:
arcadeid = m.group(1)
perms = self.data.arcade.get_manager_permissions(usr_sesh.userId, arcadeid)
arcade = self.data.arcade.get_arcade(arcadeid)
if perms is None:
perms = 0
else:
return redirectTo(b"/user", request)
return template.render(
title=f"{self.core_config.server.name} | Arcade",
sesh=vars(usr_sesh),
error=0,
perms=perms,
arcade=arcade._asdict()
).encode("utf-16")
class FE_Machine(FE_Base):
def render_GET(self, request: Request):
uri = request.uri.decode()
template = self.environment.get_template("core/frontend/machine/index.jinja")
sesh: Session = request.getSession()
usr_sesh = IUserSession(sesh)
if usr_sesh.userId == 0:
return redirectTo(b"/gate", request)
return template.render(
title=f"{self.core_config.server.name} | Machine",
sesh=vars(usr_sesh),
arcade={},
error=0,
).encode("utf-16")

View File

@ -0,0 +1,4 @@
{% extends "core/frontend/index.jinja" %}
{% block content %}
<h1>{{ arcade.name }}</h1>
{% endblock content %}

View File

@ -4,6 +4,7 @@
<title>{{ title }}</title>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-rbsA2VBKQhggwzxH7pPCaAqO46MgnOM80zW1RWuH61DGLwZJEdK2Kadq2F9CUG65" crossorigin="anonymous">
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-kenU1KFdBIe4zVF0s0G1M5b4hcpxyD9F7jL+jjXkk+Q2h455rYXK/7HAuoJl+0I4" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/jquery@3.2.1/dist/jquery.min.js" integrity="sha256-hwg4gsxgFZhOsEEamdOYGBf13FyQuiTwlAQgxVSNgt4=" crossorigin="anonymous"></script>
<style>
html {
background-color: #181a1b !important;
@ -77,6 +78,9 @@
margin-bottom: 10px;
width: 15%;
}
.modal-content {
background-color: #181a1b;
}
</style>
</head>
<body>

View File

@ -0,0 +1,5 @@
{% extends "core/frontend/index.jinja" %}
{% block content %}
{% include "core/frontend/widgets/err_banner.jinja" %}
<h1>Machine Management</h1>
{% endblock content %}

View File

@ -0,0 +1,103 @@
{% extends "core/frontend/index.jinja" %}
{% block content %}
<h1>System Management</h1>
<div class="row" id="rowForm">
{% if sesh.permissions >= 2 %}
<div class="col-sm-6" style="max-width: 25%;">
<form id="usrLookup" name="usrLookup" action="/sys/lookup.user" class="form-inline">
<h3>User Search</h3>
<div class="form-group">
<label for="usrId">User ID</label>
<input type="number" class="form-control" id="usrId" name="usrId">
</div>
OR
<div class="form-group">
<label for="usrName">Username</label>
<input type="text" class="form-control" id="usrName" name="usrName">
</div>
OR
<div class="form-group">
<label for="usrEmail">Email address</label>
<input type="email" class="form-control" id="usrEmail" name="usrEmail" aria-describedby="emailHelp">
</div>
<br />
<button type="submit" class="btn btn-primary">Search</button>
</form>
</div>
{% endif %}
{% if sesh.permissions >= 4 %}
<div class="col-sm-6" style="max-width: 25%;">
<form id="arcadeLookup" name="arcadeLookup" action="/sys/lookup.arcade" class="form-inline" >
<h3>Arcade Search</h3>
<div class="form-group">
<label for="arcadeId">Arcade ID</label>
<input type="number" class="form-control" id="arcadeId" name="arcadeId">
</div>
OR
<div class="form-group">
<label for="arcadeName">Arcade Name</label>
<input type="text" class="form-control" id="arcadeName" name="arcadeName">
</div>
OR
<div class="form-group">
<label for="arcadeUser">Owner User ID</label>
<input type="number" class="form-control" id="arcadeUser" name="arcadeUser">
</div>
OR
<div class="form-group">
<label for="arcadeIp">Assigned IP Address</label>
<input type="text" class="form-control" id="arcadeIp" name="arcadeIp">
</div>
<br />
<button type="submit" class="btn btn-primary">Search</button>
</form>
</div>
<div class="col-sm-6" style="max-width: 25%;">
<form id="cabLookup" name="cabLookup" action="/sys/lookup.cab" class="form-inline" >
<h3>Machine Search</h3>
<div class="form-group">
<label for="cabId">Machine ID</label>
<input type="number" class="form-control" id="cabId" name="cabId">
</div>
OR
<div class="form-group">
<label for="cabSerial">Machine Serial</label>
<input type="text" class="form-control" id="cabSerial" name="cabSerial">
</div>
OR
<div class="form-group">
<label for="cabAcId">Arcade ID</label>
<input type="number" class="form-control" id="cabAcId" name="cabAcId">
</div>
<br />
<button type="submit" class="btn btn-primary">Search</button>
</form>
</div>
{% endif %}
</div>
<div class="row" id="rowResult" style="margin: 10px;">
{% if sesh.permissions >= 2 %}
<div id="userSearchResult" class="col-sm-6" style="max-width: 25%;">
{% for usr in usrlist %}
<a href=/user/{{ usr.id }}><pre>{{ usr.id }} | {{ usr.username if usr.username != None else "<i>No Name Set</i>"}}</pre></a>
{% endfor %}
</div>
{% endif %}
{% if sesh.permissions >= 4 %}
<div id="arcadeSearchResult" class="col-sm-6" style="max-width: 25%;">
{% for ac in aclist %}
<a href=/arcade/{{ ac.id }}><pre>{{ ac.id }} | {{ ac.name if ac.name != None else "<i>No Name Set</i>" }} | {{ ac.ip if ac.ip != None else "<i>No IP Assigned</i>"}}</pre></a>
{% endfor %}
</div>
<div id="cabSearchResult" class="col-sm-6" style="max-width: 25%;">
{% for cab in cablist %}
<a href=/cab/{{ cab.id }}><pre>{{ cab.id }} | {{ cab.game if cab.game != None else "<i>ANY </i>" }} | {{ cab.serial }}</pre></a>
{% endfor %}
</div>
{% endif %}
</div>
<div class="row" id="rowAdd">
</div>
{% endblock content %}

View File

@ -2,11 +2,21 @@
{% block content %}
<h1>Management for {{ username }}</h1>
<h2>Cards <button class="btn btn-success" data-bs-toggle="modal" data-bs-target="#card_add">Add</button></h2>
<ul>
<ul style="font-size: 20px;">
{% for c in cards %}
<li>{{ c.access_code }}: {{ c.status }} <button class="btn-danger btn">Delete</button></li>
<li>{{ c.access_code }}: {{ c.status }}&nbsp;{% if c.status == 'Active'%}<button class="btn-warning btn">Lock</button>{% elif c.status == 'Locked' %}<button class="btn-warning btn">Unlock</button>{% endif %}&nbsp;<button class="btn-danger btn">Delete</button></li>
{% endfor %}
</ul>
{% if arcades is defined %}
<h2>Arcades</h2>
<ul style="font-size: 20px;">
{% for a in arcades %}
<li><a href=/arcade/{{ a.id }}>{{ a.name }}</a></li>
{% endfor %}
</ul>
{% endif %}
<div class="modal fade" id="card_add" tabindex="-1" aria-labelledby="card_add_label" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">

View File

@ -7,6 +7,10 @@ Card not registered, or wrong password
Missing or malformed access code
{% elif error == 3 %}
Failed to create user
{% elif error == 4 %}
Arcade not found
{% elif error == 5 %}
Machine not found
{% else %}
An unknown error occurred
{% endif %}

View File

@ -9,6 +9,9 @@
</div>
</div>
<div style="background: #333; color: #f9f9f9; width: 10%; height: 50px; line-height: 50px; text-align: center; float: left;">
{% if sesh is defined and sesh["permissions"] >= 2 %}
<a href="/sys"><button class="btn btn-primary">System</button></a>
{% endif %}
{% if sesh is defined and sesh["userId"] > 0 %}
<a href="/user"><button class="btn btn-primary">Account</button></a>
{% else %}

View File

@ -64,7 +64,7 @@ class MuchaServlet:
self.logger.debug(f"Mucha request {vars(req)}")
if req.gameCd not in self.mucha_registry:
self.logger.warn(f"Unknown gameCd {req.gameCd}")
self.logger.warning(f"Unknown gameCd {req.gameCd}")
return b"RESULTS=000"
# TODO: Decrypt S/N
@ -99,7 +99,7 @@ class MuchaServlet:
self.logger.debug(f"Mucha request {vars(req)}")
if req.gameCd not in self.mucha_registry:
self.logger.warn(f"Unknown gameCd {req.gameCd}")
self.logger.warning(f"Unknown gameCd {req.gameCd}")
return b"RESULTS=000"
resp = MuchaUpdateResponse(req.gameVer, f"{self.config.mucha.hostname}{':' + str(self.config.allnet.port) if self.config.server.is_develop else ''}")
@ -140,9 +140,7 @@ class MuchaServlet:
def mucha_postprocess(self, data: dict) -> Optional[bytes]:
try:
urlencode = ""
for k, v in data.items():
urlencode += f"{k}={v}&"
urlencode = "&".join(f"{k}={v}" for k, v in data.items())
return urlencode.encode()
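For reference, the join above produces a flat key=value form body; a quick sketch with a hypothetical response dict:
```python
data = {"RESULTS": "001", "ALL_TOKEN": "0", "ADD_TOKEN": "0"}  # hypothetical
print("&".join(f"{k}={v}" for k, v in data.items()).encode())
# b'RESULTS=001&ALL_TOKEN=0&ADD_TOKEN=0'
```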
@ -279,3 +277,67 @@ class MuchaDownloadStateRequest:
self.boardId = request.get("boardId", "")
self.placeId = request.get("placeId", "")
self.storeRouterIp = request.get("storeRouterIp", "")
class MuchaDownloadErrorRequest:
def __init__(self, request: Dict) -> None:
self.gameCd = request.get("gameCd", "")
self.updateVer = request.get("updateVer", "")
self.serialNum = request.get("serialNum", "")
self.downloadUrl = request.get("downloadUrl", "")
self.errCd = request.get("errCd", "")
self.errMessage = request.get("errMessage", "")
self.boardId = request.get("boardId", "")
self.placeId = request.get("placeId", "")
self.storeRouterIp = request.get("storeRouterIp", "")
class MuchaRegiAuthRequest:
def __init__(self, request: Dict) -> None:
self.gameCd = request.get("gameCd", "")
self.serialNum = request.get("serialNum", "") # Encrypted
self.countryCd = request.get("countryCd", "")
self.registrationCd = request.get("registrationCd", "")
self.sendDate = request.get("sendDate", "")
self.useToken = request.get("useToken", "")
self.allToken = request.get("allToken", "")
self.placeId = request.get("placeId", "")
self.storeRouterIp = request.get("storeRouterIp", "")
class MuchaRegiAuthResponse:
def __init__(self) -> None:
self.RESULTS = "001" # 001 = success, 099, 098, 097 = fail, others = fail
self.ALL_TOKEN = "0" # Encrypted
self.ADD_TOKEN = "0" # Encrypted
class MuchaTokenStateRequest:
def __init__(self, request: Dict) -> None:
self.gameCd = request.get("gameCd", "")
self.serialNum = request.get("serialNum", "")
self.countryCd = request.get("countryCd", "")
self.useToken = request.get("useToken", "")
self.allToken = request.get("allToken", "")
self.placeId = request.get("placeId", "")
self.storeRouterIp = request.get("storeRouterIp", "")
class MuchaTokenStateResponse:
def __init__(self) -> None:
self.RESULTS = "001"
class MuchaTokenMarginStateRequest:
def __init__(self, request: Dict) -> None:
self.gameCd = request.get("gameCd", "")
self.serialNum = request.get("serialNum", "")
self.countryCd = request.get("countryCd", "")
self.placeId = request.get("placeId", "")
self.limitLowerToken = request.get("limitLowerToken", 0)
self.limitUpperToken = request.get("limitUpperToken", 0)
self.settlementMonth = request.get("settlementMonth", 0)
class MuchaTokenMarginStateResponse:
def __init__(self) -> None:
self.RESULTS = "001"
self.LIMIT_LOWER_TOKEN = 0
self.LIMIT_UPPER_TOKEN = 0
self.LAST_SETTLEMENT_MONTH = 0
self.LAST_LIMIT_LOWER_TOKEN = 0
self.LAST_LIMIT_UPPER_TOKEN = 0
self.SETTLEMENT_MONTH = 0

View File

@ -62,7 +62,7 @@ class TitleServlet:
self.title_registry[code] = handler_cls
else:
self.logger.warn(f"Game {folder} has no get_allnet_info")
self.logger.warning(f"Game {folder} has no get_allnet_info")
else:
self.logger.error(f"{folder} missing game_code or index in __init__.py")
@ -74,13 +74,13 @@ class TitleServlet:
def render_GET(self, request: Request, endpoints: dict) -> bytes:
code = endpoints["game"]
if code not in self.title_registry:
self.logger.warn(f"Unknown game code {code}")
self.logger.warning(f"Unknown game code {code}")
request.setResponseCode(404)
return b""
index = self.title_registry[code]
if not hasattr(index, "render_GET"):
self.logger.warn(f"{code} does not dispatch GET")
self.logger.warning(f"{code} does not dispatch GET")
request.setResponseCode(405)
return b""
@ -89,13 +89,13 @@ class TitleServlet:
def render_POST(self, request: Request, endpoints: dict) -> bytes:
code = endpoints["game"]
if code not in self.title_registry:
self.logger.warn(f"Unknown game code {code}")
self.logger.warning(f"Unknown game code {code}")
request.setResponseCode(404)
return b""
index = self.title_registry[code]
if not hasattr(index, "render_POST"):
self.logger.warn(f"{code} does not dispatch POST")
self.logger.warning(f"{code} does not dispatch POST")
request.setResponseCode(405)
return b""

View File

@ -56,10 +56,10 @@ if __name__ == "__main__":
elif args.action == "upgrade" or args.action == "rollback":
if args.version is None:
data.logger.warn("No version set, upgrading to latest")
data.logger.warning("No version set, upgrading to latest")
if args.game is None:
data.logger.warn("No game set, upgrading core schema")
data.logger.warning("No game set, upgrading core schema")
data.migrate_database(
"CORE",
int(args.version) if args.version is not None else None,

View File

@ -5,22 +5,23 @@ services:
build: .
volumes:
- ./aime:/app/aime
- ./configs/config:/app/config
environment:
CFG_DEV: 1
CFG_CORE_SERVER_HOSTNAME: 0.0.0.0
CFG_CORE_DATABASE_HOST: ma.db
CFG_CORE_MEMCACHED_HOSTNAME: ma.memcached
CFG_CORE_AIMEDB_KEY: keyhere
CFG_CORE_AIMEDB_KEY: <INSERT AIMEDB KEY HERE>
CFG_CHUNI_SERVER_LOGLEVEL: debug
ports:
- "80:80"
- "8443:8443"
- "22345:22345"
- "80:80"
- "8443:8443"
- "22345:22345"
- "8080:8080"
- "8090:8090"
- "8080:8080"
- "8090:8090"
depends_on:
db:
@ -28,21 +29,29 @@ services:
db:
hostname: ma.db
image: mysql:8.0.31-debian
image: yobasystems/alpine-mariadb:10.11.5
environment:
MYSQL_DATABASE: aime
MYSQL_USER: aime
MYSQL_PASSWORD: aime
MYSQL_ROOT_PASSWORD: AimeRootPassword
MYSQL_CHARSET: utf8mb4
MYSQL_COLLATION: utf8mb4_general_ci
##Note: expose port 3306 to allow read.py importer into database, comment out when not needed
#ports:
# - "3306:3306"
##Note: uncomment to allow mysql to create a persistent database, leave commented if you want to rebuild database from scratch often
#volumes:
# - ./AimeDB:/var/lib/mysql
healthcheck:
test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"]
test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost", "-pAimeRootPassword"]
timeout: 5s
retries: 5
memcached:
hostname: ma.memcached
image: memcached:1.6.17-bullseye
image: memcached:1.6.22-alpine3.18
command: [ "memcached", "-m", "1024", "-I", "128m" ]
phpmyadmin:
hostname: ma.phpmyadmin
@ -53,5 +62,5 @@ services:
PMA_PASSWORD: AimeRootPassword
APACHE_PORT: 8080
ports:
- "8080:8080"
- "9090:8080"

docs/INSTALL_DOCKER.md Normal file
View File

@ -0,0 +1,246 @@
# ARTEMiS - Docker Installation Guide
This step-by-step guide will walk you through installing a containerized version of ARTEMiS with Docker; some steps can be skipped if you already have the prerequisite components and modules installed.
This guide assumes Debian 12 (bookworm, stable) as the host operating system for most packages and modules.
## Pre-Requisites:
- Linux-Based Operating System (e.g. Debian, Ubuntu)
- Docker (https://get.docker.com)
- Python 3.9+
- (optional) Git
## Install Python3.9+ and Docker
```
(if this is a fresh install of the system)
sudo apt update && sudo apt upgrade
(installs python3 and pip)
sudo apt install python3 python3-pip
(installs docker)
curl -fsSL https://get.docker.com -o get-docker.sh
sh get-docker.sh
(optionally install git)
sudo apt install git
```
## Get ARTEMiS
If you installed git, clone into your choice of ARTEMiS git repository, e.g.:
```
git clone <ARTEMiS Repo> <folder>
```
If not, download the source package, and unpack it to the folder of your choice.
## Prepare development/home configuration
To build our Docker setup, first we need to create some folders and copy some files around
- Create 'aime', 'configs', 'AimeDB', and 'logs' folders in the ARTEMiS root folder (where all source files exist)
- Inside the configs folder, create a 'config' folder, and copy all .yaml files from example_config to config (that is, all files except nginx_example.conf)
- Edit .yaml files inside configs/config to suit your server needs
- Edit core.yaml inside configs/config:
```
set server.listen_address: to "0.0.0.0"
set title.hostname: to machine's IP address, e.g. "192.168.x.x", depending on your network, or actual hostname if your configuration is already set for dns resolve
set database.host: to "ma.db"
set database.memcached_host: to "ma.memcached"
set aimedb.key: to "<actual AIMEDB key>"
```
## Running Docker Compose
After configuring, go to ARTEMiS root folder, and execute:
```
docker compose up -d
```
("-d" argument means detached or daemon, meaning you will regain control of your terminal and Containers will run in background)
This will start pulling and building required images from network, after it's done, a development server should be running, with server accessible under machine's IP, frontend with port 8090, and PHPMyAdmin under port 9090.
- To turn off the server, from ARTEMiS root folder, execute:
```
docker compose down
```
- If you changed some files around, and don't see your changes applied, execute:
```
(turn off the server)
docker compose down
(rebuild)
docker compose build
(turn on)
docker compose up -d
```
- If you need to see logs from containers running, execute:
```
docker compose logs
```
- add '-f' to the end if you want to follow logs.
## Running commands
If you need to execute python scripts supplied with the application, use `docker compose exec app python3 <script> <command>`, for example `docker compose exec app python3 dbutils.py version`
## Persistent DB
By default, in development mode, the ARTEMiS database is stored only temporarily. If you wish to keep your database between restarts, bind the database directory inside the container to actual storage/a folder on your server. To do this, a few changes are needed:
- First off, edit docker-compose.yml, and uncomment 2 lines:
```
(uncomment these two)
#volumes:
# - ./AimeDB:/var/lib/mysql
```
- After that, start up the server; this time the database will be saved in the AimeDB folder created during the configuration steps.
- If you wish to save it to another folder and/or storage device, change the "./AimeDB" source path to the folder/device of your choice
NOTE (NEEDS FIX): at the moment, running development mode with a persistent DB will always run the database creation script at application start. While this doesn't break the database outright, it might cause some issues; a temporary fix can be applied:
- Start up containers with persistent DB already enabled, let application create database
- After startup, `docker compose down` the instance
- Edit entrypoint.sh and remove the `python3 dbutils.py create` line from Development mode statement
- Execute `docker compose build` and `docker compose up -d` to rebuild the app and start the containers back
## Adding importer data
To add data using the importer, there are a few ways to do it:
### Use importer locally on server
For this, the actual GameData and Options need to be available to the server system, whether via a WSL2 mount, a USB drive, a network share, or a direct copy onto the server storage.
With python3 installed on the system, install requirements.txt either directly or inside a python3 virtual environment (python3-venv).
The default mysql/mariadb client development packages will also be required.
- In the system:
```
sudo apt install default-libmysqlclient-dev build-essential pkg-config libmemcached-dev
sudo apt install mysql-client
OR
sudo apt install libmariadb-dev
```
- In the root ARTEMiS folder
```
python3 -m pip install -r requirements.txt
```
- If we wish to layer that with python3 virtual-environment, install required system packages, then:
```
sudo apt install python3-venv
python3 -m venv /path/to/venv
cd /path/to/venv/bin
python3 -m pip install -r /path/to/artemis/requirements.txt
```
- Depending on how you installed, now you can run read.py using:
- For direct installation, from root ARTEMiS folder:
```
python3 read.py <args>
```
- Or from python3 virtual environment, from root ARTEMiS folder:
```
/path/to/python3-venv/bin/python3 /path/to/artemis/read.py <args>
```
- We need to expose the database container port so that read.py can communicate with the database; inside docker-compose.yml, uncomment these 2 lines in the database container declaration (db):
```
#ports:
# - "3306:3306"
```
- Now, `docker compose down && docker compose build && docker compose up -d` to restart containers
Now to insert the data: by default, Docker doesn't expose container hostnames to the host system, so running read.py against a container will fail with an error that the hostname is not available. To fix that, add the database hostname by hand to /etc/hosts:
```
sudo <editor of your choice> /etc/hosts
add '127.0.0.1 ma.db' to the table
save and close
```
- You can remove the line from /etc/hosts and close the database port again after a successful import (this assumes you're using a persistent DB, as restarting the container without one will clear the imported data).
### Use importer on remote Linux system
Follow the system and python portion of the guide, installing the required packages and python3 modules, then download the ARTEMiS source.
- Edit core.yaml and place it in the config folder:
```
database:
host: "<hostname of target system>"
```
- Expose port 3306 from the database container to the host, and allow port 3306 through the system firewall so that the system you will be importing data from can reach it. (Remember to close the database ports after finishing!)
- Import data using read.py
### Use importer on remote Windows system
Follow the [windows](docs/INSTALL_WINDOWS.md) guide for installing python dependencies, download the ARTEMiS source.
- Edit core.yaml and place it in the config folder:
```
database:
host: "<hostname of target system>"
```
- Expose port 3306 from the database container to the host, and allow port 3306 through the system firewall so that the system you will be importing data from can reach it.
- For Windows, also allow port 3306 outside the system so that read.py can communicate with the remote database. (Remember to close the database ports after finishing!)
# Troubleshooting
## Game does not connect to ARTEMiS Allnet Server
Double-check your core.yaml to make sure all addresses are correct and all ports are correctly set and/or opened.
## Game does not connect to Title Server
The title server hostname must be the actual hostname of the system you set up the containers on, or its IP address. You can get the IP with the command `ip a`, which lists all interfaces; one of them should show your system IP (typically under eth0).
## Unhandled command in AimeDB
Make sure you have a proper AimeDB Key added to configuration.
## Memcached Error in ARTEMiS application causes errors in loading data
Currently, when running ARTEMiS from the master branch, there is a small bug that causes the app to always configure the memcached service as 127.0.0.1. To fix that, locate the cache.py file in core/data and edit:
```
memcache = pylibmc.Client([hostname], binary=True)
```
to:
```
memcache = pylibmc.Client(["ma.memcached"], binary=True)
```
And build the containers again.
This will fix errors loading data from server.
(This is fixed in development branch)
## read.py "Can't connect to local server through socket '/run/mysqld/mysqld.sock'"
sqlalchemy by default treats an IP-based connection string as a local socket connection, thus trying to connect locally; please use a hostname (such as ma.db as in this guide, not localhost) to force it to use a network interface.
### TODO:
- Production environment

View File

@ -6,11 +6,18 @@
- `name`: Name for the server, used by some games in their default MOTDs. Default `ARTEMiS`
- `is_develop`: Flags that the server is a development instance without a proxy standing in front of it. Among other things, setting this to `False` tells the server not to listen for SSL, because the proxy should be handling all SSL-related matters. Default `True`
- `threading`: Flags that `reactor.run` should be called via the `Thread` standard library. May provide a speed boost, but removes the ability to kill the server via `Ctrl + C`. Default: `False`
- `check_arcade_ip`: Checks IPs against the `arcade` table in the database, if one is defined. Default `False`
- `strict_ip_checking`: Rejects clients if there is no IP in the `arcade` table for the respective arcade. Default `False`
- `log_dir`: Directory to store logs. Server MUST have read and write permissions to this directory or you will have issues. Default `logs`
## Title
- `loglevel`: Logging level for the title server. Default `info`
- `hostname`: Hostname that gets sent to clients to tell them where to connect. Games must be able to connect to your server via the hostname or IP you specify here. Note that most games will reject `localhost` or `127.0.0.1`. Default `localhost`
- `port`: Port that the title server will listen for connections on. Set to 0 to use the Allnet handler to reduce the port footprint. Default `8080`
- `port_ssl`: Port that the secure title server will listen for connections on. Set to 0 to use the Allnet handler to reduce the port footprint. Default `0`
- `ssl_key`: Location of the ssl server key for the secure title server. Ignored if `port_ssl` is set to `0` or `is_develop` set to `False`. Default `cert/title.key`
- `ssl_cert`: Location of the ssl server certificate for the secure title server. Must not be a self-signed SSL. Ignored if `port_ssl` is set to `0` or `is_develop` is set to `False`. Default `cert/title.pem`
- `reboot_start_time`: 24 hour JST time that clients will see as the start of maintenance period. Leave blank for no maintenance time. Default: ""
- `reboot_end_time`: 24 hour JST time that clients will see as the end of maintenance period. Leave blank for no maintenance time. Default: ""
## Database
- `host`: Host of the database. Default `localhost`
- `username`: Username of the account the server should connect to the database with. Default `aime`

View File

@ -54,11 +54,33 @@ Games listed below have been tested and confirmed working.
In order to use the importer locate your game installation folder and execute:
```shell
python read.py --series SDBT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
python read.py --game SDBT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
```
The importer for Chunithm will import: Events, Music, Charge Items and Avatar Accessories.
### Config
Config file is located in `config/chuni.yaml`.
| Option | Info |
|------------------|----------------------------------------------------------------------------------------------------------------|
| `news_msg` | If this is set, the news at the top of the main screen will be displayed (up to Chunithm Paradise Lost) |
| `name` | If this is set, all players that are not on a team will use this team name by default. |
| `rank_scale` | Scales the in-game ranking based on the number of teams within the database |
| `use_login_bonus`| This is used to enable the login bonuses |
| `crypto` | This option is used to enable the TLS Encryption |
**If you would like to use network encryption, the following section is required in the config; the key, IV and hash must all be provided:**
```yaml
crypto:
encrypted_only: False
keys:
13: ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000"]
```
### Database upgrade
Always make sure your database (tables) are up-to-date, to do so go to the `core/data/schema/versions` folder and see
@ -88,6 +110,36 @@ After a failed Online Battle the room will be deleted. The host is used for the
- Timer countdown should be handled globally and not by one user
- Game can freeze or can crash if someone (especially the host) leaves the matchmaking
### Rivals
You can configure up to 4 rivals in Chunithm on a per-user basis. There is no UI to do this currently, so in the database, you can do this:
```sql
INSERT INTO aime.chuni_item_favorite (user, version, favId, favKind) VALUES (<user1>, <version>, <user2>, 2);
INSERT INTO aime.chuni_item_favorite (user, version, favId, favKind) VALUES (<user2>, <version>, <user1>, 2);
```
Note that the version **must match**, otherwise song lookup may not work.
### Teams
You can also configure teams for users to be on. There is no UI to do this currently, so in the database, you can do this:
```sql
INSERT INTO aime.chuni_profile_team (teamName) VALUES (<teamName>);
```
Team names can be regular ASCII, and they will be displayed in-game.
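To actually place a player on that team, point their profile at the new team's ID. A minimal sketch, assuming the profile table is `aime.chuni_profile_data` and stores the team in its `teamId` column (the column the title server reads):
```sql
UPDATE aime.chuni_profile_data SET teamId = <teamId> WHERE user = <user> AND version = <version>;
```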
On smaller installations, you may also wish to enable scaled team rankings. By default, Chunithm determines team ranking within the first 100 teams. This can be configured in the YAML:
```yaml
team:
rank_scale: True # Scales the in-game ranking based on the number of teams within the database, rather than the default scale of ~100 that the game normally uses.
```
### Favorite songs
You can set the songs that will be in a user's Favorite Songs category using the following SQL entries:
```sql
INSERT INTO aime.chuni_item_favorite (user, version, favId, favKind) VALUES (<user>, <version>, <songId>, 1);
```
The songId is based on the actual ID within your version of Chunithm.
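If you are unsure of a song's ID, it can be looked up in the static music data created by the importer. A sketch, assuming the table and columns are named `chuni_static_music`, `songId` and `title`:
```sql
SELECT songId, title FROM aime.chuni_static_music WHERE version = <version>;
```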
## crossbeats REV.
@ -105,7 +157,7 @@ After a failed Online Battle the room will be deleted. The host is used for the
In order to use the importer you need to use the provided `Export.csv` file:
```shell
python read.py --series SDCA --version <version ID> --binfolder titles/cxb/data
python read.py --game SDCA --version <version ID> --binfolder titles/cxb/data
```
The importer for crossbeats REV. will import Music.
@ -142,29 +194,30 @@ For versions pre-dx
| SDBM | 5 | maimai ORANGE PLUS |
| SDCQ | 6 | maimai PiNK |
| SDCQ | 7 | maimai PiNK PLUS |
| SDDK | 8 | maimai MURASAKI |
| SDDK | 9 | maimai MURASAKI PLUS |
| SDDZ | 10 | maimai MILK |
| SDDZ | 11 | maimai MILK PLUS |
| SDDK | 8 | maimai MURASAKi |
| SDDK | 9 | maimai MURASAKi PLUS |
| SDDZ | 10 | maimai MiLK |
| SDDZ | 11 | maimai MiLK PLUS |
| SDEY | 12 | maimai FiNALE |
| SDEZ | 13 | maimai DX |
| SDEZ | 14 | maimai DX PLUS |
| SDEZ | 15 | maimai DX Splash |
| SDEZ | 16 | maimai DX Splash PLUS |
| SDEZ | 17 | maimai DX Universe |
| SDEZ | 18 | maimai DX Universe PLUS |
| SDEZ | 19 | maimai DX Festival |
| SDEZ | 17 | maimai DX UNiVERSE |
| SDEZ | 18 | maimai DX UNiVERSE PLUS |
| SDEZ | 19 | maimai DX FESTiVAL |
| SDEZ | 20 | maimai DX FESTiVAL PLUS |
### Importer
In order to use the importer locate your game installation folder and execute:
DX:
```shell
python read.py --series <Game Code> --version <Version ID> --binfolder /path/to/StreamingAssets --optfolder /path/to/game/option/folder
python read.py --game <Game Code> --version <Version ID> --binfolder /path/to/StreamingAssets --optfolder /path/to/game/option/folder
```
Pre-DX:
```shell
python read.py --series <Game Code> --version <Version ID> --binfolder /path/to/data --optfolder /path/to/patch/data
python read.py --game <Game Code> --version <Version ID> --binfolder /path/to/data --optfolder /path/to/patch/data
```
The importer for maimai DX will import Events, Music and Tickets.
@ -196,7 +249,7 @@ Pre-Dx uses the same database as DX, so only upgrade using the SDEZ game code!
In order to use the importer locate your game installation folder and execute:
```shell
python read.py --series SBZV --version <version ID> --binfolder /path/to/game/data/diva --optfolder /path/to/game/data/diva/mdata
python read.py --game SBZV --version <version ID> --binfolder /path/to/game/data/diva --optfolder /path/to/game/data/diva/mdata
```
The importer for Project Diva Arcade will import all required data in order to use
@ -211,6 +264,9 @@ Config file is located in `config/diva.yaml`.
| `unlock_all_modules` | Unlocks all modules (costumes) by default, if set to `False` all modules need to be purchased |
| `unlock_all_items` | Unlocks all items (customizations) by default, if set to `False` all items need to be purchased |
### Custom PV Lists (databanks)
In order to use custom PV Lists, simply drop your .dat files into `/titles/diva/data/` and make sure they are named PvList0.dat, PvList1.dat, PvList2.dat, PvList3.dat and PvList4.dat exactly.
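For example (source path is illustrative):
```shell
cp /path/to/your/pvlists/PvList*.dat titles/diva/data/
```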
### Database upgrade
@ -243,7 +299,7 @@ python dbutils.py --game SBZV upgrade
In order to use the importer locate your game installation folder and execute:
```shell
python read.py --series SDDT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
python read.py --game SDDT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
```
The importer for O.N.G.E.K.I. will import all Cards, Music and Events.
@ -257,9 +313,19 @@ Config file is located in `config/ongeki.yaml`.
| Option | Info |
|------------------|----------------------------------------------------------------------------------------------------------------|
| `enabled_gachas` | Enter all gacha IDs for Card Maker to work; gachas other than the default ones may not work due to missing cards |
| `crypto` | This option is used to enable the TLS Encryption |
Note: gacha IDs 1149 and higher are only for Card Maker 1.35 and higher and will be ignored on lower versions.
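A hypothetical sketch of that part of `config/ongeki.yaml` (the `gachas:` nesting and the IDs shown are illustrative, not authoritative):
```yaml
gachas:
  enabled_gachas:
    - 1011
    - 1012
    - 1043
```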
**If you would like to use network encryption, add the following to your config. The key, IV and hash are all required:**
```yaml
crypto:
encrypted_only: False
keys:
7: ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000"]
```
### Database upgrade
Always make sure your database (tables) are up to date. To do so, go to the `core/data/schema/versions` folder and see
@ -282,15 +348,21 @@ python dbutils.py --game SDDT upgrade
### Support status
* Card Maker 1.30:
* CHUNITHM NEW!!: Yes
* maimai DX UNiVERSE: Yes
* O.N.G.E.K.I. bright: Yes
#### Card Maker 1.30:
* CHUNITHM NEW!!: Yes
* maimai DX UNiVERSE: Yes
* O.N.G.E.K.I. bright: Yes
* Card Maker 1.35:
* CHUNITHM SUN: Yes (NEW PLUS!! up to A032)
* maimai DX FESTiVAL: Yes (up to A035) (UNiVERSE PLUS up to A031)
* O.N.G.E.K.I. bright MEMORY: Yes
#### Card Maker 1.35:
* CHUNITHM:
* NEW!!: Yes
* NEW PLUS!!: Yes (added in A028)
* SUN: Yes (added in A032)
* maimai DX:
* UNiVERSE PLUS: Yes
* FESTiVAL: Yes (added in A031)
* FESTiVAL PLUS: Yes (added in A035)
* O.N.G.E.K.I. bright MEMORY: Yes
### Importer
@ -299,19 +371,19 @@ In order to use the importer you need to use the provided `.csv` files (which ar
option folders:
```shell
python read.py --series SDED --version <version ID> --binfolder titles/cm/cm_data --optfolder /path/to/cardmaker/option/folder
python read.py --game SDED --version <version ID> --binfolder titles/cm/cm_data --optfolder /path/to/cardmaker/option/folder
```
**If you haven't already executed the O.N.G.E.K.I. importer, make sure you import all cards!**
```shell
python read.py --series SDDT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
python read.py --game SDDT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
```
Also make sure to import all maimai DX and CHUNITHM data as well:
```shell
python read.py --series SDED --version <version ID> --binfolder /path/to/cardmaker/CardMaker_Data
python read.py --game SDED --version <version ID> --binfolder /path/to/cardmaker/CardMaker_Data
```
The importer for Card Maker will import all required Gachas (Banners) and cards (for maimai DX/CHUNITHM) and the hardcoded
@ -404,7 +476,7 @@ Gacha IDs up to 1140 will be loaded for CM 1.34 and all gachas will be loaded fo
In order to use the importer locate your game installation folder and execute:
```shell
python read.py --series SDFE --version <version ID> --binfolder /path/to/game/WindowsNoEditor/Mercury/Content
python read.py --game SDFE --version <version ID> --binfolder /path/to/game/WindowsNoEditor/Mercury/Content
```
The importer for WACCA will import all Music data.
@ -478,7 +550,7 @@ Below is a list of VIP rewards. Currently, VIP is not implemented, and thus thes
In order to use the importer locate your game installation folder and execute:
```shell
python read.py --series SDEW --version <version ID> --binfolder /path/to/game/extractedassets
python read.py --game SDEW --version <version ID> --binfolder /path/to/game/extractedassets
```
The importer for SAO will import all items, heroes, support skills and titles data.
@ -509,6 +581,8 @@ python dbutils.py --game SDEW upgrade
- Player title is currently static and cannot be changed in-game
- QR Card Scanning currently only loads a static hero
**Network hashing in GssSite.dll must be disabled**
### Credits for SAO support:
- Midorica - Limited Network Support

View File

@ -1,9 +1,10 @@
server:
enable: True
loglevel: "info"
news_msg: ""
team:
name: ARTEMiS
name: ARTEMiS # If this is set, all players that are not on a team will use this one by default.
mods:
use_login_bonus: True

View File

@ -6,11 +6,19 @@ server:
is_develop: True
threading: False
log_dir: "logs"
check_arcade_ip: False
strict_ip_checking: False
title:
loglevel: "info"
hostname: "localhost"
port: 8080
port_ssl: 0
ssl_cert: "cert/title.crt"
ssl_key: "cert/title.key"
reboot_start_time: "04:00"
reboot_end_time: "05:00"
database:
host: "localhost"
@ -22,6 +30,7 @@ database:
sha2_password: False
loglevel: "warn"
user_table_autoincrement_start: 10000
enable_memcached: True
memcached_host: "localhost"
frontend:
@ -32,6 +41,7 @@ frontend:
allnet:
loglevel: "info"
port: 80
ip_check: False
allow_online_updates: False
update_cfg_folder: ""

View File

@ -35,3 +35,6 @@ version:
card_maker: 1.30.01
7:
card_maker: 1.35.03
crypto:
encrypted_only: False

View File

@ -36,7 +36,7 @@ class HttpDispatcher(resource.Resource):
self.map_post.connect(
"allnet_downloadorder_report",
"/dl/report",
"/report-api/Report",
controller="allnet",
action="handle_dlorder_report",
conditions=dict(method=["POST"]),
@ -99,6 +99,7 @@ class HttpDispatcher(resource.Resource):
conditions=dict(method=["POST"]),
)
# Maintain compatibility
self.map_post.connect(
"mucha_boardauth",
"/mucha/boardauth.do",
@ -121,6 +122,28 @@ class HttpDispatcher(resource.Resource):
conditions=dict(method=["POST"]),
)
self.map_post.connect(
"mucha_boardauth",
"/mucha_front/boardauth.do",
controller="mucha",
action="handle_boardauth",
conditions=dict(method=["POST"]),
)
self.map_post.connect(
"mucha_updatacheck",
"/mucha_front/updatacheck.do",
controller="mucha",
action="handle_updatecheck",
conditions=dict(method=["POST"]),
)
self.map_post.connect(
"mucha_dlstate",
"/mucha_front/downloadstate.do",
controller="mucha",
action="handle_dlstate",
conditions=dict(method=["POST"]),
)
self.map_get.connect(
"title_get",
"/{game}/{version}/{endpoint:.*?}",
@ -193,11 +216,11 @@ class HttpDispatcher(resource.Resource):
return ret
elif ret is None:
self.logger.warn(f"None returned by controller for {request.uri.decode()} endpoint")
self.logger.warning(f"None returned by controller for {request.uri.decode()} endpoint")
return b""
else:
self.logger.warn(f"Unknown data type returned by controller for {request.uri.decode()} endpoint")
self.logger.warning(f"Unknown data type returned by controller for {request.uri.decode()} endpoint")
return b""
@ -256,6 +279,7 @@ if __name__ == "__main__":
allnet_server_str = f"tcp:{cfg.allnet.port}:interface={cfg.server.listen_address}"
title_server_str = f"tcp:{cfg.title.port}:interface={cfg.server.listen_address}"
title_https_server_str = f"ssl:{cfg.title.port_ssl}:interface={cfg.server.listen_address}:privateKey={cfg.title.ssl_key}:certKey={cfg.title.ssl_cert}"
adb_server_str = f"tcp:{cfg.aimedb.port}:interface={cfg.server.listen_address}"
frontend_server_str = (
f"tcp:{cfg.frontend.port}:interface={cfg.server.listen_address}"
@ -289,6 +313,11 @@ if __name__ == "__main__":
endpoints.serverFromString(reactor, title_server_str).listen(
server.Site(dispatcher)
)
if cfg.title.port_ssl > 0:
endpoints.serverFromString(reactor, title_https_server_str).listen(
server.Site(dispatcher)
)
if cfg.server.threading:
Thread(target=reactor.run, args=(False,)).start()

View File

@ -43,11 +43,11 @@ class BaseReader:
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Import Game Information")
parser.add_argument(
"--series",
"--game",
action="store",
type=str,
required=True,
help="The game series we are importing.",
help="The game we are importing.",
)
parser.add_argument(
"--version",
@ -109,7 +109,7 @@ if __name__ == "__main__":
logger.setLevel(log_lv)
coloredlogs.install(level=log_lv, logger=logger, fmt=log_fmt_str)
if args.series is None or args.version is None:
if args.game is None or args.version is None:
logger.error("Game or version not specified")
parser.print_help()
exit(1)
@ -134,7 +134,7 @@ if __name__ == "__main__":
titles = Utils.get_all_titles()
for dir, mod in titles.items():
if args.series in mod.game_codes:
if args.game in mod.game_codes:
handler = mod.reader(config, args.version, bin_arg, opt_arg, args.extra)
handler.read()

View File

@ -11,7 +11,7 @@ Games listed below have been tested and confirmed working. Only game versions ol
+ All versions + omnimix
+ maimai DX
+ All versions up to FESTiVAL
+ All versions up to FESTiVAL PLUS
+ Hatsune Miku: Project DIVA Arcade
+ All versions
@ -40,7 +40,7 @@ Games listed below have been tested and confirmed working. Only game versions ol
- mysql/mariadb server
## Setup guides
Follow the platform-specific guides for [windows](docs/INSTALL_WINDOWS.md) and [ubuntu](docs/INSTALL_UBUNTU.md) to setup and run the server.
Follow the platform-specific guides for [windows](docs/INSTALL_WINDOWS.md), [ubuntu](docs/INSTALL_UBUNTU.md) or [docker](docs/INSTALL_DOCKER.md) to setup and run the server.
## Game specific information
Read [Games specific info](docs/game_specific_info.md) for all supported games, importer settings, configuration option and database upgrades.

View File

@ -10,7 +10,7 @@ from core.config import CoreConfig
from titles.chuni.const import ChuniConstants
from titles.chuni.database import ChuniData
from titles.chuni.config import ChuniConfig
SCORE_BUFFER = {}
class ChuniBase:
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
@ -73,7 +73,7 @@ class ChuniBase:
# skip the current bonus preset if no boni were found
if all_login_boni is None or len(all_login_boni) < 1:
self.logger.warn(
self.logger.warning(
f"No bonus entries found for bonus preset {preset['presetId']}"
)
continue
@ -149,7 +149,7 @@ class ChuniBase:
game_events = self.data.static.get_enabled_events(self.version)
if game_events is None or len(game_events) == 0:
self.logger.warn("No enabled events, did you run the reader?")
self.logger.warning("No enabled events, did you run the reader?")
return {
"type": data["type"],
"length": 0,
@ -181,21 +181,50 @@ class ChuniBase:
return {"type": data["type"], "length": 0, "gameIdlistList": []}
def handle_get_game_message_api_request(self, data: Dict) -> Dict:
return {"type": data["type"], "length": "0", "gameMessageList": []}
return {
"type": data["type"],
"length": 1,
"gameMessageList": [{
"id": 1,
"type": 1,
"message": f"Welcome to {self.core_cfg.server.name} network!" if not self.game_cfg.server.news_msg else self.game_cfg.server.news_msg,
"startDate": "2017-12-05 07:00:00.0",
"endDate": "2099-12-31 00:00:00.0"
}]
}
def handle_get_game_ranking_api_request(self, data: Dict) -> Dict:
return {"type": data["type"], "gameRankingList": []}
rankings = self.data.score.get_rankings(self.version)
return {"type": data["type"], "gameRankingList": rankings}
def handle_get_game_sale_api_request(self, data: Dict) -> Dict:
return {"type": data["type"], "length": 0, "gameSaleList": []}
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
reboot_start = datetime.strftime(
datetime.now() - timedelta(hours=4), self.date_time_format
)
reboot_end = datetime.strftime(
datetime.now() - timedelta(hours=3), self.date_time_format
)
# if reboot start/end time is not defined use the default behavior of being a few hours ago
if self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "":
reboot_start = datetime.strftime(
datetime.utcnow() + timedelta(hours=6), self.date_time_format
)
reboot_end = datetime.strftime(
datetime.utcnow() + timedelta(hours=7), self.date_time_format
)
else:
# get current datetime in JST
current_jst = datetime.now(pytz.timezone('Asia/Tokyo')).date()
# parse config start/end times into datetime
reboot_start_time = datetime.strptime(self.core_cfg.title.reboot_start_time, "%H:%M")
reboot_end_time = datetime.strptime(self.core_cfg.title.reboot_end_time, "%H:%M")
# offset datetimes with current date/time
reboot_start_time = reboot_start_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo'))
reboot_end_time = reboot_end_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo'))
# create strings for use in gameSetting
reboot_start = reboot_start_time.strftime(self.date_time_format)
reboot_end = reboot_end_time.strftime(self.date_time_format)
return {
"gameSetting": {
"dataVersion": "1.00.00",
@ -361,11 +390,70 @@ class ChuniBase:
"userDuelList": duel_list,
}
def handle_get_user_rival_data_api_request(self, data: Dict) -> Dict:
p = self.data.profile.get_rival(data["rivalId"])
if p is None:
return {}
userRivalData = {
"rivalId": p.user,
"rivalName": p.userName
}
return {
"userId": data["userId"],
"userRivalData": userRivalData
}
def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict:
rival_id = data["rivalId"]
next_index = int(data["nextIndex"])
max_count = int(data["maxCount"])
user_rival_music_list = []
# Fetch all the rival music entries for the user
all_entries = self.data.score.get_rival_music(rival_id)
# Process the entries based on max_count and nextIndex
for music in all_entries[next_index:]:
music_id = music["musicId"]
level = music["level"]
score = music["scoreMax"]
rank = music["scoreRank"]
# Create a music entry for the current music_id
music_entry = next((entry for entry in user_rival_music_list if entry["musicId"] == music_id), None)
if music_entry is None:
music_entry = {
"musicId": music_id,
"length": 0,
"userRivalMusicDetailList": []
}
user_rival_music_list.append(music_entry)
# Create a level entry for the current level
level_entry = {
"level": level,
"scoreMax": score,
"scoreRank": rank
}
music_entry["userRivalMusicDetailList"].append(level_entry)
# Calculate the length for each "musicId" by counting the levels
for music_entry in user_rival_music_list:
music_entry["length"] = len(music_entry["userRivalMusicDetailList"])
# Prepare the result dictionary with user rival music data
result = {
"userId": data["userId"],
"rivalId": data["rivalId"],
"nextIndex": str(next_index + len(all_entries) if len(all_entries) <= len(user_rival_music_list) else -1),
"userRivalMusicList": user_rival_music_list[:max_count]
}
return result
def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
user_fav_item_list = []
# still needs to be implemented on WebUI
# 1: Music, 3: Character
# 1: Music, 2: User, 3: Character
fav_list = self.data.item.get_all_favorites(
data["userId"], self.version, fav_kind=int(data["kind"])
)
@ -490,22 +578,39 @@ class ChuniBase:
tmp.pop("id")
for song in song_list:
score_buf = SCORE_BUFFER.get(str(data["userId"])) or []
if song["userMusicDetailList"][0]["musicId"] == tmp["musicId"]:
found = True
song["userMusicDetailList"].append(tmp)
song["length"] = len(song["userMusicDetailList"])
score_buf.append(tmp["musicId"])
SCORE_BUFFER[str(data["userId"])] = score_buf
if not found:
score_buf = SCORE_BUFFER.get(str(data["userId"])) or []
if not found and tmp["musicId"] not in score_buf:
song_list.append({"length": 1, "userMusicDetailList": [tmp]})
score_buf.append(tmp["musicId"])
SCORE_BUFFER[str(data["userId"])] = score_buf
if len(song_list) >= max_ct:
break
if len(song_list) >= next_idx + max_ct:
next_idx += max_ct
try:
while song_list[-1]["userMusicDetailList"][0]["musicId"] == music_detail[x + 1]["musicId"]:
music = music_detail[x + 1]._asdict()
music.pop("user")
music.pop("id")
song_list[-1]["userMusicDetailList"].append(music)
song_list[-1]["length"] += 1
x += 1
except IndexError:
pass
if len(song_list) >= max_ct:
next_idx += len(song_list)
else:
next_idx = -1
SCORE_BUFFER[str(data["userId"])] = []
return {
"userId": data["userId"],
"length": len(song_list),
@ -600,39 +705,94 @@ class ChuniBase:
}
def handle_get_user_team_api_request(self, data: Dict) -> Dict:
# TODO: use the database "chuni_profile_team" with a GUI
# Default values
team_id = 65535
team_name = self.game_cfg.team.team_name
if team_name == "":
team_rank = 0
# Get user profile
profile = self.data.profile.get_profile_data(data["userId"], self.version)
if profile and profile["teamId"]:
# Get team by id
team = self.data.profile.get_team_by_id(profile["teamId"])
if team:
team_id = team["id"]
team_name = team["teamName"]
team_rank = self.data.profile.get_team_rank(team["id"])
# Don't return anything if no team name has been defined for defaults and there is no team set for the player
if not profile["teamId"] and team_name == "":
return {"userId": data["userId"], "teamId": 0}
return {
"userId": data["userId"],
"teamId": 1,
"teamRank": 1,
"teamId": team_id,
"teamRank": team_rank,
"teamName": team_name,
"userTeamPoint": {
"userId": data["userId"],
"teamId": 1,
"teamId": team_id,
"orderId": 1,
"teamPoint": 1,
"aggrDate": data["playDate"],
},
}
def handle_get_team_course_setting_api_request(self, data: Dict) -> Dict:
return {
"userId": data["userId"],
"length": 0,
"nextIndex": 0,
"nextIndex": -1,
"teamCourseSettingList": [],
}
def handle_get_team_course_setting_api_request_proto(self, data: Dict) -> Dict:
return {
"userId": data["userId"],
"length": 1,
"nextIndex": -1,
"teamCourseSettingList": [
{
"orderId": 1,
"courseId": 1,
"classId": 1,
"ruleId": 1,
"courseName": "Test",
"teamCourseMusicList": [
{"track": 184, "type": 1, "level": 3, "selectLevel": -1},
{"track": 184, "type": 1, "level": 3, "selectLevel": -1},
{"track": 184, "type": 1, "level": 3, "selectLevel": -1}
],
"teamCourseRankingInfoList": [],
"recodeDate": "2099-12-31 11:59:99.0",
"isPlayed": False
}
],
}
def handle_get_team_course_rule_api_request(self, data: Dict) -> Dict:
return {
"userId": data["userId"],
"length": 0,
"nextIndex": 0,
"teamCourseRuleList": [],
"nextIndex": -1,
"teamCourseRuleList": []
}
def handle_get_team_course_rule_api_request_proto(self, data: Dict) -> Dict:
return {
"userId": data["userId"],
"length": 1,
"nextIndex": -1,
"teamCourseRuleList": [
{
"recoveryLife": 0,
"clearLife": 100,
"damageMiss": 1,
"damageAttack": 1,
"damageJustice": 1,
"damageJusticeC": 1
}
],
}
def handle_upsert_user_all_api_request(self, data: Dict) -> Dict:
@ -706,12 +866,28 @@ class ChuniBase:
playlog["playedUserName1"] = self.read_wtf8(playlog["playedUserName1"])
playlog["playedUserName2"] = self.read_wtf8(playlog["playedUserName2"])
playlog["playedUserName3"] = self.read_wtf8(playlog["playedUserName3"])
self.data.score.put_playlog(user_id, playlog)
self.data.score.put_playlog(user_id, playlog, self.version)
if "userTeamPoint" in upsert:
# TODO: team stuff
pass
team_points = upsert["userTeamPoint"]
try:
for tp in team_points:
if tp["teamId"] != '65535':
# Fetch the current team data
current_team = self.data.profile.get_team_by_id(tp["teamId"])
# Calculate the new teamPoint
new_team_point = int(tp["teamPoint"]) + current_team["teamPoint"]
# Prepare the data to update
team_data = {
"teamPoint": new_team_point
}
# Update the team data
self.data.profile.update_team(tp["teamId"], team_data)
except:
pass # Probably a better way to catch if the team is not set yet (new profiles), but let's just pass
if "userMapAreaList" in upsert:
for map_area in upsert["userMapAreaList"]:
self.data.item.put_map_area(user_id, map_area)

View File

@ -19,6 +19,12 @@ class ChuniServerConfig:
self.__config, "chuni", "server", "loglevel", default="info"
)
)
@property
def news_msg(self) -> str:
return CoreConfig.get_config_field(
self.__config, "chuni", "server", "news_msg", default=""
)
class ChuniTeamConfig:
@ -30,6 +36,11 @@ class ChuniTeamConfig:
return CoreConfig.get_config_field(
self.__config, "chuni", "team", "name", default=""
)
@property
def rank_scale(self) -> str:
return CoreConfig.get_config_field(
self.__config, "chuni", "team", "rank_scale", default="False"
)
class ChuniModsConfig:

View File

@ -3,6 +3,7 @@ from datetime import datetime, timedelta
from random import randint
from typing import Dict
import pytz
from core.config import CoreConfig
from titles.chuni.const import ChuniConstants
from titles.chuni.database import ChuniData
@ -31,12 +32,29 @@ class ChuniNew(ChuniBase):
match_end = datetime.strftime(
datetime.utcnow() + timedelta(hours=16), self.date_time_format
)
reboot_start = datetime.strftime(
datetime.utcnow() + timedelta(hours=6), self.date_time_format
)
reboot_end = datetime.strftime(
datetime.utcnow() + timedelta(hours=7), self.date_time_format
)
# if reboot start/end time is not defined use the default behavior of being a few hours ago
if self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "":
reboot_start = datetime.strftime(
datetime.utcnow() + timedelta(hours=6), self.date_time_format
)
reboot_end = datetime.strftime(
datetime.utcnow() + timedelta(hours=7), self.date_time_format
)
else:
# get current datetime in JST
current_jst = datetime.now(pytz.timezone('Asia/Tokyo')).date()
# parse config start/end times into datetime
reboot_start_time = datetime.strptime(self.core_cfg.title.reboot_start_time, "%H:%M")
reboot_end_time = datetime.strptime(self.core_cfg.title.reboot_end_time, "%H:%M")
# offset datetimes with current date/time
reboot_start_time = reboot_start_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo'))
reboot_end_time = reboot_end_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo'))
# create strings for use in gameSetting
reboot_start = reboot_start_time.strftime(self.date_time_format)
reboot_end = reboot_end_time.strftime(self.date_time_format)
return {
"gameSetting": {
"isMaintenance": False,

View File

@ -67,7 +67,7 @@ class ChuniReader(BaseReader):
if result is not None:
self.logger.info(f"Inserted login bonus preset {id}")
else:
self.logger.warn(f"Failed to insert login bonus preset {id}")
self.logger.warning(f"Failed to insert login bonus preset {id}")
for bonus in xml_root.find("infos").findall("LoginBonusDataInfo"):
for name in bonus.findall("loginBonusName"):
@ -113,7 +113,7 @@ class ChuniReader(BaseReader):
if result is not None:
self.logger.info(f"Inserted login bonus {bonus_id}")
else:
self.logger.warn(
self.logger.warning(
f"Failed to insert login bonus {bonus_id}"
)
@ -138,7 +138,7 @@ class ChuniReader(BaseReader):
if result is not None:
self.logger.info(f"Inserted event {id}")
else:
self.logger.warn(f"Failed to insert event {id}")
self.logger.warning(f"Failed to insert event {id}")
def read_music(self, music_dir: str) -> None:
for root, dirs, files in walk(music_dir):
@ -169,8 +169,10 @@ class ChuniReader(BaseReader):
fumen_path = MusicFumenData.find("file").find("path")
if fumen_path is not None:
chart_id = MusicFumenData.find("type").find("id").text
if chart_id == "4":
chart_type = MusicFumenData.find("type")
chart_id = chart_type.find("id").text
chart_diff = chart_type.find("str").text
if chart_diff == "WorldsEnd" and (chart_id == "4" or chart_id == "5"): # 4 in SDBT, 5 in SDHD
level = float(xml_root.find("starDifType").text)
we_chara = (
xml_root.find("worldsEndTagName")
@ -200,7 +202,7 @@ class ChuniReader(BaseReader):
f"Inserted music {song_id} chart {chart_id}"
)
else:
self.logger.warn(
self.logger.warning(
f"Failed to insert music {song_id} chart {chart_id}"
)
@ -232,7 +234,7 @@ class ChuniReader(BaseReader):
if result is not None:
self.logger.info(f"Inserted charge {id}")
else:
self.logger.warn(f"Failed to insert charge {id}")
self.logger.warning(f"Failed to insert charge {id}")
def read_avatar(self, avatar_dir: str) -> None:
for root, dirs, files in walk(avatar_dir):
@ -259,4 +261,4 @@ class ChuniReader(BaseReader):
if result is not None:
self.logger.info(f"Inserted avatarAccessory {id}")
else:
self.logger.warn(f"Failed to insert avatarAccessory {id}")
self.logger.warning(f"Failed to insert avatarAccessory {id}")

View File

@ -530,7 +530,7 @@ class ChuniItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_user_gacha: Failed to insert! aime_id: {aime_id}")
self.logger.warning(f"put_user_gacha: Failed to insert! aime_id: {aime_id}")
return None
return result.lastrowid
@ -572,7 +572,7 @@ class ChuniItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_user_print_state: Failed to insert! aime_id: {aime_id}"
)
return None
@ -589,7 +589,7 @@ class ChuniItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_user_print_detail: Failed to insert! aime_id: {aime_id}"
)
return None

View File

@ -410,7 +410,7 @@ class ChuniProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_profile_data: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_profile_data: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -452,7 +452,7 @@ class ChuniProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile_data_ex: Failed to update! aime_id: {aime_id}"
)
return None
@ -479,7 +479,7 @@ class ChuniProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile_option: Failed to update! aime_id: {aime_id}"
)
return None
@ -503,7 +503,7 @@ class ChuniProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile_option_ex: Failed to update! aime_id: {aime_id}"
)
return None
@ -527,7 +527,7 @@ class ChuniProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile_recent_rating: Failed to update! aime_id: {aime_id}"
)
return None
@ -552,7 +552,7 @@ class ChuniProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile_activity: Failed to update! aime_id: {aime_id}"
)
return None
@ -578,7 +578,7 @@ class ChuniProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile_charge: Failed to update! aime_id: {aime_id}"
)
return None
@ -637,3 +637,69 @@ class ChuniProfileData(BaseData):
if result is None:
return None
return result.fetchall()
def get_team_by_id(self, team_id: int) -> Optional[Row]:
sql = select(team).where(team.c.id == team_id)
result = self.execute(sql)
if result is None:
return None
return result.fetchone()
def get_team_rank(self, team_id: int) -> int:
# Normal ranking system, likely the one used in the real servers
# Query all teams sorted by 'teamPoint'
result = self.execute(
select(team.c.id).order_by(team.c.teamPoint.desc())
)
# Get the rank of the team with the given team_id
rank = None
for i, row in enumerate(result, start=1):
if row.id == team_id:
rank = i
break
# Return the rank if found, or a default rank otherwise
return rank if rank is not None else 0
# RIP scaled team ranking. Gone, but forgotten
# def get_team_rank_scaled(self, team_id: int) -> int:
def update_team(self, team_id: int, team_data: Dict) -> bool:
team_data["id"] = team_id
sql = insert(team).values(**team_data)
conflict = sql.on_duplicate_key_update(**team_data)
result = self.execute(conflict)
if result is None:
self.logger.warn(
f"update_team: Failed to update team! team id: {team_id}"
)
return False
return True
def get_rival(self, rival_id: int) -> Optional[Row]:
sql = select(profile).where(profile.c.user == rival_id)
result = self.execute(sql)
if result is None:
return None
return result.fetchone()
def get_overview(self) -> Dict:
# Fetch and add up all the playcounts
playcount_sql = self.execute(select(profile.c.playCount))
if playcount_sql is None:
self.logger.warn(
f"get_overview: Couldn't pull playcounts"
)
return 0
total_play_count = 0;
for row in playcount_sql:
total_play_count += row[0]
return {
"total_play_count": total_play_count
}

View File

@ -6,7 +6,7 @@ from sqlalchemy.schema import ForeignKey
from sqlalchemy.engine import Row
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql.expression import exists
from core.data.schema import BaseData, metadata
course = Table(
@ -189,9 +189,28 @@ class ChuniScoreData(BaseData):
return None
return result.fetchall()
def put_playlog(self, aime_id: int, playlog_data: Dict) -> Optional[int]:
def put_playlog(self, aime_id: int, playlog_data: Dict, version: int) -> Optional[int]:
# Calculate the ROM version that should be inserted into the DB, based on the version of the game being inserted
# We only need this for version 10 (PARADISE LOST) and back, as newer versions include romVersion in their upsert
# This matters both for gameRankings and for a future DB update to keep version data separate
romVer = {
10: "1.50.0",
9: "1.45.0",
8: "1.40.0",
7: "1.35.0",
6: "1.30.0",
5: "1.25.0",
4: "1.20.0",
3: "1.15.0",
2: "1.10.0",
1: "1.05.0",
0: "1.00.0"
}
playlog_data["user"] = aime_id
playlog_data = self.fix_bools(playlog_data)
if "romVersion" not in playlog_data:
playlog_data["romVersion"] = romVer.get(version, "1.00.0")
sql = insert(playlog).values(**playlog_data)
conflict = sql.on_duplicate_key_update(**playlog_data)
@ -200,3 +219,40 @@ class ChuniScoreData(BaseData):
if result is None:
return None
return result.lastrowid
def get_rankings(self, version: int) -> Optional[List[Dict]]:
# Calculates the ROM version that should be fetched for rankings, based on the game version being retrieved
# This prevents tracks that are not accessible in your version from counting towards the 10 results
romVer = {
13: "2.10%",
12: "2.05%",
11: "2.00%",
10: "1.50%",
9: "1.45%",
8: "1.40%",
7: "1.35%",
6: "1.30%",
5: "1.25%",
4: "1.20%",
3: "1.15%",
2: "1.10%",
1: "1.05%",
0: "1.00%"
}
sql = select([playlog.c.musicId.label('id'), func.count(playlog.c.musicId).label('point')]).where((playlog.c.level != 4) & (playlog.c.romVersion.like(romVer.get(version, "%")))).group_by(playlog.c.musicId).order_by(func.count(playlog.c.musicId).desc()).limit(10)
result = self.execute(sql)
if result is None:
return None
rows = result.fetchall()
return [dict(row) for row in rows]
def get_rival_music(self, rival_id: int) -> Optional[List[Dict]]:
sql = select(best_score).where(best_score.c.user == rival_id)
result = self.execute(sql)
if result is None:
return None
return result.fetchall()

View File

@ -302,14 +302,14 @@ class ChuniStaticData(BaseData):
result = self.execute(sql)
if result is None:
self.logger.warn(
self.logger.warning(
f"update_event: failed to update event! version: {version}, event_id: {event_id}, enabled: {enabled}"
)
return None
event = self.get_event(version, event_id)
if event is None:
self.logger.warn(
self.logger.warning(
f"update_event: failed to fetch event {event_id} after updating"
)
return None
@ -453,6 +453,15 @@ class ChuniStaticData(BaseData):
return None
return result.fetchone()
def get_song(self, music_id: int) -> Optional[Row]:
sql = music.select(music.c.id == music_id)
result = self.execute(sql)
if result is None:
return None
return result.fetchone()
def put_avatar(
self,
version: int,
@ -506,7 +515,7 @@ class ChuniStaticData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"Failed to insert gacha! gacha_id {gacha_id}")
self.logger.warning(f"Failed to insert gacha! gacha_id {gacha_id}")
return None
return result.lastrowid
@ -541,7 +550,7 @@ class ChuniStaticData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"Failed to insert gacha card! gacha_id {gacha_id}")
self.logger.warning(f"Failed to insert gacha card! gacha_id {gacha_id}")
return None
return result.lastrowid
@ -577,7 +586,7 @@ class ChuniStaticData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"Failed to insert card! card_id {card_id}")
self.logger.warning(f"Failed to insert card! card_id {card_id}")
return None
return result.lastrowid
@ -587,4 +596,4 @@ class ChuniStaticData(BaseData):
result = self.execute(sql)
if result is None:
return None
return result.fetchone()
return result.fetchone()

View File

@ -4,6 +4,7 @@ import json
import logging
from enum import Enum
import pytz
from core.config import CoreConfig
from core.data.cache import cached
from titles.cm.const import CardMakerConstants
@ -61,12 +62,29 @@ class CardMakerBase:
}
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
reboot_start = date.strftime(
datetime.now() + timedelta(hours=3), self.date_time_format
)
reboot_end = date.strftime(
datetime.now() + timedelta(hours=4), self.date_time_format
)
# if reboot start/end time is not defined use the default behavior of being a few hours ago
if self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "":
reboot_start = datetime.strftime(
datetime.utcnow() + timedelta(hours=6), self.date_time_format
)
reboot_end = datetime.strftime(
datetime.utcnow() + timedelta(hours=7), self.date_time_format
)
else:
# get current datetime in JST
current_jst = datetime.now(pytz.timezone('Asia/Tokyo')).date()
# parse config start/end times into datetime
reboot_start_time = datetime.strptime(self.core_cfg.title.reboot_start_time, "%H:%M")
reboot_end_time = datetime.strptime(self.core_cfg.title.reboot_end_time, "%H:%M")
# offset datetimes with current date/time
reboot_start_time = reboot_start_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo'))
reboot_end_time = reboot_end_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo'))
# create strings for use in gameSetting
reboot_start = reboot_start_time.strftime(self.date_time_format)
reboot_end = reboot_end_time.strftime(self.date_time_format)
# grab the dict with all games version numbers from user config
games_ver = self.game_cfg.version.version(self.version)

View File

@ -31,7 +31,18 @@ class CardMakerVersionConfig:
1: {"ongeki": 1.30.01, "chuni": 2.00.00, "maimai": 1.20.00}
"""
return CoreConfig.get_config_field(
self.__config, "cardmaker", "version", default={}
self.__config, "cardmaker", "version", default={
0: {
"ongeki": "1.30.01",
"chuni": "2.00.00",
"maimai": "1.20.00"
},
1: {
"ongeki": "1.35.03",
"chuni": "2.10.00",
"maimai": "1.30.00"
}
}
)[version]

View File

@ -85,8 +85,6 @@ class CardMakerServlet:
endpoint = url_split[len(url_split) - 1]
client_ip = Utils.get_ip_addr(request)
print(f"version: {version}")
if version >= 130 and version < 135: # Card Maker
internal_ver = CardMakerConstants.VER_CARD_MAKER
elif version >= 135 and version < 140: # Card Maker 1.35
@ -124,6 +122,7 @@ class CardMakerServlet:
except Exception as e:
self.logger.error(f"Error handling v{version} method {endpoint} - {e}")
raise
return zlib.compress(b'{"stat": "0"}')
if resp is None:

View File

@ -68,7 +68,7 @@ class CardMakerReader(BaseReader):
read_csv = getattr(CardMakerReader, func)
read_csv(self, f"{self.bin_dir}/MU3/{file}")
else:
self.logger.warn(
self.logger.warning(
f"Couldn't find {file} file in {self.bin_dir}, skipping"
)
@ -205,6 +205,7 @@ class CardMakerReader(BaseReader):
"1.20": Mai2Constants.VER_MAIMAI_DX_UNIVERSE,
"1.25": Mai2Constants.VER_MAIMAI_DX_UNIVERSE_PLUS,
"1.30": Mai2Constants.VER_MAIMAI_DX_FESTIVAL,
"1.35": Mai2Constants.VER_MAIMAI_DX_FESTIVAL_PLUS,
}
for root, dirs, files in os.walk(base_dir):

View File

@ -52,7 +52,7 @@ class CxbBase:
self.logger.info(f"Login user {data['login']['authid']}")
return {"token": data["login"]["authid"], "uid": data["login"]["authid"]}
self.logger.warn(f"User {data['login']['authid']} does not have a profile")
self.logger.warning(f"User {data['login']['authid']} does not have a profile")
return {}
def task_generateCoupon(index, data1):

View File

@ -123,13 +123,13 @@ class CxbServlet(resource.Resource):
)
except Exception as f:
self.logger.warn(
self.logger.warning(
f"Error decoding json: {e} / {f} - {req_url} - {req_bytes}"
)
return b""
if req_json == {}:
self.logger.warn(f"Empty json request to {req_url}")
self.logger.warning(f"Empty json request to {req_url}")
return b""
cmd = url_split[len(url_split) - 1]
@ -140,7 +140,7 @@ class CxbServlet(resource.Resource):
not type(req_json["dldate"]) is dict
or "filetype" not in req_json["dldate"]
):
self.logger.warn(f"Malformed dldate request: {req_url} {req_json}")
self.logger.warning(f"Malformed dldate request: {req_url} {req_json}")
return b""
filetype = req_json["dldate"]["filetype"]

View File

@ -33,7 +33,7 @@ class CxbReader(BaseReader):
pull_bin_ram = True
if not path.exists(f"{self.bin_dir}"):
self.logger.warn(f"Couldn't find csv file in {self.bin_dir}, skipping")
self.logger.warning(f"Couldn't find csv file in {self.bin_dir}, skipping")
pull_bin_ram = False
if pull_bin_ram:
@ -124,4 +124,4 @@ class CxbReader(BaseReader):
int(row["easy"].replace("Easy ", "").replace("N/A", "0")),
)
except Exception:
self.logger.warn(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.warning(f"Couldn't read csv file in {self.bin_dir}, skipping")

View File

@ -7,4 +7,4 @@ index = DivaServlet
database = DivaData
reader = DivaReader
game_codes = [DivaConstants.GAME_CODE]
current_schema_version = 5
current_schema_version = 6

View File

@ -3,6 +3,7 @@ from typing import Any, List, Dict
import logging
import json
import urllib
from threading import Thread
from core.config import CoreConfig
from titles.diva.config import DivaConfig
@ -401,7 +402,7 @@ class DivaBase:
response += f"&lv_num={profile['lv_num']}"
response += f"&lv_pnt={profile['lv_pnt']}"
response += f"&vcld_pts={profile['vcld_pts']}"
response += f"&skn_eqp={profile['use_pv_skn_eqp']}"
response += f"&skn_eqp={profile['skn_eqp']}"
response += f"&btn_se_eqp={profile['btn_se_eqp']}"
response += f"&sld_se_eqp={profile['sld_se_eqp']}"
response += f"&chn_sld_se_eqp={profile['chn_sld_se_eqp']}"
@ -663,50 +664,66 @@ class DivaBase:
return pv_result
def task_generateScoreData(self, data: Dict, pd_by_pv_id, song):
if int(song) > 0:
# the request does not send an edition, so just query the best score and ranking for each edition.
# 0=ORIGINAL, 1=EXTRA
pd_db_song_0 = self.data.score.get_best_user_score(
data["pd_id"], int(song), data["difficulty"], edition=0
)
pd_db_song_1 = self.data.score.get_best_user_score(
data["pd_id"], int(song), data["difficulty"], edition=1
)
pd_db_ranking_0, pd_db_ranking_1 = None, None
if pd_db_song_0:
pd_db_ranking_0 = self.data.score.get_global_ranking(
data["pd_id"], int(song), data["difficulty"], edition=0
)
if pd_db_song_1:
pd_db_ranking_1 = self.data.score.get_global_ranking(
data["pd_id"], int(song), data["difficulty"], edition=1
)
pd_db_customize = self.data.pv_customize.get_pv_customize(
data["pd_id"], int(song)
)
# generate the pv_result string with the ORIGINAL edition and the EXTRA edition appended
pv_result = self._get_pv_pd_result(
int(song), pd_db_song_0, pd_db_ranking_0, pd_db_customize, edition=0
)
pv_result += "," + self._get_pv_pd_result(
int(song), pd_db_song_1, pd_db_ranking_1, pd_db_customize, edition=1
)
self.logger.debug(f"pv_result = {pv_result}")
pd_by_pv_id.append(urllib.parse.quote(pv_result))
else:
pd_by_pv_id.append(urllib.parse.quote(f"{song}***"))
pd_by_pv_id.append(",")
def handle_get_pv_pd_request(self, data: Dict) -> Dict:
song_id = data["pd_pv_id_lst"].split(",")
pv = ""
threads = []
pd_by_pv_id = []
for song in song_id:
if int(song) > 0:
# the request do not send a edition so just perform a query best score and ranking for each edition.
# 0=ORIGINAL, 1=EXTRA
pd_db_song_0 = self.data.score.get_best_user_score(
data["pd_id"], int(song), data["difficulty"], edition=0
)
pd_db_song_1 = self.data.score.get_best_user_score(
data["pd_id"], int(song), data["difficulty"], edition=1
)
thread_ScoreData = Thread(target=self.task_generateScoreData(data, pd_by_pv_id, song))
threads.append(thread_ScoreData)
pd_db_ranking_0, pd_db_ranking_1 = None, None
if pd_db_song_0:
pd_db_ranking_0 = self.data.score.get_global_ranking(
data["pd_id"], int(song), data["difficulty"], edition=0
)
for x in threads:
x.start()
if pd_db_song_1:
pd_db_ranking_1 = self.data.score.get_global_ranking(
data["pd_id"], int(song), data["difficulty"], edition=1
)
for x in threads:
x.join()
pd_db_customize = self.data.pv_customize.get_pv_customize(
data["pd_id"], int(song)
)
# generate the pv_result string with the ORIGINAL edition and the EXTRA edition appended
pv_result = self._get_pv_pd_result(
int(song), pd_db_song_0, pd_db_ranking_0, pd_db_customize, edition=0
)
pv_result += "," + self._get_pv_pd_result(
int(song), pd_db_song_1, pd_db_ranking_1, pd_db_customize, edition=1
)
self.logger.debug(f"pv_result = {pv_result}")
pv += urllib.parse.quote(pv_result)
else:
pv += urllib.parse.quote(f"{song}***")
pv += ","
for x in pd_by_pv_id:
pv += x
response = ""
response += f"&pd_by_pv_id={pv[:-1]}"

View File

@ -34,18 +34,18 @@ class DivaReader(BaseReader):
pull_opt_rom = True
if not path.exists(f"{self.bin_dir}/ram"):
self.logger.warn(f"Couldn't find ram folder in {self.bin_dir}, skipping")
self.logger.warning(f"Couldn't find ram folder in {self.bin_dir}, skipping")
pull_bin_ram = False
if not path.exists(f"{self.bin_dir}/rom"):
self.logger.warn(f"Couldn't find rom folder in {self.bin_dir}, skipping")
self.logger.warning(f"Couldn't find rom folder in {self.bin_dir}, skipping")
pull_bin_rom = False
if self.opt_dir is not None:
opt_dirs = self.get_data_directories(self.opt_dir)
else:
pull_opt_rom = False
self.logger.warn("No option directory specified, skipping")
self.logger.warning("No option directory specified, skipping")
if pull_bin_ram:
self.read_ram(f"{self.bin_dir}/ram")
@ -139,7 +139,7 @@ class DivaReader(BaseReader):
else:
continue
else:
self.logger.warn(f"Databank folder not found in {ram_root_dir}, skipping")
self.logger.warning(f"Databank folder not found in {ram_root_dir}, skipping")
def read_rom(self, rom_root_dir: str) -> None:
self.logger.info(f"Read ROM from {rom_root_dir}")
@ -150,7 +150,7 @@ class DivaReader(BaseReader):
elif path.exists(f"{rom_root_dir}/pv_db.txt"):
file_path = f"{rom_root_dir}/pv_db.txt"
else:
self.logger.warn(
self.logger.warning(
f"Cannot find pv_db.txt or mdata_pv_db.txt in {rom_root_dir}, skipping"
)
return

View File

@ -51,6 +51,7 @@ profile = Table(
Column("rgo_sts", Integer, nullable=False, server_default="1"),
Column("lv_efct_id", Integer, nullable=False, server_default="0"),
Column("lv_plt_id", Integer, nullable=False, server_default="1"),
Column("skn_eqp", Integer, nullable=False, server_default="0"),
Column("passwd_stat", Integer, nullable=False, server_default="0"),
Column("passwd", String(12), nullable=False, server_default="**********"),
Column(

View File

@ -114,7 +114,7 @@ class IDZUserDBProtocol(Protocol):
elif self.version == 230:
self.version_internal = IDZConstants.VER_IDZ_230
else:
self.logger.warn(f"Bad version v{self.version}")
self.logger.warning(f"Bad version v{self.version}")
self.version = None
self.version_internal = None
@ -142,7 +142,7 @@ class IDZUserDBProtocol(Protocol):
self.version_internal
].get(cmd, None)
if handler_cls is None:
self.logger.warn(f"No handler for v{self.version} {hex(cmd)} cmd")
self.logger.warning(f"No handler for v{self.version} {hex(cmd)} cmd")
handler_cls = IDZHandlerBase
handler = handler_cls(self.core_config, self.game_config, self.version_internal)

View File

@ -16,4 +16,4 @@ game_codes = [
Mai2Constants.GAME_CODE_GREEN,
Mai2Constants.GAME_CODE,
]
current_schema_version = 7
current_schema_version = 8

View File

@ -5,6 +5,7 @@ from base64 import b64decode
from os import path, stat, remove
from PIL import ImageFile
import pytz
from core.config import CoreConfig
from titles.mai2.const import Mai2Constants
from titles.mai2.config import Mai2Config
@ -21,22 +22,47 @@ class Mai2Base:
self.can_deliver = False
self.can_usbdl = False
self.old_server = ""
if self.core_config.server.is_develop and self.core_config.title.port > 0:
self.old_server = f"http://{self.core_config.title.hostname}:{self.core_config.title.port}/SDEY/197/"
else:
self.old_server = f"http://{self.core_config.title.hostname}/SDEY/197/"
def handle_get_game_setting_api_request(self, data: Dict):
return {
# if reboot start/end time is not defined use the default behavior of being a few hours ago
if self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "":
reboot_start = datetime.strftime(
datetime.utcnow() + timedelta(hours=6), self.date_time_format
)
reboot_end = datetime.strftime(
datetime.utcnow() + timedelta(hours=7), self.date_time_format
)
else:
# get current datetime in JST
current_jst = datetime.now(pytz.timezone('Asia/Tokyo')).date()
# parse config start/end times into datetime
reboot_start_time = datetime.strptime(self.core_cfg.title.reboot_start_time, "%H:%M")
reboot_end_time = datetime.strptime(self.core_cfg.title.reboot_end_time, "%H:%M")
# offset datetimes with current date/time
reboot_start_time = reboot_start_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo'))
reboot_end_time = reboot_end_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo'))
# create strings for use in gameSetting
reboot_start = reboot_start_time.strftime(self.date_time_format)
reboot_end = reboot_end_time.strftime(self.date_time_format)
return {
"isDevelop": False,
"isAouAccession": False,
"gameSetting": {
"isMaintenance": False,
"requestInterval": 1800,
"rebootStartTime": "2020-01-01 07:00:00.0",
"rebootEndTime": "2020-01-01 07:59:59.0",
"rebootStartTime": reboot_start,
"rebootEndTime": reboot_end,
"movieUploadLimit": 100,
"movieStatus": 1,
"movieServerUri": self.old_server + "api/movie" if self.game_config.uploads.movies else "movie",
@ -57,7 +83,7 @@ class Mai2Base:
events = self.data.static.get_enabled_events(self.version)
events_lst = []
if events is None or not events:
self.logger.warn("No enabled events, did you run the reader?")
self.logger.warning("No enabled events, did you run the reader?")
return {"type": data["type"], "length": 0, "gameEventList": []}
for event in events:
@ -741,7 +767,7 @@ class Mai2Base:
music_detail_list = []
if user_id <= 0:
self.logger.warn("handle_get_user_music_api_request: Could not find userid in data, or userId is 0")
self.logger.warning("handle_get_user_music_api_request: Could not find userid in data, or userId is 0")
return {}
songs = self.data.score.get_best_scores(user_id, is_dx=False)
@ -794,46 +820,46 @@ class Mai2Base:
upload_date = photo.get("uploadDate", "")
if order_id < 0 or user_id <= 0 or div_num < 0 or div_len <= 0 or not div_data or playlog_id < 0 or track_num <= 0 or not upload_date:
self.logger.warn(f"Malformed photo upload request")
self.logger.warning(f"Malformed photo upload request")
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
if order_id == 0 and div_num > 0:
self.logger.warn(f"Failed to set orderId properly (still 0 after first chunk)")
self.logger.warning(f"Failed to set orderId properly (still 0 after first chunk)")
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
if div_num == 0 and order_id > 0:
self.logger.warn(f"First chuck re-send, Ignore")
self.logger.warning(f"First chuck re-send, Ignore")
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
if div_num >= div_len:
self.logger.warn(f"Sent extra chunks ({div_num} >= {div_len})")
self.logger.warning(f"Sent extra chunks ({div_num} >= {div_len})")
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
if div_len >= 100:
self.logger.warn(f"Photo too large ({div_len} * 10240 = {div_len * 10240} bytes)")
self.logger.warning(f"Photo too large ({div_len} * 10240 = {div_len * 10240} bytes)")
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
ret_code = order_id + 1
photo_chunk = b64decode(div_data)
if len(photo_chunk) > 10240 or (len(photo_chunk) < 10240 and div_num + 1 != div_len):
self.logger.warn(f"Incorrect data size after decoding (Expected 10240, got {len(photo_chunk)})")
self.logger.warning(f"Incorrect data size after decoding (Expected 10240, got {len(photo_chunk)})")
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
out_name = f"{self.game_config.uploads.photos_dir}/{user_id}_{playlog_id}_{track_num}"
if not path.exists(f"{out_name}.bin") and div_num != 0:
self.logger.warn(f"Out of order photo upload (div_num {div_num})")
self.logger.warning(f"Out of order photo upload (div_num {div_num})")
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
if path.exists(f"{out_name}.bin") and div_num == 0:
self.logger.warn(f"Duplicate file upload")
self.logger.warning(f"Duplicate file upload")
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
elif path.exists(f"{out_name}.bin"):
fstats = stat(f"{out_name}.bin")
if fstats.st_size != 10240 * div_num:
self.logger.warn(f"Out of order photo upload (trying to upload div {div_num}, expected div {fstats.st_size / 10240} for file sized {fstats.st_size} bytes)")
self.logger.warning(f"Out of order photo upload (trying to upload div {div_num}, expected div {fstats.st_size / 10240} for file sized {fstats.st_size} bytes)")
return {'returnCode': 0, 'apiName': 'UploadUserPhotoApi'}
try:

View File

@ -38,7 +38,7 @@ class Mai2DeliverConfig:
@property
def content_folder(self) -> int:
return CoreConfig.get_config_field(
self.__config, "mai2", "server", "content_folder", default=""
self.__config, "mai2", "deliver", "content_folder", default=""
)
class Mai2UploadsConfig:

View File

@ -20,13 +20,13 @@ class Mai2Constants:
DATE_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
GAME_CODE = "SBXL"
GAME_CODE_GREEN = "SBZF"
GAME_CODE_ORANGE = "SDBM"
GAME_CODE_PINK = "SDCQ"
GAME_CODE_MURASAKI = "SDDK"
GAME_CODE_MILK = "SDDZ"
GAME_CODE_FINALE = "SDEY"
GAME_CODE = "SBXL"
GAME_CODE_GREEN = "SBZF"
GAME_CODE_ORANGE = "SDBM"
GAME_CODE_PINK = "SDCQ"
GAME_CODE_MURASAKI = "SDDK"
GAME_CODE_MILK = "SDDZ"
GAME_CODE_FINALE = "SDEY"
GAME_CODE_DX = "SDEZ"
CONFIG_NAME = "mai2.yaml"
@ -52,6 +52,7 @@ class Mai2Constants:
VER_MAIMAI_DX_UNIVERSE = 17
VER_MAIMAI_DX_UNIVERSE_PLUS = 18
VER_MAIMAI_DX_FESTIVAL = 19
VER_MAIMAI_DX_FESTIVAL_PLUS = 20
VERSION_STRING = (
"maimai",
@ -66,7 +67,7 @@ class Mai2Constants:
"maimai MURASAKi PLUS",
"maimai MiLK",
"maimai MiLK PLUS",
"maimai FiNALE",
"maimai FiNALE",
"maimai DX",
"maimai DX PLUS",
"maimai DX Splash",
@ -74,6 +75,7 @@ class Mai2Constants:
"maimai DX UNiVERSE",
"maimai DX UNiVERSE PLUS",
"maimai DX FESTiVAL",
"maimai DX FESTiVAL PLUS",
)
@classmethod

View File

@ -542,8 +542,43 @@ class Mai2DX(Mai2Base):
}
def handle_get_user_region_api_request(self, data: Dict) -> Dict:
"""
class UserRegionList:
regionId: int
playCount: int
created: str
"""
return {"userId": data["userId"], "length": 0, "userRegionList": []}
def handle_get_user_rival_data_api_request(self, data: Dict) -> Dict:
user_id = data["userId"]
rival_id = data["rivalId"]
"""
class UserRivalData:
rivalId: int
rivalName: str
"""
return {"userId": user_id, "userRivalData": {}}
def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict:
user_id = data["userId"]
rival_id = data["rivalId"]
next_idx = data["nextIndex"]
rival_music_levels = data["userRivalMusicLevelList"]
"""
class UserRivalMusicList:
class UserRivalMusicDetailList:
level: int
achievement: int
deluxscoreMax: int
musicId: int
userRivalMusicDetailList: list[UserRivalMusicDetailList]
"""
return {"userId": user_id, "nextIndex": 0, "userRivalMusicList": []}
def handle_get_user_music_api_request(self, data: Dict) -> Dict:
user_id = data.get("userId", 0)
next_index = data.get("nextIndex", 0)
@ -552,7 +587,7 @@ class Mai2DX(Mai2Base):
music_detail_list = []
if user_id <= 0:
self.logger.warn("handle_get_user_music_api_request: Could not find userid in data, or userId is 0")
self.logger.warning("handle_get_user_music_api_request: Could not find userid in data, or userId is 0")
return {}
songs = self.data.score.get_best_scores(user_id)

View File

@ -14,7 +14,7 @@ class Mai2Festival(Mai2UniversePlus):
def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
user_data = super().handle_cm_get_user_preview_api_request(data)
# hardcode lastDataVersion for CardMaker 1.35
# hardcode lastDataVersion for CardMaker
user_data["lastDataVersion"] = "1.30.00"
return user_data
@ -25,7 +25,13 @@ class Mai2Festival(Mai2UniversePlus):
return user_login
def handle_get_user_recommend_rate_music_api_request(self, data: Dict) -> Dict:
"""
userRecommendRateMusicIdList: list[int]
"""
return {"userId": data["userId"], "userRecommendRateMusicIdList": []}
def handle_get_user_recommend_select_music_api_request(self, data: Dict) -> Dict:
"""
userRecommendSelectionMusicIdList: list[int]
"""
return {"userId": data["userId"], "userRecommendSelectionMusicIdList": []}

View File

@ -0,0 +1,38 @@
from typing import Dict
from core.config import CoreConfig
from titles.mai2.festival import Mai2Festival
from titles.mai2.const import Mai2Constants
from titles.mai2.config import Mai2Config
class Mai2FestivalPlus(Mai2Festival):
def __init__(self, cfg: CoreConfig, game_cfg: Mai2Config) -> None:
super().__init__(cfg, game_cfg)
self.version = Mai2Constants.VER_MAIMAI_DX_FESTIVAL_PLUS
def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
user_data = super().handle_cm_get_user_preview_api_request(data)
# hardcode lastDataVersion for CardMaker
user_data["lastDataVersion"] = "1.35.00"
return user_data
def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
user_id = data.get("userId", 0)
kind = data.get("kind", 2)
next_index = data.get("nextIndex", 0)
max_ct = data.get("maxCount", 100)
is_all = data.get("isAllFavoriteItem", False)
"""
class userFavoriteItemList:
orderId: int
id: int
"""
return {
"userId": user_id,
"kind": kind,
"nextIndex": 0,
"userFavoriteItemList": [],
}

View File

@ -23,6 +23,7 @@ from titles.mai2.splashplus import Mai2SplashPlus
from titles.mai2.universe import Mai2Universe
from titles.mai2.universeplus import Mai2UniversePlus
from titles.mai2.festival import Mai2Festival
from titles.mai2.festivalplus import Mai2FestivalPlus
class Mai2Servlet:
@ -47,7 +48,7 @@ class Mai2Servlet:
None,
None,
None,
Mai2Finale,
Mai2Finale,
Mai2DX,
Mai2DXPlus,
Mai2Splash,
@ -55,6 +56,7 @@ class Mai2Servlet:
Mai2Universe,
Mai2UniversePlus,
Mai2Festival,
Mai2FestivalPlus,
]
self.logger = logging.getLogger("mai2")
@ -110,22 +112,34 @@ class Mai2Servlet:
)
def setup(self):
if self.game_cfg.uploads.photos and self.game_cfg.uploads.photos_dir and not path.exists(self.game_cfg.uploads.photos_dir):
if (
self.game_cfg.uploads.photos
and self.game_cfg.uploads.photos_dir
and not path.exists(self.game_cfg.uploads.photos_dir)
):
try:
mkdir(self.game_cfg.uploads.photos_dir)
except Exception:
self.logger.error(f"Failed to make photo upload directory at {self.game_cfg.uploads.photos_dir}")
self.logger.error(
f"Failed to make photo upload directory at {self.game_cfg.uploads.photos_dir}"
)
if self.game_cfg.uploads.movies and self.game_cfg.uploads.movies_dir and not path.exists(self.game_cfg.uploads.movies_dir):
if (
self.game_cfg.uploads.movies
and self.game_cfg.uploads.movies_dir
and not path.exists(self.game_cfg.uploads.movies_dir)
):
try:
mkdir(self.game_cfg.uploads.movies_dir)
except Exception:
self.logger.error(f"Failed to make movie upload directory at {self.game_cfg.uploads.movies_dir}")
self.logger.error(
f"Failed to make movie upload directory at {self.game_cfg.uploads.movies_dir}"
)
def render_POST(self, request: Request, version: int, url_path: str) -> bytes:
if url_path.lower() == "ping":
return zlib.compress(b'{"returnCode": "1"}')
elif url_path.startswith("api/movie/"):
self.logger.info(f"Movie data: {url_path} - {request.content.getvalue()}")
return b""
@ -140,18 +154,20 @@ class Mai2Servlet:
if request.uri.startswith(b"/SDEZ"):
if version < 105: # 1.0
internal_ver = Mai2Constants.VER_MAIMAI_DX
elif version >= 105 and version < 110: # Plus
elif version >= 105 and version < 110: # PLUS
internal_ver = Mai2Constants.VER_MAIMAI_DX_PLUS
elif version >= 110 and version < 115: # Splash
internal_ver = Mai2Constants.VER_MAIMAI_DX_SPLASH
elif version >= 115 and version < 120: # Splash Plus
elif version >= 115 and version < 120: # Splash PLUS
internal_ver = Mai2Constants.VER_MAIMAI_DX_SPLASH_PLUS
elif version >= 120 and version < 125: # Universe
elif version >= 120 and version < 125: # UNiVERSE
internal_ver = Mai2Constants.VER_MAIMAI_DX_UNIVERSE
elif version >= 125 and version < 130: # Universe Plus
elif version >= 125 and version < 130: # UNiVERSE PLUS
internal_ver = Mai2Constants.VER_MAIMAI_DX_UNIVERSE_PLUS
elif version >= 130: # Festival
elif version >= 130 and version < 135: # FESTiVAL
internal_ver = Mai2Constants.VER_MAIMAI_DX_FESTIVAL
elif version >= 135: # FESTiVAL PLUS
internal_ver = Mai2Constants.VER_MAIMAI_DX_FESTIVAL_PLUS
else:
if version < 110: # 1.0
@ -180,12 +196,20 @@ class Mai2Servlet:
internal_ver = Mai2Constants.VER_MAIMAI_MILK_PLUS
elif version >= 197: # Finale
internal_ver = Mai2Constants.VER_MAIMAI_FINALE
if all(c in string.hexdigits for c in endpoint) and len(endpoint) == 32:
# If we get a 32 character long hex string, it's a hash and we're
# doing encrypted. The likelyhood of false positives is low but
# technically not 0
self.logger.error("Encryption not supported at this time")
if (
request.getHeader("Mai-Encoding") is not None
or request.getHeader("X-Mai-Encoding") is not None
):
# The hash is some flavor of MD5 of the endpoint with a constant bolted onto the end of it.
# See cake.dll's Obfuscator function for details. Hopefully most DLL edits will remove
# these two(?) headers to not cause issues, but given the general quality of SEGA data...
enc_ver = request.getHeader("Mai-Encoding")
if enc_ver is None:
enc_ver = request.getHeader("X-Mai-Encoding")
self.logger.debug(
f"Encryption v{enc_ver} - User-Agent: {request.getHeader('User-Agent')}"
)
try:
unzip = zlib.decompress(req_raw)
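As a rough sketch of the obfuscation the comment above describes (MD5 of the endpoint name with a constant appended), the hashed endpoint could be reproduced along these lines; the salt value, casing and concatenation order are assumptions, and the authoritative derivation lives in cake.dll's Obfuscator.

    import hashlib

    def obfuscated_endpoint(endpoint: str, salt: str) -> str:
        # Hypothetical: MD5 over the endpoint name plus an unknown constant,
        # yielding the 32-character hex string the old check looked for.
        return hashlib.md5((endpoint + salt).encode("utf-8")).hexdigest()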
@ -225,12 +249,18 @@ class Mai2Servlet:
return zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))
def render_GET(self, request: Request, version: int, url_path: str) -> bytes:
self.logger.info(f"v{version} GET {url_path}")
self.logger.debug(f"v{version} GET {url_path}")
url_split = url_path.split("/")
if (url_split[0] == "api" and url_split[1] == "movie") or url_split[0] == "movie":
if (url_split[0] == "api" and url_split[1] == "movie") or url_split[
0
] == "movie":
if url_split[2] == "moviestart":
return json.dumps({"moviestart":{"status":"OK"}}).encode()
return json.dumps({"moviestart": {"status": "OK"}}).encode()
else:
request.setResponseCode(404)
return b""
if url_split[0] == "old":
if url_split[1] == "ping":
@ -244,18 +274,46 @@ class Mai2Servlet:
elif url_split[1].startswith("friend"):
self.logger.info(f"v{version} old server friend inquire")
return zlib.compress(b"{}")
else:
request.setResponseCode(404)
return b""
elif url_split[0] == "usbdl":
if url_split[1] == "CONNECTIONTEST":
self.logger.info(f"v{version} usbdl server test")
return zlib.compress(b"ok")
return b""
elif self.game_cfg.deliver.udbdl_enable and path.exists(
f"{self.game_cfg.deliver.content_folder}/usb/{url_split[-1]}"
):
with open(
f"{self.game_cfg.deliver.content_folder}/usb/{url_split[-1]}", "rb"
) as f:
return f.read()
else:
request.setResponseCode(404)
return b""
elif url_split[0] == "deliver":
file = url_split[len(url_split) - 1]
self.logger.info(f"v{version} {file} deliver inquire")
if not self.game_cfg.deliver.enable or not path.exists(f"{self.game_cfg.deliver.content_folder}/{file}"):
return zlib.compress(b"")
self.logger.debug(
f"{self.game_cfg.deliver.content_folder}/net_deliver/{file}"
)
if self.game_cfg.deliver.enable and path.exists(
f"{self.game_cfg.deliver.content_folder}/net_deliver/{file}"
):
with open(
f"{self.game_cfg.deliver.content_folder}/net_deliver/{file}", "rb"
) as f:
return f.read()
else:
request.setResponseCode(404)
return b""
else:
return zlib.compress(b"{}")

View File

@ -85,7 +85,7 @@ class Mai2Reader(BaseReader):
def load_table_raw(self, dir: str, file: str, key: Optional[bytes]) -> Optional[List[Dict[str, str]]]:
if not os.path.exists(f"{dir}/{file}"):
self.logger.warn(f"file {file} does not exist in directory {dir}, skipping")
self.logger.warning(f"file {file} does not exist in directory {dir}, skipping")
return
self.logger.info(f"Load table {file} from {dir}")
@ -100,7 +100,7 @@ class Mai2Reader(BaseReader):
f_data = f.read()[0x10:]
if f_data is None or not f_data:
self.logger.warn(f"file {dir} could not be read, skipping")
self.logger.warning(f"file {dir} could not be read, skipping")
return
f_data_deflate = zlib.decompress(f_data, wbits = zlib.MAX_WBITS | 16)[0x12:] # lop off the junk at the beginning
@ -127,13 +127,13 @@ class Mai2Reader(BaseReader):
try:
struct_def.append(x[x.rindex(" ") + 2: -1])
except ValueError:
self.logger.warn(f"rindex failed on line {x}")
self.logger.warning(f"rindex failed on line {x}")
if is_struct:
self.logger.warn("Struct not formatted properly")
self.logger.warning("Struct not formatted properly")
if not struct_def:
self.logger.warn("Struct def not found")
self.logger.warning("Struct def not found")
name = file[:file.index(".")]
if "_" in name:
@ -148,7 +148,7 @@ class Mai2Reader(BaseReader):
continue
if not line_match.group(1) == name.upper():
self.logger.warn(f"Strange regex match for line {x} -> {line_match}")
self.logger.warning(f"Strange regex match for line {x} -> {line_match}")
continue
vals = line_match.group(2)

View File

@ -204,7 +204,7 @@ class Mai2ItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_item: failed to insert item! user_id: {user_id}, item_kind: {item_kind}, item_id: {item_id}"
)
return None
@ -261,7 +261,7 @@ class Mai2ItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_login_bonus: failed to insert item! user_id: {user_id}, bonus_id: {bonus_id}, point: {point}"
)
return None
@ -312,7 +312,7 @@ class Mai2ItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_map: failed to insert item! user_id: {user_id}, map_id: {map_id}, distance: {distance}"
)
return None
@ -333,7 +333,7 @@ class Mai2ItemData(BaseData):
if result is None:
return None
return result.fetchone()
def put_character_(self, user_id: int, char_data: Dict) -> Optional[int]:
char_data["user"] = user_id
sql = insert(character).values(**char_data)
@ -341,7 +341,7 @@ class Mai2ItemData(BaseData):
conflict = sql.on_duplicate_key_update(**char_data)
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_character_: failed to insert item! user_id: {user_id}"
)
return None
@ -371,7 +371,7 @@ class Mai2ItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_character: failed to insert item! user_id: {user_id}, character_id: {character_id}, level: {level}"
)
return None
@ -414,7 +414,7 @@ class Mai2ItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_friend_season_ranking: failed to insert",
f"friend_season_ranking! aime_id: {aime_id}",
)
@ -432,7 +432,7 @@ class Mai2ItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_favorite: failed to insert item! user_id: {user_id}, kind: {kind}"
)
return None
@ -477,7 +477,7 @@ class Mai2ItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_card: failed to insert card! user_id: {user_id}, kind: {card_kind}"
)
return None
@ -489,6 +489,8 @@ class Mai2ItemData(BaseData):
else:
sql = card.select(and_(card.c.user == user_id, card.c.cardKind == kind))
sql = sql.order_by(card.c.startDate.desc())
result = self.execute(sql)
if result is None:
return None
@ -516,7 +518,7 @@ class Mai2ItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_card: failed to insert charge! user_id: {user_id}, chargeId: {charge_id}"
)
return None
@ -541,7 +543,7 @@ class Mai2ItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_user_print_detail: Failed to insert! aime_id: {aime_id}"
)
return None

View File

@ -40,6 +40,7 @@ detail = Table(
Column("charaLockSlot", JSON),
Column("contentBit", BigInteger),
Column("playCount", Integer),
Column("mapStock", Integer), # new with fes+
Column("eventWatchedDate", String(25)),
Column("lastGameId", String(25)),
Column("lastRomVersion", String(25)),
@ -100,7 +101,7 @@ detail = Table(
)
detail_old = Table(
"maimai_profile_detail",
"maimai_profile_detail",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column(
@ -216,6 +217,7 @@ extend = Table(
Column("isPhotoAgree", Boolean),
Column("isGotoCodeRead", Boolean),
Column("selectResultDetails", Boolean),
Column("selectResultScoreViewType", Integer), # new with fes+
Column("sortCategorySetting", Integer),
Column("sortMusicSetting", Integer),
Column("selectedCardList", JSON),
@ -251,6 +253,7 @@ option = Table(
Column("touchSize", Integer),
Column("starRotate", Integer),
Column("dispCenter", Integer),
Column("outFrameType", Integer), # new with fes+
Column("dispChain", Integer),
Column("dispRate", Integer),
Column("dispBar", Integer),
@ -276,6 +279,8 @@ option = Table(
Column("exVolume", Integer),
Column("slideSe", Integer),
Column("slideVolume", Integer),
Column("breakSlideVolume", Integer), # new with fes+
Column("touchVolume", Integer), # new with fes+
Column("touchHoldVolume", Integer),
Column("damageSeVolume", Integer),
Column("headPhoneVolume", Integer),
@ -438,7 +443,11 @@ boss = Table(
"maimai_profile_boss",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
Column(
"user",
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
nullable=False,
),
Column("pandoraFlagList0", Integer),
Column("pandoraFlagList1", Integer),
Column("pandoraFlagList2", Integer),
@ -455,23 +464,32 @@ recent_rating = Table(
"maimai_profile_recent_rating",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
Column(
"user",
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
nullable=False,
),
Column("userRecentRatingList", JSON),
UniqueConstraint("user", name="mai2_profile_recent_rating_uk"),
mysql_charset="utf8mb4",
)
consec_logins = Table(
"mai2_profile_consec_logins",
"mai2_profile_consec_logins",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
Column(
"user",
ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"),
nullable=False,
),
Column("version", Integer, nullable=False),
Column("logins", Integer),
UniqueConstraint("user", "version", name="mai2_profile_consec_logins_uk"),
mysql_charset="utf8mb4",
)
class Mai2ProfileData(BaseData):
def put_profile_detail(
self, user_id: int, version: int, detail_data: Dict, is_dx: bool = True
@ -488,24 +506,28 @@ class Mai2ProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile: Failed to create profile! user_id {user_id} is_dx {is_dx}"
)
return None
return result.lastrowid
def get_profile_detail(self, user_id: int, version: int, is_dx: bool = True) -> Optional[Row]:
def get_profile_detail(
self, user_id: int, version: int, is_dx: bool = True
) -> Optional[Row]:
if is_dx:
sql = (
select(detail)
.where(and_(detail.c.user == user_id, detail.c.version <= version))
.order_by(detail.c.version.desc())
)
else:
sql = (
select(detail_old)
.where(and_(detail_old.c.user == user_id, detail_old.c.version <= version))
.where(
and_(detail_old.c.user == user_id, detail_old.c.version <= version)
)
.order_by(detail_old.c.version.desc())
)
@ -525,7 +547,7 @@ class Mai2ProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_profile_ghost: failed to update! {user_id}")
self.logger.warning(f"put_profile_ghost: failed to update! {user_id}")
return None
return result.lastrowid
@ -552,7 +574,7 @@ class Mai2ProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_profile_extend: failed to update! {user_id}")
self.logger.warning(f"put_profile_extend: failed to update! {user_id}")
return None
return result.lastrowid
@ -582,11 +604,15 @@ class Mai2ProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_profile_option: failed to update! {user_id} is_dx {is_dx}")
self.logger.warning(
f"put_profile_option: failed to update! {user_id} is_dx {is_dx}"
)
return None
return result.lastrowid
def get_profile_option(self, user_id: int, version: int, is_dx: bool = True) -> Optional[Row]:
def get_profile_option(
self, user_id: int, version: int, is_dx: bool = True
) -> Optional[Row]:
if is_dx:
sql = (
select(option)
@ -596,7 +622,9 @@ class Mai2ProfileData(BaseData):
else:
sql = (
select(option_old)
.where(and_(option_old.c.user == user_id, option_old.c.version <= version))
.where(
and_(option_old.c.user == user_id, option_old.c.version <= version)
)
.order_by(option_old.c.version.desc())
)
@ -616,7 +644,7 @@ class Mai2ProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_profile_rating: failed to update! {user_id}")
self.logger.warning(f"put_profile_rating: failed to update! {user_id}")
return None
return result.lastrowid
@ -643,7 +671,7 @@ class Mai2ProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_region: failed to update! {user_id}")
self.logger.warning(f"put_region: failed to update! {user_id}")
return None
return result.lastrowid
@ -668,7 +696,7 @@ class Mai2ProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile_activity: failed to update! user_id: {user_id}"
)
return None
@ -689,7 +717,9 @@ class Mai2ProfileData(BaseData):
return None
return result.fetchall()
def put_web_option(self, user_id: int, version: int, web_opts: Dict) -> Optional[int]:
def put_web_option(
self, user_id: int, version: int, web_opts: Dict
) -> Optional[int]:
web_opts["user"] = user_id
web_opts["version"] = version
sql = insert(web_opt).values(**web_opts)
@ -698,14 +728,14 @@ class Mai2ProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
f"put_web_option: failed to update! user_id: {user_id}"
)
self.logger.warning(f"put_web_option: failed to update! user_id: {user_id}")
return None
return result.lastrowid
def get_web_option(self, user_id: int, version: int) -> Optional[Row]:
sql = web_opt.select(and_(web_opt.c.user == user_id, web_opt.c.version == version))
sql = web_opt.select(
and_(web_opt.c.user == user_id, web_opt.c.version == version)
)
result = self.execute(sql)
if result is None:
@ -720,12 +750,12 @@ class Mai2ProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_grade_status: failed to update! user_id: {user_id}"
)
return None
return result.lastrowid
def get_grade_status(self, user_id: int) -> Optional[Row]:
sql = grade_status.select(grade_status.c.user == user_id)
@ -742,12 +772,10 @@ class Mai2ProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
f"put_boss_list: failed to update! user_id: {user_id}"
)
self.logger.warning(f"put_boss_list: failed to update! user_id: {user_id}")
return None
return result.lastrowid
def get_boss_list(self, user_id: int) -> Optional[Row]:
sql = boss.select(boss.c.user == user_id)
@ -759,16 +787,16 @@ class Mai2ProfileData(BaseData):
def put_recent_rating(self, user_id: int, rr: Dict) -> Optional[int]:
sql = insert(recent_rating).values(user=user_id, userRecentRatingList=rr)
conflict = sql.on_duplicate_key_update({'userRecentRatingList': rr})
conflict = sql.on_duplicate_key_update({"userRecentRatingList": rr})
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_recent_rating: failed to update! user_id: {user_id}"
)
return None
return result.lastrowid
def get_recent_rating(self, user_id: int) -> Optional[Row]:
sql = recent_rating.select(recent_rating.c.user == user_id)
@ -776,27 +804,25 @@ class Mai2ProfileData(BaseData):
if result is None:
return None
return result.fetchone()
def add_consec_login(self, user_id: int, version: int) -> None:
sql = insert(consec_logins).values(
user=user_id,
version=version,
logins=1
)
conflict = sql.on_duplicate_key_update(
logins=consec_logins.c.logins + 1
)
def add_consec_login(self, user_id: int, version: int) -> None:
sql = insert(consec_logins).values(user=user_id, version=version, logins=1)
conflict = sql.on_duplicate_key_update(logins=consec_logins.c.logins + 1)
result = self.execute(conflict)
if result is None:
self.logger.error(f"Failed to update consecutive login count for user {user_id} version {version}")
self.logger.error(
f"Failed to update consecutive login count for user {user_id} version {version}"
)
def get_consec_login(self, user_id: int, version: int) -> Optional[Row]:
sql = select(consec_logins).where(and_(
consec_logins.c.user==user_id,
consec_logins.c.version==version,
))
sql = select(consec_logins).where(
and_(
consec_logins.c.user == user_id,
consec_logins.c.version == version,
)
)
result = self.execute(sql)
if result is None:
@ -804,12 +830,12 @@ class Mai2ProfileData(BaseData):
return result.fetchone()
def reset_consec_login(self, user_id: int, version: int) -> Optional[Row]:
sql = consec_logins.update(and_(
consec_logins.c.user==user_id,
consec_logins.c.version==version,
)).values(
logins=1
)
sql = consec_logins.update(
and_(
consec_logins.c.user == user_id,
consec_logins.c.version == version,
)
).values(logins=1)
result = self.execute(sql)
if result is None:

View File

@ -161,7 +161,7 @@ class Mai2StaticData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"Failed to insert song {song_id} chart {chart_id}")
self.logger.warning(f"Failed to insert song {song_id} chart {chart_id}")
return None
return result.lastrowid
@ -187,7 +187,7 @@ class Mai2StaticData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"Failed to insert charge {ticket_id} type {ticket_type}")
self.logger.warning(f"Failed to insert charge {ticket_id} type {ticket_type}")
return None
return result.lastrowid
@ -237,7 +237,7 @@ class Mai2StaticData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"Failed to insert card {card_id}")
self.logger.warning(f"Failed to insert card {card_id}")
return None
return result.lastrowid

View File

@ -23,10 +23,11 @@ class Mai2Universe(Mai2SplashPlus):
return {
"userName": p["userName"],
"rating": p["playerRating"],
# hardcode lastDataVersion for CardMaker 1.34
# hardcode lastDataVersion for CardMaker
"lastDataVersion": "1.20.00",
# checks if the user is still logged in
"isLogin": False,
"isExistSellingCard": False,
"isExistSellingCard": True,
}
def handle_cm_get_user_data_api_request(self, data: Dict) -> Dict:
@ -70,8 +71,12 @@ class Mai2Universe(Mai2SplashPlus):
tmp.pop("cardName")
tmp.pop("enabled")
tmp["startDate"] = datetime.strftime(tmp["startDate"], Mai2Constants.DATE_TIME_FORMAT)
tmp["endDate"] = datetime.strftime(tmp["endDate"], Mai2Constants.DATE_TIME_FORMAT)
tmp["startDate"] = datetime.strftime(
tmp["startDate"], Mai2Constants.DATE_TIME_FORMAT
)
tmp["endDate"] = datetime.strftime(
tmp["endDate"], Mai2Constants.DATE_TIME_FORMAT
)
tmp["noticeStartDate"] = datetime.strftime(
tmp["noticeStartDate"], Mai2Constants.DATE_TIME_FORMAT
)

View File

@ -2,9 +2,11 @@ from titles.ongeki.index import OngekiServlet
from titles.ongeki.const import OngekiConstants
from titles.ongeki.database import OngekiData
from titles.ongeki.read import OngekiReader
from titles.ongeki.frontend import OngekiFrontend
index = OngekiServlet
database = OngekiData
reader = OngekiReader
frontend = OngekiFrontend
game_codes = [OngekiConstants.GAME_CODE]
current_schema_version = 5

View File

@ -4,6 +4,7 @@ import json
import logging
from enum import Enum
import pytz
from core.config import CoreConfig
from core.data.cache import cached
from titles.ongeki.const import OngekiConstants
@ -103,12 +104,30 @@ class OngekiBase:
self.version = OngekiConstants.VER_ONGEKI
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
reboot_start = date.strftime(
datetime.now() + timedelta(hours=3), self.date_time_format
)
reboot_end = date.strftime(
datetime.now() + timedelta(hours=4), self.date_time_format
)
# if reboot start/end time is not defined use the default behavior of being a few hours ago
if self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "":
reboot_start = datetime.strftime(
datetime.utcnow() + timedelta(hours=6), self.date_time_format
)
reboot_end = datetime.strftime(
datetime.utcnow() + timedelta(hours=7), self.date_time_format
)
else:
# get current datetime in JST
current_jst = datetime.now(pytz.timezone('Asia/Tokyo')).date()
# parse config start/end times into datetime
reboot_start_time = datetime.strptime(self.core_cfg.title.reboot_start_time, "%H:%M")
reboot_end_time = datetime.strptime(self.core_cfg.title.reboot_end_time, "%H:%M")
# offset datetimes with current date/time
reboot_start_time = reboot_start_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo'))
reboot_end_time = reboot_end_time.replace(year=current_jst.year, month=current_jst.month, day=current_jst.day, tzinfo=pytz.timezone('Asia/Tokyo'))
# create strings for use in gameSetting
reboot_start = reboot_start_time.strftime(self.date_time_format)
reboot_end = reboot_end_time.strftime(self.date_time_format)
return {
"gameSetting": {
"dataVersion": "1.00.00",
@ -978,35 +997,38 @@ class OngekiBase:
"""
Added in Bright
"""
rival_list = self.data.profile.get_rivals(data["userId"])
if rival_list is None or len(rival_list) < 1:
rival_list = []
user_rivals = self.data.profile.get_rivals(data["userId"])
for rival in user_rivals:
tmp = {}
tmp["rivalUserId"] = rival[0]
rival_list.append(tmp)
if user_rivals is None or len(rival_list) < 1:
return {
"userId": data["userId"],
"length": 0,
"userRivalList": [],
}
return {
"userId": data["userId"],
"length": len(rival_list),
"userRivalList": rival_list._asdict(),
"userRivalList": rival_list,
}
def handle_get_user_rival_data_api_reqiest(self, data: Dict) -> Dict:
def handle_get_user_rival_data_api_request(self, data: Dict) -> Dict:
"""
Added in Bright
"""
rivals = []
for rival in data["userRivalList"]:
name = self.data.profile.get_profile_name(
rival["rivalUserId"], self.version
)
if name is None:
continue
rivals.append({"rivalUserId": rival["rival"], "rivalUserName": name})
rivals.append({"rivalUserId": rival["rivalUserId"], "rivalUserName": name})
return {
"userId": data["userId"],
"length": len(rivals),
@ -1027,7 +1049,6 @@ class OngekiBase:
for song in music["userMusicList"]:
song["userRivalMusicDetailList"] = song["userMusicDetailList"]
song.pop("userMusicDetailList")
return {
"userId": data["userId"],
"rivalUserId": rival_id,

View File

@ -48,9 +48,30 @@ class OngekiCardMakerVersionConfig:
self.__config, "ongeki", "version", default={}
).get(version)
class OngekiCryptoConfig:
def __init__(self, parent_config: "OngekiConfig") -> None:
self.__config = parent_config
@property
def keys(self) -> Dict:
"""
in the form of:
internal_version: [key, iv]
all values are hex strings
"""
return CoreConfig.get_config_field(
self.__config, "ongeki", "crypto", "keys", default={}
)
@property
def encrypted_only(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "ongeki", "crypto", "encrypted_only", default=False
)
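A minimal sketch of what this section expects once parsed, with placeholder hex strings and a made-up internal version number; note that the servlet code further down also reads a third hex value per version, used as the salt for hashing endpoint names.

    # hypothetical parsed ongeki.yaml "crypto" section
    crypto = {
        "keys": {
            7: [
                "000102030405060708090a0b0c0d0e0f",  # AES key
                "0f0e0d0c0b0a09080706050403020100",  # AES IV
                "00112233445566778899aabbccddeeff",  # endpoint-hash salt (keys[2])
            ],
        },
        "encrypted_only": False,
    }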
class OngekiConfig(dict):
def __init__(self) -> None:
self.server = OngekiServerConfig(self)
self.gachas = OngekiGachaConfig(self)
self.version = OngekiCardMakerVersionConfig(self)
self.crypto = OngekiCryptoConfig(self)

titles/ongeki/frontend.py Normal file (87 lines added)
View File

@ -0,0 +1,87 @@
import yaml
import jinja2
from twisted.web.http import Request
from os import path
from twisted.web.util import redirectTo
from twisted.web.server import Session
from core.frontend import FE_Base, IUserSession
from core.config import CoreConfig
from titles.ongeki.config import OngekiConfig
from titles.ongeki.const import OngekiConstants
from titles.ongeki.database import OngekiData
from titles.ongeki.base import OngekiBase
class OngekiFrontend(FE_Base):
def __init__(
self, cfg: CoreConfig, environment: jinja2.Environment, cfg_dir: str
) -> None:
super().__init__(cfg, environment)
self.data = OngekiData(cfg)
self.game_cfg = OngekiConfig()
if path.exists(f"{cfg_dir}/{OngekiConstants.CONFIG_NAME}"):
self.game_cfg.update(
yaml.safe_load(open(f"{cfg_dir}/{OngekiConstants.CONFIG_NAME}"))
)
self.nav_name = "O.N.G.E.K.I."
self.version_list = OngekiConstants.VERSION_NAMES
def render_GET(self, request: Request) -> bytes:
template = self.environment.get_template(
"titles/ongeki/frontend/ongeki_index.jinja"
)
sesh: Session = request.getSession()
usr_sesh = IUserSession(sesh)
self.version = usr_sesh.ongeki_version
if getattr(usr_sesh, "userId", 0) != 0:
profile_data =self.data.profile.get_profile_data(usr_sesh.userId, self.version)
rival_list = self.data.profile.get_rivals(usr_sesh.userId)
rival_data = {
"userRivalList": rival_list,
"userId": usr_sesh.userId
}
rival_info = OngekiBase.handle_get_user_rival_data_api_request(self, rival_data)
return template.render(
data=self.data.profile,
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
gachas=self.game_cfg.gachas.enabled_gachas,
profile_data=profile_data,
rival_info=rival_info["userRivalDataList"],
version_list=self.version_list,
version=self.version,
sesh=vars(usr_sesh)
).encode("utf-16")
else:
return redirectTo(b"/gate/", request)
def render_POST(self, request: Request):
uri = request.uri.decode()
sesh: Session = request.getSession()
usr_sesh = IUserSession(sesh)
if hasattr(usr_sesh, "userId"):
if uri == "/game/ongeki/rival.add":
rival_id = request.args[b"rivalUserId"][0].decode()
self.data.profile.put_rival(usr_sesh.userId, rival_id)
# self.logger.info(f"{usr_sesh.userId} added a rival")
return redirectTo(b"/game/ongeki/", request)
elif uri == "/game/ongeki/rival.delete":
rival_id = request.args[b"rivalUserId"][0].decode()
self.data.profile.delete_rival(usr_sesh.userId, rival_id)
# self.logger.info(f"{response}")
return redirectTo(b"/game/ongeki/", request)
elif uri == "/game/ongeki/version.change":
ongeki_version=request.args[b"version"][0].decode()
if(ongeki_version.isdigit()):
usr_sesh.ongeki_version=int(ongeki_version)
return redirectTo(b"/game/ongeki/", request)
else:
return b"Something went wrong"
else:
return b"User is not logged in"

View File

@ -0,0 +1,24 @@
function deleteRival(rivalUserId){
$(document).ready(function () {
$.post("/game/ongeki/rival.delete",
{
rivalUserId
},
function(data,status){
window.location.replace("/game/ongeki/")
})
});
}
function changeVersion(sel){
$(document).ready(function () {
$.post("/game/ongeki/version.change",
{
version: sel.value
},
function(data,status){
window.location.replace("/game/ongeki/")
})
});
}

View File

@ -0,0 +1,83 @@
{% extends "core/frontend/index.jinja" %}
{% block content %}
{% if sesh is defined and sesh["userId"] > 0 %}
<br>
<br>
<br>
<div class="container">
<div class="row">
<h2> Profile </h2>
<h3>Version:
<select name="version" id="version" onChange="changeVersion(this)">
{% for ver in version_list %}
<option value={{loop.index0}} {{ "selected" if loop.index0==version else "" }} >{{ver}}</option>
{% endfor %}
</select>
</h3>
<hr>
</div>
<div class="row">
<div class="col">
<h2> Name: {{ profile_data.userName if profile_data.userName is defined else "Profile not found" }}</h2>
</div>
<div class="col">
<h4> ID: {{ profile_data.user if profile_data.user is defined else 'Profile not found' }}</h4>
</div>
</div>
<hr>
<div class="row">
<h2> Rivals <button class="btn btn-success" data-bs-toggle="modal" data-bs-target="#rival_add">Add</button></h2>
</div>
<div class="row">
<table class="table table-dark table-hover">
<thead>
<tr>
<th scope="col">ID</th>
<th scope="col">Name</th>
<th scope="col">Delete</th>
</tr>
</thead>
<tbody>
{% for rival in rival_info%}
<tr id="{{rival.rivalUserId}}">
<td>{{rival.rivalUserId}}</td>
<td>{{rival.rivalUserName}}</td>
<td><button class="btn-danger btn btn-sm" onclick="deleteRival({{rival.rivalUserId}})">Delete</button></td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
<div class="modal fade" id="rival_add" tabindex="-1" aria-labelledby="card_add_label" data-bs-theme="dark" aria-hidden="true">
<form id="rival" action="/game/ongeki/rival.add" method="post">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Modal title</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
Note:<br>
Please use the ID shown next to your name on the profile page.
<br>
<label for="rivalUserId">ID:&nbsp;</label><input form="rival" id="rivalUserId" name="rivalUserId" maxlength="5" type="number" required>
</div>
<div class="modal-footer">
<input type=submit class="btn btn-primary" type="button" form="rival" value="Add">
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</form>
</div>
</div>
<script>
{% include 'titles/ongeki/frontend/js/ongeki_scripts.js' %}
</script>
{% else %}
<h2>Not Currently Logged In</h2>
{% endif %}
{% endblock content %}

View File

@ -7,6 +7,10 @@ import logging
import coloredlogs
import zlib
from logging.handlers import TimedRotatingFileHandler
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad
from Crypto.Protocol.KDF import PBKDF2
from Crypto.Hash import SHA1
from os import path
from typing import Tuple
@ -28,6 +32,7 @@ class OngekiServlet:
def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
self.core_cfg = core_cfg
self.game_cfg = OngekiConfig()
self.hash_table: Dict[Dict[str, str]] = {}
if path.exists(f"{cfg_dir}/{OngekiConstants.CONFIG_NAME}"):
self.game_cfg.update(
yaml.safe_load(open(f"{cfg_dir}/{OngekiConstants.CONFIG_NAME}"))
@ -45,27 +50,60 @@ class OngekiServlet:
]
self.logger = logging.getLogger("ongeki")
log_fmt_str = "[%(asctime)s] Ongeki | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.core_cfg.server.log_dir, "ongeki"),
encoding="utf8",
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
if not hasattr(self.logger, "inited"):
log_fmt_str = "[%(asctime)s] Ongeki | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.core_cfg.server.log_dir, "ongeki"),
encoding="utf8",
when="d",
backupCount=10,
)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
fileHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.setLevel(self.game_cfg.server.loglevel)
coloredlogs.install(
level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(self.game_cfg.server.loglevel)
coloredlogs.install(
level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger.inited = True
for version, keys in self.game_cfg.crypto.keys.items():
if len(keys) < 3:
continue
self.hash_table[version] = {}
method_list = [
method
for method in dir(self.versions[version])
if not method.startswith("__")
]
for method in method_list:
method_fixed = inflection.camelize(method)[6:-7]
# number of iterations is 64 on Bright Memory
iter_count = 64
hash = PBKDF2(
method_fixed,
bytes.fromhex(keys[2]),
128,
count=iter_count,
hmac_hash_module=SHA1,
)
hashed_name = hash.hex()[:32] # truncate unused bytes like the game does
self.hash_table[version][hashed_name] = method_fixed
self.logger.debug(
f"Hashed v{version} method {method_fixed} with {bytes.fromhex(keys[2])} to get {hash.hex()[:32]}"
)
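To make the lookup above concrete, here is a standalone sketch of hashing a single handler name the same way the loop does; the salt is a placeholder standing in for the third hex string from the crypto config.

    import inflection
    from Crypto.Protocol.KDF import PBKDF2
    from Crypto.Hash import SHA1

    def endpoint_hash(handler_name: str, salt: bytes) -> str:
        # "handle_get_user_data_api_request" -> "GetUserDataApi"
        method_fixed = inflection.camelize(handler_name)[6:-7]
        # 64 iterations, per the Bright Memory comment above
        digest = PBKDF2(method_fixed, salt, 128, count=64, hmac_hash_module=SHA1)
        return digest.hex()[:32]  # truncate unused bytes like the game does

    # endpoint_hash("handle_get_user_data_api_request", bytes.fromhex("00112233"))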
@classmethod
def get_allnet_info(
@ -100,6 +138,7 @@ class OngekiServlet:
req_raw = request.content.getvalue()
url_split = url_path.split("/")
encrtped = False
internal_ver = 0
endpoint = url_split[len(url_split) - 1]
client_ip = Utils.get_ip_addr(request)
@ -125,8 +164,45 @@ class OngekiServlet:
# If we get a 32 character long hex string, it's a hash and we're
# doing encrypted. The likelihood of false positives is low but
# technically not 0
self.logger.error("Encryption not supported at this time")
return b""
if internal_ver not in self.hash_table:
self.logger.error(
f"v{version} does not support encryption or no keys entered"
)
return zlib.compress(b'{"stat": "0"}')
elif endpoint.lower() not in self.hash_table[internal_ver]:
self.logger.error(
f"No hash found for v{version} endpoint {endpoint}"
)
return zlib.compress(b'{"stat": "0"}')
endpoint = self.hash_table[internal_ver][endpoint.lower()]
try:
crypt = AES.new(
bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][0]),
AES.MODE_CBC,
bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][1]),
)
req_raw = crypt.decrypt(req_raw)
except Exception as e:
self.logger.error(
f"Failed to decrypt v{version} request to {endpoint} -> {e}"
)
return zlib.compress(b'{"stat": "0"}')
encrtped = True
if (
not encrtped
and self.game_cfg.crypto.encrypted_only
):
self.logger.error(
f"Unencrypted v{version} {endpoint} request, but config is set to encrypted only: {req_raw}"
)
return zlib.compress(b'{"stat": "0"}')
try:
unzip = zlib.decompress(req_raw)
@ -163,4 +239,17 @@ class OngekiServlet:
self.logger.debug(f"Response {resp}")
return zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))
zipped = zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))
if not encrtped:
return zipped
padded = pad(zipped, 16)
crypt = AES.new(
bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][0]),
AES.MODE_CBC,
bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][1]),
)
return crypt.encrypt(padded)
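Taken together with the request-side decryption above, the wire format for an encrypted exchange is zlib-compressed JSON, padded to the AES block size and encrypted with AES-CBC. A minimal sketch of producing such a body (key and IV are placeholders for the per-version values in the config):

    import json, zlib
    from Crypto.Cipher import AES
    from Crypto.Util.Padding import pad

    def encode_body(payload: dict, key: bytes, iv: bytes) -> bytes:
        # compress the JSON, pad to 16 bytes, then CBC-encrypt
        zipped = zlib.compress(json.dumps(payload, ensure_ascii=False).encode("utf-8"))
        return AES.new(key, AES.MODE_CBC, iv).encrypt(pad(zipped, 16))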

View File

@ -326,7 +326,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_card: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_card: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -346,7 +346,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_character: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_character: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -366,7 +366,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_deck: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_deck: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -394,7 +394,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_boss: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_boss: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -406,7 +406,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_story: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_story: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -426,7 +426,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_chapter: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_chapter: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -446,7 +446,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_item: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_item: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -479,7 +479,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_music_item: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_music_item: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -499,7 +499,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_login_bonus: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_login_bonus: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -521,7 +521,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_mission_point: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_mission_point: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -541,7 +541,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_event_point: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_event_point: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -561,7 +561,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_scenerio: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_scenerio: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -581,7 +581,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_trade_item: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_trade_item: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -601,7 +601,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_event_music: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_event_music: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -621,7 +621,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_tech_event: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_tech_event: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -651,7 +651,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_memorychapter: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_memorychapter: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -694,7 +694,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_user_gacha: Failed to insert! aime_id: {aime_id}")
self.logger.warning(f"put_user_gacha: Failed to insert! aime_id: {aime_id}")
return None
return result.lastrowid
@ -709,7 +709,7 @@ class OngekiItemData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_user_print_detail: Failed to insert! aime_id: {aime_id}"
)
return None

View File

@ -63,7 +63,7 @@ class OngekiLogData(BaseData):
result = self.execute(sql)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_gp_log: Failed to insert GP log! aime_id: {aime_id} kind {kind} pattern {pattern} current_gp {current_gp}"
)
return result.lastrowid

View File

@ -3,7 +3,7 @@ from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, an
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BigInteger
from sqlalchemy.engine.base import Connection
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.sql import func, select, delete
from sqlalchemy.engine import Row
from sqlalchemy.dialects.mysql import insert
@ -269,7 +269,7 @@ class OngekiProfileData(BaseData):
return None
return row["userName"]
def get_profile_preview(self, aime_id: int, version: int) -> Optional[Row]:
sql = (
select([profile, option])
@ -364,7 +364,7 @@ class OngekiProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_profile_data: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_profile_data: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -376,7 +376,7 @@ class OngekiProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile_options: Failed to update! aime_id: {aime_id}"
)
return None
@ -393,7 +393,7 @@ class OngekiProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile_recent_rating: failed to update recent rating! aime_id {aime_id}"
)
return None
@ -415,7 +415,7 @@ class OngekiProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile_rating_log: failed to update rating log! aime_id {aime_id} data_version {data_version} highest_rating {highest_rating}"
)
return None
@ -449,7 +449,7 @@ class OngekiProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile_activity: failed to put activity! aime_id {aime_id} kind {kind} activity_id {activity_id}"
)
return None
@ -466,7 +466,7 @@ class OngekiProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_profile_region: failed to update! aime_id {aime_id} region {region}"
)
return None
@ -480,7 +480,7 @@ class OngekiProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_best_score: Failed to add score! aime_id: {aime_id}")
self.logger.warning(f"put_best_score: Failed to add score! aime_id: {aime_id}")
return None
return result.lastrowid
@ -492,19 +492,26 @@ class OngekiProfileData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_kop: Failed to add score! aime_id: {aime_id}")
self.logger.warning(f"put_kop: Failed to add score! aime_id: {aime_id}")
return None
return result.lastrowid
def put_rival(self, aime_id: int, rival_id: int) -> Optional[int]:
sql = insert(rival).values(user=aime_id, rivalUserId=rival_id)
conflict = sql.on_duplicate_key_update(rival=rival_id)
conflict = sql.on_duplicate_key_update(rivalUserId=rival_id)
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"put_rival: failed to update! aime_id: {aime_id}, rival_id: {rival_id}"
)
return None
return result.lastrowid
def delete_rival(self, aime_id: int, rival_id: int) -> Optional[int]:
sql = delete(rival).where(rival.c.user==aime_id, rival.c.rivalUserId==rival_id)
result = self.execute(sql)
if result is None:
self.logger.error(f"delete_rival: failed to delete! aime_id: {aime_id}, rival_id: {rival_id}")
else:
return result.rowcount

View File

@ -139,7 +139,7 @@ class OngekiScoreData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_tech_count: Failed to update! aime_id: {aime_id}")
self.logger.warning(f"put_tech_count: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
@ -164,7 +164,7 @@ class OngekiScoreData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"put_best_score: Failed to add score! aime_id: {aime_id}")
self.logger.warning(f"put_best_score: Failed to add score! aime_id: {aime_id}")
return None
return result.lastrowid
@ -175,6 +175,6 @@ class OngekiScoreData(BaseData):
result = self.execute(sql)
if result is None:
self.logger.warn(f"put_playlog: Failed to add playlog! aime_id: {aime_id}")
self.logger.warning(f"put_playlog: Failed to add playlog! aime_id: {aime_id}")
return None
return result.lastrowid

View File

@ -105,7 +105,7 @@ class OngekiStaticData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"Failed to insert card! card_id {card_id}")
self.logger.warning(f"Failed to insert card! card_id {card_id}")
return None
return result.lastrowid
@ -180,7 +180,7 @@ class OngekiStaticData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"Failed to insert gacha! gacha_id {gacha_id}")
self.logger.warning(f"Failed to insert gacha! gacha_id {gacha_id}")
return None
return result.lastrowid
@ -215,7 +215,7 @@ class OngekiStaticData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"Failed to insert gacha card! gacha_id {gacha_id}")
self.logger.warning(f"Failed to insert gacha card! gacha_id {gacha_id}")
return None
return result.lastrowid
@ -243,7 +243,7 @@ class OngekiStaticData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(f"Failed to insert event! event_id {event_id}")
self.logger.warning(f"Failed to insert event! event_id {event_id}")
return None
return result.lastrowid
@ -304,7 +304,7 @@ class OngekiStaticData(BaseData):
result = self.execute(conflict)
if result is None:
self.logger.warn(
self.logger.warning(
f"Failed to insert chart! song_id: {song_id}, chart_id: {chart_id}"
)
return None

View File

@ -157,7 +157,6 @@ class PokkenBase:
support_set_3
aid_skill_list
achievement_flag
pokemon_data
event_achievement_flag
event_achievement_param
"""
@ -200,7 +199,7 @@ class PokkenBase:
load_usr.home_loc_name = profile_dict.get("home_loc_name", "")
load_usr.pref_code = profile_dict.get("pref_code", 0)
load_usr.trainer_name = profile_dict.get(
"trainer_name", "Newb" + str(random.randint(1111, 999999))
"trainer_name", f"Newb{str(user_id).zfill(4)}"
)
load_usr.trainer_rank_point = profile_dict.get("trainer_rank_point", 0)
load_usr.wallet = profile_dict.get("wallet", 0)
@ -262,6 +261,29 @@ class PokkenBase:
load_usr.sp_bonus_key_value_2 = profile_dict.get("sp_bonus_key_value_2", 0)
load_usr.last_play_event_id = profile_dict.get("last_play_event_id", 0)
if pokemon_data is not None:
for pkmn in pokemon_data:
pkmn_d = pkmn._asdict()
pkm = jackal_pb2.LoadUserResponseData.PokemonData()
pkm.char_id = pkmn_d.get('char_id', 0)
pkm.illustration_book_no = pkmn_d.get('illustration_book_no', 0)
pkm.pokemon_exp = pkmn_d.get('pokemon_exp', 0)
pkm.battle_num_vs_wan = pkmn_d.get('battle_num_vs_wan', 0)
pkm.win_vs_wan = pkmn_d.get('win_vs_wan', 0)
pkm.battle_num_vs_lan = pkmn_d.get('battle_num_vs_lan', 0)
pkm.win_vs_lan = pkmn_d.get('win_vs_lan', 0)
pkm.battle_num_vs_cpu = pkmn_d.get('battle_num_vs_cpu', 0)
pkm.win_cpu = pkmn_d.get('win_cpu', 0)
pkm.battle_all_num_tutorial = pkmn_d.get('battle_all_num_tutorial', 0)
pkm.battle_num_tutorial = pkmn_d.get('battle_num_tutorial', 0)
pkm.bp_point_atk = pkmn_d.get('bp_point_atk', 0)
pkm.bp_point_res = pkmn_d.get('bp_point_res', 0)
pkm.bp_point_def = pkmn_d.get('bp_point_def', 0)
pkm.bp_point_sp = pkmn_d.get('bp_point_sp', 0)
load_usr.pokemon_data.append(pkm)
res.load_user.CopyFrom(load_usr)
return res.SerializeToString()
@ -325,7 +347,7 @@ class PokkenBase:
for evt_param in req.event_achievement_param:
evt_params.append(evt_param)
self.data.profile.update_profile_event(user_id, evt_state, evt_flgs, evt_params, )
self.data.profile.update_profile_event(user_id, evt_state, evt_flgs, evt_params, req.last_play_event_id)
for reward in req.reward_data:
self.data.item.add_reward(user_id, reward.get_category_id, reward.get_content_id, reward.get_type_id)

View File

@ -15,6 +15,7 @@ class PokkenConstants:
AI = 2
LAN = 3
WAN = 4
TUTORIAL_3 = 7
class BATTLE_RESULT(Enum):
WIN = 1

View File

@ -112,7 +112,7 @@ class PokkenServlet(resource.Resource):
try:
pokken_request.ParseFromString(content)
except DecodeError as e:
self.logger.warn(f"{e} {content}")
self.logger.warning(f"{e} {content}")
return b""
endpoint = jackal_pb2.MessageType.DESCRIPTOR.values_by_number[
@ -123,7 +123,7 @@ class PokkenServlet(resource.Resource):
handler = getattr(self.base, f"handle_{endpoint}", None)
if handler is None:
self.logger.warn(f"No handler found for message type {endpoint}")
self.logger.warning(f"No handler found for message type {endpoint}")
return self.base.handle_noop(pokken_request)
self.logger.info(f"{endpoint} request from {Utils.get_ip_addr(request)}")
@ -157,7 +157,7 @@ class PokkenServlet(resource.Resource):
None,
)
if handler is None:
self.logger.warn(
self.logger.warning(
f"No handler found for message type {json_content['call']}"
)
return json.dumps(self.base.handle_matching_noop()).encode()

View File

@ -39,8 +39,12 @@ class PokkenItemData(BaseData):
type=item_type,
)
result = self.execute(sql)
conflict = sql.on_duplicate_key_update(
content=content,
)
result = self.execute(conflict)
if result is None:
self.logger.warn(f"Failed to insert reward for user {user_id}: {category}-{content}-{item_type}")
self.logger.warning(f"Failed to insert reward for user {user_id}: {category}-{content}-{item_type}")
return None
return result.lastrowid

View File

@ -3,6 +3,7 @@ from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, an
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select, update, delete
from sqlalchemy.sql.functions import coalesce
from sqlalchemy.engine import Row
from sqlalchemy.dialects.mysql import insert
@ -257,23 +258,32 @@ class PokkenProfileData(BaseData):
user=user_id,
char_id=pokemon_id,
illustration_book_no=illust_no,
bp_point_atk=atk,
bp_point_res=res,
bp_point_defe=defe,
bp_point_sp=sp,
pokemon_exp=0,
battle_num_vs_wan=0,
win_vs_wan=0,
battle_num_vs_lan=0,
win_vs_lan=0,
battle_num_vs_cpu=0,
win_cpu=0,
battle_all_num_tutorial=0,
battle_num_tutorial=0,
bp_point_atk=1+atk,
bp_point_res=1+res,
bp_point_def=1+defe,
bp_point_sp=1+sp,
)
conflict = sql.on_duplicate_key_update(
illustration_book_no=illust_no,
bp_point_atk=atk,
bp_point_res=res,
bp_point_defe=defe,
bp_point_sp=sp,
bp_point_atk=pokemon_data.c.bp_point_atk + atk,
bp_point_res=pokemon_data.c.bp_point_res + res,
bp_point_def=pokemon_data.c.bp_point_def + defe,
bp_point_sp=pokemon_data.c.bp_point_sp + sp,
)
result = self.execute(conflict)
if result is None:
self.logger.warn(f"Failed to insert pokemon ID {pokemon_id} for user {user_id}")
self.logger.warning(f"Failed to insert pokemon ID {pokemon_id} for user {user_id}")
return None
return result.lastrowid
@ -284,42 +294,51 @@ class PokkenProfileData(BaseData):
xp: int
) -> None:
sql = update(pokemon_data).where(and_(pokemon_data.c.user==user_id, pokemon_data.c.char_id==pokemon_id)).values(
pokemon_exp=pokemon_data.c.pokemon_exp + xp
pokemon_exp=coalesce(pokemon_data.c.pokemon_exp, 0) + xp
)
result = self.execute(sql)
if result is None:
self.logger.warn(f"Failed to add {xp} XP to pokemon ID {pokemon_id} for user {user_id}")
self.logger.warning(f"Failed to add {xp} XP to pokemon ID {pokemon_id} for user {user_id}")
def get_pokemon_data(self, user_id: int, pokemon_id: int) -> Optional[Row]:
pass
sql = pokemon_data.select(and_(pokemon_data.c.user == user_id, pokemon_data.c.char_id == pokemon_id))
result = self.execute(sql)
if result is None:
return None
return result.fetchone()
def get_all_pokemon_data(self, user_id: int) -> Optional[List[Row]]:
pass
sql = pokemon_data.select(pokemon_data.c.user == user_id)
result = self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_pokemon_battle_result(
self, user_id: int, pokemon_id: int, match_type: PokkenConstants.BATTLE_TYPE, match_result: PokkenConstants.BATTLE_RESULT
) -> None:
"""
Records the match stats (type and win/loss) for the pokemon and profile
coalesce(pokemon_data.c.win_vs_wan, 0)
"""
sql = update(pokemon_data).where(and_(pokemon_data.c.user==user_id, pokemon_data.c.char_id==pokemon_id)).values(
battle_num_tutorial=pokemon_data.c.battle_num_tutorial + 1 if match_type==PokkenConstants.BATTLE_TYPE.TUTORIAL else pokemon_data.c.battle_num_tutorial,
battle_all_num_tutorial=pokemon_data.c.battle_all_num_tutorial + 1 if match_type==PokkenConstants.BATTLE_TYPE.TUTORIAL else pokemon_data.c.battle_all_num_tutorial,
battle_num_tutorial=coalesce(pokemon_data.c.battle_num_tutorial, 0) + 1 if match_type==PokkenConstants.BATTLE_TYPE.TUTORIAL else coalesce(pokemon_data.c.battle_num_tutorial, 0),
battle_all_num_tutorial=coalesce(pokemon_data.c.battle_all_num_tutorial, 0) + 1 if match_type==PokkenConstants.BATTLE_TYPE.TUTORIAL else coalesce(pokemon_data.c.battle_all_num_tutorial, 0),
battle_num_vs_cpu=pokemon_data.c.battle_num_vs_cpu + 1 if match_type==PokkenConstants.BATTLE_TYPE.AI else pokemon_data.c.battle_num_vs_cpu,
win_cpu=pokemon_data.c.win_cpu + 1 if match_type==PokkenConstants.BATTLE_TYPE.AI and match_result==PokkenConstants.BATTLE_RESULT.WIN else pokemon_data.c.win_cpu,
battle_num_vs_cpu=coalesce(pokemon_data.c.battle_num_vs_cpu, 0) + 1 if match_type==PokkenConstants.BATTLE_TYPE.AI else coalesce(pokemon_data.c.battle_num_vs_cpu, 0),
win_cpu=coalesce(pokemon_data.c.win_cpu, 0) + 1 if match_type==PokkenConstants.BATTLE_TYPE.AI and match_result==PokkenConstants.BATTLE_RESULT.WIN else coalesce(pokemon_data.c.win_cpu, 0),
battle_num_vs_lan=pokemon_data.c.battle_num_vs_lan + 1 if match_type==PokkenConstants.BATTLE_TYPE.LAN else pokemon_data.c.battle_num_vs_lan,
win_vs_lan=pokemon_data.c.win_vs_lan + 1 if match_type==PokkenConstants.BATTLE_TYPE.LAN and match_result==PokkenConstants.BATTLE_RESULT.WIN else pokemon_data.c.win_vs_lan,
battle_num_vs_lan=coalesce(pokemon_data.c.battle_num_vs_lan, 0) + 1 if match_type==PokkenConstants.BATTLE_TYPE.LAN else coalesce(pokemon_data.c.battle_num_vs_lan, 0),
win_vs_lan=coalesce(pokemon_data.c.win_vs_lan, 0) + 1 if match_type==PokkenConstants.BATTLE_TYPE.LAN and match_result==PokkenConstants.BATTLE_RESULT.WIN else coalesce(pokemon_data.c.win_vs_lan, 0),
battle_num_vs_wan=pokemon_data.c.battle_num_vs_wan + 1 if match_type==PokkenConstants.BATTLE_TYPE.WAN else pokemon_data.c.battle_num_vs_wan,
win_vs_wan=pokemon_data.c.win_vs_wan + 1 if match_type==PokkenConstants.BATTLE_TYPE.WAN and match_result==PokkenConstants.BATTLE_RESULT.WIN else pokemon_data.c.win_vs_wan,
battle_num_vs_wan=coalesce(pokemon_data.c.battle_num_vs_wan, 0) + 1 if match_type==PokkenConstants.BATTLE_TYPE.WAN else coalesce(pokemon_data.c.battle_num_vs_wan, 0),
win_vs_wan=coalesce(pokemon_data.c.win_vs_wan, 0) + 1 if match_type==PokkenConstants.BATTLE_TYPE.WAN and match_result==PokkenConstants.BATTLE_RESULT.WIN else coalesce(pokemon_data.c.win_vs_wan, 0),
)
result = self.execute(sql)
if result is None:
self.logger.warn(f"Failed to record match stats for user {user_id}'s pokemon {pokemon_id} (type {match_type.name} | result {match_result.name})")
self.logger.warning(f"Failed to record match stats for user {user_id}'s pokemon {pokemon_id} (type {match_type.name} | result {match_result.name})")
def put_stats(
self,
@@ -335,19 +354,19 @@ class PokkenProfileData(BaseData):
Records profile stats
"""
sql = update(profile).where(profile.c.user==user_id).values(
ex_ko_num=profile.c.ex_ko_num + exkos,
wko_num=profile.c.wko_num + wkos,
timeup_win_num=profile.c.timeup_win_num + timeout_wins,
cool_ko_num=profile.c.cool_ko_num + cool_kos,
perfect_ko_num=profile.c.perfect_ko_num + perfects,
ex_ko_num=coalesce(profile.c.ex_ko_num, 0) + exkos,
wko_num=coalesce(profile.c.wko_num, 0) + wkos,
timeup_win_num=coalesce(profile.c.timeup_win_num, 0) + timeout_wins,
cool_ko_num=coalesce(profile.c.cool_ko_num, 0) + cool_kos,
perfect_ko_num=coalesce(profile.c.perfect_ko_num, 0) + perfects,
continue_num=continues,
)
result = self.execute(sql)
if result is None:
self.logger.warn(f"Failed to update stats for user {user_id}")
self.logger.warning(f"Failed to update stats for user {user_id}")
def update_support_team(self, user_id: int, support_id: int, support1: int = 4294967295, support2: int = 4294967295) -> None:
def update_support_team(self, user_id: int, support_id: int, support1: int = None, support2: int = None) -> None:
sql = update(profile).where(profile.c.user==user_id).values(
support_set_1_1=support1 if support_id == 1 else profile.c.support_set_1_1,
support_set_1_2=support2 if support_id == 1 else profile.c.support_set_1_2,
@@ -359,4 +378,4 @@ class PokkenProfileData(BaseData):
result = self.execute(sql)
if result is None:
self.logger.warn(f"Failed to update support team {support_id} for user {user_id}")
self.logger.warning(f"Failed to update support team {support_id} for user {user_id}")

View File

@@ -33,7 +33,7 @@ class SaoReader(BaseReader):
pull_bin_ram = True
if not path.exists(f"{self.bin_dir}"):
self.logger.warn(f"Couldn't find csv file in {self.bin_dir}, skipping")
self.logger.warning(f"Couldn't find csv file in {self.bin_dir}, skipping")
pull_bin_ram = False
if pull_bin_ram:
@@ -66,7 +66,7 @@ class SaoReader(BaseReader):
except Exception as err:
print(err)
except Exception:
self.logger.warn(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.warning(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.info("Now reading HeroLog.csv")
try:
@@ -100,7 +100,7 @@ class SaoReader(BaseReader):
except Exception as err:
print(err)
except Exception:
self.logger.warn(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.warning(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.info("Now reading Equipment.csv")
try:
@@ -132,7 +132,7 @@ class SaoReader(BaseReader):
except Exception as err:
print(err)
except Exception:
self.logger.warn(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.warning(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.info("Now reading Item.csv")
try:
@@ -162,7 +162,7 @@ class SaoReader(BaseReader):
except Exception as err:
print(err)
except Exception:
self.logger.warn(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.warning(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.info("Now reading SupportLog.csv")
try:
@@ -194,7 +194,7 @@ class SaoReader(BaseReader):
except Exception as err:
print(err)
except Exception:
self.logger.warn(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.warning(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.info("Now reading Title.csv")
try:
@@ -227,7 +227,7 @@ class SaoReader(BaseReader):
elif len(titleId) < 6: # current server code cannot have multiple lengths for the id
continue
except Exception:
self.logger.warn(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.warning(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.info("Now reading RareDropTable.csv")
try:
@@ -251,4 +251,4 @@ class SaoReader(BaseReader):
except Exception as err:
print(err)
except Exception:
self.logger.warn(f"Couldn't read csv file in {self.bin_dir}, skipping")
self.logger.warning(f"Couldn't read csv file in {self.bin_dir}, skipping")

View File

@@ -192,7 +192,7 @@ class WaccaBase:
else:
profile = self.data.profile.get_profile(req.userId)
if profile is None:
self.logger.warn(
self.logger.warning(
f"Unknown user id {req.userId} attempted login from {req.chipId}"
)
return resp.make()
@@ -282,7 +282,7 @@ class WaccaBase:
profile = self.data.profile.get_profile(req.userId)
if profile is None:
self.logger.warn(f"Unknown profile {req.userId}")
self.logger.warning(f"Unknown profile {req.userId}")
return resp.make()
self.logger.info(f"Get detail for profile {req.userId}")
@@ -709,7 +709,7 @@ class WaccaBase:
profile = self.data.profile.get_profile(req.profileId)
if profile is None:
self.logger.warn(
self.logger.warning(
f"handle_user_music_update_request: No profile for game_id {req.profileId}"
)
return resp.make()
@@ -1003,7 +1003,7 @@ class WaccaBase:
profile = self.data.profile.get_profile(req.profileId)
if profile is None:
self.logger.warn(
self.logger.warning(
f"handle_user_vip_get_request no profile with ID {req.profileId}"
)
return BaseResponse().make()

View File

@@ -116,43 +116,43 @@ class HousingInfo:
class Notice:
name: str = ""
title: str = ""
message: str = ""
unknown3: str = ""
unknown4: str = ""
dialog: str = ""
fullImage: str = ""
messageImage: str = ""
showTitleScreen: bool = True
showWelcomeScreen: bool = True
startTime: int = 0
endTime: int = 0
voiceline: int = 0
voiceLine: int = 0
def __init__(
self,
name: str = "",
title: str = "",
message: str = "",
dialog: str = "",
start: int = 0,
end: int = 0,
) -> None:
self.name = name
self.title = title
self.message = message
self.dialog = dialog
self.startTime = start
self.endTime = end
def make(self) -> List:
return [
self.name,
self.title,
self.message,
self.unknown3,
self.unknown4,
self.dialog,
self.fullImage,
self.messageImage,
int(self.showTitleScreen),
int(self.showWelcomeScreen),
self.startTime,
self.endTime,
self.voiceline,
self.voiceLine,
]
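With the unknown fields renamed (dialog, fullImage, messageImage, voiceLine), make() still flattens the notice into the positional list the game expects, casting the two booleans to ints. A short usage sketch, assuming the Notice class above (the strings and epoch timestamps are placeholders):

notice = Notice(
    name="maintenance",
    title="Scheduled maintenance",
    message="The network will be unavailable tonight.",
    dialog="See the operator notice board for details.",
    start=1698883200,
    end=1698969600,
)
# Presentation fields not covered by the constructor can be set afterwards.
notice.showWelcomeScreen = False
payload = notice.make()  # positional list, booleans serialised as 0/1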

View File

@@ -146,7 +146,7 @@ class WaccaServlet:
self.logger.debug(req_json)
if not hasattr(self.versions[internal_ver], func_to_find):
self.logger.warn(
self.logger.warning(
f"{req_json['appVersion']} has no handler for {func_to_find}"
)
resp = BaseResponse().make()
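The servlet resolves handlers by name on the version object, so an app version that lacks a given endpoint logs a warning and falls back to a stub response instead of raising. A condensed, hypothetical sketch of that dispatch shape (here fallback stands in for the BaseResponse().make factory used above):

def dispatch(handler, func_to_find: str, req_json: dict, logger, fallback):
    # Missing handler: log it and return the stub response the game accepts.
    if not hasattr(handler, func_to_find):
        logger.warning(
            f"{req_json['appVersion']} has no handler for {func_to_find}"
        )
        return fallback()
    # Otherwise call the versioned handler with the decoded request.
    return getattr(handler, func_to_find)(req_json)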

View File

@@ -157,7 +157,7 @@ class WaccaLily(WaccaS):
else:
profile = self.data.profile.get_profile(req.userId)
if profile is None:
self.logger.warn(
self.logger.warning(
f"Unknown user id {req.userId} attempted login from {req.chipId}"
)
return resp.make()
@@ -198,7 +198,7 @@ class WaccaLily(WaccaS):
profile = self.data.profile.get_profile(req.userId)
if profile is None:
self.logger.warn(f"Unknown profile {req.userId}")
self.logger.warning(f"Unknown profile {req.userId}")
return resp.make()
self.logger.info(f"Get detail for profile {req.userId}")

Some files were not shown because too many files have changed in this diff.