Compare commits

156 Commits

Author SHA1 Message Date
6f4e5b0fa3 allow alembic to also connect with tls 2024-11-16 10:58:13 +07:00
5f3d62d84a fix: aiomysql only takes an SSLContext object 2024-11-16 09:31:52 +07:00
d7d3dbac59 properly support connecting with SSL on aiomysql 2024-11-16 09:14:44 +07:00
eba03e6b9b fix deprecated get_event_loop on 3.10+ 2024-11-16 09:13:23 +07:00
4df9edd551 fix on linux 2024-11-16 08:41:32 +07:00
bc35dd9170 forgot to push changes to ongeki 2024-11-15 21:42:42 +07:00
1af7bb81ba caching is probably not needed and is counterproductive 2024-11-15 21:40:14 +07:00
2f59d7b0d6 push the same limit/offset changes for maimai and ongeki 2024-11-15 21:37:23 +07:00
37d07e6035 wip: use SQL's limit/offset pagination for nextIndex/maxCount requests 2024-11-15 21:37:23 +07:00
cb009f6e23 wacca: tiny cleanup 2024-11-14 12:39:21 -05:00
2274b42358 Merge pull request '[database] make async' (#184) from beerpsi/artemis:fix/async-database into develop
Reviewed-on: Hay1tsme/artemis#184
2024-11-14 06:15:49 +00:00
789d50c406 use AsyncSession directly
see the warnings in https://docs.sqlalchemy.org/en/14/orm/extensions/asyncio.html#using-asyncio-scoped-session
2024-11-14 13:10:14 +07:00
4c33f4282a oops forgot a dependency on aiomysql 2024-11-14 12:38:00 +07:00
bc7524c8fc fix: make database async 2024-11-14 12:36:22 +07:00
1331d473c9 Merge pull request '[mai2] Implement GetGameRankingAPI . Fix photo merge , Add UserScoreRankingAPI handler' (#181) from SoulGateKey/artemis:develop into develop
Reviewed-on: Hay1tsme/artemis#181
2024-11-13 05:37:00 +00:00
b7a006f7ee core: pushing changes regarding MySQL ssl toggle that is now mandatory 2024-11-12 10:53:02 -05:00
65100920e3 Merge pull request '[chuni] web ui - customization support (user box, avatar, map icon, system voice)' (#182) from daydensteve/artemis-develop:chuni_ui_overhaul into develop
Reviewed-on: Hay1tsme/artemis#182
2024-11-12 12:03:14 +00:00
7a307b4d69 Merge pull request 'Fix mai2 photo merge problem and Add UserScoreRankingAPI handler' (#2) from mai2_tournament_support into develop
Reviewed-on: SoulGateKey/artemis#2
2024-11-12 05:42:20 +00:00
f4dff9b4c1 fix: mai2 photos cant be merged 2024-11-11 21:16:19 +08:00
8a6250bebd Formatted log print
Change log level
2024-11-11 21:11:33 +08:00
eb18ad22b8 hardened ui against the db not being upgraded or importer not being ran 2024-11-08 09:17:12 -05:00
954bd565d3 reduced db access with new chuni webui customizations 2024-11-07 20:28:28 -05:00
f272e97eae Formatted log print
Change log level
2024-11-06 02:44:07 +08:00
aa7ae6cb51 Formatted log print 2024-11-06 02:38:18 +08:00
3a44b18d91 fixed erroneously wide trophy select 2024-11-03 19:27:20 -05:00
c8186ccef0 fixed doc typo 2024-11-03 19:20:36 -05:00
4a701a5755 chuni doc updates 2024-11-03 19:19:05 -05:00
f5205801a8 Added customization unlock overrides 2024-11-03 19:12:49 -05:00
626ce6bd96 userbox, avatar, mapicon, and voice ui configuration 2024-11-03 18:37:09 -05:00
e49c70b738 more enums! 2024-11-03 16:37:27 -05:00
c2d4abcc26 db and import updates for userbox, avatar, voice, and map icon 2024-11-03 16:37:05 -05:00
2f6974cab6 new chuni ui images/directories 2024-11-03 08:48:13 -05:00
9b89cef51c ignore visual studio pro files 2024-11-03 08:46:12 -05:00
221517e310 TODO: GetUserScoreRankingApi 2024-10-30 12:37:18 +08:00
52b397f31f Merge remote-tracking branch 'origin/develop' into sgkdev
# Conflicts:
#	titles/mai2/schema/profile.py
2024-10-30 12:28:26 +08:00
b84e17a66b Merge pull request 'mai2_handle_get_game_ranking' (#1) from mai2_handle_get_game_ranking into develop
Reviewed-on: SoulGateKey/artemis#1
2024-10-30 04:18:53 +00:00
b6e7e0973b Delete unused dependency 2024-10-11 16:19:07 +00:00
598e4aad76 Update mai2/schema/score.py to support new handle_get_game_ranking 2024-10-11 16:16:40 +00:00
a673d9dabd Delete unused dependency 2024-10-11 16:12:53 +00:00
398fa9059d Update mai2/base.py using the ORM 2024-10-11 16:09:53 +00:00
29f4a6a696 revert 033c1aa776b7874b6e534efd664f0b7010271e68
revert Update 卖
2024-10-11 16:08:15 +00:00
033c1aa776 Update 卖 2024-10-11 16:06:17 +00:00
bbf41ac83f Merge branch 'develop' into mai2_handle_get_game_ranking 2024-10-11 15:56:05 +00:00
451754cf3c sao: fix my store 2024-10-06 16:09:09 -04:00
0cef797a8a mai2: rework photo uploads, relates to #67 2024-10-06 03:47:10 -04:00
58ae491a8c add pymysql to requirements.txt 2024-10-03 19:47:36 +00:00
3843ac6eb1 mai2: calc GetGameRanking result 2024-10-03 19:32:17 +00:00
ed5e7dc561 [chuni] Added truncation to long Title and Artist Name values on import (#178)
I noticed the importer failing to import music 523 (Niji-iro no Flügel) from an omni pack due to the artist name being crazy long.

To address this, I added truncation to the maximum column value length for both the Title and Artist Name values. I considered doing this for the other 3 string fields as well, but I can't imagine those ever being problematic.

Import now succeeds, with a warning generated about the truncation (a rough sketch of this truncate-and-warn pattern appears below, after the commit list).

Reviewed-on: Hay1tsme/artemis#178
Co-authored-by: daydensteve <daydensteve@gmail.com>
Co-committed-by: daydensteve <daydensteve@gmail.com>
2024-09-25 15:21:30 +00:00
b04840f3dd [chuni] Frontend favorites support (#176)
I had been itching for the favorites feature since I'm bad with Japanese, so I figured I'd go ahead and add it. I've included a few pics to help visualize the changes.

### Summary of user-facing changes:
- New Favorites frontend page that itemizes favorites by genre for the current version (as selected on the Profile page). Favorites can be removed from this page via the Remove button
- Updated the Records page so that it only shows the playlog for the currently selected version and includes a "star" to the left of each title that can be clicked to add/remove favorites. When the star is yellow, it's a favorite; when it's a grey outline, it's not. I figure it's pretty straightforward
- The Records and new Favorites pages show the jacket image of each song now (The Importer was updated to convert the DDS files to PNGs on import)

### Behind-the-scenes changes:
- Fixed a bug in the chuni get_song method - it was inappropriately comparing the row id instead of the musicid (note this method was not used prior to adding favorites support)
- Overhauled the score schema file to stop all the hacky romVersion determination that was going on in various methods. To do this, I created a new ChuniRomVersion class that is populated with all base rom versions, then used to derive the internal integer version number from the string stored in the DB. As written, this functionality can infer recorded rom versions when the playlog was entered using an update to the base version (e.g. 2.16 vs 2.15 for SUN PLUS or 2.22 vs 2.20 for LUMINOUS). (A rough sketch of this version-inference idea appears below, after the commit list.)
- Made the chuni config version class safer, as it would previously throw an exception if you gave it a version not present in the config file. This was done in support of the score overhaul to build up the initial ChuniRomVersion dict.
- Added necessary methods to query/update the favorites table.

### Testing
- Frontend testing was performed with playlog data for both SUN PLUS (2.16) and LUMINOUS (2.22) present. All add/remove permutations and image behavior worked as expected
- Game testing was performed only with LUMINOUS (2.22) and worked fine

Reviewed-on: Hay1tsme/artemis#176
Co-authored-by: daydensteve <daydensteve@gmail.com>
Co-committed-by: daydensteve <daydensteve@gmail.com>
2024-09-25 14:53:43 +00:00
1d8e31d4ab docs: add missing games 2024-09-23 14:46:48 -04:00
045465ed4e idz: disabled by default to silence warnings for people who don't feel like configuring games they don't intend to use 2024-09-23 14:46:41 -04:00
aa8e33a13e docs: add pokken to game specific info 2024-09-23 14:20:25 -04:00
ppc f47175a144 [mai2] add buddies plus support (#177)
Adds favorite music support (there's an option on the results screen to star a song), handlers for new methods, and fixes upsert failures for `userFavoriteList` (a generic sketch of that kind of upsert appears below, after the commit list).
The `UserIntimateApi` has been added but didn't seem to add any data during testing, and `CreateTokenApi`/`RemoveTokenApi` have also been added, but I think they're only used during guest play.

---
Tested on 1.45 with no errors/game crashes (see logs). Card Maker hasn't been tested as I don't have a setup to play with.

Reviewed-on: Hay1tsme/artemis#177
Co-authored-by: ppc <albie@ppc.moe>
Co-committed-by: ppc <albie@ppc.moe>
2024-09-23 17:21:29 +00:00
e85728f33c chuni/mai2: remove upsert from put_playlog 2024-09-20 17:10:48 -04:00
ppc 5c60cde14f update docs 2024-09-19 23:10:54 +01:00
ppc 8d04d74f52 migration consistency 2024-09-18 17:59:24 +01:00
ppc d85c575c61 add nullcheck 2024-09-18 11:29:14 +01:00
ppc 196aa601f3 handle GetUserMissionDataApi 2024-09-16 18:01:06 +01:00
ppc d8169e37cc add mai2 UserIntimateApi 2024-09-16 17:56:22 +01:00
ppc 77aa1afaa0 add mai2 favorite music support 2024-09-16 16:55:09 +01:00
ppc e128631e8f fix upsert failures 2024-09-16 12:39:15 +01:00
ppc b01ac24799 add shop stock/friend bonus handlers 2024-09-16 09:54:39 +00:00
ppc 01dad267b9 add mai2 database upgrade 2024-09-15 20:48:24 +00:00
ppc cc302b6e56 update cm reader 2024-09-15 19:22:47 +00:00
ppc ee4eddd639 add buddies plus support 2024-09-15 19:22:39 +00:00
6f6a300879 Merge pull request '[BUGFIX] Fixed Chusan Map Overload' (#175) from EmmyHeart/artemis:develop into develop
Reviewed-on: Hay1tsme/artemis#175
2024-09-14 04:34:44 +00:00
82004cb743 Fix map overload in Chusan 2024-09-14 01:30:29 +00:00
8f4c08f825 Fix map overload in Chusan 2024-09-14 01:28:35 +00:00
7ebd9bfb8a Merge pull request '[chuni] Auto stock tickets at login' (#170) from daydensteve/artemis:chuni_ticket_stock into develop
Reviewed-on: Hay1tsme/artemis#170
2024-09-13 18:12:32 +00:00
19c9740f4b Merge pull request '[aime] fix ADB header parsing error when using generated keychips' (#173) from ppc/artemis:aime-regex-fix into develop
Reviewed-on: Hay1tsme/artemis#173
2024-09-13 18:11:37 +00:00
ppc 1db020b5fc fix generated keychip validation failures 2024-09-09 16:53:14 +00:00
d1048694d4 Fix --config option not being respected, fixes #172 2024-09-06 10:36:57 -04:00
944b80129b chuni: fix ultimate/worlds end chart reading, closes #63 2024-09-05 11:45:22 -04:00
73dda06413 mai2: add warning about portrait uploading not being supported. #67 2024-09-05 11:37:52 -04:00
eacd4a2f43 Adding stock_tickets and stock_count chuni mods. Enables specified tickets to be auto-stocked on login 2024-09-02 20:00:59 -04:00
dd33144040 adding luminous to readme 2024-08-16 09:26:12 -04:00
d400a0be4b adding luminous to readme 2024-08-16 09:25:39 -04:00
31ca45fc68 Merge branch 'develop' 2024-08-11 02:23:04 -04:00
ee667b9ea5 update changelog 2024-08-11 02:22:10 -04:00
c12272feab ...again 2024-08-11 02:11:23 -04:00
2a8a4880c2 Diva: change frontend nav name 2024-08-11 02:11:16 -04:00
5c497032f1 frontend: "Edit Card" -> "Card Information" 2024-08-09 02:43:39 -04:00
06a7fdb1b1 frontend: implement add/edit card 2024-08-09 02:37:34 -04:00
68bbde209a aimedb: add warning for all-zero access code/idm 2024-07-18 13:30:46 -04:00
02cee4198d aimedb: block all-zero access codes and idms 2024-07-18 13:26:57 -04:00
b5715b8da6 Merge pull request 'chuni: fix encryption , added known iter keys , and add c2c api for sdgs' (#165) from Puz/artemis:develop into develop
Reviewed-on: Hay1tsme/artemis#165
2024-07-15 18:32:55 +00:00
2647baef1e Merge pull request 'Chunithm: Add song ranks, improve non-S rating calculation' (#166) from mrarythmia/artemis:develop into develop
Reviewed-on: Hay1tsme/artemis#166
2024-07-15 18:31:04 +00:00
39e89d4908 Add song ranks, improve non-S rating calculation 2024-07-15 03:40:56 +02:00
Puz ec7605eeb0 chuni: added missing c2c endpoint for sdgs 2024-07-10 07:21:56 +00:00
Puz b5e7a59b5f chuni: fix encryption , and added all known iter keys 2024-07-10 07:17:12 +00:00
6a43e0eada update keychip serial generation code 2024-07-09 15:10:42 -04:00
d7e8c9b490 frontend: fix event log buttons 2024-07-06 23:20:04 -04:00
c65c71e89e card: add memos 2024-07-06 23:13:41 -04:00
15cf56ed9b dbutils: get loop once at top of file 2024-07-06 21:59:18 -04:00
51a47e51d9 Merge pull request 'DIVA made some festa stuff configurable in config' (#161) from ThatzOkay/artemis:diva_configire_festa into develop
Reviewed-on: Hay1tsme/artemis#161
2024-07-06 00:47:42 +00:00
58bdeb9290 Merge branch 'develop' into diva_configire_festa 2024-07-05 08:07:10 +00:00
9b5283d389 mai2: add event editing for sysops 2024-07-01 18:26:39 -04:00
= 97e1d5da05 fix disable. Fix casing in options 2024-06-30 12:54:44 +02:00
= eb275062a1 Merge branch 'develop' into diva_configire_festa 2024-06-30 12:26:53 +02:00
d439483aec Merge pull request 'develop' (#8) from Hay1tsme/artemis:develop into develop
Reviewed-on: ThatzOkay/artemis#8
2024-06-30 10:24:42 +00:00
= e0e63a9a13 configurable festa options 2024-06-30 00:23:10 +02:00
44fb6037cf chuni: add missing alembic script 2024-06-29 00:08:11 -04:00
c4afb083f9 Merge pull request 'Implemented User Points for teams' (#140) from EmmyHeart/artemis:develop into develop
Reviewed-on: Hay1tsme/artemis#140
2024-06-29 04:04:13 +00:00
b2bd73a8f5 Merge branch 'develop' into develop 2024-06-29 04:03:14 +00:00
ecb2e9ec75 mai2: properly add present items 2024-06-28 23:55:23 -04:00
af8bd1d1c0 mai2: fix presents 2024-06-28 23:29:31 -04:00
e9afaaf56c update changelog 2024-06-28 15:51:28 -04:00
4446ff1f21 mai2: add present support 2024-06-28 15:48:27 -04:00
fef527d61f Merge branch 'beerpsi-fix/chuni/correct-iter-counts' into develop (#151) 2024-06-28 13:10:14 -04:00
7242a187ab sao: fix sao_hero_log_data nullability 2024-06-28 12:25:33 -04:00
4a18f6b3bc Merge pull request 'develop' (#7) from Hay1tsme/artemis:develop into develop
Reviewed-on: ThatzOkay/artemis#7
2024-06-28 07:15:36 +00:00
f0515a2130 sao: fix quest failing to save 2024-06-27 23:24:12 -04:00
601cec9075 sao: you already know what it is 2024-06-27 10:02:19 -04:00
177f851368 sao: cleanup 2024-06-27 00:07:10 -04:00
40a66614bc sao: fix user_hero_log_id 2024-06-27 00:04:23 -04:00
171131493f sao: fix new issues 2024-06-26 23:57:07 -04:00
c13b096190 sao: split massive upgrade in half and hope it works 2024-06-26 23:31:59 -04:00
5eb2ef8311 sao: schema fixes 3: the misery never ends 2024-06-26 23:01:47 -04:00
f27f789269 sao: alembic sucks sometimes. 2024-06-26 22:37:56 -04:00
7206af83cb sao: fix upgrade script 2024-06-26 22:30:12 -04:00
e91f84fecc sao: backport changes from diana 2024-06-25 14:02:53 -04:00
6ae11f96a2 aimedb: fix idm zfill issue 2024-06-24 22:28:22 -04:00
8e39b56bae CxB: fix default config 2024-06-24 23:09:32 +00:00
05c4035f53 docs: add instructions for migrating from old versions, remove obsolite docs, add openssl example to windows guide 2024-06-24 18:11:25 -04:00
8f9e9d4766 fix downgrade script issue referenced in #156 2024-06-24 13:37:10 -04:00
bdbb22f164 logic fix 2024-06-23 22:19:27 -04:00
068aa5eaa3 prevent setting chipid if it's 0x04030201 2024-06-23 22:09:56 -04:00
f252b8e322 chuni: fix misnamed var in put_net_battle 2024-06-23 17:41:22 -04:00
40a177ddcc Merge pull request 'correctly reboot end time in example_config && update docs' (#159) from zaphkito/artemis:develop into develop
Reviewed-on: Hay1tsme/artemis#159
2024-06-23 21:35:07 +00:00
bd78251b6c Merge pull request 'chuni: fix GetUserNetBattleRankingInfoApi' (#160) from zaphkito/artemis:123/fix-net_battle-aime_id into develop
Reviewed-on: Hay1tsme/artemis#160
2024-06-23 21:34:29 +00:00
acac12c211 chuni: fix GetUserNetBattleRankingInfoApi 2024-06-23 16:17:46 +08:00
714713384d correctly reboot end time in example_config && update docs 2024-06-23 15:26:22 +08:00
699337c419 chuni: fix get_net_battle 2024-06-23 02:57:37 -04:00
e432cc1adf Merge pull request '[chuni] Use the map flag event ID when checking in GetGameMapAreaConditionApi' (#158) from beerpsi/artemis:fix/chuni/use-map-flag-event into develop
Reviewed-on: Hay1tsme/artemis#158
2024-06-22 22:44:00 +00:00
07ec4e5263 Merge pull request 'develop' (#6) from Hay1tsme/artemis:develop into develop
Reviewed-on: ThatzOkay/artemis#6
2024-06-22 14:12:08 +00:00
= f999269e41 Merge branch 'develop' of https://gitea.tendokyu.moe/ThatzOkay/artemis into develop 2024-06-20 23:11:33 +02:00
8947179127 Merge pull request 'develop' (#4) from Hay1tsme/artemis:develop into develop
Reviewed-on: ThatzOkay/artemis#4
2024-06-20 21:07:16 +00:00
= 74e2130747 Merge branch 'ThatzOkay-feature/diva-frontend' into develop 2024-06-20 22:58:57 +02:00
243d40cfd8 Add changelog and docs 2024-06-18 09:39:14 +07:00
= a6ca1b3392 Changed return from full result to just True or False 2024-06-17 22:23:53 +02:00
21a09f2de9 Merge pull request 'develop' (#3) from Hay1tsme/artemis:develop into develop
Reviewed-on: ThatzOkay/artemis#3
2024-06-16 12:56:33 +00:00
= d456ed365c Working diva frontend 2024-06-16 14:08:59 +02:00
bf8631448a support SDGS encryption 2024-06-15 22:22:07 +07:00
c3efc36be2 [chuni] Add correct endpoint iter_counts for all versions with encryption 2024-06-12 16:09:36 +07:00
= da55e3e877 develop branch config 2024-06-11 21:57:40 +02:00
= 7d8093bb54 accidentally changed example config 2024-06-11 21:56:29 +02:00
= 1f9c1798c4 Basic working diva frontend with name editing and lv string editing. Working on playlog page 2024-06-11 21:48:34 +02:00
= 1b5b9d7f9a very basic diva frontend 2024-06-11 01:06:24 +02:00
5ba041b32b Merge pull request 'develop' (#2) from Hay1tsme/artemis:develop into develop
Reviewed-on: ThatzOkay/artemis#2
2024-06-10 22:11:14 +00:00
b3c1dceec9 Add team user points 2024-05-13 08:48:01 +00:00
50e0dde7de Added team user points 2024-05-13 08:45:19 +00:00
d09f3e9907 Merge branch 'develop' 2024-01-09 10:34:27 -05:00
add85aeab7 Merge branch 'develop' 2024-01-08 21:26:26 -05:00
a497a9806d fixing CXB render_POST 2023-11-07 22:40:15 -05:00
e88e1f82f8 fixing get_energy for CXB 2023-11-06 21:55:05 -05:00
1e84a50330 fixing again the render_POST for CXB 2023-11-06 21:04:49 -05:00
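
The "use AsyncSession directly" commit (789d50c406) above points at the SQLAlchemy asyncio docs' warnings about `async_scoped_session`. Below is a minimal sketch of the direct-session pattern, assuming the SQLAlchemy 1.4+ asyncio extension with the aiomysql driver; the DSN and function name are placeholders, not ARTEMiS code.

```python
# Minimal sketch (placeholder names, not ARTEMiS code): create an AsyncSession
# per task via a sessionmaker instead of sharing an async_scoped_session.
import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker

engine = create_async_engine("mysql+aiomysql://user:pass@localhost/aime")  # placeholder DSN
make_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)

async def count_users() -> int:
    # Each coroutine opens and closes its own AsyncSession; nothing is
    # scoped to the event loop or shared between tasks.
    async with make_session() as session:
        result = await session.execute(text("SELECT COUNT(*) FROM aime_user"))
        return result.scalar_one()

if __name__ == "__main__":
    print(asyncio.run(count_users()))
```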
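PR #178 above describes truncating long Title and Artist Name values to the column's maximum length on import and warning when that happens. The following is an illustrative sketch only; the helper name, column lengths, and logger are assumptions, not the actual importer code.

```python
# Illustrative sketch (hypothetical names and lengths, not the ARTEMiS importer):
# clamp free-form metadata to the column's maximum length before inserting,
# and log a warning whenever something is cut off.
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("importer")

MAX_TITLE_LEN = 255   # assumed VARCHAR(255) column
MAX_ARTIST_LEN = 255  # assumed VARCHAR(255) column

def truncate_field(value: str, max_len: int, field: str, song_id: int) -> str:
    """Return value clamped to max_len, warning if truncation happened."""
    if len(value) <= max_len:
        return value
    logger.warning(
        "Truncating %s for song %d from %d to %d characters",
        field, song_id, len(value), max_len,
    )
    return value[:max_len]

# Example: an artist string long enough to overflow the assumed column width.
raw_artist = "A" * 300
artist = truncate_field(raw_artist, MAX_ARTIST_LEN, "artist", 523)
print(len(artist))  # 255
```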
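PR #176 above explains deriving an internal integer version from the rom version string stored in the DB, including inferring the base version when the playlog was written by an update (e.g. 2.16 maps back to the SUN PLUS base, 2.22 to the LUMINOUS base). Here is a rough sketch of that inference idea only; the mapping values are made up and this is not the actual ChuniRomVersion implementation.

```python
# Rough sketch of the version-inference idea; mapping values are illustrative.
BASE_ROM_VERSIONS = {
    (2, 15): 14,  # pretend internal version for SUN PLUS
    (2, 20): 15,  # pretend internal version for LUMINOUS
}

def parse_rom_version(rom_version: str) -> tuple:
    major, minor = rom_version.split(".")[:2]
    return (int(major), int(minor))

def to_internal_version(rom_version: str) -> int:
    """Map a stored rom version string to the highest base version not above it."""
    parsed = parse_rom_version(rom_version)
    candidates = [base for base in BASE_ROM_VERSIONS if base <= parsed]
    if not candidates:
        raise ValueError(f"Unknown rom version {rom_version}")
    return BASE_ROM_VERSIONS[max(candidates)]

print(to_internal_version("2.16"))  # -> 14 (an update to the 2.15 base)
print(to_internal_version("2.22"))  # -> 15 (an update to the 2.20 base)
```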
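PR #177 above mentions fixing upsert failures for `userFavoriteList`. As a generic illustration of the MySQL-style upsert such handlers typically build, and not the actual mai2 schema or handler code, a SQLAlchemy `INSERT ... ON DUPLICATE KEY UPDATE` can look like this:

```python
# Generic upsert sketch; the table definition is hypothetical, not the real
# mai2 favorite-music schema.
from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.dialects import mysql
from sqlalchemy.dialects.mysql import insert

metadata = MetaData()

favorite_music_example = Table(
    "mai2_favorite_music_example",  # hypothetical table name
    metadata,
    Column("user", Integer, primary_key=True),
    Column("musicId", Integer, primary_key=True),
    Column("orderId", Integer),
)

def build_upsert(user: int, music_id: int, order_id: int):
    stmt = insert(favorite_music_example).values(
        user=user, musicId=music_id, orderId=order_id
    )
    # On a duplicate (user, musicId) key, update the row instead of erroring out.
    return stmt.on_duplicate_key_update(orderId=stmt.inserted.orderId)

print(build_upsert(1, 834, 0).compile(dialect=mysql.dialect()))
```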
285 changed files with 106952 additions and 29715 deletions

1
.gitignore vendored
View File

@ -145,6 +145,7 @@ dmypy.json
cython_debug/
.vscode/*
.vs/*
# Local History for Visual Studio Code
.history/

View File

@ -1,11 +1,60 @@
# Changelog
Documenting updates to ARTEMiS, to be updated every time the master branch is pushed to.
## 20240811
### System
+ Change backend from Twisted to Starlette
+ Implement async handlers
+ Reboot times for multiple games have been fixed (thanks zaphkito!)
### Frontend
+ Edit button changed to View on the user page, and is where you can edit the card memo
+ Add card now works as it should
+ Add event log viewer in the `sys` page for sysadmins
+ Add pages for Pokken, SAO, and maimai
### AimeDB
+ Now rejects all-zero access codes
+ Stores card IDm (for AmusementIC) and MiFare ID (for old aime/banapass)
+ ...unless that MiFare ID is 0x01020304 (the default for segatools)
### maimai
+ Add support for BUDDiES
+ Rivals and Favorite Music support
### Wacca
+ Add option to block unregistered serials from accessing the title server
### DIVA
+ Fix for reading modded content (Thanks ThatzOkay!)
### CHUNITHM
+ Save net battle info
## 20240630
### DIVA
+ Added configurable festa options'
## 20240629
### CHUNITHM
+ Add team points
## 20240628
### maimai
+ Add present support
## 20240627
### SAO
+ Fix ghost items, character and player XP, EX Bonuses, unlocks, and much much more
## 20240620
### CHUNITHM
+ CHUNITHM LUMINOUS support
## 20240616
### CHUNITHM
+ Support network encryption for Export/International versions
### DIVA
+ Working frontend with name and level strings edit and playlog

View File

@ -1,7 +1,5 @@
from core.config import CoreConfig
from core.allnet import AllnetServlet, BillingServlet
from core.aimedb import AimedbServlette
from core.title import TitleServlet
from core.utils import Utils
from core.mucha import MuchaServlet
from core.frontend import FrontendServlet

View File

@ -2,5 +2,5 @@ from .base import ADBBaseRequest, ADBBaseResponse, ADBHeader, ADBHeaderException
from .base import CompanyCodes, ReaderFwVer, CMD_CODE_GOODBYE, HEADER_SIZE
from .lookup import ADBLookupRequest, ADBLookupResponse, ADBLookupExResponse
from .campaign import ADBCampaignClearRequest, ADBCampaignClearResponse, ADBCampaignResponse, ADBOldCampaignRequest, ADBOldCampaignResponse
from .felica import ADBFelicaLookupRequest, ADBFelicaLookupResponse, ADBFelicaLookup2Request, ADBFelicaLookup2Response
from .felica import ADBFelicaLookupRequest, ADBFelicaLookupResponse, ADBFelicaLookupExRequest, ADBFelicaLookupExResponse
from .log import ADBLogExRequest, ADBLogRequest, ADBStatusLogRequest, ADBLogExResponse

View File

@ -120,7 +120,7 @@ class ADBHeader:
if self.store_id == 0:
raise ADBHeaderException(f"Store ID cannot be 0!")
if re.fullmatch(r"^A[0-9]{2}[E|X][0-9]{2}[A-HJ-NP-Z][0-9]{4}$", self.keychip_id) is None:
if re.fullmatch(r"^A[0-9]{2}[A-Z][0-9]{2}[A-HJ-NP-Z][0-9]{4}$", self.keychip_id) is None:
raise ADBHeaderException(f"Keychip ID {self.keychip_id} is invalid!")
return True

View File

@ -35,7 +35,7 @@ class ADBFelicaLookupResponse(ADBBaseResponse):
return self.head.make() + resp_struct
class ADBFelicaLookup2Request(ADBBaseRequest):
class ADBFelicaLookupExRequest(ADBBaseRequest):
def __init__(self, data: bytes) -> None:
super().__init__(data)
self.random = struct.unpack_from("<16s", data, 0x20)[0]
@ -46,7 +46,7 @@ class ADBFelicaLookup2Request(ADBBaseRequest):
self.company = CompanyCodes(int.from_bytes(company, 'little'))
self.fw_ver = ReaderFwVer.from_byte(fw_ver)
class ADBFelicaLookup2Response(ADBBaseResponse):
class ADBFelicaLookupExResponse(ADBBaseResponse):
def __init__(self, user_id: Union[int, None] = None, access_code: Union[str, None] = None, game_id: str = "SXXX", store_id: int = 1, keychip_id: str = "A69E01A8888", code: int = 0x12, length: int = 0x130, status: int = 1) -> None:
super().__init__(code, length, status, game_id, store_id, keychip_id)
self.user_id = user_id if user_id is not None else -1
@ -56,7 +56,7 @@ class ADBFelicaLookup2Response(ADBBaseResponse):
self.auth_key = [0] * 256
@classmethod
def from_req(cls, req: ADBHeader, user_id: Union[int, None] = None, access_code: Union[str, None] = None) -> "ADBFelicaLookup2Response":
def from_req(cls, req: ADBHeader, user_id: Union[int, None] = None, access_code: Union[str, None] = None) -> "ADBFelicaLookupExResponse":
c = cls(user_id, access_code, req.game_id, req.store_id, req.keychip_id)
c.head.protocol_ver = req.protocol_ver
return c

View File

@ -176,6 +176,12 @@ class AimedbServlette():
async def handle_lookup(self, data: bytes, resp_code: int) -> ADBBaseResponse:
req = ADBLookupRequest(data)
if req.access_code == "00000000000000000000":
self.logger.warn(f"All-zero access code from {req.head.keychip_id}")
ret = ADBLookupResponse.from_req(req.head, -1)
ret.head.status = ADBStatus.BAN_SYS
return ret
user_id = await self.data.card.get_user_id_from_card(req.access_code)
is_banned = await self.data.card.get_card_banned(req.access_code)
is_locked = await self.data.card.get_card_locked(req.access_code)
@ -194,13 +200,19 @@ class AimedbServlette():
if user_id and user_id > 0:
await self.data.card.update_card_last_login(req.access_code)
if req.access_code.startswith("010") or req.access_code.startswith("3"):
if (req.access_code.startswith("010") or req.access_code.startswith("3")) and req.serial_number != 0x04030201: # Default segatools sn
await self.data.card.set_chip_id_by_access_code(req.access_code, req.serial_number)
self.logger.info(f"Attempt to set chip id to {req.serial_number} for access code {req.access_code}")
self.logger.info(f"Attempt to set chip id to {req.serial_number:08X} for access code {req.access_code}")
return ret
async def handle_lookup_ex(self, data: bytes, resp_code: int) -> ADBBaseResponse:
req = ADBLookupRequest(data)
if req.access_code == "00000000000000000000":
self.logger.warn(f"All-zero access code from {req.head.keychip_id}")
ret = ADBLookupExResponse.from_req(req.head, -1)
ret.head.status = ADBStatus.BAN_SYS
return ret
user_id = await self.data.card.get_user_id_from_card(req.access_code)
is_banned = await self.data.card.get_card_banned(req.access_code)
@ -240,18 +252,25 @@ class AimedbServlette():
used on the big boy networks.
"""
req = ADBFelicaLookupRequest(data)
card = await self.data.card.get_card_by_idm(req.idm)
idm = req.idm.zfill(16)
if idm == "0000000000000000":
self.logger.warn(f"All-zero IDm from {req.head.keychip_id}")
ret = ADBFelicaLookupResponse.from_req(req.head, "00000000000000000000")
ret.head.status = ADBStatus.BAN_SYS
return ret
card = await self.data.card.get_card_by_idm(idm)
if not card:
ac = self.data.card.to_access_code(req.idm)
ac = self.data.card.to_access_code(idm)
test = await self.data.card.get_card_by_access_code(ac)
if test:
await self.data.card.set_idm_by_access_code(ac, req.idm)
await self.data.card.set_idm_by_access_code(ac, idm)
else:
ac = card['access_code']
self.logger.info(
f"idm {req.idm} ipm {req.pmm} -> access_code {ac}"
f"idm {idm} ipm {req.pmm.zfill(16)} -> access_code {ac}"
)
return ADBFelicaLookupResponse.from_req(req.head, ac)
@ -261,6 +280,14 @@ class AimedbServlette():
because we don't implement felica_lookup properly.
"""
req = ADBFelicaLookupRequest(data)
idm = req.idm.zfill(16)
if idm == "0000000000000000":
self.logger.warn(f"All-zero IDm from {req.head.keychip_id}")
ret = ADBFelicaLookupResponse.from_req(req.head, "00000000000000000000")
ret.head.status = ADBStatus.BAN_SYS
return ret
ac = self.data.card.to_access_code(req.idm)
if self.config.server.allow_user_registration:
@ -291,15 +318,23 @@ class AimedbServlette():
return ADBFelicaLookupResponse.from_req(req.head, ac)
async def handle_felica_lookup_ex(self, data: bytes, resp_code: int) -> bytes:
req = ADBFelicaLookup2Request(data)
req = ADBFelicaLookupExRequest(data)
user_id = None
card = await self.data.card.get_card_by_idm(req.idm)
idm = req.idm.zfill(16)
if idm == "0000000000000000":
self.logger.warn(f"All-zero IDm from {req.head.keychip_id}")
ret = ADBFelicaLookupExResponse.from_req(req.head, -1, "00000000000000000000")
ret.head.status = ADBStatus.BAN_SYS
return ret
card = await self.data.card.get_card_by_idm(idm)
if not card:
access_code = self.data.card.to_access_code(req.idm)
access_code = self.data.card.to_access_code(idm)
card = await self.data.card.get_card_by_access_code(access_code)
if card:
user_id = card['user']
await self.data.card.set_idm_by_access_code(access_code, req.idm)
await self.data.card.set_idm_by_access_code(access_code, idm)
else:
user_id = card['user']
@ -309,10 +344,10 @@ class AimedbServlette():
user_id = -1
self.logger.info(
f"idm {req.idm} ipm {req.pmm} -> access_code {access_code} user_id {user_id}"
f"idm {idm} ipm {req.pmm} -> access_code {access_code} user_id {user_id}"
)
resp = ADBFelicaLookup2Response.from_req(req.head, user_id, access_code)
resp = ADBFelicaLookupExResponse.from_req(req.head, user_id, access_code)
if user_id > 0:
if card['is_banned'] and card['is_locked']:
@ -345,6 +380,12 @@ class AimedbServlette():
async def handle_register(self, data: bytes, resp_code: int) -> bytes:
req = ADBLookupRequest(data)
user_id = -1
if req.access_code == "00000000000000000000":
self.logger.warn(f"All-zero access code from {req.head.keychip_id}")
ret = ADBLookupResponse.from_req(req.head, -1)
ret.head.status = ADBStatus.BAN_SYS
return ret
if self.config.server.allow_user_registration:
user_id = await self.data.user.create_user()
@ -370,7 +411,7 @@ class AimedbServlette():
)
if user_id > 0:
if req.access_code.startswith("010") or req.access_code.startswith("3"):
if (req.access_code.startswith("010") or req.access_code.startswith("3")) and req.serial_number != 0x04030201: # Default segatools sn:
await self.data.card.set_chip_id_by_access_code(req.access_code, req.serial_number)
self.logger.info(f"Attempt to set chip id to {req.serial_number} for access code {req.access_code}")

View File

@ -9,7 +9,8 @@ from starlette.responses import PlainTextResponse
from os import environ, path, mkdir, W_OK, access
from typing import List
from core import CoreConfig, TitleServlet, MuchaServlet, AllnetServlet, BillingServlet, AimedbServlette
from core import CoreConfig, TitleServlet, MuchaServlet
from core.allnet import AllnetServlet, BillingServlet
from core.frontend import FrontendServlet
async def dummy_rt(request: Request):

View File

@ -1,5 +1,9 @@
import logging, os
from typing import Any
import logging
import os
import ssl
from typing import Any, Union
from typing_extensions import Optional
class ServerConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
@ -176,6 +180,60 @@ class DatabaseConfig:
self.__config, "core", "database", "protocol", default="mysql"
)
@property
def ssl_enabled(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_enabled", default=False
)
@property
def ssl_cafile(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_cafile", default=None
)
@property
def ssl_capath(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_capath", default=None
)
@property
def ssl_cert(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_cert", default=None
)
@property
def ssl_key(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_key", default=None
)
@property
def ssl_key_password(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_key_password", default=None
)
@property
def ssl_verify_identity(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_verify_identity", default=True
)
@property
def ssl_verify_cert(self) -> Optional[Union[str, bool]]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_verify_cert", default=None
)
@property
def ssl_ciphers(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_ciphers", default=None
)
@property
def sha2_password(self) -> bool:
return CoreConfig.get_config_field(
@ -202,6 +260,53 @@ class DatabaseConfig:
self.__config, "core", "database", "memcached_host", default="localhost"
)
def create_ssl_context_if_enabled(self):
if not self.ssl_enabled:
return
no_ca = (
self.ssl_cafile is None
and self.ssl_capath is None
)
ctx = ssl.create_default_context(
cafile=self.ssl_cafile,
capath=self.ssl_capath,
)
ctx.check_hostname = not no_ca and self.ssl_verify_identity
if self.ssl_verify_cert is None:
ctx.verify_mode = ssl.CERT_NONE if no_ca else ssl.CERT_REQUIRED
elif isinstance(self.ssl_verify_cert, bool):
ctx.verify_mode = (
ssl.CERT_REQUIRED
if self.ssl_verify_cert
else ssl.CERT_NONE
)
elif isinstance(self.ssl_verify_cert, str):
value = self.ssl_verify_cert.lower()
if value in ("none", "0", "false", "no"):
ctx.verify_mode = ssl.CERT_NONE
elif value == "optional":
ctx.verify_mode = ssl.CERT_OPTIONAL
elif value in ("required", "1", "true", "yes"):
ctx.verify_mode = ssl.CERT_REQUIRED
else:
ctx.verify_mode = ssl.CERT_NONE if no_ca else ssl.CERT_REQUIRED
if self.ssl_cert:
ctx.load_cert_chain(
self.ssl_cert,
self.ssl_key,
self.ssl_key_password,
)
if self.ssl_ciphers:
ctx.set_ciphers(self.ssl_ciphers)
return ctx
class FrontendConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
self.__config = parent_config

View File

@ -1,16 +1,18 @@
from enum import Enum
class MainboardPlatformCodes:
RINGEDGE = "AALE"
RINGWIDE = "AAML"
NU = "AAVE"
NUSX = "AAWE"
ALLS_UX = "ACAE"
ALLS_HX = "ACAX"
class MainboardPlatformCodes(Enum):
RINGEDGE = "AAL"
RINGEDGE2 = "AAS"
RINGWIDE = "AAM"
NU = "AAV"
NUSX = "AAW"
ALLS = "ACA"
#ALLS_UX = "ACAE"
#ALLS_HX = "ACAX"
class MainboardRevisions:
class MainboardRevisions(Enum):
RINGEDGE = 1
RINGEDGE2 = 2
@ -29,11 +31,10 @@ class MainboardRevisions:
ALLS_HX2 = 12
class KeychipPlatformsCodes:
RING = "A72E"
NU = ("A60E", "A60E", "A60E")
NUSX = ("A61X", "A69X")
ALLS = "A63E"
class KeychipPlatformsCodes(Enum):
RING = "72"
NU = ("60", "61", "69")
ALLS = "63"
class AllnetCountryCode(Enum):

View File

@ -1,8 +1,18 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
import asyncio
import os
from pathlib import Path
import threading
from logging.config import fileConfig
import yaml
from alembic import context
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from core.config import CoreConfig
from core.data.schema.base import metadata
# this is the Alembic Config object, which provides
@ -37,20 +47,29 @@ def run_migrations_offline():
script output.
"""
raise Exception('Not implemented or configured!')
raise Exception("Not implemented or configured!")
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True)
context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
def do_run_migrations(connection: Connection) -> None:
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
compare_server_default=True,
)
In this scenario we need to create an Engine
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""In this scenario we need to create an Engine
and associate a connection with the context.
"""
@ -59,21 +78,42 @@ def run_migrations_online():
for override in overrides:
ini_section[override] = overrides[override]
connectable = engine_from_config(
core_config = CoreConfig()
with (Path("../../..") / os.environ["ARTEMIS_CFG_DIR"] / "core.yaml").open(encoding="utf-8") as f:
core_config.update(yaml.safe_load(f))
connectable = async_engine_from_config(
ini_section,
prefix='sqlalchemy.',
poolclass=pool.NullPool)
poolclass=pool.NullPool,
connect_args={
"charset": "utf8mb4",
"ssl": core_config.database.create_ssl_context_if_enabled(),
}
)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
compare_server_default=True,
)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
await connectable.dispose()
def run_migrations_online():
try:
loop = asyncio.get_running_loop()
except RuntimeError:
# there's no event loop
asyncio.run(run_async_migrations())
else:
# there's currently an event loop and trying to wait for a coroutine
# to finish without using `await` is pretty wormy. nested event loops
# are explicitly forbidden by asyncio.
#
# take the easy way out, spawn it in another thread.
thread = threading.Thread(target=asyncio.run, args=(run_async_migrations(),))
thread.start()
thread.join()
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()

View File

@ -0,0 +1,28 @@
"""mai2_buddies_plus
Revision ID: 28443e2da5b8
Revises: 5ea73f89d982
Create Date: 2024-09-15 20:44:02.351819
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '28443e2da5b8'
down_revision = '5ea73f89d982'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('mai2_profile_detail', sa.Column('point', sa.Integer()))
op.add_column('mai2_profile_detail', sa.Column('totalPoint', sa.Integer()))
op.add_column('mai2_profile_detail', sa.Column('friendRegistSkip', sa.SmallInteger()))
def downgrade():
op.drop_column('mai2_profile_detail', 'point')
op.drop_column('mai2_profile_detail', 'totalPoint')
op.drop_column('mai2_profile_detail', 'friendRegistSkip')

View File

@ -0,0 +1,122 @@
"""chuni_ui_overhaul
Revision ID: 41f77ef50588
Revises: d8cd1fa04c2a
Create Date: 2024-11-02 13:27:45.839787
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '41f77ef50588'
down_revision = 'd8cd1fa04c2a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('chuni_static_avatar', sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True))
op.add_column('chuni_static_avatar', sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True))
op.add_column('chuni_static_avatar', sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True))
op.create_table('chuni_static_character',
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('characterId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True),
sa.Column('worksName', mysql.VARCHAR(length=255), nullable=True),
sa.Column('rareType', mysql.INTEGER(display_width=11), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('imagePath1', mysql.VARCHAR(length=255), nullable=True),
sa.Column('imagePath2', mysql.VARCHAR(length=255), nullable=True),
sa.Column('imagePath3', mysql.VARCHAR(length=255), nullable=True),
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_collate='utf8mb4_general_ci',
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
op.create_index('chuni_static_character_uk', 'chuni_static_character', ['version', 'characterId'], unique=True)
op.create_table('chuni_static_map_icon',
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('mapIconId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True),
sa.Column('iconPath', mysql.VARCHAR(length=255), nullable=True),
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_collate='utf8mb4_general_ci',
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
op.create_index('chuni_static_mapicon_uk', 'chuni_static_map_icon', ['version', 'mapIconId'], unique=True)
op.create_table('chuni_static_nameplate',
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('nameplateId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
sa.Column('texturePath', mysql.VARCHAR(length=255), nullable=True),
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_collate='utf8mb4_general_ci',
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
op.create_index('chuni_static_nameplate_uk', 'chuni_static_nameplate', ['version', 'nameplateId'], unique=True)
op.create_table('chuni_static_trophy',
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('trophyId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
sa.Column('rareType', mysql.TINYINT(display_width=11), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_collate='utf8mb4_general_ci',
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
op.create_index('chuni_static_trophy_uk', 'chuni_static_trophy', ['version', 'trophyId'], unique=True)
op.create_table('chuni_static_system_voice',
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('voiceId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True),
sa.Column('imagePath', mysql.VARCHAR(length=255), nullable=True),
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_collate='utf8mb4_general_ci',
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
op.create_index('chuni_static_systemvoice_uk', 'chuni_static_system_voice', ['version', 'voiceId'], unique=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index('chuni_static_systemvoice_uk', table_name='chuni_static_system_voice')
op.drop_table('chuni_static_system_voice')
op.drop_index('chuni_static_trophy_uk', table_name='chuni_static_trophy')
op.drop_table('chuni_static_trophy')
op.drop_index('chuni_static_nameplate_uk', table_name='chuni_static_nameplate')
op.drop_table('chuni_static_nameplate')
op.drop_index('chuni_static_mapicon_uk', table_name='chuni_static_map_icon')
op.drop_table('chuni_static_map_icon')
op.drop_index('chuni_static_character_uk', table_name='chuni_static_character')
op.drop_table('chuni_static_character')
op.drop_column('chuni_static_avatar', 'defaultHave')
op.drop_column('chuni_static_avatar', 'isEnabled')
op.drop_column('chuni_static_avatar', 'sortName')
# ### end Alembic commands ###

View File

@ -35,9 +35,9 @@ def upgrade():
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'aime_card', type_='unique')
op.drop_constraint(None, 'aime_card', type_='unique')
op.drop_constraint(None, 'aime_card', type_='unique')
op.drop_constraint("chip_id", 'aime_card', type_='unique')
op.drop_constraint("idm", 'aime_card', type_='unique')
op.drop_constraint("access_code", 'aime_card', type_='unique')
op.alter_column('aime_card', 'created_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('CURRENT_TIMESTAMP'),

View File

@ -0,0 +1,43 @@
"""mai2_intimacy
Revision ID: 54a84103b84e
Revises: bc91c1206dca
Create Date: 2024-09-16 17:47:49.164546
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import Column, Integer, UniqueConstraint
# revision identifiers, used by Alembic.
revision = '54a84103b84e'
down_revision = 'bc91c1206dca'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
"mai2_user_intimate",
Column("id", Integer, primary_key=True, nullable=False),
Column("user", Integer, nullable=False),
Column("partnerId", Integer, nullable=False),
Column("intimateLevel", Integer, nullable=False),
Column("intimateCountRewarded", Integer, nullable=False),
UniqueConstraint("user", "partnerId", name="mai2_user_intimate_uk"),
mysql_charset="utf8mb4",
)
op.create_foreign_key(
None,
"mai2_user_intimate",
"aime_user",
["user"],
["id"],
ondelete="cascade",
onupdate="cascade",
)
def downgrade():
op.drop_table("mai2_user_intimate")

View File

@ -0,0 +1,41 @@
"""mai2_presents
Revision ID: 5ea363686347
Revises: 680789dabab3
Create Date: 2024-06-28 14:49:07.666879
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '5ea363686347'
down_revision = '680789dabab3'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('mai2_item_present',
sa.Column('id', sa.BIGINT(), nullable=False),
sa.Column('version', sa.INTEGER(), nullable=True),
sa.Column('user', sa.Integer(), nullable=True),
sa.Column('itemKind', sa.INTEGER(), nullable=False),
sa.Column('itemId', sa.INTEGER(), nullable=False),
sa.Column('stock', sa.INTEGER(), server_default='1', nullable=False),
sa.Column('startDate', sa.TIMESTAMP(), nullable=True),
sa.Column('endDate', sa.TIMESTAMP(), nullable=True),
sa.ForeignKeyConstraint(['user'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('version', 'user', 'itemKind', 'itemId', name='mai2_item_present_uk'),
mysql_charset='utf8mb4'
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('mai2_item_present')
# ### end Alembic commands ###

View File

@ -0,0 +1,28 @@
"""card_add_memo
Revision ID: 5ea73f89d982
Revises: 745448d83696
Create Date: 2024-07-06 22:46:56.992152
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '5ea73f89d982'
down_revision = '745448d83696'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('aime_card', sa.Column('memo', sa.VARCHAR(length=16), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('aime_card', 'memo')
# ### end Alembic commands ###

View File

@ -0,0 +1,295 @@
"""sao_player_changes
Revision ID: 680789dabab3
Revises: a616fd164e40
Create Date: 2024-06-26 23:19:16.863778
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '680789dabab3'
down_revision = 'a616fd164e40'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('sao_equipment_data', sa.Column('is_shop_purchase', sa.BOOLEAN(), server_default='0', nullable=False))
op.add_column('sao_equipment_data', sa.Column('is_protect', sa.BOOLEAN(), server_default='0', nullable=False))
op.add_column('sao_equipment_data', sa.Column('property1_property_id', sa.BIGINT(), server_default='2', nullable=False))
op.add_column('sao_equipment_data', sa.Column('property1_value1', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_equipment_data', sa.Column('property1_value2', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_equipment_data', sa.Column('property2_property_id', sa.BIGINT(), server_default='2', nullable=False))
op.add_column('sao_equipment_data', sa.Column('property2_value1', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_equipment_data', sa.Column('property2_value2', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_equipment_data', sa.Column('property3_property_id', sa.BIGINT(), server_default='2', nullable=False))
op.add_column('sao_equipment_data', sa.Column('property3_value1', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_equipment_data', sa.Column('property3_value2', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_equipment_data', sa.Column('property4_property_id', sa.BIGINT(), server_default='2', nullable=False))
op.add_column('sao_equipment_data', sa.Column('property4_value1', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_equipment_data', sa.Column('property4_value2', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_equipment_data', sa.Column('converted_card_num', sa.INTEGER(), server_default='0', nullable=False))
op.alter_column('sao_equipment_data', 'equipment_id',
existing_type=mysql.INTEGER(),
type_=sa.BIGINT(),
existing_nullable=False)
op.alter_column('sao_equipment_data', 'get_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('now()'),
existing_nullable=False)
op.create_foreign_key(None, 'sao_equipment_data', 'sao_static_property', ['property2_property_id'], ['PropertyId'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'sao_equipment_data', 'sao_static_property', ['property4_property_id'], ['PropertyId'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'sao_equipment_data', 'sao_static_property', ['property3_property_id'], ['PropertyId'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'sao_equipment_data', 'sao_static_property', ['property1_property_id'], ['PropertyId'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'sao_equipment_data', 'sao_static_equipment_list', ['equipment_id'], ['EquipmentId'], onupdate='cascade', ondelete='cascade')
op.add_column('sao_hero_log_data', sa.Column('max_level_extend_num', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('is_awakenable', sa.BOOLEAN(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('awakening_stage', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('awakening_exp', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('is_shop_purchase', sa.BOOLEAN(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('is_protect', sa.BOOLEAN(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('property1_property_id', sa.BIGINT(), server_default='2', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('property1_value1', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('property1_value2', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('property2_property_id', sa.BIGINT(), server_default='2', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('property2_value1', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('property2_value2', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('property3_property_id', sa.BIGINT(), server_default='2', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('property3_value1', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('property3_value2', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('property4_property_id', sa.BIGINT(), server_default='2', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('property4_value1', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('property4_value2', sa.INTEGER(), server_default='0', nullable=False))
op.add_column('sao_hero_log_data', sa.Column('converted_card_num', sa.INTEGER(), server_default='0', nullable=False))
op.alter_column('sao_hero_log_data', 'main_weapon',
existing_type=mysql.INTEGER(),
nullable=True)
op.alter_column('sao_hero_log_data', 'sub_equipment',
existing_type=mysql.INTEGER(),
nullable=True)
op.alter_column('sao_hero_log_data', 'skill_slot1_skill_id',
existing_type=mysql.INTEGER(),
type_=sa.BIGINT(),
nullable=True)
op.alter_column('sao_hero_log_data', 'skill_slot2_skill_id',
existing_type=mysql.INTEGER(),
type_=sa.BIGINT(),
nullable=True)
op.alter_column('sao_hero_log_data', 'skill_slot3_skill_id',
existing_type=mysql.INTEGER(),
type_=sa.BIGINT(),
nullable=True)
op.alter_column('sao_hero_log_data', 'skill_slot4_skill_id',
existing_type=mysql.INTEGER(),
type_=sa.BIGINT(),
nullable=True)
op.alter_column('sao_hero_log_data', 'skill_slot5_skill_id',
existing_type=mysql.INTEGER(),
type_=sa.BIGINT(),
nullable=True)
op.alter_column('sao_hero_log_data', 'get_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('now()'),
existing_nullable=False)
op.alter_column("sao_hero_log_data", "user_hero_log_id",
existing_type=sa.Integer(),
new_column_name="hero_log_id",
type_=sa.BIGINT(),
nullable=False)
op.execute(sa.text("UPDATE sao_hero_log_data SET skill_slot1_skill_id = NULL WHERE skill_slot1_skill_id = 0;"))
op.execute(sa.text("UPDATE sao_hero_log_data SET skill_slot2_skill_id = NULL WHERE skill_slot2_skill_id = 0;"))
op.execute(sa.text("UPDATE sao_hero_log_data SET skill_slot3_skill_id = NULL WHERE skill_slot3_skill_id = 0;"))
op.execute(sa.text("UPDATE sao_hero_log_data SET skill_slot4_skill_id = NULL WHERE skill_slot4_skill_id = 0;"))
op.execute(sa.text("UPDATE sao_hero_log_data SET skill_slot5_skill_id = NULL WHERE skill_slot5_skill_id = 0;"))
op.execute(sa.text("UPDATE sao_hero_log_data SET main_weapon = NULL WHERE main_weapon = 0;"))
op.execute(sa.text("UPDATE sao_hero_log_data SET sub_equipment = NULL WHERE sub_equipment = 0;"))
op.execute(sa.text("UPDATE sao_hero_party SET user_hero_log_id_1 = NULL WHERE user_hero_log_id_1 = 0;"))
op.execute(sa.text("UPDATE sao_hero_party SET user_hero_log_id_2 = NULL WHERE user_hero_log_id_2 = 0;"))
op.execute(sa.text("UPDATE sao_hero_party SET user_hero_log_id_3 = NULL WHERE user_hero_log_id_3 = 0;"))
op.execute(sa.text("UPDATE sao_hero_log_data INNER JOIN sao_equipment_data ON sao_hero_log_data.main_weapon = sao_equipment_data.equipment_id SET sao_hero_log_data.main_weapon = sao_equipment_data.id;"))
op.execute(sa.text("UPDATE sao_hero_log_data INNER JOIN sao_equipment_data ON sao_hero_log_data.sub_equipment = sao_equipment_data.equipment_id SET sao_hero_log_data.sub_equipment = sao_equipment_data.id;"))
op.execute(sa.text("UPDATE sao_hero_party INNER JOIN sao_hero_log_data ON sao_hero_party.user_hero_log_id_1 = sao_hero_log_data.hero_log_id SET sao_hero_party.user_hero_log_id_1 = sao_hero_log_data.id;"))
op.execute(sa.text("UPDATE sao_hero_party INNER JOIN sao_hero_log_data ON sao_hero_party.user_hero_log_id_2 = sao_hero_log_data.hero_log_id SET sao_hero_party.user_hero_log_id_2 = sao_hero_log_data.id;"))
op.execute(sa.text("UPDATE sao_hero_party INNER JOIN sao_hero_log_data ON sao_hero_party.user_hero_log_id_3 = sao_hero_log_data.hero_log_id SET sao_hero_party.user_hero_log_id_3 = sao_hero_log_data.id;"))
op.create_foreign_key(None, 'sao_hero_log_data', 'sao_static_property', ['property4_property_id'], ['PropertyId'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'sao_hero_log_data', 'sao_static_skill', ['skill_slot1_skill_id'], ['SkillId'], onupdate='set null', ondelete='set null')
op.create_foreign_key(None, 'sao_hero_log_data', 'sao_static_skill', ['skill_slot5_skill_id'], ['SkillId'], onupdate='set null', ondelete='set null')
op.create_foreign_key(None, 'sao_hero_log_data', 'sao_static_skill', ['skill_slot2_skill_id'], ['SkillId'], onupdate='set null', ondelete='set null')
op.create_foreign_key(None, 'sao_hero_log_data', 'sao_static_skill', ['skill_slot3_skill_id'], ['SkillId'], onupdate='set null', ondelete='set null')
op.create_foreign_key(None, 'sao_hero_log_data', 'sao_equipment_data', ['main_weapon'], ['id'], onupdate='set null', ondelete='set null')
op.create_foreign_key(None, 'sao_hero_log_data', 'sao_static_property', ['property3_property_id'], ['PropertyId'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'sao_hero_log_data', 'sao_static_skill', ['skill_slot4_skill_id'], ['SkillId'], onupdate='set null', ondelete='set null')
op.create_foreign_key(None, 'sao_hero_log_data', 'sao_equipment_data', ['sub_equipment'], ['id'], onupdate='set null', ondelete='set null')
op.create_foreign_key(None, 'sao_hero_log_data', 'sao_static_property', ['property1_property_id'], ['PropertyId'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'sao_hero_log_data', 'sao_static_hero_list', ['hero_log_id'], ['HeroLogId'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'sao_hero_log_data', 'sao_static_property', ['property2_property_id'], ['PropertyId'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'sao_hero_party', 'sao_hero_log_data', ['user_hero_log_id_3'], ['id'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'sao_hero_party', 'sao_hero_log_data', ['user_hero_log_id_1'], ['id'], onupdate='cascade', ondelete='cascade')
op.create_foreign_key(None, 'sao_hero_party', 'sao_hero_log_data', ['user_hero_log_id_2'], ['id'], onupdate='cascade', ondelete='cascade')
op.alter_column('sao_item_data', 'get_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('now()'),
existing_nullable=False)
op.alter_column('sao_play_sessions', 'play_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('now()'),
existing_nullable=False)
op.add_column('sao_player_quest', sa.Column('quest_type', sa.INTEGER(), server_default='1', nullable=False))
op.alter_column('sao_player_quest', 'play_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('now()'),
existing_nullable=False)
op.alter_column('sao_player_quest', 'episode_id',
existing_type=mysql.INTEGER(),
new_column_name="quest_scene_id",
type_=sa.BIGINT(),
nullable=False)
op.create_foreign_key(None, 'sao_player_quest', 'sao_static_quest', ['quest_scene_id'], ['QuestSceneId'], onupdate='cascade', ondelete='cascade')
op.add_column('sao_profile', sa.Column('my_shop', sa.INTEGER(), nullable=True))
op.add_column('sao_profile', sa.Column('fav_hero', sa.INTEGER(), nullable=True))
op.add_column('sao_profile', sa.Column('when_register', sa.TIMESTAMP(), server_default=sa.text('now()'), nullable=True))
op.add_column('sao_profile', sa.Column('last_login_date', sa.TIMESTAMP(), nullable=True))
op.add_column('sao_profile', sa.Column('last_yui_medal_date', sa.TIMESTAMP(), nullable=True))
op.add_column('sao_profile', sa.Column('last_bonus_yui_medal_date', sa.TIMESTAMP(), nullable=True))
op.add_column('sao_profile', sa.Column('last_comeback_date', sa.TIMESTAMP(), nullable=True))
op.add_column('sao_profile', sa.Column('last_login_bonus_date', sa.TIMESTAMP(), nullable=True))
op.add_column('sao_profile', sa.Column('ad_confirm_date', sa.TIMESTAMP(), nullable=True))
op.add_column('sao_profile', sa.Column('login_ct', sa.INTEGER(), server_default='0', nullable=True))
op.create_foreign_key(None, 'sao_profile', 'sao_hero_log_data', ['fav_hero'], ['id'], onupdate='cascade', ondelete='set null')
def downgrade():
op.drop_constraint("sao_profile_ibfk_2", 'sao_profile', type_='foreignkey')
op.drop_column('sao_profile', 'login_ct')
op.drop_column('sao_profile', 'ad_confirm_date')
op.drop_column('sao_profile', 'last_login_bonus_date')
op.drop_column('sao_profile', 'last_comeback_date')
op.drop_column('sao_profile', 'last_bonus_yui_medal_date')
op.drop_column('sao_profile', 'last_yui_medal_date')
op.drop_column('sao_profile', 'last_login_date')
op.drop_column('sao_profile', 'when_register')
op.drop_column('sao_profile', 'fav_hero')
op.drop_column('sao_profile', 'my_shop')
op.alter_column('sao_player_quest', 'quest_scene_id',
existing_type=mysql.BIGINT(),
new_column_name="episode_id",
type_=sa.INTEGER(),
nullable=False)
op.drop_constraint("sao_player_quest_ibfk_2", 'sao_player_quest', type_='foreignkey')
op.alter_column('sao_player_quest', 'play_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('CURRENT_TIMESTAMP'),
existing_nullable=False)
op.drop_column('sao_player_quest', 'quest_scene_id')
op.drop_column('sao_player_quest', 'quest_type')
op.alter_column('sao_play_sessions', 'play_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('CURRENT_TIMESTAMP'),
existing_nullable=False)
op.alter_column('sao_item_data', 'get_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('CURRENT_TIMESTAMP'),
existing_nullable=False)
op.drop_constraint("sao_hero_party_ibfk_2", 'sao_hero_party', type_='foreignkey')
op.drop_constraint("sao_hero_party_ibfk_3", 'sao_hero_party', type_='foreignkey')
op.drop_constraint("sao_hero_party_ibfk_4", 'sao_hero_party', type_='foreignkey')
op.alter_column("sao_hero_log_data", "hero_log_id",
existing_type=sa.BIGINT(),
new_column_name="user_hero_log_id",
type_=sa.Integer(),
nullable=False)
op.drop_constraint("sao_hero_log_data_ibfk_2", 'sao_hero_log_data', type_='foreignkey')
op.drop_constraint("sao_hero_log_data_ibfk_3", 'sao_hero_log_data', type_='foreignkey')
op.drop_constraint("sao_hero_log_data_ibfk_4", 'sao_hero_log_data', type_='foreignkey')
op.drop_constraint("sao_hero_log_data_ibfk_5", 'sao_hero_log_data', type_='foreignkey')
op.drop_constraint("sao_hero_log_data_ibfk_6", 'sao_hero_log_data', type_='foreignkey')
op.drop_constraint("sao_hero_log_data_ibfk_7", 'sao_hero_log_data', type_='foreignkey')
op.drop_constraint("sao_hero_log_data_ibfk_8", 'sao_hero_log_data', type_='foreignkey')
op.drop_constraint("sao_hero_log_data_ibfk_9", 'sao_hero_log_data', type_='foreignkey')
op.drop_constraint("sao_hero_log_data_ibfk_10", 'sao_hero_log_data', type_='foreignkey')
op.drop_constraint("sao_hero_log_data_ibfk_11", 'sao_hero_log_data', type_='foreignkey')
op.drop_constraint("sao_hero_log_data_ibfk_12", 'sao_hero_log_data', type_='foreignkey')
op.drop_constraint("sao_hero_log_data_ibfk_13", 'sao_hero_log_data', type_='foreignkey')
op.alter_column('sao_hero_log_data', 'get_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('CURRENT_TIMESTAMP'),
existing_nullable=False)
op.alter_column('sao_hero_log_data', 'skill_slot5_skill_id',
existing_type=sa.BIGINT(),
type_=mysql.INTEGER(),
nullable=False)
op.alter_column('sao_hero_log_data', 'skill_slot4_skill_id',
existing_type=sa.BIGINT(),
type_=mysql.INTEGER(),
nullable=False)
op.alter_column('sao_hero_log_data', 'skill_slot3_skill_id',
existing_type=sa.BIGINT(),
type_=mysql.INTEGER(),
nullable=False)
op.alter_column('sao_hero_log_data', 'skill_slot2_skill_id',
existing_type=sa.BIGINT(),
type_=mysql.INTEGER(),
nullable=False)
op.alter_column('sao_hero_log_data', 'skill_slot1_skill_id',
existing_type=sa.BIGINT(),
type_=mysql.INTEGER(),
nullable=False)
op.alter_column('sao_hero_log_data', 'sub_equipment',
existing_type=mysql.INTEGER(),
nullable=False)
op.alter_column('sao_hero_log_data', 'main_weapon',
existing_type=mysql.INTEGER(),
nullable=False)
op.drop_column('sao_hero_log_data', 'converted_card_num')
op.drop_column('sao_hero_log_data', 'property4_value2')
op.drop_column('sao_hero_log_data', 'property4_value1')
op.drop_column('sao_hero_log_data', 'property4_property_id')
op.drop_column('sao_hero_log_data', 'property3_value2')
op.drop_column('sao_hero_log_data', 'property3_value1')
op.drop_column('sao_hero_log_data', 'property3_property_id')
op.drop_column('sao_hero_log_data', 'property2_value2')
op.drop_column('sao_hero_log_data', 'property2_value1')
op.drop_column('sao_hero_log_data', 'property2_property_id')
op.drop_column('sao_hero_log_data', 'property1_value2')
op.drop_column('sao_hero_log_data', 'property1_value1')
op.drop_column('sao_hero_log_data', 'property1_property_id')
op.drop_column('sao_hero_log_data', 'is_protect')
op.drop_column('sao_hero_log_data', 'is_shop_purchase')
op.drop_column('sao_hero_log_data', 'awakening_exp')
op.drop_column('sao_hero_log_data', 'awakening_stage')
op.drop_column('sao_hero_log_data', 'is_awakenable')
op.drop_column('sao_hero_log_data', 'max_level_extend_num')
op.drop_constraint("sao_equipment_data_ibfk_2", 'sao_equipment_data', type_='foreignkey')
op.drop_constraint("sao_equipment_data_ibfk_3", 'sao_equipment_data', type_='foreignkey')
op.drop_constraint("sao_equipment_data_ibfk_4", 'sao_equipment_data', type_='foreignkey')
op.drop_constraint("sao_equipment_data_ibfk_5", 'sao_equipment_data', type_='foreignkey')
op.drop_constraint("sao_equipment_data_ibfk_6", 'sao_equipment_data', type_='foreignkey')
op.alter_column('sao_equipment_data', 'get_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('CURRENT_TIMESTAMP'),
existing_nullable=False)
op.alter_column('sao_equipment_data', 'equipment_id',
existing_type=sa.BIGINT(),
type_=mysql.INTEGER(),
existing_nullable=False)
op.drop_column('sao_equipment_data', 'converted_card_num')
op.drop_column('sao_equipment_data', 'property4_value2')
op.drop_column('sao_equipment_data', 'property4_value1')
op.drop_column('sao_equipment_data', 'property4_property_id')
op.drop_column('sao_equipment_data', 'property3_value2')
op.drop_column('sao_equipment_data', 'property3_value1')
op.drop_column('sao_equipment_data', 'property3_property_id')
op.drop_column('sao_equipment_data', 'property2_value2')
op.drop_column('sao_equipment_data', 'property2_value1')
op.drop_column('sao_equipment_data', 'property2_property_id')
op.drop_column('sao_equipment_data', 'property1_value2')
op.drop_column('sao_equipment_data', 'property1_value1')
op.drop_column('sao_equipment_data', 'property1_property_id')
op.drop_column('sao_equipment_data', 'is_protect')
op.drop_column('sao_equipment_data', 'is_shop_purchase')

View File

@ -0,0 +1,28 @@
"""chuni_team_points
Revision ID: 745448d83696
Revises: 5ea363686347
Create Date: 2024-06-29 00:05:22.479187
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '745448d83696'
down_revision = '5ea363686347'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('chuni_profile_team', sa.Column('userTeamPoint', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('chuni_profile_team', 'userTeamPoint')
# ### end Alembic commands ###

View File

@ -0,0 +1,437 @@
"""sao_backport
Revision ID: a616fd164e40
Revises: 48f4acc43a7e
Create Date: 2024-06-24 20:28:34.471282
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = 'a616fd164e40'
down_revision = '48f4acc43a7e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('sao_static_quest')
op.create_table('sao_static_quest',
sa.Column('QuestSceneId', sa.BIGINT(), nullable=False),
sa.Column('SortNo', sa.INTEGER(), nullable=False),
sa.Column('Tutorial', sa.BOOLEAN(), nullable=False),
sa.Column('ColRate', sa.DECIMAL(), nullable=False),
sa.Column('LimitDefault', sa.INTEGER(), nullable=False),
sa.Column('LimitResurrection', sa.INTEGER(), nullable=False),
sa.Column('RewardTableSubId', sa.INTEGER(), nullable=False),
sa.Column('PlayerTraceTableSubId', sa.INTEGER(), nullable=False),
sa.Column('SuccessPlayerExp', sa.INTEGER(), nullable=False),
sa.Column('FailedPlayerExp', sa.INTEGER(), nullable=False),
sa.Column('PairExpRate', sa.INTEGER(), nullable=False),
sa.Column('TrioExpRate', sa.INTEGER(), nullable=False),
sa.Column('SingleRewardVp', sa.INTEGER(), nullable=False),
sa.Column('PairRewardVp', sa.INTEGER(), nullable=False),
sa.Column('TrioRewardVp', sa.INTEGER(), nullable=False),
sa.PrimaryKeyConstraint('QuestSceneId'),
mysql_charset='utf8mb4'
)
op.create_table('sao_static_property',
sa.Column('PropertyId', sa.BIGINT(), nullable=False),
sa.Column('PropertyTargetType', sa.INTEGER(), nullable=False),
sa.Column('PropertyName', sa.VARCHAR(length=255), nullable=False),
sa.Column('PropertyName_en', sa.VARCHAR(length=255), nullable=True),
sa.Column('PropertyNameFormat', sa.VARCHAR(length=255), nullable=False),
sa.Column('PropertyNameFormat_en', sa.VARCHAR(length=255), nullable=True),
sa.Column('PropertyTypeId', sa.INTEGER(), nullable=False),
sa.Column('Value1Min', sa.INTEGER(), nullable=False),
sa.Column('Value1Max', sa.INTEGER(), nullable=False),
sa.Column('Value2Min', sa.INTEGER(), nullable=False),
sa.Column('Value2Max', sa.INTEGER(), nullable=False),
sa.PrimaryKeyConstraint('PropertyId'),
mysql_charset='utf8mb4'
)
op.create_table('sao_static_reward',
sa.Column('RewardTableId', sa.BIGINT(), nullable=False),
sa.Column('RewardTableSubId', sa.INTEGER(), nullable=False),
sa.Column('UnanalyzedLogGradeId', sa.INTEGER(), nullable=False),
sa.Column('CommonRewardType', sa.INTEGER(), nullable=False),
sa.Column('CommonRewardId', sa.INTEGER(), nullable=False),
sa.Column('CommonRewardNum', sa.INTEGER(), nullable=False),
sa.Column('StrengthMin', sa.INTEGER(), nullable=False),
sa.Column('StrengthMax', sa.INTEGER(), nullable=False),
sa.Column('PropertyTableSubId', sa.INTEGER(), nullable=False),
sa.Column('QuestInfoDisplayFlag', sa.BOOLEAN(), nullable=False),
sa.Column('Rate', sa.INTEGER(), nullable=False),
sa.PrimaryKeyConstraint('RewardTableId'),
mysql_charset='utf8mb4'
)
op.create_table('sao_static_skill',
sa.Column('SkillId', sa.BIGINT(), nullable=False),
sa.Column('WeaponTypeId', sa.INTEGER(), nullable=False),
sa.Column('Name', sa.VARCHAR(length=255), nullable=False),
sa.Column('Name_en', sa.VARCHAR(length=255), nullable=True),
sa.Column('Attack', sa.BOOLEAN(), nullable=False),
sa.Column('Passive', sa.BOOLEAN(), nullable=False),
sa.Column('Pet', sa.BOOLEAN(), nullable=False),
sa.Column('Level', sa.INTEGER(), nullable=False),
sa.Column('SkillCondition', sa.INTEGER(), nullable=False),
sa.Column('CoolTime', sa.INTEGER(), nullable=False),
sa.Column('SkillIcon', sa.VARCHAR(length=255), nullable=False),
sa.Column('FriendSkillIcon', sa.VARCHAR(length=255), nullable=False),
sa.Column('InfoText', sa.VARCHAR(length=255), nullable=False),
sa.Column('InfoText_en', sa.VARCHAR(length=255), nullable=True),
sa.PrimaryKeyConstraint('SkillId'),
mysql_charset='utf8mb4'
)
op.create_table('sao_static_trace_table',
sa.Column('PlayerTraceTableId', sa.BIGINT(), nullable=False),
sa.Column('PlayerTraceTableSubId', sa.INTEGER(), nullable=False),
sa.Column('CommonRewardType', sa.INTEGER(), nullable=False),
sa.Column('CommonRewardId', sa.INTEGER(), nullable=False),
sa.Column('CommonRewardNum', sa.INTEGER(), nullable=False),
sa.Column('Rate', sa.INTEGER(), nullable=False),
sa.PrimaryKeyConstraint('PlayerTraceTableId'),
mysql_charset='utf8mb4'
)
op.create_table('sao_player_beginner_mission',
sa.Column('id', sa.BIGINT(), nullable=False),
sa.Column('user', sa.INTEGER(), nullable=False),
sa.Column('beginner_mission_id', sa.INTEGER(), nullable=False),
sa.Column('condition_id', sa.INTEGER(), nullable=False),
sa.Column('is_seat', sa.BOOLEAN(), server_default='0', nullable=False),
sa.Column('achievement_num', sa.INTEGER(), nullable=False),
sa.Column('complete_flag', sa.BOOLEAN(), server_default='0', nullable=False),
sa.Column('complete_date', sa.TIMESTAMP(), nullable=True),
sa.Column('reward_received_flag', sa.BOOLEAN(), server_default='0', nullable=False),
sa.Column('reward_received_date', sa.TIMESTAMP(), nullable=True),
sa.ForeignKeyConstraint(['user'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user'),
sa.UniqueConstraint('user', 'condition_id', name='sao_player_beginner_mission_uk'),
mysql_charset='utf8mb4'
)
op.create_table('sao_player_resource_card',
sa.Column('id', sa.BIGINT(), nullable=False),
sa.Column('user', sa.INTEGER(), nullable=False),
sa.Column('common_reward_type', sa.INTEGER(), nullable=False),
sa.Column('common_reward_id', sa.INTEGER(), nullable=False),
sa.Column('holographic_flag', sa.BOOLEAN(), server_default='0', nullable=False),
sa.Column('serial', sa.VARCHAR(length=20), nullable=True),
sa.ForeignKeyConstraint(['user'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('serial'),
mysql_charset='utf8mb4'
)
op.create_table('sao_player_tutorial',
sa.Column('id', sa.BIGINT(), nullable=False),
sa.Column('user', sa.INTEGER(), nullable=False),
sa.Column('tutorial_byte', sa.INTEGER(), nullable=False),
sa.ForeignKeyConstraint(['user'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user', 'tutorial_byte', name='sao_player_tutorial_uk'),
mysql_charset='utf8mb4'
)
op.create_table('sao_static_episode',
sa.Column('EpisodeId', sa.BIGINT(), nullable=False),
sa.Column('EpisodeChapterId', sa.INTEGER(), nullable=False),
sa.Column('ReleaseEpisodeId', sa.INTEGER(), nullable=False),
sa.Column('Title', sa.VARCHAR(length=255), nullable=False),
sa.Column('CommentSummary', sa.VARCHAR(length=255), nullable=False),
sa.Column('ExBonusTableSubId', sa.INTEGER(), nullable=False),
sa.Column('QuestSceneId', sa.BIGINT(), nullable=True),
sa.ForeignKeyConstraint(['QuestSceneId'], ['sao_static_quest.QuestSceneId'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('EpisodeId'),
mysql_charset='utf8mb4'
)
op.create_table('sao_static_ex_bonus',
sa.Column('ExBonusTableId', sa.BIGINT(), nullable=False),
sa.Column('ExBonusTableSubId', sa.INTEGER(), nullable=False),
sa.Column('ExBonusConditionId', sa.INTEGER(), nullable=False),
sa.Column('ConditionValue1', sa.INTEGER(), nullable=False),
sa.Column('ConditionValue2', sa.INTEGER(), nullable=False),
sa.Column('CommonRewardType', sa.INTEGER(), nullable=False),
sa.Column('CommonRewardId', sa.INTEGER(), nullable=False),
sa.Column('CommonRewardNum', sa.INTEGER(), nullable=False),
sa.Column('Strength', sa.INTEGER(), nullable=False),
sa.Column('Property1PropertyId', sa.BIGINT(), nullable=False),
sa.Column('Property1Value1', sa.INTEGER(), nullable=False),
sa.Column('Property1Value2', sa.INTEGER(), nullable=False),
sa.Column('Property2PropertyId', sa.BIGINT(), nullable=False),
sa.Column('Property2Value1', sa.INTEGER(), nullable=False),
sa.Column('Property2Value2', sa.INTEGER(), nullable=False),
sa.Column('Property3PropertyId', sa.BIGINT(), nullable=False),
sa.Column('Property3Value1', sa.INTEGER(), nullable=False),
sa.Column('Property3Value2', sa.INTEGER(), nullable=False),
sa.Column('Property4PropertyId', sa.BIGINT(), nullable=False),
sa.Column('Property4Value1', sa.INTEGER(), nullable=False),
sa.Column('Property4Value2', sa.INTEGER(), nullable=False),
sa.ForeignKeyConstraint(['Property1PropertyId'], ['sao_static_property.PropertyId'], onupdate='cascade', ondelete='cascade'),
sa.ForeignKeyConstraint(['Property2PropertyId'], ['sao_static_property.PropertyId'], onupdate='cascade', ondelete='cascade'),
sa.ForeignKeyConstraint(['Property3PropertyId'], ['sao_static_property.PropertyId'], onupdate='cascade', ondelete='cascade'),
sa.ForeignKeyConstraint(['Property4PropertyId'], ['sao_static_property.PropertyId'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('ExBonusTableId'),
mysql_charset='utf8mb4'
)
op.create_table('sao_static_ex_tower',
sa.Column('ExTowerQuestId', sa.BIGINT(), nullable=False),
sa.Column('ExTowerId', sa.INTEGER(), nullable=False),
sa.Column('ReleaseExTowerQuestId', sa.INTEGER(), nullable=False),
sa.Column('Title', sa.VARCHAR(length=255), nullable=False),
sa.Column('Title_en', sa.VARCHAR(length=255), nullable=True),
sa.Column('ExBonusTableSubId', sa.INTEGER(), nullable=False),
sa.Column('QuestSceneId', sa.BIGINT(), nullable=False),
sa.ForeignKeyConstraint(['QuestSceneId'], ['sao_static_quest.QuestSceneId'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('ExTowerQuestId'),
mysql_charset='utf8mb4'
)
op.create_table('sao_static_side_quest',
sa.Column('SideQuestId', sa.BIGINT(), nullable=False),
sa.Column('DisplayName', sa.VARCHAR(length=255), nullable=False),
sa.Column('DisplayName_en', sa.VARCHAR(length=255), nullable=True),
sa.Column('EpisodeNum', sa.INTEGER(), nullable=False),
sa.Column('ExBonusTableSubId', sa.INTEGER(), nullable=False),
sa.Column('QuestSceneId', sa.BIGINT(), nullable=False),
sa.ForeignKeyConstraint(['QuestSceneId'], ['sao_static_quest.QuestSceneId'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('SideQuestId'),
sa.UniqueConstraint('SideQuestId'),
mysql_charset='utf8mb4'
)
op.create_table('sao_static_skill_table',
sa.Column('SkillTableId', sa.BIGINT(), nullable=False),
sa.Column('SkillId', sa.BIGINT(), nullable=False),
sa.Column('SkillTableSubId', sa.INTEGER(), nullable=False),
sa.Column('LevelObtained', sa.INTEGER(), nullable=False),
sa.Column('AwakeningId', sa.INTEGER(), nullable=False),
sa.ForeignKeyConstraint(['SkillId'], ['sao_static_skill.SkillId'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('SkillTableId'),
mysql_charset='utf8mb4'
)
op.create_table('sao_static_tower',
sa.Column('TowerId', sa.BIGINT(), nullable=False),
sa.Column('ReleaseTowerId', sa.INTEGER(), nullable=False),
sa.Column('ExBonusTableSubId', sa.INTEGER(), nullable=False),
sa.Column('QuestSceneId', sa.BIGINT(), nullable=False),
sa.ForeignKeyConstraint(['QuestSceneId'], ['sao_static_quest.QuestSceneId'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('TowerId'),
mysql_charset='utf8mb4'
)
op.create_table('sao_player_ex_bonus',
sa.Column('id', sa.BIGINT(), nullable=False),
sa.Column('user', sa.INTEGER(), nullable=False),
sa.Column('quest_scene_id', sa.BIGINT(), nullable=False),
sa.Column('ex_bonus_table_id', sa.BIGINT(), nullable=False),
sa.Column('quest_clear_flag', sa.BOOLEAN(), server_default='0', nullable=False),
sa.ForeignKeyConstraint(['ex_bonus_table_id'], ['sao_static_ex_bonus.ExBonusTableId'], onupdate='cascade', ondelete='cascade'),
sa.ForeignKeyConstraint(['quest_scene_id'], ['sao_static_quest.QuestSceneId'], onupdate='cascade', ondelete='cascade'),
sa.ForeignKeyConstraint(['user'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user', 'quest_scene_id', 'ex_bonus_table_id', name='sao_player_ex_bonus_uk'),
mysql_charset='utf8mb4'
)
op.create_table('sao_player_hero_card',
sa.Column('id', sa.BIGINT(), nullable=False),
sa.Column('user', sa.INTEGER(), nullable=False),
sa.Column('user_hero_id', sa.INTEGER(), nullable=False),
sa.Column('holographic_flag', sa.BOOLEAN(), server_default='0', nullable=False),
sa.Column('serial', sa.VARCHAR(length=20), nullable=True),
sa.ForeignKeyConstraint(['user'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
sa.ForeignKeyConstraint(['user_hero_id'], ['sao_hero_log_data.id'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('serial'),
mysql_charset='utf8mb4'
)
op.alter_column('sao_end_sessions', 'play_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('now()'),
existing_nullable=False)
op.drop_table('sao_static_equipment_list')
op.create_table("sao_static_equipment_list",
sa.Column("EquipmentId", sa.BIGINT, primary_key=True, nullable=False),
sa.Column("EquipmentType", sa.INTEGER, nullable=False),
sa.Column("WeaponTypeId", sa.INTEGER, nullable=False),
sa.Column("Name", sa.VARCHAR(255), nullable=False),
sa.Column("Name_en", sa.VARCHAR(255)),
sa.Column("Rarity", sa.INTEGER, nullable=False),
sa.Column("Power", sa.INTEGER, nullable=False),
sa.Column("StrengthIncrement", sa.INTEGER, nullable=False),
sa.Column("SkillCondition", sa.INTEGER, nullable=False),
sa.Column("Property1PropertyId", sa.BIGINT, sa.ForeignKey("sao_static_property.PropertyId", ondelete="cascade", onupdate="cascade"), nullable=False),
sa.Column("Property1Value1", sa.INTEGER, nullable=False),
sa.Column("Property1Value2", sa.INTEGER, nullable=False),
sa.Column("Property2PropertyId", sa.BIGINT, sa.ForeignKey("sao_static_property.PropertyId", ondelete="cascade", onupdate="cascade"), nullable=False),
sa.Column("Property2Value1", sa.INTEGER, nullable=False),
sa.Column("Property2Value2", sa.INTEGER, nullable=False),
sa.Column("Property3PropertyId", sa.BIGINT, sa.ForeignKey("sao_static_property.PropertyId", ondelete="cascade", onupdate="cascade"), nullable=False),
sa.Column("Property3Value1", sa.INTEGER, nullable=False),
sa.Column("Property3Value2", sa.INTEGER, nullable=False),
sa.Column("Property4PropertyId", sa.BIGINT, sa.ForeignKey("sao_static_property.PropertyId", ondelete="cascade", onupdate="cascade"), nullable=False),
sa.Column("Property4Value1", sa.INTEGER, nullable=False),
sa.Column("Property4Value2", sa.INTEGER, nullable=False),
sa.Column("SalePrice", sa.INTEGER, nullable=False),
sa.Column("CompositionExp", sa.INTEGER, nullable=False),
sa.Column("AwakeningExp", sa.INTEGER, nullable=False),
sa.Column("FlavorText", sa.VARCHAR(255), nullable=False),
sa.Column("FlavorText_en", sa.VARCHAR(255)),
mysql_charset="utf8mb4"
)
op.drop_table('sao_static_hero_list')
op.create_table("sao_static_hero_list",
sa.Column("HeroLogId", sa.BIGINT, primary_key=True, nullable=False),
sa.Column("CharaId", sa.INTEGER, nullable=False),
sa.Column("Name", sa.VARCHAR(255), nullable=False),
sa.Column("Nickname", sa.VARCHAR(255), nullable=False),
sa.Column("Name_en", sa.VARCHAR(255)),
sa.Column("Nickname_en", sa.VARCHAR(255)),
sa.Column("Rarity", sa.INTEGER, nullable=False),
sa.Column("WeaponTypeId", sa.INTEGER, nullable=False),
sa.Column("HeroLogRoleId", sa.INTEGER, nullable=False),
sa.Column("CostumeTypeId", sa.INTEGER, nullable=False),
sa.Column("UnitId", sa.INTEGER, nullable=False),
sa.Column("DefaultEquipmentId1", sa.BIGINT, sa.ForeignKey("sao_static_equipment_list.EquipmentId", ondelete="cascade", onupdate="cascade")),
sa.Column("DefaultEquipmentId2", sa.BIGINT, sa.ForeignKey("sao_static_equipment_list.EquipmentId", ondelete="cascade", onupdate="cascade")),
sa.Column("SkillTableSubId", sa.INTEGER, nullable=False),
sa.Column("HpMin", sa.INTEGER, nullable=False),
sa.Column("HpMax", sa.INTEGER, nullable=False),
sa.Column("StrMin", sa.INTEGER, nullable=False),
sa.Column("StrMax", sa.INTEGER, nullable=False),
sa.Column("VitMin", sa.INTEGER, nullable=False),
sa.Column("VitMax", sa.INTEGER, nullable=False),
sa.Column("IntMin", sa.INTEGER, nullable=False),
sa.Column("IntMax", sa.INTEGER, nullable=False),
sa.Column("Property1PropertyId", sa.BIGINT, sa.ForeignKey("sao_static_property.PropertyId", ondelete="cascade", onupdate="cascade"), nullable=False),
sa.Column("Property1Value1", sa.INTEGER, nullable=False),
sa.Column("Property1Value2", sa.INTEGER, nullable=False),
sa.Column("Property2PropertyId", sa.BIGINT, sa.ForeignKey("sao_static_property.PropertyId", ondelete="cascade", onupdate="cascade"), nullable=False),
sa.Column("Property2Value1", sa.INTEGER, nullable=False),
sa.Column("Property2Value2", sa.INTEGER, nullable=False),
sa.Column("Property3PropertyId", sa.BIGINT, sa.ForeignKey("sao_static_property.PropertyId", ondelete="cascade", onupdate="cascade"), nullable=False),
sa.Column("Property3Value1", sa.INTEGER, nullable=False),
sa.Column("Property3Value2", sa.INTEGER, nullable=False),
sa.Column("Property4PropertyId", sa.BIGINT, sa.ForeignKey("sao_static_property.PropertyId", ondelete="cascade", onupdate="cascade"), nullable=False),
sa.Column("Property4Value1", sa.INTEGER, nullable=False),
sa.Column("Property4Value2", sa.INTEGER, nullable=False),
sa.Column("FlavorText", sa.VARCHAR(255), nullable=False),
sa.Column("FlavorText_en", sa.VARCHAR(255)),
sa.Column("SalePrice", sa.INTEGER, nullable=False),
sa.Column("CompositionExp", sa.INTEGER, nullable=False),
sa.Column("AwakeningExp", sa.INTEGER, nullable=False),
sa.Column("Slot4UnlockLevel", sa.INTEGER, nullable=False),
sa.Column("Slot5UnlockLevel", sa.INTEGER, nullable=False),
sa.Column("CollectionEmptyFrameDisplayFlag", sa.BOOLEAN, nullable=False),
mysql_charset="utf8mb4"
)
op.drop_table('sao_static_item_list')
op.create_table("sao_static_item_list",
sa.Column("ItemId", sa.INTEGER, nullable=False, primary_key=True),
sa.Column("ItemTypeId", sa.INTEGER, nullable=False),
sa.Column("Name", sa.VARCHAR(255), nullable=False),
sa.Column("Name_en", sa.VARCHAR(255)),
sa.Column("Rarity", sa.INTEGER, nullable=False),
sa.Column("Value", sa.INTEGER, nullable=False),
sa.Column("PropertyId", sa.BIGINT, sa.ForeignKey("sao_static_property.PropertyId", ondelete="cascade", onupdate="cascade"), nullable=False),
sa.Column("PropertyValue1Min", sa.INTEGER, nullable=False),
sa.Column("PropertyValue1Max", sa.INTEGER, nullable=False),
sa.Column("PropertyValue2Min", sa.INTEGER, nullable=False),
sa.Column("PropertyValue2Max", sa.INTEGER, nullable=False),
sa.Column("FlavorText", sa.VARCHAR(255), nullable=False),
sa.Column("FlavorText_en", sa.VARCHAR(255)),
sa.Column("SalePrice", sa.INTEGER, nullable=False),
sa.Column("ItemIcon", sa.VARCHAR(255), nullable=False),
mysql_charset="utf8mb4"
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('sao_static_item_list')
op.create_table("sao_static_item_list",
sa.Column("id", sa.Integer, primary_key=True, nullable=False),
sa.Column("version", sa.Integer),
sa.Column("itemId", sa.Integer),
sa.Column("itemTypeId", sa.Integer),
sa.Column("name", sa.String(255)),
sa.Column("rarity", sa.Integer),
sa.Column("flavorText", sa.String(255)),
sa.Column("enabled", sa.Boolean),
sa.UniqueConstraint(
"version", "itemId", name="sao_static_item_list_uk"
),
mysql_charset="utf8mb4"
)
op.drop_table('sao_static_hero_list')
op.create_table("sao_static_hero_list",
sa.Column("id", sa.Integer, primary_key=True, nullable=False),
sa.Column("version", sa.Integer),
sa.Column("heroLogId", sa.Integer),
sa.Column("name", sa.String(255)),
sa.Column("nickname", sa.String(255)),
sa.Column("rarity", sa.Integer),
sa.Column("skillTableSubId", sa.Integer),
sa.Column("awakeningExp", sa.Integer),
sa.Column("flavorText", sa.String(255)),
sa.Column("enabled", sa.Boolean),
sa.UniqueConstraint(
"version", "heroLogId", name="sao_static_hero_list_uk"
),
mysql_charset="utf8mb4",
)
op.drop_table('sao_static_equipment_list')
op.create_table("sao_static_equipment_list",
sa.Column("id", sa.Integer, primary_key=True, nullable=False),
sa.Column("version", sa.Integer),
sa.Column("equipmentId", sa.Integer),
sa.Column("equipmentType", sa.Integer),
sa.Column("weaponTypeId", sa.Integer),
sa.Column("name", sa.String(255)),
sa.Column("rarity", sa.Integer),
sa.Column("flavorText", sa.String(255)),
sa.Column("enabled", sa.Boolean),
sa.UniqueConstraint(
"version", "equipmentId", name="sao_static_equipment_list_uk"
),
mysql_charset="utf8mb4"
)
op.alter_column('sao_end_sessions', 'play_date',
existing_type=mysql.TIMESTAMP(),
server_default=sa.text('CURRENT_TIMESTAMP'),
existing_nullable=False)
op.drop_table('sao_player_hero_card')
op.drop_table('sao_player_ex_bonus')
op.drop_table('sao_static_tower')
op.drop_table('sao_static_skill_table')
op.drop_table('sao_static_side_quest')
op.drop_table('sao_static_ex_tower')
op.drop_table('sao_static_ex_bonus')
op.drop_table('sao_static_episode')
op.drop_table('sao_player_tutorial')
op.drop_table('sao_player_resource_card')
op.drop_table('sao_player_beginner_mission')
op.drop_table('sao_static_trace_table')
op.drop_table('sao_static_skill')
op.drop_table('sao_static_reward')
op.drop_table('sao_static_property')
op.drop_table('sao_static_quest')
op.create_table('sao_static_quest',
sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False),
sa.Column('enabled', mysql.TINYINT(display_width=1), autoincrement=False, nullable=True),
sa.Column('version', mysql.INTEGER(), autoincrement=False, nullable=True),
sa.Column('questSceneId', mysql.INTEGER(), autoincrement=False, nullable=True),
sa.Column('sortNo', mysql.INTEGER(), autoincrement=False, nullable=True),
sa.Column('name', mysql.VARCHAR(charset='utf8mb4', collation='utf8mb4_general_ci', length=255), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint("version", "questSceneId", name="sao_static_quest_uk"),
mysql_charset='utf8mb4'
)
# ### end Alembic commands ###

View File

@ -0,0 +1,24 @@
"""mai2_favorite_song_ordering
Revision ID: bc91c1206dca
Revises: 28443e2da5b8
Create Date: 2024-09-16 14:24:56.714066
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'bc91c1206dca'
down_revision = '28443e2da5b8'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('mai2_item_favorite_music', sa.Column('orderId', sa.Integer(), nullable=True))
def downgrade():
op.drop_column('mai2_item_favorite_music', 'orderId')

View File

@ -0,0 +1,38 @@
"""mai2_add_photos
Revision ID: d8cd1fa04c2a
Revises: 54a84103b84e
Create Date: 2024-10-06 03:09:15.959817
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = 'd8cd1fa04c2a'
down_revision = '54a84103b84e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('mai2_user_photo',
sa.Column('id', sa.VARCHAR(length=36), nullable=False),
sa.Column('user', sa.Integer(), nullable=False),
sa.Column('playlog_num', sa.INTEGER(), nullable=False),
sa.Column('track_num', sa.INTEGER(), nullable=False),
sa.Column('when_upload', sa.TIMESTAMP(), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['user'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user', 'playlog_num', 'track_num', name='mai2_user_photo_uk'),
mysql_charset='utf8mb4'
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('mai2_user_photo')
# ### end Alembic commands ###

View File

@ -1,54 +1,70 @@
import logging, coloredlogs
from typing import Optional
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy import create_engine
from logging.handlers import TimedRotatingFileHandler
import logging
import os
import secrets, string
import bcrypt
import secrets
import ssl
import string
import warnings
from hashlib import sha256
from logging.handlers import TimedRotatingFileHandler
from typing import Any, ClassVar, Optional
import alembic.config
import glob
import bcrypt
import coloredlogs
import pymysql.err
from sqlalchemy.ext.asyncio import (
AsyncEngine,
AsyncSession,
create_async_engine,
)
from core.config import CoreConfig
from core.data.schema import *
from core.utils import Utils
from core.data.schema import ArcadeData, BaseData, CardData, UserData, metadata
from core.utils import MISSING, Utils
class Data:
engine = None
session = None
user = None
arcade = None
card = None
base = None
engine: ClassVar[AsyncEngine] = MISSING
session: ClassVar[AsyncSession] = MISSING
user: ClassVar[UserData] = MISSING
arcade: ClassVar[ArcadeData] = MISSING
card: ClassVar[CardData] = MISSING
base: ClassVar[BaseData] = MISSING
def __init__(self, cfg: CoreConfig) -> None:
self.config = cfg
if self.config.database.sha2_password:
passwd = sha256(self.config.database.password.encode()).digest()
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{passwd.hex()}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}?charset=utf8mb4"
self.__url = f"{self.config.database.protocol}+aiomysql://{self.config.database.username}:{passwd.hex()}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}"
else:
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{self.config.database.password}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}?charset=utf8mb4"
self.__url = f"{self.config.database.protocol}+aiomysql://{self.config.database.username}:{self.config.database.password}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}"
if Data.engine is None:
Data.engine = create_engine(self.__url, pool_recycle=3600)
if Data.engine is MISSING:
Data.engine = create_async_engine(
self.__url,
pool_recycle=3600,
isolation_level="AUTOCOMMIT",
connect_args={
"charset": "utf8mb4",
"ssl": self.config.database.create_ssl_context_if_enabled(),
},
)
self.__engine = Data.engine
if Data.session is None:
s = sessionmaker(bind=Data.engine, autoflush=True, autocommit=True)
Data.session = scoped_session(s)
if Data.session is MISSING:
Data.session = AsyncSession(Data.engine, expire_on_commit=False)
if Data.user is None:
if Data.user is MISSING:
Data.user = UserData(self.config, self.session)
if Data.arcade is None:
if Data.arcade is MISSING:
Data.arcade = ArcadeData(self.config, self.session)
if Data.card is None:
if Data.card is MISSING:
Data.card = CardData(self.config, self.session)
if Data.base is None:
if Data.base is MISSING:
Data.base = BaseData(self.config, self.session)
self.logger = logging.getLogger("database")
@ -94,40 +110,73 @@ class Data:
alembic.config.main(argv=alembicArgs)
os.chdir(old_dir)
def create_database(self):
async def create_database(self):
self.logger.info("Creating databases...")
metadata.create_all(
self.engine,
checkfirst=True,
)
for _, mod in Utils.get_all_titles().items():
if hasattr(mod, "database"):
mod.database(self.config)
metadata.create_all(
self.engine,
checkfirst=True,
)
with warnings.catch_warnings():
# SQLAlchemy will generate a nice primary key constraint name, but in
# MySQL/MariaDB the constraint name is always PRIMARY. Every time a
# custom primary key name is generated, a warning is emitted from pymysql,
# which we don't care about. Other warnings may be helpful though, don't
# suppress everything.
warnings.filterwarnings(
action="ignore",
message=r"Name '(.+)' ignored for PRIMARY key\.",
category=pymysql.err.Warning,
)
# Stamp the end revision as if alembic had created it, so it can take off after this.
self.__alembic_cmd(
"stamp",
"head",
)
async with self.engine.begin() as conn:
await conn.run_sync(metadata.create_all, checkfirst=True)
def schema_upgrade(self, ver: str = None):
self.__alembic_cmd(
"upgrade",
"head" if not ver else ver,
)
for _, mod in Utils.get_all_titles().items():
if hasattr(mod, "database"):
mod.database(self.config)
await conn.run_sync(metadata.create_all, checkfirst=True)
# Stamp the end revision as if alembic had created it, so it can take off after this.
self.__alembic_cmd(
"stamp",
"head",
)
def schema_upgrade(self, ver: Optional[str] = None):
with warnings.catch_warnings():
# SQLAlchemy will generate a nice primary key constraint name, but in
# MySQL/MariaDB the constraint name is always PRIMARY. Every time a
# custom primary key name is generated, a warning is emitted from pymysql,
# which we don't care about. Other warnings may be helpful though, don't
# suppress everything.
warnings.filterwarnings(
action="ignore",
message=r"Name '(.+)' ignored for PRIMARY key\.",
category=pymysql.err.Warning,
)
self.__alembic_cmd(
"upgrade",
"head" if not ver else ver,
)
def schema_downgrade(self, ver: str):
self.__alembic_cmd(
"downgrade",
ver,
)
with warnings.catch_warnings():
# SQLAlchemy will generate a nice primary key constraint name, but in
# MySQL/MariaDB the constraint name is always PRIMARY. Every time a
# custom primary key name is generated, a warning is emitted from pymysql,
# which we don't care about. Other warnings may be helpful though, don't
# suppress everything.
warnings.filterwarnings(
action="ignore",
message=r"Name '(.+)' ignored for PRIMARY key\.",
category=pymysql.err.Warning,
)
async def create_owner(self, email: Optional[str] = None, code: Optional[str] = "00000000000000000000") -> None:
self.__alembic_cmd(
"downgrade",
ver,
)
async def create_owner(self, email: Optional[str] = None, code: str = "00000000000000000000") -> None:
pw = "".join(
secrets.choice(string.ascii_letters + string.digits) for i in range(20)
)
@ -150,12 +199,12 @@ class Data:
async def migrate(self) -> None:
exist = await self.base.execute("SELECT * FROM alembic_version")
if exist is not None:
self.logger.warn("No need to migrate as you have already migrated to alembic. If you are trying to upgrade the schema, use `upgrade` instead!")
self.logger.warning("No need to migrate as you have already migrated to alembic. If you are trying to upgrade the schema, use `upgrade` instead!")
return
self.logger.info("Upgrading to latest with legacy system")
if not await self.legacy_upgrade():
self.logger.warn("No need to migrate as you have already deleted the old schema_versions system. If you are trying to upgrade the schema, use `upgrade` instead!")
self.logger.warning("No need to migrate as you have already deleted the old schema_versions system. If you are trying to upgrade the schema, use `upgrade` instead!")
return
self.logger.info("Done")

View File

@ -1,16 +1,16 @@
from typing import Optional, Dict, List
from sqlalchemy import Table, Column, and_, or_
from sqlalchemy.sql.schema import ForeignKey, PrimaryKeyConstraint
from sqlalchemy.types import Integer, String, Boolean, JSON
from sqlalchemy.sql import func, select
import re
from typing import List, Optional
from sqlalchemy import Column, Table, and_, or_
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
import re
from sqlalchemy.sql import func, select
from sqlalchemy.sql.schema import ForeignKey, PrimaryKeyConstraint
from sqlalchemy.types import JSON, Boolean, Integer, String
from core.data.schema.base import BaseData, metadata
from core.const import *
arcade = Table(
arcade: Table = Table(
"arcade",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -26,7 +26,7 @@ arcade = Table(
mysql_charset="utf8mb4",
)
machine = Table(
machine: Table = Table(
"machine",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -47,7 +47,7 @@ machine = Table(
mysql_charset="utf8mb4",
)
arcade_owner = Table(
arcade_owner: Table = Table(
"arcade_owner",
metadata,
Column(
@ -69,7 +69,7 @@ arcade_owner = Table(
class ArcadeData(BaseData):
async def get_machine(self, serial: str = None, id: int = None) -> Optional[Row]:
async def get_machine(self, serial: Optional[str] = None, id: Optional[int] = None) -> Optional[Row]:
if serial is not None:
serial = serial.replace("-", "")
if len(serial) == 11:
@ -98,8 +98,8 @@ class ArcadeData(BaseData):
self,
arcade_id: int,
serial: str = "",
board: str = None,
game: str = None,
board: Optional[str] = None,
game: Optional[str] = None,
is_cab: bool = False,
) -> Optional[int]:
if not arcade_id:
@ -150,8 +150,8 @@ class ArcadeData(BaseData):
async def create_arcade(
self,
name: str = None,
nickname: str = None,
name: Optional[str] = None,
nickname: Optional[str] = None,
country: str = "JPN",
country_id: int = 1,
state: str = "",
@ -206,17 +206,6 @@ class ArcadeData(BaseData):
return None
return result.lastrowid
def format_serial( # TODO: Actual serial stuff
self, platform_code: str, platform_rev: int, serial_num: int, append: int = 8888
) -> str:
return f"{platform_code}{platform_rev:02d}A{serial_num:04d}{append:04d}" # 0x41 = A, 0x52 = R
def validate_keychip_format(self, serial: str) -> bool:
if re.fullmatch(r"^A[0-9]{2}[E|X][-]?[0-9]{2}[A-HJ-NP-Z][0-9]{4}([0-9]{4})?$", serial) is None:
return False
return True
async def get_arcade_by_name(self, name: str) -> Optional[List[Row]]:
sql = arcade.select(or_(arcade.c.name.like(f"%{name}%"), arcade.c.nickname.like(f"%{name}%")))
result = await self.execute(sql)
@ -230,3 +219,53 @@ class ArcadeData(BaseData):
if result is None:
return None
return result.fetchall()
async def get_num_generated_keychips(self) -> Optional[int]:
result = await self.execute(select(func.count()).select_from(machine).where(machine.c.serial.like("A69A%")))
if result:
return result.fetchone()['count_1']
self.logger.error("Failed to count machine serials that start with A69A!")
def format_serial(
self, platform_code: str, platform_rev: int, serial_letter: str, serial_num: int, append: int, dash: bool = False
) -> str:
return f"{platform_code}{'-' if dash else ''}{platform_rev:02d}{serial_letter}{serial_num:04d}{append:04d}"
def validate_keychip_format(self, serial: str) -> bool:
# For the 2nd letter, E and X are the only "real" values that have been observed (A is used for generated keychips)
if re.fullmatch(r"^A[0-9]{2}[A-Z][-]?[0-9]{2}[A-HJ-NP-Z][0-9]{4}([0-9]{4})?$", serial) is None:
return False
return True
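A quick illustrative check of the two helpers above (a sketch only; arcade_data stands in for an already-constructed ArcadeData instance and the values are hypothetical):
serial = arcade_data.format_serial("A69A", 1, "A", 5, 6712)        # -> "A69A01A00056712"
assert arcade_data.validate_keychip_format(serial)
assert arcade_data.validate_keychip_format("A69A-01A00056712")     # the dashed form is accepted as well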
# Thanks bottersnike!
def get_keychip_suffix(self, year: int, month: int) -> str:
assert year > 1957
assert 1 <= month <= 12
year -= 1957
# Jan/Feb/Mar are from the previous tax year
if month < 4:
year -= 1
assert year >= 1 and year <= 99
month = ((month - 1) + 9) % 12 # Offset so April=0
return f"{year:02}{month // 6:01}{month % 6 + 1:01}"
def parse_keychip_suffix(self, suffix: str) -> tuple[int, int]:
year = int(suffix[0:2])
half = int(suffix[2])
assert half in (0, 1)
period = int(suffix[3])
assert period in (1, 2, 3, 4, 5, 6)
month = half * 6 + (period - 1)
month = ((month + 3) % 12) + 1 # Offset so Jan=1
# Jan/Feb/Mar are from the previous tax year
if month < 4:
year += 1
year += 1957
return (year, month)
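A worked example of the tax-year arithmetic in get_keychip_suffix / parse_keychip_suffix (same assumption as above: arcade_data is an ArcadeData instance):
suffix = arcade_data.get_keychip_suffix(2024, 11)               # tax year 67, 2nd half, 2nd period -> "6712"
assert arcade_data.parse_keychip_suffix(suffix) == (2024, 11)   # the two helpers round-trip
assert arcade_data.get_keychip_suffix(2025, 1) == "6714"        # Jan 2025 still counts as tax year 67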

View File

@ -1,22 +1,23 @@
import asyncio
import json
import logging
from random import randrange
from typing import Any, Optional, Dict, List
from typing import Any, Dict, List, Optional
from sqlalchemy import Column, MetaData, Table
from sqlalchemy.engine import Row
from sqlalchemy.engine.cursor import CursorResult
from sqlalchemy.engine.base import Connection
from sqlalchemy.sql import text, func, select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy import MetaData, Table, Column
from sqlalchemy.types import Integer, String, TIMESTAMP, JSON, INTEGER, TEXT
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.schema import ForeignKey
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql import func, text
from sqlalchemy.types import INTEGER, JSON, TEXT, TIMESTAMP, Integer, String
from core.config import CoreConfig
metadata = MetaData()
event_log = Table(
event_log: Table = Table(
"event_log",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -37,7 +38,7 @@ event_log = Table(
class BaseData:
def __init__(self, cfg: CoreConfig, conn: Connection) -> None:
def __init__(self, cfg: CoreConfig, conn: AsyncSession) -> None:
self.config = cfg
self.conn = conn
self.logger = logging.getLogger("database")
@ -47,7 +48,7 @@ class BaseData:
try:
self.logger.debug(f"SQL Execute: {''.join(str(sql).splitlines())}")
res = self.conn.execute(text(sql), opts)
res = await self.conn.execute(text(sql), opts)
except SQLAlchemyError as e:
self.logger.error(f"SQLAlchemy error {e}")
@ -59,7 +60,7 @@ class BaseData:
except Exception:
try:
res = self.conn.execute(sql, opts)
res = await self.conn.execute(sql, opts)
except SQLAlchemyError as e:
self.logger.error(f"SQLAlchemy error {e}")
@ -83,7 +84,7 @@ class BaseData:
async def log_event(
self, system: str, type: str, severity: int, message: str, details: Dict = {}, user: int = None,
arcade: int = None, machine: int = None, ip: str = None, game: str = None, version: str = None
arcade: int = None, machine: int = None, ip: Optional[str] = None, game: Optional[str] = None, version: Optional[str] = None
) -> Optional[int]:
sql = event_log.insert().values(
system=system,

View File

@ -1,13 +1,14 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint
from sqlalchemy.types import Integer, String, Boolean, TIMESTAMP, BIGINT
from sqlalchemy.sql.schema import ForeignKey
from sqlalchemy.sql import func
from sqlalchemy import Column, Table, UniqueConstraint
from sqlalchemy.engine import Row
from sqlalchemy.sql import func
from sqlalchemy.sql.schema import ForeignKey
from sqlalchemy.types import BIGINT, TIMESTAMP, VARCHAR, Boolean, Integer, String
from core.data.schema.base import BaseData, metadata
aime_card = Table(
aime_card: Table = Table(
"aime_card",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -19,12 +20,16 @@ aime_card = Table(
Column("last_login_date", TIMESTAMP, onupdate=func.now()),
Column("is_locked", Boolean, server_default="0"),
Column("is_banned", Boolean, server_default="0"),
Column("memo", VARCHAR(16)),
UniqueConstraint("user", "access_code", name="aime_card_uk"),
mysql_charset="utf8mb4",
)
class CardData(BaseData):
moble_os_codes = set([0x06, 0x07, 0x10, 0x12, 0x13, 0x14, 0x15, 0x17, 0x18])
card_os_codes = set([0x20, 0xF0, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7])
async def get_card_by_access_code(self, access_code: str) -> Optional[Row]:
sql = aime_card.select(aime_card.c.access_code == access_code)
@ -140,6 +145,16 @@ class CardData(BaseData):
if not result:
self.logger.error(f"Failed to update IDm to {idm} for {access_code}")
async def set_access_code_by_access_code(self, old_ac: str, new_ac: str) -> None:
result = await self.execute(aime_card.update(aime_card.c.access_code == old_ac).values(access_code=new_ac))
if not result:
self.logger.error(f"Failed to change card access code from {old_ac} to {new_ac}")
async def set_memo_by_access_code(self, access_code: str, memo: str) -> None:
result = await self.execute(aime_card.update(aime_card.c.access_code == access_code).values(memo=memo))
if not result:
self.logger.error(f"Failed to add memo to card {access_code}")
def to_access_code(self, luid: str) -> str:
"""
Given a FeliCa card's internal 16 hex character luid, convert it to a 0-padded 20 digit access code as a string
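A hedged usage sketch of the conversion described in the docstring (card_data stands in for a CardData instance; only the shape of the result is asserted, since the numeric mapping lives in the method body not shown in this diff):
ac = card_data.to_access_code("0123456789ABCDEF")   # hypothetical FeliCa IDm/luid
assert len(ac) == 20 and ac.isdigit()                # 0-padded 20 digit access code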

View File

@ -1,15 +1,15 @@
from typing import Optional, List
from sqlalchemy import Table, Column
from sqlalchemy.types import Integer, String, TIMESTAMP
from sqlalchemy.sql import func
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql import func, select
from sqlalchemy.engine import Row
from typing import List, Optional
import bcrypt
from sqlalchemy import Column, Table
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
from sqlalchemy.sql import func, select
from sqlalchemy.types import TIMESTAMP, Integer, String
from core.data.schema.base import BaseData, metadata
aime_user = Table(
aime_user: Table = Table(
"aime_user",
metadata,
Column("id", Integer, nullable=False, primary_key=True, autoincrement=True),
@ -26,10 +26,10 @@ aime_user = Table(
class UserData(BaseData):
async def create_user(
self,
id: int = None,
username: str = None,
email: str = None,
password: str = None,
id: Optional[int] = None,
username: Optional[str] = None,
email: Optional[str] = None,
password: Optional[str] = None,
permission: int = 1,
) -> Optional[int]:
if id is None:

View File

@ -476,10 +476,11 @@ class FE_User(FE_Base):
card_data.append({
'access_code': ac,
'status': status,
'chip_id': "", #None if c['chip_id'] is None else f"{c['chip_id']:X}",
'idm': "",
'chip_id': c['chip_id'],
'idm': c['idm'],
'type': c_type,
"memo": ""
"memo": c['memo'],
"id": c['id'],
})
if "e" in request.query_params:
@ -516,10 +517,106 @@ class FE_User(FE_Base):
return resp
async def edit_card(self, request: Request) -> RedirectResponse:
return RedirectResponse("/user/", 303)
frm = await request.form()
usr_sesh = self.validate_session(request)
if not usr_sesh or not self.test_perm(usr_sesh.permissions, PermissionOffset.USERMOD):
return RedirectResponse("/gate/", 303)
frm = await request.form()
cid = frm.get("card_edit_frm_card_id", None)
if not cid:
return RedirectResponse("/user/?e=999", 303)
ac = frm.get("card_edit_frm_access_code", None)
if not ac:
return RedirectResponse("/user/?e=999", 303)
card = await self.data.card.get_card_by_id(cid)
if not card:
return RedirectResponse("/user/?e=2", 303)
if card['user'] != usr_sesh.user_id and not self.test_perm_minimum(usr_sesh.permissions, PermissionOffset.USERMOD):
return RedirectResponse("/user/?e=11", 303)
if frm.get("add_memo", None) or frm.get("add_memo", None) == "":
memo = frm.get("add_memo")
if len(memo) > 16:
return RedirectResponse("/user/?e=4", 303)
await self.data.card.set_memo_by_access_code(ac, memo)
if False: # Saving this in case I want to allow editing idm/chip ID down the line
if frm.get("add_felica_idm", None):
idm = frm.get('add_felica_idm')
if not all(c in string.hexdigits for c in idm):
return RedirectResponse("/user/?e=4", 303)
await self.data.card.set_idm_by_access_code(ac, idm)
if frm.get("add_mifare_chip_id", None):
chip_id: str = frm.get('add_mifare_chip_id')
if not all(c in string.hexdigits for c in chip_id):
return RedirectResponse("/user/?e=4", 303)
await self.data.card.set_chip_id_by_access_code(ac, int(chip_id, 16))
return RedirectResponse("/user/?s=4", 303)
async def add_card(self, request: Request) -> RedirectResponse:
return RedirectResponse("/user/", 303)
frm = await request.form()
card_type = frm.get("card_add_frm_type", None)
access_code = frm.get("add_access_code", None)
idm = frm.get("add_idm", None)
idm_caps = None
usr_sesh = self.validate_session(request)
if not usr_sesh or not self.test_perm(usr_sesh.permissions, PermissionOffset.USERMOD):
return RedirectResponse("/gate/", 303)
if not len(access_code) == 20 or (not access_code.startswith("5") and not access_code.startswith("3") \
and not access_code.startswith("010") and not access_code.startswith("0008")):
return RedirectResponse("/user/?e=4", 303)
if card_type == "0" and access_code.startswith("5") and len(idm) == 16:
idm_caps = idm.upper()
if not all([x in string.hexdigits for x in idm_caps]):
return RedirectResponse("/user/?e=4", 303)
if access_code.startswith("5") and not idm_caps:
return RedirectResponse("/user/?e=13", 303)
test = await self.data.card.get_card_by_access_code(access_code)
if test:
return RedirectResponse("/user/?e=12", 303)
if idm_caps:
test = await self.data.card.get_card_by_idm(idm_caps)
if test and test['user'] != usr_sesh.user_id:
return RedirectResponse("/user/?e=12", 303)
test = await self.data.card.get_card_by_access_code(self.data.card.to_access_code(idm_caps))
if test:
if test['user'] != usr_sesh.user_id:
return RedirectResponse("/user/?e=12", 303)
await self.data.card.set_access_code_by_access_code(test['access_code'], access_code)
self.logger.info(f"Update card {test['id']} from {test['access_code']} to {access_code} for user {usr_sesh.user_id}")
await self.data.card.set_idm_by_access_code(access_code, idm_caps)
self.logger.info(f"Set IDm for card {access_code} to {idm_caps}")
return RedirectResponse("/user/?s=1", 303)
if card_type == "0" and access_code.startswith("0008"):
test = await self.data.card.get_card_by_idm(self.data.card.to_idm(access_code))
if test:
return RedirectResponse("/user/?e=12", 303)
new_card = await self.data.card.create_card(usr_sesh.user_id, access_code)
self.logger.info(f"Created new card {new_card} with access code {access_code} for user {usr_sesh.user_id}")
if idm_caps:
await self.data.card.set_idm_by_access_code(access_code, idm_caps)
self.logger.info(f"Set IDm for card {access_code} to {idm_caps}")
return RedirectResponse("/user/?s=1", 303)
async def render_POST(self, request: Request):
frm = await request.form()
@ -791,14 +888,19 @@ class FE_System(FE_Base):
return RedirectResponse("/sys/?e=4", 303)
if not serial:
serial = self.data.arcade.format_serial("A69E", 1, random.randint(1, 9999))
append = self.data.arcade.get_keychip_suffix(datetime.now().year, datetime.now().month)
generated = await self.data.arcade.get_num_generated_keychips()
if not generated:
generated = 0
serial = self.data.arcade.format_serial("A69A", 1, "A", generated + 1, int(append))
serial_dash = self.data.arcade.format_serial("A69A", 1, "A", generated + 1, int(append), True)
cab_id = await self.data.arcade.create_machine(int(shopid), serial, None, game_code if game_code else None)
return Response(template.render(
title=f"{self.core_config.server.name} | System",
sesh=vars(usr_sesh),
cabadd={"id": cab_id, "serial": serial},
cabadd={"id": cab_id, "serial": serial_dash},
), media_type="text/html; charset=utf-8")
async def render_logs(self, request: Request):

View File

@ -46,7 +46,11 @@ var per_page = 0;
var page = 0;
function update_tbl() {
if (TBL_DATA.length == 0) { return; }
if (TBL_DATA.length == 0) {
document.getElementById("btn_next").disabled = true;
document.getElementById("btn_prev").disabled = true;
return;
}
var tbl = document.getElementById("tbl_events");
for (var i = 0; i < per_page; i++) {
@ -183,7 +187,7 @@ function chg_page(num) {
document.getElementById("btn_prev").disabled = true;
return;
} else if (page == 0) {
document.getElementById("btn_next").disabled = false;
document.getElementById("btn_next").disabled = TBL_DATA.length == 0;
document.getElementById("btn_prev").disabled = true;
} else {
document.getElementById("btn_next").disabled = false;

View File

@ -28,12 +28,28 @@ function toggle_add_card_form() {
}
}
function prep_edit_form(access_code, chip_id, idm, card_type, u_memo) {
function toggle_idm_disabled(is_disabled) {
let btn = document.getElementById("btn_add_card");
let dv = document.getElementById("add_card_container")
if (dv.style['display'] != "") {
btn.innerText = "Cancel";
dv.style['display'] = "";
} else {
btn.innerText = "Add";
dv.style['display'] = "none";
}
}
function prep_edit_form(access_code, chip_id, idm, card_type, u_memo, card_id) {
ac = document.getElementById("card_edit_frm_access_code");
cid = document.getElementById("card_edit_frm_chip_id");
fidm = document.getElementById("card_edit_frm_idm");
memo = document.getElementById("card_edit_frm_memo");
document.getElementById("card_edit_frm_card_id").value = card_id;
if (chip_id == "None" || chip_id == undefined) {
chip_id = ""
}
@ -49,11 +65,14 @@ function prep_edit_form(access_code, chip_id, idm, card_type, u_memo) {
fidm.value = idm;
memo.value = u_memo;
if (card_type == "AmusementIC") {
if (access_code.startsWith("3") || access_code.startsWith("010")) {
cid.disabled = false;
fidm.disabled = true;
} else if (access_code.startsWith("5") || access_code.startsWith("0008")) {
cid.disabled = true;
fidm.disabled = false;
} else {
cid.disabled = false;
cid.disabled = true;
fidm.disabled = true;
}
}
@ -84,16 +103,35 @@ Card added successfully
{% endif %}
<div id="add_card_container" style="display: none; max-width: 33%;">
<form action="/user/add.card" method="post", id="frm_add_card">
<div class="form-check">
<input type="radio" id="card_add_frm_type_aicc" value="0" name="card_add_frm_type" aria-describedby="aicc_help" onclick="document.getElementById('card_add_frm_idm').disabled = false;">
<label class="form-label" for="card_add_frm_type_aicc">AmusementIC</label>
<div id="aicc_help" class="form-text">Starts with 5. If you don't have the IDm, use the 0008 access code shown in-game</div>
<br>
<input type="radio" id="card_add_frm_type_old" value="1" name="card_add_frm_type" aria-describedby="old_help" onclick="document.getElementById('card_add_frm_idm').disabled = true;">
<label class="form-label" for="card_add_frm_type_old">Old Aime/Banapass</label>
<div id="old_help" class="form-text">Starts with 010 (aime) or 3 (banapass)</div>
</div>
<label class="form-label" for="card_add_frm_access_code">Access Code:</label>
<input class="form-control" name="add_access_code" id="card_add_frm_access_code" maxlength="20" type="text" required aria-describedby="ac_help">
<div id="ac_help" class="form-text">20 digit code on the back of the card.</div>
<label class="form-label" for="card_add_frm_access_code">IDm:</label>
<input class="form-control" name="add_idm" id="card_add_frm_idm" maxlength="16" type="text" aria-describedby="idm_help">
<div id="idm_help" class="form-text">AmusementIC cards only! 16 hexidecimal digits, sometimes called the serial number, gotten by scanning the card with a reader.</div>
<br>
<button type="submit" class="btn btn-primary">Add</button>
</form>
<br>
</div>
{% if success is defined and success == 4 %}
<div style="background-color: #00AA00; padding: 20px; margin-bottom: 10px; width: 15%;">
Update successful
</div>
{% endif %}
<ul style="font-size: 20px;">
{% for c in cards %}
<li>{{ c.access_code }} ({{ c.type}}): {{ c.status }}&nbsp;<button onclick="prep_edit_form('{{ c.access_code }}', '{{ c.chip_id}}', '{{ c.idm }}', '{{ c.type }}', '{{ c.memo }}')" data-bs-toggle="modal" data-bs-target="#card_edit" class="btn btn-secondary" id="btn_edit_card_{{ c.access_code }}">Edit</button>&nbsp;{% if c.status == 'Active'%}<button class="btn-warning btn">Lock</button>{% elif c.status == 'Locked' %}<button class="btn-warning btn">Unlock</button>{% endif %}&nbsp;<button class="btn-danger btn">Delete</button></li>
<li>{{ c.access_code }} ({{ c.type if c.memo is none or not c.memo else c.memo }}): {{ c.status }}&nbsp;<button onclick="prep_edit_form('{{ c.access_code }}', '{{ c.chip_id}}', '{{ c.idm }}', '{{ c.type }}', '{{ c.memo }}', '{{ c.id }}')" data-bs-toggle="modal" data-bs-target="#card_edit" class="btn btn-secondary" id="btn_edit_card_{{ c.access_code }}">View</button>&nbsp;{% if c.status == 'Active'%}<button class="btn-warning btn">Lock</button>{% elif c.status == 'Locked' %}<button class="btn-warning btn">Unlock</button>{% endif %}&nbsp;<button class="btn-danger btn" {{ "disabled" if cards|length == 1 else ""}}>Delete</button></li>
{% endfor %}
</ul>
@ -142,31 +180,31 @@ Update successful
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<h1 class="modal-title fs-5" id="card_edit_label">Edit Card</h1>
<h1 class="modal-title fs-5" id="card_edit_label">Card Information</h1>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<form action="/user/edit.card" method="post" id="frm_edit_card">
<input type="hidden" readonly name="card_edit_frm_card_id" id="card_edit_frm_card_id">
<label class="form-label" for="card_edit_frm_access_code">Access Code:</label>
<input class="form-control" readonly name="add_access_code" id="card_edit_frm_access_code" maxlength="20" type="text" required aria-describedby="ac_help">
<input class="form-control-plaintext" readonly name="card_edit_frm_access_code" id="card_edit_frm_access_code" maxlength="20" type="text" required aria-describedby="ac_help">
<div id="ac_help" class="form-text">20 digit code on the back of the card. If this is incorrect, contact a sysadmin.</div>
<label class="form-label" for="card_edit_frm_idm" id="card_edit_frm_idm_lbl">FeliCa IDm:</label>
<input class="form-control-plaintext" aria-describedby="idm_help" name="add_felica_idm" id="card_edit_frm_idm" maxlength="16" type="text" readonly>
<div id="idm_help" class="form-text">8 bytes that uniquly idenfites a FeliCa card. Obtained by reading the card with an NFC reader.</div>
<label class="form-label" for="card_edit_frm_chip_id" id="card_edit_frm_chip_id_lbl">Mifare UID:</label>
<input class="form-control-plaintext" aria-describedby="chip_id_help" name="add_mifare_chip_id" id="card_edit_frm_chip_id" maxlength="8" type="text" readonly>
<div id="chip_id_help" class="form-text">4 byte integer that uniquly identifies a Mifare card. Obtained by reading the card with an NFC reader.</div>
<label class="form-label" for="card_edit_frm_memo" id="card_edit_frm_memo_lbl">Memo:</label>
<input class="form-control" aria-describedby="memo_help" name="add_memo" id="card_edit_frm_memo" maxlength="16" type="text">
<div id="memo_help" class="form-text">Must be 16 characters or less.</div>
<label class="form-label" for="card_edit_frm_idm" id="card_edit_frm_idm_lbl">FeliCa IDm:</label>
<input class="form-control" aria-describedby="idm_help" name="add_felica_idm" id="card_edit_frm_idm" maxlength="16" type="text">
<div id="idm_help" class="form-text">8 bytes that uniquly idenfites a FeliCa card. Obtained by reading the card with an NFC reader.</div>
<label class="form-label" for="card_edit_frm_chip_id" id="card_edit_frm_chip_id_lbl">Mifare UID:</label>
<input class="form-control" aria-describedby="chip_id_help" name="add_mifare_chip_id" id="card_edit_frm_chip_id" maxlength="8" type="text">
<div id="chip_id_help" class="form-text">4 byte integer that uniquly identifies a Mifare card. Obtained by reading the card with an NFC reader.</div>
</form>
</div>
<div class="modal-footer">
<button type="submit" class="btn btn-primary" form="frm_edit_card">Edit</button>
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
<button type="submit" class="btn btn-primary">Update Memo</button>
</form>
</div>
</div>
</div>

View File

@ -23,6 +23,10 @@ You must be logged in to preform this action
Invalid serial number
{% elif error == 11 %}
Access Denied
{% elif error == 12 %}
Card already registered
{% elif error == 13 %}
AmusementIC Access Codes beginning with 5 must have an IDm
{% else %}
An unknown error occurred
{% endif %}

View File

@ -1,18 +1,47 @@
from typing import Dict, Any, Optional
from types import ModuleType
from starlette.requests import Request
import logging
import importlib
from os import walk
import jwt
import logging
from base64 import b64decode
from datetime import datetime, timezone
from os import walk
from types import ModuleType
from typing import Any, Dict, Optional
import jwt
from starlette.requests import Request
from .config import CoreConfig
class _MissingSentinel:
__slots__: tuple[str, ...] = ()
def __eq__(self, other) -> bool:
return False
def __bool__(self) -> bool:
return False
def __hash__(self) -> int:
return 0
def __repr__(self):
return "..."
MISSING: Any = _MissingSentinel()
"""This is different from `None` in that its type is `Any`, and so it can be used
as a placeholder for values that are *definitely* going to be initialized,
so they don't have to be typed as `T | None`, which makes type checkers
angry when an attribute is accessed.
This can also be used for when `None` has actual meaning as a value, and so a
separate value is needed to mean "unset"."""
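# A minimal illustrative sketch (not part of this module) of how MISSING is intended
# to be used: as a placeholder default that is definitely assigned before first use.
#
#     some_attr: Any = MISSING          # hypothetical attribute, filled in at startup
#     ...
#     if some_attr is MISSING:
#         raise RuntimeError("some_attr was never initialized")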
class Utils:
real_title_port = None
real_title_port_ssl = None
@classmethod
def get_all_titles(cls) -> Dict[str, ModuleType]:
ret: Dict[str, Any] = {}
@ -36,27 +65,56 @@ class Utils:
def get_ip_addr(cls, req: Request) -> str:
ip = req.headers.get("x-forwarded-for", req.client.host)
return ip.split(", ")[0]
@classmethod
def get_title_port(cls, cfg: CoreConfig):
if cls.real_title_port is not None: return cls.real_title_port
if cls.real_title_port is not None:
return cls.real_title_port
cls.real_title_port = (
cfg.server.proxy_port
if cfg.server.is_using_proxy and cfg.server.proxy_port
else cfg.server.port
)
cls.real_title_port = cfg.server.proxy_port if cfg.server.is_using_proxy and cfg.server.proxy_port else cfg.server.port
return cls.real_title_port
@classmethod
def get_title_port_ssl(cls, cfg: CoreConfig):
if cls.real_title_port_ssl is not None: return cls.real_title_port_ssl
if cls.real_title_port_ssl is not None:
return cls.real_title_port_ssl
cls.real_title_port_ssl = (
cfg.server.proxy_port_ssl
if cfg.server.is_using_proxy and cfg.server.proxy_port_ssl
else 443
)
cls.real_title_port_ssl = cfg.server.proxy_port_ssl if cfg.server.is_using_proxy and cfg.server.proxy_port_ssl else 443
return cls.real_title_port_ssl
def create_sega_auth_key(aime_id: int, game: str, place_id: int, keychip_id: str, b64_secret: str, exp_seconds: int = 86400, err_logger: str = 'aimedb') -> Optional[str]:
def create_sega_auth_key(
aime_id: int,
game: str,
place_id: int,
keychip_id: str,
b64_secret: str,
exp_seconds: int = 86400,
err_logger: str = "aimedb",
) -> Optional[str]:
logger = logging.getLogger(err_logger)
try:
return jwt.encode({ "aime_id": aime_id, "game": game, "place_id": place_id, "keychip_id": keychip_id, "exp": int(datetime.now(tz=timezone.utc).timestamp()) + exp_seconds }, b64decode(b64_secret), algorithm="HS256")
return jwt.encode(
{
"aime_id": aime_id,
"game": game,
"place_id": place_id,
"keychip_id": keychip_id,
"exp": int(datetime.now(tz=timezone.utc).timestamp()) + exp_seconds,
},
b64decode(b64_secret),
algorithm="HS256",
)
except jwt.InvalidKeyError:
logger.error("Failed to encode Sega Auth Key because the secret is invalid!")
return None
@ -64,10 +122,19 @@ def create_sega_auth_key(aime_id: int, game: str, place_id: int, keychip_id: str
logger.error(f"Unknown exception occoured when encoding Sega Auth Key! {e}")
return None
def decode_sega_auth_key(token: str, b64_secret: str, err_logger: str = 'aimedb') -> Optional[Dict]:
def decode_sega_auth_key(
token: str, b64_secret: str, err_logger: str = "aimedb"
) -> Optional[Dict]:
logger = logging.getLogger(err_logger)
try:
return jwt.decode(token, "secret", b64decode(b64_secret), algorithms=["HS256"], options={"verify_signature": True})
return jwt.decode(
token,
"secret",
b64decode(b64_secret),
algorithms=["HS256"],
options={"verify_signature": True},
)
except jwt.ExpiredSignatureError:
logger.error("Sega Auth Key failed to validate due to an expired signature!")
return None
@ -83,4 +150,3 @@ def decode_sega_auth_key(token: str, b64_secret: str, err_logger: str = 'aimedb'
except Exception as e:
logger.error(f"Unknown exception occoured when decoding Sega Auth Key! {e}")
return None

View File

@ -1,14 +1,15 @@
#!/usr/bin/env python3
import argparse
import logging
from os import mkdir, path, access, W_OK
import yaml
import asyncio
import logging
from os import W_OK, access, environ, mkdir, path
import yaml
from core.data import Data
from core.config import CoreConfig
from core.data import Data
if __name__ == "__main__":
async def main():
parser = argparse.ArgumentParser(description="Database utilities")
parser.add_argument(
"--config", "-c", type=str, help="Config folder to use", default="config"
@ -25,10 +26,11 @@ if __name__ == "__main__":
parser.add_argument("action", type=str, help="create, upgrade, downgrade, create-owner, migrate, create-revision, create-autorevision")
args = parser.parse_args()
environ["ARTEMIS_CFG_DIR"] = args.config
cfg = CoreConfig()
if path.exists(f"{args.config}/core.yaml"):
cfg_dict = yaml.safe_load(open(f"{args.config}/core.yaml"))
cfg_dict.get("database", {})["loglevel"] = "info"
cfg.update(cfg_dict)
if not path.exists(cfg.server.log_dir):
@ -43,7 +45,7 @@ if __name__ == "__main__":
data = Data(cfg)
if args.action == "create":
data.create_database()
await data.create_database()
elif args.action == "upgrade":
data.schema_upgrade(args.version)
@ -55,20 +57,20 @@ if __name__ == "__main__":
data.schema_downgrade(args.version)
elif args.action == "create-owner":
loop = asyncio.get_event_loop()
loop.run_until_complete(data.create_owner(args.email, args.access_code))
await data.create_owner(args.email, args.access_code)
elif args.action == "migrate":
loop = asyncio.get_event_loop()
loop.run_until_complete(data.migrate())
await data.migrate()
elif args.action == "create-revision":
loop = asyncio.get_event_loop()
loop.run_until_complete(data.create_revision(args.message))
await data.create_revision(args.message)
elif args.action == "create-autorevision":
loop = asyncio.get_event_loop()
loop.run_until_complete(data.create_revision_auto(args.message))
await data.create_revision_auto(args.message)
else:
logging.getLogger("database").info(f"Unknown action {args.action}")
if __name__ == "__main__":
asyncio.run(main())

View File

@ -1,129 +0,0 @@
# ARTEMiS - Ubuntu 20.04 LTS Guide
This step-by-step guide assumes that you are using a fresh install of Ubuntu 20.04 LTS; some of the steps can be skipped if you already have an installation with MySQL 5.7 or some of the modules already present in your environment.
# Setup
## Install memcached module
1. sudo apt-get install memcached
2. Under the file /etc/memcached.conf, please make sure the following parameters are set:
```
# Start with a cap of 64 megs of memory. It's reasonable, and the daemon default
# Note that the daemon will grow to this size, but does not start out holding this much
# memory
-I 128m
-m 1024
```
** This is mandatory to avoid memcached overload caused by Crossbeats or by massive profiles
3. Restart memcached using: sudo systemctl restart memcached
## Install MySQL 5.7
```
sudo apt update
sudo apt install wget -y
wget https://dev.mysql.com/get/mysql-apt-config_0.8.12-1_all.deb
sudo dpkg -i mysql-apt-config_0.8.12-1_all.deb
```
1. During the first prompt, select Ubuntu Bionic
2. Select the default option
3. Select MySQL 5.7
4. Select the last option
```
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 467B942D3A79BD29
sudo apt-get update
sudo apt-cache policy mysql-server
sudo apt install -f mysql-client=5.7* mysql-community-server=5.7* mysql-server=5.7*
```
## Default Configuration for MySQL Server
1. sudo mysql_secure_installation
> Make sure to follow the steps that will be prompted such as changing the mysql root password and such
2. Test your MySQL Server login by doing the following command :
> mysql -u root -p
## Create the default ARTEMiS database and user
1. mysql -u root -p
2. Please change the password indicated in the next line for a custom secure one and continue with the next commands
```
CREATE USER 'aime'@'localhost' IDENTIFIED BY 'MyStrongPass.';
CREATE DATABASE aime;
GRANT Alter,Create,Delete,Drop,Index,Insert,References,Select,Update ON aime.* TO 'aime'@'localhost';
FLUSH PRIVILEGES;
exit;
```
3. sudo systemctl restart mysql
## Install Python modules
```
sudo apt-get install python3-dev default-libmysqlclient-dev build-essential mysql-client libmysqlclient-dev libmemcached-dev
sudo apt install libpython3.8-dev
sudo apt-get install python3-software-properties
sudo apt install python3-pip
sudo pip3 install --upgrade pip testresources
sudo pip3 install --upgrade pip setuptools
sudo apt-get install python3-tk
```
7. Change your work path to the ARTEMiS root folder using 'cd' and install the requirements:
> sudo python3 -m pip install -r requirements.txt
## Copy/Rename the folder example_config to config
## Adjust /config/core.yaml
1. Make sure to change the server listen_address to be set to your local machine IP (ex.: 192.168.1.xxx)
2. Adjust the proper MySQL information you created earlier
3. Add the AimeDB key at the bottom of the file
## Create the database tables for ARTEMiS
1. sudo python3 dbutils.py create
2. If you get "No module named Crypto", run the following command:
```
sudo pip uninstall crypto
sudo pip uninstall pycrypto
sudo pip install pycrypto
```
## Firewall Adjustments
```
sudo ufw allow 80
sudo ufw allow 443
sudo ufw allow 8443
sudo ufw allow 22345
sudo ufw allow 8090
sudo ufw allow 8444
sudo ufw allow 8080
```
## Running the ARTEMiS instance
1. sudo python3 index.py
# Troubleshooting
## Game does not connect to ARTEMiS Allnet server
1. Double-check your core.yaml; the listen_address is most likely either not bound to the proper IP or the port is not open
## Game does not connect to Title Server
1. Verify that your core.yaml is setup properly for both the server listen_address and title hostname
2. Boot your game and verify that an AllNet response does show and if it does, attempt to open the URI that is shown under a browser such as Edge, Chrome & Firefox.
3. If a page is shown, the server is working properly and if it doesn't, double check your port forwarding and also that you have entered the proper local IP under the Title hostname in core.yaml.
## Unhandled command under AimeDB
1. Double check your AimeDB key under core.yaml, it is incorrect.
## Memcache failed, error 3
1. Make sure memcached is properly installed and running. You can check the status of the service using the following command:
> sudo systemctl status memcached
2. If it is failing, double check the /etc/memcached.conf file, it may have duplicated arguments like the -I and -m
3. If it is still not working afterward, you can proceed with a workaround by manually editing the /core/data/cache.py file.
```
# Make memcache optional
try:
has_mc = False
except ModuleNotFoundError:
has_mc = False
```

View File

@ -59,7 +59,7 @@ GRANT Alter,Create,Delete,Drop,Index,Insert,References,Select,Update ON aime.* T
- Put the password you created for the aime user into the `database` section.
- Put in the aimedb key (YOU DO NOT GENERATE THIS KEY, FIND IT SOMEWHERE).
- Set your hostname to be whatever hostname or IP address games can reach your server at (many games reject localhost and 127.0.0.1).
- Optional: generate base64-encoded secrets for aimedb and frontend.
- Optional: generate base64-encoded secrets for aimedb and frontend using something like `openssl rand -base64 64`. It is advised to make all secrets different.
- See [config.md](docs/config.md) for a full list of options.
- edit `idz.yaml`
- If you don't plan on anyone using your server to play Initial D Zero, it is best to disable it to cut down on console spam on boot.

View File

@ -1,102 +0,0 @@
# ARTEMiS - Windows 10/11 Guide
This step-by-step guide assumes that you are using a fresh install of Windows 10/11 without MySQL installed; some of the steps can be skipped if you already have an installation with MySQL 8.0 or some of the modules already present in your environment.
# Setup
## Install Python 3.9 (recommended) or 3.10
1. Download Python 3.9 : [Link](https://www.python.org/ftp/python/3.9.13/python-3.9.13-amd64.exe)
2. Install python-3.9.13-amd64.exe
1. Select Customize installation
2. Make sure that pip, tcl/tk, and the "for all users" options are checked and hit Next
3. Make sure that you enable "Create shortcuts for installed applications" and "Add Python to environment variables" and hit Install
## Install MySQL 8.0
1. Download MySQL 8.0 Server : [Link](https://dev.mysql.com/get/Downloads/MySQLInstaller/mysql-installer-community-8.0.34.0.msi)
2. Install mysql-installer-web-community-8.0.34.0.msi
1. Click on "Add ..." on the side
2. Click on the "+" next to MySQL Servers
3. Make sure MySQL Server 8.0.34 - X64 is under the products to be installed.
4. Hit Next and Next once installed
5. Select the configuration type "Development Computer"
6. Hit Next
7. Select "Use Legacy Authentication Method (Retain MySQL 5.x compatibility)" and hit Next
8. Enter a root password and then hit Next >
9. Leave everything under Windows Service as default and hit Next >
10. Click on Execute, wait for it to finish, then hit Next > and then Finish
3. Open MySQL 8.0 Command Line Client and login as your root user
4. Change `<Enter Password Here>` to a new password for the user aime, type those commands to create your user and the database
```sql
CREATE USER 'aime'@'localhost' IDENTIFIED BY '<Enter Password Here>';
CREATE DATABASE aime;
GRANT Alter,Create,Delete,Drop,Index,Insert,References,Select,Update ON aime.* TO 'aime'@'localhost';
FLUSH PRIVILEGES;
exit;
```
## Install Python modules
1. Change your work path to the artemis-master folder using 'cd' and install the requirements:
```shell
pip install -r requirements.txt
```
## Copy/Rename the folder `example_config` to `config`
## Adjust `config/core.yaml`
1. Make sure to change the server `hostname` to be set to your local machine IP (ex.: 192.168.xxx.xxx)
- In case you want to run this only locally, set the following values:
```yaml
server:
listen_address: 0.0.0.0
title:
hostname: 192.168.xxx.xxx
```
1. Adjust the proper MySQL information you created earlier
```yaml
database:
host: "localhost"
username: "aime"
password: "<Enter Password Here>"
name: "aime"
```
3. Add the AimeDB key at the bottom of the file
4. If the webui is needed, change the flag from False to True
## Create the database tables for ARTEMiS
```shell
python dbutils.py create
```
## Firewall Adjustments
Make sure the following ports are open both on your router and local Windows firewall in case you want to use this for public use (NOT recommended):
> Port 80 (TCP), 443 (TCP), 8443 (TCP), 22345 (TCP), 8080 (TCP), 8090 (TCP) **webui, 8444 (TCP) **mucha
## Running the ARTEMiS instance
```shell
python index.py
```
# Troubleshooting
## Game does not connect to ARTEMiS Allnet server
1. Double-check your core.yaml; the listen_address is most likely either not bound to the proper IP or the port is not open
## Game does not connect to Title Server
1. Verify that your core.yaml is setup properly for both the server listen_address and title hostname
2. Boot your game and verify that an AllNet response does show and if it does, attempt to open the URI that is shown under a browser such as Edge, Chrome & Firefox.
3. If a page is shown, the server is working properly and if it doesn't, double check your port forwarding and also that you have entered the proper local IP under the Title hostname in core.yaml.
## Unhandled command under AimeDB
1. Double check your AimeDB key under core.yaml, it is incorrect.
## AttributeError: module 'collections' has no attribute 'Hashable'
1. This means the pyYAML module is obsolete, simply rerun pip with the -U (force update) flag, as shown below.
- Change your work path to the artemis-master (or artemis-develop) folder using 'cd' and run the following commands:
```shell
pip install -r requirements.txt -U
```

View File

@ -17,8 +17,8 @@
- `strict_ip_checking`: Rejects clients if there is no IP in the `arcade` table for the respective arcade. Default `False`
## Title
- `loglevel`: Logging level for the title server. Default `info`
- `reboot_start_time`: 24 hour JST time that clients will see as the start of maintenance period, ex `04:00`. Leave blank for no maintenance time. Default: `""`
- `reboot_end_time`: 24 hour JST time that clients will see as the end of maintenance period, ex `05:00`. Leave blank for no maintenance time. Default: `""`
- `reboot_start_time`: 24 hour JST time that clients will see as the start of the maintenance period, ex `04:00`. A few games or early versions (ex. maimai DX 1.00) will report errors if this is left empty.
- `reboot_end_time`: 24 hour JST time that clients will see as the end of the maintenance period, ex `07:00`. Some games require this to be `07:00`; it is best not to change it.
## Database
- `host`: Host of the database. Default `localhost`
- `username`: Username of the account the server should connect to the database with. Default `aime`
@ -26,6 +26,7 @@
- `name`: Name of the database the server should expect. Default `aime`
- `port`: Port the database server is listening on. Default `3306`
- `protocol`: Protocol used in the connection string, e.g. `mysql` would result in `mysql://...`. Default `mysql`
- `ssl_enabled`: Enforce SSL to be used in the connection string. Default `False`
- `sha2_password`: Whether or not the password in the connection string should be hashed via SHA2. Default `False`
- `loglevel`: Logging level for the database. Default `info`
- `memcached_host`: Host of the memcached server. Default `localhost`

View File

@ -26,11 +26,14 @@ python dbutils.py migrate
- [CHUNITHM](#chunithm)
- [crossbeats REV.](#crossbeats-rev)
- [maimai DX](#maimai-dx)
- [Project Diva](#hatsune-miku-project-diva)
- [O.N.G.E.K.I.](#o-n-g-e-k-i)
- [Card Maker](#card-maker)
- [WACCA](#wacca)
- [Sword Art Online Arcade](#sao)
- [Initial D Zero](#initial-d-zero)
- [Initial D THE ARCADE](#initial-d-the-arcade)
- [Pokken Tournament](#pokken)
# Supported Games
@ -74,27 +77,36 @@ In order to use the importer locate your game installation folder and execute:
python read.py --game SDBT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
```
The importer for Chunithm will import: Events, Music, Charge Items and Avatar Accessories.
The importer for Chunithm will import: Events, Music, Charge Items, Avatar Accessories, Nameplates, Characters, Trophies, Map Icons, and System Voices.
### Config
Config file is located in `config/chuni.yaml`.
| Option | Info |
|------------------|----------------------------------------------------------------------------------------------------------------|
| `news_msg` | If this is set, the news at the top of the main screen will be displayed (up to Chunithm Paradise Lost) |
| `name` | If this is set, all players that are not on a team will use this one by default. |
| `use_login_bonus`| This is used to enable the login bonuses |
| `crypto` | This option is used to enable the TLS Encryption |
| Option | Info |
|-----------------------|-------------------------------------------------------------------------------------------------------------------------------------------|
| `news_msg` | If this is set, the news at the top of the main screen will be displayed (up to Chunithm Paradise Lost) |
| `name` | If this is set, all players that are not on a team will use this one by default. |
| `use_login_bonus` | This is used to enable the login bonuses |
| `stock_tickets` | If this is set, specifies tickets to auto-stock at login. Format is a comma-delimited list of IDs. Defaults to None |
| `stock_count` | Ignored if stock_tickets is not specified. Number to stock of each ticket. Defaults to 99 |
| `forced_item_unlocks` | Frontend UI customization overrides that allow all items of given types to be used (instead of just those unlocked/purchased by the user) |
| `crypto` | This option is used to enable the TLS Encryption |
**If you would like to use network encryption, the following will be required underneath but key, iv and hash are required:**
If you would like to use network encryption, add the keys to the `keys` section under `crypto`, where the key
is the version ID for Japanese (SDHD) versions and `"{versionID}_int"` for Export (SDGS) versions, and the value
is an array containing `[key, iv, salt, iter_count]` in order.
`iter_count` is optional for all Japanese (SDHD) versions but may be required for some Export (SDGS) versions.
You will receive an error in the logs if it needs to be specified.
```yaml
crypto:
encrypted_only: False
keys:
13: ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000"]
"13_int": ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000", 42]
```
### Database upgrade
@ -142,12 +154,15 @@ INSERT INTO aime.chuni_profile_team (teamName) VALUES (<teamName>);
Team names can be regular ASCII, and they will be displayed ingame.
### Favorite songs
You can set the songs that will be in a user's Favorite Songs category using the following SQL entries:
Favorites can be set through the Frontend Web UI for songs previously played. Alternatively, you can set the songs that will be in a user's Favorite Songs category using the following SQL entries:
```sql
INSERT INTO aime.chuni_item_favorite (user, version, favId, favKind) VALUES (<user>, <version>, <songId>, 1);
```
The songId is based on the actual ID within your version of Chunithm.
### Profile Customization
The Frontend Web UI supports configuration of the userbox, avatar (NEW!! and newer), map icon (AMAZON and newer), and system voice (AMAZON and newer).
## crossbeats REV.
@ -176,6 +191,14 @@ Config file is located in `config/cxb.yaml`.
## maimai DX
### Presents
Presents are items given to the user when they login, with a little animation (for example, the KOP song was given to the finalists as a present). To add a present, you must insert it into the `mai2_item_present` table. In that table, a NULL version means any version, a NULL user means any user, a NULL start date means always open, and a NULL end date means it never expires. Below is a list of presents one might wish to add:
| Game Version | Item ID | Item Kind | Item Description | Present Description |
|--------------|---------|-----------|-------------------------------------------------|------------------------------------------------|
| BUDDiES (21) | 409505 | Icon (3) | 旅行スタンプ(月面基地) (Travel Stamp - Moon Base) | Officially obtained on the webui with a serial number, for project raputa |
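As a sketch, such an insert might look like this (the column names are assumptions based on the description above; check the actual `mai2_item_present` schema before running anything):
```sql
-- Hypothetical example: grant the travel stamp icon above to every user,
-- on any version, with no start or end date (i.e. always available).
INSERT INTO aime.mai2_item_present (version, user, itemKind, itemId, startDate, endDate)
VALUES (NULL, NULL, 3, 409505, NULL, NULL);
```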
### Versions
| Game Code | Version ID | Version Name |
@ -202,6 +225,7 @@ Config file is located in `config/cxb.yaml`.
| SDEZ | 19 | maimai DX FESTiVAL |
| SDEZ | 20 | maimai DX FESTiVAL PLUS |
| SDEZ | 21 | maimai DX BUDDiES |
| SDEZ | 22 | maimai DX BUDDiES PLUS |
### Importer
@ -255,10 +279,14 @@ the Shop, Modules and Customizations.
Config file is located in `config/diva.yaml`.
| Option | Info |
| -------------------- | ----------------------------------------------------------------------------------------------- |
| `unlock_all_modules` | Unlocks all modules (costumes) by default, if set to `False` all modules need to be purchased |
| `unlock_all_items` | Unlocks all items (customizations) by default, if set to `False` all items need to be purchased |
| Option | Info |
| -------------------- | ------------------------------------------------------------------------------------------------ |
| `festa_enable` | Enable or disable the ingame festa |
| `festa_add_VP` | Set the extra VP you get when clearing a song, if festa is not enabled no extra VP will be given |
| `festa_multiply_VP` | Multiplier for festa add VP |
| `festa_end_time` | Set the date time for when festa will end and not show up in game anymore |
| `unlock_all_modules` | Unlocks all modules (costumes) by default, if set to `False` all modules need to be purchased |
| `unlock_all_items` | Unlocks all items (customizations) by default, if set to `False` all items need to be purchased |
### Custom PV Lists (databanks)
@ -272,6 +300,23 @@ Always make sure your database (tables) are up-to-date:
python dbutils.py upgrade
```
### Using NGINX
Diva's netcode does not send a `Host` header with its network requests. This renders it incompatible with NGINX as configured in the example config, because nginx relies on that header to determine how to proxy the request. If you'd still like to use NGINX with Diva, please see the sample config below.
```conf
server {
listen 80 default_server;
server_name _;
location / {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass_request_headers on;
proxy_pass http://127.0.0.1:8080/;
}
}
```
## O.N.G.E.K.I.
### SDDT
@ -630,21 +675,32 @@ python dbutils.py upgrade
```
### Notes
- Defrag Match will crash at loading
- Co-Op Online is not supported
- Shop is displayed but cannot purchase heroes or items
- Defrag Match and online coop requires a cloud instance of Photon and a working application ID
- Player title is currently static and cannot be changed in-game
- QR Card Scanning currently only loads a static hero
- Ex-quests progression not supported yet
- QR Card Scanning of existing cards requires them to be registered on the webui
- Daily Missions not implemented
- EX TOWER 1,2 & 3 are not yet supported
- Daily Yui coin not yet fixed
- Terminal functionality is almost entirely untested
### Credits for SAO support:
- Midorica - Network Support
- Dniel97 - Helping with network base
- tungnotpunk - Source
- Hay1tsme - fixing many issues with the original implementation
## Initial D Zero
### SDDF
| Version ID | Version Name |
| ---------- | -------------------- |
| 0 | Initial D Zero v1.10 |
| 1 | Initial D Zero v1.30 |
| 2 | Initial D Zero v2.10 |
| 3 | Initial D Zero v2.30 |
### Info
TODO, probably just leave disabled unless you're doing development things for it.
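To disable it, the relevant setting in `config/idz.yaml` would look roughly like this (assuming it follows the same `server`->`enable` pattern as the other game configs):
```yaml
server:
  enable: False
```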
## Initial D THE ARCADE
@ -776,3 +832,82 @@ python dbutils.py upgrade
A huge thanks to all people who helped shaping this project to what it is now and don't want to be mentioned here.
## Pokken
### SDAK
| Version ID | Version Name |
| ---------- | ------------ |
| 0 | Pokken |
### Config
Config file is `pokken.yaml`
#### server
| Option | Info | Default |
| ------ | ---- | ------- |
| `hostname` | Hostname override for allnet to tell the game where to connect. Useful for local setups that need to use a different hostname for pokken's proxy. Otherwise, it should match `server`->`hostname` in `core.yaml`. | `localhost` |
| `enabled` | `True` if the pokken service should be enabled. `False` otherwise. | `True` |
| `loglevel` | String indicating how verbose pokken logs should be. Acceptable values are `debug`, `info`, `warn`, and `error`. | `info` |
| `auto_register` | For games that don't use aimedb, this controls whether connecting cards that aren't registered should automatically be registered when making a profile. Set to `False` to require cards be already registered before being usable with Pokken. | `True` |
| `enable_matching` | If `True`, allow non-local matching. This doesn't currently work because BIWA, the matching protocol the game uses, is not understood, so this should be set to `False`. | `False` |
| `stun_server_host` | Hostname of the STUN server the game will use for matching. | `stunserver.stunprotocol.org` (might not work anymore; recommend changing) |
| `stun_server_port` | Port for the external STUN server. Will probably be moved to the `ports` section in the future. | `3478` |
#### ports
| Option | Info | Default |
| ------ | ---- | ------- |
| `game` | Override for the title server port sent by allnet. Useful for local setups utilizing NGINX. | `9000` |
| `admission` | Port for the admission server used in global matching. May be obsoleted later. | `9001` |
### Connecting to Artemis
Pokken is a bit tricky to get working due to it having a hard requirement of the connection being HTTPS. This is simplified somewhat by Pokken simply not validating the certificate in any way, shape or form (it can be self-signed, expired, for a different domain, etc.) but it does have to be there. The work-around is to spin up a local NGINX (or other proxy) instance and point traffic back to artemis. See below for a sample nginx config:
`nginx.conf`
```conf
# This example assumes your artemis instance is configured to listed on port 8080, and your certs exists at /path/to/cert and are called title.crt and title.key.
server {
listen 443 ssl;
server_name your.hostname.here;
ssl_certificate /path/to/cert/title.crt;
ssl_certificate_key /path/to/cert/title.key;
ssl_session_timeout 1d;
ssl_session_cache shared:MozSSL:10m;
ssl_session_tickets off;
ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
ssl_ciphers "ALL:@SECLEVEL=0";
ssl_prefer_server_ciphers off;
location / {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass_request_headers on;
proxy_pass http://127.0.0.1:8080/;
}
}
```
`pokken.yaml`
```yaml
server:
hostname: "your.hostname.here"
enable: True
loglevel: "info"
auto_register: True
enable_matching: False
stun_server_host: "stunserver.stunprotocol.org"
stun_server_port: 3478
ports:
game: 443
admission: 9001
```
### Info
The arcade release is missing a few fighters and supports compared to the switch version. It may be possible to mod these in down the line, but not much headway has been made on this as far as I know. Mercifully, the game uses the pokedex number (illustration_book_no) wherever possible when referring to both fighters and supports. Customization is entirely done on the webui. Artemis currently only supports changing your name, gender, and support teams, but more is planned for the future.
### Credits
Special thanks to Pocky for pointing me in the right direction in terms of getting this game to function at all, and Lightning and other pokken cab owners for doing testing and reporting bugs/issues.

docs/migrating.md Normal file
View File

@ -0,0 +1,34 @@
# Migrating from an older build of ARTEMiS
If you haven't updated artemis in a while, you may find that configuration options have moved, been renamed, or no longer exist. This document exists to help migrate from legacy versions of artemis to newer builds.
## Dependencies
Make sure your dependencies are up to date with what's required to run artemis. A simple `pip install -r requirements.txt` will get you up to date.
## Database
Database migration is required if you are using a version of artemis that still uses the old custom-rolled database versioning system (raw SQL scripts). Artemis now uses alembic to manage database versioning, and you will need to move to this new system.
**BEFORE DOING ANY DATABASE WORK, ALWAYS MAKE SURE YOU HAVE FUNCTIONAL, UP-TO-DATE BACKUPS!!**
For almost all situations, simply running `python dbutils.py migrate` will do the job. This will upgrade you to the latest version of the old system, move you over to alembic, then upgrade you to the newest alembic version. If you encounter any errors or data loss, you should report this as a bug to our issue tracker.
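As a rough sketch, the whole process (the database and user names are assumptions taken from the install guides; adjust them to your setup):
```shell
# back up first
mysqldump -u aime -p aime > aime_backup.sql
# then migrate the old versioning system to alembic and upgrade to the latest revision
python dbutils.py migrate
```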
## Configuration
Configuration management is the sewage cleaning of the sysadmin world. It sucks and nobody likes to do it, but it needs to be done or everyone ends up in deep shit. This section will walk through what configuration options have changed, and how to set them properly.
### core.yaml
`title`->`hostname` is now `server`->`hostname`. This hostname is what gets sent to clients in response to auth requests, so it should be both accessible from wherever the client is, and point properly to the title server.
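A minimal sketch of the new location (the hostname value is a placeholder):
```yaml
server:
  hostname: "your.hostname.here"
```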
With the move to starlette and uvicorn, different services now run as separate ASGI applications. `billing`->`standalone` and `allnet`->`standalone` are flags that determine whether the service runs as a stand-alone service on its own separate port, or as part of the whole application. For example, setting `billing`->`standalone` to `True` will cause a separate instance of the billing server to spin up, listening on 8443 with SSL using the certs listed in the config file. Setting it to `False` will just allow the main server to also serve `/request/`, and assumes that something is standing in front of it proxying 8443 SSL to whatever `server`->`port` is set to.
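For example, a sketch of the two flags described above:
```yaml
billing:
  standalone: True   # separate billing instance listening on 8443 with SSL
allnet:
  standalone: False  # allnet served as part of the main application
```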
Beforehand, if `server`->`is_develop` was `False`, the server assumed that there was a proxy standing in front of it, proxying requests to the proper channels. This was, in hindsight, a very dumb assumption. Now, `server`->`is_using_proxy` is what flags the server as having nginx or another proxy in front of it. The effects of setting this to true are somewhat game-dependent, but generally artemis will use the port listed in `server`->`proxy_port` (and `server`->`proxy_port_ssl` for SSL connections, as defined by the games) instead of `server`->`port`. If set to 0, `server`->`proxy_port` will default to `server`->`port` (and `server`->`proxy_port_ssl` will default to 443), so make sure to set them accordingly. Note that some title servers have their own needs and specify their own specific ports. Refer to [game_specific_info.md](docs/game_specific_info.md) for more information. (For example, pokken requires SSL using an older, weaker certificate, and always requires the port to be sent even if it's port 443)
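A sketch of the proxy-related fields (values are placeholders):
```yaml
server:
  is_using_proxy: True
  proxy_port: 80       # 0 falls back to server->port
  proxy_port_ssl: 443  # 0 falls back to 443
```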
`index.py`'s args have changed. You can now override what port the title server listens on with `-p` and tell the server to use ssl with `-s`.
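For example (the port is arbitrary, and `-s` is assumed to be a plain flag that takes no argument):
```shell
python index.py -p 8080 -s
```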
Rather than having a `standalone` config variable, the frontend is a separate ASGI app entirely. Having `enable` be `True` will launch it on the port specified in the config file. Otherwise, the frontend will not run.
`title`->`reboot_start_time`/`reboot_end_time` allow you to specify when the games should be told network maintenance is happening. Its exact implementation depends on the game. Do note that many games will behave unexpectedly if `reboot_end_time` is not `07:00`.
If you wish to make use of aimedb's SegaAuthId system to better protect the few title servers that actually use it, set `aimedb`->`id_secret` to base64-encoded random bytes (32 is a good length) using something like `openssl rand -base64 64`. If you intend to use the frontend, the same thing must be done for `frontend`->`secret` or you won't be able to log in.
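A sketch of the two secrets (generate each value separately with the `openssl` command above):
```yaml
aimedb:
  id_secret: "<base64 output here>"
frontend:
  secret: "<a different base64 output here>"
```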
`mucha`'s only option is now just log level.
`aimedb` now has its own `listen_address` field, in case you want to proxy everything but aimedb, so it can still listen on `0.0.0.0` instead of `127.0.0.1`.
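For example:
```yaml
aimedb:
  listen_address: "0.0.0.0"
```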

View File

@ -8,7 +8,22 @@ team:
mods:
use_login_bonus: True
# stock_tickets allows specified ticket IDs to be auto-stocked at login. Format is a comma-delimited string of ticket IDs
# note: quantity is not refreshed on "continue" after set - only on subsequent login
stock_tickets:
stock_count: 99
# Allow use of all available customization items in frontend web ui
# note: This effectively makes every available item appear to be in the user's inventory. It does _not_ override the "disableFlag" setting on individual items
# warning: This can result in pushing a lot of data, especially the userbox items. Recommended for local network use only.
forced_item_unlocks:
map_icons: False
system_voices: False
avatar_accessories: False
nameplates: False
trophies: False
character_icons: False
version:
11:
rom: 2.00.00

View File

@ -18,7 +18,7 @@ server:
title:
loglevel: "info"
reboot_start_time: "04:00"
reboot_end_time: "05:00"
reboot_end_time: "07:00" # this must be set to 7:00 am for some game, please do not change it
database:
host: "localhost"
@ -27,6 +27,7 @@ database:
name: "aime"
port: 3306
protocol: "mysql"
ssl_enabled: False
sha2_password: False
loglevel: "info"
enable_memcached: True

View File

@ -1,4 +1,4 @@
server:
enable: True
loglevel: "info"
use:https: True
use_https: True

View File

@ -1,6 +1,10 @@
server:
enable: True
loglevel: "info"
festa_enable: True
festa_add_VP: "20,5"
festa_multiply_VP: "1,2"
festa_end_time: "2029-01-01 00:00:00.0"
mods:
unlock_all_modules: True

View File

@ -1,5 +1,5 @@
server:
enable: True
enable: False
loglevel: "info"
hostname: ""
news: ""

View File

@ -2,12 +2,19 @@ server:
enable: True
loglevel: "info"
auto_register: True
photon_app_id: "7df3a2f6-d69d-4073-aafe-810ee61e1cea"
data_version: 1
game_version: 33
crypt:
enable: False
key: ""
iv: ""
hash:
verify_hash: False
hash_base: ""
hash_base: ""
card:
enable: True
crypt_password: ""
crypt_salt: ""

View File

@ -6,7 +6,8 @@ import uvicorn
import logging
import asyncio
from core import CoreConfig, AimedbServlette
from core.config import CoreConfig
from core.aimedb import AimedbServlette
async def launch_main(cfg: CoreConfig, ssl: bool) -> None:
if ssl:

read.py
View File

@ -1,16 +1,16 @@
#!/usr/bin/env python3
import argparse
import re
import os
import yaml
from os import path
import logging
import coloredlogs
import asyncio
import logging
import os
import re
from logging.handlers import TimedRotatingFileHandler
from os import path
from typing import List, Optional
import coloredlogs
import yaml
from core import CoreConfig, Utils
@ -44,7 +44,7 @@ class BaseReader:
pass
if __name__ == "__main__":
async def main():
parser = argparse.ArgumentParser(description="Import Game Information")
parser.add_argument(
"--game",
@ -140,8 +140,12 @@ if __name__ == "__main__":
for dir, mod in titles.items():
if args.game in mod.game_codes:
handler = mod.reader(config, args.version, bin_arg, opt_arg, args.extra)
loop = asyncio.get_event_loop()
loop.run_until_complete(handler.read())
await handler.read()
logger.info("Done")
if __name__ == "__main__":
asyncio.run(main())

View File

@ -29,6 +29,7 @@ Games listed below have been tested and confirmed working. Only game versions ol
+ NEW PLUS
+ SUN
+ SUN PLUS
+ LUMINOUS
+ crossbeats REV.
+ Crossbeats REV.
@ -49,6 +50,7 @@ Games listed below have been tested and confirmed working. Only game versions ol
+ FESTiVAL
+ FESTiVAL PLUS
+ BUDDiES
+ BUDDiES PLUS
+ O.N.G.E.K.I.
+ SUMMER

View File

@ -3,7 +3,7 @@ wheel
pytz
pyyaml
sqlalchemy==1.4.46
mysqlclient
aiomysql
pyopenssl
service_identity
PyCryptodome
@ -21,4 +21,4 @@ starlette
asyncio
uvicorn
alembic
python-multipart
python-multipart

View File

@ -1,16 +1,16 @@
import logging
import itertools
import json
import logging
from datetime import datetime, timedelta
from time import strftime
from typing import Any, Dict, List
import pytz
from typing import Dict, Any, List
from core.config import CoreConfig
from titles.chuni.const import ChuniConstants
from titles.chuni.database import ChuniData
from titles.chuni.config import ChuniConfig
SCORE_BUFFER = {}
from titles.chuni.const import ChuniConstants, ItemKind
from titles.chuni.database import ChuniData
class ChuniBase:
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
@ -24,20 +24,35 @@ class ChuniBase:
async def handle_game_login_api_request(self, data: Dict) -> Dict:
"""
Handles the login bonus logic, required for the game because
getUserLoginBonus gets called after getUserItem and therefore the
Handles the login bonus and ticket stock logic, required for the game
because getUserLoginBonus gets called after getUserItem; therefore the
items needs to be inserted in the database before they get requested.
Adds a bonusCount after a user logged in after 24 hours, makes sure
loginBonus 30 gets looped, only show the login banner every 24 hours,
adds the bonus to items (itemKind 6)
- Adds a stock for each specified ticket (itemKind 5)
- Adds a bonusCount after a user logged in after 24 hours, makes sure
loginBonus 30 gets looped, only show the login banner every 24 hours,
adds the bonus to items (itemKind 6)
"""
user_id = data["userId"]
# If we want to make certain tickets always available, stock them now
if self.game_cfg.mods.stock_tickets:
for ticket in self.game_cfg.mods.stock_tickets.split(","):
await self.data.item.put_item(
user_id,
{
"itemId": ticket.strip(),
"itemKind": ItemKind.TICKET.value,
"stock": self.game_cfg.mods.stock_count,
"isValid": True,
},
)
# ignore the login bonus if disabled in config
if not self.game_cfg.mods.use_login_bonus:
return {"returnCode": 1}
user_id = data["userId"]
login_bonus_presets = await self.data.static.get_login_bonus_presets(self.version)
for preset in login_bonus_presets:
@ -101,7 +116,7 @@ class ChuniBase:
user_id,
{
"itemId": login_item["presentId"],
"itemKind": 6,
"itemKind": ItemKind.PRESENT.value,
"stock": login_item["itemNum"],
"isValid": True,
},
@ -262,35 +277,39 @@ class ChuniBase:
}
async def handle_get_user_character_api_request(self, data: Dict) -> Dict:
characters = await self.data.item.get_characters(data["userId"])
if characters is None:
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
# add one to the limit so we know if there's a next page of items
rows = await self.data.item.get_characters(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None or len(rows) == 0:
return {
"userId": data["userId"],
"userId": user_id,
"length": 0,
"nextIndex": -1,
"userCharacterList": [],
}
character_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
for x in range(next_idx, len(characters)):
tmp = characters[x]._asdict()
tmp.pop("user")
for row in rows[:max_ct]:
tmp = row._asdict()
tmp.pop("id")
tmp.pop("user")
character_list.append(tmp)
if len(character_list) >= max_ct:
break
if len(characters) >= next_idx + max_ct:
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = -1
return {
"userId": data["userId"],
"userId": user_id,
"length": len(character_list),
"nextIndex": next_idx,
"userCharacterList": character_list,
@ -320,29 +339,31 @@ class ChuniBase:
}
async def handle_get_user_course_api_request(self, data: Dict) -> Dict:
user_course_list = await self.data.score.get_courses(data["userId"])
if user_course_list is None:
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
rows = await self.data.score.get_courses(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None or len(rows) == 0:
return {
"userId": data["userId"],
"userId": user_id,
"length": 0,
"nextIndex": -1,
"userCourseList": [],
}
course_list = []
next_idx = int(data.get("nextIndex", 0))
max_ct = int(data.get("maxCount", 300))
for x in range(next_idx, len(user_course_list)):
tmp = user_course_list[x]._asdict()
for row in rows[:max_ct]:
tmp = row._asdict()
tmp.pop("user")
tmp.pop("id")
course_list.append(tmp)
if len(user_course_list) >= max_ct:
break
if len(user_course_list) >= next_idx + max_ct:
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = -1
@ -410,75 +431,94 @@ class ChuniBase:
}
async def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict:
rival_id = data["rivalId"]
next_index = int(data["nextIndex"])
max_count = int(data["maxCount"])
user_rival_music_list = []
user_id = int(data["userId"])
rival_id = int(data["rivalId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
rival_levels = [int(x["level"]) for x in data["userRivalMusicLevelList"]]
# Fetch all the rival music entries for the user
all_entries = await self.data.score.get_rival_music(rival_id)
rows = await self.data.score.get_scores(
rival_id,
levels=rival_levels,
limit=max_ct + 1,
offset=next_idx,
)
# Process the entries based on max_count and nextIndex
for music in all_entries:
music_id = music["musicId"]
level = music["level"]
score = music["scoreMax"]
rank = music["scoreRank"]
if rows is None or len(rows) == 0:
return {
"userId": user_id,
"rivalId": rival_id,
"nextIndex": -1,
"userRivalMusicList": [],
}
# Create a music entry for the current music_id if it's unique
music_entry = next((entry for entry in user_rival_music_list if entry["musicId"] == music_id), None)
if music_entry is None:
music_entry = {
"musicId": music_id,
"length": 0,
"userRivalMusicDetailList": []
}
user_rival_music_list.append(music_entry)
music_details = [x._asdict() for x in rows]
returned_music_details_count = 0
music_list = []
# Create a level entry for the current level if it's unique or has a higher score
level_entry = next((entry for entry in music_entry["userRivalMusicDetailList"] if entry["level"] == level), None)
if level_entry is None:
level_entry = {
"level": level,
"scoreMax": score,
"scoreRank": rank
}
music_entry["userRivalMusicDetailList"].append(level_entry)
elif score > level_entry["scoreMax"]:
level_entry["scoreMax"] = score
level_entry["scoreRank"] = rank
# note that itertools.groupby will only work on sorted keys, which is already sorted by
# the query in get_scores
for music_id, details_iter in itertools.groupby(music_details, key=lambda x: x["musicId"]):
details: list[dict[Any, Any]] = [
{"level": d["level"], "scoreMax": d["scoreMax"]}
for d in details_iter
]
# Calculate the length for each "musicId" by counting the unique levels
for music_entry in user_rival_music_list:
music_entry["length"] = len(music_entry["userRivalMusicDetailList"])
music_list.append({"musicId": music_id, "length": len(details), "userMusicDetailList": details})
returned_music_details_count += len(details)
# Prepare the result dictionary with user rival music data
result = {
"userId": data["userId"],
"rivalId": data["rivalId"],
"nextIndex": str(next_index + len(user_rival_music_list[next_index: next_index + max_count]) if max_count <= len(user_rival_music_list[next_index: next_index + max_count]) else -1),
"userRivalMusicList": user_rival_music_list[next_index: next_index + max_count]
if len(music_list) >= max_ct:
break
# if we returned fewer PBs than we originally asked for from the database, that means
# we queried for the PBs of max_ct + 1 songs.
if returned_music_details_count < len(rows):
next_idx += max_ct
else:
next_idx = -1
return {
"userId": user_id,
"rivalId": rival_id,
"length": len(music_list),
"nextIndex": next_idx,
"userRivalMusicList": music_list,
}
return result
async def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
kind = int(data["kind"])
is_all_favorite_item = str(data["isAllFavoriteItem"]) == "true"
user_fav_item_list = []
# still needs to be implemented on WebUI
# 1: Music, 2: User, 3: Character
fav_list = await self.data.item.get_all_favorites(
data["userId"], self.version, fav_kind=int(data["kind"])
rows = await self.data.item.get_all_favorites(
user_id,
self.version,
fav_kind=kind,
limit=max_ct + 1,
offset=next_idx,
)
if fav_list is not None:
for fav in fav_list:
if rows is not None:
for fav in rows[:max_ct]:
user_fav_item_list.append({"id": fav["favId"]})
if rows is None or len(rows) <= max_ct:
next_idx = -1
else:
next_idx += max_ct
return {
"userId": data["userId"],
"userId": user_id,
"length": len(user_fav_item_list),
"kind": data["kind"],
"nextIndex": -1,
"kind": kind,
"nextIndex": next_idx,
"userFavoriteItemList": user_fav_item_list,
}
@ -490,36 +530,39 @@ class ChuniBase:
return {"userId": data["userId"], "length": 0, "userFavoriteMusicList": []}
async def handle_get_user_item_api_request(self, data: Dict) -> Dict:
kind = int(int(data["nextIndex"]) / 10000000000)
next_idx = int(int(data["nextIndex"]) % 10000000000)
user_item_list = await self.data.item.get_items(data["userId"], kind)
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
if user_item_list is None or len(user_item_list) == 0:
kind = next_idx // 10000000000
next_idx = next_idx % 10000000000
rows = await self.data.item.get_items(
user_id, kind, limit=max_ct + 1, offset=next_idx
)
if rows is None or len(rows) == 0:
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": -1,
"itemKind": kind,
"userItemList": [],
}
items: List[Dict[str, Any]] = []
for i in range(next_idx, len(user_item_list)):
tmp = user_item_list[i]._asdict()
for row in rows[:max_ct]:
tmp = row._asdict()
tmp.pop("user")
tmp.pop("id")
items.append(tmp)
if len(items) >= int(data["maxCount"]):
break
xout = kind * 10000000000 + next_idx + len(items)
if len(items) < int(data["maxCount"]):
next_idx = 0
if len(rows) > max_ct:
next_idx = kind * 10000000000 + next_idx + max_ct
else:
next_idx = xout
next_idx = -1
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": next_idx,
"itemKind": kind,
"length": len(items),
@ -571,62 +614,55 @@ class ChuniBase:
}
async def handle_get_user_music_api_request(self, data: Dict) -> Dict:
music_detail = await self.data.score.get_scores(data["userId"])
if music_detail is None:
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
rows = await self.data.score.get_scores(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None or len(rows) == 0:
return {
"userId": data["userId"],
"userId": user_id,
"length": 0,
"nextIndex": -1,
"userMusicList": [], # 240
}
song_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
music_details = [x._asdict() for x in rows]
returned_music_details_count = 0
music_list = []
for x in range(next_idx, len(music_detail)):
found = False
tmp = music_detail[x]._asdict()
tmp.pop("user")
tmp.pop("id")
# note that itertools.groupby will only work on sorted keys, which is already sorted by
# the query in get_scores
for _music_id, details_iter in itertools.groupby(music_details, key=lambda x: x["musicId"]):
details: list[dict[Any, Any]] = []
for song in song_list:
score_buf = SCORE_BUFFER.get(str(data["userId"])) or []
if song["userMusicDetailList"][0]["musicId"] == tmp["musicId"]:
found = True
song["userMusicDetailList"].append(tmp)
song["length"] = len(song["userMusicDetailList"])
score_buf.append(tmp["musicId"])
SCORE_BUFFER[str(data["userId"])] = score_buf
for d in details_iter:
d.pop("id")
d.pop("user")
score_buf = SCORE_BUFFER.get(str(data["userId"])) or []
if not found and tmp["musicId"] not in score_buf:
song_list.append({"length": 1, "userMusicDetailList": [tmp]})
score_buf.append(tmp["musicId"])
SCORE_BUFFER[str(data["userId"])] = score_buf
details.append(d)
if len(song_list) >= max_ct:
music_list.append({"length": len(details), "userMusicDetailList": details})
returned_music_details_count += len(details)
if len(music_list) >= max_ct:
break
for songIdx in range(len(song_list)):
for recordIdx in range(x+1, len(music_detail)):
if song_list[songIdx]["userMusicDetailList"][0]["musicId"] == music_detail[recordIdx]["musicId"]:
music = music_detail[recordIdx]._asdict()
music.pop("user")
music.pop("id")
song_list[songIdx]["userMusicDetailList"].append(music)
song_list[songIdx]["length"] += 1
if len(song_list) >= max_ct:
next_idx += len(song_list)
# if we returned fewer music details than we fetched from the database, the query
# covered more than max_ct songs, so there is at least one more page to serve.
if returned_music_details_count < len(rows):
next_idx += max_ct
else:
next_idx = -1
SCORE_BUFFER[str(data["userId"])] = []
return {
"userId": data["userId"],
"length": len(song_list),
"userId": user_id,
"length": len(music_list),
"nextIndex": next_idx,
"userMusicList": song_list, # 240
"userMusicList": music_list,
}
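Since itertools.groupby only merges adjacent rows, the handler above depends on get_scores returning rows ordered by musicId. A toy illustration, independent of the handler:
import itertools

rows = [{"musicId": 8, "level": 3}, {"musicId": 8, "level": 4}, {"musicId": 11, "level": 3}]
grouped = [
    (music_id, list(details))
    for music_id, details in itertools.groupby(rows, key=lambda r: r["musicId"])
]
# grouped == [(8, [two charts]), (11, [one chart])]; unsorted input would instead
# yield the same musicId more than once.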
async def handle_get_user_option_api_request(self, data: Dict) -> Dict:
@ -720,9 +756,14 @@ class ChuniBase:
team_id = 65535
team_name = self.game_cfg.team.team_name
team_rank = 0
team_user_point = 0
# Get user profile
profile = await self.data.profile.get_profile_data(data["userId"], self.version)
if profile is None:
return {"userId": data["userId"], "teamId": 0}
if profile and profile["teamId"]:
# Get team by id
team = await self.data.profile.get_team_by_id(profile["teamId"])
@ -731,7 +772,12 @@ class ChuniBase:
team_id = team["id"]
team_name = team["teamName"]
team_rank = await self.data.profile.get_team_rank(team["id"])
team_point = team["teamPoint"]
if team["userTeamPoint"] is not None and team["userTeamPoint"] != "":
user_team_point_data = json.loads(team["userTeamPoint"])
for user_point_data in user_team_point_data:
if user_point_data["user"] == data["userId"]:
team_user_point = int(user_point_data["userPoint"])
# Return an empty team if no default team name is configured and the player has no team set
if not profile["teamId"] and team_name == "":
return {"userId": data["userId"], "teamId": 0}
@ -741,11 +787,12 @@ class ChuniBase:
"teamId": team_id,
"teamRank": team_rank,
"teamName": team_name,
"assaultTimeRate": 1, # TODO: Figure out assaultTime, which might be team point boost?
"userTeamPoint": {
"userId": data["userId"],
"teamId": team_id,
"orderId": 1,
"teamPoint": 1,
"orderId": 0,
"teamPoint": team_user_point,
"aggrDate": data["playDate"],
},
}
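Based on the parsing above, the new userTeamPoint column appears to hold a JSON array of per-user point entries; a hedged sketch of that shape (ids and values are made up):
import json

user_team_point_json = '[{"user": 10000, "userPoint": "1200"}]'
for entry in json.loads(user_team_point_json):
    if entry["user"] == 10000:
        team_user_point = int(entry["userPoint"])  # -> 1200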

View File

@ -53,6 +53,29 @@ class ChuniModsConfig:
self.__config, "chuni", "mods", "use_login_bonus", default=True
)
@property
def stock_tickets(self) -> str:
return CoreConfig.get_config_field(
self.__config, "chuni", "mods", "stock_tickets", default=None
)
@property
def stock_count(self) -> int:
return CoreConfig.get_config_field(
self.__config, "chuni", "mods", "stock_count", default=99
)
def forced_item_unlocks(self, item: str) -> bool:
forced_item_unlocks = CoreConfig.get_config_field(
self.__config, "chuni", "mods", "forced_item_unlocks", default={}
)
if item not in forced_item_unlocks.keys():
# default to no forced unlocks
return False
return forced_item_unlocks[item]
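Judging by the forced_item_unlocks() lookups in the frontend further down, the parsed mods section could carry per-category override flags along these lines; this is a sketch, not the shipped example config:
mods_section = {
    "forced_item_unlocks": {
        "map_icons": True,
        "system_voices": True,
        "nameplates": False,
        "trophies": False,
        "character_icons": False,
        "avatar_accessories": False,
    },
}
# forced_item_unlocks("map_icons") would then return True; any category missing
# from the mapping falls back to False.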
class ChuniVersionConfig:
def __init__(self, parent_config: "ChuniConfig") -> None:
@ -63,9 +86,14 @@ class ChuniVersionConfig:
in the form of:
11: {"rom": 2.00.00, "data": 2.00.00}
"""
return CoreConfig.get_config_field(
versions = CoreConfig.get_config_field(
self.__config, "chuni", "version", default={}
)[version]
)
if version not in versions.keys():
return None
return versions[version]
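With the guard above, asking for a version that is not configured now yields None instead of raising KeyError; illustrative values:
versions = {11: {"rom": "2.00.00", "data": "2.00.00"}}
assert versions.get(13) is None  # same outcome as the new early return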
class ChuniCryptoConfig:

View File

@ -1,4 +1,4 @@
from enum import Enum
from enum import Enum, IntEnum
class ChuniConstants:
@ -19,11 +19,13 @@ class ChuniConstants:
VER_CHUNITHM_CRYSTAL = 8
VER_CHUNITHM_CRYSTAL_PLUS = 9
VER_CHUNITHM_PARADISE = 10
VER_CHUNITHM_NEW = 11
VER_CHUNITHM_NEW_PLUS = 12
VER_CHUNITHM_SUN = 13
VER_CHUNITHM_SUN_PLUS = 14
VER_CHUNITHM_LUMINOUS = 15
VERSION_NAMES = [
"CHUNITHM",
"CHUNITHM PLUS",
@ -43,18 +45,112 @@ class ChuniConstants:
"CHUNITHM LUMINOUS",
]
SCORE_RANK_INTERVALS_OLD = [
(1007500, "SSS"),
(1000000, "SS"),
( 975000, "S"),
( 950000, "AAA"),
( 925000, "AA"),
( 900000, "A"),
( 800000, "BBB"),
( 700000, "BB"),
( 600000, "B"),
( 500000, "C"),
( 0, "D"),
]
SCORE_RANK_INTERVALS_NEW = [
(1009000, "SSS+"), # New only
(1007500, "SSS"),
(1005000, "SS+"), # New only
(1000000, "SS"),
( 990000, "S+"), # New only
( 975000, "S"),
( 950000, "AAA"),
( 925000, "AA"),
( 900000, "A"),
( 800000, "BBB"),
( 700000, "BB"),
( 600000, "B"),
( 500000, "C"),
( 0, "D"),
]
@classmethod
def game_ver_to_string(cls, ver: int):
return cls.VERSION_NAMES[ver]
class MapAreaConditionType(Enum):
UNLOCKED = 0
class MapAreaConditionType(IntEnum):
"""Condition types for the GetGameMapAreaConditionApi endpoint. Incomplete.
For the MAP_CLEARED/MAP_AREA_CLEARED/TROPHY_OBTAINED conditions, the conditionId
is the map/map area/trophy.
For the RANK_*/ALL_JUSTICE conditions, the conditionId is songId * 100 + difficultyId.
For example, Halcyon [ULTIMA] would be 173 * 100 + 4 = 17304.
"""
ALWAYS_UNLOCKED = 0
MAP_CLEARED = 1
MAP_AREA_CLEARED = 2
TROPHY_OBTAINED = 3
RANK_SSS = 19
RANK_SSP = 20
RANK_SS = 21
RANK_SP = 22
RANK_S = 23
ALL_JUSTICE = 28
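A tiny illustrative helper for the conditionId encoding described in the docstring (not part of this change):
def rank_condition_id(song_id: int, difficulty_id: int) -> int:
    return song_id * 100 + difficulty_id

assert rank_condition_id(173, 4) == 17304  # Halcyon [ULTIMA], as in the docstring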
class MapAreaConditionLogicalOperator(Enum):
AND = 1
OR = 2
class AvatarCategory(Enum):
WEAR = 1
HEAD = 2
FACE = 3
SKIN = 4
ITEM = 5
FRONT = 6
BACK = 7
class ItemKind(IntEnum):
NAMEPLATE = 1
FRAME = 2
"""
"Frame" is the background for the gauge/score/max combo display
shown during gameplay. This item cannot be equipped (as of LUMINOUS)
and is hardcoded to the current game's version.
"""
TROPHY = 3
SKILL = 4
TICKET = 5
"""A statue is also a ticket."""
PRESENT = 6
MUSIC_UNLOCK = 7
MAP_ICON = 8
SYSTEM_VOICE = 9
SYMBOL_CHAT = 10
AVATAR_ACCESSORY = 11
ULTIMA_UNLOCK = 12
"""This only applies to ULTIMA difficulties that are *not* unlocked by
SS-ing EXPERT+MASTER.
"""
class FavoriteItemKind(IntEnum):
MUSIC = 1
RIVAL = 2
CHARACTER = 3

View File

@ -1,13 +1,17 @@
from core.data import Data
from core.config import CoreConfig
from titles.chuni.schema import *
from .config import ChuniConfig
class ChuniData(Data):
def __init__(self, cfg: CoreConfig) -> None:
def __init__(self, cfg: CoreConfig, chuni_cfg: ChuniConfig = None) -> None:
super().__init__(cfg)
self.item = ChuniItemData(cfg, self.session)
self.profile = ChuniProfileData(cfg, self.session)
self.score = ChuniScoreData(cfg, self.session)
self.static = ChuniStaticData(cfg, self.session)
# init rom versioning for use with score playlog data
if chuni_cfg:
ChuniRomVersion.init_versions(chuni_cfg)

View File

@ -2,6 +2,8 @@ from typing import List
from starlette.routing import Route, Mount
from starlette.requests import Request
from starlette.responses import Response, RedirectResponse
from starlette.staticfiles import StaticFiles
from sqlalchemy.engine import Row
from os import path
import yaml
import jinja2
@ -10,7 +12,70 @@ from core.frontend import FE_Base, UserSession
from core.config import CoreConfig
from .database import ChuniData
from .config import ChuniConfig
from .const import ChuniConstants
from .const import ChuniConstants, AvatarCategory, ItemKind
def pairwise(iterable):
# https://docs.python.org/3/library/itertools.html#itertools.pairwise
# but for Python < 3.10. pairwise('ABCDEFG') → AB BC CD DE EF FG
iterator = iter(iterable)
a = next(iterator, None)
for b in iterator:
yield a, b
a = b
def calculate_song_rank(score: int, game_version: int) -> str:
if game_version >= ChuniConstants.VER_CHUNITHM_NEW:
intervals = ChuniConstants.SCORE_RANK_INTERVALS_NEW
else:
intervals = ChuniConstants.SCORE_RANK_INTERVALS_OLD
for (min_score, rank) in intervals:
if score >= min_score:
return rank
return "D"
def calculate_song_rating(score: int, chart_constant: float, game_version: int) -> float:
is_new = game_version >= ChuniConstants.VER_CHUNITHM_NEW
if is_new: # New and later
max_score = 1009000
max_rating_modifier = 2.15
else: # Up to Paradise Lost
max_score = 1007500
max_rating_modifier = 2.0
if (score < 500000):
return 0.0 # D
elif (score >= max_score):
return chart_constant + max_rating_modifier # SSS/SSS+
# Okay, we're doing this the hard way.
# Rating goes up linearly between the breakpoints listed below.
# Pick the interval the score falls into, then interpolate
# between the ratings at its endpoints.
score_intervals = [
( 500000, 0.0), # C
( 800000, max(0.0, (chart_constant - 5.0) / 2)), # BBB
( 900000, max(0.0, (chart_constant - 5.0))), # A
( 925000, max(0.0, (chart_constant - 3.0))), # AA
( 975000, chart_constant), # S
(1000000, chart_constant + 1.0), # SS
(1005000, chart_constant + 1.5), # SS+
(1007500, chart_constant + 2.0), # SSS
(1009000, chart_constant + max_rating_modifier), # SSS+!
]
for ((lo_score, lo_rating), (hi_score, hi_rating)) in pairwise(score_intervals):
if not (lo_score <= score < hi_score):
continue
interval_pos = (score - lo_score) / (hi_score - lo_score)
return lo_rating + ((hi_rating - lo_rating) * interval_pos)
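A quick worked example of the interpolation above, assuming a NEW-or-later profile version: a 1,004,000 score on a 13.5 chart sits 4/5 of the way between the SS (+1.0) and SS+ (+1.5) breakpoints.
rating = calculate_song_rating(1004000, 13.5, ChuniConstants.VER_CHUNITHM_NEW)
# interval (1000000, 14.5) .. (1005000, 15.0), interval_pos = 4000 / 5000
assert abs(rating - 14.9) < 1e-9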
class ChuniFrontend(FE_Base):
@ -18,12 +83,12 @@ class ChuniFrontend(FE_Base):
self, cfg: CoreConfig, environment: jinja2.Environment, cfg_dir: str
) -> None:
super().__init__(cfg, environment)
self.data = ChuniData(cfg)
self.game_cfg = ChuniConfig()
if path.exists(f"{cfg_dir}/{ChuniConstants.CONFIG_NAME}"):
self.game_cfg.update(
yaml.safe_load(open(f"{cfg_dir}/{ChuniConstants.CONFIG_NAME}"))
)
self.data = ChuniData(cfg, self.game_cfg)
self.nav_name = "Chunithm"
def get_routes(self) -> List[Route]:
@ -34,8 +99,18 @@ class ChuniFrontend(FE_Base):
Route("/", self.render_GET_playlog, methods=['GET']),
Route("/{index}", self.render_GET_playlog, methods=['GET']),
]),
Route("/favorites", self.render_GET_favorites, methods=['GET']),
Route("/userbox", self.render_GET_userbox, methods=['GET']),
Route("/avatar", self.render_GET_avatar, methods=['GET']),
Route("/update.map-icon", self.update_map_icon, methods=['POST']),
Route("/update.system-voice", self.update_system_voice, methods=['POST']),
Route("/update.userbox", self.update_userbox, methods=['POST']),
Route("/update.avatar", self.update_avatar, methods=['POST']),
Route("/update.name", self.update_name, methods=['POST']),
Route("/update.favorite_music_playlog", self.update_favorite_music_playlog, methods=['POST']),
Route("/update.favorite_music_favorites", self.update_favorite_music_favorites, methods=['POST']),
Route("/version.change", self.version_change, methods=['POST']),
Mount('/img', app=StaticFiles(directory='titles/chuni/img'), name="img")
]
async def render_GET(self, request: Request) -> bytes:
@ -55,15 +130,28 @@ class ChuniFrontend(FE_Base):
usr_sesh.chunithm_version = versions[0]
profile = await self.data.profile.get_profile_data(usr_sesh.user_id, usr_sesh.chunithm_version)
user_id = usr_sesh.user_id
version = usr_sesh.chunithm_version
# While map icons and system voices weren't present prior to AMAZON, we don't need to bother checking
# version here - it'll just end up being empty sets and the jinja will ignore the variables anyway.
map_icons, total_map_icons = await self.get_available_map_icons(version, profile)
system_voices, total_system_voices = await self.get_available_system_voices(version, profile)
resp = Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
user_id=usr_sesh.user_id,
user_id=user_id,
profile=profile,
version_list=ChuniConstants.VERSION_NAMES,
versions=versions,
cur_version=usr_sesh.chunithm_version
cur_version=version,
cur_version_name=ChuniConstants.game_ver_to_string(version),
map_icons=map_icons,
system_voices=system_voices,
total_map_icons=total_map_icons,
total_system_voices=total_system_voices
), media_type="text/html; charset=utf-8")
if usr_sesh.chunithm_version >= 0:
@ -91,37 +179,27 @@ class ChuniFrontend(FE_Base):
base_list=[]
if profile and rating:
song_records = []
for song in rating:
music_chart = await self.data.static.get_music_chart(usr_sesh.chunithm_version, song.musicId, song.difficultId)
if music_chart:
if (song.score < 800000):
song_rating = 0
elif (song.score >= 800000 and song.score < 900000):
song_rating = music_chart.level / 2 - 5
elif (song.score >= 900000 and song.score < 925000):
song_rating = music_chart.level - 5
elif (song.score >= 925000 and song.score < 975000):
song_rating = music_chart.level - 3
elif (song.score >= 975000 and song.score < 1000000):
song_rating = (song.score - 975000) / 2500 * 0.1 + music_chart.level
elif (song.score >= 1000000 and song.score < 1005000):
song_rating = (song.score - 1000000) / 1000 * 0.1 + 1 + music_chart.level
elif (song.score >= 1005000 and song.score < 1007500):
song_rating = (song.score - 1005000) / 500 * 0.1 + 1.5 + music_chart.level
elif (song.score >= 1007500 and song.score < 1009000):
song_rating = (song.score - 1007500) / 100 * 0.01 + 2 + music_chart.level
elif (song.score >= 1009000):
song_rating = 2.15 + music_chart.level
song_rating = int(song_rating * 10 ** 2) / 10 ** 2
song_records.append({
"difficultId": song.difficultId,
"musicId": song.musicId,
"title": music_chart.title,
"level": music_chart.level,
"score": song.score,
"type": song.type,
"song_rating": song_rating,
})
if not music_chart:
continue
rank = calculate_song_rank(song.score, profile.version)
rating = calculate_song_rating(song.score, music_chart.level, profile.version)
song_rating = int(rating * 10 ** 2) / 10 ** 2
song_records.append({
"difficultId": song.difficultId,
"musicId": song.musicId,
"title": music_chart.title,
"level": music_chart.level,
"score": song.score,
"type": song.type,
"rank": rank,
"song_rating": song_rating,
})
hot_list = [obj for obj in song_records if obj["type"] == "userRatingBaseHotList"]
base_list = [obj for obj in song_records if obj["type"] == "userRatingBaseList"]
return Response(template.render(
@ -131,6 +209,8 @@ class ChuniFrontend(FE_Base):
profile=profile,
hot_list=hot_list,
base_list=base_list,
cur_version=usr_sesh.chunithm_version,
cur_version_name=ChuniConstants.game_ver_to_string(usr_sesh.chunithm_version)
), media_type="text/html; charset=utf-8")
else:
return RedirectResponse("/gate/", 303)
@ -152,43 +232,466 @@ class ChuniFrontend(FE_Base):
else:
index = int(path_index) - 1 # 0 and 1 are 1st page
user_id = usr_sesh.user_id
playlog_count = await self.data.score.get_user_playlogs_count(user_id)
version = usr_sesh.chunithm_version
playlog_count = await self.data.score.get_user_playlogs_count(user_id, version)
if playlog_count < index * 20 :
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
playlog_count=0
playlog_count=0,
cur_version=version,
cur_version_name=ChuniConstants.game_ver_to_string(version)
), media_type="text/html; charset=utf-8")
playlog = await self.data.score.get_playlogs_limited(user_id, index, 20)
playlog = await self.data.score.get_playlogs_limited(user_id, version, index, 20)
playlog_with_title = []
for record in playlog:
music_chart = await self.data.static.get_music_chart(usr_sesh.chunithm_version, record.musicId, record.level)
for idx,record in enumerate(playlog):
music_chart = await self.data.static.get_music_chart(version, record.musicId, record.level)
if music_chart:
difficultyNum=music_chart.level
artist=music_chart.artist
title=music_chart.title
(jacket, ext) = path.splitext(music_chart.jacketPath)
jacket += ".png"
else:
difficultyNum=0
artist="unknown"
title="musicid: " + str(record.musicId)
jacket = "unknown.png"
# Check if this song is a favorite so we can populate the add/remove button
is_favorite = await self.data.item.is_favorite(user_id, version, record.musicId)
playlog_with_title.append({
# Values for the actual readable results
"raw": record,
"title": title,
"difficultyNum": difficultyNum,
"artist": artist,
"jacket": jacket,
# Values used solely for favorite updates
"idx": idx,
"musicId": record.musicId,
"isFav": is_favorite
})
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
user_id=usr_sesh.user_id,
user_id=user_id,
playlog=playlog_with_title,
playlog_count=playlog_count
playlog_count=playlog_count,
cur_version=version,
cur_version_name=ChuniConstants.game_ver_to_string(version)
), media_type="text/html; charset=utf-8")
else:
return RedirectResponse("/gate/", 303)
async def render_GET_favorites(self, request: Request) -> bytes:
template = self.environment.get_template(
"titles/chuni/templates/chuni_favorites.jinja"
)
usr_sesh = self.validate_session(request)
if not usr_sesh:
usr_sesh = UserSession()
if usr_sesh.user_id > 0:
if usr_sesh.chunithm_version < 0:
return RedirectResponse("/game/chuni/", 303)
user_id = usr_sesh.user_id
version = usr_sesh.chunithm_version
favorites = await self.data.item.get_all_favorites(user_id, version, 1)
favorites_count = len(favorites)
favorites_with_title = []
favorites_by_genre = dict()
for idx,favorite in enumerate(favorites):
song = await self.data.static.get_song(favorite.favId)
if song:
# we likely got multiple results - one for each chart. Just use the first
artist=song.artist
title=song.title
genre=song.genre
(jacket, ext) = path.splitext(song.jacketPath)
jacket += ".png"
else:
artist="unknown"
title="musicid: " + str(favorite.favId)
genre="unknown"
jacket = "unknown.png"
# add a new collection for the genre if this is our first time seeing it
if genre not in favorites_by_genre:
favorites_by_genre[genre] = []
# add the song to the appropriate genre collection
favorites_by_genre[genre].append({
"idx": idx,
"title": title,
"artist": artist,
"jacket": jacket,
"favId": favorite.favId
})
# Sort favorites by title before rendering the page
for g in favorites_by_genre:
favorites_by_genre[g].sort(key=lambda x: x["title"].lower())
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
user_id=user_id,
favorites_by_genre=favorites_by_genre,
favorites_count=favorites_count,
cur_version=version,
cur_version_name=ChuniConstants.game_ver_to_string(version)
), media_type="text/html; charset=utf-8")
else:
return RedirectResponse("/gate/", 303)
async def get_available_map_icons(self, version: int, profile: Row) -> (List[dict], int):
items = dict()
rows = await self.data.static.get_map_icons(version)
if rows is None:
return (items, 0) # can only happen with old db
force_unlocked = self.game_cfg.mods.forced_item_unlocks("map_icons")
user_map_icons = []
if not force_unlocked:
user_map_icons = await self.data.item.get_items(profile.user, ItemKind.MAP_ICON.value)
user_map_icons = [icon["itemId"] for icon in user_map_icons] + [profile.mapIconId]
for row in rows:
if force_unlocked or row["defaultHave"] or row["mapIconId"] in user_map_icons:
item = dict()
item["id"] = row["mapIconId"]
item["name"] = row["name"]
item["iconPath"] = path.splitext(row["iconPath"])[0] + ".png"
items[row["mapIconId"]] = item
return (items, len(rows))
async def get_available_system_voices(self, version: int, profile: Row) -> (List[dict], int):
items = dict()
rows = await self.data.static.get_system_voices(version)
if rows is None:
return (items, 0) # can only happen with old db
force_unlocked = self.game_cfg.mods.forced_item_unlocks("system_voices")
user_system_voices = []
if not force_unlocked:
user_system_voices = await self.data.item.get_items(profile.user, ItemKind.SYSTEM_VOICE.value)
user_system_voices = [icon["itemId"] for icon in user_system_voices] + [profile.voiceId]
for row in rows:
if force_unlocked or row["defaultHave"] or row["voiceId"] in user_system_voices:
item = dict()
item["id"] = row["voiceId"]
item["name"] = row["name"]
item["imagePath"] = path.splitext(row["imagePath"])[0] + ".png"
items[row["voiceId"]] = item
return (items, len(rows))
async def get_available_nameplates(self, version: int, profile: Row) -> (List[dict], int):
items = dict()
rows = await self.data.static.get_nameplates(version)
if rows is None:
return (items, 0) # can only happen with old db
force_unlocked = self.game_cfg.mods.forced_item_unlocks("nameplates")
user_nameplates = []
if not force_unlocked:
user_nameplates = await self.data.item.get_items(profile.user, ItemKind.NAMEPLATE.value)
user_nameplates = [item["itemId"] for item in user_nameplates] + [profile.nameplateId]
for row in rows:
if force_unlocked or row["defaultHave"] or row["nameplateId"] in user_nameplates:
item = dict()
item["id"] = row["nameplateId"]
item["name"] = row["name"]
item["texturePath"] = path.splitext(row["texturePath"])[0] + ".png"
items[row["nameplateId"]] = item
return (items, len(rows))
async def get_available_trophies(self, version: int, profile: Row) -> (List[dict], int):
items = dict()
rows = await self.data.static.get_trophies(version)
if rows is None:
return (items, 0) # can only happen with old db
force_unlocked = self.game_cfg.mods.forced_item_unlocks("trophies")
user_trophies = []
if not force_unlocked:
user_trophies = await self.data.item.get_items(profile.user, ItemKind.TROPHY.value)
user_trophies = [item["itemId"] for item in user_trophies] + [profile.trophyId]
for row in rows:
if force_unlocked or row["defaultHave"] or row["trophyId"] in user_trophies:
item = dict()
item["id"] = row["trophyId"]
item["name"] = row["name"]
item["rarity"] = row["rareType"]
items[row["trophyId"]] = item
return (items, len(rows))
async def get_available_characters(self, version: int, profile: Row) -> (List[dict], int):
items = dict()
rows = await self.data.static.get_characters(version)
if rows is None:
return (items, 0) # can only happen with old db
force_unlocked = self.game_cfg.mods.forced_item_unlocks("character_icons")
user_characters = []
if not force_unlocked:
user_characters = await self.data.item.get_characters(profile.user)
user_characters = [chara["characterId"] for chara in user_characters] + [profile.characterId, profile.charaIllustId]
for row in rows:
if force_unlocked or row["defaultHave"] or row["characterId"] in user_characters:
item = dict()
item["id"] = row["characterId"]
item["name"] = row["name"]
item["iconPath"] = path.splitext(row["imagePath3"])[0] + ".png"
items[row["characterId"]] = item
return (items, len(rows))
async def get_available_avatar_items(self, version: int, category: AvatarCategory, user_unlocked_items: List[int]) -> (List[dict], int):
items = dict()
rows = await self.data.static.get_avatar_items(version, category.value)
if rows is None:
return (items, 0) # can only happen with old db
force_unlocked = self.game_cfg.mods.forced_item_unlocks("avatar_accessories")
for row in rows:
if force_unlocked or row["defaultHave"] or row["avatarAccessoryId"] in user_unlocked_items:
item = dict()
item["id"] = row["avatarAccessoryId"]
item["name"] = row["name"]
item["iconPath"] = path.splitext(row["iconPath"])[0] + ".png"
item["texturePath"] = path.splitext(row["texturePath"])[0] + ".png"
items[row["avatarAccessoryId"]] = item
return (items, len(rows))
async def render_GET_userbox(self, request: Request) -> bytes:
template = self.environment.get_template(
"titles/chuni/templates/chuni_userbox.jinja"
)
usr_sesh = self.validate_session(request)
if not usr_sesh:
usr_sesh = UserSession()
if usr_sesh.user_id > 0:
if usr_sesh.chunithm_version < 0:
return RedirectResponse("/game/chuni/", 303)
user_id = usr_sesh.user_id
version = usr_sesh.chunithm_version
# Get the user profile so we know how the userbox is currently configured
profile = await self.data.profile.get_profile_data(user_id, version)
# Build up lists of available userbox components
nameplates, total_nameplates = await self.get_available_nameplates(version, profile)
trophies, total_trophies = await self.get_available_trophies(version, profile)
characters, total_characters = await self.get_available_characters(version, profile)
# Get the user's team
team_name = "ARTEMiS"
if profile["teamId"]:
team = await self.data.profile.get_team_by_id(profile["teamId"])
team_name = team["teamName"]
# Figure out the rating color we should use (rank maps to the stylesheet)
rating = profile.playerRating / 100
rating_rank = 0
if rating >= 16:
rating_rank = 8
elif rating >= 15.25:
rating_rank = 7
elif rating >= 14.5:
rating_rank = 6
elif rating >= 13.25:
rating_rank = 5
elif rating >= 12:
rating_rank = 4
elif rating >= 10:
rating_rank = 3
elif rating >= 7:
rating_rank = 2
elif rating >= 4:
rating_rank = 1
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
user_id=user_id,
cur_version=version,
cur_version_name=ChuniConstants.game_ver_to_string(version),
profile=profile,
team_name=team_name,
rating_rank=rating_rank,
nameplates=nameplates,
trophies=trophies,
characters=characters,
total_nameplates=total_nameplates,
total_trophies=total_trophies,
total_characters=total_characters
), media_type="text/html; charset=utf-8")
else:
return RedirectResponse("/gate/", 303)
async def render_GET_avatar(self, request: Request) -> bytes:
template = self.environment.get_template(
"titles/chuni/templates/chuni_avatar.jinja"
)
usr_sesh = self.validate_session(request)
if not usr_sesh:
usr_sesh = UserSession()
if usr_sesh.user_id > 0:
if usr_sesh.chunithm_version < 11:
# Avatar configuration only for NEW!! and newer
return RedirectResponse("/game/chuni/", 303)
user_id = usr_sesh.user_id
version = usr_sesh.chunithm_version
# Get the user profile so we know what avatar items are currently in use
profile = await self.data.profile.get_profile_data(user_id, version)
# Get all the user avatar accessories so we know what to populate
user_accessories = await self.data.item.get_items(user_id, ItemKind.AVATAR_ACCESSORY.value)
user_accessories = [item["itemId"] for item in user_accessories] + \
[profile.avatarBack, profile.avatarItem, profile.avatarWear, \
profile.avatarFront, profile.avatarSkin, profile.avatarHead, profile.avatarFace]
# Build up available list of items for each avatar category
wears, total_wears = await self.get_available_avatar_items(version, AvatarCategory.WEAR, user_accessories)
faces, total_faces = await self.get_available_avatar_items(version, AvatarCategory.FACE, user_accessories)
heads, total_heads = await self.get_available_avatar_items(version, AvatarCategory.HEAD, user_accessories)
skins, total_skins = await self.get_available_avatar_items(version, AvatarCategory.SKIN, user_accessories)
items, total_items = await self.get_available_avatar_items(version, AvatarCategory.ITEM, user_accessories)
fronts, total_fronts = await self.get_available_avatar_items(version, AvatarCategory.FRONT, user_accessories)
backs, total_backs = await self.get_available_avatar_items(version, AvatarCategory.BACK, user_accessories)
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
user_id=user_id,
cur_version=version,
cur_version_name=ChuniConstants.game_ver_to_string(version),
profile=profile,
wears=wears,
faces=faces,
heads=heads,
skins=skins,
items=items,
fronts=fronts,
backs=backs,
total_wears=total_wears,
total_faces=total_faces,
total_heads=total_heads,
total_skins=total_skins,
total_items=total_items,
total_fronts=total_fronts,
total_backs=total_backs
), media_type="text/html; charset=utf-8")
else:
return RedirectResponse("/gate/", 303)
async def update_map_icon(self, request: Request) -> bytes:
usr_sesh = self.validate_session(request)
if not usr_sesh:
return RedirectResponse("/gate/", 303)
form_data = await request.form()
new_map_icon: str = form_data.get("id")
if not new_map_icon:
return RedirectResponse("/gate/?e=4", 303)
if not await self.data.profile.update_map_icon(usr_sesh.user_id, usr_sesh.chunithm_version, new_map_icon):
return RedirectResponse("/gate/?e=999", 303)
return RedirectResponse("/game/chuni/", 303)
async def update_system_voice(self, request: Request) -> bytes:
usr_sesh = self.validate_session(request)
if not usr_sesh:
return RedirectResponse("/gate/", 303)
form_data = await request.form()
new_system_voice: str = form_data.get("id")
if not new_system_voice:
return RedirectResponse("/gate/?e=4", 303)
if not await self.data.profile.update_system_voice(usr_sesh.user_id, usr_sesh.chunithm_version, new_system_voice):
return RedirectResponse("/gate/?e=999", 303)
return RedirectResponse("/game/chuni/", 303)
async def update_userbox(self, request: Request) -> bytes:
usr_sesh = self.validate_session(request)
if not usr_sesh:
return RedirectResponse("/gate/", 303)
form_data = await request.form()
new_nameplate: str = form_data.get("nameplate")
new_trophy: str = form_data.get("trophy")
new_character: str = form_data.get("character")
if not new_nameplate or \
not new_trophy or \
not new_character:
return RedirectResponse("/game/chuni/userbox?e=4", 303)
if not await self.data.profile.update_userbox(usr_sesh.user_id, usr_sesh.chunithm_version, new_nameplate, new_trophy, new_character):
return RedirectResponse("/gate/?e=999", 303)
return RedirectResponse("/game/chuni/userbox", 303)
async def update_avatar(self, request: Request) -> bytes:
usr_sesh = self.validate_session(request)
if not usr_sesh:
return RedirectResponse("/gate/", 303)
form_data = await request.form()
new_wear: str = form_data.get("wear")
new_face: str = form_data.get("face")
new_head: str = form_data.get("head")
new_skin: str = form_data.get("skin")
new_item: str = form_data.get("item")
new_front: str = form_data.get("front")
new_back: str = form_data.get("back")
if not new_wear or \
not new_face or \
not new_head or \
not new_skin or \
not new_item or \
not new_front or \
not new_back:
return RedirectResponse("/game/chuni/avatar?e=4", 303)
if not await self.data.profile.update_avatar(usr_sesh.user_id, usr_sesh.chunithm_version, new_wear, new_face, new_head, new_skin, new_item, new_front, new_back):
return RedirectResponse("/gate/?e=999", 303)
return RedirectResponse("/game/chuni/avatar", 303)
async def update_name(self, request: Request) -> bytes:
usr_sesh = self.validate_session(request)
if not usr_sesh:
@ -226,6 +729,32 @@ class ChuniFrontend(FE_Base):
return RedirectResponse("/game/chuni/?s=1", 303)
async def update_favorite_music(self, request: Request, retPage: str):
usr_sesh = self.validate_session(request)
if not usr_sesh:
return RedirectResponse(retPage, 303)
user_id = usr_sesh.user_id
version = usr_sesh.chunithm_version
form_data = await request.form()
music_id: str = form_data.get("musicId")
isAdd: int = int(form_data.get("isAdd"))
if isAdd:
if await self.data.item.put_favorite_music(user_id, version, music_id) == None:
return RedirectResponse("/gate/?e=999", 303)
else:
if await self.data.item.delete_favorite_music(user_id, version, music_id) == None:
return RedirectResponse("/gate/?e=999", 303)
return RedirectResponse(retPage, 303)
async def update_favorite_music_playlog(self, request: Request):
return await self.update_favorite_music(request, "/game/chuni/playlog")
async def update_favorite_music_favorites(self, request: Request):
return await self.update_favorite_music(request, "/game/chuni/favorites")
async def version_change(self, request: Request):
usr_sesh = self.validate_session(request)
if not usr_sesh:
@ -234,13 +763,13 @@ class ChuniFrontend(FE_Base):
if usr_sesh.user_id > 0:
form_data = await request.form()
chunithm_version = form_data.get("version")
self.logger.info(f"version change to: {chunithm_version}")
self.logger.debug(f"version change to: {chunithm_version}")
if(chunithm_version.isdigit()):
usr_sesh.chunithm_version=int(chunithm_version)
encoded_sesh = self.encode_session(usr_sesh)
self.logger.info(f"Created session with JWT {encoded_sesh}")
self.logger.debug(f"Created session with JWT {encoded_sesh}")
resp = RedirectResponse("/game/chuni/", 303)
resp.set_cookie("ARTEMIS_SESH", encoded_sesh)
return resp
else:
return RedirectResponse("/gate/", 303)
return RedirectResponse("/gate/", 303)

titles/chuni/img/avatar/.gitignore
View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

titles/chuni/img/character/.gitignore
View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

titles/chuni/img/jacket/.gitignore
View File

@ -0,0 +1,5 @@
# Ignore everything in this directory
*
# Except this file and default unknown
!.gitignore
!unknown.png

titles/chuni/img/mapIcon/.gitignore
View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

titles/chuni/img/nameplate/.gitignore
View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

View File

@ -41,7 +41,7 @@ class ChuniServlet(BaseServlet):
def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
super().__init__(core_cfg, cfg_dir)
self.game_cfg = ChuniConfig()
self.hash_table: Dict[Dict[str, str]] = {}
self.hash_table: Dict[str, Dict[str, str]] = {}
if path.exists(f"{cfg_dir}/{ChuniConstants.CONFIG_NAME}"):
self.game_cfg.update(
yaml.safe_load(open(f"{cfg_dir}/{ChuniConstants.CONFIG_NAME}"))
@ -92,32 +92,65 @@ class ChuniServlet(BaseServlet):
)
self.logger.inited = True
known_iter_counts = {
ChuniConstants.VER_CHUNITHM_CRYSTAL_PLUS: 67,
f"{ChuniConstants.VER_CHUNITHM_CRYSTAL_PLUS}_int": 25, # SUPERSTAR
ChuniConstants.VER_CHUNITHM_PARADISE: 44,
f"{ChuniConstants.VER_CHUNITHM_PARADISE}_int": 51, # SUPERSTAR PLUS
ChuniConstants.VER_CHUNITHM_NEW: 54,
f"{ChuniConstants.VER_CHUNITHM_NEW}_int": 49,
ChuniConstants.VER_CHUNITHM_NEW_PLUS: 25,
f"{ChuniConstants.VER_CHUNITHM_NEW_PLUS}_int": 31,
ChuniConstants.VER_CHUNITHM_SUN: 70,
f"{ChuniConstants.VER_CHUNITHM_SUN}_int": 35,
ChuniConstants.VER_CHUNITHM_SUN_PLUS: 36,
f"{ChuniConstants.VER_CHUNITHM_SUN_PLUS}_int": 36,
ChuniConstants.VER_CHUNITHM_LUMINOUS: 8,
f"{ChuniConstants.VER_CHUNITHM_LUMINOUS}_int": 8,
}
for version, keys in self.game_cfg.crypto.keys.items():
if len(keys) < 3:
continue
self.hash_table[version] = {}
if isinstance(version, int):
version_idx = version
else:
version_idx = int(version.split("_")[0])
salt = bytes.fromhex(keys[2])
if len(keys) >= 4:
iter_count = keys[3]
elif (iter_count := known_iter_counts.get(version)) is None:
self.logger.error(
"Number of iteration rounds for version %s is not known, but it is not specified in the config",
version,
)
continue
self.hash_table[version] = {}
method_list = [
method
for method in dir(self.versions[version])
for method in dir(self.versions[version_idx])
if not method.startswith("__")
]
for method in method_list:
method_fixed = inflection.camelize(method)[6:-7]
# number of iterations was changed to 70 in SUN and then to 36
if version == ChuniConstants.VER_CHUNITHM_LUMINOUS:
iter_count = 8
elif version == ChuniConstants.VER_CHUNITHM_SUN_PLUS:
iter_count = 36
elif version == ChuniConstants.VER_CHUNITHM_SUN:
iter_count = 70
else:
iter_count = 44
# This only applies for CHUNITHM NEW International and later for some reason.
# CHUNITHM SUPERSTAR (PLUS) did not add "Exp" to the endpoint when hashing.
if (
isinstance(version, str)
and version.endswith("_int")
and version_idx >= ChuniConstants.VER_CHUNITHM_NEW
):
method_fixed += "C3Exp"
hash = PBKDF2(
method_fixed,
bytes.fromhex(keys[2]),
salt,
128,
count=iter_count,
hmac_hash_module=SHA1,
@ -127,7 +160,7 @@ class ChuniServlet(BaseServlet):
self.hash_table[version][hashed_name] = method_fixed
self.logger.debug(
f"Hashed v{version} method {method_fixed} with {bytes.fromhex(keys[2])} to get {hash.hex()}"
f"Hashed v{version} method {method_fixed} with {salt} to get {hashed_name}"
)
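For context, the hash the client sends for an encrypted endpoint is derived roughly as below; the salt and iteration count are placeholders here, and the 32-character truncation is an assumption about how hashed_name is built rather than something visible in this hunk:
from Crypto.Hash import SHA1
from Crypto.Protocol.KDF import PBKDF2

salt = bytes.fromhex("00" * 16)  # placeholder; the real salt comes from keys[2]
digest = PBKDF2("GetUserItemApi", salt, 128, count=44, hmac_hash_module=SHA1)
endpoint_hash = digest.hex()[:32]  # assumed: the 32-hex-char name the client requests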
@classmethod
@ -205,8 +238,10 @@ class ChuniServlet(BaseServlet):
elif version >= 220: # LUMINOUS
internal_ver = ChuniConstants.VER_CHUNITHM_LUMINOUS
elif game_code == "SDGS": # Int
if version < 110: # SUPERSTAR / SUPERSTAR PLUS
internal_ver = ChuniConstants.VER_CHUNITHM_PARADISE # SUPERSTAR / SUPERSTAR PLUS worked fine with it
if version < 105: # SUPERSTAR
internal_ver = ChuniConstants.VER_CHUNITHM_CRYSTAL_PLUS
elif version >= 105 and version < 110: # SUPERSTAR PLUS *Cursed but needed due to different encryption key
internal_ver = ChuniConstants.VER_CHUNITHM_PARADISE
elif version >= 110 and version < 115: # NEW
internal_ver = ChuniConstants.VER_CHUNITHM_NEW
elif version >= 115 and version < 120: # NEW PLUS!!
@ -220,31 +255,39 @@ class ChuniServlet(BaseServlet):
if all(c in string.hexdigits for c in endpoint) and len(endpoint) == 32:
# If we get a 32 character long hex string, it's a hash and we're
# doing encrypted. The likelyhood of false positives is low but
# doing encrypted. The likelihood of false positives is low but
# technically not 0
if game_code == "SDGS":
crypto_cfg_key = f"{internal_ver}_int"
hash_table_key = f"{internal_ver}_int"
else:
crypto_cfg_key = internal_ver
hash_table_key = internal_ver
if internal_ver < ChuniConstants.VER_CHUNITHM_NEW:
endpoint = request.headers.get("User-Agent").split("#")[0]
else:
if internal_ver not in self.hash_table:
if hash_table_key not in self.hash_table:
self.logger.error(
f"v{version} does not support encryption or no keys entered"
)
return Response(zlib.compress(b'{"stat": "0"}'))
elif endpoint.lower() not in self.hash_table[internal_ver]:
elif endpoint.lower() not in self.hash_table[hash_table_key]:
self.logger.error(
f"No hash found for v{version} endpoint {endpoint}"
)
return Response(zlib.compress(b'{"stat": "0"}'))
endpoint = self.hash_table[internal_ver][endpoint.lower()]
endpoint = self.hash_table[hash_table_key][endpoint.lower()]
try:
crypt = AES.new(
bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][0]),
bytes.fromhex(self.game_cfg.crypto.keys[crypto_cfg_key][0]),
AES.MODE_CBC,
bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][1]),
bytes.fromhex(self.game_cfg.crypto.keys[crypto_cfg_key][1]),
)
req_raw = crypt.decrypt(req_raw)
@ -317,9 +360,9 @@ class ChuniServlet(BaseServlet):
padded = pad(zipped, 16)
crypt = AES.new(
bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][0]),
bytes.fromhex(self.game_cfg.crypto.keys[crypto_cfg_key][0]),
AES.MODE_CBC,
bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][1]),
bytes.fromhex(self.game_cfg.crypto.keys[crypto_cfg_key][1]),
)
return Response(crypt.encrypt(padded))

View File

@ -4,12 +4,14 @@ from random import randint
from typing import Dict
import pytz
from core.config import CoreConfig
from core.utils import Utils
from titles.chuni.const import ChuniConstants
from titles.chuni.database import ChuniData
from titles.chuni.base import ChuniBase
from titles.chuni.config import ChuniConfig
from titles.chuni.const import ChuniConstants
from titles.chuni.database import ChuniData
class ChuniNew(ChuniBase):
ITEM_TYPE = {"character": 20, "story": 21, "card": 22}
@ -104,7 +106,8 @@ class ChuniNew(ChuniBase):
return {"returnCode": "1"}
async def handle_get_user_map_area_api_request(self, data: Dict) -> Dict:
user_map_areas = await self.data.item.get_map_areas(data["userId"])
map_area_ids = [int(area["mapAreaId"]) for area in data["mapAreaIdList"]]
user_map_areas = await self.data.item.get_map_areas(data["userId"], map_area_ids)
map_areas = []
for map_area in user_map_areas:
@ -284,35 +287,37 @@ class ChuniNew(ChuniBase):
}
async def handle_get_user_printed_card_api_request(self, data: Dict) -> Dict:
user_print_list = await self.data.item.get_user_print_states(
data["userId"], has_completed=True
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
rows = await self.data.item.get_user_print_states(
user_id,
has_completed=True,
limit=max_ct + 1,
offset=next_idx,
)
if user_print_list is None:
if rows is None or len(rows) == 0:
return {
"userId": data["userId"],
"userId": user_id,
"length": 0,
"nextIndex": -1,
"userPrintedCardList": [],
}
print_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
for x in range(next_idx, len(user_print_list)):
tmp = user_print_list[x]._asdict()
for row in rows[:max_ct]:
tmp = row._asdict()
print_list.append(tmp["cardId"])
if len(print_list) >= max_ct:
break
if len(print_list) >= max_ct:
next_idx = next_idx + max_ct
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = -1
return {
"userId": data["userId"],
"userId": user_id,
"length": len(print_list),
"nextIndex": next_idx,
"userPrintedCardList": print_list,

View File

@ -2,10 +2,12 @@ from typing import Optional
from os import walk, path
import xml.etree.ElementTree as ET
from read import BaseReader
from PIL import Image
from core.config import CoreConfig
from titles.chuni.database import ChuniData
from titles.chuni.const import ChuniConstants
from titles.chuni.schema.static import music as MusicTable
class ChuniReader(BaseReader):
@ -35,14 +37,29 @@ class ChuniReader(BaseReader):
if self.opt_dir is not None:
data_dirs += self.get_data_directories(self.opt_dir)
we_diff = "4"
if self.version >= ChuniConstants.VER_CHUNITHM_NEW:
we_diff = "5"
# character images could be stored anywhere across all the data dirs. Map them first
self.logger.info(f"Mapping DDS image files...")
dds_images = dict()
for dir in data_dirs:
self.map_dds_images(dds_images, f"{dir}/ddsImage")
for dir in data_dirs:
self.logger.info(f"Read from {dir}")
await self.read_events(f"{dir}/event")
await self.read_music(f"{dir}/music")
await self.read_music(f"{dir}/music", we_diff)
await self.read_charges(f"{dir}/chargeItem")
await self.read_avatar(f"{dir}/avatarAccessory")
await self.read_login_bonus(f"{dir}/")
await self.read_nameplate(f"{dir}/namePlate")
await self.read_trophy(f"{dir}/trophy")
await self.read_character(f"{dir}/chara", dds_images)
await self.read_map_icon(f"{dir}/mapIcon")
await self.read_system_voice(f"{dir}/systemVoice")
async def read_login_bonus(self, root_dir: str) -> None:
for root, dirs, files in walk(f"{root_dir}loginBonusPreset"):
@ -55,9 +72,8 @@ class ChuniReader(BaseReader):
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
is_enabled = (
True if xml_root.find("disableFlag").text == "false" else False
)
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
result = await self.data.static.put_login_bonus_preset(
self.version, id, name, is_enabled
@ -138,7 +154,10 @@ class ChuniReader(BaseReader):
else:
self.logger.warning(f"Failed to insert event {id}")
async def read_music(self, music_dir: str) -> None:
async def read_music(self, music_dir: str, we_diff: str = "4") -> None:
max_title_len = MusicTable.columns["title"].type.length
max_artist_len = MusicTable.columns["artist"].type.length
for root, dirs, files in walk(music_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/Music.xml"):
@ -149,9 +168,15 @@ class ChuniReader(BaseReader):
for name in xml_root.findall("name"):
song_id = name.find("id").text
title = name.find("str").text
if len(title) > max_title_len:
self.logger.warning(f"Truncating music {song_id} song title")
title = title[:max_title_len]
for artistName in xml_root.findall("artistName"):
artist = artistName.find("str").text
if len(artist) > max_artist_len:
self.logger.warning(f"Truncating music {song_id} artist name")
artist = artist[:max_artist_len]
for genreNames in xml_root.findall("genreNames"):
for list_ in genreNames.findall("list"):
@ -160,6 +185,8 @@ class ChuniReader(BaseReader):
for jaketFile in xml_root.findall("jaketFile"): # nice typo, SEGA
jacket_path = jaketFile.find("path").text
# Save off image for use in frontend
self.copy_image(jacket_path, f"{root}/{dir}", "titles/chuni/img/jacket/")
for fumens in xml_root.findall("fumens"):
for MusicFumenData in fumens.findall("MusicFumenData"):
@ -169,7 +196,7 @@ class ChuniReader(BaseReader):
chart_type = MusicFumenData.find("type")
chart_id = chart_type.find("id").text
chart_diff = chart_type.find("str").text
if chart_diff == "WorldsEnd" and (chart_id == "4" or chart_id == "5"): # 4 in SDBT, 5 in SDHD
if chart_diff == "WorldsEnd" and chart_id == we_diff: # 4 in SDBT, 5 in SDHD
level = float(xml_root.find("starDifType").text)
we_chara = (
xml_root.find("worldsEndTagName")
@ -243,17 +270,212 @@ class ChuniReader(BaseReader):
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
sortName = xml_root.find("sortName").text
category = xml_root.find("category").text
defaultHave = xml_root.find("defaultHave").text == 'true'
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
for image in xml_root.findall("image"):
iconPath = image.find("path").text
self.copy_image(iconPath, f"{root}/{dir}", "titles/chuni/img/avatar/")
for texture in xml_root.findall("texture"):
texturePath = texture.find("path").text
self.copy_image(texturePath, f"{root}/{dir}", "titles/chuni/img/avatar/")
result = await self.data.static.put_avatar(
self.version, id, name, category, iconPath, texturePath
self.version, id, name, category, iconPath, texturePath, is_enabled, defaultHave, sortName
)
if result is not None:
self.logger.info(f"Inserted avatarAccessory {id}")
else:
self.logger.warning(f"Failed to insert avatarAccessory {id}")
async def read_nameplate(self, nameplate_dir: str) -> None:
for root, dirs, files in walk(nameplate_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/NamePlate.xml"):
with open(f"{root}/{dir}/NamePlate.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
sortName = xml_root.find("sortName").text
defaultHave = xml_root.find("defaultHave").text == 'true'
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
for image in xml_root.findall("image"):
texturePath = image.find("path").text
self.copy_image(texturePath, f"{root}/{dir}", "titles/chuni/img/nameplate/")
result = await self.data.static.put_nameplate(
self.version, id, name, texturePath, is_enabled, defaultHave, sortName
)
if result is not None:
self.logger.info(f"Inserted nameplate {id}")
else:
self.logger.warning(f"Failed to insert nameplate {id}")
async def read_trophy(self, trophy_dir: str) -> None:
for root, dirs, files in walk(trophy_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/Trophy.xml"):
with open(f"{root}/{dir}/Trophy.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
rareType = xml_root.find("rareType").text
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
defaultHave = xml_root.find("defaultHave").text == 'true'
result = await self.data.static.put_trophy(
self.version, id, name, rareType, is_enabled, defaultHave
)
if result is not None:
self.logger.info(f"Inserted trophy {id}")
else:
self.logger.warning(f"Failed to insert trophy {id}")
async def read_character(self, chara_dir: str, dds_images: dict) -> None:
for root, dirs, files in walk(chara_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/Chara.xml"):
with open(f"{root}/{dir}/Chara.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
sortName = xml_root.find("sortName").text
for work in xml_root.findall("works"):
worksName = work.find("str").text
rareType = xml_root.find("rareType").text
defaultHave = xml_root.find("defaultHave").text == 'true'
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
# character images are not stored alongside the chara XML; look them up in the dds image map
for image in xml_root.findall("defaultImages"):
imageKey = image.find("str").text
if imageKey in dds_images.keys():
(imageDir, imagePaths) = dds_images[imageKey]
imagePath1 = imagePaths[0] if len(imagePaths) > 0 else ""
imagePath2 = imagePaths[1] if len(imagePaths) > 1 else ""
imagePath3 = imagePaths[2] if len(imagePaths) > 2 else ""
# @note the third image is the image needed for the user box ui
if imagePath3:
self.copy_image(imagePath3, imageDir, "titles/chuni/img/character/")
else:
self.logger.warning(f"Character {id} only has {len(imagePaths)} images. Expected 3")
else:
self.logger.warning(f"Unable to location character {id} images")
result = await self.data.static.put_character(
self.version, id, name, sortName, worksName, rareType, imagePath1, imagePath2, imagePath3, is_enabled, defaultHave
)
if result is not None:
self.logger.info(f"Inserted character {id}")
else:
self.logger.warning(f"Failed to insert character {id}")
async def read_map_icon(self, mapicon_dir: str) -> None:
for root, dirs, files in walk(mapicon_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/MapIcon.xml"):
with open(f"{root}/{dir}/MapIcon.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
sortName = xml_root.find("sortName").text
for image in xml_root.findall("image"):
iconPath = image.find("path").text
self.copy_image(iconPath, f"{root}/{dir}", "titles/chuni/img/mapIcon/")
defaultHave = xml_root.find("defaultHave").text == 'true'
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
result = await self.data.static.put_map_icon(
self.version, id, name, sortName, iconPath, is_enabled, defaultHave
)
if result is not None:
self.logger.info(f"Inserted map icon {id}")
else:
self.logger.warning(f"Failed to map icon {id}")
async def read_system_voice(self, voice_dir: str) -> None:
for root, dirs, files in walk(voice_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/SystemVoice.xml"):
with open(f"{root}/{dir}/SystemVoice.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
sortName = xml_root.find("sortName").text
for image in xml_root.findall("image"):
imagePath = image.find("path").text
self.copy_image(imagePath, f"{root}/{dir}", "titles/chuni/img/systemVoice/")
defaultHave = xml_root.find("defaultHave").text == 'true'
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
result = await self.data.static.put_system_voice(
self.version, id, name, sortName, imagePath, is_enabled, defaultHave
)
if result is not None:
self.logger.info(f"Inserted system voice {id}")
else:
self.logger.warning(f"Failed to system voice {id}")
def copy_image(self, filename: str, src_dir: str, dst_dir: str) -> None:
# Convert the image to png so we can easily display it in the frontend
file_src = path.join(src_dir, filename)
(basename, ext) = path.splitext(filename)
file_dst = path.join(dst_dir, basename) + ".png"
if path.exists(file_src) and not path.exists(file_dst):
try:
im = Image.open(file_src)
im.save(file_dst)
except Exception:
self.logger.warning(f"Failed to convert {filename} to png")
def map_dds_images(self, image_dict: dict, dds_dir: str) -> None:
for root, dirs, files in walk(dds_dir):
for dir in dirs:
directory = f"{root}/{dir}"
if path.exists(f"{directory}/DDSImage.xml"):
with open(f"{directory}/DDSImage.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
name = name.find("str").text
images = []
i = 0
while xml_root.findall(f"ddsFile{i}"):
for ddsFile in xml_root.findall(f"ddsFile{i}"):
images += [ddsFile.find("path").text]
i += 1
image_dict[name] = (directory, images)

View File

@ -1,6 +1,6 @@
from titles.chuni.schema.profile import ChuniProfileData
from titles.chuni.schema.score import ChuniScoreData
from titles.chuni.schema.score import ChuniScoreData, ChuniRomVersion
from titles.chuni.schema.item import ChuniItemData
from titles.chuni.schema.static import ChuniStaticData
__all__ = ["ChuniProfileData", "ChuniScoreData", "ChuniItemData", "ChuniStaticData"]
__all__ = ["ChuniProfileData", "ChuniScoreData", "ChuniRomVersion", "ChuniItemData", "ChuniStaticData"]

View File

@ -1,22 +1,22 @@
from typing import Dict, List, Optional
from sqlalchemy import (
Table,
Column,
UniqueConstraint,
PrimaryKeyConstraint,
Table,
UniqueConstraint,
and_,
delete,
)
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
from sqlalchemy.engine.base import Connection
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.types import JSON, TIMESTAMP, Boolean, Integer, String
from core.data.schema import BaseData, metadata
character = Table(
character: Table = Table(
"chuni_item_character",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -40,7 +40,7 @@ character = Table(
mysql_charset="utf8mb4",
)
item = Table(
item: Table = Table(
"chuni_item_item",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -141,7 +141,7 @@ gacha = Table(
mysql_charset="utf8mb4",
)
print_state = Table(
print_state: Table = Table(
"chuni_item_print_state",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -210,7 +210,7 @@ login_bonus = Table(
mysql_charset="utf8mb4",
)
favorite = Table(
favorite: Table = Table(
"chuni_item_favorite",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -359,10 +359,34 @@ class ChuniItemData(BaseData):
return None
return result.lastrowid
async def get_all_favorites(
self, user_id: int, version: int, fav_kind: int = 1
) -> Optional[List[Row]]:
async def is_favorite(
self, user_id: int, version: int, fav_id: int, fav_kind: int = 1
) -> bool:
sql = favorite.select(
and_(
favorite.c.version == version,
favorite.c.user == user_id,
favorite.c.favId == fav_id,
favorite.c.favKind == fav_kind,
)
)
result = await self.execute(sql)
if result is None:
return False
return True if len(result.all()) else False
async def get_all_favorites(
self,
user_id: int,
version: int,
fav_kind: int = 1,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
sql = select(favorite).where(
and_(
favorite.c.version == version,
favorite.c.user == user_id,
@ -370,6 +394,13 @@ class ChuniItemData(BaseData):
)
)
if limit is not None or offset is not None:
sql = sql.order_by(favorite.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
return None
@ -421,6 +452,31 @@ class ChuniItemData(BaseData):
return None
return result.fetchone()
async def put_favorite_music(self, user_id: int, version: int, music_id: int) -> Optional[int]:
sql = insert(favorite).values(user=user_id, version=version, favId=music_id, favKind=1)
conflict = sql.on_duplicate_key_update(user=user_id, version=version, favId=music_id, favKind=1)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def delete_favorite_music(self, user_id: int, version: int, music_id: int) -> Optional[int]:
sql = delete(favorite).where(
and_(
favorite.c.user==user_id,
favorite.c.version==version,
favorite.c.favId==music_id,
favorite.c.favKind==1
)
)
result = await self.execute(sql)
if result is None:
return None
return result.lastrowid
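A rough usage sketch of how these two helpers pair with is_favorite() for an add/remove favorite endpoint; the wrapper name and argument shape below are hypothetical:

async def toggle_favorite_music(item_db: "ChuniItemData", user_id: int, version: int, music_id: int, is_add: bool) -> Optional[int]:
    # hypothetical convenience wrapper: add or drop a favKind=1 (music) favorite
    if is_add:
        return await item_db.put_favorite_music(user_id, version, music_id)
    return await item_db.delete_favorite_music(user_id, version, music_id)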
async def put_character(self, user_id: int, character_data: Dict) -> Optional[int]:
character_data["user"] = user_id
@ -444,9 +500,18 @@ class ChuniItemData(BaseData):
return None
return result.fetchone()
async def get_characters(self, user_id: int) -> Optional[List[Row]]:
async def get_characters(
self, user_id: int, limit: Optional[int] = None, offset: Optional[int] = None
) -> Optional[List[Row]]:
sql = select(character).where(character.c.user == user_id)
if limit is not None or offset is not None:
sql = sql.order_by(character.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
return None
@ -465,13 +530,26 @@ class ChuniItemData(BaseData):
return None
return result.lastrowid
async def get_items(self, user_id: int, kind: int = None) -> Optional[List[Row]]:
if kind is None:
sql = select(item).where(item.c.user == user_id)
else:
sql = select(item).where(
and_(item.c.user == user_id, item.c.itemKind == kind)
)
async def get_items(
self,
user_id: int,
kind: Optional[int] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
cond = item.c.user == user_id
if kind is not None:
cond &= item.c.itemKind == kind
sql = select(item).where(cond)
if limit is not None or offset is not None:
sql = sql.order_by(item.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
@ -533,8 +611,8 @@ class ChuniItemData(BaseData):
return None
return result.lastrowid
async def get_map_areas(self, user_id: int) -> Optional[List[Row]]:
sql = select(map_area).where(map_area.c.user == user_id)
async def get_map_areas(self, user_id: int, map_area_ids: List[int]) -> Optional[List[Row]]:
sql = select(map_area).where(map_area.c.user == user_id, map_area.c.mapAreaId.in_(map_area_ids))
result = await self.execute(sql)
if result is None:
@ -565,15 +643,26 @@ class ChuniItemData(BaseData):
return result.lastrowid
async def get_user_print_states(
self, aime_id: int, has_completed: bool = False
self,
aime_id: int,
has_completed: bool = False,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
sql = print_state.select(
sql = select(print_state).where(
and_(
print_state.c.user == aime_id,
print_state.c.hasCompleted == has_completed,
)
)
if limit is not None or offset is not None:
sql = sql.order_by(print_state.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
return None
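A rough sketch of how the new limit/offset parameters serve nextIndex/maxCount paging; the handler context and request field names are assumptions:

next_index = int(data["nextIndex"])
max_count = int(data["maxCount"])
# fetch one extra row so we know whether another page exists
rows = await self.data.item.get_characters(user_id, limit=max_count + 1, offset=next_index) or []
if len(rows) > max_count:
    rows = rows[:max_count]
    next_index += max_count   # more rows remain; point the client at the next page
else:
    next_index = -1           # end of the list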

View File

@ -1,3 +1,4 @@
import json
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, and_
from sqlalchemy.types import Integer, String, Boolean, JSON, BigInteger
@ -389,6 +390,7 @@ team = Table(
Column("id", Integer, primary_key=True, nullable=False),
Column("teamName", String(255)),
Column("teamPoint", Integer),
Column("userTeamPoint", JSON),
mysql_charset="utf8mb4",
)
@ -437,6 +439,58 @@ class ChuniProfileData(BaseData):
return False
return True
async def update_map_icon(self, user_id: int, version: int, new_map_icon: int) -> bool:
sql = profile.update((profile.c.user == user_id) & (profile.c.version == version)).values(
mapIconId=new_map_icon
)
result = await self.execute(sql)
if result is None:
self.logger.warning(f"Failed to set user {user_id} map icon")
return False
return True
async def update_system_voice(self, user_id: int, version: int, new_system_voice: int) -> bool:
sql = profile.update((profile.c.user == user_id) & (profile.c.version == version)).values(
voiceId=new_system_voice
)
result = await self.execute(sql)
if result is None:
self.logger.warning(f"Failed to set user {user_id} system voice")
return False
return True
async def update_userbox(self, user_id: int, version: int, new_nameplate: int, new_trophy: int, new_character: int) -> bool:
sql = profile.update((profile.c.user == user_id) & (profile.c.version == version)).values(
nameplateId=new_nameplate,
trophyId=new_trophy,
charaIllustId=new_character
)
result = await self.execute(sql)
if result is None:
self.logger.warning(f"Failed to set user {user_id} userbox")
return False
return True
async def update_avatar(self, user_id: int, version: int, new_wear: int, new_face: int, new_head: int, new_skin: int, new_item: int, new_front: int, new_back: int) -> bool:
sql = profile.update((profile.c.user == user_id) & (profile.c.version == version)).values(
avatarWear=new_wear,
avatarFace=new_face,
avatarHead=new_head,
avatarSkin=new_skin,
avatarItem=new_item,
avatarFront=new_front,
avatarBack=new_back
)
result = await self.execute(sql)
if result is None:
self.logger.warning(f"Failed to set user {user_id} avatar")
return False
return True
async def put_profile_data(
self, aime_id: int, version: int, profile_data: Dict
) -> Optional[int]:
@ -705,12 +759,36 @@ class ChuniProfileData(BaseData):
# Return the rank if found, or a default rank otherwise
return rank if rank is not None else 0
# RIP scaled team ranking. Gone, but forgotten
# def get_team_rank_scaled(self, team_id: int) -> int:
async def update_team(self, team_id: int, team_data: Dict) -> bool:
async def update_team(self, team_id: int, team_data: Dict, user_id: str, user_point_delta: int) -> bool:
# Update the team data
team_data["id"] = team_id
existing_team = await self.get_team_by_id(team_id)
if existing_team is None or "userTeamPoint" not in existing_team:
self.logger.warn(
f"update_team: Failed to update team! team id: {team_id}. Existing team data not found."
)
return False
user_team_point_data = []
if existing_team["userTeamPoint"] is not None and existing_team["userTeamPoint"] != "":
user_team_point_data = json.loads(existing_team["userTeamPoint"])
updated = False
# Try to find the user in the existing data and update their points
for user_point_data in user_team_point_data:
if user_point_data["user"] == user_id:
user_point_data["userPoint"] = str(int(user_point_delta))
updated = True
break
# If the user was not found, add them to the data with the new points
if not updated:
user_team_point_data.append({"user": user_id, "userPoint": str(user_point_delta)})
# Update the team's userTeamPoint field in the team data
team_data["userTeamPoint"] = json.dumps(user_team_point_data)
# Update the team in the database
sql = insert(team).values(**team_data)
conflict = sql.on_duplicate_key_update(**team_data)
@ -722,6 +800,7 @@ class ChuniProfileData(BaseData):
)
return False
return True
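For reference, a hedged sketch of what the new userTeamPoint column holds and how update_team() is called with the extra arguments; the calling context is an assumption:

# userTeamPoint is a JSON-encoded list of per-member entries, e.g.
#   [{"user": "1234", "userPoint": "500"}, {"user": "5678", "userPoint": "120"}]
await self.data.profile.update_team(team_id, team_data, str(user_id), user_point)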
async def get_rival(self, rival_id: int) -> Optional[Row]:
sql = select(profile).where(profile.c.user == rival_id)
result = await self.execute(sql)
@ -793,9 +872,9 @@ class ChuniProfileData(BaseData):
versions = [row[0] for row in versions_raw]
return sorted(versions, reverse=True)
async def put_net_battle(self, user_id: int, net_battle_data: Dict) -> Optional[int]:
async def put_net_battle(self, aime_id: int, net_battle_data: Dict) -> Optional[int]:
sql = insert(net_battle).values(
user=user_id,
user=aime_id,
isRankUpChallengeFailed=net_battle_data['isRankUpChallengeFailed'],
highestBattleRankId=net_battle_data['highestBattleRankId'],
battleIconId=net_battle_data['battleIconId'],
@ -814,9 +893,9 @@ class ChuniProfileData(BaseData):
result = await self.execute(conflict)
if result:
return result.inserted_primary_key['id']
self.logger.error(f"Failed to put net battle data for user {user_id}")
self.logger.error(f"Failed to put net battle data for user {aime_id}")
async def get_net_battle(self, user_id: int, net_battle_data: Dict) -> Optional[Row]:
result = await self.execute(net_battle.select(net_battle.c.user == user_id))
async def get_net_battle(self, aime_id: int) -> Optional[Row]:
result = await self.execute(net_battle.select(net_battle.c.user == aime_id))
if result:
return result.fetchone()

View File

@ -1,15 +1,17 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BigInteger
from sqlalchemy.engine.base import Connection
from sqlalchemy.schema import ForeignKey
from sqlalchemy.engine import Row
from sqlalchemy.sql import func, select
from sqlalchemy import Column, Table, UniqueConstraint
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql.expression import exists
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.types import Boolean, Integer, String
from core.data.schema import BaseData, metadata
course = Table(
from ..config import ChuniConfig
course: Table = Table(
"chuni_score_course",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -40,7 +42,7 @@ course = Table(
mysql_charset="utf8mb4",
)
best_score = Table(
best_score: Table = Table(
"chuni_score_best",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -140,11 +142,109 @@ playlog = Table(
mysql_charset="utf8mb4"
)
class ChuniRomVersion():
"""
Class used to easily compare rom version strings and map back to the internal integer version.
Used with methods that touch the playlog table.
"""
Versions = {}
def init_versions(cfg: ChuniConfig):
if len(ChuniRomVersion.Versions) > 0:
# don't bother with re-initializing
return
# Build up an easily comparable list of versions. Used when deriving romVersion from the playlog
all_versions = {
10: ChuniRomVersion("1.50.0"),
9: ChuniRomVersion("1.45.0"),
8: ChuniRomVersion("1.40.0"),
7: ChuniRomVersion("1.35.0"),
6: ChuniRomVersion("1.30.0"),
5: ChuniRomVersion("1.25.0"),
4: ChuniRomVersion("1.20.0"),
3: ChuniRomVersion("1.15.0"),
2: ChuniRomVersion("1.10.0"),
1: ChuniRomVersion("1.05.0"),
0: ChuniRomVersion("1.00.0")
}
# add the versions from the config
for ver in range(11,999):
cfg_ver = cfg.version.version(ver)
if cfg_ver:
all_versions[ver] = ChuniRomVersion(cfg_ver["rom"])
else:
break
# sort it by version number for easy iteration
ChuniRomVersion.Versions = dict(sorted(all_versions.items()))
def __init__(self, rom_version: str) -> None:
(major, minor, maint) = rom_version.split('.')
self.major = int(major)
self.minor = int(minor)
self.maint = int(maint)
self.version = rom_version
def __str__(self) -> str:
return self.version
def __eq__(self, other) -> bool:
return (self.major == other.major and
self.minor == other.minor and
self.maint == other.maint)
def __lt__(self, other) -> bool:
return (self.major < other.major) or \
(self.major == other.major and self.minor < other.minor) or \
(self.major == other.major and self.minor == other.minor and self.maint < other.maint)
def __gt__(self, other) -> bool:
return (self.major > other.major) or \
(self.major == other.major and self.minor > other.minor) or \
(self.major == other.major and self.minor == other.minor and self.maint > other.maint)
def get_int_version(self) -> int:
"""
Used when displaying the playlog to walk backwards from the recorded romVersion to our internal version number.
This is effectively a workaround to avoid recording our internal version number along with the romVersion in the db at insert time.
"""
for ver,rom in ChuniRomVersion.Versions.items():
# if the version matches exactly, great!
if self == rom:
return ver
# If this isn't the last version, use the next one as an upper bound
if ver + 1 < len(ChuniRomVersion.Versions):
if self > rom and self < ChuniRomVersion.Versions[ver + 1]:
# this version fits in the middle! It must be a revision of the version
# e.g. 2.15.00 vs 2.16.00
return ver
else:
# this is the last version in the list.
# If it's greater than this one and still the same major, call it a match
if self.major == rom.major and self > rom:
return ver
# The only way we get here is if the version started with "0.", which is definitely invalid
return -1
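A rough sketch of the intended flow, assuming init_versions() has run with the loaded ChuniConfig; the mapping for versions above 10 depends on what that config provides:

ChuniRomVersion.init_versions(cfg)            # once at startup
ChuniRomVersion("1.50.0").get_int_version()   # exact match against the built-in table -> 10
ChuniRomVersion("2.16.00").get_int_version()  # revision between two configured versions -> maps to the lower one
ChuniRomVersion("0.50.0").get_int_version()   # below every known version -> -1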
class ChuniScoreData(BaseData):
async def get_courses(self, aime_id: int) -> Optional[Row]:
async def get_courses(
self,
aime_id: int,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
sql = select(course).where(course.c.user == aime_id)
if limit is not None or offset is not None:
sql = sql.order_by(course.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
return None
@ -162,8 +262,45 @@ class ChuniScoreData(BaseData):
return None
return result.lastrowid
async def get_scores(self, aime_id: int) -> Optional[Row]:
sql = select(best_score).where(best_score.c.user == aime_id)
async def get_scores(
self,
aime_id: int,
levels: Optional[list[int]] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
condition = best_score.c.user == aime_id
if levels is not None:
condition &= best_score.c.level.in_(levels)
if limit is None and offset is None:
sql = (
select(best_score)
.where(condition)
.order_by(best_score.c.musicId.asc(), best_score.c.level.asc())
)
else:
subq = (
select(best_score.c.musicId)
.distinct()
.where(condition)
.order_by(best_score.c.musicId)
)
if limit is not None:
subq = subq.limit(limit)
if offset is not None:
subq = subq.offset(offset)
subq = subq.subquery()
sql = (
select(best_score)
.join(subq, best_score.c.musicId == subq.c.musicId)
.where(condition)
.order_by(best_score.c.musicId, best_score.c.level)
)
result = await self.execute(sql)
if result is None:
@ -190,76 +327,82 @@ class ChuniScoreData(BaseData):
return None
return result.fetchall()
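Worth noting: the paged branch limits distinct musicId values rather than raw rows, so every difficulty of a song lands in the same page (and offset counts songs, not rows). A rough regrouping sketch, with handler names assumed:

rows = await self.data.score.get_scores(user_id, limit=max_count, offset=next_index) or []
by_music: dict[int, list] = {}
for row in rows:
    # all levels of a given musicId arrive together thanks to the subquery join
    by_music.setdefault(row.musicId, []).append(row)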
async def get_playlogs_limited(self, aime_id: int, index: int, count: int) -> Optional[Row]:
sql = select(playlog).where(playlog.c.user == aime_id).order_by(playlog.c.id.desc()).limit(count).offset(index * count)
async def get_playlog_rom_versions_by_int_version(self, version: int, aime_id: int = -1) -> Optional[List[str]]:
# Get a set of all romVersion values present
sql = select([playlog.c.romVersion])
if aime_id != -1:
# limit results to a specific user
sql = sql.where(playlog.c.user == aime_id)
sql = sql.distinct()
result = await self.execute(sql)
if result is None:
self.logger.warning(f" aime_id {aime_id} has no playlog ")
return None
record_versions = result.fetchall()
# for each romVersion recorded, check if it maps back the current version we are operating on
matching_rom_versions = []
for v in record_versions:
if ChuniRomVersion(v[0]).get_int_version() == version:
matching_rom_versions += [v[0]]
self.logger.debug(f"romVersions {matching_rom_versions} map to version {version}")
return matching_rom_versions
async def get_playlogs_limited(self, aime_id: int, version: int, index: int, count: int) -> Optional[List[Row]]:
# Get a list of all the recorded romVersions in the playlog
# for this user that map to the given version.
rom_versions = await self.get_playlog_rom_versions_by_int_version(version, aime_id)
if rom_versions is None:
return None
# Query results that have the matching romVersions
sql = select(playlog).where((playlog.c.user == aime_id) & (playlog.c.romVersion.in_(rom_versions))).order_by(playlog.c.id.desc()).limit(count).offset(index * count)
result = await self.execute(sql)
if result is None:
self.logger.info(f" aime_id {aime_id} has no playlog for version {version}")
return None
return result.fetchall()
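A rough sketch of the two-step flow a playlog handler now goes through; the calling names are assumptions:

# 1) count only plays whose recorded romVersion maps back to this game version
total = await self.data.score.get_user_playlogs_count(user_id, self.version)
# 2) page through those same plays, newest first
page = await self.data.score.get_playlogs_limited(user_id, self.version, index=0, count=20)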
async def get_user_playlogs_count(self, aime_id: int) -> Optional[Row]:
sql = select(func.count()).where(playlog.c.user == aime_id)
async def get_user_playlogs_count(self, aime_id: int, version: int) -> Optional[int]:
# Get a list of all the recorded romVersions in the playlog
# for this user that map to the given version.
rom_versions = await self.get_playlog_rom_versions_by_int_version(version, aime_id)
if rom_versions is None:
return None
# Query results that have the matching romVersions
sql = select(func.count()).where((playlog.c.user == aime_id) & (playlog.c.romVersion.in_(rom_versions)))
result = await self.execute(sql)
if result is None:
self.logger.warning(f" aime_id {aime_id} has no playlog ")
return None
self.logger.info(f" aime_id {aime_id} has no playlog for version {version}")
return 0
return result.scalar()
async def put_playlog(self, aime_id: int, playlog_data: Dict, version: int) -> Optional[int]:
# Calculate the ROM version that should be inserted into the DB, based on the version of the game being inserted
# We only need from Version 10 (Plost) and back, as newer versions include romVersion in their upsert
# This matters both for gameRankings, as well as a future DB update to keep version data separate
romVer = {
10: "1.50.0",
9: "1.45.0",
8: "1.40.0",
7: "1.35.0",
6: "1.30.0",
5: "1.25.0",
4: "1.20.0",
3: "1.15.0",
2: "1.10.0",
1: "1.05.0",
0: "1.00.0"
}
playlog_data["user"] = aime_id
playlog_data = self.fix_bools(playlog_data)
# If the romVersion is not in the data (Version 10 and earlier), look it up from our internal mapping
if "romVersion" not in playlog_data:
playlog_data["romVersion"] = romVer.get(version, "1.00.0")
playlog_data["romVersion"] = ChuniRomVersion.Versions[version]
sql = insert(playlog).values(**playlog_data)
conflict = sql.on_duplicate_key_update(**playlog_data)
result = await self.execute(conflict)
result = await self.execute(sql)
if result is None:
return None
return result.lastrowid
async def get_rankings(self, version: int) -> Optional[List[Dict]]:
# Calculates the ROM version that should be fetched for rankings, based on the game version being retrieved
# This prevents tracks that are not accessible in your version from counting towards the 10 results
romVer = {
15: "2.20%",
14: "2.15%",
13: "2.10%",
12: "2.05%",
11: "2.00%",
10: "1.50%",
9: "1.45%",
8: "1.40%",
7: "1.35%",
6: "1.30%",
5: "1.25%",
4: "1.20%",
3: "1.15%",
2: "1.10%",
1: "1.05%",
0: "1.00%"
}
sql = select([playlog.c.musicId.label('id'), func.count(playlog.c.musicId).label('point')]).where((playlog.c.level != 4) & (playlog.c.romVersion.like(romVer.get(version, "%")))).group_by(playlog.c.musicId).order_by(func.count(playlog.c.musicId).desc()).limit(10)
# Get a list of all the recorded romVersions in the playlog for the given version
rom_versions = await self.get_playlog_rom_versions_by_int_version(version)
if rom_versions is None:
return None
# Query results that have the matching romVersions
sql = select([playlog.c.musicId.label('id'), func.count(playlog.c.musicId).label('point')]).where((playlog.c.level != 4) & (playlog.c.romVersion.in_(rom_versions))).group_by(playlog.c.musicId).order_by(func.count(playlog.c.musicId).desc()).limit(10)
result = await self.execute(sql)
if result is None:
@ -267,11 +410,3 @@ class ChuniScoreData(BaseData):
rows = result.fetchall()
return [dict(row) for row in rows]
async def get_rival_music(self, rival_id: int) -> Optional[List[Dict]]:
sql = select(best_score).where(best_score.c.user == rival_id)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()

View File

@ -73,10 +73,91 @@ avatar = Table(
Column("category", Integer),
Column("iconPath", String(255)),
Column("texturePath", String(255)),
Column("isEnabled", Boolean, server_default="1"),
Column("defaultHave", Boolean, server_default="0"),
Column("sortName", String(255)),
UniqueConstraint("version", "avatarAccessoryId", name="chuni_static_avatar_uk"),
mysql_charset="utf8mb4",
)
nameplate = Table(
"chuni_static_nameplate",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("version", Integer, nullable=False),
Column("nameplateId", Integer),
Column("name", String(255)),
Column("texturePath", String(255)),
Column("isEnabled", Boolean, server_default="1"),
Column("defaultHave", Boolean, server_default="0"),
Column("sortName", String(255)),
UniqueConstraint("version", "nameplateId", name="chuni_static_nameplate_uk"),
mysql_charset="utf8mb4",
)
character = Table(
"chuni_static_character",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("version", Integer, nullable=False),
Column("characterId", Integer),
Column("name", String(255)),
Column("sortName", String(255)),
Column("worksName", String(255)),
Column("rareType", Integer),
Column("imagePath1", String(255)),
Column("imagePath2", String(255)),
Column("imagePath3", String(255)),
Column("isEnabled", Boolean, server_default="1"),
Column("defaultHave", Boolean, server_default="0"),
UniqueConstraint("version", "characterId", name="chuni_static_character_uk"),
mysql_charset="utf8mb4",
)
trophy = Table(
"chuni_static_trophy",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("version", Integer, nullable=False),
Column("trophyId", Integer),
Column("name", String(255)),
Column("rareType", Integer),
Column("isEnabled", Boolean, server_default="1"),
Column("defaultHave", Boolean, server_default="0"),
UniqueConstraint("version", "trophyId", name="chuni_static_trophy_uk"),
mysql_charset="utf8mb4",
)
map_icon = Table(
"chuni_static_map_icon",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("version", Integer, nullable=False),
Column("mapIconId", Integer),
Column("name", String(255)),
Column("sortName", String(255)),
Column("iconPath", String(255)),
Column("isEnabled", Boolean, server_default="1"),
Column("defaultHave", Boolean, server_default="0"),
UniqueConstraint("version", "mapIconId", name="chuni_static_mapicon_uk"),
mysql_charset="utf8mb4",
)
system_voice = Table(
"chuni_static_system_voice",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("version", Integer, nullable=False),
Column("voiceId", Integer),
Column("name", String(255)),
Column("sortName", String(255)),
Column("imagePath", String(255)),
Column("isEnabled", Boolean, server_default="1"),
Column("defaultHave", Boolean, server_default="0"),
UniqueConstraint("version", "voiceId", name="chuni_static_systemvoice_uk"),
mysql_charset="utf8mb4",
)
gachas = Table(
"chuni_static_gachas",
metadata,
@ -454,7 +535,7 @@ class ChuniStaticData(BaseData):
return result.fetchone()
async def get_song(self, music_id: int) -> Optional[Row]:
sql = music.select(music.c.id == music_id)
sql = music.select(music.c.songId == music_id)
result = await self.execute(sql)
if result is None:
@ -470,6 +551,9 @@ class ChuniStaticData(BaseData):
category: int,
iconPath: str,
texturePath: str,
isEnabled: int,
defaultHave: int,
sortName: str
) -> Optional[int]:
sql = insert(avatar).values(
version=version,
@ -478,6 +562,9 @@ class ChuniStaticData(BaseData):
category=category,
iconPath=iconPath,
texturePath=texturePath,
isEnabled=isEnabled,
defaultHave=defaultHave,
sortName=sortName
)
conflict = sql.on_duplicate_key_update(
@ -485,6 +572,9 @@ class ChuniStaticData(BaseData):
category=category,
iconPath=iconPath,
texturePath=texturePath,
isEnabled=isEnabled,
defaultHave=defaultHave,
sortName=sortName
)
result = await self.execute(conflict)
@ -492,6 +582,246 @@ class ChuniStaticData(BaseData):
return None
return result.lastrowid
async def get_avatar_items(self, version: int, category: int, enabled_only: bool = True) -> Optional[List[Dict]]:
if enabled_only:
sql = select(avatar).where((avatar.c.version == version) & (avatar.c.category == category) & (avatar.c.isEnabled)).order_by(avatar.c.sortName)
else:
sql = select(avatar).where((avatar.c.version == version) & (avatar.c.category == category)).order_by(avatar.c.sortName)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_nameplate(
self,
version: int,
nameplateId: int,
name: str,
texturePath: str,
isEnabled: int,
defaultHave: int,
sortName: str
) -> Optional[int]:
sql = insert(nameplate).values(
version=version,
nameplateId=nameplateId,
name=name,
texturePath=texturePath,
isEnabled=isEnabled,
defaultHave=defaultHave,
sortName=sortName
)
conflict = sql.on_duplicate_key_update(
name=name,
texturePath=texturePath,
isEnabled=isEnabled,
defaultHave=defaultHave,
sortName=sortName
)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def get_nameplates(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
if enabled_only:
sql = select(nameplate).where((nameplate.c.version == version) & (nameplate.c.isEnabled)).order_by(nameplate.c.sortName)
else:
sql = select(nameplate).where(nameplate.c.version == version).order_by(nameplate.c.sortName)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_trophy(
self,
version: int,
trophyId: int,
name: str,
rareType: int,
isEnabled: int,
defaultHave: int,
) -> Optional[int]:
sql = insert(trophy).values(
version=version,
trophyId=trophyId,
name=name,
rareType=rareType,
isEnabled=isEnabled,
defaultHave=defaultHave
)
conflict = sql.on_duplicate_key_update(
name=name,
rareType=rareType,
isEnabled=isEnabled,
defaultHave=defaultHave
)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def get_trophies(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
if enabled_only:
sql = select(trophy).where((trophy.c.version == version) & (trophy.c.isEnabled)).order_by(trophy.c.name)
else:
sql = select(trophy).where(trophy.c.version == version).order_by(trophy.c.name)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_map_icon(
self,
version: int,
mapIconId: int,
name: str,
sortName: str,
iconPath: str,
isEnabled: int,
defaultHave: int,
) -> Optional[int]:
sql = insert(map_icon).values(
version=version,
mapIconId=mapIconId,
name=name,
sortName=sortName,
iconPath=iconPath,
isEnabled=isEnabled,
defaultHave=defaultHave
)
conflict = sql.on_duplicate_key_update(
name=name,
sortName=sortName,
iconPath=iconPath,
isEnabled=isEnabled,
defaultHave=defaultHave
)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def get_map_icons(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
if enabled_only:
sql = select(map_icon).where((map_icon.c.version == version) & (map_icon.c.isEnabled)).order_by(map_icon.c.sortName)
else:
sql = select(map_icon).where(map_icon.c.version == version).order_by(map_icon.c.sortName)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_system_voice(
self,
version: int,
voiceId: int,
name: str,
sortName: str,
imagePath: str,
isEnabled: int,
defaultHave: int,
) -> Optional[int]:
sql = insert(system_voice).values(
version=version,
voiceId=voiceId,
name=name,
sortName=sortName,
imagePath=imagePath,
isEnabled=isEnabled,
defaultHave=defaultHave
)
conflict = sql.on_duplicate_key_update(
name=name,
sortName=sortName,
imagePath=imagePath,
isEnabled=isEnabled,
defaultHave=defaultHave
)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def get_system_voices(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
if enabled_only:
sql = select(system_voice).where((system_voice.c.version == version) & (system_voice.c.isEnabled)).order_by(system_voice.c.sortName)
else:
sql = select(system_voice).where(system_voice.c.version == version).order_by(system_voice.c.sortName)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_character(
self,
version: int,
characterId: int,
name: str,
sortName: str,
worksName: str,
rareType: int,
imagePath1: str,
imagePath2: str,
imagePath3: str,
isEnabled: int,
defaultHave: int
) -> Optional[int]:
sql = insert(character).values(
version=version,
characterId=characterId,
name=name,
sortName=sortName,
worksName=worksName,
rareType=rareType,
imagePath1=imagePath1,
imagePath2=imagePath2,
imagePath3=imagePath3,
isEnabled=isEnabled,
defaultHave=defaultHave
)
conflict = sql.on_duplicate_key_update(
name=name,
sortName=sortName,
worksName=worksName,
rareType=rareType,
imagePath1=imagePath1,
imagePath2=imagePath2,
imagePath3=imagePath3,
isEnabled=isEnabled,
defaultHave=defaultHave
)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def get_characters(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
if enabled_only:
sql = select(character).where((character.c.version == version) & (character.c.isEnabled)).order_by(character.c.sortName)
else:
sql = select(character).where(character.c.version == version).order_by(character.c.sortName)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
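A small usage sketch: the new static getters default to enabled-only rows sorted for display, which is what the customization pages iterate over; pass enabled_only=False to list everything (calling names assumed):

nameplates = await self.data.static.get_nameplates(self.version)                      # enabled only, sorted by sortName
all_chars = await self.data.static.get_characters(self.version, enabled_only=False)   # include disabled entries too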
async def put_gacha(
self,
version: int,

View File

@ -16,4 +16,14 @@ class ChuniSun(ChuniNewPlus):
# hardcode lastDataVersion for CardMaker 1.35 A032
user_data["lastDataVersion"] = "2.10.00"
return user_data
return user_data
#SDGS Exclusive
async def handle_get_user_cto_c_play_api_request(self, data: Dict) -> Dict:
return {
"userId": data["userId"],
"orderBy": "0",
"count": "0",
# the game requests CtoC play history at login, but it seems unused(?)
"userCtoCPlayList": [],
}

View File

@ -0,0 +1,309 @@
{% extends "core/templates/index.jinja" %}
{% block content %}
<style>
{% include 'titles/chuni/templates/css/chuni_style.css' %}
</style>
<div class="container">
{% include 'titles/chuni/templates/chuni_header.jinja' %}
<!-- AVATAR PREVIEW -->
<div class="row">
<div class="col-lg-8 m-auto mt-3">
<div class="card bg-card rounded">
<table class="table-large table-rowdistinct">
<caption align="top">AVATAR</caption>
<tr><td style="height:340px; width:50%" rowspan=8>
<img class="avatar-preview avatar-preview-platform" src="img/avatar-platform.png">
<img id="preview1_back" class="avatar-preview avatar-preview-back" src="">
<img id="preview1_skin" class="avatar-preview avatar-preview-skin-rightfoot" src="">
<img id="preview2_skin" class="avatar-preview avatar-preview-skin-leftfoot" src="">
<img id="preview3_skin" class="avatar-preview avatar-preview-skin-body" src="">
<img id="preview1_wear" class="avatar-preview avatar-preview-wear" src="">
<img class="avatar-preview avatar-preview-common" src="img/avatar-common.png">
<img id="preview1_head" class="avatar-preview avatar-preview-head" src="">
<img id="preview1_face" class="avatar-preview avatar-preview-face" src="">
<img id="preview1_item" class="avatar-preview avatar-preview-item-righthand" src="">
<img id="preview2_item" class="avatar-preview avatar-preview-item-lefthand" src="">
<img id="preview1_front" class="avatar-preview avatar-preview-front" src="">
</td>
</tr>
<tr><td>Wear:</td><td><div id="name_wear"></div></td></tr>
<tr><td>Face:</td><td><div id="name_face"></div></td></tr>
<tr><td>Head:</td><td><div id="name_head"></div></td></tr>
<tr><td>Skin:</td><td><div id="name_skin"></div></td></tr>
<tr><td>Item:</td><td><div id="name_item"></div></td></tr>
<tr><td>Front:</td><td><div id="name_front"></div></td></tr>
<tr><td>Back:</td><td><div id="name_back"></div></td></tr>
<tr><td colspan=3 style="padding:8px 0px; text-align: center;">
<button id="save-btn" class="btn btn-primary" style="width:140px;" onClick="saveAvatar()">SAVE</button>&nbsp;&nbsp;&nbsp;&nbsp;
<button id="reset-btn" class="btn btn-danger" style="width:140px;" onClick="resetAvatar()">RESET</button>
</td></tr>
</table>
</div>
</div>
</div>
<!-- ACCESSORY SELECTION -->
<div class="row col-lg-8 m-auto mt-3 scrolling-lists-lg card bg-card rounded">
<!-- WEAR ACCESSORIES -->
<button class="collapsible">Wear:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ wears|length }}/{{ total_wears }} {{ "items" if total_wears > 1 else "item" }}</button>
<div id="scrollable-wear" class="collapsible-content">
{% for item in wears.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('wear', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="wear-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- FACE ACCESSORIES -->
<button class="collapsible">Face:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ faces|length }}/{{ total_faces }} {{ "items" if total_faces > 1 else "item" }}</button>
<div id="scrollable-face" class="collapsible-content">
{% for item in faces.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('face', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="face-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- HEAD ACCESSORIES -->
<button class="collapsible">Head:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ heads|length }}/{{ total_heads }} {{ "items" if total_heads > 1 else "item" }}</button>
<div id="scrollable-head" class="collapsible-content">
{% for item in heads.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('head', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="head-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- SKIN ACCESSORIES -->
<button class="collapsible">Skin:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ skins|length }}/{{ total_skins }} {{ "items" if total_skins > 1 else "item" }}</button>
<div id="scrollable-skin" class="collapsible-content">
{% for item in skins.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('skin', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="skin-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- ITEM ACCESSORIES -->
<button class="collapsible">Item:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ items|length }}/{{ total_items }} {{ "items" if total_items > 1 else "item" }}</button>
<div id="scrollable-item" class="collapsible-content">
{% for item in items.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('item', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="item-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- FRONT ACCESSORIES -->
<button class="collapsible">Front:&nbsp;&nbsp;&nbsp;&nbsp;{{ fronts|length }}/{{ total_fronts }} {{ "items" if total_fronts > 1 else "item" }}</button>
<div id="scrollable-front" class="collapsible-content">
{% for item in fronts.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('front', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="front-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- BACK ACCESSORIES -->
<button class="collapsible">Back:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ backs|length }}/{{ total_backs }} {{ "items" if total_backs > 1 else "item" }}</button>
<div id="scrollable-back" class="collapsible-content">
{% for item in backs.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('back', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="back-br-{{ loop.index }}"></span>
{% endfor %}
</div>
</div>
{% if error is defined %}
{% include "core/templates/widgets/err_banner.jinja" %}
{% endif %}
</div>
{% if wears|length == 0 or faces|length == 0 or heads|length == 0 or skins|length == 0 or items|length == 0 or fronts|length == 0 or backs|length == 0 %}
<script>
// Server DB lacks necessary info. Maybe the importer was never run for this version?
document.getElementById("name_wear").innerHTML = "Server DB needs to be upgraded or is not populated with necessary data";
</script>
{% else %}
<script>
{% include 'titles/chuni/templates/scripts/collapsibles.js' %}
///
/// This script handles all updates to the avatar
///
total_items = 0;
orig_id = 1;
orig_name = 2;
orig_img = 3;
curr_id = 4;
curr_name = 5;
curr_img = 6;
accessories = {
// [total_items, orig_id, orig_name, orig_img, curr_id, curr_name, curr_img]
"wear":["{{ wears|length }}",
"{{ profile.avatarWear }}",
"{{ wears[profile.avatarWear]["name"] }}",
"{{ wears[profile.avatarWear]["texturePath"] }}", "", "", "" ],
"face":["{{ faces|length }}",
"{{ profile.avatarFace }}",
"{{ faces[profile.avatarFace]["name"] }}",
"{{ faces[profile.avatarFace]["texturePath"] }}", "", "", "" ],
"head":["{{ heads|length }}",
"{{ profile.avatarHead }}",
"{{ heads[profile.avatarHead]["name"] }}",
"{{ heads[profile.avatarHead]["texturePath"] }}", "", "", "" ],
"skin":["{{ skins|length }}",
"{{ profile.avatarSkin }}",
"{{ skins[profile.avatarSkin]["name"] }}",
"{{ skins[profile.avatarSkin]["texturePath"] }}", "", "", "" ],
"item":["{{ items|length }}",
"{{ profile.avatarItem }}",
"{{ items[profile.avatarItem]["name"] }}",
"{{ items[profile.avatarItem]["texturePath"] }}", "", "", "" ],
"front":["{{ fronts|length }}",
"{{ profile.avatarFront }}",
"{{ fronts[profile.avatarFront]["name"] }}",
"{{ fronts[profile.avatarFront]["texturePath"] }}", "", "", "" ],
"back":["{{ backs|length }}",
"{{ profile.avatarBack }}",
"{{ backs[profile.avatarBack]["name"] }}",
"{{ backs[profile.avatarBack]["texturePath"] }}", "", "", "" ]
};
types = Object.keys(accessories);
function enableButtons(enabled) {
document.getElementById("reset-btn").disabled = !enabled;
document.getElementById("save-btn").disabled = !enabled;
}
function changeAccessory(type, id, name, img) {
// clear select style for old accessory
var element = document.getElementById(accessories[type][curr_id]);
if (element) {
element.style.backgroundColor="inherit";
}
// set new accessory
accessories[type][curr_id] = id;
accessories[type][curr_name] = name;
accessories[type][curr_img] = img;
// update select style for new accessory
element = document.getElementById(id);
if (element) {
element.style.backgroundColor="#5F5";
}
// Update the avatar preview and enable buttons
updatePreview();
if (id != accessories[type][orig_id]) {
enableButtons(true);
}
}
function resetAvatar() {
for (const type of types) {
changeAccessory(type, accessories[type][orig_id], accessories[type][orig_name], accessories[type][orig_img]);
}
// disable the save/reset buttons until something changes
enableButtons(false);
}
function getRandomInt(min, max) {
min = Math.ceil(min);
max = Math.floor(max);
return Math.floor(Math.random() * (max - min + 1)) + min;
}
function updatePreview() {
for (const type of types) {
for (let i = 1; i <= 3; i++) {
var img = document.getElementById("preview" + i + "_" + type);
if (img) {
img.src = "img/avatar/" + accessories[type][curr_img];
}
}
document.getElementById("name_" + type).innerHTML = accessories[type][curr_name];
}
}
function saveAvatar() {
$.post("/game/chuni/update.avatar", { wear: accessories["wear"][curr_id],
face: accessories["face"][curr_id],
head: accessories["head"][curr_id],
skin: accessories["skin"][curr_id],
item: accessories["item"][curr_id],
front: accessories["front"][curr_id],
back: accessories["back"][curr_id] })
.done(function (data) {
// set the current as the original and disable buttons
for (const type of types) {
accessories[type][orig_id] = accessories[type][curr_id];
accessories[type][orig_name] = accessories[type][curr_name];
accessories[type][orig_img] = accessories[type][curr_img];
}
enableButtons(false);
})
.fail(function () {
alert("Failed to save avatar.");
});
}
function resizePage() {
//
// Handles item organization in the collapsible scrollables to try to keep the items-per-row presentable
//
// @note Yes, we could simply let the div overflow like usual. This could however get really nasty looking
// when dealing with something like userbox characters where there are 1000s of possible items to
// display. This approach gives us full control over where items in the div wrap, allowing us to try
// to keep things presentable.
//
for (const type of types) {
var numPerRow = Math.floor(document.getElementById("scrollable-" + type).offsetWidth / 132);
// Don't put fewer than 4 per row
numPerRow = Math.max(numPerRow, 4);
// Don't populate more than 8 rows
numPerRow = Math.max(numPerRow, Math.ceil(accessories[type][total_items] / 8));
// update the locations of the <br>
for (var i = 1; document.getElementById(type + "-br-" + i) != null; i++) {
var spanBr = document.getElementById(type + "-br-" + i);
if ( i % numPerRow == 0 ) {
spanBr.innerHTML = "<br>";
} else {
spanBr.innerHTML = "";
}
}
}
// update the max height for any currently visible containers
Collapsibles.updateAllHeights();
}
resizePage();
window.addEventListener('resize', resizePage);
// Set initial preview for current avatar
resetAvatar();
// Initialize scroll on all current accessories so we can see the selected ones
for (const type of types) {
document.getElementById("scrollable-" + type).scrollLeft = document.getElementById(accessories[type][curr_id]).offsetLeft;
}
// Expand the first collapsible so the user can get the gist of it.
Collapsibles.expandFirst();
</script>
{% endif %}
{% endblock content %}

View File

@ -0,0 +1,61 @@
{% extends "core/templates/index.jinja" %}
{% block content %}
<style>
{% include 'titles/chuni/templates/css/chuni_style.css' %}
</style>
<div class="container">
{% include 'titles/chuni/templates/chuni_header.jinja' %}
{% if favorites_by_genre is defined and favorites_by_genre is not none %}
<div class="row">
<h4 style="text-align: center;">Favorite Count: {{ favorites_count }}</h4>
{% for key, genre in favorites_by_genre.items() %}
<h2 style="text-align: center; padding-top: 32px">{{ key }}</h2>
{% for favorite in genre %}
<div class="col-lg-6 mt-3">
<div class="card bg-card rounded card-hover">
<div class="card-body row">
<div class="col-3" style="text-align: center;">
<img src="img/jacket/{{ favorite.jacket }}" width="100%">
</div>
<div class="col scrolling-text">
<h5 class="card-text"> {{ favorite.title }} </h5>
<br>
<h6 class="card-text"> {{ favorite.artist }} </h6>
<br><br>
<div style="text-align: right;">
<button onclick="removeFavorite({{ favorite.favId }})" class="btn btn-secondary btn-fav-remove">Remove</button>
</div>
</div>
</div>
</div>
</div>
{% endfor %}
{% endfor %}
</div>
{% endif %}
</div>
<script>
$(document).ready(function () {
$('.scrolling-text p, .scrolling-text h1, .scrolling-text h2, .scrolling-text h3, .scrolling-text h4, .scrolling-text h5, .scrolling-text h6').each(function () {
var parentWidth = $(this).parent().width();
var elementWidth = $(this).outerWidth();
var elementWidthWithPadding = $(this).outerWidth(true);
if (elementWidthWithPadding > parentWidth) {
$(this).addClass('scrolling');
}
});
});
// Remove Favorite
function removeFavorite(musicId) {
$.post("/game/chuni/update.favorite_music_favorites", { musicId: musicId, isAdd: 0 })
.done(function (data) {
location.reload();
})
.fail(function () {
alert("Failed to remove favorite.");
});
}
</script>
{% endblock content %}

View File

@ -1,11 +1,15 @@
<div class="chuni-header">
<h1>Chunithm</h1>
<h1>{{ cur_version_name }}</h1>
<ul class="chuni-navi">
<li><a class="nav-link" href="/game/chuni">PROFILE</a></li>
<li><a class="nav-link" href="/game/chuni/rating">RATING</a></li>
<li><a class="nav-link" href="/game/chuni/playlog">RECORD</a></li>
<li><a class="nav-link" href="/game/chuni/favorites">FAVORITES</a></li>
<li><a class="nav-link" href="/game/chuni/musics">MUSICS</a></li>
<li><a class="nav-link" href="/game/chuni/userbox">USER BOX</a></li>
{% if cur_version >= 11 %} <!-- avatar config introduced in NEW!! -->
<li><a class="nav-link" href="/game/chuni/avatar">AVATAR</a></li>
{% endif %}
</ul>
</div>
<script>
@ -17,8 +21,17 @@
$('.nav-link[href="/game/chuni/playlog"]').addClass('active');
} else if (currentPath.startsWith('/game/chuni/rating')) {
$('.nav-link[href="/game/chuni/rating"]').addClass('active');
} else if (currentPath.startsWith('/game/chuni/favorites')) {
$('.nav-link[href="/game/chuni/favorites"]').addClass('active');
} else if (currentPath.startsWith('/game/chuni/musics')) {
$('.nav-link[href="/game/chuni/musics"]').addClass('active');
} else if (currentPath.startsWith('/game/chuni/userbox')) {
$('.nav-link[href="/game/chuni/userbox"]').addClass('active');
}
{% if cur_version >= 11 %} <!-- avatar config introduced in NEW!! -->
else if (currentPath.startsWith('/game/chuni/avatar')) {
$('.nav-link[href="/game/chuni/avatar"]').addClass('active');
}
{% endif %}
});
</script>

View File

@ -69,9 +69,48 @@
<td>Last Play Date:</td>
<td>{{ profile.lastPlayDate }}</td>
</tr>
{% if cur_version >= 6 %} <!-- MAP ICON and SYSTEM VOICE introduced in AMAZON -->
<tr>
<td>Map Icon:</td>
<td><div id="map-icon-name">{{ map_icons[profile.mapIconId]["name"] if map_icons|length > 0 else "Server DB needs upgraded or is not populated with necessary data" }}</div></td>
</tr>
<tr>
<td>System Voice:</td>
<td><div id="system-voice-name">{{ system_voices[profile.voiceId]["name"] if system_voices|length > 0 else "Server DB needs upgraded or is not populated with necessary data" }}</div></td>
</tr>
{% endif %}
</table>
</div>
</div>
{% if cur_version >= 6 %} <!-- MAP ICON and SYSTEM VOICE introduced in AMAZON -->
<!-- MAP ICON SELECTION -->
<div class="col-lg-8 m-auto mt-3 scrolling-lists">
<div class="card bg-card rounded">
<button class="collapsible">Map Icon:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ map_icons|length }}/{{ total_map_icons }}</button>
<div id="scrollable-map-icon" class="collapsible-content">
{% for item in map_icons.values() %}
<img id="map-icon-{{ item["id"] }}" style="padding: 8px 8px;" onclick="saveItem('map-icon', '{{ item["id"] }}', '{{ item["name"] }}')" src="img/mapIcon/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="map-icon-br-{{ loop.index }}"></span>
{% endfor %}
</div>
</div>
</div>
<!-- SYSTEM VOICE SELECTION -->
<div class="col-lg-8 m-auto mt-3 scrolling-lists">
<div class="card bg-card rounded">
<button class="collapsible">System Voice:&nbsp;&nbsp;&nbsp;{{ system_voices|length }}/{{ total_system_voices }}</button>
<div id="scrollable-system-voice" class="collapsible-content">
{% for item in system_voices.values() %}
<img id="system-voice-{{ item["id"] }}" style="padding: 8px 8px;" onclick="saveItem('system-voice', '{{ item["id"] }}', '{{ item["name"] }}')" src="img/systemVoice/{{ item["imagePath"] }}" alt="{{ item["name"] }}">
<span id="system-voice-br-{{ loop.index }}"></span>
{% endfor %}
</div>
</div>
</div>
{% endif %}
<div class="col-lg-8 m-auto mt-3">
<div class="card bg-card rounded">
<table class="table-large table-rowdistinct">
@ -147,4 +186,93 @@
});
}
</script>
{% if cur_version >= 6 %} <!-- MAP ICON and SYSTEM VOICE introduced in AMAZON -->
<script>
{% include 'titles/chuni/templates/scripts/collapsibles.js' %}
///
/// This script handles all updates to the map icon and system voice
///
total_items = 0;
curr_id = 1;
items = {
// [total_items, curr_id]
"map-icon": ["{{ map_icons|length }}", "{{ profile.mapIconId }}"],
"system-voice":["{{ system_voices|length }}", "{{ profile.voiceId }}"]
};
types = Object.keys(items);
function changeItem(type, id, name) {
// clear select style for old selection
var element = document.getElementById(type + "-" + items[type][curr_id]);
if (element) {
element.style.backgroundColor="inherit";
}
// set new item
items[type][curr_id] = id;
document.getElementById(type + "-name").innerHTML = name;
// update select style for new accessory
element = document.getElementById(type + "-" + id);
if (element) {
element.style.backgroundColor="#5F5";
}
}
function saveItem(type, id, name) {
$.post("/game/chuni/update." + type, { id: id })
.done(function (data) {
changeItem(type, id, name);
})
.fail(function () {
alert("Failed to set " + type + " to " + name);
});
}
function resizePage() {
//
// Handles item organization in the collapsible scrollables to try to keep the items-per-row presentable
//
// @note Yes, we could simply let the div overflow like usual. This could however get really nasty looking
// when dealing with something like userbox characters where there are 1000s of possible items being
// displayed. This approach gives us full control over where items in the div wrap, allowing us to try
// to keep things presentable.
//
for (const type of types) {
var numPerRow = Math.floor(document.getElementById("scrollable-" + type).offsetWidth / 132);
// Don't put fewer than 4 per row
numPerRow = Math.max(numPerRow, 4);
// Don't populate more than 6 rows
numPerRow = Math.max(numPerRow, Math.ceil(items[type][total_items] / 6));
// update the locations of the <br>
for (var i = 1; document.getElementById(type + "-br-" + i) != null; i++) {
var spanBr = document.getElementById(type + "-br-" + i);
if ( i % numPerRow == 0 ) {
spanBr.innerHTML = "<br>";
} else {
spanBr.innerHTML = "";
}
}
}
// update the max height for any currently visible containers
Collapsibles.updateAllHeights();
}
resizePage();
window.addEventListener('resize', resizePage);
// Set initial style for current and scroll to selected
for (const type of types) {
changeItem(type, items[type][curr_id], document.getElementById(type + "-name").innerHTML);
document.getElementById("scrollable-" + type).scrollLeft = document.getElementById(type + "-" + items[type][curr_id]).offsetLeft;
}
Collapsibles.expandAll();
</script>
{% endif %}
{% endblock content %}

View File

@ -7,25 +7,31 @@
{% include 'titles/chuni/templates/chuni_header.jinja' %}
{% if playlog is defined and playlog is not none %}
<div class="row">
<h4 style="text-align: center;">Playlog counts: {{ playlog_count }}</h4>
<h4 style="text-align: center;">Playlog Count: {{ playlog_count }}</h4>
{% set rankName = ['D', 'C', 'B', 'BB', 'BBB', 'A', 'AA', 'AAA', 'S', 'S+', 'SS', 'SS+', 'SSS', 'SSS+'] %}
{% set difficultyName = ['normal', 'hard', 'expert', 'master', 'ultimate'] %}
{% for record in playlog %}
<div class="col-lg-6 mt-3">
<div class="card bg-card rounded card-hover">
<div class="card-header row">
<div class="col-8 scrolling-text">
<div class="col-auto fav" title="{{ ('Remove' if record.isFav else 'Add') + ' Favorite'}}">
<h1><span id="{{ record.idx }}" class="fav {{ 'fav-set' if record.isFav else '' }}" onclick="updateFavorite({{ record.idx }}, {{ record.musicId }})">{{ '&#9733' if record.isFav else '&#9734' }}</span>
</div>
<div class="col scrolling-text">
<h5 class="card-text"> {{ record.title }} </h5>
<br>
<h6 class="card-text"> {{ record.artist }} </h6>
</div>
<div class="col-4">
<div class="col-auto">
<h6 class="card-text">{{ record.raw.userPlayDate }}</h6>
<h6 class="card-text">TRACK {{ record.raw.track }}</h6>
</div>
</div>
<div class="card-body row">
<div class="col-3" style="text-align: center;">
<div class="col-sm" style="text-align: center;">
<img src="../img/jacket/{{ record.jacket }}" width="100%">
</div>
<div class="col" style="text-align: center;">
<h4 class="card-text">{{ record.raw.score }}</h4>
<h2>{{ rankName[record.raw.rank] }}</h2>
<h6
@ -33,10 +39,10 @@
{{ difficultyName[record.raw.level] }}&nbsp&nbsp{{ record.difficultyNum }}
</h6>
</div>
<div class="col-6" style="text-align: center;">
<div class="col-4" style="text-align: center;">
<table class="table-small table-rowdistinc">
<tr>
<td>JUSTICE CRITIAL</td>
<td>JUSTICE CRITICAL</td>
<td>
{{ record.raw.judgeCritical + record.raw.judgeHeaven }}
</td>
@ -180,5 +186,23 @@
}
});
});
// Add/Remove Favorite
function updateFavorite(elementId, musicId) {
element = document.getElementById(elementId);
isAdd = 1;
if (element.classList.contains("fav-set"))
{
isAdd = 0;
}
$.post("/game/chuni/update.favorite_music_favorites", { musicId: musicId, isAdd: isAdd })
.done(function (data) {
location.reload();
})
.fail(function () {
alert("Failed to update favorite.");
});
}
</script>
{% endblock content %}

View File

@ -18,6 +18,7 @@
<th>Music</th>
<th>Difficulty</th>
<th>Score</th>
<th>Rank</th>
<th>Rating</th>
</tr>
{% for row in hot_list %}
@ -28,6 +29,7 @@
{{ row.level }}
</td>
<td>{{ row.score }}</td>
<td>{{ row.rank }}</td>
<td class="{% if row.song_rating >= 16 %}rainbow{% endif %}">
{{ row.song_rating }}
</td>
@ -48,6 +50,7 @@
<th>Music</th>
<th>Difficulty</th>
<th>Score</th>
<th>Rank</th>
<th>Rating</th>
</tr>
{% for row in base_list %}
@ -58,6 +61,7 @@
{{ row.level }}
</td>
<td>{{ row.score }}</td>
<td>{{ row.rank }}</td>
<td class="{% if row.song_rating >= 16 %}rainbow{% endif %}">
{{ row.song_rating }}
</td>
@ -76,4 +80,4 @@
Login to view profile information.
{% endif %}
</div>
{% endblock content %}
{% endblock content %}

View File

@ -0,0 +1,262 @@
{% extends "core/templates/index.jinja" %}
{% block content %}
<style>
{% include 'titles/chuni/templates/css/chuni_style.css' %}
</style>
<div class="container">
{% include 'titles/chuni/templates/chuni_header.jinja' %}
<!-- USER BOX PREVIEW -->
<div class="row">
<div class="col-lg-8 m-auto mt-3">
<div class="card bg-card rounded">
<table class="table-large table-rowdistinct">
<caption align="top">USER BOX</caption>
<tr><td colspan=2 style="height:240px;">
<!-- NAMEPLATE -->
<img id="preview_nameplate" class="userbox userbox-nameplate" src="">
<!-- TEAM -->
<img class="userbox userbox-teamframe" src="img/rank/team3.png">
<div class="userbox userbox-teamname">{{team_name}}</div>
<!-- TROPHY/TITLE -->
<img id="preview_trophy_rank" class="userbox userbox-trophy" src="">
<div id="preview_trophy_name" class="userbox userbox-trophy userbox-trophy-name"></div>
<!-- NAME/RATING -->
<img class="userbox userbox-ratingframe" src="img/rank/rating0.png">
<div class="userbox userbox-name">
<span class="userbox-name-level-label">Lv.</span>
{{ profile.level }}&nbsp;&nbsp;&nbsp;{{ profile.userName }}
</div>
<div class="userbox userbox-rating rating rating-rank{{ rating_rank }}">
<span class="userbox-rating-label">RATING</span>
&nbsp;&nbsp;{{ profile.playerRating/100 }}
</div>
<!-- CHARACTER -->
<img class="userbox userbox-charaframe" src="img/character-bg.png">
<img id="preview_character" class="userbox userbox-chara" src="">
</td></tr>
<tr><td>Nameplate:</td><td style="width: 80%;"><div id="name_nameplate"></div></td></tr>
<tr><td>Trophy:</td><td><div id="name_trophy">
<select name="trophy" id="trophy" onchange="changeTrophy()" style="width:100%;">
{% for item in trophies.values() %}
<option value="{{ item["id"] }}" class="trophy-rank{{ item["rarity"] }}">{{ item["name"] }}</option>
{% endfor %}
</select>
</div></td></tr>
<tr><td>Character:</td><td><div id="name_character"></div></td></tr>
<tr><td colspan=2 style="padding:8px 0px; text-align: center;">
<button id="save-btn" class="btn btn-primary" style="width:140px;" onClick="saveUserbox()">SAVE</button>&nbsp;&nbsp;&nbsp;&nbsp;
<button id="reset-btn" class="btn btn-danger" style="width:140px;" onClick="resetUserbox()">RESET</button>
</td></tr>
</table>
</div>
</div>
</div>
<!-- USERBOX SELECTION -->
<div class="row col-lg-8 m-auto mt-3 scrolling-lists-lg card bg-card rounded">
<!-- NAMEPLATE -->
<button class="collapsible">Nameplate:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ nameplates|length }}/{{ total_nameplates }}</button>
<div id="scrollable-nameplate" class="collapsible-content">
{% for item in nameplates.values() %}
<img id="nameplate-{{ item["id"] }}" style="padding: 8px 8px;" onclick="changeItem('nameplate', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/nameplate/{{ item["texturePath"] }}" alt="{{ item["name"] }}">
<span id="nameplate-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- CHARACTER -->
<button class="collapsible">Character:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ characters|length }}/{{ total_characters }}</button>
<div id="scrollable-character" class="collapsible-content">
{% for item in characters.values() %}
<img id="character-{{ item["id"] }}" onclick="changeItem('character', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["iconPath"] }}')" src="img/character/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="character-br-{{ loop.index }}"></span>
{% endfor %}
</div>
</div>
{% if error is defined %}
{% include "core/templates/widgets/err_banner.jinja" %}
{% endif %}
</div>
{% if nameplates|length == 0 or characters|length == 0 %}
<script>
// Server DB lacks necessary info. Maybe the importer was never run for this version?
document.getElementById("name_nameplate").innerHTML = "Server DB needs to be upgraded or is not populated with necessary data";
</script>
{% else %}
<script>
{% include 'titles/chuni/templates/scripts/collapsibles.js' %}
///
/// This script handles all updates to the user box
///
total_items = 0;
orig_id = 1;
orig_name = 2;
orig_img = 3;
curr_id = 4;
curr_name = 5;
curr_img = 6;
userbox_components = {
// [total_items, orig_id, orig_name, orig_img, curr_id, curr_name, curr_img]
"nameplate":["{{ nameplates|length }}",
"{{ profile.nameplateId }}",
"{{ nameplates[profile.nameplateId]["name"] }}",
"{{ nameplates[profile.nameplateId]["texturePath"] }}", "", "", ""],
"character":["{{ characters|length }}",
"{{ profile.charaIllustId }}",
"{{ characters[profile.charaIllustId]["name"] }}",
"{{ characters[profile.charaIllustId]["iconPath"] }}", "", "", ""]
};
types = Object.keys(userbox_components);
orig_trophy = curr_trophy = "{{ profile.trophyId }}";
curr_trophy_img = "";
function enableButtons(enabled) {
document.getElementById("reset-btn").disabled = !enabled;
document.getElementById("save-btn").disabled = !enabled;
}
function changeItem(type, id, name, img) {
// clear select style for old component
var element = document.getElementById(type + "-" + userbox_components[type][curr_id]);
if (element) {
element.style.backgroundColor="inherit";
}
// set new component
userbox_components[type][curr_id] = id;
userbox_components[type][curr_name] = name;
userbox_components[type][curr_img] = img;
// update select style for new accessory
element = document.getElementById(type + "-" + id);
if (element) {
element.style.backgroundColor="#5F5";
}
// Update the userbox preview and enable buttons
updatePreview();
if (id != userbox_components[type][orig_id]) {
enableButtons(true);
}
}
function getRankImage(selected_rank) {
for (const x of Array(12).keys()) {
if (selected_rank.classList.contains("trophy-rank" + x.toString())) {
return "rank" + x.toString() + ".png";
}
}
return "rank0.png"; // shouldnt ever happen
}
function changeTrophy() {
var trophy_element = document.getElementById("trophy");
curr_trophy = trophy_element.value;
curr_trophy_img = getRankImage(trophy_element[trophy_element.selectedIndex]);
updatePreview();
if (curr_trophy != orig_trophy) {
enableButtons(true);
}
}
function resetUserbox() {
for (const type of types) {
changeItem(type, userbox_components[type][orig_id], userbox_components[type][orig_name], userbox_components[type][orig_img]);
}
// reset trophy
document.getElementById("trophy").value = orig_trophy;
changeTrophy();
// disable the save/reset buttons until something changes
enableButtons(false);
}
function updatePreview() {
for (const type of types) {
document.getElementById("preview_" + type).src = "img/" + type + "/" + userbox_components[type][curr_img];
document.getElementById("name_" + type).innerHTML = userbox_components[type][curr_name];
}
document.getElementById("preview_trophy_rank").src = "img/rank/" + curr_trophy_img;
document.getElementById("preview_trophy_name").innerHTML = document.getElementById("trophy")[document.getElementById("trophy").selectedIndex].innerText;
}
function saveUserbox() {
$.post("/game/chuni/update.userbox", { nameplate: userbox_components["nameplate"][curr_id],
trophy: curr_trophy,
character: userbox_components["character"][curr_id] })
.done(function (data) {
// set the current as the original and disable buttons
for (const type of types) {
userbox_components[type][orig_id] = userbox_components[type][curr_id];
userbox_components[type][orig_name] = userbox_components[type][curr_name];
userbox_components[type][orig_img] = userbox_components[type][curr_img];
}
orig_trophy = curr_trophy;
enableButtons(false);
})
.fail(function () {
alert("Failed to save userbox.");
});
}
function resizePage() {
//
// Handles item organization in the collapsible scrollables to try to keep the items-per-row presentable
//
// @note Yes, we could simply let the div overflow like usual. This could however get really nasty looking
// when dealing with something like userbox characters where there are 1000s of possible items being
// displayed. This approach gives us full control over where items in the div wrap, allowing us to try
// to keep things presentable.
//
for (const type of types) {
var numPerRow = Math.floor(document.getElementById("scrollable-" + type).offsetWidth / 132);
// Don't put fewer than 4 per row
numPerRow = Math.max(numPerRow, 4);
// Don't populate more than 8 rows
numPerRow = Math.max(numPerRow, Math.ceil(userbox_components[type][total_items] / 8));
// update the locations of the <br>
for (var i = 1; document.getElementById(type + "-br-" + i) != null; i++) {
var spanBr = document.getElementById(type + "-br-" + i);
if ( i % numPerRow == 0 ) {
spanBr.innerHTML = "<br>";
} else {
spanBr.innerHTML = "";
}
}
}
// update the max height for any currently visible containers
Collapsibles.updateAllHeights();
}
resizePage();
window.addEventListener('resize', resizePage);
// Set initial preview for current userbox
resetUserbox();
// Initialize scroll on all current items so we can see the selected ones
for (const type of types) {
document.getElementById("scrollable-" + type).scrollLeft = document.getElementById(type + "-" + userbox_components[type][curr_id]).offsetLeft;
}
Collapsibles.expandAll();
</script>
{% endif %}
{% endblock content %}

View File

@@ -159,6 +159,45 @@ caption {
font-weight: bold;
}
.rating {
font-weight: bold;
-webkit-text-stroke-width: 1px;
-webkit-text-stroke-color: black;
}
.rating-rank0 {
color: #008000;
}
.rating-rank1 {
color: #ffa500;
}
.rating-rank2 {
color: #ff0000;
}
.rating-rank3 {
color: #800080;
}
.rating-rank4 {
color: #cd853f;
}
.rating-rank5 {
color: #c0c0c0;
}
.rating-rank6 {
color: #ffd700;
}
.rating-rank7 {
color: #a9a9a9;
}
.rating-rank8 {
background: linear-gradient(to right, red, yellow, lime, aqua, blue, fuchsia) 0 / 5em;
background-clip: text;
-webkit-background-clip: text;
-webkit-text-fill-color: transparent;
}
.scrolling-text {
overflow: hidden;
}
@@ -192,4 +231,306 @@ caption {
100% {
transform: translateX(-100%);
}
}
/*
Styles to support collapsible boxes (used for browsing images)
*/
.collapsible {
background-color: #555;
cursor: pointer;
padding-bottom: 16px;
width: 100%;
border: none;
text-align: left;
outline: none;
font-family: monospace;
font-weight: bold;
}
.collapsible:after {
content: '[+]';
float: right;
}
.collapsible-active:after {
content: "[-]";
}
.collapsible-content {
max-height: 0px;
overflow: hidden;
opacity: 0;
transition: max-height 0.2s ease-out;
background-color: #DDD;
}
/*
Styles for favorites star in /playlog
*/
.fav {
padding: 0;
padding-left: 4px;
background-color: transparent;
border: none;
cursor: pointer;
}
.fav-set {
color: gold;
}
/*
Styles for favorites in /favorites
*/
.btn-fav-remove {
padding:10px;
width:100%;
}
/*
Styles for userbox configuration
*/
.userbox {
position: absolute;
}
.userbox-nameplate {
top: 72px;
left: 32px;
}
.userbox-teamframe {
top: 74px;
left: 156px;
}
.userbox-teamname {
top: 72px;
left: 254px;
padding: 8px 20px;
font-size: 22px;
text-shadow: rgba(0,0,0,0.8) 2px 2px;
color: #DDD;
width: 588px;
text-align: left;
}
.userbox-trophy {
top: 170px;
left: 250px;
zoom: 0.70;
}
.userbox-trophy-name {
top: 170px;
left: 250px;
padding: 8px 20px;
font-size: 28px;
font-weight: bold;
color: #333;
width: 588px;
text-align: center;
}
.userbox-ratingframe {
top: 160px;
left: 175px;
}
.userbox-charaframe {
top: 267px;
left: 824px;
zoom: 0.61;
}
.userbox-chara {
top: 266px;
left: 814px;
zoom: 0.62;
}
.userbox-name {
top: 160px;
left: 162px;
padding: 8px 20px;
font-size: 32px;
font-weight: bold;
color: #333;
text-align: left;
}
.userbox-name-level-label {
font-size: 24px;
}
.userbox-rating {
top: 204px;
left: 166px;
padding: 8px 20px;
font-size: 24px;
text-align: left;
}
.userbox-rating-label {
font-size: 16px;
}
.trophy-rank0 {
color: #111;
background-color: #DDD;
}
.trophy-rank1 {
color: #111;
background-color: #D85;
}
.trophy-rank2 {
color: #111;
background-color: #ADF;
}
.trophy-rank3 {
color: #111;
background-color: #EB3;
}
.trophy-rank4 {
color: #111;
background-color: #EB3;
}
.trophy-rank5 {
color: #111;
background-color: #FFA;
}
.trophy-rank6 {
color: #111;
background-color: #FFA;
}
.trophy-rank7 {
color: #111;
background-color: #FCF;
}
.trophy-rank8 {
color: #111;
background-color: #FCF;
}
.trophy-rank9 {
color: #111;
background-color: #07C;
}
.trophy-rank10 {
color: #111;
background-color: #7FE;
}
.trophy-rank11 {
color: #111;
background-color: #8D7;
}
/*
Styles for scrollable divs (used for browsing images)
*/
.scrolling-lists {
table-layout: fixed;
}
.scrolling-lists div {
overflow: auto;
white-space: nowrap;
}
.scrolling-lists img {
width: 128px;
}
.scrolling-lists-lg {
table-layout: fixed;
width: 100%;
}
.scrolling-lists-lg div {
overflow: auto;
white-space: nowrap;
padding: 4px;
}
.scrolling-lists-lg img {
padding: 4px;
width: 128px;
}
/*
Styles for avatar configuration
*/
.avatar-preview {
position:absolute;
zoom:0.5;
}
.avatar-preview-wear {
top: 280px;
left: 60px;
}
.avatar-preview-face {
top: 262px;
left: 200px;
}
.avatar-preview-head {
top: 130px;
left: 120px;
}
.avatar-preview-skin-body {
top: 250px;
left: 190px;
height: 406px;
width: 256px;
object-fit: cover;
object-position: top;
}
.avatar-preview-skin-leftfoot {
top: 625px;
left: 340px;
object-position: -84px -406px;
}
.avatar-preview-skin-rightfoot {
top: 625px;
left: 40px;
object-position: 172px -406px;
}
.avatar-preview-common {
top: 250px;
left: 135px;
}
.avatar-preview-item-lefthand {
top: 180px;
left: 370px;
height: 544px;
width: 200px;
object-fit: cover;
object-position: right;
}
.avatar-preview-item-righthand {
top: 180px;
left: 65px;
height: 544px;
width: 200px;
object-fit: cover;
object-position: left;
}
.avatar-preview-back {
top: 140px;
left: 46px;
}
.avatar-preview-platform {
top: 310px;
left: 55px;
zoom: 1;
}

View File

@@ -0,0 +1,66 @@
///
/// Handles the collapsible behavior of each of the scrollable containers
///
/// @note Intent is to include this file via jinja in the same <script> tag as
/// any page-specific logic that interacts with the collapsibles.
///
class Collapsibles {
static setHeight(content) {
// @note Add an extra 20% height buffer - the div will autosize but we
// want to make sure we don't clip due to the horizontal scroll.
content.style.maxHeight = (content.scrollHeight * 1.2) + "px";
}
static updateAllHeights() {
// Updates the height of all expanded collapsibles.
// Intended for use when resolution changes cause the contents within the collapsible to move around.
var coll = document.getElementsByClassName("collapsible");
for (var i = 0; i < coll.length; i++) {
var content = coll[i].nextElementSibling;
if (content.style.maxHeight) {
// currently visible. update height
Collapsibles.setHeight(content);
}
}
}
static expandFirst() {
// Activate the first collapsible once loaded so the user can get an idea of how things work
window.addEventListener('load', function () {
var coll = document.getElementsByClassName("collapsible");
if (coll && coll.length > 0) {
coll[0].click();
}
})
}
static expandAll() {
// Activate all collapsibles so everything is visible immediately
window.addEventListener('load', function () {
var coll = document.getElementsByClassName("collapsible");
for (var i = 0; i < coll.length; i++) {
coll[i].click();
}
})
}
}
//
// Initial Collapsible Setup
//
// Add an event listener for a click on any collapsible object. This will change the style to collapse/expand the content.
var coll = document.getElementsByClassName("collapsible");
for (var i = 0; i < coll.length; i++) {
coll[i].addEventListener("click", function () {
this.classList.toggle("collapsible-active");
var content = this.nextElementSibling;
if (content.style.maxHeight) {
content.style.maxHeight = null;
content.style.opacity = 0;
} else {
Collapsibles.setHeight(content);
content.style.opacity = 1;
}
});
}

View File

@@ -207,6 +207,7 @@ class CardMakerReader(BaseReader):
"1.30": Mai2Constants.VER_MAIMAI_DX_FESTIVAL,
"1.35": Mai2Constants.VER_MAIMAI_DX_FESTIVAL_PLUS,
"1.40": Mai2Constants.VER_MAIMAI_DX_BUDDIES,
"1.45": Mai2Constants.VER_MAIMAI_DX_BUDDIES_PLUS
}
for root, dirs, files in os.walk(base_dir):

View File

@@ -264,6 +264,11 @@ class DivaBase:
return response
async def handle_festa_info_request(self, data: Dict) -> Dict:
if self.game_config.server.festa_enable:
festa_end_time = self.game_config.server.festa_end_time
else:
festa_end_time = (datetime.datetime.now() - datetime.timedelta(days=365)).strftime("%Y-%m-%d %H:%M:%S") + ".0"
encoded = "&"
params = {
"fi_id": "1,2",
@@ -273,10 +278,10 @@
"fi_difficulty": "-1,-1",
"fi_pv_id_lst": "ALL,ALL",
"fi_attr": "7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF,7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
"fi_add_vp": "20,5",
"fi_mul_vp": "1,2",
"fi_add_vp": f"{self.game_config.server.festa_add_VP}",
"fi_mul_vp": f"{self.game_config.server.festa_multiply_VP}",
"fi_st": "2019-01-01 00:00:00.0,2019-01-01 00:00:00.0",
"fi_et": "2029-01-01 00:00:00.0,2029-01-01 00:00:00.0",
"fi_et": f"{festa_end_time},{festa_end_time}",
"fi_lut": "{self.time_lut}",
}

View File

@@ -19,6 +19,29 @@ class DivaServerConfig:
)
)
@property
def festa_enable(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "diva", "server", "festa_enable", default=True
)
@property
def festa_add_VP(self) -> str:
return CoreConfig.get_config_field(
self.__config, "diva", "server", "festa_add_VP", default="20,5"
)
@property
def festa_multiply_VP(self) -> str:
return CoreConfig.get_config_field(
self.__config, "diva", "server", "festa_multiply_VP", default="1,2"
)
@property
def festa_end_time(self) -> str:
return CoreConfig.get_config_field(
self.__config, "diva", "server", "festa_end_time", default="2029-01-01 00:00:00.0"
)
class DivaModsConfig:
def __init__(self, parent_config: "DivaConfig") -> None:
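For reference, a minimal sketch of how the new festa options might look in the Diva game config YAML. The exact file name and nesting are assumptions (whatever CoreConfig.get_config_field resolves for the diva/server path); the values shown are simply the defaults declared in the properties above, so the keys only need to be added when overriding them:
server:
  festa_enable: True                        # False backdates festa_end_time by a year so the festa appears ended
  festa_add_VP: "20,5"                      # comma-separated pair fed into fi_add_vp
  festa_multiply_VP: "1,2"                  # comma-separated pair fed into fi_mul_vp
  festa_end_time: "2029-01-01 00:00:00.0"   # used for fi_et while festa is enabled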

View File

@@ -23,7 +23,7 @@ class DivaFrontend(FE_Base):
self.game_cfg.update(
yaml.safe_load(open(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}"))
)
self.nav_name = "diva"
self.nav_name = "Project Diva"
def get_routes(self) -> List[Route]:
return [

Some files were not shown because too many files have changed in this diff.