Merge branch 'develop' into per_version_uri_host
@@ -12,10 +12,10 @@ RUN chmod +x entrypoint.sh

 COPY index.py index.py
 COPY dbutils.py dbutils.py
+COPY read.py read.py
 ADD core core
 ADD titles titles
-ADD config config
 ADD log log
 ADD cert cert

 ENTRYPOINT [ "/app/entrypoint.sh" ]
@@ -145,7 +145,15 @@ class AimedbProtocol(Protocol):
     def handle_lookup(self, data: bytes, resp_code: int) -> ADBBaseResponse:
         req = ADBLookupRequest(data)
         user_id = self.data.card.get_user_id_from_card(req.access_code)
+        is_banned = self.data.card.get_card_banned(req.access_code)
+        is_locked = self.data.card.get_card_locked(req.access_code)

+        if is_banned and is_locked:
+            ret.head.status = ADBStatus.BAN_SYS_USER
+        elif is_banned:
+            ret.head.status = ADBStatus.BAN_SYS
+        elif is_locked:
+            ret.head.status = ADBStatus.LOCK_USER
         ret = ADBLookupResponse.from_req(req.head, user_id)

         self.logger.info(
@@ -157,8 +165,17 @@ class AimedbProtocol(Protocol):
         req = ADBLookupRequest(data)
         user_id = self.data.card.get_user_id_from_card(req.access_code)

+        is_banned = self.data.card.get_card_banned(req.access_code)
+        is_locked = self.data.card.get_card_locked(req.access_code)
+
         ret = ADBLookupExResponse.from_req(req.head, user_id)
+        if is_banned and is_locked:
+            ret.head.status = ADBStatus.BAN_SYS_USER
+        elif is_banned:
+            ret.head.status = ADBStatus.BAN_SYS
+        elif is_locked:
+            ret.head.status = ADBStatus.LOCK_USER

         self.logger.info(
             f"access_code {req.access_code} -> user_id {ret.user_id}"
         )
@@ -237,7 +254,7 @@ class AimedbProtocol(Protocol):
     def handle_register(self, data: bytes, resp_code: int) -> bytes:
         req = ADBLookupRequest(data)
         user_id = -1

         if self.config.server.allow_user_registration:
             user_id = self.data.user.create_user()

@@ -34,6 +34,61 @@ class ALLNET_STAT(Enum):
     bad_machine = -2
     bad_shop = -3

+class DLI_STATUS(Enum):
+    START = 0
+    GET_DOWNLOAD_CONFIGURATION = 1
+    WAIT_DOWNLOAD = 2
+    DOWNLOADING = 3
+
+    NOT_SPECIFY_DLI = 100
+    ONLY_POST_REPORT = 101
+    STOPPED_BY_APP_RELEASE = 102
+    STOPPED_BY_OPT_RELEASE = 103
+
+    DOWNLOAD_COMPLETE_RECENTLY = 110
+
+    DOWNLOAD_COMPLETE_WAIT_RELEASE_TIME = 120
+    DOWNLOAD_COMPLETE_BUT_NOT_SYNC_SERVER = 121
+    DOWNLOAD_COMPLETE_BUT_NOT_FIRST_RESUME = 122
+    DOWNLOAD_COMPLETE_BUT_NOT_FIRST_LAUNCH = 123
+    DOWNLOAD_COMPLETE_WAIT_UPDATE = 124
+
+    DOWNLOAD_COMPLETE_AND_ALREADY_UPDATE = 130
+
+    ERROR_AUTH_FAILURE = 200
+
+    ERROR_GET_DLI_HTTP = 300
+    ERROR_GET_DLI = 301
+    ERROR_PARSE_DLI = 302
+    ERROR_INVALID_GAME_ID = 303
+    ERROR_INVALID_IMAGE_LIST = 304
+    ERROR_GET_DLI_APP = 305
+
+    ERROR_GET_BOOT_ID = 400
+    ERROR_ACCESS_SERVER = 401
+    ERROR_NO_IMAGE = 402
+    ERROR_ACCESS_IMAGE = 403
+
+    ERROR_DOWNLOAD_APP = 500
+    ERROR_DOWNLOAD_OPT = 501
+
+    ERROR_DISK_FULL = 600
+    ERROR_UNINSTALL = 601
+    ERROR_INSTALL_APP = 602
+    ERROR_INSTALL_OPT = 603
+
+    ERROR_GET_DLI_INTERNAL = 900
+    ERROR_ICF = 901
+    ERROR_CHECK_RELEASE_INTERNAL = 902
+    UNKNOWN = 999 # Not the actual enum val but it needs to be here as a catch-all
+
+    @classmethod
+    def from_int(cls, num: int) -> "DLI_STATUS":
+        try:
+            return cls(num)
+        except ValueError:
+            return cls.UNKNOWN
+
 class AllnetServlet:
     def __init__(self, core_cfg: CoreConfig, cfg_folder: str):
         super().__init__()
@@ -303,6 +358,7 @@ class AllnetServlet:

     def handle_dlorder_report(self, request: Request, match: Dict) -> bytes:
         req_raw = request.content.getvalue()
+        client_ip = Utils.get_ip_addr(request)
         try:
             req_dict: Dict = json.loads(req_raw)
         except Exception as e:
@@ -320,11 +376,17 @@ class AllnetServlet:
             self.logger.warning(f"Failed to parse DL Report: Invalid format - contains neither appimage nor optimage")
             return "NG"

-        dl_report_data = DLReport(dl_data, dl_data_type)
+        rep = DLReport(dl_data, dl_data_type)

-        if not dl_report_data.validate():
-            self.logger.warning(f"Failed to parse DL Report: Invalid format - {dl_report_data.err}")
+        if not rep.validate():
+            self.logger.warning(f"Failed to parse DL Report: Invalid format - {rep.err}")
             return "NG"

+        msg = f"{rep.serial} @ {client_ip} reported {rep.rep_type.name} download state {rep.rf_state.name} for {rep.gd} v{rep.dav}:"\
+            f" {rep.tdsc}/{rep.tsc} segments downloaded for working files {rep.wfl} with {rep.dfl if rep.dfl else 'none'} complete."
+
+        self.data.base.log_event("allnet", "DL_REPORT", logging.INFO, msg, dl_data)
+        self.logger.info(msg)
+
         return "OK"

@@ -727,13 +789,13 @@ class DLReport:
         self.ot = data.get("ot")
         self.rt = data.get("rt")
         self.as_ = data.get("as")
-        self.rf_state = data.get("rf_state")
+        self.rf_state = DLI_STATUS.from_int(data.get("rf_state"))
         self.gd = data.get("gd")
         self.dav = data.get("dav")
         self.wdav = data.get("wdav") # app only
         self.dov = data.get("dov")
         self.wdov = data.get("wdov") # app only
-        self.__type = report_type
+        self.rep_type = report_type
         self.err = ""

     def validate(self) -> bool:
@@ -741,14 +803,6 @@ class DLReport:
             self.err = "serial not provided"
             return False

-        if self.dfl is None:
-            self.err = "dfl not provided"
-            return False
-
-        if self.wfl is None:
-            self.err = "wfl not provided"
-            return False
-
         if self.tsc is None:
             self.err = "tsc not provided"
             return False
@@ -757,18 +811,6 @@ class DLReport:
             self.err = "tdsc not provided"
             return False

-        if self.at is None:
-            self.err = "at not provided"
-            return False
-
-        if self.ot is None:
-            self.err = "ot not provided"
-            return False
-
-        if self.rt is None:
-            self.err = "rt not provided"
-            return False
-
         if self.as_ is None:
             self.err = "as not provided"
             return False
@@ -789,11 +831,11 @@ class DLReport:
             self.err = "dov not provided"
             return False

-        if (self.wdav is None or self.wdov is None) and self.__type == DLIMG_TYPE.app:
+        if (self.wdav is None or self.wdov is None) and self.rep_type == DLIMG_TYPE.app:
             self.err = "wdav or wdov not provided in app image"
             return False

-        if (self.wdav is not None or self.wdov is not None) and self.__type == DLIMG_TYPE.opt:
+        if (self.wdav is not None or self.wdov is not None) and self.rep_type == DLIMG_TYPE.opt:
             self.err = "wdav or wdov provided in opt image"
             return False

@@ -64,6 +64,27 @@ class CardData(BaseData):

         return int(card["user"])

+    def get_card_banned(self, access_code: str) -> Optional[bool]:
+        """
+        Given a 20 digit access code as a string, check if the card is banned
+        """
+        card = self.get_card_by_access_code(access_code)
+        if card is None:
+            return None
+        if card["is_banned"]:
+            return True
+        return False
+    def get_card_locked(self, access_code: str) -> Optional[bool]:
+        """
+        Given a 20 digit access code as a string, check if the card is locked
+        """
+        card = self.get_card_by_access_code(access_code)
+        if card is None:
+            return None
+        if card["is_locked"]:
+            return True
+        return False
+
     def delete_card(self, card_id: int) -> None:
         sql = aime_card.delete(aime_card.c.id == card_id)

@@ -5,22 +5,23 @@ services:
     build: .
     volumes:
       - ./aime:/app/aime
+      - ./configs/config:/app/config

     environment:
       CFG_DEV: 1
       CFG_CORE_SERVER_HOSTNAME: 0.0.0.0
       CFG_CORE_DATABASE_HOST: ma.db
       CFG_CORE_MEMCACHED_HOSTNAME: ma.memcached
-      CFG_CORE_AIMEDB_KEY: keyhere
+      CFG_CORE_AIMEDB_KEY: <INSERT AIMEDB KEY HERE>
       CFG_CHUNI_SERVER_LOGLEVEL: debug

     ports:
       - "80:80"
       - "8443:8443"
       - "22345:22345"

       - "8080:8080"
       - "8090:8090"

     depends_on:
       db:
@@ -28,21 +29,29 @@ services:

   db:
     hostname: ma.db
-    image: mysql:8.0.31-debian
+    image: yobasystems/alpine-mariadb:10.11.5
     environment:
       MYSQL_DATABASE: aime
       MYSQL_USER: aime
       MYSQL_PASSWORD: aime
       MYSQL_ROOT_PASSWORD: AimeRootPassword
+      MYSQL_CHARSET: utf8mb4
+      MYSQL_COLLATION: utf8mb4_general_ci
+    ##Note: expose port 3306 to allow read.py importer into database, comment out when not needed
+    #ports:
+    #  - "3306:3306"
+    ##Note: uncomment to allow mysql to create a persistent database, leave commented if you want to rebuild database from scratch often
+    #volumes:
+    #  - ./AimeDB:/var/lib/mysql
     healthcheck:
-      test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"]
+      test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost", "-pAimeRootPassword"]
       timeout: 5s
       retries: 5
-

   memcached:
     hostname: ma.memcached
-    image: memcached:1.6.17-bullseye
+    image: memcached:1.6.22-alpine3.18
+    command: [ "memcached", "-m", "1024", "-I", "128m" ]

   phpmyadmin:
     hostname: ma.phpmyadmin
@@ -53,5 +62,5 @@ services:
       PMA_PASSWORD: AimeRootPassword
       APACHE_PORT: 8080
     ports:
-      - "8080:8080"
+      - "9090:8080"

docs/INSTALL_DOCKER.md (new file, 246 lines)
@@ -0,0 +1,246 @@
# ARTEMiS - Docker Installation Guide

This step-by-step guide will allow you to install a containerized version of ARTEMiS inside Docker. Some steps can be skipped if you already have the prerequisite components and modules installed.

This guide assumes Debian 12 (bookworm-stable) as the host operating system for most packages and modules.

## Pre-Requisites:

- Linux-based operating system (e.g. Debian, Ubuntu)
- Docker (https://get.docker.com)
- Python 3.9+
- (optional) Git

## Install Python3.9+ and Docker

```
(if this is a fresh install of the system)
sudo apt update && sudo apt upgrade

(installs python3 and pip)
sudo apt install python3 python3-pip

(installs docker)
curl -fsSL https://get.docker.com -o get-docker.sh
sh get-docker.sh

(optionally install git)
sudo apt install git
```

## Get ARTEMiS

If you installed git, clone your choice of ARTEMiS git repository, e.g.:
```
git clone <ARTEMiS Repo> <folder>
```
If not, download the source package and unpack it to the folder of your choice.

## Prepare development/home configuration

To build our Docker setup, first we need to create some folders and copy some files around:
- Create 'aime', 'configs', 'AimeDB', and 'logs' folders in the ARTEMiS root folder (where all source files exist)
- Inside the configs folder, create a 'config' folder, and copy all .yaml files from example_config to config (that is, all files except nginx_example.conf)
- Edit the .yaml files inside configs/config to suit your server needs
- Edit core.yaml inside configs/config:
```
set server.listen_address: to "0.0.0.0"
set title.hostname: to the machine's IP address, e.g. "192.168.x.x", depending on your network, or the actual hostname if your configuration is already set up for DNS resolution
set database.host: to "ma.db"
set database.memcached_host: to "ma.memcached"
set aimedb.key: to "<actual AIMEDB key>"
```

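For reference, a minimal sketch of what the edited sections of core.yaml might look like (section and key names follow the list above; the exact layout can differ between ARTEMiS versions):

```
server:
  listen_address: "0.0.0.0"

title:
  hostname: "192.168.x.x"   # your machine's LAN IP, or a resolvable hostname

database:
  host: "ma.db"
  memcached_host: "ma.memcached"

aimedb:
  key: "<actual AIMEDB key>"
```
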
## Running Docker Compose

After configuring, go to the ARTEMiS root folder and execute:

```
docker compose up -d
```

(the "-d" argument means detached or daemon; you will regain control of your terminal and the containers will run in the background)

This will start pulling and building the required images from the network. After it's done, a development server should be running, with the server accessible under the machine's IP, the frontend on port 8090, and phpMyAdmin on port 9090.

- To turn off the server, from the ARTEMiS root folder, execute:

```
docker compose down
```

- If you changed some files around and don't see your changes applied, execute:

```
(turn off the server)
docker compose down
(rebuild)
docker compose build
(turn on)
docker compose up -d
```

- If you need to see logs from the running containers, execute:

```
docker compose logs
```

- add '-f' to the end if you want to follow the logs.

## Running commands

If you need to execute python scripts supplied with the application, use `docker compose exec app python3 <script> <command>`, for example `docker compose exec app python3 dbutils.py version`

## Persistent DB

By default, in development mode, the ARTEMiS database is stored temporarily. If you wish to keep your database saved between restarts, we need to bind the database inside the container to actual storage (a folder) on our server. To do this we need to make a few changes:

- First off, edit docker-compose.yml and uncomment 2 lines (see the sketch below):

```
(uncomment these two)
#volumes:
#  - ./AimeDB:/var/lib/mysql
```

- After that, start up the server; this time the database will be saved in the AimeDB folder we created in our configuration steps.
- If you wish to save it in another folder and/or storage device, change the "./AimeDB" target folder to the folder/device of your choice

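As a rough sketch (assuming the db service layout shown in docker-compose.yml above), the service ends up looking something like this once the volume is enabled:

```
  db:
    hostname: ma.db
    image: yobasystems/alpine-mariadb:10.11.5
    # ...other settings unchanged...
    volumes:
      - ./AimeDB:/var/lib/mysql
```
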
NOTE (NEEDS FIX): at the moment, running development mode with a persistent DB will always run the database creation script at the start of the application. While it doesn't break the database outright, it might create some issues. A temporary fix can be applied:

- Start up the containers with persistent DB already enabled, and let the application create the database
- After startup, `docker compose down` the instance
- Edit entrypoint.sh and remove the `python3 dbutils.py create` line from the development mode statement
- Execute `docker compose build` and `docker compose up -d` to rebuild the app and start the containers back up

## Adding importer data

To add data using the importer, we can do that a few ways:

### Use importer locally on server

For that we need the actual GameData and Options supplied somehow to the server system, be it a wsl2 mounting layer, a pendrive with data, a network share, or a direct copy to the server storage.
With python3 installed on the system, install requirements.txt directly to the system, or through a python3 virtual environment (python3-venv).
Default mysql/mariadb client development packages will also be required.

- In the system:

```
sudo apt install default-libmysqlclient-dev build-essential pkg-config libmemcached-dev
sudo apt install mysql-client
OR
sudo apt install libmariadb-dev
```

- In the root ARTEMiS folder:

```
python3 -m pip install -r requirements.txt
```

- If we wish to layer that with a python3 virtual environment, install the required system packages, then:

```
sudo apt install python3-venv
python3 -m venv /path/to/venv
cd /path/to/venv/bin
python3 -m pip install -r /path/to/artemis/requirements.txt
```

- Depending on how you installed, you can now run read.py using:
- For a direct installation, from the root ARTEMiS folder:

```
python3 read.py <args>
```

- Or from the python3 virtual environment, from the root ARTEMiS folder:

```
/path/to/python3-venv/bin/python3 /path/to/artemis/read.py <args>
```

- We need to expose the database container port so that read.py can communicate with the database. Inside docker-compose.yml, uncomment 2 lines in the database container declaration (db):

```
#ports:
#  - "3306:3306"
```

- Now, `docker compose down && docker compose build && docker compose up -d` to restart the containers

Now to insert the data: by default, docker doesn't expose container hostnames to the host system, so when trying to run read.py against a container, it will error that the hostname is not available. To fix that, we can add the database hostname by hand to /etc/hosts:

```
sudo <editor of your choice> /etc/hosts
add '127.0.0.1 ma.db' to the table
save and close
```

- You can remove the line in /etc/hosts and de-expose the database port after a successful import (this assumes you're using a persistent DB, as restarting the container without it will clear the imported data).

### Use importer on remote Linux system

Follow the system and python portion of the guide, installing the required packages and python3 modules, and download the ARTEMiS source.

- Edit core.yaml and insert it into the config catalog:

```
database:
  host: "<hostname of target system>"
```

- Expose port 3306 from the database docker container to the system, and allow port 3306 through the system firewall so the port is reachable from the system you will be importing data from. (Remember to close down the database ports after finishing!)

- Import data using read.py

### Use importer on remote Windows system

Follow the [windows](docs/INSTALL_WINDOWS.md) guide for installing python dependencies, and download the ARTEMiS source.

- Edit core.yaml and insert it into the config catalog:

```
database:
  host: "<hostname of target system>"
```

- Expose port 3306 from the database docker container to the system, and allow port 3306 through the system firewall so the port is reachable from the system you will be importing data from.
- For Windows, also allow port 3306 outside the system so that read.py can communicate with the remote database. (Remember to close down the database ports after finishing!)

# Troubleshooting

## Game does not connect to ARTEMiS Allnet Server

Double check your core.yaml that all addresses are correct and that ports are correctly set and/or opened.

## Game does not connect to Title Server

The title server hostname requires your actual system hostname (the machine from which you set up the containers) or its IP address. You can get the IP by using the command `ip a`, which will list all interfaces; one of them should be your system IP (typically under eth0).

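For example, a sketch of the title section described in the configuration steps above:

```
title:
  hostname: "192.168.x.x"   # the host machine's IP or hostname, not a container name
```
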
## Unhandled command in AimeDB

Make sure you have a proper AimeDB key added to the configuration.

## Memcached Error in ARTEMiS application causes errors in loading data

Currently, when running ARTEMiS from the master branch, there is a small bug that causes the app to always configure the memcached service to 127.0.0.1. To fix that, locate the cache.py file in core/data, and edit:

```
memcache = pylibmc.Client([hostname], binary=True)
```

to:

```
memcache = pylibmc.Client(["ma.memcached"], binary=True)
```

And build the containers again.
This will fix errors loading data from the server.
(This is fixed in the development branch)

## read.py "Can't connect to local server through socket '/run/mysqld/mysqld.sock'"

sqlalchemy by default treats any IP-based connection as a socket, thus trying to connect locally. Please use a hostname (such as ma.db, as in the guide, and not localhost) to force it to use a network interface.

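A sketch of the corresponding core.yaml snippet (key names as used earlier in this guide):

```
database:
  host: "ma.db"   # use the container hostname, not localhost
```
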
### TODO:

- Production environment
@@ -5,7 +5,6 @@ server:

 team:
   name: ARTEMiS # If this is set, all players that are not on a team will use this one by default.
-  rank_scale: True # Scales the in-game ranking based on the number of teams within the database, rather than the default scale of ~100 that the game normally uses.

 mods:
   use_login_bonus: True
@@ -40,7 +40,7 @@ Games listed below have been tested and confirmed working. Only game versions ol
 - mysql/mariadb server

 ## Setup guides
-Follow the platform-specific guides for [windows](docs/INSTALL_WINDOWS.md) and [ubuntu](docs/INSTALL_UBUNTU.md) to setup and run the server.
+Follow the platform-specific guides for [windows](docs/INSTALL_WINDOWS.md), [ubuntu](docs/INSTALL_UBUNTU.md) or [docker](docs/INSTALL_DOCKER.md) to setup and run the server.

 ## Game specific information
 Read [Games specific info](docs/game_specific_info.md) for all supported games, importer settings, configuration option and database upgrades.
|
@ -10,7 +10,7 @@ from core.config import CoreConfig
|
|||||||
from titles.chuni.const import ChuniConstants
|
from titles.chuni.const import ChuniConstants
|
||||||
from titles.chuni.database import ChuniData
|
from titles.chuni.database import ChuniData
|
||||||
from titles.chuni.config import ChuniConfig
|
from titles.chuni.config import ChuniConfig
|
||||||
|
SCORE_BUFFER = {}
|
||||||
|
|
||||||
class ChuniBase:
|
class ChuniBase:
|
||||||
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
||||||
@@ -194,7 +194,8 @@ class ChuniBase:
         }

     def handle_get_game_ranking_api_request(self, data: Dict) -> Dict:
-        return {"type": data["type"], "gameRankingList": []}
+        rankings = self.data.score.get_rankings(self.version)
+        return {"type": data["type"], "gameRankingList": rankings}

     def handle_get_game_sale_api_request(self, data: Dict) -> Dict:
         return {"type": data["type"], "length": 0, "gameSaleList": []}
@@ -401,7 +402,6 @@ class ChuniBase:
             "userId": data["userId"],
             "userRivalData": userRivalData
         }

     def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict:
         rival_id = data["rivalId"]
         next_index = int(data["nextIndex"])
@@ -415,10 +415,10 @@ class ChuniBase:
         for music in all_entries[next_index:]:
             music_id = music["musicId"]
             level = music["level"]
-            score = music["score"]
-            rank = music["rank"]
+            score = music["scoreMax"]
+            rank = music["scoreRank"]

-            # Create a music entry for the current music_id if it's unique
+            # Create a music entry for the current music_id
             music_entry = next((entry for entry in user_rival_music_list if entry["musicId"] == music_id), None)
             if music_entry is None:
                 music_entry = {
@@ -428,20 +428,15 @@ class ChuniBase:
                 }
                 user_rival_music_list.append(music_entry)

-            # Create a level entry for the current level if it's unique or has a higher score
-            level_entry = next((entry for entry in music_entry["userRivalMusicDetailList"] if entry["level"] == level), None)
-            if level_entry is None:
-                level_entry = {
-                    "level": level,
-                    "scoreMax": score,
-                    "scoreRank": rank
-                }
-                music_entry["userRivalMusicDetailList"].append(level_entry)
-            elif score > level_entry["scoreMax"]:
-                level_entry["scoreMax"] = score
-                level_entry["scoreRank"] = rank
+            # Create a level entry for the current level
+            level_entry = {
+                "level": level,
+                "scoreMax": score,
+                "scoreRank": rank
+            }
+            music_entry["userRivalMusicDetailList"].append(level_entry)

-        # Calculate the length for each "musicId" by counting the unique levels
+        # Calculate the length for each "musicId" by counting the levels
         for music_entry in user_rival_music_list:
             music_entry["length"] = len(music_entry["userRivalMusicDetailList"])

@@ -583,22 +578,39 @@ class ChuniBase:
             tmp.pop("id")

             for song in song_list:
+                score_buf = SCORE_BUFFER.get(str(data["userId"])) or []
                 if song["userMusicDetailList"][0]["musicId"] == tmp["musicId"]:
                     found = True
                     song["userMusicDetailList"].append(tmp)
                     song["length"] = len(song["userMusicDetailList"])
+                    score_buf.append(tmp["musicId"])
+                    SCORE_BUFFER[str(data["userId"])] = score_buf

-            if not found:
+            score_buf = SCORE_BUFFER.get(str(data["userId"])) or []
+            if not found and tmp["musicId"] not in score_buf:
                 song_list.append({"length": 1, "userMusicDetailList": [tmp]})
+                score_buf.append(tmp["musicId"])
+                SCORE_BUFFER[str(data["userId"])] = score_buf

             if len(song_list) >= max_ct:
                 break

-        if len(music_detail) >= next_idx + max_ct:
-            next_idx += max_ct
+        try:
+            while song_list[-1]["userMusicDetailList"][0]["musicId"] == music_detail[x + 1]["musicId"]:
+                music = music_detail[x + 1]._asdict()
+                music.pop("user")
+                music.pop("id")
+                song_list[-1]["userMusicDetailList"].append(music)
+                song_list[-1]["length"] += 1
+                x += 1
+        except IndexError:
+            pass

+        if len(song_list) >= max_ct:
+            next_idx += len(song_list)
         else:
             next_idx = -1
+            SCORE_BUFFER[str(data["userId"])] = []
         return {
             "userId": data["userId"],
             "length": len(song_list),
@@ -730,16 +742,57 @@ class ChuniBase:
         return {
             "userId": data["userId"],
             "length": 0,
-            "nextIndex": 0,
+            "nextIndex": -1,
             "teamCourseSettingList": [],
         }

+    def handle_get_team_course_setting_api_request_proto(self, data: Dict) -> Dict:
+        return {
+            "userId": data["userId"],
+            "length": 1,
+            "nextIndex": -1,
+            "teamCourseSettingList": [
+                {
+                    "orderId": 1,
+                    "courseId": 1,
+                    "classId": 1,
+                    "ruleId": 1,
+                    "courseName": "Test",
+                    "teamCourseMusicList": [
+                        {"track": 184, "type": 1, "level": 3, "selectLevel": -1},
+                        {"track": 184, "type": 1, "level": 3, "selectLevel": -1},
+                        {"track": 184, "type": 1, "level": 3, "selectLevel": -1}
+                    ],
+                    "teamCourseRankingInfoList": [],
+                    "recodeDate": "2099-12-31 11:59:99.0",
+                    "isPlayed": False
+                }
+            ],
+        }
+
     def handle_get_team_course_rule_api_request(self, data: Dict) -> Dict:
         return {
             "userId": data["userId"],
             "length": 0,
-            "nextIndex": 0,
-            "teamCourseRuleList": [],
+            "nextIndex": -1,
+            "teamCourseRuleList": []
+        }
+
+    def handle_get_team_course_rule_api_request_proto(self, data: Dict) -> Dict:
+        return {
+            "userId": data["userId"],
+            "length": 1,
+            "nextIndex": -1,
+            "teamCourseRuleList": [
+                {
+                    "recoveryLife": 0,
+                    "clearLife": 100,
+                    "damageMiss": 1,
+                    "damageAttack": 1,
+                    "damageJustice": 1,
+                    "damageJusticeC": 1
+                }
+            ],
         }

     def handle_upsert_user_all_api_request(self, data: Dict) -> Dict:
@@ -813,7 +866,7 @@ class ChuniBase:
             playlog["playedUserName1"] = self.read_wtf8(playlog["playedUserName1"])
             playlog["playedUserName2"] = self.read_wtf8(playlog["playedUserName2"])
             playlog["playedUserName3"] = self.read_wtf8(playlog["playedUserName3"])
-            self.data.score.put_playlog(user_id, playlog)
+            self.data.score.put_playlog(user_id, playlog, self.version)

         if "userTeamPoint" in upsert:
             team_points = upsert["userTeamPoint"]
|
@ -6,7 +6,7 @@ from sqlalchemy.schema import ForeignKey
|
|||||||
from sqlalchemy.engine import Row
|
from sqlalchemy.engine import Row
|
||||||
from sqlalchemy.sql import func, select
|
from sqlalchemy.sql import func, select
|
||||||
from sqlalchemy.dialects.mysql import insert
|
from sqlalchemy.dialects.mysql import insert
|
||||||
|
from sqlalchemy.sql.expression import exists
|
||||||
from core.data.schema import BaseData, metadata
|
from core.data.schema import BaseData, metadata
|
||||||
|
|
||||||
course = Table(
|
course = Table(
|
||||||
@@ -189,9 +189,28 @@ class ChuniScoreData(BaseData):
             return None
         return result.fetchall()

-    def put_playlog(self, aime_id: int, playlog_data: Dict) -> Optional[int]:
+    def put_playlog(self, aime_id: int, playlog_data: Dict, version: int) -> Optional[int]:
+        # Calculate the ROM version that should be inserted into the DB, based on the version of the game being inserted
+        # We only need from Version 10 (Plost) and back, as newer versions include romVersion in their upsert
+        # This matters both for gameRankings, as well as a future DB update to keep version data separate
+        romVer = {
+            10: "1.50.0",
+            9: "1.45.0",
+            8: "1.40.0",
+            7: "1.35.0",
+            6: "1.30.0",
+            5: "1.25.0",
+            4: "1.20.0",
+            3: "1.15.0",
+            2: "1.10.0",
+            1: "1.05.0",
+            0: "1.00.0"
+        }

         playlog_data["user"] = aime_id
         playlog_data = self.fix_bools(playlog_data)
+        if "romVersion" not in playlog_data:
+            playlog_data["romVersion"] = romVer.get(version, "1.00.0")

         sql = insert(playlog).values(**playlog_data)
         conflict = sql.on_duplicate_key_update(**playlog_data)
@@ -201,9 +220,39 @@ class ChuniScoreData(BaseData):
             return None
         return result.lastrowid

+    def get_rankings(self, version: int) -> Optional[List[Dict]]:
+        # Calculates the ROM version that should be fetched for rankings, based on the game version being retrieved
+        # This prevents tracks that are not accessible in your version from counting towards the 10 results
+        romVer = {
+            13: "2.10%",
+            12: "2.05%",
+            11: "2.00%",
+            10: "1.50%",
+            9: "1.45%",
+            8: "1.40%",
+            7: "1.35%",
+            6: "1.30%",
+            5: "1.25%",
+            4: "1.20%",
+            3: "1.15%",
+            2: "1.10%",
+            1: "1.05%",
+            0: "1.00%"
+        }
+        sql = select([playlog.c.musicId.label('id'), func.count(playlog.c.musicId).label('point')]).where((playlog.c.level != 4) & (playlog.c.romVersion.like(romVer.get(version, "%")))).group_by(playlog.c.musicId).order_by(func.count(playlog.c.musicId).desc()).limit(10)
+        result = self.execute(sql)
+
+        if result is None:
+            return None
+
+        rows = result.fetchall()
+        return [dict(row) for row in rows]

     def get_rival_music(self, rival_id: int) -> Optional[List[Dict]]:
-        sql = select(playlog).where(playlog.c.user == rival_id)
+        sql = select(best_score).where(best_score.c.user == rival_id)

         result = self.execute(sql)
         if result is None:
             return None
         return result.fetchall()

@@ -116,43 +116,43 @@ class HousingInfo:


 class Notice:
-    name: str = ""
     title: str = ""
     message: str = ""
-    unknown3: str = ""
-    unknown4: str = ""
+    dialog: str = ""
+    fullImage: str = ""
+    messageImage: str = ""
     showTitleScreen: bool = True
     showWelcomeScreen: bool = True
     startTime: int = 0
     endTime: int = 0
-    voiceline: int = 0
+    voiceLine: int = 0

     def __init__(
         self,
-        name: str = "",
         title: str = "",
         message: str = "",
+        dialog: str = "",
         start: int = 0,
         end: int = 0,
     ) -> None:
-        self.name = name
         self.title = title
         self.message = message
+        self.dialog = dialog
         self.startTime = start
         self.endTime = end

     def make(self) -> List:
         return [
-            self.name,
             self.title,
             self.message,
-            self.unknown3,
-            self.unknown4,
+            self.dialog,
+            self.fullImage,
+            self.messageImage,
             int(self.showTitleScreen),
             int(self.showWelcomeScreen),
             self.startTime,
             self.endTime,
-            self.voiceline,
+            self.voiceLine,
         ]
