begin move

This commit is contained in:
2024-01-09 03:07:04 -05:00
parent b056ff218d
commit 14fa0f5e8e
82 changed files with 1683 additions and 1712 deletions

View File

@ -1,6 +1,6 @@
from core.config import CoreConfig
from core.allnet import AllnetServlet
from core.aimedb import AimedbFactory
from core.allnet import AllnetServlet, BillingServlet
from core.aimedb import AimedbServlette
from core.title import TitleServlet
from core.utils import Utils
from core.mucha import MuchaServlet

View File

@ -102,7 +102,7 @@ class ADBHeader:
magic, protocol_ver, cmd, length, status, game_id, store_id, keychip_id = struct.unpack_from("<5H6sI12s", data)
head = cls(magic, protocol_ver, cmd, length, status, game_id, store_id, keychip_id)
if head.length != len(data):
if head.length > len(data):
raise ADBHeaderException(f"Length is incorrect! Expect {head.length}, got {len(data)}")
return head
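For reference, a minimal sketch of a packet that satisfies the relaxed length check above; every value is illustrative, and only the field order and sizes follow the "<5H6sI12s" layout parsed by from_data:

import struct

header = struct.pack(
    "<5H6sI12s",
    0xA13E,              # magic (illustrative)
    0x3087,              # protocol_ver (illustrative)
    0x0F,                # cmd (illustrative)
    0x20,                # length: must not exceed the actual packet size after this change
    1,                   # status
    b"SXXX\x00\x00",     # game_id, 6 bytes padded
    0xFFF0,              # store_id
    b"A69E01A0000\x00",  # keychip_id, 12 bytes padded
)
assert len(header) == 0x20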

View File

@ -2,8 +2,8 @@ from twisted.internet.protocol import Factory, Protocol
import logging, coloredlogs
from Crypto.Cipher import AES
import struct
from typing import Dict, Tuple, Callable, Union
from typing_extensions import Final
from typing import Dict, Tuple, Callable, Union, Optional
import asyncio
from logging.handlers import TimedRotatingFileHandler
from core.config import CoreConfig
@ -11,15 +11,37 @@ from core.utils import create_sega_auth_key
from core.data import Data
from .adb_handlers import *
class AimedbProtocol(Protocol):
class AimedbServlette():
request_list: Dict[int, Tuple[Callable[[bytes, int], Union[ADBBaseResponse, bytes]], int, str]] = {}
def __init__(self, core_cfg: CoreConfig) -> None:
self.logger = logging.getLogger("aimedb")
self.config = core_cfg
def __init__(self, core_cfg: CoreConfig) -> None:
self.config = core_cfg
self.data = Data(core_cfg)
if core_cfg.aimedb.key == "":
self.logger = logging.getLogger("aimedb")
if not hasattr(self.logger, "initted"):
log_fmt_str = "[%(asctime)s] Aimedb | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.config.server.log_dir, "aimedb"),
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(self.config.aimedb.loglevel)
coloredlogs.install(
level=core_cfg.aimedb.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger.initted = True
if not core_cfg.aimedb.key:
self.logger.error("!!!KEY NOT SET!!!")
exit(1)
@ -40,27 +62,30 @@ class AimedbProtocol(Protocol):
self.register_handler(0x13, 0x14, self.handle_log_ex, 'aime_log_ex')
self.register_handler(0x64, 0x65, self.handle_hello, 'hello')
self.register_handler(0x66, 0, self.handle_goodbye, 'goodbye')
def register_handler(self, cmd: int, resp:int, handler: Callable[[bytes, int], Union[ADBBaseResponse, bytes]], name: str) -> None:
self.request_list[cmd] = (handler, resp, name)
def start(self) -> None:
self.logger.info(f"Start on port {self.config.aimedb.port}")
asyncio.create_task(asyncio.start_server(self.dataReceived, self.config.server.listen_address, self.config.aimedb.port))
async def dataReceived(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
self.logger.debug(f"Connection made from {writer.get_extra_info('peername')[0]}")
while True:
try:
data: bytes = await reader.read()
if len(data) == 0:
self.logger.debug("Connection closed")
return
await self.process_data(data, reader, writer)
await writer.drain()
except ConnectionResetError as e:
self.logger.warn("Connection reset, disconnecting")
return
def append_padding(self, data: bytes):
"""Appends 0s to the end of the data until it's at the correct size"""
length = struct.unpack_from("<H", data, 6)
padding_size = length[0] - len(data)
data += bytes(padding_size)
return data
def connectionMade(self) -> None:
self.logger.debug(f"{self.transport.getPeer().host} Connected")
def connectionLost(self, reason) -> None:
self.logger.debug(
f"{self.transport.getPeer().host} Disconnected - {reason.value}"
)
def dataReceived(self, data: bytes) -> None:
async def process_data(self, data: bytes, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> Optional[bytes]:
addr = writer.get_extra_info('peername')[0]
cipher = AES.new(self.config.aimedb.key.encode(), AES.MODE_ECB)
try:
@ -68,9 +93,9 @@ class AimedbProtocol(Protocol):
except Exception as e:
self.logger.error(f"Failed to decrypt {data.hex()} because {e}")
return None
return
self.logger.debug(f"{self.transport.getPeer().host} wrote {decrypted.hex()}")
self.logger.debug(f"{addr} wrote {decrypted.hex()}")
try:
head = ADBHeader.from_data(decrypted)
@ -79,7 +104,9 @@ class AimedbProtocol(Protocol):
self.logger.error(f"Error parsing ADB header: {e}")
try:
encrypted = cipher.encrypt(ADBBaseResponse().make())
self.transport.write(encrypted)
writer.write(encrypted)
await writer.drain()
return
except Exception as e:
self.logger.error(f"Failed to encrypt default response because {e}")
@ -89,46 +116,51 @@ class AimedbProtocol(Protocol):
if head.keychip_id == "ABCD1234567" or head.store_id == 0xfff0:
self.logger.warning(f"Request from uninitialized AMLib: {vars(head)}")
if head.cmd == 0x66:
self.logger.info("Goodbye")
writer.close()
return
handler, resp_code, name = self.request_list.get(head.cmd, (self.handle_default, None, 'default'))
if resp_code is None:
self.logger.warning(f"No handler for cmd {hex(head.cmd)}")
elif resp_code > 0:
self.logger.info(f"{name} from {head.keychip_id} ({head.game_id}) @ {self.transport.getPeer().host}")
self.logger.info(f"{name} from {head.keychip_id} ({head.game_id}) @ {addr}")
resp = handler(decrypted, resp_code)
resp = await handler(decrypted, resp_code)
if type(resp) == ADBBaseResponse or issubclass(type(resp), ADBBaseResponse):
resp_bytes = resp.make()
if len(resp_bytes) != resp.head.length:
resp_bytes = self.append_padding(resp_bytes)
elif type(resp) == bytes:
resp_bytes = resp
elif resp is None: # Nothing to send, probably a goodbye
self.logger.warn(f"None returned by handler for {name}")
return
else:
self.logger.error(f"Unsupported type returned by ADB handler for {name}: {type(resp)}")
raise TypeError(f"Unsupported type returned by ADB handler for {name}: {type(resp)}")
try:
try:
encrypted = cipher.encrypt(resp_bytes)
self.logger.debug(f"Response {resp_bytes.hex()}")
self.transport.write(encrypted)
writer.write(encrypted)
except Exception as e:
self.logger.error(f"Failed to encrypt {resp_bytes.hex()} because {e}")
def handle_default(self, data: bytes, resp_code: int, length: int = 0x20) -> ADBBaseResponse:
async def handle_default(self, data: bytes, resp_code: int, length: int = 0x20) -> ADBBaseResponse:
req = ADBHeader.from_data(data)
return ADBBaseResponse(resp_code, length, 1, req.game_id, req.store_id, req.keychip_id, req.protocol_ver)
def handle_hello(self, data: bytes, resp_code: int) -> ADBBaseResponse:
return self.handle_default(data, resp_code)
async def handle_hello(self, data: bytes, resp_code: int) -> ADBBaseResponse:
return await self.handle_default(data, resp_code)
def handle_campaign(self, data: bytes, resp_code: int) -> ADBBaseResponse:
async def handle_campaign(self, data: bytes, resp_code: int) -> ADBBaseResponse:
h = ADBHeader.from_data(data)
if h.protocol_ver >= 0x3030:
req = h
@ -143,26 +175,26 @@ class AimedbProtocol(Protocol):
# We don't currently support campaigns
return resp
def handle_lookup(self, data: bytes, resp_code: int) -> ADBBaseResponse:
async def handle_lookup(self, data: bytes, resp_code: int) -> ADBBaseResponse:
req = ADBLookupRequest(data)
user_id = self.data.card.get_user_id_from_card(req.access_code)
is_banned = self.data.card.get_card_banned(req.access_code)
is_locked = self.data.card.get_card_locked(req.access_code)
ret = ADBLookupResponse.from_req(req.head, user_id)
if is_banned and is_locked:
ret.head.status = ADBStatus.BAN_SYS_USER
elif is_banned:
ret.head.status = ADBStatus.BAN_SYS
elif is_locked:
ret.head.status = ADBStatus.LOCK_USER
ret = ADBLookupResponse.from_req(req.head, user_id)
self.logger.info(
f"access_code {req.access_code} -> user_id {ret.user_id}"
)
return ret
def handle_lookup_ex(self, data: bytes, resp_code: int) -> ADBBaseResponse:
async def handle_lookup_ex(self, data: bytes, resp_code: int) -> ADBBaseResponse:
req = ADBLookupRequest(data)
user_id = self.data.card.get_user_id_from_card(req.access_code)
@ -191,7 +223,7 @@ class AimedbProtocol(Protocol):
return ret
def handle_felica_lookup(self, data: bytes, resp_code: int) -> bytes:
async def handle_felica_lookup(self, data: bytes, resp_code: int) -> bytes:
"""
On official, I think a card has to be registered for this to actually work, but
I'm making the executive decision to not implement that and just kick back our
@ -207,7 +239,7 @@ class AimedbProtocol(Protocol):
)
return ADBFelicaLookupResponse.from_req(req.head, ac)
def handle_felica_register(self, data: bytes, resp_code: int) -> bytes:
async def handle_felica_register(self, data: bytes, resp_code: int) -> bytes:
"""
I've never seen this used.
"""
@ -239,7 +271,7 @@ class AimedbProtocol(Protocol):
return ADBFelicaLookupResponse.from_req(req.head, ac)
def handle_felica_lookup_ex(self, data: bytes, resp_code: int) -> bytes:
async def handle_felica_lookup_ex(self, data: bytes, resp_code: int) -> bytes:
req = ADBFelicaLookup2Request(data)
access_code = self.data.card.to_access_code(req.idm)
user_id = self.data.card.get_user_id_from_card(access_code=access_code)
@ -263,7 +295,7 @@ class AimedbProtocol(Protocol):
return resp
def handle_campaign_clear(self, data: bytes, resp_code: int) -> ADBBaseResponse:
async def handle_campaign_clear(self, data: bytes, resp_code: int) -> ADBBaseResponse:
req = ADBCampaignClearRequest(data)
resp = ADBCampaignClearResponse.from_req(req.head)
@ -271,7 +303,7 @@ class AimedbProtocol(Protocol):
# We don't support campaign stuff
return resp
def handle_register(self, data: bytes, resp_code: int) -> bytes:
async def handle_register(self, data: bytes, resp_code: int) -> bytes:
req = ADBLookupRequest(data)
user_id = -1
@ -305,17 +337,17 @@ class AimedbProtocol(Protocol):
return resp
# TODO: Save these in some capacity, as deemed relevant
def handle_status_log(self, data: bytes, resp_code: int) -> bytes:
async def handle_status_log(self, data: bytes, resp_code: int) -> bytes:
req = ADBStatusLogRequest(data)
self.logger.info(f"User {req.aime_id} logged {req.status.name} event")
return ADBBaseResponse(resp_code, 0x20, 1, req.head.game_id, req.head.store_id, req.head.keychip_id, req.head.protocol_ver)
def handle_log(self, data: bytes, resp_code: int) -> bytes:
async def handle_log(self, data: bytes, resp_code: int) -> bytes:
req = ADBLogRequest(data)
self.logger.info(f"User {req.aime_id} logged {req.status.name} event, credit_ct: {req.credit_ct} bet_ct: {req.bet_ct} won_ct: {req.won_ct}")
return ADBBaseResponse(resp_code, 0x20, 1, req.head.game_id, req.head.store_id, req.head.keychip_id, req.head.protocol_ver)
def handle_log_ex(self, data: bytes, resp_code: int) -> bytes:
async def handle_log_ex(self, data: bytes, resp_code: int) -> bytes:
req = ADBLogExRequest(data)
strs = []
self.logger.info(f"Recieved {req.num_logs} or {len(req.logs)} logs")
@ -324,43 +356,3 @@ class AimedbProtocol(Protocol):
self.logger.debug(f"User {req.logs[x].aime_id} logged {req.logs[x].status.name} event, credit_ct: {req.logs[x].credit_ct} bet_ct: {req.logs[x].bet_ct} won_ct: {req.logs[x].won_ct}")
return ADBLogExResponse.from_req(req.head)
def handle_goodbye(self, data: bytes, resp_code: int) -> None:
self.logger.info(f"goodbye from {self.transport.getPeer().host}")
self.transport.loseConnection()
return
class AimedbFactory(Factory):
protocol = AimedbProtocol
def __init__(self, cfg: CoreConfig) -> None:
self.config = cfg
log_fmt_str = "[%(asctime)s] Aimedb | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
self.logger = logging.getLogger("aimedb")
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.config.server.log_dir, "aimedb"),
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(self.config.aimedb.loglevel)
coloredlogs.install(
level=cfg.aimedb.loglevel, logger=self.logger, fmt=log_fmt_str
)
if self.config.aimedb.key == "":
self.logger.error("Please set 'key' field in your config file.")
exit(1)
self.logger.info(f"Ready on port {self.config.aimedb.port}")
def buildProtocol(self, addr):
return AimedbProtocol(self.config)
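For context, a minimal sketch of driving the new asyncio-based AimedbServlette on its own; the helper name and config path are assumptions, not part of this commit:

import asyncio
import yaml

from core.config import CoreConfig
from core.aimedb import AimedbServlette

async def run_aimedb() -> None:
    # Load core.yaml the same way other entry points in this commit do (path is an assumption).
    cfg = CoreConfig()
    cfg.update(yaml.safe_load(open("config/core.yaml")))
    servlet = AimedbServlette(cfg)
    servlet.start()               # schedules asyncio.start_server on the running loop
    await asyncio.Event().wait()  # keep the process alive; connections are handled per-client

if __name__ == "__main__":
    asyncio.run(run_aimedb())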

View File

@ -1,20 +1,24 @@
from typing import Dict, List, Any, Optional, Tuple, Union, Final
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
from twisted.web.http import Request
from datetime import datetime
import pytz
import base64
import zlib
import json
import yaml
import logging
import coloredlogs
import urllib.parse
import math
from typing import Dict, List, Any, Optional, Union, Final
from logging.handlers import TimedRotatingFileHandler
from starlette.requests import Request
from starlette.responses import PlainTextResponse
from starlette.applications import Starlette
from starlette.routing import Route
from datetime import datetime
from enum import Enum
from Crypto.PublicKey import RSA
from Crypto.Hash import SHA
from Crypto.Signature import PKCS1_v1_5
from time import strptime
from os import path
import urllib.parse
import math
from os import path, environ, mkdir, access, W_OK
from .config import CoreConfig
from .utils import Utils
@ -91,7 +95,6 @@ class DLI_STATUS(Enum):
class AllnetServlet:
def __init__(self, core_cfg: CoreConfig, cfg_folder: str):
super().__init__()
self.config = core_cfg
self.config_folder = cfg_folder
self.data = Data(core_cfg)
@ -126,19 +129,20 @@ class AllnetServlet:
self.logger.error("No games detected!")
self.logger.info(
f"Serving {len(TitleServlet.title_registry)} game codes port {core_cfg.allnet.port}"
f"Serving {len(TitleServlet.title_registry)} game codes"
)
def handle_poweron(self, request: Request, _: Dict):
async def handle_poweron(self, request: Request):
request_ip = Utils.get_ip_addr(request)
pragma_header = request.getHeader('Pragma')
pragma_header = request.headers.get('Pragma', "")
is_dfi = pragma_header is not None and pragma_header == "DFI"
data = await request.body()
try:
if is_dfi:
req_urlencode = self.from_dfi(request.content.getvalue())
req_urlencode = self.from_dfi(data)
else:
req_urlencode = request.content.getvalue().decode()
req_urlencode = data.decode()
req_dict = self.allnet_req_to_dict(req_urlencode)
if req_dict is None:
@ -155,7 +159,7 @@ class AllnetServlet:
except AllnetRequestException as e:
if e.message != "":
self.logger.error(e)
return b""
return PlainTextResponse()
if req.format_ver == 3:
resp = AllnetPowerOnResponse3(req.token)
@ -176,7 +180,7 @@ class AllnetServlet:
resp.stat = ALLNET_STAT.bad_machine.value
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
if machine is not None:
arcade = self.data.arcade.get_arcade(machine["arcade"])
@ -190,7 +194,7 @@ class AllnetServlet:
resp.stat = ALLNET_STAT.bad_shop.value
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
elif (not arcade["ip"] or arcade["ip"] is None) and self.config.server.strict_ip_checking:
msg = f"Serial {req.serial} attempted allnet auth from bad IP {req.ip}, but arcade {arcade['id']} has no IP set! (strict checking enabled)."
@ -201,7 +205,7 @@ class AllnetServlet:
resp.stat = ALLNET_STAT.bad_shop.value
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
country = (
@ -245,20 +249,20 @@ class AllnetServlet:
resp.stat = ALLNET_STAT.bad_game.value
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
else:
self.logger.info(
f"Allowed unknown game {req.game_id} v{req.ver} to authenticate from {request_ip} due to 'is_develop' being enabled. S/N: {req.serial}"
)
resp.uri = f"http://{self.config.title.hostname}:{self.config.title.port}/{req.game_id}/{req.ver.replace('.', '')}/"
resp.host = f"{self.config.title.hostname}:{self.config.title.port}"
resp.uri = f"http://{self.config.server.hostname}:{self.config.server.port}/{req.game_id}/{req.ver.replace('.', '')}/"
resp.host = f"{self.config.server.hostname}:{self.config.server.port}"
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
resp_str = urllib.parse.unquote(urllib.parse.urlencode(resp_dict))
self.logger.debug(f"Allnet response: {resp_str}")
return (resp_str + "\n").encode("utf-8")
return PlainTextResponse(resp_str + "\n")
int_ver = req.ver.replace(".", "")
@ -277,18 +281,19 @@ class AllnetServlet:
request.responseHeaders.addRawHeader('Pragma', 'DFI')
return self.to_dfi(resp_str)"""
return resp_str.encode("utf-8")
return PlainTextResponse(resp_str)
def handle_dlorder(self, request: Request, _: Dict):
async def handle_dlorder(self, request: Request):
request_ip = Utils.get_ip_addr(request)
pragma_header = request.getHeader('Pragma')
pragma_header = request.headers.get('Pragma', "")
is_dfi = pragma_header is not None and pragma_header == "DFI"
data = await request.body()
try:
if is_dfi:
req_urlencode = self.from_dfi(request.content.getvalue())
req_urlencode = self.from_dfi(data)
else:
req_urlencode = request.content.getvalue().decode()
req_urlencode = data.decode()
req_dict = self.allnet_req_to_dict(req_urlencode)
if req_dict is None:
@ -305,7 +310,7 @@ class AllnetServlet:
except AllnetRequestException as e:
if e.message != "":
self.logger.error(e)
return b""
return PlainTextResponse()
self.logger.info(
f"DownloadOrder from {request_ip} -> {req.game_id} v{req.ver} serial {req.serial}"
@ -316,18 +321,18 @@ class AllnetServlet:
not self.config.allnet.allow_online_updates
or not self.config.allnet.update_cfg_folder
):
return urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n"
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n")
else: # TODO: Keychip check
if path.exists(
f"{self.config.allnet.update_cfg_folder}/{req.game_id}-{req.ver.replace('.', '')}-app.ini"
):
resp.uri = f"http://{self.config.title.hostname}:{self.config.title.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-app.ini"
resp.uri = f"http://{self.config.server.hostname}:{self.config.server.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-app.ini"
if path.exists(
f"{self.config.allnet.update_cfg_folder}/{req.game_id}-{req.ver.replace('.', '')}-opt.ini"
):
resp.uri += f"|http://{self.config.title.hostname}:{self.config.title.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-opt.ini"
resp.uri += f"|http://{self.config.server.hostname}:{self.config.server.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-opt.ini"
self.logger.debug(f"Sending download uri {resp.uri}")
self.data.base.log_event("allnet", "DLORDER_REQ_SUCCESS", logging.INFO, f"{Utils.get_ip_addr(request)} requested DL Order for {req.serial} {req.game_id} v{req.ver}")
@ -337,33 +342,33 @@ class AllnetServlet:
request.responseHeaders.addRawHeader('Pragma', 'DFI')
return self.to_dfi(res_str)"""
return res_str
return PlainTextResponse(res_str)
def handle_dlorder_ini(self, request: Request, match: Dict) -> bytes:
if "file" not in match:
return b""
async def handle_dlorder_ini(self, request: Request) -> bytes:
req_file = request.path_params.get("file", "").replace("%0A", "")
req_file = match["file"].replace("%0A", "")
if not req_file:
return PlainTextResponse(status_code=404)
if path.exists(f"{self.config.allnet.update_cfg_folder}/{req_file}"):
self.logger.info(f"Request for DL INI file {req_file} from {Utils.get_ip_addr(request)} successful")
self.data.base.log_event("allnet", "DLORDER_INI_SENT", logging.INFO, f"{Utils.get_ip_addr(request)} successfully received {req_file}")
return open(
f"{self.config.allnet.update_cfg_folder}/{req_file}", "rb"
).read()
return PlainTextResponse(open(
f"{self.config.allnet.update_cfg_folder}/{req_file}", "r"
).read())
self.logger.info(f"DL INI File {req_file} not found")
return b""
return PlainTextResponse()
def handle_dlorder_report(self, request: Request, match: Dict) -> bytes:
req_raw = request.content.getvalue()
async def handle_dlorder_report(self, request: Request) -> bytes:
req_raw = await request.body()
client_ip = Utils.get_ip_addr(request)
try:
req_dict: Dict = json.loads(req_raw)
except Exception as e:
self.logger.warning(f"Failed to parse DL Report: {e}")
return "NG"
return PlainTextResponse("NG")
dl_data_type = DLIMG_TYPE.app
dl_data = req_dict.get("appimage", {})
@ -374,13 +379,13 @@ class AllnetServlet:
if dl_data is None or not dl_data:
self.logger.warning(f"Failed to parse DL Report: Invalid format - contains neither appimage nor optimage")
return "NG"
return PlainTextResponse("NG")
rep = DLReport(dl_data, dl_data_type)
if not rep.validate():
self.logger.warning(f"Failed to parse DL Report: Invalid format - {rep.err}")
return "NG"
return PlainTextResponse("NG")
msg = f"{rep.serial} @ {client_ip} reported {rep.rep_type.name} download state {rep.rf_state.name} for {rep.gd} v{rep.dav}:"\
f" {rep.tdsc}/{rep.tsc} segments downloaded for working files {rep.wfl} with {rep.dfl if rep.dfl else 'none'} complete."
@ -388,10 +393,10 @@ class AllnetServlet:
self.data.base.log_event("allnet", "DL_REPORT", logging.INFO, msg, dl_data)
self.logger.info(msg)
return "OK"
return PlainTextResponse("OK")
def handle_loaderstaterecorder(self, request: Request, match: Dict) -> bytes:
req_data = request.content.getvalue()
async def handle_loaderstaterecorder(self, request: Request) -> bytes:
req_data = await request.body()
sections = req_data.decode("utf-8").split("\r\n")
req_dict = dict(urllib.parse.parse_qsl(sections[0]))
@ -403,18 +408,94 @@ class AllnetServlet:
ip = Utils.get_ip_addr(request)
if serial is None or num_files_dld is None or num_files_to_dl is None or dl_state is None:
return "NG".encode()
return PlainTextResponse("NG")
self.logger.info(f"LoaderStateRecorder Request from {ip} {serial}: {num_files_dld}/{num_files_to_dl} Files download (State: {dl_state})")
return "OK".encode()
return PlainTextResponse("OK")
def handle_alive(self, request: Request, match: Dict) -> bytes:
return "OK".encode()
async def handle_alive(self, request: Request) -> bytes:
return PlainTextResponse("OK")
def handle_billing_request(self, request: Request, _: Dict):
req_raw = request.content.getvalue()
async def handle_naomitest(self, request: Request) -> bytes:
self.logger.info(f"Ping from {Utils.get_ip_addr(request)}")
return PlainTextResponse("naomi ok")
def allnet_req_to_dict(self, data: str) -> Optional[List[Dict[str, Any]]]:
"""
Parses an allnet request string into a python dictionary
"""
try:
sections = data.split("\r\n")
ret = []
for x in sections:
ret.append(dict(urllib.parse.parse_qsl(x)))
return ret
except Exception as e:
self.logger.error(f"allnet_req_to_dict: {e} while parsing {data}")
return None
def from_dfi(self, data: bytes) -> str:
zipped = base64.b64decode(data)
unzipped = zlib.decompress(zipped)
return unzipped.decode("utf-8")
def to_dfi(self, data: str) -> bytes:
unzipped = data.encode('utf-8')
zipped = zlib.compress(unzipped)
return base64.b64encode(zipped)
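A quick round trip through the DFI helpers above, for reference (the payload is illustrative):

import base64
import zlib

payload = "game_id=SXXX&ver=1.00&serial=A69E01A0000"
dfi = base64.b64encode(zlib.compress(payload.encode("utf-8")))      # what to_dfi produces
restored = zlib.decompress(base64.b64decode(dfi)).decode("utf-8")   # what from_dfi recovers
assert restored == payload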
class BillingServlet:
def __init__(self, core_cfg: CoreConfig, cfg_folder: str) -> None:
self.config = core_cfg
self.config_folder = cfg_folder
self.data = Data(core_cfg)
self.logger = logging.getLogger("billing")
if not hasattr(self.logger, "initialized"):
log_fmt_str = "[%(asctime)s] Billing | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(self.config.server.log_dir, "billing"),
when="d",
backupCount=10,
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
self.logger.addHandler(fileHandler)
self.logger.addHandler(consoleHandler)
self.logger.setLevel(core_cfg.allnet.loglevel)
coloredlogs.install(
level=core_cfg.billing.loglevel, logger=self.logger, fmt=log_fmt_str
)
self.logger.initialized = True
def billing_req_to_dict(self, data: bytes):
"""
Parses a billing request string into a python dictionary
"""
try:
sections = data.decode("ascii").split("\r\n")
ret = []
for x in sections:
ret.append(dict(urllib.parse.parse_qsl(x)))
return ret
except Exception as e:
self.logger.error(f"billing_req_to_dict: {e} while parsing {data}")
return None
async def handle_billing_request(self, request: Request):
req_raw = await request.body()
if request.getHeader('Content-Type') == "application/octet-stream":
if request.headers.get('Content-Type', '') == "application/octet-stream":
req_unzip = zlib.decompressobj(-zlib.MAX_WBITS).decompress(req_raw)
else:
req_unzip = req_raw
@ -423,8 +504,8 @@ class AllnetServlet:
request_ip = Utils.get_ip_addr(request)
if req_dict is None:
self.logger.error(f"Failed to parse request {request.content.getvalue()}")
return b""
self.logger.error(f"Failed to parse request {req_raw}")
return PlainTextResponse()
self.logger.debug(f"request {req_dict}")
@ -436,7 +517,7 @@ class AllnetServlet:
req = BillingInfo(req_dict[0])
except KeyError as e:
self.logger.error(f"Billing request failed to parse: {e}")
return f"result=5&linelimit=&message=field is missing or formatting is incorrect\r\n".encode()
return PlainTextResponse("result=5&linelimit=&message=field is missing or formatting is incorrect\r\n")
for x in range(1, len(req_dict)):
if not req_dict[x]:
@ -467,7 +548,7 @@ class AllnetServlet:
)
self.logger.warning(msg)
return f"result=1&requestno={req.requestno}&message=Keychip Serial bad\r\n".encode()
return PlainTextResponse(f"result=1&requestno={req.requestno}&message=Keychip Serial bad\r\n")
msg = (
f"Billing checkin from {request_ip}: game {req.gameid} ver {req.gamever} keychip {req.keychipid} playcount "
@ -496,7 +577,6 @@ class AllnetServlet:
# TODO: playhistory
#resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig)
resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig, req.requestno, req.protocolver)
resp_str = urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\r\n"
@ -504,56 +584,9 @@ class AllnetServlet:
self.logger.debug(f"response {vars(resp)}")
if req.traceleft > 0:
self.logger.info(f"Requesting 20 more of {req.traceleft} unsent tracelogs")
return f"result=6&waittime=0&linelimit=20\r\n".encode()
return resp_str.encode("utf-8")
def handle_naomitest(self, request: Request, _: Dict) -> bytes:
self.logger.info(f"Ping from {Utils.get_ip_addr(request)}")
return b"naomi ok"
def billing_req_to_dict(self, data: bytes):
"""
Parses a billing request string into a python dictionary
"""
try:
sections = data.decode("ascii").split("\r\n")
ret = []
for x in sections:
ret.append(dict(urllib.parse.parse_qsl(x)))
return ret
except Exception as e:
self.logger.error(f"billing_req_to_dict: {e} while parsing {data}")
return None
def allnet_req_to_dict(self, data: str) -> Optional[List[Dict[str, Any]]]:
"""
Parses an allnet request string into a python dictionary
"""
try:
sections = data.split("\r\n")
ret = []
for x in sections:
ret.append(dict(urllib.parse.parse_qsl(x)))
return ret
except Exception as e:
self.logger.error(f"allnet_req_to_dict: {e} while parsing {data}")
return None
def from_dfi(self, data: bytes) -> str:
zipped = base64.b64decode(data)
unzipped = zlib.decompress(zipped)
return unzipped.decode("utf-8")
def to_dfi(self, data: str) -> bytes:
unzipped = data.encode('utf-8')
zipped = zlib.compress(unzipped)
return base64.b64encode(zipped)
return PlainTextResponse("result=6&waittime=0&linelimit=20\r\n")
return PlainTextResponse(resp_str)
class AllnetPowerOnRequest:
def __init__(self, req: Dict) -> None:
@ -613,7 +646,6 @@ class AllnetPowerOnResponse3(AllnetPowerOnResponse):
self.minute = None
self.second = None
class AllnetPowerOnResponse2(AllnetPowerOnResponse):
def __init__(self) -> None:
super().__init__()
@ -623,7 +655,6 @@ class AllnetPowerOnResponse2(AllnetPowerOnResponse):
self.timezone = "+09:00"
self.res_class = "PowerOnResponseV2"
class AllnetDownloadOrderRequest:
def __init__(self, req: Dict) -> None:
self.game_id = req.get("game_id", "")
@ -631,7 +662,6 @@ class AllnetDownloadOrderRequest:
self.serial = req.get("serial", "")
self.encode = req.get("encode", "")
class AllnetDownloadOrderResponse:
def __init__(self, stat: int = 1, serial: str = "", uri: str = "") -> None:
self.stat = stat
@ -781,7 +811,6 @@ class BillingResponse:
# playhistory -> YYYYMM/C:...
# YYYY -> 4 digit year, MM -> 2 digit month, C -> Playcount during that period
class AllnetRequestException(Exception):
def __init__(self, message="") -> None:
self.message = message
@ -849,3 +878,26 @@ class DLReport:
return False
return True
cfg_dir = environ.get("DIANA_CFG_DIR", "config")
cfg: CoreConfig = CoreConfig()
if path.exists(f"{cfg_dir}/core.yaml"):
cfg.update(yaml.safe_load(open(f"{cfg_dir}/core.yaml")))
if not path.exists(cfg.server.log_dir):
mkdir(cfg.server.log_dir)
if not access(cfg.server.log_dir, W_OK):
print(
f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
)
exit(1)
billing = BillingServlet(cfg, cfg_dir)
app = Starlette(
cfg.server.is_develop,
[
Route("/request", billing.handle_billing_request, methods=["POST"]),
Route("/request/", billing.handle_billing_request, methods=["POST"]),
]
)
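A sketch of serving the standalone billing app defined above; uvicorn, the config path, and the host/port source are assumptions rather than anything this commit pins:

import uvicorn
import yaml

from core.config import CoreConfig

cfg = CoreConfig()
cfg.update(yaml.safe_load(open("config/core.yaml")))

# "core.allnet:app" refers to the module-level Starlette app constructed above;
# cfg.billing.port comes from the new BillingConfig.
uvicorn.run("core.allnet:app", host=cfg.server.listen_address, port=cfg.billing.port)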

89
core/app.py Normal file
View File

@ -0,0 +1,89 @@
import yaml
import logging
import coloredlogs
from logging.handlers import TimedRotatingFileHandler
from starlette.routing import Route
from starlette.requests import Request
from starlette.applications import Starlette
from starlette.responses import PlainTextResponse
from os import environ, path, mkdir, W_OK, access
from typing import List
from core import CoreConfig, TitleServlet, MuchaServlet, AllnetServlet, BillingServlet, AimedbServlette
from core.frontend import FrontendServlet
async def dummy_rt(request: Request):
return PlainTextResponse("Service OK")
cfg_dir = environ.get("ARTEMIS_CFG_DIR", "config")
cfg: CoreConfig = CoreConfig()
if path.exists(f"{cfg_dir}/core.yaml"):
cfg.update(yaml.safe_load(open(f"{cfg_dir}/core.yaml")))
if not path.exists(cfg.server.log_dir):
mkdir(cfg.server.log_dir)
if not access(cfg.server.log_dir, W_OK):
print(
f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
)
exit(1)
logger = logging.getLogger("core")
log_fmt_str = "[%(asctime)s] Core | %(levelname)s | %(message)s"
log_fmt = logging.Formatter(log_fmt_str)
fileHandler = TimedRotatingFileHandler(
"{0}/{1}.log".format(cfg.server.log_dir, "core"), when="d", backupCount=10
)
fileHandler.setFormatter(log_fmt)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(log_fmt)
logger.addHandler(fileHandler)
logger.addHandler(consoleHandler)
log_lv = logging.DEBUG if cfg.server.is_develop else logging.INFO
logger.setLevel(log_lv)
coloredlogs.install(level=log_lv, logger=logger, fmt=log_fmt_str)
logger.info(f"Artemis starting in {'develop' if cfg.server.is_develop else 'production'} mode")
title = TitleServlet(cfg, cfg_dir) # This has to be loaded first to load plugins
mucha = MuchaServlet(cfg, cfg_dir)
allnet = AllnetServlet(cfg, cfg_dir)
route_lst: List[Route] = [
# Allnet
Route("/sys/servlet/PowerOn", allnet.handle_poweron, methods=["GET", "POST"]),
Route("/sys/servlet/DownloadOrder", allnet.handle_dlorder, methods=["GET", "POST"]),
Route("/sys/servlet/LoaderStateRecorder", allnet.handle_loaderstaterecorder, methods=["GET", "POST"]),
Route("/sys/servlet/Alive", allnet.handle_alive, methods=["GET", "POST"]),
Route("/report-api/Report", allnet.handle_dlorder_report, methods=["POST"]),
Route("/dl/ini/{file:str}", allnet.handle_dlorder_ini),
Route("/naomitest.html", allnet.handle_naomitest),
# Mucha
Route("/mucha_front/boardauth.do", mucha.handle_boardauth, methods=["POST"]),
Route("/mucha_front/updatacheck.do", mucha.handle_updatecheck, methods=["POST"]),
Route("/mucha_front/downloadstate.do", mucha.handle_dlstate, methods=["POST"]),
]
if not cfg.billing.standalone:
billing = BillingServlet(cfg, cfg_dir)
route_lst += [
Route("/request", billing.handle_billing_request, methods=["POST"]),
Route("/request/", billing.handle_billing_request, methods=["POST"]),
]
if not cfg.frontend.standalone:
frontend = FrontendServlet(cfg, cfg_dir)
route_lst += frontend.get_routes()
else:
route_lst.append(Route("/", dummy_rt))
route_lst.append(Route("/robots.txt", FrontendServlet.robots))
for code, game in title.title_registry.items():
route_lst += game.get_routes()
app = Starlette(cfg.server.is_develop, route_lst)
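Similarly, a sketch of serving the combined app assembled above (uvicorn is again an assumption, not something this commit references):

import uvicorn

from core.app import app, cfg

if __name__ == "__main__":
    uvicorn.run(app, host=cfg.server.listen_address, port=cfg.server.port)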

View File

@ -1,16 +1,48 @@
import logging, os
from typing import Any
class ServerConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
self.__config = parent_config
@property
def listen_address(self) -> str:
"""
Address Artemis will bind to and listen on
"""
return CoreConfig.get_config_field(
self.__config, "core", "server", "listen_address", default="127.0.0.1"
)
@property
def hostname(self) -> str:
"""
Hostname sent to games
"""
return CoreConfig.get_config_field(
self.__config, "core", "server", "hostname", default="localhost"
)
@property
def port(self) -> int:
"""
Port the server will listen on
"""
return CoreConfig.get_config_field(
self.__config, "core", "server", "port", default=8080
)
@property
def ssl_key(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "server", "ssl_key", default="cert/title.key"
)
@property
def ssl_cert(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "title", "ssl_cert", default="cert/title.pem"
)
@property
def allow_user_registration(self) -> bool:
@ -43,9 +75,13 @@ class ServerConfig:
)
@property
def threading(self) -> bool:
def proxy_port(self) -> int:
"""
What port the proxy is listening on. This will be sent instead of 'port' if
is_using_proxy is True and this value is non-zero
"""
return CoreConfig.get_config_field(
self.__config, "core", "server", "threading", default=False
self.__config, "core", "title", "proxy_port", default=0
)
@property
@ -66,7 +102,6 @@ class ServerConfig:
self.__config, "core", "server", "strict_ip_checking", default=False
)
class TitleConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
self.__config = parent_config
@ -79,49 +114,18 @@ class TitleConfig:
)
)
@property
def hostname(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "title", "hostname", default="localhost"
)
@property
def port(self) -> int:
return CoreConfig.get_config_field(
self.__config, "core", "title", "port", default=8080
)
@property
def port_ssl(self) -> int:
return CoreConfig.get_config_field(
self.__config, "core", "title", "port_ssl", default=0
)
@property
def ssl_key(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "title", "ssl_key", default="cert/title.key"
)
@property
def ssl_cert(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "title", "ssl_cert", default="cert/title.pem"
)
@property
def reboot_start_time(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "title", "reboot_start_time", default=""
self.__config, "core", "title", "reboot_start_time", default="04:00"
)
@property
def reboot_end_time(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "title", "reboot_end_time", default=""
self.__config, "core", "title", "reboot_end_time", default="05:00"
)
class DatabaseConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
self.__config = parent_config
@ -176,16 +180,6 @@ class DatabaseConfig:
)
)
@property
def user_table_autoincrement_start(self) -> int:
return CoreConfig.get_config_field(
self.__config,
"core",
"database",
"user_table_autoincrement_start",
default=10000,
)
@property
def enable_memcached(self) -> bool:
return CoreConfig.get_config_field(
@ -198,21 +192,14 @@ class DatabaseConfig:
self.__config, "core", "database", "memcached_host", default="localhost"
)
class FrontendConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
self.__config = parent_config
@property
def enable(self) -> int:
def standalone(self) -> int:
return CoreConfig.get_config_field(
self.__config, "core", "frontend", "enable", default=False
)
@property
def port(self) -> int:
return CoreConfig.get_config_field(
self.__config, "core", "frontend", "port", default=8090
self.__config, "core", "frontend", "standalone", default=True
)
@property
@ -222,7 +209,12 @@ class FrontendConfig:
self.__config, "core", "frontend", "loglevel", default="info"
)
)
@property
def secret(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "frontend", "secret", default=""
)
class AllnetConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
@ -236,18 +228,6 @@ class AllnetConfig:
)
)
@property
def port(self) -> int:
return CoreConfig.get_config_field(
self.__config, "core", "allnet", "port", default=80
)
@property
def ip_check(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "core", "allnet", "ip_check", default=False
)
@property
def allow_online_updates(self) -> int:
return CoreConfig.get_config_field(
@ -260,10 +240,23 @@ class AllnetConfig:
self.__config, "core", "allnet", "update_cfg_folder", default=""
)
class BillingConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
self.__config = parent_config
@property
def standalone(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "core", "billing", "standalone", default=True
)
@property
def loglevel(self) -> int:
return CoreConfig.str_to_loglevel(
CoreConfig.get_config_field(
self.__config, "core", "billing", "loglevel", default="info"
)
)
@property
def port(self) -> int:
@ -289,7 +282,6 @@ class BillingConfig:
self.__config, "core", "billing", "signing_key", default="cert/billing.key"
)
class AimedbConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
self.__config = parent_config
@ -326,17 +318,10 @@ class AimedbConfig:
self.__config, "core", "aimedb", "id_lifetime_seconds", default=86400
)
class MuchaConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
self.__config = parent_config
@property
def enable(self) -> int:
return CoreConfig.get_config_field(
self.__config, "core", "mucha", "enable", default=False
)
@property
def loglevel(self) -> int:
return CoreConfig.str_to_loglevel(
@ -345,13 +330,6 @@ class MuchaConfig:
)
)
@property
def hostname(self) -> str:
return CoreConfig.get_config_field(
self.__config, "core", "mucha", "hostname", default="localhost"
)
class CoreConfig(dict):
def __init__(self) -> None:
self.server = ServerConfig(self)
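A short sketch of how the property pattern above resolves values; the core.yaml path is illustrative, and the printed defaults are the ones declared in this file:

import yaml

from core.config import CoreConfig

cfg = CoreConfig()
cfg.update(yaml.safe_load(open("config/core.yaml")))

# Each property falls back to its declared default when the key is absent from core.yaml.
print(cfg.server.hostname)          # "localhost" unless core: server: hostname is set
print(cfg.billing.standalone)       # True unless core: billing: standalone overrides it
print(cfg.title.reboot_start_time)  # "04:00" after this commit, instead of the old empty default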

1
core/data/alembic/README Normal file
View File

@ -0,0 +1 @@
Generic single-database configuration.

View File

@ -0,0 +1,89 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = .
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to ./versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat ./versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
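For reference, a sketch of applying these migrations from Python; the ini path, script location handling, and database URL are assumptions:

from alembic import command
from alembic.config import Config

alembic_cfg = Config("core/data/alembic/alembic.ini")
# Resolve the script directory and point alembic at a real database instead of the placeholder URL.
alembic_cfg.set_main_option("script_location", "core/data/alembic")
alembic_cfg.set_main_option("sqlalchemy.url", "mysql+pymysql://user:pass@localhost/aime")

command.upgrade(alembic_cfg, "head")  # apply all pending revisions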

77
core/data/alembic/env.py Normal file
View File

@ -0,0 +1,77 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

View File

@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@ -2,7 +2,9 @@ import logging, coloredlogs
from typing import Any, Dict, List
from twisted.web import resource
from twisted.web.util import redirectTo
from twisted.web.http import Request
from starlette.requests import Request
from starlette.routing import Route
from starlette.responses import Response, PlainTextResponse
from logging.handlers import TimedRotatingFileHandler
from twisted.web.server import Session
from zope.interface import Interface, Attribute, implementer
@ -98,8 +100,15 @@ class FrontendServlet(resource.Resource):
self.putChild(b"game", fe_game)
self.logger.info(
f"Ready on port {self.config.frontend.port} serving {len(fe_game.children)} games"
f"Ready on port {self.config.server.port} serving {len(fe_game.children)} games"
)
def get_routes(self) -> List[Route]:
return []
@classmethod
async def robots(cls, request: Request) -> PlainTextResponse:
return PlainTextResponse("User-agent: *\nDisallow: /\n\nUser-agent: AdsBot-Google\nDisallow: /")
def render_GET(self, request):
self.logger.debug(f"{Utils.get_ip_addr(request)} -> {request.uri.decode()}")

View File

@ -2,7 +2,7 @@ from typing import Dict, Any, Optional, List
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
from twisted.web import resource
from twisted.web.http import Request
from starlette.requests import Request
from datetime import datetime
from Crypto.Cipher import Blowfish
import pytz
@ -12,7 +12,7 @@ from .utils import Utils
from .title import TitleServlet
class MuchaServlet:
mucha_registry: List[str] = []
mucha_registry: Dict[str, str] = {}
def __init__(self, cfg: CoreConfig, cfg_dir: str) -> None:
self.config = cfg
self.config_dir = cfg_dir
@ -39,11 +39,12 @@ class MuchaServlet:
for _, mod in TitleServlet.title_registry.items():
if hasattr(mod, "get_mucha_info"):
enabled, game_cd = mod.get_mucha_info(
enabled, game_cds, netid_prefixes = mod.get_mucha_info(
self.config, self.config_dir
)
if enabled:
self.mucha_registry.append(game_cd)
for x in range(len(game_cds)):
self.mucha_registry[game_cds[x]] = netid_prefixes[x]
self.logger.info(f"Serving {len(self.mucha_registry)} games")
@ -75,7 +76,7 @@ class MuchaServlet:
self.logger.debug(f"Decrypt SN to {sn_decrypt.hex()}")
resp = MuchaAuthResponse(
f"{self.config.mucha.hostname}{':' + str(self.config.allnet.port) if self.config.server.is_develop else ''}"
f"{self.config.server.hostname}{':' + str(self.config.server.port) if self.config.server.is_develop else ''}"
)
self.logger.debug(f"Mucha response {vars(resp)}")
@ -100,7 +101,7 @@ class MuchaServlet:
self.logger.warning(f"Unknown gameCd {req.gameCd}")
return b"RESULTS=000"
resp = MuchaUpdateResponse(req.gameVer, f"{self.config.mucha.hostname}{':' + str(self.config.allnet.port) if self.config.server.is_develop else ''}")
resp = MuchaUpdateResponse(req.gameVer, f"{self.config.server.hostname}{':' + str(self.config.server.port) if self.config.server.is_develop else ''}")
self.logger.debug(f"Mucha response {vars(resp)}")
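A sketch of the shape get_mucha_info now has to return so the registry loop above can pair each game code with a netID prefix; the class, codes, and prefixes are made up:

from typing import List, Tuple

from core.config import CoreConfig
from core.title import BaseServlet

class ExampleMuchaGame(BaseServlet):
    def get_mucha_info(
        self, core_cfg: CoreConfig, cfg_dir: str
    ) -> Tuple[bool, List[str], List[str]]:
        # enabled flag, game codes, and the netID prefix paired with each code by index
        return (True, ["SBXX", "SBXY"], ["ABGN", "ABGN"])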

View File

@ -1,7 +1,9 @@
from typing import Dict, List, Tuple
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
from twisted.web.http import Request
from starlette.requests import Request
from starlette.responses import Response
from starlette.routing import Route
from core.config import CoreConfig
from core.data import Data
@ -28,18 +30,16 @@ class BaseServlet:
"""
return False
def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
def get_routes(self) -> List[Route]:
"""Called during boot to get all matcher endpoints this title servlet handles
Returns:
Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]: A 2-length tuple where offset 0 is GET and offset 1 is POST,
containing a list of 3-length tuples where offset 0 is the name of the function in the handler that should be called, offset 1
is the matching string, and offset 2 is a dict containing rules for the matcher.
List[Route]: A list of Routes, WebSocketRoutes, or similar classes
"""
return (
[("render_GET", "/{game}/{version}/{endpoint}", {'game': R'S...'})],
[("render_POST", "/{game}/{version}/{endpoint}", {'game': R'S...'})]
)
return [
Route("/{game}/{version}/{endpoint}", self.render_POST, methods=["POST"]),
Route("/{game}/{version}/{endpoint}", self.render_GET, methods=["GET"]),
]
def setup(self) -> None:
"""Called once during boot, should contain any additional setup the handler must do, such as starting any sub-services
@ -58,11 +58,11 @@ class BaseServlet:
Tuple[str, str]: A tuple where offset 0 is the allnet uri field, and offset 1 is the allnet host field
"""
if not self.core_cfg.server.is_using_proxy and Utils.get_title_port(self.core_cfg) != 80:
return (f"http://{self.core_cfg.title.hostname}:{Utils.get_title_port(self.core_cfg)}/{game_code}/{game_ver}/", "")
return (f"http://{self.core_cfg.server.hostname}:{Utils.get_title_port(self.core_cfg)}/{game_code}/{game_ver}/", "")
return (f"http://{self.core_cfg.title.hostname}/{game_code}/{game_ver}/", "")
return (f"http://{self.core_cfg.server.hostname}/{game_code}/{game_ver}/", "")
def get_mucha_info(self, core_cfg: CoreConfig, cfg_dir: str) -> Tuple[bool, str]:
def get_mucha_info(self, core_cfg: CoreConfig, cfg_dir: str) -> Tuple[bool, List[str], List[str]]:
"""Called once during boot to check if this game is a mucha game
Args:
@ -72,15 +72,15 @@ class BaseServlet:
Returns:
Tuple[bool, str]: Tuple where offset 0 is true if the game is enabled, false otherwise, and offset 1 is the game CD
"""
return (False, "")
return (False, [], [])
def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes:
self.logger.warn(f"{game_code} Does not dispatch POST")
return None
async def render_POST(self, request: Request) -> bytes:
self.logger.warn(f"Game Does not dispatch POST")
return Response()
def render_GET(self, request: Request, game_code: str, matchers: Dict) -> bytes:
self.logger.warn(f"{game_code} Does not dispatch GET")
return None
async def render_GET(self, request: Request) -> bytes:
self.logger.warn(f"Game Does not dispatch GET")
return Response()
class TitleServlet:
title_registry: Dict[str, BaseServlet] = {}
@ -136,7 +136,7 @@ class TitleServlet:
self.logger.error(f"{folder} missing game_code or index in __init__.py, or is_game_enabled in index")
self.logger.info(
f"Serving {len(self.title_registry)} game codes {'on port ' + str(core_cfg.title.port) if core_cfg.title.port > 0 else ''}"
f"Serving {len(self.title_registry)} game codes {'on port ' + str(core_cfg.server.port) if core_cfg.server.port > 0 else ''}"
)
def render_GET(self, request: Request, endpoints: dict) -> bytes:
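And a sketch of the new Route-based contract for a title plugin; the class name, game code, and endpoint are hypothetical:

from typing import List

from starlette.requests import Request
from starlette.responses import PlainTextResponse, Response
from starlette.routing import Route

from core.title import BaseServlet

class ExampleTitleServlet(BaseServlet):
    def get_routes(self) -> List[Route]:
        # Starlette path parameters replace the old matcher-tuple scheme.
        return [
            Route("/SXXX/{version:int}/{endpoint}", self.render_POST, methods=["POST"]),
        ]

    async def render_POST(self, request: Request) -> Response:
        version = request.path_params["version"]
        endpoint = request.path_params["endpoint"]
        return PlainTextResponse(f"handled {endpoint} for version {version}")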

View File

@ -1,6 +1,6 @@
from typing import Dict, Any, Optional
from types import ModuleType
from twisted.web.http import Request
from starlette.requests import Request
import logging
import importlib
from os import walk
@ -34,36 +34,16 @@ class Utils:
@classmethod
def get_ip_addr(cls, req: Request) -> str:
return (
req.getAllHeaders()[b"x-forwarded-for"].decode()
if b"x-forwarded-for" in req.getAllHeaders()
else req.getClientAddress().host
)
return req.headers.get("x-forwarded-for", req.client.host)
@classmethod
def get_title_port(cls, cfg: CoreConfig):
if cls.real_title_port is not None: return cls.real_title_port
if cfg.title.port == 0:
cls.real_title_port = cfg.allnet.port
else:
cls.real_title_port = cfg.title.port
cls.real_title_port = cfg.server.proxy_port if cfg.server.is_using_proxy else cfg.server.port
return cls.real_title_port
@classmethod
def get_title_port_ssl(cls, cfg: CoreConfig):
if cls.real_title_port_ssl is not None: return cls.real_title_port_ssl
if cfg.title.port_ssl == 0:
cls.real_title_port_ssl = 443
else:
cls.real_title_port_ssl = cfg.title.port_ssl
return cls.real_title_port_ssl
def create_sega_auth_key(aime_id: int, game: str, place_id: int, keychip_id: str, b64_secret: str, exp_seconds: int = 86400, err_logger: str = 'aimedb') -> Optional[str]:
logger = logging.getLogger(err_logger)
try: