Server stuff
This commit is contained in:
parent
f3e9db8cb1
commit
4ca5e03f59
|
@ -1,3 +0,0 @@
|
||||||
[submodule "server"]
|
|
||||||
path = eaapi/server
|
|
||||||
url = https://gitea.tendokyu.moe/eamuse/server.git
|
|
|
@ -1,13 +1,18 @@
|
||||||
from .const import Type, ServicesMode, Compression
|
from .const import Type, ServicesMode, Compression
|
||||||
from .node import XMLNode
|
from .node import XMLNode
|
||||||
from .encoder import Encoder
|
from .encoder import Encoder
|
||||||
from .decoder import Decoder
|
from .decoder import Decoder
|
||||||
from .wrapper import wrap, unwrap
|
from .wrapper import wrap, unwrap
|
||||||
from .misc import parse_model
|
from .misc import parse_model
|
||||||
|
|
||||||
__all__ = (
|
from .exception import EAAPIException
|
||||||
"Type", "ServicesMode", "Compression",
|
from . import crypt
|
||||||
"XMLNode", "Encoder", "Decoder",
|
|
||||||
"wrap", "unwrap",
|
__all__ = (
|
||||||
"parse_model",
|
"Type", "ServicesMode", "Compression",
|
||||||
)
|
"XMLNode", "Encoder", "Decoder",
|
||||||
|
"wrap", "unwrap",
|
||||||
|
"parse_model",
|
||||||
|
"EAAPIException",
|
||||||
|
"crypt",
|
||||||
|
)
|
||||||
|
|
|
@ -1,85 +1,85 @@
|
||||||
import binascii
|
import binascii
|
||||||
|
|
||||||
from Crypto.Cipher import DES3
|
from Crypto.Cipher import DES3
|
||||||
|
|
||||||
from .misc import assert_true, pack, unpack
|
from .misc import assert_true, pack, unpack
|
||||||
from .exception import InvalidCard
|
from .exception import InvalidCard
|
||||||
from .keys import CARDCONV_KEY
|
from .keys import CARDCONV_KEY
|
||||||
from .const import CARD_ALPHABET
|
from .const import CARD_ALPHABET
|
||||||
|
|
||||||
|
|
||||||
def enc_des(uid):
|
def enc_des(uid: bytes) -> bytes:
|
||||||
cipher = DES3.new(CARDCONV_KEY, DES3.MODE_CBC, iv=b'\0' * 8)
|
cipher = DES3.new(CARDCONV_KEY, DES3.MODE_CBC, iv=b'\0' * 8)
|
||||||
return cipher.encrypt(uid)
|
return cipher.encrypt(uid)
|
||||||
|
|
||||||
|
|
||||||
def dec_des(uid):
|
def dec_des(uid: bytes) -> bytes:
|
||||||
cipher = DES3.new(CARDCONV_KEY, DES3.MODE_CBC, iv=b'\0' * 8)
|
cipher = DES3.new(CARDCONV_KEY, DES3.MODE_CBC, iv=b'\0' * 8)
|
||||||
return cipher.decrypt(uid)
|
return cipher.decrypt(uid)
|
||||||
|
|
||||||
|
|
||||||
def checksum(data):
|
def checksum(data: bytes) -> int:
|
||||||
chk = sum(data[i] * (i % 3 + 1) for i in range(15))
|
chk = sum(data[i] * (i % 3 + 1) for i in range(15))
|
||||||
|
|
||||||
while chk > 31:
|
while chk > 31:
|
||||||
chk = (chk >> 5) + (chk & 31)
|
chk = (chk >> 5) + (chk & 31)
|
||||||
|
|
||||||
return chk
|
return chk
|
||||||
|
|
||||||
|
|
||||||
def uid_to_konami(uid):
|
def uid_to_konami(uid: str) -> str:
|
||||||
assert_true(len(uid) == 16, "UID must be 16 bytes", InvalidCard)
|
assert_true(len(uid) == 16, "UID must be 16 bytes", InvalidCard)
|
||||||
|
|
||||||
if uid.upper().startswith("E004"):
|
if uid.upper().startswith("E004"):
|
||||||
card_type = 1
|
card_type = 1
|
||||||
elif uid.upper().startswith("0"):
|
elif uid.upper().startswith("0"):
|
||||||
card_type = 2
|
card_type = 2
|
||||||
else:
|
else:
|
||||||
raise InvalidCard("Invalid UID prefix")
|
raise InvalidCard("Invalid UID prefix")
|
||||||
|
|
||||||
kid = binascii.unhexlify(uid)
|
kid = binascii.unhexlify(uid)
|
||||||
assert_true(len(kid) == 8, "ID must be 8 bytes", InvalidCard)
|
assert_true(len(kid) == 8, "ID must be 8 bytes", InvalidCard)
|
||||||
|
|
||||||
out = bytearray(unpack(enc_des(kid[::-1]), 5)[:13]) + b'\0\0\0'
|
out = bytearray(unpack(enc_des(kid[::-1]), 5)[:13]) + b'\0\0\0'
|
||||||
|
|
||||||
out[0] ^= card_type
|
out[0] ^= card_type
|
||||||
out[13] = 1
|
out[13] = 1
|
||||||
for i in range(1, 14):
|
for i in range(1, 14):
|
||||||
out[i] ^= out[i - 1]
|
out[i] ^= out[i - 1]
|
||||||
out[14] = card_type
|
out[14] = card_type
|
||||||
out[15] = checksum(out)
|
out[15] = checksum(out)
|
||||||
|
|
||||||
return "".join(CARD_ALPHABET[i] for i in out)
|
return "".join(CARD_ALPHABET[i] for i in out)
|
||||||
|
|
||||||
|
|
||||||
def konami_to_uid(konami_id):
|
def konami_to_uid(konami_id: str) -> str:
|
||||||
if konami_id[14] == "1":
|
if konami_id[14] == "1":
|
||||||
card_type = 1
|
card_type = 1
|
||||||
elif konami_id[14] == "2":
|
elif konami_id[14] == "2":
|
||||||
card_type = 2
|
card_type = 2
|
||||||
else:
|
else:
|
||||||
raise InvalidCard("Invalid ID")
|
raise InvalidCard("Invalid ID")
|
||||||
|
|
||||||
assert_true(len(konami_id) == 16, "ID must be 16 characters", InvalidCard)
|
assert_true(len(konami_id) == 16, "ID must be 16 characters", InvalidCard)
|
||||||
assert_true(all(i in CARD_ALPHABET for i in konami_id), "ID contains invalid characters", InvalidCard)
|
assert_true(all(i in CARD_ALPHABET for i in konami_id), "ID contains invalid characters", InvalidCard)
|
||||||
card = [CARD_ALPHABET.index(i) for i in konami_id]
|
card = bytearray([CARD_ALPHABET.index(i) for i in konami_id])
|
||||||
assert_true(card[11] % 2 == card[12] % 2, "Parity check failed", InvalidCard)
|
assert_true(card[11] % 2 == card[12] % 2, "Parity check failed", InvalidCard)
|
||||||
assert_true(card[13] == card[12] ^ 1, "Card invalid", InvalidCard)
|
assert_true(card[13] == card[12] ^ 1, "Card invalid", InvalidCard)
|
||||||
assert_true(card[15] == checksum(card), "Checksum failed", InvalidCard)
|
assert_true(card[15] == checksum(card), "Checksum failed", InvalidCard)
|
||||||
|
|
||||||
for i in range(13, 0, -1):
|
for i in range(13, 0, -1):
|
||||||
card[i] ^= card[i - 1]
|
card[i] ^= card[i - 1]
|
||||||
|
|
||||||
card[0] ^= card_type
|
card[0] ^= card_type
|
||||||
|
|
||||||
card_id = dec_des(pack(card[:13], 5)[:8])[::-1]
|
card_id = dec_des(pack(card[:13], 5)[:8])[::-1]
|
||||||
card_id = binascii.hexlify(card_id).decode().upper()
|
card_id = binascii.hexlify(card_id).decode().upper()
|
||||||
|
|
||||||
if card_type == 1:
|
if card_type == 1:
|
||||||
assert_true(card_id[:4] == "E004", "Invalid card type", InvalidCard)
|
assert_true(card_id[:4] == "E004", "Invalid card type", InvalidCard)
|
||||||
elif card_type == 2:
|
elif card_type == 2:
|
||||||
assert_true(card_id[0] == "0", "Invalid card type", InvalidCard)
|
assert_true(card_id[0] == "0", "Invalid card type", InvalidCard)
|
||||||
return card_id
|
return card_id
|
||||||
|
|
||||||
|
|
||||||
__all__ = ("konami_to_uid", "uid_to_konami")
|
__all__ = ("konami_to_uid", "uid_to_konami")
|
||||||
|
|
334
eaapi/const.py
334
eaapi/const.py
|
@ -1,163 +1,171 @@
|
||||||
import enum
|
import enum
|
||||||
|
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from html import unescape
|
from html import unescape
|
||||||
from typing import List, Callable
|
from typing import List, Callable
|
||||||
|
|
||||||
from .misc import assert_true
|
from .misc import assert_true
|
||||||
|
|
||||||
|
|
||||||
CARD_ALPHABET = "0123456789ABCDEFGHJKLMNPRSTUWXYZ"
|
CARD_ALPHABET = "0123456789ABCDEFGHJKLMNPRSTUWXYZ"
|
||||||
|
|
||||||
NAME_MAX_COMPRESSED = 0x24
|
NAME_MAX_COMPRESSED = 0x24
|
||||||
NAME_MAX_DECOMPRESSED = 0x1000
|
NAME_MAX_DECOMPRESSED = 0x1000
|
||||||
|
|
||||||
ENCODING = {
|
ENCODING = {
|
||||||
0x20: "ascii",
|
0x20: "ascii",
|
||||||
0x40: "iso-8859-1",
|
0x40: "iso-8859-1",
|
||||||
0x60: "euc-jp",
|
0x60: "euc-jp",
|
||||||
0x80: "shift-jis",
|
0x80: "shift-jis",
|
||||||
0xA0: "utf-8",
|
0xA0: "utf-8",
|
||||||
}
|
}
|
||||||
DEFAULT_ENCODING = ENCODING[0x80] # Shift-JIS
|
DEFAULT_ENCODING = ENCODING[0x80] # Shift-JIS
|
||||||
ENCODING[0x00] = DEFAULT_ENCODING
|
ENCODING[0x00] = DEFAULT_ENCODING
|
||||||
XML_ENCODING = {
|
XML_ENCODING = {
|
||||||
"ASCII": "ascii",
|
"ASCII": "ascii",
|
||||||
"ISO-8859-1": "iso-8859-1",
|
"ISO-8859-1": "iso-8859-1",
|
||||||
"EUC-JP": "euc-jp",
|
"EUC-JP": "euc-jp",
|
||||||
"SHIFT_JIS": "shift-jis",
|
"SHIFT_JIS": "shift-jis",
|
||||||
"SHIFT-JIS": "shift-jis",
|
"SHIFT-JIS": "shift-jis",
|
||||||
"UTF-8": "utf-8",
|
"UTF-8": "utf-8",
|
||||||
}
|
}
|
||||||
ENCODING_BACK = {v: k for k, v in ENCODING.items()}
|
ENCODING_BACK = {v: k for k, v in ENCODING.items()}
|
||||||
XML_ENCODING_BACK = {v: k for k, v in XML_ENCODING.items()}
|
XML_ENCODING_BACK = {v: k for k, v in XML_ENCODING.items()}
|
||||||
ATTR = 0x2E
|
ATTR = 0x2E
|
||||||
END_NODE = 0xFE
|
END_NODE = 0xFE
|
||||||
END_DOC = 0xFF
|
END_DOC = 0xFF
|
||||||
PACK_ALPHABET = "0123456789:ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz"
|
PACK_ALPHABET = "0123456789:ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz"
|
||||||
|
|
||||||
CONTENT_COMP_FULL = 0x42
|
CONTENT_COMP_FULL = 0x42
|
||||||
CONTENT_COMP_SCHEMA = 0x43
|
CONTENT_COMP_SCHEMA = 0x43
|
||||||
CONTENT_FINGERPRINT = 0x44 # TODO: Identify how exactly this differs from the others
|
CONTENT_FINGERPRINT = 0x44 # TODO: Identify how exactly this differs from the others
|
||||||
CONTENT_ASCII_FULL = 0x45
|
CONTENT_ASCII_FULL = 0x45
|
||||||
CONTENT_ASCII_SCHEMA = 0x46
|
CONTENT_ASCII_SCHEMA = 0x46
|
||||||
|
|
||||||
CONTENT_COMP = (CONTENT_COMP_FULL, CONTENT_COMP_SCHEMA)
|
CONTENT_COMP = (CONTENT_COMP_FULL, CONTENT_COMP_SCHEMA)
|
||||||
CONTENT_FULL = (CONTENT_COMP_FULL, CONTENT_ASCII_FULL)
|
CONTENT_FULL = (CONTENT_COMP_FULL, CONTENT_ASCII_FULL)
|
||||||
CONTENT = (
|
CONTENT = (
|
||||||
CONTENT_COMP_FULL, CONTENT_COMP_SCHEMA, CONTENT_FINGERPRINT,
|
CONTENT_COMP_FULL, CONTENT_COMP_SCHEMA, CONTENT_FINGERPRINT,
|
||||||
CONTENT_ASCII_FULL, CONTENT_ASCII_SCHEMA
|
CONTENT_ASCII_FULL, CONTENT_ASCII_SCHEMA
|
||||||
)
|
)
|
||||||
|
|
||||||
ARRAY_BIT = 0x40
|
ARRAY_BIT = 0x40
|
||||||
ARRAY_MASK = ARRAY_BIT - 1
|
ARRAY_MASK = ARRAY_BIT - 1
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class _Type:
|
class _Type:
|
||||||
id: int
|
id: int
|
||||||
fmt: str
|
fmt: str
|
||||||
names: List[str]
|
names: List[str]
|
||||||
c_name: str
|
c_name: str
|
||||||
convert: Callable
|
convert: Callable | None
|
||||||
size: int = 1
|
size: int = 1
|
||||||
no_check: bool = False
|
no_check: bool = False
|
||||||
|
|
||||||
def _parse(self, value):
|
def _parse(self, value):
|
||||||
if self.convert is None:
|
if self.convert is None:
|
||||||
return ()
|
return ()
|
||||||
if self.size == 1:
|
if self.size == 1:
|
||||||
if isinstance(value, (list, tuple)) and len(value) == 1:
|
if isinstance(value, (list, tuple)) and len(value) == 1:
|
||||||
value = value[0]
|
value = value[0]
|
||||||
return self.convert(value)
|
return self.convert(value)
|
||||||
if not self.no_check:
|
if not self.no_check:
|
||||||
assert_true(len(value) == self.size, "Invalid node data")
|
assert_true(len(value) == self.size, "Invalid node data")
|
||||||
return (*map(self.convert, value),)
|
return (*map(self.convert, value),)
|
||||||
|
|
||||||
|
|
||||||
def parse_ip(ip):
|
def parse_ip(ip: int | str) -> tuple[int, int, int, int]:
|
||||||
return (*map(int, ip.split(".")),)
|
if isinstance(ip, int):
|
||||||
|
return (
|
||||||
|
(ip >> 24) & 0xff,
|
||||||
class Type(enum.Enum):
|
(ip >> 16) & 0xff,
|
||||||
Void = _Type(0x01, "", ["void"], "void", None)
|
(ip >> 8) & 0xff,
|
||||||
S8 = _Type(0x02, "b", ["s8"], "int8", int)
|
(ip >> 0) & 0xff,
|
||||||
U8 = _Type(0x03, "B", ["u8"], "uint8", int)
|
)
|
||||||
S16 = _Type(0x04, "h", ["s16"], "int16", int)
|
return (*map(int, ip.split(".")),)
|
||||||
U16 = _Type(0x05, "H", ["u16"], "uint16", int)
|
|
||||||
S32 = _Type(0x06, "i", ["s32"], "int32", int)
|
|
||||||
U32 = _Type(0x07, "I", ["u32"], "uint32", int)
|
class Type(enum.Enum):
|
||||||
S64 = _Type(0x08, "q", ["s64"], "int64", int)
|
Void = _Type(0x01, "", ["void"], "void", None)
|
||||||
U64 = _Type(0x09, "Q", ["u64"], "uint64", int)
|
S8 = _Type(0x02, "b", ["s8"], "int8", int)
|
||||||
Blob = _Type(0x0a, "S", ["bin", "binary"], "char[]", bytes)
|
U8 = _Type(0x03, "B", ["u8"], "uint8", int)
|
||||||
Str = _Type(0x0b, "s", ["str", "string"], "char[]", unescape)
|
S16 = _Type(0x04, "h", ["s16"], "int16", int)
|
||||||
IPv4 = _Type(0x0c, "4B", ["ip4"], "uint8[4]", parse_ip, 1, True)
|
U16 = _Type(0x05, "H", ["u16"], "uint16", int)
|
||||||
Time = _Type(0x0d, "I", ["time"], "uint32", int)
|
S32 = _Type(0x06, "i", ["s32"], "int32", int)
|
||||||
Float = _Type(0x0e, "f", ["float", "f"], "float", float)
|
U32 = _Type(0x07, "I", ["u32"], "uint32", int)
|
||||||
Double = _Type(0x0f, "d", ["double", "d"], "double", float)
|
S64 = _Type(0x08, "q", ["s64"], "int64", int)
|
||||||
|
U64 = _Type(0x09, "Q", ["u64"], "uint64", int)
|
||||||
TwoS8 = _Type(0x10, "2b", ["2s8"], "int8[2]", int, 2)
|
Blob = _Type(0x0a, "S", ["bin", "binary"], "char[]", bytes)
|
||||||
TwoU8 = _Type(0x11, "2B", ["2u8"], "uint8[2]", int, 2)
|
Str = _Type(0x0b, "s", ["str", "string"], "char[]", unescape)
|
||||||
TwoS16 = _Type(0x12, "2h", ["2s16"], "int16[2]", int, 2)
|
IPv4 = _Type(0x0c, "4B", ["ip4"], "uint8[4]", parse_ip, 1, True)
|
||||||
TwoU16 = _Type(0x13, "2H", ["2u16"], "uint16[2]", int, 2)
|
IPv4_Int = _Type(0x0c, "I", ["ip4"], "uint8[4]", parse_ip, 1, True)
|
||||||
TwoS32 = _Type(0x14, "2i", ["2s32"], "int32[2]", int, 2)
|
Time = _Type(0x0d, "I", ["time"], "uint32", int)
|
||||||
TwoU32 = _Type(0x15, "2I", ["2u32"], "uint32[2]", int, 2)
|
Float = _Type(0x0e, "f", ["float", "f"], "float", float)
|
||||||
TwoS64 = _Type(0x16, "2q", ["2s64", "vs64"], "int16[2]", int, 2)
|
Double = _Type(0x0f, "d", ["double", "d"], "double", float)
|
||||||
TwoU64 = _Type(0x17, "2Q", ["2u64", "vu64"], "uint16[2]", int, 2)
|
|
||||||
TwoFloat = _Type(0x18, "2f", ["2f"], "float[2]", float, 2)
|
TwoS8 = _Type(0x10, "2b", ["2s8"], "int8[2]", int, 2)
|
||||||
TwoDouble = _Type(0x19, "2d", ["2d", "vd"], "double[2]", float, 2)
|
TwoU8 = _Type(0x11, "2B", ["2u8"], "uint8[2]", int, 2)
|
||||||
|
TwoS16 = _Type(0x12, "2h", ["2s16"], "int16[2]", int, 2)
|
||||||
ThreeS8 = _Type(0x1a, "3b", ["3s8"], "int8[3]", int, 3)
|
TwoU16 = _Type(0x13, "2H", ["2u16"], "uint16[2]", int, 2)
|
||||||
ThreeU8 = _Type(0x1b, "3B", ["3u8"], "uint8[3]", int, 3)
|
TwoS32 = _Type(0x14, "2i", ["2s32"], "int32[2]", int, 2)
|
||||||
ThreeS16 = _Type(0x1c, "3h", ["3s16"], "int16[3]", int, 3)
|
TwoU32 = _Type(0x15, "2I", ["2u32"], "uint32[2]", int, 2)
|
||||||
ThreeU16 = _Type(0x1d, "3H", ["3u16"], "uint16[3]", int, 3)
|
TwoS64 = _Type(0x16, "2q", ["2s64", "vs64"], "int16[2]", int, 2)
|
||||||
ThreeS32 = _Type(0x1e, "3i", ["3s32"], "int32[3]", int, 3)
|
TwoU64 = _Type(0x17, "2Q", ["2u64", "vu64"], "uint16[2]", int, 2)
|
||||||
ThreeU32 = _Type(0x1f, "3I", ["3u32"], "uint32[3]", int, 3)
|
TwoFloat = _Type(0x18, "2f", ["2f"], "float[2]", float, 2)
|
||||||
ThreeS64 = _Type(0x20, "3q", ["3s64"], "int64[3]", int, 3)
|
TwoDouble = _Type(0x19, "2d", ["2d", "vd"], "double[2]", float, 2)
|
||||||
ThreeU64 = _Type(0x21, "3Q", ["3u64"], "uint64[3]", int, 3)
|
|
||||||
ThreeFloat = _Type(0x22, "3f", ["3f"], "float[3]", float, 3)
|
ThreeS8 = _Type(0x1a, "3b", ["3s8"], "int8[3]", int, 3)
|
||||||
ThreeDouble = _Type(0x23, "3d", ["3d"], "double[3]", float, 3)
|
ThreeU8 = _Type(0x1b, "3B", ["3u8"], "uint8[3]", int, 3)
|
||||||
|
ThreeS16 = _Type(0x1c, "3h", ["3s16"], "int16[3]", int, 3)
|
||||||
FourS8 = _Type(0x24, "4b", ["4s8"], "int8[4]", int, 4)
|
ThreeU16 = _Type(0x1d, "3H", ["3u16"], "uint16[3]", int, 3)
|
||||||
FourU8 = _Type(0x25, "4B", ["4u8"], "uint8[4]", int, 4)
|
ThreeS32 = _Type(0x1e, "3i", ["3s32"], "int32[3]", int, 3)
|
||||||
FourS16 = _Type(0x26, "4h", ["4s16"], "int16[4]", int, 4)
|
ThreeU32 = _Type(0x1f, "3I", ["3u32"], "uint32[3]", int, 3)
|
||||||
FourU16 = _Type(0x27, "4H", ["4u16"], "uint8[4]", int, 4)
|
ThreeS64 = _Type(0x20, "3q", ["3s64"], "int64[3]", int, 3)
|
||||||
FourS32 = _Type(0x28, "4i", ["4s32", "vs32"], "int32[4]", int, 4)
|
ThreeU64 = _Type(0x21, "3Q", ["3u64"], "uint64[3]", int, 3)
|
||||||
FourU32 = _Type(0x29, "4I", ["4u32", "vs32"], "uint32[4]", int, 4)
|
ThreeFloat = _Type(0x22, "3f", ["3f"], "float[3]", float, 3)
|
||||||
FourS64 = _Type(0x2a, "4q", ["4s64"], "int64[4]", int, 4)
|
ThreeDouble = _Type(0x23, "3d", ["3d"], "double[3]", float, 3)
|
||||||
FourU64 = _Type(0x2b, "4Q", ["4u64"], "uint64[4]", int, 4)
|
|
||||||
FourFloat = _Type(0x2c, "4f", ["4f", "vf"], "float[4]", float, 4)
|
FourS8 = _Type(0x24, "4b", ["4s8"], "int8[4]", int, 4)
|
||||||
FourDouble = _Type(0x2d, "4d", ["4d"], "double[4]", float, 4)
|
FourU8 = _Type(0x25, "4B", ["4u8"], "uint8[4]", int, 4)
|
||||||
|
FourS16 = _Type(0x26, "4h", ["4s16"], "int16[4]", int, 4)
|
||||||
Attr = _Type(0x2e, "s", ["attr"], "char[]", None)
|
FourU16 = _Type(0x27, "4H", ["4u16"], "uint8[4]", int, 4)
|
||||||
Array = _Type(0x2f, "", ["array"], "", None)
|
FourS32 = _Type(0x28, "4i", ["4s32", "vs32"], "int32[4]", int, 4)
|
||||||
|
FourU32 = _Type(0x29, "4I", ["4u32", "vs32"], "uint32[4]", int, 4)
|
||||||
VecS8 = _Type(0x30, "16b", ["vs8"], "int8[16]", int, 16)
|
FourS64 = _Type(0x2a, "4q", ["4s64"], "int64[4]", int, 4)
|
||||||
VecU8 = _Type(0x31, "16B", ["vu8"], "uint8[16]", int, 16)
|
FourU64 = _Type(0x2b, "4Q", ["4u64"], "uint64[4]", int, 4)
|
||||||
VecS16 = _Type(0x32, "8h", ["vs16"], "int8[8]", int, 8)
|
FourFloat = _Type(0x2c, "4f", ["4f", "vf"], "float[4]", float, 4)
|
||||||
VecU16 = _Type(0x33, "8H", ["vu16"], "uint8[8]", int, 8)
|
FourDouble = _Type(0x2d, "4d", ["4d"], "double[4]", float, 4)
|
||||||
|
|
||||||
Bool = _Type(0x34, "b", ["bool", "b"], "bool", int)
|
Attr = _Type(0x2e, "s", ["attr"], "char[]", None)
|
||||||
TwoBool = _Type(0x35, "2b", ["2b"], "bool[2]", int, 2)
|
Array = _Type(0x2f, "", ["array"], "", None)
|
||||||
ThreeBool = _Type(0x36, "3b", ["3b"], "bool[3]", int, 3)
|
|
||||||
FourBool = _Type(0x37, "4b", ["4b"], "bool[4]", int, 4)
|
VecS8 = _Type(0x30, "16b", ["vs8"], "int8[16]", int, 16)
|
||||||
VecBool = _Type(0x38, "16b", ["vb"], "bool[16]", int, 16)
|
VecU8 = _Type(0x31, "16B", ["vu8"], "uint8[16]", int, 16)
|
||||||
|
VecS16 = _Type(0x32, "8h", ["vs16"], "int8[8]", int, 8)
|
||||||
@classmethod
|
VecU16 = _Type(0x33, "8H", ["vu16"], "uint8[8]", int, 8)
|
||||||
def from_val(cls, value):
|
|
||||||
for i in cls:
|
Bool = _Type(0x34, "b", ["bool", "b"], "bool", int)
|
||||||
if i.value.id == value & ARRAY_MASK:
|
TwoBool = _Type(0x35, "2b", ["2b"], "bool[2]", int, 2)
|
||||||
return i
|
ThreeBool = _Type(0x36, "3b", ["3b"], "bool[3]", int, 3)
|
||||||
raise ValueError(f"Unknown node type {value}")
|
FourBool = _Type(0x37, "4b", ["4b"], "bool[4]", int, 4)
|
||||||
|
VecBool = _Type(0x38, "16b", ["vb"], "bool[16]", int, 16)
|
||||||
|
|
||||||
class ServicesMode(enum.Enum):
|
@classmethod
|
||||||
Operation = "operation"
|
def from_val(cls, value):
|
||||||
Debug = "debug"
|
for i in cls:
|
||||||
Test = "test"
|
if i.value.id == value & ARRAY_MASK:
|
||||||
Factory = "factory"
|
return i
|
||||||
|
raise ValueError(f"Unknown node type {value}")
|
||||||
|
|
||||||
class Compression(enum.Enum):
|
|
||||||
Lz77 = "lz77"
|
class ServicesMode(enum.Enum):
|
||||||
None_ = "none"
|
Operation = "operation"
|
||||||
|
Debug = "debug"
|
||||||
|
Test = "test"
|
||||||
|
Factory = "factory"
|
||||||
|
|
||||||
|
|
||||||
|
class Compression(enum.Enum):
|
||||||
|
Lz77 = "lz77"
|
||||||
|
None_ = "none"
|
||||||
|
|
|
@ -1,49 +1,50 @@
|
||||||
import binascii
|
import binascii
|
||||||
import hashlib
|
import hashlib
|
||||||
import time
|
import time
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from Crypto.Cipher import ARC4
|
from Crypto.Cipher import ARC4
|
||||||
|
|
||||||
from .misc import assert_true
|
from .misc import assert_true
|
||||||
from .keys import EA_KEY
|
from .keys import EA_KEY
|
||||||
|
|
||||||
|
|
||||||
def new_prng():
|
def new_prng():
|
||||||
state = 0x41c64e6d
|
state = 0x41c64e6d
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
x = (state * 0x838c9cda) + 0x6072
|
x = (state * 0x838c9cda) + 0x6072
|
||||||
# state = (state * 0x41c64e6d + 0x3039)
|
# state = (state * 0x41c64e6d + 0x3039)
|
||||||
# state = (state * 0x41c64e6d + 0x3039)
|
# state = (state * 0x41c64e6d + 0x3039)
|
||||||
state = (state * 0xc2a29a69 + 0xd3dc167e) & 0xffffffff
|
state = (state * 0xc2a29a69 + 0xd3dc167e) & 0xffffffff
|
||||||
yield (x & 0x7fff0000) | state >> 15 & 0xffff
|
yield (x & 0x7fff0000) | state >> 15 & 0xffff
|
||||||
|
|
||||||
|
|
||||||
prng = new_prng()
|
prng = new_prng()
|
||||||
|
|
||||||
|
|
||||||
def validate_key(info):
|
def validate_key(info):
|
||||||
match = re.match(r"^(\d)-([0-9a-f]{8})-([0-9a-f]{4})$", info)
|
match = re.match(r"^(\d)-([0-9a-f]{8})-([0-9a-f]{4})$", info)
|
||||||
assert_true(match, "Invalid eamuse info key")
|
assert_true(match is not None, "Invalid eamuse info key")
|
||||||
version = match.group(1)
|
assert match is not None
|
||||||
assert_true(version == "1", f"Unsupported encryption version ({version})")
|
version = match.group(1)
|
||||||
|
assert_true(version == "1", f"Unsupported encryption version ({version})")
|
||||||
seconds = binascii.unhexlify(match.group(2)) # 4 bytes
|
|
||||||
rng = binascii.unhexlify(match.group(3)) # 2 bytes
|
seconds = binascii.unhexlify(match.group(2)) # 4 bytes
|
||||||
return seconds, rng
|
rng = binascii.unhexlify(match.group(3)) # 2 bytes
|
||||||
|
return seconds, rng
|
||||||
|
|
||||||
def get_key(prng_=None):
|
|
||||||
return f"1-{int(time.time()):08x}-{(next(prng_ or prng) & 0xffff):04x}"
|
def get_key(prng_=None):
|
||||||
|
return f"1-{int(time.time()):08x}-{(next(prng_ or prng) & 0xffff):04x}"
|
||||||
|
|
||||||
def ea_symmetric_crypt(data, info):
|
|
||||||
seconds, rng = validate_key(info)
|
def ea_symmetric_crypt(data, info):
|
||||||
|
seconds, rng = validate_key(info)
|
||||||
key = hashlib.md5(seconds + rng + EA_KEY).digest()
|
|
||||||
|
key = hashlib.md5(seconds + rng + EA_KEY).digest()
|
||||||
return ARC4.new(key).encrypt(data)
|
|
||||||
|
return ARC4.new(key).encrypt(data)
|
||||||
|
|
||||||
__all__ = ("new_prng", "prng", "validate_key", "get_key", "ea_symmetric_crypt")
|
|
||||||
|
__all__ = ("new_prng", "prng", "validate_key", "get_key", "ea_symmetric_crypt")
|
||||||
|
|
463
eaapi/decoder.py
463
eaapi/decoder.py
|
@ -1,222 +1,241 @@
|
||||||
import math
|
import math
|
||||||
import struct
|
import struct
|
||||||
import io
|
import io
|
||||||
|
|
||||||
from html import unescape
|
from html import unescape
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from lxml import etree
|
from lxml import etree
|
||||||
except ModuleNotFoundError:
|
except ModuleNotFoundError:
|
||||||
print("W", "lxml not found, XML strings will not be supported")
|
print("W", "lxml not found, XML strings will not be supported")
|
||||||
etree = None
|
etree = None
|
||||||
|
|
||||||
|
|
||||||
from .packer import Packer
|
from .packer import Packer
|
||||||
from .const import (
|
from .const import (
|
||||||
NAME_MAX_COMPRESSED, NAME_MAX_DECOMPRESSED, ATTR, PACK_ALPHABET, END_NODE, END_DOC, ARRAY_BIT,
|
NAME_MAX_COMPRESSED, NAME_MAX_DECOMPRESSED, ATTR, PACK_ALPHABET, END_NODE, END_DOC, ARRAY_BIT,
|
||||||
ENCODING, CONTENT, CONTENT_COMP, CONTENT_FULL, XML_ENCODING, Type
|
ENCODING, CONTENT, CONTENT_COMP, CONTENT_FULL, XML_ENCODING, DEFAULT_ENCODING, Type
|
||||||
)
|
)
|
||||||
from .misc import unpack, py_encoding, assert_true
|
from .misc import unpack, py_encoding, assert_true
|
||||||
from .node import XMLNode
|
from .node import XMLNode
|
||||||
from .exception import DecodeError
|
from .exception import DecodeError
|
||||||
|
|
||||||
|
|
||||||
class Decoder:
|
class Decoder:
|
||||||
def __init__(self, packet):
|
def __init__(self, packet):
|
||||||
self.stream = io.BytesIO(packet)
|
self.stream = io.BytesIO(packet)
|
||||||
self.is_xml_string = packet.startswith(b"<")
|
self.is_xml_string = packet.startswith(b"<")
|
||||||
self.encoding = None
|
self.encoding = None
|
||||||
self.compressed = False
|
self.compressed = False
|
||||||
self.has_data = False
|
self.has_data = False
|
||||||
self.packer = None
|
self.packer = None
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def decode(cls, packet):
|
def decode(cls, packet):
|
||||||
return cls(packet).unpack()
|
return cls(packet).unpack()
|
||||||
|
|
||||||
def read(self, s_format, single=True, align=True):
|
def read(self, s_format, single=True, align=True):
|
||||||
if s_format == "S":
|
if s_format == "S":
|
||||||
length = self.read("L")
|
length = self.read("L")
|
||||||
if self.packer:
|
if self.packer:
|
||||||
self.packer.notify_skipped(length)
|
self.packer.notify_skipped(length)
|
||||||
return self.stream.read(length)
|
return self.stream.read(length)
|
||||||
if s_format == "s":
|
if s_format == "s":
|
||||||
length = self.read("L")
|
length = self.read("L")
|
||||||
if self.packer:
|
if self.packer:
|
||||||
self.packer.notify_skipped(length)
|
self.packer.notify_skipped(length)
|
||||||
raw = self.stream.read(length)
|
raw = self.stream.read(length)
|
||||||
return raw.decode(py_encoding(self.encoding)).rstrip("\0")
|
return raw.decode(py_encoding(self.encoding or DEFAULT_ENCODING)).rstrip("\0")
|
||||||
|
|
||||||
length = struct.calcsize("=" + s_format)
|
length = struct.calcsize("=" + s_format)
|
||||||
if self.packer and align:
|
if self.packer and align:
|
||||||
self.stream.seek(self.packer.request_allocation(length))
|
self.stream.seek(self.packer.request_allocation(length))
|
||||||
data = self.stream.read(length)
|
data = self.stream.read(length)
|
||||||
assert_true(len(data) == length, "EOF reached", DecodeError)
|
assert_true(len(data) == length, "EOF reached", DecodeError)
|
||||||
value = struct.unpack(">" + s_format, data)
|
value = struct.unpack(">" + s_format, data)
|
||||||
return value[0] if single else value
|
return value[0] if single else value
|
||||||
|
|
||||||
def _read_node_value(self, node):
|
def _read_node_value(self, node: XMLNode) -> None:
|
||||||
fmt = node.type.value.fmt
|
fmt = node.type.value.fmt
|
||||||
count = 1
|
count = 1
|
||||||
if node.is_array:
|
if node.is_array:
|
||||||
length = struct.calcsize("=" + fmt)
|
length = struct.calcsize("=" + fmt)
|
||||||
count = self.read("I") // length
|
nbytes = self.read("I")
|
||||||
values = []
|
assert isinstance(nbytes, int)
|
||||||
for _ in range(count):
|
count = nbytes // length
|
||||||
values.append(self.read(fmt, single=len(fmt) == 1, align=False))
|
values = []
|
||||||
self.packer.notify_skipped(count * length)
|
for _ in range(count):
|
||||||
return values
|
values.append(self.read(fmt, single=len(fmt) == 1, align=False))
|
||||||
|
|
||||||
node.value = self.read(fmt, single=len(fmt) == 1)
|
assert self.packer is not None
|
||||||
|
self.packer.notify_skipped(count * length)
|
||||||
def _read_metadata_name(self):
|
node.value = values
|
||||||
length = self.read("B")
|
else:
|
||||||
|
node.value = self.read(fmt, single=len(fmt) == 1)
|
||||||
if not self.compressed:
|
|
||||||
if length < 0x80:
|
def _read_metadata_name(self) -> str:
|
||||||
assert_true(length >= 0x40, "Invalid name length", DecodeError)
|
length = self.read("B")
|
||||||
# i.e. length = (length & ~0x40) + 1
|
assert isinstance(length, int)
|
||||||
length -= 0x3f
|
|
||||||
else:
|
if not self.compressed:
|
||||||
length = (length << 8) | self.read("B")
|
if length < 0x80:
|
||||||
# i.e. length = (length & ~0x8000) + 0x41
|
assert_true(length >= 0x40, "Invalid name length", DecodeError)
|
||||||
length -= 0x7fbf
|
# i.e. length = (length & ~0x40) + 1
|
||||||
assert_true(length <= NAME_MAX_DECOMPRESSED, "Name length too long", DecodeError)
|
length -= 0x3f
|
||||||
|
else:
|
||||||
name = self.stream.read(length)
|
extra = self.read("B")
|
||||||
assert_true(len(name) == length, "Not enough bytes to read name", DecodeError)
|
assert isinstance(extra, int)
|
||||||
return name.decode(self.encoding)
|
length = (length << 8) | extra
|
||||||
|
# i.e. length = (length & ~0x8000) + 0x41
|
||||||
out = ""
|
length -= 0x7fbf
|
||||||
if length == 0:
|
assert_true(length <= NAME_MAX_DECOMPRESSED, "Name length too long", DecodeError)
|
||||||
return out
|
|
||||||
|
name = self.stream.read(length)
|
||||||
assert_true(length <= NAME_MAX_COMPRESSED, "Name length too long", DecodeError)
|
assert_true(len(name) == length, "Not enough bytes to read name", DecodeError)
|
||||||
|
return name.decode(self.encoding or "")
|
||||||
no_bytes = math.ceil((length * 6) / 8)
|
|
||||||
unpacked = unpack(self.stream.read(no_bytes), 6)[:length]
|
out = ""
|
||||||
return "".join(PACK_ALPHABET[i] for i in unpacked)
|
if length == 0:
|
||||||
|
return out
|
||||||
def _read_metadata(self, type_):
|
|
||||||
name = self._read_metadata_name()
|
assert_true(length <= NAME_MAX_COMPRESSED, "Name length too long", DecodeError)
|
||||||
node = XMLNode(name, type_, None, encoding=self.encoding)
|
|
||||||
|
no_bytes = math.ceil((length * 6) / 8)
|
||||||
while (child := self.read("B")) != END_NODE:
|
unpacked = unpack(self.stream.read(no_bytes), 6)[:length]
|
||||||
if child == ATTR:
|
return "".join(PACK_ALPHABET[i] for i in unpacked)
|
||||||
attr = self._read_metadata_name()
|
|
||||||
assert_true(not attr.startswith("__"), "Invalid binary node name", DecodeError)
|
def _read_metadata(self, type_):
|
||||||
# Abuse the array here to maintain order
|
name = self._read_metadata_name()
|
||||||
node.children.append(attr)
|
node = XMLNode(name, type_, None, encoding=self.encoding or DEFAULT_ENCODING)
|
||||||
else:
|
|
||||||
node.children.append(self._read_metadata(child))
|
while (child := self.read("B")) != END_NODE:
|
||||||
is_array = not not (type_ & ARRAY_BIT)
|
if child == ATTR:
|
||||||
if is_array:
|
attr = self._read_metadata_name()
|
||||||
node.value = []
|
assert_true(not attr.startswith("__"), "Invalid binary node name", DecodeError)
|
||||||
return node
|
# Abuse the array here to maintain order
|
||||||
|
node.children.append(attr)
|
||||||
def _read_databody(self, node: XMLNode):
|
else:
|
||||||
self._read_node_value(node)
|
node.children.append(self._read_metadata(child))
|
||||||
|
|
||||||
children = list(node.children)
|
if type_ & ARRAY_BIT:
|
||||||
node.children = []
|
node.value = []
|
||||||
for i in children:
|
return node
|
||||||
if isinstance(i, XMLNode):
|
|
||||||
node.children.append(self._read_databody(i))
|
def _read_databody(self, node: XMLNode):
|
||||||
else:
|
self._read_node_value(node)
|
||||||
node[i] = self.read("s")
|
|
||||||
|
children = list(node.children)
|
||||||
return node
|
node.children = []
|
||||||
|
for i in children:
|
||||||
def _read_magic(self):
|
if isinstance(i, XMLNode):
|
||||||
magic, contents, enc, enc_comp = struct.unpack(">BBBB", self.stream.read(4))
|
node.children.append(self._read_databody(i))
|
||||||
|
else:
|
||||||
assert_true(magic == 0xA0, "Not a packet", DecodeError)
|
node[i] = self.read("s")
|
||||||
assert_true(~enc & 0xFF == enc_comp, "Malformed packet header", DecodeError)
|
|
||||||
assert_true(enc in ENCODING, "Unknown packet encoding", DecodeError)
|
return node
|
||||||
assert_true(contents in CONTENT, "Invalid packet contents", DecodeError)
|
|
||||||
self.compressed = contents in CONTENT_COMP
|
def _read_magic(self):
|
||||||
self.has_data = contents in CONTENT_FULL or contents == 0x44
|
magic, contents, enc, enc_comp = struct.unpack(">BBBB", self.stream.read(4))
|
||||||
self.encoding = ENCODING[enc]
|
|
||||||
|
assert_true(magic == 0xA0, "Not a packet", DecodeError)
|
||||||
def _read_xml_string(self):
|
assert_true(~enc & 0xFF == enc_comp, "Malformed packet header", DecodeError)
|
||||||
assert_true(etree is not None, "lxml missing", DecodeError)
|
assert_true(enc in ENCODING, "Unknown packet encoding", DecodeError)
|
||||||
parser = etree.XMLParser(remove_comments=True)
|
assert_true(contents in CONTENT, "Invalid packet contents", DecodeError)
|
||||||
tree = etree.XML(self.stream.read(), parser)
|
self.compressed = contents in CONTENT_COMP
|
||||||
self.encoding = XML_ENCODING[tree.getroottree().docinfo.encoding.upper()]
|
self.has_data = contents in CONTENT_FULL or contents == 0x44
|
||||||
self.compressed = False
|
self.encoding = ENCODING[enc]
|
||||||
self.has_data = True
|
|
||||||
|
def _read_xml_string(self):
|
||||||
def walk(node):
|
assert_true(etree is not None, "lxml missing", DecodeError)
|
||||||
attrib = {**node.attrib}
|
assert etree is not None
|
||||||
type_str = attrib.pop("__type", "void")
|
|
||||||
for i in Type:
|
parser = etree.XMLParser(remove_comments=True)
|
||||||
if type_str in i.value.names:
|
tree = etree.XML(self.stream.read(), parser)
|
||||||
type_ = i
|
self.encoding = XML_ENCODING[tree.getroottree().docinfo.encoding.upper()]
|
||||||
break
|
self.compressed = False
|
||||||
else:
|
self.has_data = True
|
||||||
raise ValueError("Invalid node type")
|
|
||||||
attrib.pop("__size", None)
|
def walk(node):
|
||||||
count = attrib.pop("__count", None)
|
attrib = {**node.attrib}
|
||||||
|
type_str = attrib.pop("__type", "void")
|
||||||
is_array = count is not None
|
for i in Type:
|
||||||
count = 1 if count is None else int(count)
|
if type_str in i.value.names:
|
||||||
|
type_ = i
|
||||||
d_type = type_.value
|
break
|
||||||
|
else:
|
||||||
if d_type.size == 1 and not is_array:
|
raise ValueError("Invalid node type")
|
||||||
value = d_type._parse(node.text or "")
|
attrib.pop("__size", None)
|
||||||
else:
|
count = attrib.pop("__count", None)
|
||||||
data = node.text.split(" ")
|
|
||||||
|
is_array = count is not None
|
||||||
value = []
|
count = 1 if count is None else int(count)
|
||||||
for i in range(0, len(data), d_type.size):
|
|
||||||
value.append(d_type._parse(data[i:i+d_type.size]))
|
d_type = type_.value
|
||||||
if not is_array:
|
|
||||||
value = value[0]
|
if d_type.size == 1 and not is_array:
|
||||||
|
try:
|
||||||
xml_node = XMLNode(node.tag, type_, value, encoding=self.encoding)
|
value = d_type._parse(node.text or "")
|
||||||
for i in node.getchildren():
|
except ValueError:
|
||||||
xml_node.children.append(walk(i))
|
print(f"Failed to parse {node.tag} ({d_type.names[0]}): {repr(node.text)}")
|
||||||
|
raise
|
||||||
for i in attrib:
|
else:
|
||||||
xml_node[i] = unescape(attrib[i])
|
data = node.text.split(" ")
|
||||||
|
|
||||||
return xml_node
|
value = []
|
||||||
|
for i in range(0, len(data), d_type.size):
|
||||||
return walk(tree)
|
value.append(d_type._parse(data[i:i+d_type.size]))
|
||||||
|
if not is_array:
|
||||||
def unpack(self):
|
value = value[0]
|
||||||
try:
|
|
||||||
return self._unpack()
|
xml_node = XMLNode(node.tag, type_, value, encoding=self.encoding or DEFAULT_ENCODING)
|
||||||
except struct.error as e:
|
for i in node.getchildren():
|
||||||
raise DecodeError(e)
|
xml_node.children.append(walk(i))
|
||||||
|
|
||||||
def _unpack(self):
|
for i in attrib:
|
||||||
if self.is_xml_string:
|
xml_node[i] = unescape(attrib[i])
|
||||||
return self._read_xml_string()
|
|
||||||
|
return xml_node
|
||||||
self._read_magic()
|
|
||||||
|
return walk(tree)
|
||||||
header_len = self.read("I")
|
|
||||||
start = self.stream.tell()
|
def unpack(self):
|
||||||
schema = self._read_metadata(self.read("B"))
|
try:
|
||||||
assert_true(self.read("B") == END_DOC, "Unterminated schema", DecodeError)
|
return self._unpack()
|
||||||
padding = header_len - (self.stream.tell() - start)
|
except struct.error as e:
|
||||||
assert_true(padding >= 0, "Invalid schema definition", DecodeError)
|
raise DecodeError(e)
|
||||||
assert_true(all(i == 0 for i in self.stream.read(padding)), "Invalid schema padding", DecodeError)
|
|
||||||
|
def _unpack(self):
|
||||||
body_len = self.read("I")
|
if self.is_xml_string:
|
||||||
start = self.stream.tell()
|
return self._read_xml_string()
|
||||||
self.packer = Packer(start)
|
|
||||||
data = self._read_databody(schema)
|
self._read_magic()
|
||||||
self.stream.seek(self.packer.request_allocation(0))
|
|
||||||
padding = body_len - (self.stream.tell() - start)
|
header_len = self.read("I")
|
||||||
assert_true(padding >= 0, "Data shape not match schema", DecodeError)
|
assert isinstance(header_len, int)
|
||||||
assert_true(all(i == 0 for i in self.stream.read(padding)), "Invalid data padding", DecodeError)
|
start = self.stream.tell()
|
||||||
|
schema = self._read_metadata(self.read("B"))
|
||||||
assert_true(self.stream.read(1) == b"", "Trailing data unconsumed", DecodeError)
|
assert_true(self.read("B") == END_DOC, "Unterminated schema", DecodeError)
|
||||||
|
padding = header_len - (self.stream.tell() - start)
|
||||||
return data
|
assert_true(padding >= 0, "Invalid schema definition", DecodeError)
|
||||||
|
assert_true(
|
||||||
|
all(i == 0 for i in self.stream.read(padding)), "Invalid schema padding", DecodeError
|
||||||
__all__ = ("Decoder", )
|
)
|
||||||
|
|
||||||
|
body_len = self.read("I")
|
||||||
|
assert isinstance(body_len, int)
|
||||||
|
start = self.stream.tell()
|
||||||
|
self.packer = Packer(start)
|
||||||
|
data = self._read_databody(schema)
|
||||||
|
self.stream.seek(self.packer.request_allocation(0))
|
||||||
|
padding = body_len - (self.stream.tell() - start)
|
||||||
|
assert_true(padding >= 0, "Data shape not match schema", DecodeError)
|
||||||
|
assert_true(
|
||||||
|
all(i == 0 for i in self.stream.read(padding)), "Invalid data padding", DecodeError
|
||||||
|
)
|
||||||
|
|
||||||
|
assert_true(self.stream.read(1) == b"", "Trailing data unconsumed", DecodeError)
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("Decoder", )
|
||||||
|
|
323
eaapi/encoder.py
323
eaapi/encoder.py
|
@ -1,157 +1,166 @@
|
||||||
import struct
|
import struct
|
||||||
import io
|
import io
|
||||||
|
|
||||||
from .packer import Packer
|
from .packer import Packer
|
||||||
from .misc import pack, py_encoding, assert_true
|
from .misc import pack, py_encoding, assert_true
|
||||||
from .const import (
|
from .const import (
|
||||||
PACK_ALPHABET, DEFAULT_ENCODING, ENCODING, ENCODING_BACK, NAME_MAX_DECOMPRESSED, ARRAY_BIT,
|
PACK_ALPHABET, DEFAULT_ENCODING, ENCODING, ENCODING_BACK, NAME_MAX_DECOMPRESSED, ARRAY_BIT,
|
||||||
ATTR, END_NODE, END_DOC, CONTENT_COMP_FULL, CONTENT_COMP_SCHEMA, CONTENT_ASCII_FULL,
|
ATTR, END_NODE, END_DOC, CONTENT_COMP_FULL, CONTENT_COMP_SCHEMA, CONTENT_ASCII_FULL,
|
||||||
CONTENT_ASCII_SCHEMA
|
CONTENT_ASCII_SCHEMA
|
||||||
)
|
)
|
||||||
from .exception import EncodeError
|
from .exception import EncodeError
|
||||||
|
from .node import XMLNode
|
||||||
|
|
||||||
class Encoder:
|
|
||||||
def __init__(self, encoding=DEFAULT_ENCODING):
|
class Encoder:
|
||||||
self.stream = io.BytesIO()
|
def __init__(self, encoding=DEFAULT_ENCODING):
|
||||||
assert_true(encoding in ENCODING_BACK, f"Unknown encoding {encoding}", EncodeError)
|
self.stream = io.BytesIO()
|
||||||
self.encoding = ENCODING_BACK[encoding]
|
assert_true(encoding in ENCODING_BACK, f"Unknown encoding {encoding}", EncodeError)
|
||||||
self.packer = None
|
self.encoding = ENCODING_BACK[encoding]
|
||||||
self._compressed = False
|
self.packer = None
|
||||||
|
self._compressed = False
|
||||||
@classmethod
|
|
||||||
def encode(cls, tree, xml_string=False):
|
@classmethod
|
||||||
if xml_string:
|
def encode(cls, tree, xml_string=False):
|
||||||
return tree.to_str(pretty=False).encode(tree.encoding)
|
if xml_string:
|
||||||
encoder = cls(tree.encoding)
|
return tree.to_str(pretty=False).encode(tree.encoding)
|
||||||
encoder.pack(tree)
|
encoder = cls(tree.encoding)
|
||||||
return encoder.stream.getvalue()
|
encoder.pack(tree)
|
||||||
|
return encoder.stream.getvalue()
|
||||||
def align(self, to=4, pad_char=b"\0"):
|
|
||||||
if to < 2:
|
def align(self, to=4, pad_char=b"\0"):
|
||||||
return
|
if to < 2:
|
||||||
if (dist := self.stream.tell() % to) == 0:
|
return
|
||||||
return
|
if (dist := self.stream.tell() % to) == 0:
|
||||||
self.stream.write(pad_char * (to - dist))
|
return
|
||||||
|
self.stream.write(pad_char * (to - dist))
|
||||||
def write(self, s_format, value, single=True):
|
|
||||||
if s_format == "S":
|
def write(self, s_format, value, single=True):
|
||||||
self.write("L", len(value))
|
if s_format == "S":
|
||||||
self.stream.write(value)
|
assert self.packer is not None
|
||||||
self.packer.notify_skipped(len(value))
|
self.write("L", len(value))
|
||||||
return
|
self.stream.write(value)
|
||||||
if s_format == "s":
|
self.packer.notify_skipped(len(value))
|
||||||
value = value.encode(py_encoding(ENCODING[self.encoding])) + b"\0"
|
return
|
||||||
self.write("L", len(value))
|
if s_format == "s":
|
||||||
self.stream.write(value)
|
assert self.packer is not None
|
||||||
self.packer.notify_skipped(len(value))
|
value = value.encode(py_encoding(ENCODING[self.encoding])) + b"\0"
|
||||||
return
|
self.write("L", len(value))
|
||||||
|
self.stream.write(value)
|
||||||
length = struct.calcsize("=" + s_format)
|
self.packer.notify_skipped(len(value))
|
||||||
|
return
|
||||||
if not isinstance(value, list):
|
|
||||||
value = [value]
|
length = struct.calcsize("=" + s_format)
|
||||||
count = len(value)
|
|
||||||
if count != 1:
|
if isinstance(value, list):
|
||||||
self.write("L", count * length)
|
assert self.packer is not None
|
||||||
self.packer.notify_skipped(count * length)
|
self.write("L", len(value) * length)
|
||||||
|
self.packer.notify_skipped(len(value) * length)
|
||||||
for x in value:
|
|
||||||
if self.packer and count == 1:
|
for x in value:
|
||||||
self.stream.seek(self.packer.request_allocation(length))
|
try:
|
||||||
|
if single:
|
||||||
try:
|
self.stream.write(struct.pack(f">{s_format}", x))
|
||||||
if single:
|
else:
|
||||||
self.stream.write(struct.pack(f">{s_format}", x))
|
self.stream.write(struct.pack(f">{s_format}", *x))
|
||||||
else:
|
except struct.error:
|
||||||
self.stream.write(struct.pack(f">{s_format}", *x))
|
raise ValueError(f"Failed to pack {s_format}: {repr(x)}")
|
||||||
except struct.error:
|
else:
|
||||||
raise ValueError(f"Failed to pack {s_format}: {repr(x)}")
|
if self.packer:
|
||||||
|
self.stream.seek(self.packer.request_allocation(length))
|
||||||
def _write_node_value(self, type_, value):
|
|
||||||
fmt = type_.value.fmt
|
try:
|
||||||
if fmt == "s":
|
if single:
|
||||||
self.write("s", value)
|
self.stream.write(struct.pack(f">{s_format}", value))
|
||||||
else:
|
else:
|
||||||
self.write(fmt, value, single=len(fmt) == 1)
|
self.stream.write(struct.pack(f">{s_format}", *value))
|
||||||
|
except struct.error:
|
||||||
def _write_metadata_name(self, name):
|
raise ValueError(f"Failed to pack {s_format}: {repr(value)}")
|
||||||
if not self._compressed:
|
|
||||||
assert_true(len(name) <= NAME_MAX_DECOMPRESSED, "Name length too long", EncodeError)
|
def _write_node_value(self, type_, value):
|
||||||
if len(name) > 64:
|
fmt = type_.value.fmt
|
||||||
self.write("H", len(name) + 0x7fbf)
|
if fmt == "s":
|
||||||
else:
|
self.write("s", value)
|
||||||
self.write("B", len(name) + 0x3f)
|
else:
|
||||||
self.stream.write(name.encode(py_encoding(ENCODING[self.encoding])))
|
self.write(fmt, value, single=len(fmt) == 1)
|
||||||
return
|
|
||||||
|
def _write_metadata_name(self, name):
|
||||||
assert_true(all(i in PACK_ALPHABET for i in name), f"Invalid schema name {name} (invalid chars)", EncodeError)
|
if not self._compressed:
|
||||||
assert_true(len(name) < 256, f"Invalid schema name {name} (too long)", EncodeError)
|
assert_true(len(name) <= NAME_MAX_DECOMPRESSED, "Name length too long", EncodeError)
|
||||||
self.write("B", len(name))
|
if len(name) > 64:
|
||||||
if len(name) == 0:
|
self.write("H", len(name) + 0x7fbf)
|
||||||
return
|
else:
|
||||||
|
self.write("B", len(name) + 0x3f)
|
||||||
name = bytearray(PACK_ALPHABET.index(i) for i in name)
|
self.stream.write(name.encode(py_encoding(ENCODING[self.encoding])))
|
||||||
self.stream.write(pack(name, 6))
|
return
|
||||||
|
|
||||||
def _write_metadata(self, node):
|
assert_true(all(i in PACK_ALPHABET for i in name), f"Invalid schema name {name} (invalid chars)", EncodeError)
|
||||||
self.write("B", node.type.value.id | (ARRAY_BIT if node.is_array else 0x00))
|
assert_true(len(name) < 256, f"Invalid schema name {name} (too long)", EncodeError)
|
||||||
self._write_metadata_name(node.name)
|
self.write("B", len(name))
|
||||||
|
if len(name) == 0:
|
||||||
for attr in node.attributes:
|
return
|
||||||
self.write("B", ATTR)
|
|
||||||
self._write_metadata_name(attr)
|
name = bytearray(PACK_ALPHABET.index(i) for i in name)
|
||||||
for child in node:
|
self.stream.write(pack(name, 6))
|
||||||
self._write_metadata(child)
|
|
||||||
self.write("B", END_NODE)
|
def _write_metadata(self, node):
|
||||||
|
self.write("B", node.type.value.id | (ARRAY_BIT if node.is_array else 0x00))
|
||||||
def _write_databody(self, data):
|
self._write_metadata_name(node.name)
|
||||||
self._write_node_value(data.type, data.value)
|
|
||||||
|
for attr in node.attributes:
|
||||||
for attr in data.attributes:
|
self.write("B", ATTR)
|
||||||
self.align()
|
self._write_metadata_name(attr)
|
||||||
self.write("s", data[attr])
|
for child in node:
|
||||||
for child in data:
|
self._write_metadata(child)
|
||||||
self._write_databody(child)
|
self.write("B", END_NODE)
|
||||||
|
|
||||||
def _write_magic(self, has_data=True):
|
def _write_databody(self, data: XMLNode):
|
||||||
if has_data:
|
self._write_node_value(data.type, data.value)
|
||||||
contents = CONTENT_COMP_FULL if self._compressed else CONTENT_ASCII_FULL
|
|
||||||
else:
|
for attr in data.attributes:
|
||||||
contents = CONTENT_COMP_SCHEMA if self._compressed else CONTENT_ASCII_SCHEMA
|
self.align()
|
||||||
|
self.write("s", data[attr])
|
||||||
enc_comp = ~self.encoding & 0xFF
|
for child in data:
|
||||||
self.stream.write(struct.pack(">BBBB", 0xA0, contents, self.encoding, enc_comp))
|
self._write_databody(child)
|
||||||
|
|
||||||
def pack(self, node):
|
def _write_magic(self, has_data=True):
|
||||||
try:
|
if has_data:
|
||||||
return self._pack(node)
|
contents = CONTENT_COMP_FULL if self._compressed else CONTENT_ASCII_FULL
|
||||||
except struct.error as e:
|
else:
|
||||||
return EncodeError(e)
|
contents = CONTENT_COMP_SCHEMA if self._compressed else CONTENT_ASCII_SCHEMA
|
||||||
|
|
||||||
def _pack(self, node):
|
enc_comp = ~self.encoding & 0xFF
|
||||||
self._compressed = node.can_compress # Opportunically compress if we can
|
self.stream.write(struct.pack(">BBBB", 0xA0, contents, self.encoding, enc_comp))
|
||||||
self._write_magic()
|
|
||||||
|
def pack(self, node):
|
||||||
schema_start = self.stream.tell()
|
try:
|
||||||
self.write("I", 0)
|
return self._pack(node)
|
||||||
self._write_metadata(node)
|
except struct.error as e:
|
||||||
self.write("B", END_DOC)
|
return EncodeError(e)
|
||||||
self.align()
|
|
||||||
schema_end = self.stream.tell()
|
def _pack(self, node):
|
||||||
self.stream.seek(schema_start)
|
self._compressed = node.can_compress # Opportunically compress if we can
|
||||||
self.write("I", schema_end - schema_start - 4)
|
self._write_magic()
|
||||||
|
|
||||||
self.stream.seek(schema_end)
|
schema_start = self.stream.tell()
|
||||||
self.write("I", 0)
|
self.write("I", 0)
|
||||||
self.packer = Packer(self.stream.tell())
|
self._write_metadata(node)
|
||||||
self._write_databody(node)
|
self.write("B", END_DOC)
|
||||||
self.stream.seek(0, io.SEEK_END)
|
self.align()
|
||||||
self.align()
|
schema_end = self.stream.tell()
|
||||||
node_end = self.stream.tell()
|
self.stream.seek(schema_start)
|
||||||
self.stream.seek(schema_end)
|
self.write("I", schema_end - schema_start - 4)
|
||||||
self.packer = None
|
|
||||||
self.write("I", node_end - schema_end - 4)
|
self.stream.seek(schema_end)
|
||||||
|
self.write("I", 0)
|
||||||
|
self.packer = Packer(self.stream.tell())
|
||||||
__all__ = ("Encoder", )
|
self._write_databody(node)
|
||||||
|
self.stream.seek(0, io.SEEK_END)
|
||||||
|
self.align()
|
||||||
|
node_end = self.stream.tell()
|
||||||
|
self.stream.seek(schema_end)
|
||||||
|
self.packer = None
|
||||||
|
self.write("I", node_end - schema_end - 4)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("Encoder", )
|
||||||
|
|
|
@ -1,22 +1,34 @@
|
||||||
class EAAPIException(Exception):
|
class EAAPIException(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class CheckFailed(EAAPIException):
|
class CheckFailed(EAAPIException):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class InvalidCard(CheckFailed):
|
class InvalidCard(CheckFailed):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class DecodeError(CheckFailed):
|
class DecodeError(CheckFailed):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class EncodeError(CheckFailed):
|
class EncodeError(CheckFailed):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class InvalidModel(EAAPIException):
|
class InvalidModel(EAAPIException):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class XMLStrutureError(EAAPIException):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class NodeNotFound(XMLStrutureError, IndexError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class AttributeNotFound(XMLStrutureError, KeyError):
|
||||||
|
pass
|
||||||
|
|
270
eaapi/lz77.py
270
eaapi/lz77.py
|
@ -1,135 +1,135 @@
|
||||||
from .misc import assert_true
|
from .misc import assert_true
|
||||||
|
|
||||||
|
|
||||||
WINDOW_SIZE = 0x1000
|
WINDOW_SIZE = 0x1000
|
||||||
WINDOW_MASK = WINDOW_SIZE - 1
|
WINDOW_MASK = WINDOW_SIZE - 1
|
||||||
THRESHOLD = 3
|
THRESHOLD = 3
|
||||||
INPLACE_THRESHOLD = 0xA
|
INPLACE_THRESHOLD = 0xA
|
||||||
LOOK_RANGE = 0x200
|
LOOK_RANGE = 0x200
|
||||||
MAX_LEN = 0xF + THRESHOLD
|
MAX_LEN = 0xF + THRESHOLD
|
||||||
MAX_BUFFER = 0x10 + 1
|
MAX_BUFFER = 0x10 + 1
|
||||||
|
|
||||||
|
|
||||||
def match_current(window, pos, max_len, data, dpos):
|
def match_current(window: bytes, pos: int, max_len: int, data: bytes, dpos: int) -> int:
|
||||||
length = 0
|
length = 0
|
||||||
while (
|
while (
|
||||||
dpos + length < len(data)
|
dpos + length < len(data)
|
||||||
and length < max_len
|
and length < max_len
|
||||||
and window[(pos + length) & WINDOW_MASK] == data[dpos + length]
|
and window[(pos + length) & WINDOW_MASK] == data[dpos + length]
|
||||||
and length < MAX_LEN
|
and length < MAX_LEN
|
||||||
):
|
):
|
||||||
length += 1
|
length += 1
|
||||||
return length
|
return length
|
||||||
|
|
||||||
|
|
||||||
def match_window(window, pos, data, d_pos):
|
def match_window(window: bytes, pos: int, data: bytes, d_pos: int) -> None | tuple[int, int]:
|
||||||
max_pos = 0
|
max_pos = 0
|
||||||
max_len = 0
|
max_len = 0
|
||||||
for i in range(THRESHOLD, LOOK_RANGE):
|
for i in range(THRESHOLD, LOOK_RANGE):
|
||||||
length = match_current(window, (pos - i) & WINDOW_MASK, i, data, d_pos)
|
length = match_current(window, (pos - i) & WINDOW_MASK, i, data, d_pos)
|
||||||
if length >= INPLACE_THRESHOLD:
|
if length >= INPLACE_THRESHOLD:
|
||||||
return (i, length)
|
return (i, length)
|
||||||
if length >= THRESHOLD:
|
if length >= THRESHOLD:
|
||||||
max_pos = i
|
max_pos = i
|
||||||
max_len = length
|
max_len = length
|
||||||
if max_len >= THRESHOLD:
|
if max_len >= THRESHOLD:
|
||||||
return (max_pos, max_len)
|
return (max_pos, max_len)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def lz77_compress(data):
|
def lz77_compress(data: bytes) -> bytes:
|
||||||
output = bytearray()
|
output = bytearray()
|
||||||
window = [0] * WINDOW_SIZE
|
window = bytearray(WINDOW_SIZE)
|
||||||
current_pos = 0
|
current_pos = 0
|
||||||
current_window = 0
|
current_window = 0
|
||||||
current_buffer = 0
|
current_buffer = 0
|
||||||
flag_byte = 0
|
flag_byte = 0
|
||||||
bit = 0
|
bit = 0
|
||||||
buffer = [0] * MAX_BUFFER
|
buffer = [0] * MAX_BUFFER
|
||||||
pad = 3
|
pad = 3
|
||||||
while current_pos < len(data):
|
while current_pos < len(data):
|
||||||
flag_byte = 0
|
flag_byte = 0
|
||||||
current_buffer = 0
|
current_buffer = 0
|
||||||
for bit_pos in range(8):
|
for bit_pos in range(8):
|
||||||
if current_pos >= len(data):
|
if current_pos >= len(data):
|
||||||
pad = 0
|
pad = 0
|
||||||
flag_byte = flag_byte >> (8 - bit_pos)
|
flag_byte = flag_byte >> (8 - bit_pos)
|
||||||
buffer[current_buffer] = 0
|
buffer[current_buffer] = 0
|
||||||
buffer[current_buffer + 1] = 0
|
buffer[current_buffer + 1] = 0
|
||||||
current_buffer += 2
|
current_buffer += 2
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
found = match_window(window, current_window, data, current_pos)
|
found = match_window(window, current_window, data, current_pos)
|
||||||
if found is not None and found[1] >= THRESHOLD:
|
if found is not None and found[1] >= THRESHOLD:
|
||||||
pos, length = found
|
pos, length = found
|
||||||
|
|
||||||
byte1 = pos >> 4
|
byte1 = pos >> 4
|
||||||
byte2 = (((pos & 0x0F) << 4) | ((length - THRESHOLD) & 0x0F))
|
byte2 = (((pos & 0x0F) << 4) | ((length - THRESHOLD) & 0x0F))
|
||||||
buffer[current_buffer] = byte1
|
buffer[current_buffer] = byte1
|
||||||
buffer[current_buffer + 1] = byte2
|
buffer[current_buffer + 1] = byte2
|
||||||
current_buffer += 2
|
current_buffer += 2
|
||||||
bit = 0
|
bit = 0
|
||||||
for _ in range(length):
|
for _ in range(length):
|
||||||
window[current_window & WINDOW_MASK] = data[current_pos]
|
window[current_window & WINDOW_MASK] = data[current_pos]
|
||||||
current_pos += 1
|
current_pos += 1
|
||||||
current_window += 1
|
current_window += 1
|
||||||
else:
|
else:
|
||||||
buffer[current_buffer] = data[current_pos]
|
buffer[current_buffer] = data[current_pos]
|
||||||
window[current_window] = data[current_pos]
|
window[current_window] = data[current_pos]
|
||||||
current_pos += 1
|
current_pos += 1
|
||||||
current_window += 1
|
current_window += 1
|
||||||
current_buffer += 1
|
current_buffer += 1
|
||||||
bit = 1
|
bit = 1
|
||||||
|
|
||||||
flag_byte = (flag_byte >> 1) | ((bit & 1) << 7)
|
flag_byte = (flag_byte >> 1) | ((bit & 1) << 7)
|
||||||
current_window = current_window & WINDOW_MASK
|
current_window = current_window & WINDOW_MASK
|
||||||
|
|
||||||
assert_true(current_buffer < MAX_BUFFER, f"current buffer {current_buffer} > max buffer {MAX_BUFFER}")
|
assert_true(current_buffer < MAX_BUFFER, f"current buffer {current_buffer} > max buffer {MAX_BUFFER}")
|
||||||
|
|
||||||
output.append(flag_byte)
|
output.append(flag_byte)
|
||||||
for i in range(current_buffer):
|
for i in range(current_buffer):
|
||||||
output.append(buffer[i])
|
output.append(buffer[i])
|
||||||
for _ in range(pad):
|
for _ in range(pad):
|
||||||
output.append(0)
|
output.append(0)
|
||||||
|
|
||||||
return bytes(output)
|
return bytes(output)
|
||||||
|
|
||||||
|
|
||||||
def lz77_decompress(data):
|
def lz77_decompress(data: bytes) -> bytes:
|
||||||
output = bytearray()
|
output = bytearray()
|
||||||
cur_byte = 0
|
cur_byte = 0
|
||||||
window = [0] * WINDOW_SIZE
|
window = bytearray(WINDOW_SIZE)
|
||||||
window_cursor = 0
|
window_cursor = 0
|
||||||
|
|
||||||
while cur_byte < len(data):
|
while cur_byte < len(data):
|
||||||
flag = data[cur_byte]
|
flag = data[cur_byte]
|
||||||
cur_byte += 1
|
cur_byte += 1
|
||||||
|
|
||||||
for i in range(8):
|
for i in range(8):
|
||||||
if (flag >> i) & 1 == 1:
|
if (flag >> i) & 1 == 1:
|
||||||
output.append(data[cur_byte])
|
output.append(data[cur_byte])
|
||||||
window[window_cursor] = data[cur_byte]
|
window[window_cursor] = data[cur_byte]
|
||||||
window_cursor = (window_cursor + 1) & WINDOW_MASK
|
window_cursor = (window_cursor + 1) & WINDOW_MASK
|
||||||
cur_byte += 1
|
cur_byte += 1
|
||||||
else:
|
else:
|
||||||
w = ((data[cur_byte]) << 8) | (data[cur_byte + 1])
|
w = ((data[cur_byte]) << 8) | (data[cur_byte + 1])
|
||||||
if w == 0:
|
if w == 0:
|
||||||
return bytes(output)
|
return bytes(output)
|
||||||
|
|
||||||
cur_byte += 2
|
cur_byte += 2
|
||||||
position = ((window_cursor - (w >> 4)) & WINDOW_MASK)
|
position = ((window_cursor - (w >> 4)) & WINDOW_MASK)
|
||||||
length = (w & 0x0F) + THRESHOLD
|
length = (w & 0x0F) + THRESHOLD
|
||||||
|
|
||||||
for _ in range(length):
|
for _ in range(length):
|
||||||
b = window[position & WINDOW_MASK]
|
b = window[position & WINDOW_MASK]
|
||||||
output.append(b)
|
output.append(b)
|
||||||
window[window_cursor] = b
|
window[window_cursor] = b
|
||||||
window_cursor = (window_cursor + 1) & WINDOW_MASK
|
window_cursor = (window_cursor + 1) & WINDOW_MASK
|
||||||
position += 1
|
position += 1
|
||||||
|
|
||||||
return bytes(output)
|
return bytes(output)
|
||||||
|
|
||||||
|
|
||||||
__all__ = (
|
__all__ = (
|
||||||
"lz77_compress", "lz77_decompress"
|
"lz77_compress", "lz77_decompress"
|
||||||
)
|
)
|
||||||
|
|
143
eaapi/misc.py
143
eaapi/misc.py
|
@ -1,70 +1,73 @@
|
||||||
import inspect
|
import inspect
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from .exception import CheckFailed, InvalidModel
|
from typing import Type
|
||||||
|
|
||||||
|
from .exception import CheckFailed, InvalidModel
|
||||||
def assert_true(check, reason, exc=CheckFailed):
|
|
||||||
if not check:
|
|
||||||
line = inspect.stack()[1].code_context
|
def assert_true(check: bool, reason: str, exc: Type[Exception] = CheckFailed):
|
||||||
print()
|
if not check:
|
||||||
print("\n".join(line))
|
line = inspect.stack()[1].code_context
|
||||||
raise exc(reason)
|
if line:
|
||||||
|
print()
|
||||||
|
print("\n".join(line))
|
||||||
def py_encoding(name):
|
raise exc(reason)
|
||||||
if name.startswith("shift-jis"):
|
|
||||||
return "shift-jis"
|
|
||||||
return name
|
def py_encoding(name: str) -> str:
|
||||||
|
if name.startswith("shift-jis"):
|
||||||
|
return "shift-jis"
|
||||||
def parse_model(model):
|
return name
|
||||||
# e.g. KFC:J:A:A:2019020600
|
|
||||||
match = re.match(r"^([A-Z0-9]{3}):([A-Z]):([A-Z]):([A-Z])(?::(\d{10}))?$", model)
|
|
||||||
if match is None:
|
def parse_model(model: str) -> tuple[str, str, str, str, str]:
|
||||||
raise InvalidModel
|
# e.g. KFC:J:A:A:2019020600
|
||||||
gamecode, dest, spec, rev, datecode = match.groups()
|
match = re.match(r"^([A-Z0-9]{3}):([A-Z]):([A-Z]):([A-Z])(?::(\d{10}))?$", model)
|
||||||
return gamecode, dest, spec, rev, datecode
|
if match is None:
|
||||||
|
raise InvalidModel
|
||||||
|
gamecode, dest, spec, rev, datecode = match.groups()
|
||||||
def pack(data, width):
|
return gamecode, dest, spec, rev, datecode
|
||||||
assert_true(1 <= width <= 8, "Invalid pack size")
|
|
||||||
assert_true(all(i < (1 << width) for i in data), "Data too large for packing")
|
|
||||||
bit_buf = in_buf = 0
|
def pack(data, width: int) -> bytes:
|
||||||
output = bytearray()
|
assert_true(1 <= width <= 8, "Invalid pack size")
|
||||||
for i in data:
|
assert_true(all(i < (1 << width) for i in data), "Data too large for packing")
|
||||||
bit_buf |= i << (8 - width)
|
bit_buf = in_buf = 0
|
||||||
shift = min(8 - in_buf, width)
|
output = bytearray()
|
||||||
bit_buf <<= shift
|
for i in data:
|
||||||
in_buf += shift
|
bit_buf |= i << (8 - width)
|
||||||
if in_buf == 8:
|
shift = min(8 - in_buf, width)
|
||||||
output.append(bit_buf >> 8)
|
bit_buf <<= shift
|
||||||
in_buf = width - shift
|
in_buf += shift
|
||||||
bit_buf = (bit_buf & 0xff) << in_buf
|
if in_buf == 8:
|
||||||
|
output.append(bit_buf >> 8)
|
||||||
if in_buf:
|
in_buf = width - shift
|
||||||
output.append(bit_buf >> in_buf)
|
bit_buf = (bit_buf & 0xff) << in_buf
|
||||||
|
|
||||||
return bytes(output)
|
if in_buf:
|
||||||
|
output.append(bit_buf >> in_buf)
|
||||||
|
|
||||||
def unpack(data, width):
|
return bytes(output)
|
||||||
assert_true(1 <= width <= 8, "Invalid pack size")
|
|
||||||
bit_buf = in_buf = 0
|
|
||||||
output = bytearray()
|
def unpack(data, width: int) -> bytes:
|
||||||
for i in data:
|
assert_true(1 <= width <= 8, "Invalid pack size")
|
||||||
bit_buf |= i
|
bit_buf = in_buf = 0
|
||||||
bit_buf <<= width - in_buf
|
output = bytearray()
|
||||||
in_buf += 8
|
for i in data:
|
||||||
while in_buf >= width:
|
bit_buf |= i
|
||||||
output.append(bit_buf >> 8)
|
bit_buf <<= width - in_buf
|
||||||
in_buf -= width
|
in_buf += 8
|
||||||
bit_buf = (bit_buf & 0xff) << min(width, in_buf)
|
while in_buf >= width:
|
||||||
|
output.append(bit_buf >> 8)
|
||||||
if in_buf:
|
in_buf -= width
|
||||||
output.append(bit_buf >> (8 + in_buf - width))
|
bit_buf = (bit_buf & 0xff) << min(width, in_buf)
|
||||||
|
|
||||||
return bytes(output)
|
if in_buf:
|
||||||
|
output.append(bit_buf >> (8 + in_buf - width))
|
||||||
|
|
||||||
__all__ = ("assert_true", "py_encoding", "parse_model", "pack", "unpack")
|
return bytes(output)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("assert_true", "py_encoding", "parse_model", "pack", "unpack")
|
||||||
|
|
324
eaapi/node.py
324
eaapi/node.py
|
@ -1,155 +1,169 @@
|
||||||
import binascii
|
import binascii
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from html import escape
|
from typing import Generator, Any
|
||||||
|
|
||||||
from .misc import assert_true
|
from html import escape
|
||||||
from .const import DEFAULT_ENCODING, NAME_MAX_COMPRESSED, XML_ENCODING_BACK, Type
|
|
||||||
|
from .misc import assert_true
|
||||||
|
from .const import DEFAULT_ENCODING, NAME_MAX_COMPRESSED, XML_ENCODING_BACK, Type
|
||||||
class XMLNode:
|
from .exception import XMLStrutureError, NodeNotFound, AttributeNotFound
|
||||||
def __init__(self, name, type_, value, attributes=None, encoding=DEFAULT_ENCODING):
|
|
||||||
self.name = name
|
|
||||||
self.type = type_ if isinstance(type_, Type) else Type.from_val(type_)
|
class XMLNode:
|
||||||
self.value = value
|
def __init__(self, name, type_, value, attributes=None, encoding=DEFAULT_ENCODING):
|
||||||
self.children = []
|
self.name = name
|
||||||
self.attributes = {}
|
self.type = type_ if isinstance(type_, Type) else Type.from_val(type_)
|
||||||
if attributes is not None:
|
self.value: Any = value # TODO: A stricter way to do this. Subclassing?
|
||||||
for i in attributes:
|
self.children = []
|
||||||
self.attributes[i] = attributes[i]
|
self.attributes = {}
|
||||||
self.encoding = encoding or DEFAULT_ENCODING
|
if attributes is not None:
|
||||||
assert_true(encoding in XML_ENCODING_BACK, "Invalid encoding")
|
for i in attributes:
|
||||||
|
self.attributes[i] = attributes[i]
|
||||||
@classmethod
|
self.encoding = encoding or DEFAULT_ENCODING
|
||||||
def void(cls, __name, **attributes):
|
assert_true(encoding in XML_ENCODING_BACK, "Invalid encoding")
|
||||||
return cls(__name, Type.Void, (), attributes)
|
|
||||||
|
@classmethod
|
||||||
@property
|
def void(cls, __name, **attributes):
|
||||||
def is_array(self):
|
return cls(__name, Type.Void, (), attributes)
|
||||||
return isinstance(self.value, list)
|
|
||||||
|
@property
|
||||||
@property
|
def is_array(self):
|
||||||
def can_compress(self):
|
return isinstance(self.value, list)
|
||||||
return (
|
|
||||||
(len(self.name) <= NAME_MAX_COMPRESSED)
|
@property
|
||||||
and all(i.can_compress for i in self.children)
|
def can_compress(self):
|
||||||
)
|
return (
|
||||||
|
(len(self.name) <= NAME_MAX_COMPRESSED)
|
||||||
def _xpath(self, attr, path):
|
and all(i.can_compress for i in self.children)
|
||||||
if path:
|
)
|
||||||
child = path.pop(0)
|
|
||||||
for i in self.children:
|
def _xpath(self, attr, path):
|
||||||
if i.name == child:
|
if path:
|
||||||
return i._xpath(attr, path)
|
child = path.pop(0)
|
||||||
raise IndexError
|
for i in self.children:
|
||||||
if not attr:
|
if i.name == child:
|
||||||
return self
|
return i._xpath(attr, path)
|
||||||
if attr in self.attributes:
|
raise NodeNotFound
|
||||||
return self.attributes[attr]
|
if not attr:
|
||||||
raise IndexError
|
return self
|
||||||
|
if attr in self.attributes:
|
||||||
def xpath(self, path):
|
return self.attributes[attr]
|
||||||
match = re.match(r"^(?:@([\w:]+)/)?((?:[\w:]+(?:/|$))+)", path)
|
raise AttributeNotFound
|
||||||
if match is None:
|
|
||||||
raise ValueError
|
def xpath(self, path):
|
||||||
attr = match.group(1)
|
match = re.match(r"^(?:@([\w:]+)/)?((?:[\w:]+(?:/|$))+)", path)
|
||||||
path = match.group(2).split("/")
|
if match is None:
|
||||||
return self._xpath(attr, path)
|
raise ValueError
|
||||||
|
attr = match.group(1)
|
||||||
def append(self, __name, __type=Type.Void, __value=(), **attributes):
|
path = match.group(2).split("/")
|
||||||
child = XMLNode(__name, __type, __value, attributes)
|
return self._xpath(attr, path)
|
||||||
self.children.append(child)
|
|
||||||
return child
|
def append(self, __name, __type=Type.Void, __value=(), **attributes):
|
||||||
|
child = XMLNode(__name, __type, __value, attributes)
|
||||||
def __len__(self):
|
self.children.append(child)
|
||||||
return len(self.children)
|
return child
|
||||||
|
|
||||||
def __iter__(self):
|
def __len__(self):
|
||||||
for i in self.children:
|
return len(self.children)
|
||||||
yield i
|
|
||||||
|
def __iter__(self) -> Generator["XMLNode", None, None]:
|
||||||
def get(self, name, default=None):
|
for i in self.children:
|
||||||
try:
|
yield i
|
||||||
return self[name]
|
|
||||||
except IndexError:
|
def get(self, name, default=None):
|
||||||
return default
|
try:
|
||||||
except KeyError:
|
return self[name]
|
||||||
return default
|
except XMLStrutureError:
|
||||||
|
return default
|
||||||
def __getitem__(self, name):
|
|
||||||
if isinstance(name, int):
|
def __getitem__(self, name):
|
||||||
return self.children[name]
|
if isinstance(name, int):
|
||||||
return self.attributes[name]
|
try:
|
||||||
|
return self.children[name]
|
||||||
def __setitem__(self, name, value):
|
except IndexError:
|
||||||
self.attributes[name] = value
|
raise NodeNotFound
|
||||||
|
try:
|
||||||
def to_str(self, pretty=False):
|
return self.attributes[name]
|
||||||
return (
|
except KeyError:
|
||||||
f'<?xml version="1.0" encoding="{XML_ENCODING_BACK[self.encoding]}"?>'
|
raise AttributeNotFound
|
||||||
+ ("\n" if pretty else "")
|
|
||||||
+ self._to_str(pretty)
|
def __setitem__(self, name, value):
|
||||||
)
|
self.attributes[name] = value
|
||||||
|
|
||||||
def __str__(self):
|
def to_str(self, pretty=False):
|
||||||
return self.to_str(pretty=True)
|
return (
|
||||||
|
f'<?xml version="1.0" encoding="{XML_ENCODING_BACK[self.encoding]}"?>'
|
||||||
def _value_str(self, value):
|
+ ("\n" if pretty else "")
|
||||||
if isinstance(value, list):
|
+ self._to_str(pretty)
|
||||||
return " ".join(map(self._value_str, value))
|
)
|
||||||
if self.type == Type.Blob:
|
|
||||||
return binascii.hexlify(value).decode()
|
def __str__(self):
|
||||||
if self.type == Type.IPv4:
|
return self.to_str(pretty=True)
|
||||||
return f"{value[0]}.{value[1]}.{value[2]}.{value[3]}"
|
|
||||||
if self.type in (Type.Float, Type.TwoFloat, Type.ThreeFloat):
|
def _value_str(self, value: Any) -> str:
|
||||||
return f"{value:.6f}"
|
if isinstance(value, list):
|
||||||
if self.type == Type.Str:
|
return " ".join(map(self._value_str, value))
|
||||||
return escape(str(value))
|
if self.type == Type.Blob:
|
||||||
|
return binascii.hexlify(value).decode()
|
||||||
return str(value)
|
if self.type == Type.IPv4 or self.type == Type.IPv4_Int:
|
||||||
|
if isinstance(value, int):
|
||||||
def _to_str(self, pretty, indent=0):
|
value = (
|
||||||
if not pretty:
|
(value >> 24) & 0xff,
|
||||||
indent = 0
|
(value >> 16) & 0xff,
|
||||||
nl = "\n" if pretty else ""
|
(value >> 8) & 0xff,
|
||||||
tag = f"{' ' * indent}<{self.name}"
|
(value >> 0) & 0xff,
|
||||||
|
)
|
||||||
if self.type != Type.Void:
|
return f"{value[0]}.{value[1]}.{value[2]}.{value[3]}"
|
||||||
tag += f" __type=\"{self.type.value.names[0]}\""
|
if self.type in (Type.Float, Type.TwoFloat, Type.ThreeFloat):
|
||||||
if self.type == Type.Blob:
|
return f"{value:.6f}"
|
||||||
tag += f" __size=\"{len(self.value)}\""
|
if self.type == Type.Str:
|
||||||
if self.is_array:
|
return escape(str(value))
|
||||||
tag += f" __count=\"{len(self.value)}\""
|
|
||||||
|
return str(value)
|
||||||
attributes = " ".join(f"{i}=\"{escape(j)}\"" for i, j in self.attributes.items())
|
|
||||||
if attributes:
|
def _to_str(self, pretty, indent=0):
|
||||||
tag += " " + attributes
|
if not pretty:
|
||||||
tag += ">"
|
indent = 0
|
||||||
if self.value is not None and self.type != Type.Void:
|
nl = "\n" if pretty else ""
|
||||||
if self.is_array:
|
tag = f"{' ' * indent}<{self.name}"
|
||||||
tag += " ".join(map(self._value_str, self.value))
|
|
||||||
else:
|
if self.type != Type.Void:
|
||||||
tag += self._value_str(self.value)
|
tag += f" __type=\"{self.type.value.names[0]}\""
|
||||||
elif not self.children:
|
if self.type == Type.Blob:
|
||||||
return tag[:-1] + (" " if pretty else "") + "/>"
|
tag += f" __size=\"{len(self.value)}\""
|
||||||
|
if self.is_array:
|
||||||
for i in self.children:
|
tag += f" __count=\"{len(self.value)}\""
|
||||||
if isinstance(i, XMLNode):
|
|
||||||
tag += nl + i._to_str(pretty, indent + 4)
|
attributes = " ".join(f"{i}=\"{escape(j)}\"" for i, j in self.attributes.items())
|
||||||
if self.children:
|
if attributes:
|
||||||
tag += nl + " " * indent
|
tag += " " + attributes
|
||||||
tag += f"</{self.name}>"
|
tag += ">"
|
||||||
return tag
|
if self.value is not None and self.type != Type.Void:
|
||||||
|
if self.is_array:
|
||||||
def __eq__(self, other):
|
tag += " ".join(map(self._value_str, self.value))
|
||||||
return (
|
else:
|
||||||
isinstance(other, XMLNode)
|
tag += self._value_str(self.value)
|
||||||
and self.name == other.name
|
elif not self.children:
|
||||||
and self.type == other.type
|
return tag[:-1] + (" " if pretty else "") + "/>"
|
||||||
and self.value == other.value
|
|
||||||
and len(self.children) == len(other.children)
|
for i in self.children:
|
||||||
and all(i == j for i, j in zip(self.children, other.children))
|
if isinstance(i, XMLNode):
|
||||||
)
|
tag += nl + i._to_str(pretty, indent + 4)
|
||||||
|
if self.children:
|
||||||
|
tag += nl + " " * indent
|
||||||
__all__ = ("XMLNode", )
|
tag += f"</{self.name}>"
|
||||||
|
return tag
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return (
|
||||||
|
isinstance(other, XMLNode)
|
||||||
|
and self.name == other.name
|
||||||
|
and self.type == other.type
|
||||||
|
and self.value == other.value
|
||||||
|
and len(self.children) == len(other.children)
|
||||||
|
and all(i == j for i, j in zip(self.children, other.children))
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("XMLNode", )
|
||||||
|
|
|
@ -1,41 +1,41 @@
|
||||||
import math
|
import math
|
||||||
|
|
||||||
|
|
||||||
class Packer:
|
class Packer:
|
||||||
def __init__(self, offset=0):
|
def __init__(self, offset: int = 0):
|
||||||
self._word_cursor = offset
|
self._word_cursor = offset
|
||||||
self._short_cursor = offset
|
self._short_cursor = offset
|
||||||
self._byte_cursor = offset
|
self._byte_cursor = offset
|
||||||
self._boundary = offset % 4
|
self._boundary = offset % 4
|
||||||
|
|
||||||
def _next_block(self):
|
def _next_block(self) -> int:
|
||||||
self._word_cursor += 4
|
self._word_cursor += 4
|
||||||
return self._word_cursor - 4
|
return self._word_cursor - 4
|
||||||
|
|
||||||
def request_allocation(self, size):
|
def request_allocation(self, size: int) -> int:
|
||||||
if size == 0:
|
if size == 0:
|
||||||
return self._word_cursor
|
return self._word_cursor
|
||||||
elif size == 1:
|
elif size == 1:
|
||||||
if self._byte_cursor % 4 == self._boundary:
|
if self._byte_cursor % 4 == self._boundary:
|
||||||
self._byte_cursor = self._next_block() + 1
|
self._byte_cursor = self._next_block() + 1
|
||||||
else:
|
else:
|
||||||
self._byte_cursor += 1
|
self._byte_cursor += 1
|
||||||
return self._byte_cursor - 1
|
return self._byte_cursor - 1
|
||||||
elif size == 2:
|
elif size == 2:
|
||||||
if self._short_cursor % 4 == self._boundary:
|
if self._short_cursor % 4 == self._boundary:
|
||||||
self._short_cursor = self._next_block() + 2
|
self._short_cursor = self._next_block() + 2
|
||||||
else:
|
else:
|
||||||
self._short_cursor += 2
|
self._short_cursor += 2
|
||||||
return self._short_cursor - 2
|
return self._short_cursor - 2
|
||||||
else:
|
else:
|
||||||
old_cursor = self._word_cursor
|
old_cursor = self._word_cursor
|
||||||
for _ in range(math.ceil(size / 4)):
|
for _ in range(math.ceil(size / 4)):
|
||||||
self._word_cursor += 4
|
self._word_cursor += 4
|
||||||
return old_cursor
|
return old_cursor
|
||||||
|
|
||||||
def notify_skipped(self, no_bytes):
|
def notify_skipped(self, no_bytes: int) -> None:
|
||||||
for _ in range(math.ceil(no_bytes / 4)):
|
for _ in range(math.ceil(no_bytes / 4)):
|
||||||
self.request_allocation(4)
|
self.request_allocation(4)
|
||||||
|
|
||||||
|
|
||||||
__all__ = ("Packer", )
|
__all__ = ("Packer", )
|
||||||
|
|
|
@ -1 +0,0 @@
|
||||||
Subproject commit dec7ca3536cf459be21b7284358bf2bea4eb3d14
|
|
|
@ -0,0 +1,2 @@
|
||||||
|
*.pyc
|
||||||
|
__pycache__/
|
|
@ -0,0 +1,43 @@
|
||||||
|
# eaapi.server
|
||||||
|
|
||||||
|
## Quickstart
|
||||||
|
|
||||||
|
```py
|
||||||
|
server = EAMServer("http://127.0.0.1:5000")
|
||||||
|
|
||||||
|
@server.handler("message", "get")
|
||||||
|
def message(ctx):
|
||||||
|
ctx.resp.append("message", expire="300", status="0")
|
||||||
|
|
||||||
|
server.run("0.0.0.0", 5000)
|
||||||
|
```
|
||||||
|
|
||||||
|
```py
|
||||||
|
EAMServer(
|
||||||
|
# The URL this server can be access at. Used for services
|
||||||
|
public_url,
|
||||||
|
# Add `/<service>/` as a prefix when generating service urls (useful when debugging games)
|
||||||
|
prefix_services: bool = False,
|
||||||
|
# If both the URL and the query params match, which one gets the final say?
|
||||||
|
prioritise_params: bool = False,
|
||||||
|
# Include e-Amusement specific details of why requests failed in the responses
|
||||||
|
verbose_errors: bool = False,
|
||||||
|
# The operation mode in services.get's response
|
||||||
|
services_mode: eaapi.const.ServicesMode = eaapi.const.ServicesMode.Operation,
|
||||||
|
# The NTP server to use in services.get
|
||||||
|
ntp_server: str = "ntp://pool.ntp.org/",
|
||||||
|
# Keepalive server to use in serices.get. We'll use our own if one is not specified
|
||||||
|
keepalive_server: str = None
|
||||||
|
)
|
||||||
|
|
||||||
|
@handler(
|
||||||
|
# Module name to handle. Will curry if method is not provided
|
||||||
|
module,
|
||||||
|
# Method name to handle
|
||||||
|
method=None,
|
||||||
|
# The datecode prefix to match during routing
|
||||||
|
dc_prefix=None,
|
||||||
|
# The service to use. Likely `local` or `local2` when handling game functions
|
||||||
|
service=None
|
||||||
|
)
|
||||||
|
```
|
|
@ -0,0 +1,13 @@
|
||||||
|
from .server import EAMServer
|
||||||
|
from .context import CallContext
|
||||||
|
from .model import Model, ModelMatcher, DatecodeMatcher
|
||||||
|
from .exceptions import EAMHTTPException
|
||||||
|
from .controller import Controller
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"EAMServer",
|
||||||
|
"CallContext",
|
||||||
|
"Model", "ModelMatcher", "DatecodeMatcher",
|
||||||
|
"EAMHTTPException",
|
||||||
|
"Controller",
|
||||||
|
)
|
|
@ -0,0 +1,4 @@
|
||||||
|
from .server import EAMServer
|
||||||
|
|
||||||
|
app = EAMServer("http://127.0.0.1:5000", verbose_errors=True)
|
||||||
|
app.run("0.0.0.0", 5000, debug=True)
|
|
@ -0,0 +1,45 @@
|
||||||
|
# Services where the module and service name match
|
||||||
|
TRIVIAL_SERVICES = [
|
||||||
|
"pcbtracker",
|
||||||
|
"message",
|
||||||
|
"facility",
|
||||||
|
"pcbevent",
|
||||||
|
"cardmng",
|
||||||
|
"package",
|
||||||
|
"userdata",
|
||||||
|
"userid",
|
||||||
|
"dlstatus",
|
||||||
|
"eacoin",
|
||||||
|
# "traceroute",
|
||||||
|
"apsmanager",
|
||||||
|
"sidmgr",
|
||||||
|
]
|
||||||
|
|
||||||
|
# Just chilling here until I figure out where these route
|
||||||
|
UNMAPPED_SERVICES = {
|
||||||
|
"???0": "numbering",
|
||||||
|
"???1": "pkglist",
|
||||||
|
"???2": "posevent",
|
||||||
|
"???3": "lobby",
|
||||||
|
"???4": "lobby2",
|
||||||
|
"???5": "netlog", # ins.netlog (?)
|
||||||
|
"???6": "globby",
|
||||||
|
"???7": "matching",
|
||||||
|
"???8": "netsci",
|
||||||
|
}
|
||||||
|
|
||||||
|
MODULE_SERVICES = "services"
|
||||||
|
RESERVED_MODULES = [
|
||||||
|
MODULE_SERVICES,
|
||||||
|
]
|
||||||
|
|
||||||
|
METHOD_SERVICES_GET = "get"
|
||||||
|
|
||||||
|
SERVICE_SERVICES = "services"
|
||||||
|
SERVICE_KEEPALIVE = "keepalive"
|
||||||
|
SERVICE_NTP = "ntp"
|
||||||
|
RESERVED_SERVICES = [
|
||||||
|
SERVICE_SERVICES,
|
||||||
|
SERVICE_KEEPALIVE,
|
||||||
|
SERVICE_NTP,
|
||||||
|
]
|
|
@ -0,0 +1,105 @@
|
||||||
|
import eaapi
|
||||||
|
|
||||||
|
from . import exceptions as exc
|
||||||
|
from .model import Model
|
||||||
|
|
||||||
|
|
||||||
|
NODE_CALL = "call"
|
||||||
|
NODE_RESP = "response"
|
||||||
|
|
||||||
|
|
||||||
|
class CallContext:
|
||||||
|
def __init__(self, request, decoder, call, eainfo, compressed):
|
||||||
|
if call.name != NODE_CALL:
|
||||||
|
raise exc.CallNodeMissing
|
||||||
|
self._request = request
|
||||||
|
self._decoder = decoder
|
||||||
|
self._call: eaapi.XMLNode = call
|
||||||
|
self._eainfo: str | None = eainfo
|
||||||
|
self._compressed: bool = compressed
|
||||||
|
self._resp: eaapi.XMLNode = eaapi.XMLNode.void(NODE_RESP)
|
||||||
|
|
||||||
|
self._module: str | None = None
|
||||||
|
self._method: str | None = None
|
||||||
|
self._url_slash: bool | None = None
|
||||||
|
|
||||||
|
self._model: Model = Model.from_model_str(call.get("model"))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def module(self):
|
||||||
|
return self._module
|
||||||
|
|
||||||
|
@property
|
||||||
|
def method(self):
|
||||||
|
return self._method
|
||||||
|
|
||||||
|
@property
|
||||||
|
def url_slash(self):
|
||||||
|
return self._url_slash
|
||||||
|
|
||||||
|
@property
|
||||||
|
def request(self):
|
||||||
|
return self._request
|
||||||
|
|
||||||
|
@property
|
||||||
|
def was_xml_string(self):
|
||||||
|
return self._decoder.is_xml_string
|
||||||
|
|
||||||
|
@property
|
||||||
|
def was_compressed(self):
|
||||||
|
return self._compressed
|
||||||
|
|
||||||
|
@property
|
||||||
|
def call(self):
|
||||||
|
return self._call
|
||||||
|
|
||||||
|
@property
|
||||||
|
def resp(self):
|
||||||
|
return self._resp
|
||||||
|
|
||||||
|
@property
|
||||||
|
def model(self):
|
||||||
|
return self._model
|
||||||
|
|
||||||
|
@property
|
||||||
|
def srcid(self):
|
||||||
|
return self._call.get("srcid")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def tag(self):
|
||||||
|
return self._call.get("tag")
|
||||||
|
|
||||||
|
def get_root(self):
|
||||||
|
return self.call.xpath(self.module)
|
||||||
|
|
||||||
|
def abort(self, status="1"):
|
||||||
|
return self.resp.append(self.module, status=status)
|
||||||
|
|
||||||
|
def ok(self):
|
||||||
|
return self.abort("0")
|
||||||
|
|
||||||
|
|
||||||
|
class ResponseContext:
|
||||||
|
def __init__(self, resp, decoder, response, compressed):
|
||||||
|
if response.name != NODE_RESP:
|
||||||
|
raise exc.CallNodeMissing
|
||||||
|
self._resp = resp
|
||||||
|
self._decoder = decoder
|
||||||
|
self._response = response
|
||||||
|
self._compressed = compressed
|
||||||
|
|
||||||
|
@property
|
||||||
|
def resp(self):
|
||||||
|
return self._resp
|
||||||
|
|
||||||
|
@property
|
||||||
|
def decoder(self):
|
||||||
|
return self._decoder
|
||||||
|
|
||||||
|
@property
|
||||||
|
def response(self):
|
||||||
|
return self._response
|
||||||
|
|
||||||
|
@property
|
||||||
|
def compressed(self):
|
||||||
|
return self._compressed
|
|
@ -0,0 +1,210 @@
|
||||||
|
from typing import Callable
|
||||||
|
from collections import defaultdict
|
||||||
|
from abc import ABC
|
||||||
|
|
||||||
|
from .context import CallContext
|
||||||
|
from .model import ModelMatcher
|
||||||
|
from .const import RESERVED_MODULES, RESERVED_SERVICES, TRIVIAL_SERVICES, MODULE_SERVICES, METHOD_SERVICES_GET
|
||||||
|
|
||||||
|
import eaapi
|
||||||
|
|
||||||
|
|
||||||
|
Handler = Callable[[CallContext], None]
|
||||||
|
|
||||||
|
|
||||||
|
class IController(ABC):
|
||||||
|
_name: str
|
||||||
|
|
||||||
|
def get_handler(self, ctx: CallContext) -> Handler | None:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def get_service_routes(self, ctx: CallContext | None) -> dict[str, str]:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def serviced_prefixes(self) -> list[str]:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
class Controller(IController):
|
||||||
|
def __init__(self, server, endpoint="", matcher: None | ModelMatcher = None):
|
||||||
|
from .server import EAMServer
|
||||||
|
self._server: EAMServer = server
|
||||||
|
server.controllers.append(self)
|
||||||
|
|
||||||
|
import inspect
|
||||||
|
caller = inspect.getmodule(inspect.stack()[1][0])
|
||||||
|
assert caller is not None
|
||||||
|
self._name = caller.__name__
|
||||||
|
|
||||||
|
self._handlers: dict[
|
||||||
|
tuple[str | None, str | None],
|
||||||
|
list[tuple[ModelMatcher, Handler]]
|
||||||
|
] = defaultdict(lambda: [])
|
||||||
|
self._endpoint = endpoint
|
||||||
|
self._matcher = matcher
|
||||||
|
|
||||||
|
self._services: set[tuple[ModelMatcher, str]] = set()
|
||||||
|
|
||||||
|
self._pre_handler: list[Callable[[CallContext, Handler], Handler]] = []
|
||||||
|
|
||||||
|
@property
|
||||||
|
def server(self):
|
||||||
|
return self._server
|
||||||
|
|
||||||
|
def on_pre(self, callback):
|
||||||
|
if callback not in self._pre_handler:
|
||||||
|
self._pre_handler.append(callback)
|
||||||
|
|
||||||
|
def add_dummy_service(
|
||||||
|
self,
|
||||||
|
service: str,
|
||||||
|
matcher: ModelMatcher | None = None,
|
||||||
|
unsafe_force_bypass_reserved: bool = False
|
||||||
|
):
|
||||||
|
if not unsafe_force_bypass_reserved:
|
||||||
|
if service in RESERVED_SERVICES:
|
||||||
|
raise KeyError(
|
||||||
|
f"{service} is a reserved service provided by default.\n"
|
||||||
|
"Pass unsafe_force_bypass_reserved=True to override this implementation"
|
||||||
|
)
|
||||||
|
if matcher is None:
|
||||||
|
matcher = ModelMatcher()
|
||||||
|
self._services.add((matcher, service))
|
||||||
|
|
||||||
|
def register_handler(
|
||||||
|
self,
|
||||||
|
handler: Handler,
|
||||||
|
module: str,
|
||||||
|
method: str,
|
||||||
|
matcher: ModelMatcher | None = None,
|
||||||
|
service: str | None = None,
|
||||||
|
unsafe_force_bypass_reserved: bool = False
|
||||||
|
):
|
||||||
|
if not unsafe_force_bypass_reserved:
|
||||||
|
if module in RESERVED_MODULES:
|
||||||
|
raise KeyError(
|
||||||
|
f"{module} is a reserved module provided by default.\n"
|
||||||
|
"Pass unsafe_force_bypass_reserved=True to override this implementation"
|
||||||
|
)
|
||||||
|
|
||||||
|
if service is not None and service in RESERVED_SERVICES:
|
||||||
|
raise KeyError(
|
||||||
|
f"{service} is a reserved service provided by default.\n"
|
||||||
|
"Pass unsafe_force_bypass_reserved=True to override this implementation"
|
||||||
|
)
|
||||||
|
|
||||||
|
if service is None:
|
||||||
|
if module not in TRIVIAL_SERVICES:
|
||||||
|
raise ValueError(f"Unable to identify service for {module}")
|
||||||
|
service = module
|
||||||
|
|
||||||
|
handlers = self._handlers[(module, method)]
|
||||||
|
for i in handlers:
|
||||||
|
if matcher is None and i[0] is None:
|
||||||
|
raise ValueError(f"Duplicate default handler for {module}.{method}")
|
||||||
|
if matcher == i[0]:
|
||||||
|
raise ValueError(f"Duplicate handler for {module}.{method} ({matcher})")
|
||||||
|
matcher_ = matcher or ModelMatcher()
|
||||||
|
handlers.append((matcher_, handler))
|
||||||
|
handlers.sort(key=lambda x: x[0])
|
||||||
|
|
||||||
|
self._services.add((matcher_, service))
|
||||||
|
|
||||||
|
def handler(
|
||||||
|
self,
|
||||||
|
module: str,
|
||||||
|
method: str | None = None,
|
||||||
|
matcher: ModelMatcher | None = None,
|
||||||
|
service: str | None = None,
|
||||||
|
unsafe_force_bypass_reserved: bool = False
|
||||||
|
):
|
||||||
|
if method is None:
|
||||||
|
def h2(method):
|
||||||
|
return self.handler(module, method, matcher, service)
|
||||||
|
return h2
|
||||||
|
|
||||||
|
# Commented out for MachineCallContext bodge
|
||||||
|
# def decorator(handler: Handler):
|
||||||
|
def decorator(handler):
|
||||||
|
self.register_handler(handler, module, method, matcher, service, unsafe_force_bypass_reserved)
|
||||||
|
return handler
|
||||||
|
return decorator
|
||||||
|
|
||||||
|
def get_handler(self, ctx):
|
||||||
|
if self._matcher is not None:
|
||||||
|
if not self._matcher.matches(ctx.model):
|
||||||
|
return None
|
||||||
|
|
||||||
|
handlers = self._handlers[(ctx.module, ctx.method)]
|
||||||
|
if not handlers:
|
||||||
|
return None
|
||||||
|
|
||||||
|
for matcher, handler in handlers:
|
||||||
|
if matcher.matches(ctx.model):
|
||||||
|
for i in self._pre_handler:
|
||||||
|
handler = i(ctx, handler)
|
||||||
|
return handler
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_service_routes(self, ctx: CallContext | None):
|
||||||
|
endpoint = self._server.expand_url(self._endpoint)
|
||||||
|
|
||||||
|
if ctx is None:
|
||||||
|
return {
|
||||||
|
service: endpoint
|
||||||
|
for _, service in self._services
|
||||||
|
}
|
||||||
|
|
||||||
|
if self._matcher is not None and not self._matcher.matches(ctx.model):
|
||||||
|
return {}
|
||||||
|
|
||||||
|
return {
|
||||||
|
service: endpoint
|
||||||
|
for matcher, service in self._services
|
||||||
|
if matcher.matches(ctx.model)
|
||||||
|
}
|
||||||
|
|
||||||
|
def serviced_prefixes(self) -> list[str]:
|
||||||
|
return [self._endpoint]
|
||||||
|
|
||||||
|
|
||||||
|
class ServicesController(IController):
|
||||||
|
def __init__(self, server, services_mode: eaapi.const.ServicesMode):
|
||||||
|
from .server import EAMServer
|
||||||
|
self._server: EAMServer = server
|
||||||
|
|
||||||
|
self.services_mode = services_mode
|
||||||
|
|
||||||
|
self._name = __name__ + "." + self.__class__.__name__
|
||||||
|
|
||||||
|
def service_routes_for(self, ctx: CallContext):
|
||||||
|
services = defaultdict(lambda: self._server.public_url)
|
||||||
|
for service, route in self._server.get_service_routes(ctx):
|
||||||
|
services[service] = self._server.expand_url(route)
|
||||||
|
|
||||||
|
return services
|
||||||
|
|
||||||
|
def get_handler(self, ctx: CallContext) -> Handler | None:
|
||||||
|
if ctx.module == MODULE_SERVICES and ctx.method == METHOD_SERVICES_GET:
|
||||||
|
return self.services_get
|
||||||
|
return None
|
||||||
|
|
||||||
|
def service_route(self, for_service: str, ctx: CallContext):
|
||||||
|
routes = self.service_routes_for(ctx)
|
||||||
|
return routes[for_service]
|
||||||
|
|
||||||
|
def services_get(self, ctx: CallContext):
|
||||||
|
services = ctx.resp.append(
|
||||||
|
MODULE_SERVICES, expire="600", mode=self.services_mode.value, status="0"
|
||||||
|
)
|
||||||
|
|
||||||
|
routes = self._server.get_service_routes(ctx)
|
||||||
|
for service in routes:
|
||||||
|
services.append("item", name=service, url=routes[service])
|
||||||
|
|
||||||
|
def get_service_routes(self, ctx: CallContext | None) -> dict[str, str]:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
def serviced_prefixes(self) -> list[str]:
|
||||||
|
return [""]
|
|
@ -0,0 +1,88 @@
|
||||||
|
import time
|
||||||
|
|
||||||
|
from eaapi import Type
|
||||||
|
|
||||||
|
from ..server import EAMServer
|
||||||
|
from ..model import ModelMatcher
|
||||||
|
|
||||||
|
|
||||||
|
server = EAMServer("http://127.0.0.1:5000", verbose_errors=True)
|
||||||
|
|
||||||
|
|
||||||
|
@server.handler("pcbtracker", "alive")
|
||||||
|
def pcbtracker(ctx):
|
||||||
|
ecflag = ctx.call.xpath("@ecflag/pcbtracker")
|
||||||
|
|
||||||
|
ctx.resp.append(
|
||||||
|
"pcbtracker",
|
||||||
|
status="0", expire="1200",
|
||||||
|
ecenable=ecflag, eclimit="1000", limit="1000",
|
||||||
|
time=str(round(time.time()))
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@server.handler("message", "get", matcher=ModelMatcher("KFC"))
|
||||||
|
def message(ctx):
|
||||||
|
ctx.resp.append("message", expire="300", status="0")
|
||||||
|
|
||||||
|
|
||||||
|
@server.handler("facility", "get")
|
||||||
|
def facility_get(ctx):
|
||||||
|
facility = ctx.resp.append("facility", status="0")
|
||||||
|
location = facility.append("location")
|
||||||
|
location.append("id", Type.Str, "")
|
||||||
|
location.append("country", Type.Str, "UK")
|
||||||
|
location.append("region", Type.Str, "")
|
||||||
|
location.append("name", Type.Str, "Hello Flask")
|
||||||
|
location.append("type", Type.U8, 0)
|
||||||
|
location.append("countryname", Type.Str, "UK-c")
|
||||||
|
location.append("countryjname", Type.Str, "")
|
||||||
|
location.append("regionname", Type.Str, "UK-r")
|
||||||
|
location.append("regionjname", Type.Str, "")
|
||||||
|
location.append("customercode", Type.Str, "")
|
||||||
|
location.append("companycode", Type.Str, "")
|
||||||
|
location.append("latitude", Type.S32, 0)
|
||||||
|
location.append("longitude", Type.S32, 0)
|
||||||
|
location.append("accuracy", Type.U8, 0)
|
||||||
|
|
||||||
|
line = facility.append("line")
|
||||||
|
line.append("id", Type.Str, "")
|
||||||
|
line.append("class", Type.U8, 0)
|
||||||
|
|
||||||
|
portfw = facility.append("portfw")
|
||||||
|
portfw.append("globalip", Type.IPv4, (*map(int, ctx.request.remote_addr.split(".")),))
|
||||||
|
portfw.append("globalport", Type.U16, ctx.request.environ.get('REMOTE_PORT'))
|
||||||
|
portfw.append("privateport", Type.U16, ctx.request.environ.get('REMOTE_PORT'))
|
||||||
|
|
||||||
|
public = facility.append("public")
|
||||||
|
public.append("flag", Type.U8, 1)
|
||||||
|
public.append("name", Type.Str, "")
|
||||||
|
public.append("latitude", Type.S32, 0)
|
||||||
|
public.append("longitude", Type.S32, 0)
|
||||||
|
|
||||||
|
share = facility.append("share")
|
||||||
|
eacoin = share.append("eacoin")
|
||||||
|
eacoin.append("notchamount", Type.S32, 0)
|
||||||
|
eacoin.append("notchcount", Type.S32, 0)
|
||||||
|
eacoin.append("supplylimit", Type.S32, 100000)
|
||||||
|
url = share.append("url")
|
||||||
|
url.append("eapass", Type.Str, "www.ea-pass.konami.net")
|
||||||
|
url.append("arcadefan", Type.Str, "www.konami.jp/am")
|
||||||
|
url.append("konaminetdx", Type.Str, "http://am.573.jp")
|
||||||
|
url.append("konamiid", Type.Str, "http://id.konami.jp")
|
||||||
|
url.append("eagate", Type.Str, "http://eagate.573.jp")
|
||||||
|
|
||||||
|
|
||||||
|
@server.handler("pcbevent", "put")
|
||||||
|
def pcbevent(ctx):
|
||||||
|
ctx.resp.append("pcbevent", status="0")
|
||||||
|
|
||||||
|
|
||||||
|
server.route_service_to("cardmng", server.service_route("cardmng"))
|
||||||
|
server.route_service_to("package", server.service_route("package"))
|
||||||
|
|
||||||
|
server.route_service_to("message", "message/KFC", matcher=ModelMatcher("KFC"))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
server.run("0.0.0.0", 5000, debug=True)
|
|
@ -0,0 +1,162 @@
|
||||||
|
import base64
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
import requests
|
||||||
|
|
||||||
|
import eaapi
|
||||||
|
|
||||||
|
from werkzeug.routing import Rule
|
||||||
|
from werkzeug.wrappers import Response
|
||||||
|
|
||||||
|
from .. import exceptions as exc
|
||||||
|
from ..model import ModelMatcher
|
||||||
|
from ..context import ResponseContext
|
||||||
|
from ..server import (
|
||||||
|
METHOD_SERVICES_GET, MODULE_SERVICES, SERVICE_KEEPALIVE, SERVICE_NTP, EAMServer, HEADER_ENCRYPTION,
|
||||||
|
HEADER_COMPRESSION, TRIVIAL_SERVICES
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class EAMProxyServer(EAMServer):
|
||||||
|
def __init__(self, upstream, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self._upstream_fallback = upstream
|
||||||
|
|
||||||
|
self._taps = []
|
||||||
|
self._tapped_services = set()
|
||||||
|
|
||||||
|
if not kwargs.get("disable_routes"):
|
||||||
|
self._rules.add(Rule("/__tap/<upstream>//<model>/<module>/<method>", endpoint="tap_request"))
|
||||||
|
self._rules.add(Rule("/__tap/<upstream>", endpoint="tap_request"))
|
||||||
|
|
||||||
|
def tap_url(self, ctx, url):
|
||||||
|
url = base64.b64encode(url.encode("latin-1")).decode()
|
||||||
|
return urllib.parse.urljoin(self._public_url, f"__tap/{url}/")
|
||||||
|
|
||||||
|
def _call_upstream(self, ctx, upstream):
|
||||||
|
base = upstream or self._upstream_fallback
|
||||||
|
if ctx.url_slash:
|
||||||
|
url = f"{base}/{ctx.model}/{ctx.module}/{ctx.method}"
|
||||||
|
else:
|
||||||
|
url = f"{base}?model={ctx.model}&module={ctx.module}&method={ctx.method}"
|
||||||
|
|
||||||
|
return requests.post(url, headers=ctx.request.headers, data=ctx.request.data)
|
||||||
|
|
||||||
|
def tap(self, module, method=None, matcher=None, service=None):
|
||||||
|
if method is None:
|
||||||
|
def h2(method):
|
||||||
|
return self.handler(module, method, matcher, service)
|
||||||
|
return h2
|
||||||
|
|
||||||
|
def decorator(handler):
|
||||||
|
matcher_ = matcher or ModelMatcher()
|
||||||
|
self._taps.append((module, method, matcher_, handler))
|
||||||
|
|
||||||
|
if service is None:
|
||||||
|
if module in TRIVIAL_SERVICES:
|
||||||
|
self._tapped_services.add((matcher_, module))
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Unable to identify service for {module}")
|
||||||
|
else:
|
||||||
|
self._tapped_services.add((matcher_, service))
|
||||||
|
return handler
|
||||||
|
return decorator
|
||||||
|
|
||||||
|
def _decode_us_request(self, resp):
|
||||||
|
ea_info = resp.headers.get(HEADER_ENCRYPTION)
|
||||||
|
compression = resp.headers.get(HEADER_COMPRESSION)
|
||||||
|
|
||||||
|
compressed = False
|
||||||
|
if compression == eaapi.Compression.Lz77.value:
|
||||||
|
compressed = True
|
||||||
|
elif compression != eaapi.Compression.None_.value:
|
||||||
|
raise exc.UnknownCompression
|
||||||
|
|
||||||
|
payload = eaapi.unwrap(resp.content, ea_info, compressed)
|
||||||
|
decoder = eaapi.Decoder(payload)
|
||||||
|
try:
|
||||||
|
response = decoder.unpack()
|
||||||
|
except eaapi.exception.EAAPIException:
|
||||||
|
raise exc.InvalidPacket
|
||||||
|
|
||||||
|
return ResponseContext(resp, decoder, response, compressed)
|
||||||
|
|
||||||
|
def call_upstream(self, ctx, upstream):
|
||||||
|
resp = self._call_upstream(ctx, upstream)
|
||||||
|
return self._decode_us_request(resp)
|
||||||
|
|
||||||
|
def _services_handler(self, ctx, upstream=None):
|
||||||
|
upstream_ctx = self.call_upstream(ctx, upstream)
|
||||||
|
|
||||||
|
super()._services_handler(ctx)
|
||||||
|
services = ctx.resp.xpath("services")
|
||||||
|
# Never proxy NTP or keepalive
|
||||||
|
services.children = [
|
||||||
|
i for i in services.children
|
||||||
|
if i.get("name") not in (SERVICE_NTP, SERVICE_KEEPALIVE)
|
||||||
|
]
|
||||||
|
|
||||||
|
added = set(i.get("name") for i in services)
|
||||||
|
|
||||||
|
for i in upstream_ctx.response.xpath("services"):
|
||||||
|
name = i.get("name")
|
||||||
|
if name in added:
|
||||||
|
continue
|
||||||
|
|
||||||
|
url = i.get("url")
|
||||||
|
# for matcher, service in self._tapped_services:
|
||||||
|
# if name == service and matcher.matches(ctx.model):
|
||||||
|
# url = self.tap_url(ctx, url)
|
||||||
|
if name != "keepalive":
|
||||||
|
url = self.tap_url(ctx, url)
|
||||||
|
|
||||||
|
services.append("item", name=name, url=url)
|
||||||
|
|
||||||
|
def _handle_request(self, upstream, url_slash, request, model, module, method):
    """Handle a proxied XRPC request.

    services.get is always answered locally (with upstream entries merged
    in). Any other call is first offered to local handlers; when none
    exists, it is forwarded verbatim to the upstream server, and any
    registered taps matching (module, method, model) are invoked on the
    call/response pair for inspection.

    Raises NoMethodHandler for unsupported services methods and
    UpstreamFailed when the upstream request errors out.
    """
    ctx = self._create_ctx(url_slash, request, model, module, method)

    if ctx.module == MODULE_SERVICES:
        if ctx.method != METHOD_SERVICES_GET:
            raise exc.NoMethodHandler
        self._services_handler(ctx, upstream)
        return self._encode_response(ctx)

    try:
        return super()._handle_request(ctx)
    except exc.NoMethodHandler:
        # No local handler — proxy the call upstream instead.
        try:
            us_resp = self._call_upstream(ctx, upstream)
        except requests.RequestException:
            raise exc.UpstreamFailed

    # Only reached on the NoMethodHandler path, where us_resp is bound.
    # Taps are best-effort observers: a failing tap must never prevent the
    # upstream response from reaching the client.
    try:
        resp_ctx = self._decode_us_request(us_resp)
        for tap in self._taps:
            # tap = (module, method, matcher, callback)
            if tap[0] == module and tap[1] == method and tap[2].matches(ctx.model):
                tap[3](ctx, resp_ctx)
    except Exception:
        import traceback
        traceback.print_exc()

    # Build the passthrough response AFTER the try/except rather than in a
    # `finally:` block — the previous `return` inside `finally` silently
    # swallowed non-Exception BaseExceptions (e.g. KeyboardInterrupt).
    response = Response(us_resp.content, us_resp.status_code)
    for header in us_resp.headers:
        response.headers[header] = us_resp.headers[header]
    return response
||||||
|
def on_xrpc_request(self, request, service=None, model=None, module=None, method=None):
    """XRPC entry point with no upstream configured; delegates to the tap handler."""
    return self.on_tap_request(
        request,
        None,  # no upstream: behave as a plain (non-proxying) endpoint
        service=service,
        model=model,
        module=module,
        method=method,
    )
|
def on_tap_request(self, request, upstream, service=None, model=None, module=None, method=None):
    """Handle a tapped XRPC request, decoding the base64 upstream URL first."""
    parsed = self.parse_request(request, service, model, module, method)
    url_slash, service, model, module, method = parsed

    # Non-POST traffic gets the informational/error response instead.
    if request.method != "POST":
        return self.on_xrpc_other(request, service, model, module, method)

    decoded_upstream = upstream
    if decoded_upstream is not None:
        try:
            decoded_upstream = base64.b64decode(decoded_upstream).decode("latin-1")
        except Exception:
            raise exc.InvalidUpstream

    return self._handle_request(decoded_upstream, url_slash, request, model, module, method)
|
|
@ -0,0 +1,32 @@
|
||||||
|
from .proxy import EAMProxyServer
|
||||||
|
from ..model import ModelMatcher
|
||||||
|
|
||||||
|
|
||||||
|
# Example proxy: forwards unhandled XRPC calls to a local upstream server
# while exposing itself on the public URL below.
server = EAMProxyServer(
    upstream="http://127.0.0.1:8083",
    public_url="http://127.0.0.1:5000",
    verbose_errors=True
)
||||||
|
|
||||||
|
@server.tap("game", "sv4_save_m", matcher=ModelMatcher("KFC"), service="local2")
def sv4_save_m(ctx_in, ctx_out):
    """Observe proxied game.sv4_save_m (score save) calls for KFC models.

    Purely diagnostic: prints the request/response and selected score fields.
    """
    print("SAVE M")
    print(ctx_in.call)
    print(ctx_out.response)

    game = ctx_in.call.xpath("game")
    print("mid:", game.xpath("music_id").value)
    print("score:", game.xpath("score").value)
    print("clear:", game.xpath("clear_type").value)

    # just_checker children look like a timing histogram around the "just"
    # judgement — NOTE(review): confirm field semantics against the game.
    just = game.xpath("just_checker")
    print(
        just.xpath("before_3").value, just.xpath("before_2").value, just.xpath("before_1").value,
        just.xpath("just").value,
        just.xpath("after_1").value, just.xpath("after_2").value, just.xpath("after_3").value
    )
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Development entry point; binds on all interfaces with the debugger on.
    server.run("0.0.0.0", 5000, debug=True)
|
|
@ -0,0 +1,51 @@
|
||||||
|
from werkzeug.exceptions import HTTPException
|
||||||
|
|
||||||
|
|
||||||
|
class EAMHTTPException(HTTPException):
    """Base class for XRPC-level errors; rendered by the server's _eamhttp_error."""
    code = None
    # Human-readable message included in verbose error responses.
    eam_description = None
||||||
|
|
||||||
|
class InvalidUpstream(EAMHTTPException):
    """Raised when the base64-encoded upstream URL cannot be decoded."""
    code = 400
    eam_description = "Upstream URL invalid"
||||||
|
|
||||||
|
class UpstreamFailed(EAMHTTPException):
    """Raised when the proxied request to the upstream server errors out."""
    code = 400
    eam_description = "Upstream request failed"
||||||
|
|
||||||
|
class UnknownCompression(EAMHTTPException):
    """Raised when the X-Compress header holds an unrecognised value."""
    code = 400
    eam_description = "Unknown compression type"
||||||
|
|
||||||
|
class InvalidPacket(EAMHTTPException):
    """Raised when the request body cannot be decoded as a binary-XML packet."""
    code = 400
    eam_description = "Invalid XML packet"
||||||
|
|
||||||
|
class InvalidModel(EAMHTTPException):
    """Raised when the model string fails to parse."""
    code = 400
    eam_description = "Invalid model"
||||||
|
|
||||||
|
class ModelMissmatch(EAMHTTPException):
    """Raised when the model in the URL disagrees with the model in the packet.

    The class name's spelling is kept for backward compatibility with
    existing `except` clauses.
    """
    code = 400
    # Fixed typo in the user-facing message ("missmatched" -> "mismatched").
    eam_description = "Model mismatched"
||||||
|
|
||||||
|
class ModuleMethodMissing(EAMHTTPException):
    """Raised when neither path nor query parameters supply module/method."""
    code = 400
    eam_description = "Module or method missing"
||||||
|
|
||||||
|
class CallNodeMissing(EAMHTTPException):
    """Raised when the decoded packet lacks a <call> root node.

    NOTE(review): raised outside this excerpt — confirm usage site.
    """
    code = 400
    eam_description = "<call> node missing"
||||||
|
|
||||||
|
class NoMethodHandler(EAMHTTPException):
    """Raised when no controller provides a handler for the module/method."""
    code = 404
    eam_description = "No handler found for module/method"
|
@ -0,0 +1,201 @@
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
import eaapi
|
||||||
|
|
||||||
|
|
||||||
|
class Model:
    """An eAmusement model identifier.

    Rendered as "gamecode:dest:spec:rev:datecode" (see __str__); the
    datecode is a 10-digit string laid out as YYYYMMDDNN, where NN is a
    same-day minor revision — layout inferred from the slicing below.
    Instances are immutable in practice and hash/compare by string form.
    """

    def __init__(self, gamecode: str, dest: str, spec: str, rev: str, datecode: str):
        self._gamecode = gamecode
        self._dest = dest
        self._spec = spec
        self._rev = rev
        self._datecode = datecode

    @classmethod
    def from_model_str(cls, model: str) -> "Model":
        """Alternate constructor: parse a raw model string via eaapi.parse_model."""
        return cls(*eaapi.parse_model(model))

    @property
    def gamecode(self) -> str:
        return self._gamecode

    @property
    def dest(self) -> str:
        return self._dest

    @property
    def spec(self) -> str:
        return self._spec

    @property
    def rev(self) -> str:
        return self._rev

    @property
    def datecode(self) -> int:
        # Full datecode as an integer (YYYYMMDDNN).
        return int(self._datecode)

    @property
    def year(self) -> int:
        return int(self._datecode[:4])

    @property
    def month(self) -> int:
        return int(self._datecode[4:6])

    @property
    def day(self) -> int:
        return int(self._datecode[6:8])

    @property
    def minor(self) -> int:
        return int(self._datecode[8:10])

    def __hash__(self):
        # Hash by the canonical string so equal models collide correctly.
        return hash(str(self))

    def __eq__(self, other):
        if not isinstance(other, Model):
            return False
        return str(other) == str(self)

    def __str__(self):
        return f"{self.gamecode}:{self.dest}:{self.spec}:{self.rev}:{self.datecode}"

    def __repr__(self):
        return f"<Model {self} at 0x{id(self):016X}>"
|
|
||||||
|
@dataclass
class DatecodeMatcher:
    """Filter over a Model's datecode; None fields match anything."""

    year: int | None = None
    month: int | None = None
    day: int | None = None
    minor: int | None = None

    @classmethod
    def from_str(cls, datecode: str):
        """Parse a full 10-digit datecode string into an exact matcher.

        Raises ValueError for anything that is not exactly 10 digits.
        """
        if len(datecode) != 10 or not datecode.isdigit():
            raise ValueError("Not a valid datecode")
        return cls(
            int(datecode[0:4]),
            int(datecode[4:6]),
            int(datecode[6:8]),
            int(datecode[8:10])
        )

    def _num_filters(self):
        # Count of fields that actually constrain a match (specificity).
        num = 0
        if self.year is not None:
            num += 1
        if self.month is not None:
            num += 1
        if self.day is not None:
            num += 1
        if self.minor is not None:
            num += 1
        return num

    def __lt__(self, other):
        # Orders matchers by specificity: a matcher sorts earlier when it is
        # at least as specific as `other` on every field.
        # NOTE(review): this is not a strict total order (a < b and b < a can
        # both hold for equally-specific matchers) — confirm that callers
        # only use it for sorting by specificity.
        if self._num_filters() < other._num_filters():
            return False
        if self.minor is None and other.minor is not None:
            return False
        if self.day is None and other.day is not None:
            return False
        if self.month is None and other.month is not None:
            return False
        if self.year is None and other.year is not None:
            return False
        return True

    def __hash__(self):
        return hash(str(self))

    def __str__(self):
        # Wildcards render as dashes, e.g. "2019----02".
        year = self.year if self.year is not None else "----"
        month = self.month if self.month is not None else "--"
        day = self.day if self.day is not None else "--"
        minor = self.minor if self.minor is not None else "--"
        return f"{year:04}{month:02}{day:02}{minor:02}"

    def matches(self, model):
        """True when every non-None field equals the model's datecode field."""
        if self.year is not None and model.year != self.year:
            return False
        if self.month is not None and model.month != self.month:
            return False
        if self.day is not None and model.day != self.day:
            return False
        if self.minor is not None and model.minor != self.minor:
            return False
        return True
|
|
||||||
|
@dataclass
class ModelMatcher:
    """Filter over a full Model; None fields match anything.

    datecode may be a single DatecodeMatcher, a list (any-of semantics),
    or None (match all datecodes).
    """

    gamecode: str | None = None
    dest: str | None = None
    spec: str | None = None
    rev: str | None = None
    datecode: list[DatecodeMatcher] | DatecodeMatcher | None = None

    def _num_filters(self):
        # Specificity: count of constrained fields, including nested
        # datecode matcher fields.
        num = 0
        if self.gamecode is not None:
            num += 1
        if self.dest is not None:
            num += 1
        if self.spec is not None:
            num += 1
        if self.rev is not None:
            num += 1
        if isinstance(self.datecode, list):
            num += sum(i._num_filters() for i in self.datecode)
        elif self.datecode is not None:
            num += self.datecode._num_filters()
        return num

    def __lt__(self, other):
        # Specificity ordering; mirrors DatecodeMatcher.__lt__ and shares
        # the same caveat of not being a strict total order.
        if self._num_filters() < other._num_filters():
            return False
        if self.datecode is None and other.datecode is not None:
            return False
        if self.rev is None and other.rev is not None:
            return False
        if self.spec is None and other.spec is not None:
            return False
        if self.dest is None and other.dest is not None:
            return False
        if self.gamecode is None and other.gamecode is not None:
            return False
        return True

    def __hash__(self):
        return hash(str(self))

    def __str__(self):
        # Wildcards render as dashes; list datecodes join with "/".
        gamecode = self.gamecode if self.gamecode is not None else "---"
        dest = self.dest if self.dest is not None else "-"
        spec = self.spec if self.spec is not None else "-"
        rev = self.rev if self.rev is not None else "-"
        datecode = self.datecode if self.datecode is not None else "-" * 10
        if isinstance(self.datecode, list):
            datecode = "/".join(str(i) for i in self.datecode)
            if not datecode:
                datecode = "-" * 10
        return f"{gamecode:3}:{dest}:{spec}:{rev}:{datecode}"

    def matches(self, model):
        """True when the model satisfies every non-None constraint.

        A list datecode matches when ANY of its entries matches.
        """
        if self.gamecode is not None and model.gamecode != self.gamecode:
            return False
        if self.dest is not None and model.dest != self.dest:
            return False
        if self.spec is not None and model.spec != self.spec:
            return False
        if self.rev is not None and model.rev != self.rev:
            return False
        if isinstance(self.datecode, list):
            return any(i.matches(model) for i in self.datecode)
        if self.datecode is not None:
            return self.datecode.matches(model)
        return True
|
|
@ -0,0 +1,397 @@
|
||||||
|
import traceback
|
||||||
|
import urllib.parse
|
||||||
|
import urllib
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
from typing import Callable
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
from werkzeug.exceptions import HTTPException, MethodNotAllowed
|
||||||
|
from werkzeug.wrappers import Request, Response
|
||||||
|
from werkzeug.routing import Map, Rule
|
||||||
|
|
||||||
|
import eaapi
|
||||||
|
|
||||||
|
from . import exceptions as exc
|
||||||
|
from .context import CallContext
|
||||||
|
from .model import Model
|
||||||
|
from .controller import IController, ServicesController
|
||||||
|
from .const import SERVICE_NTP, SERVICE_KEEPALIVE
|
||||||
|
|
||||||
|
|
||||||
|
Handler = Callable[[CallContext], None]
|
||||||
|
|
||||||
|
|
||||||
|
# HTTP headers carrying the eAmusement crypto key and compression flag.
HEADER_ENCRYPTION = "X-Eamuse-Info"
HEADER_COMPRESSION = "X-Compress"

# Address reported for all network endpoints in the keepalive URL.
PINGABLE_IP = "127.0.0.1"
|
||||||
|
|
||||||
|
class NetworkState:
    """Addresses and timing values advertised via the keepalive URL."""

    def __init__(self):
        self._pa = PINGABLE_IP  # TODO: what does this one mean?
        self.router_ip = PINGABLE_IP
        self.gateway_ip = PINGABLE_IP
        self.center_ip = PINGABLE_IP

    def format_ka(self, base):
        """Append the keepalive query parameters (pa/ia/ga/ma/t1/t2) to *base*."""
        return base + "?" + urllib.parse.urlencode({
            "pa": self.pa,
            "ia": self.ia,
            "ga": self.ga,
            "ma": self.ma,
            "t1": self.t1,
            "t2": self.t2,
        })

    @property
    def pa(self) -> str:
        return self._pa

    @property
    def ia(self) -> str:
        return self.router_ip

    @property
    def ga(self) -> str:
        return self.gateway_ip

    @property
    def ma(self) -> str:
        return self.center_ip

    # TODO: Identify what these values are. Ping intervals?
    @property
    def t1(self):
        return 2

    @property
    def t2(self):
        return 10
||||||
|
|
||||||
|
class EAMServer:
    """A WSGI server implementing the eAmusement XRPC protocol.

    Requests are routed to registered controllers; a ServicesController is
    installed by default unless no_services_handler is set.
    """

    def __init__(
        self,
        public_url: str,
        prioritise_params: bool = False,
        verbose_errors: bool = False,
        services_mode: eaapi.const.ServicesMode = eaapi.const.ServicesMode.Operation,
        ntp_server: str = "ntp://pool.ntp.org/",
        keepalive_server: str | None = None,
        no_keepalive_route: bool = False,
        disable_routes: bool = False,
        no_services_handler: bool = False,
    ):
        self.network = NetworkState()

        # When True, error responses include details/context nodes.
        self.verbose_errors = verbose_errors

        # Whether query parameters beat URL path segments in parse_request.
        self._prioritise_params = prioritise_params
        self._public_url = public_url

        self.disable_routes = disable_routes
        self._no_keepalive_route = no_keepalive_route

        self.ntp = ntp_server
        # Default keepalive endpoint lives under our own public URL.
        self.keepalive = keepalive_server or f"{public_url}/keepalive"

        # PRNG used to derive per-response encryption keys.
        self._prng = eaapi.crypt.new_prng()

        # Lifecycle hooks; see on_setup / on_pre_handlers_check / on_teardown.
        self._setup = []
        self._pre_handlers_check = []
        self._teardown = []

        # Last-dispatched context/controller, used to enrich error output.
        self._einfo_ctx: CallContext | None = None
        self._einfo_controller: str | None = None

        self.controllers: list[IController] = []
        if not no_services_handler:
            self.controllers.append(ServicesController(self, services_mode))
|
|
||||||
|
def on_setup(self, callback):
    """Register a setup hook (run at the start of each WSGI call); duplicates ignored."""
    already_registered = callback in self._setup
    if not already_registered:
        self._setup.append(callback)
|
||||||
|
def on_pre_handlers_check(self, callback):
    """Register a hook run on each decoded context before dispatch; duplicates ignored."""
    if callback in self._pre_handlers_check:
        return
    self._pre_handlers_check.append(callback)
|
||||||
|
def on_teardown(self, callback):
    """Register a teardown hook (receives the raised exception or None); duplicates ignored."""
    if callback in self._teardown:
        return
    self._teardown.append(callback)
|
||||||
|
def build_rules_map(self) -> Map:
    """Build the werkzeug routing map for all controller-serviced prefixes.

    Every prefix gets both "<prefix>/<model>/<module>/<method>" and the bare
    prefix route; a /keepalive route is added unless disabled.
    """
    if self.disable_routes:
        return Map([])

    rules = Map([], strict_slashes=False, merge_slashes=False)

    prefixes = {"/"}
    for i in self.controllers:
        for prefix in i.serviced_prefixes():
            prefix = self.expand_url(prefix)
            # Only prefixes under our own public URL are routable here.
            if not prefix.startswith(self._public_url):
                continue
            prefix = prefix[len(self._public_url):]
            if prefix == "":
                prefix = "/"

            prefixes.add(prefix)

    for i in prefixes:
        rules.add(Rule(f"{i}/<model>/<module>/<method>", endpoint="xrpc_request"))
        # WSGI flattens the // at the start
        if i == "/":
            rules.add(Rule("/<model>/<module>/<method>", endpoint="xrpc_request"))
        rules.add(Rule(f"{i}", endpoint="xrpc_request"))

    if not self._no_keepalive_route:
        rules.add(Rule("/keepalive", endpoint="keepalive_request"))

    return rules
|
||||||
|
def expand_url(self, url: str) -> str:
    """Resolve *url* against the server's public base URL."""
    base = self._public_url
    return urllib.parse.urljoin(base, url)
|
||||||
|
@property
def public_url(self) -> str:
    """The configured public base URL of this server (read-only)."""
    return self._public_url
|
||||||
|
def get_service_routes(self, ctx: CallContext | None) -> dict[str, str]:
    """Map each service name to its URL for a services.get response.

    Unknown services default to the public URL (defaultdict); NTP and
    keepalive are fixed, and controllers may add/override entries.
    """
    services: dict[str, str] = defaultdict(lambda: self.public_url)
    services[SERVICE_NTP] = self.ntp
    services[SERVICE_KEEPALIVE] = self.network.format_ka(self.keepalive)

    for i in self.controllers:
        services.update(i.get_service_routes(ctx))
    return services
|
||||||
|
def _decode_request(self, request: Request) -> CallContext:
    """Decrypt, decompress, and decode a request body into a CallContext.

    Raises UnknownCompression for an unrecognised X-Compress value and
    InvalidPacket when the binary-XML payload fails to decode.
    """
    ea_info = request.headers.get(HEADER_ENCRYPTION)
    compression = request.headers.get(HEADER_COMPRESSION)

    compressed = False
    if compression == eaapi.Compression.Lz77.value:
        compressed = True
    elif compression != eaapi.Compression.None_.value:
        raise exc.UnknownCompression

    # unwrap() decrypts (when an X-Eamuse-Info key is present) then
    # decompresses the payload.
    payload = eaapi.unwrap(request.data, ea_info, compressed)
    decoder = eaapi.Decoder(payload)
    try:
        call = decoder.unpack()
    except eaapi.EAAPIException:
        raise exc.InvalidPacket

    return CallContext(request, decoder, call, ea_info, compressed)
|
||||||
|
def _encode_response(self, ctx: CallContext) -> Response:
    """Encode ctx.resp, mirroring the request's encryption/compression.

    A fresh key is generated only when the request itself was encrypted.
    """
    if ctx._eainfo is None:
        ea_info = None
    else:
        ea_info = eaapi.crypt.get_key(self._prng)

    encoded = eaapi.Encoder.encode(ctx.resp, ctx.was_xml_string)
    wrapped = eaapi.wrap(encoded, ea_info, ctx.was_compressed)
    response = Response(wrapped, 200)
    if ea_info:
        response.headers[HEADER_ENCRYPTION] = ea_info
    response.headers[HEADER_COMPRESSION] = (
        eaapi.Compression.Lz77 if ctx.was_compressed
        else eaapi.Compression.None_
    ).value

    return response
|
||||||
|
def _create_ctx(
    self,
    url_slash: bool,
    request: Request,
    model: Model | None,
    module: str,
    method: str
) -> CallContext:
    """Decode the request and attach routing info to the context.

    Also records the context for error reporting, and raises ModelMissmatch
    when the URL/query model disagrees with the model in the packet.
    """
    ctx = self._decode_request(request)
    ctx._module = module
    ctx._method = method
    ctx._url_slash = url_slash
    # Remember the active context so _make_error can add diagnostics.
    self._einfo_ctx = ctx

    if ctx.model != model:
        raise exc.ModelMissmatch
    return ctx
|
||||||
|
def _handle_request(self, ctx: CallContext) -> Response:
    """Dispatch ctx to the first controller that provides a handler.

    Raises NoMethodHandler when no controller accepts the call.
    """
    for controller in self.controllers:
        if (handler := controller.get_handler(ctx)) is not None:
            # Record which controller handled the call, for error context.
            self._einfo_controller = (
                f"{controller._name}"
            )

            handler(ctx)
            break
    else:
        raise exc.NoMethodHandler

    return self._encode_response(ctx)
|
||||||
|
def on_xrpc_other(
    self,
    request: Request,
    service: str | None = None,
    model: str | None = None,
    module: str | None = None,
    method: str | None = None
):
    """Answer non-POST XRPC traffic.

    Only verbose GETs get a diagnostic banner; everything else is a 405.
    """
    is_verbose_get = request.method == "GET" and self.verbose_errors
    if not is_verbose_get:
        raise MethodNotAllowed

    return Response(
        f"XRPC running. model {model}, call {module}.{method} ({service})"
    )
|
||||||
|
def keepalive_request(self) -> Response:
    """Answer keepalive pings with an empty 200 response."""
    empty_response = Response(None)
    return empty_response
|
||||||
|
def parse_request(
|
||||||
|
self,
|
||||||
|
request: Request,
|
||||||
|
service: str | None = None,
|
||||||
|
model: str | None = None,
|
||||||
|
module: str | None = None,
|
||||||
|
method: str | None = None
|
||||||
|
):
|
||||||
|
url_slash = bool(module and module and method)
|
||||||
|
model_param = request.args.get("model", None)
|
||||||
|
module_param = request.args.get("module", None)
|
||||||
|
method_param = request.args.get("method", None)
|
||||||
|
if "f" in request.args:
|
||||||
|
module_param, _, method_param = request.args.get("f", "").partition(".")
|
||||||
|
|
||||||
|
if self._prioritise_params:
|
||||||
|
model = model_param or model
|
||||||
|
module = module_param or module
|
||||||
|
method = method_param or method
|
||||||
|
else:
|
||||||
|
model = model or model_param
|
||||||
|
module = module or module_param
|
||||||
|
method = method or method_param
|
||||||
|
|
||||||
|
if module is None or method is None:
|
||||||
|
raise exc.ModuleMethodMissing
|
||||||
|
|
||||||
|
if model is None:
|
||||||
|
model_obj = None
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
model_obj = Model.from_model_str(model)
|
||||||
|
except eaapi.exception.InvalidModel:
|
||||||
|
raise exc.InvalidModel
|
||||||
|
|
||||||
|
return url_slash, service, model_obj, module, method
|
||||||
|
|
||||||
|
def on_xrpc_request(
    self,
    request: Request,
    service: str | None = None,
    model: str | None = None,
    module: str | None = None,
    method: str | None = None
):
    """Main XRPC endpoint: parse, run pre-handler hooks, then dispatch."""
    url_slash, service, model_obj, module, method = self.parse_request(request, service, model, module, method)

    # Non-POST traffic gets the informational/error response instead.
    if request.method != "POST":
        return self.on_xrpc_other(request, service, model, module, method)

    ctx = self._create_ctx(url_slash, request, model_obj, module, method)
    for i in self._pre_handlers_check:
        i(ctx)
    return self._handle_request(ctx)
|
||||||
|
def _make_error(self, status: int | None = None, message: str | None = None) -> Response:
    """Build an encoded XRPC error response.

    In verbose mode the payload also carries a details node and a context
    node describing the failing module/method/model/controller.
    """
    response = eaapi.XMLNode.void("response")
    if status is not None:
        response["status"] = str(status)

    if self.verbose_errors:
        if message:
            response.append("details", eaapi.Type.Str, message)

        context = response.append("context")
        if self._einfo_ctx is not None:
            context.append("module", eaapi.Type.Str, self._einfo_ctx.module)
            context.append("method", eaapi.Type.Str, self._einfo_ctx.method)
            context.append("game", eaapi.Type.Str, str(self._einfo_ctx.model))
        if self._einfo_controller is not None:
            context.append("controller", eaapi.Type.Str, self._einfo_controller)

    # NOTE: `response` is rebound from the XMLNode to the HTTP Response here.
    encoded = eaapi.Encoder.encode(response, False)
    wrapped = eaapi.wrap(encoded, None, False)
    response = Response(wrapped, status or 500)
    response.headers[HEADER_COMPRESSION] = eaapi.Compression.None_.value

    return response
|
||||||
|
def _eamhttp_error(self, error: exc.EAMHTTPException) -> Response:
    """Render an EAMHTTPException as an encoded XRPC error response."""
    # Parameter renamed from `exc` — it shadowed the exceptions module import.
    return self._make_error(error.code, error.eam_description)
|
||||||
|
def _structure_error(self, e: eaapi.exception.XMLStrutureError) -> Response:
    """Render an XML-structure error with a trimmed, relative-path traceback."""
    summary = traceback.extract_tb(e.__traceback__)
    # Relative paths keep the server's install location out of responses.
    for frame_summary in summary:
        filename = frame_summary.filename
        frame_summary.filename = os.path.relpath(filename)

    # The first three entries are within the controller, and the last one is us
    # NOTE(review): this slice assumes a fixed dispatch depth — verify if the
    # call chain changes.
    summary = summary[3:-1]
    tb = "".join(traceback.format_list(traceback.StackSummary.from_list(summary)))
    tb += f"{e.__module__}.{e.__class__.__name__}"

    return self._make_error(400, tb)
|
||||||
|
def _generic_error(self, error: Exception) -> Response:
    """Fallback: wrap an unexpected exception into a 500 XRPC error."""
    # Parameter renamed from `exc` — it shadowed the exceptions module import.
    return self._make_error(500, str(error))
|
||||||
|
def dispatch_request(self, request):
    """Route a request to its endpoint handler, converting errors to responses.

    EAMHTTPExceptions become encoded XRPC errors; plain werkzeug
    HTTPExceptions pass through; anything else becomes a 400/500 error.
    """
    # Reset per-request error context.
    self._einfo_ctx = None
    self._einfo_controller = None

    adapter = self.build_rules_map().bind_to_environ(request.environ)
    try:
        endpoint, values = adapter.match()
        # Endpoint names map to on_<endpoint> methods.
        return getattr(self, f"on_{endpoint}")(request, **values)
    except exc.EAMHTTPException as e:
        return self._eamhttp_error(e)
    except HTTPException as e:
        return e
    except eaapi.exception.XMLStrutureError as e:
        traceback.print_exc(file=sys.stderr)
        return self._structure_error(e)
    except Exception as e:
        traceback.print_exc(file=sys.stderr)
        return self._generic_error(e)
|
||||||
|
def wsgi_app(self, environ, start_response):
    """WSGI core: wrap the environ, dispatch, and stream back the response."""
    wrapped_request = Request(environ)
    result = self.dispatch_request(wrapped_request)
    return result(environ, start_response)
|
||||||
|
def __call__(self, environ, start_response):
    """WSGI callable: run setup hooks, dispatch, then run teardown hooks.

    Teardown hooks receive the raised exception, or None on success.
    """
    for i in self._setup:
        i()

    try:
        response = self.wsgi_app(environ, start_response)
        for i in self._teardown:
            i(None)
        return response
    except Exception as e:
        for i in self._teardown:
            i(e)
        # Bare `raise` re-raises with the original traceback intact
        # (`raise e` appended an extra frame here).
        raise
|
||||||
|
def run(self, host="127.0.0.1", port=5000, debug=False):
    """Serve with werkzeug's development server (not for production use)."""
    # Imported lazily so werkzeug.serving is only required when self-hosting.
    from werkzeug.serving import run_simple
    run_simple(host, port, self, use_debugger=debug, use_reloader=debug)
|
@ -1,31 +1,31 @@
|
||||||
from .crypt import ea_symmetric_crypt
|
from .crypt import ea_symmetric_crypt
|
||||||
from .lz77 import lz77_compress, lz77_decompress
|
from .lz77 import lz77_compress, lz77_decompress
|
||||||
|
|
||||||
|
|
||||||
def wrap(packet: bytes, info: str | None = None, compressed: bool = True) -> bytes:
    """Prepare an outgoing packet: LZ77-compress, then encrypt.

    *info* is the X-Eamuse-Info key string; when None the packet is left
    unencrypted.
    """
    if compressed:
        packet = lz77_compress(packet)
    if info is None:
        return packet
    return ea_symmetric_crypt(packet, info)
|
|
||||||
|
|
||||||
def unwrap(packet: bytes, info: str | None = None, compressed: bool = True) -> bytes:
    """Reverse wrap(): decrypt (if *info* given), then decompress.

    compressed=None enables auto-detection: decompression is attempted and
    the raw payload is returned when it clearly failed.
    """
    if info is None:
        decrypted = packet
    else:
        decrypted = ea_symmetric_crypt(packet, info)

    if compressed is None:
        try:
            decompressed = lz77_decompress(decrypted)
        except IndexError:
            # Not valid LZ77 data — assume it was never compressed.
            return decrypted
        if decompressed == b"\0\0\0\0\0\0":
            # Decompression almost certainly failed
            return decrypted
        return decompressed
    return lz77_decompress(decrypted) if compressed else decrypted
||||||
|
|
||||||
__all__ = ("wrap", "unwrap")
|
__all__ = ("wrap", "unwrap")
|
||||||
|
|
Loading…
Reference in New Issue