From 966e7691a3276872e6ff084dda1869e63050573d Mon Sep 17 00:00:00 2001 From: purr <204539943+purrfume@users.noreply.github.com> Date: Fri, 4 Apr 2025 21:30:31 +0900 Subject: [PATCH] Add files via upload --- CODEOWNERS | 1 + Dockerfile | 22 + Makefile | 47 + README_CN.md | 23 + README_DE.MD | 14 + app/__init__.py | 13 + app/_typing.py | 27 + app/adapters/__init__.py | 0 app/adapters/database.py | 164 ++ app/api/__init__.py | 16 + app/api/domains/__init__.py | 5 + app/api/domains/cho.py | 2227 ++++++++++++++++++++++ app/api/domains/map.py | 22 + app/api/domains/osu.py | 1786 +++++++++++++++++ app/api/init_api.py | 196 ++ app/api/middlewares.py | 37 + app/api/v1/__init__.py | 10 + app/api/v1/api.py | 1080 +++++++++++ app/api/v2/__init__.py | 15 + app/api/v2/clans.py | 50 + app/api/v2/common/json.py | 29 + app/api/v2/common/responses.py | 47 + app/api/v2/maps.py | 76 + app/api/v2/models/__init__.py | 18 + app/api/v2/models/clans.py | 18 + app/api/v2/models/maps.py | 36 + app/api/v2/models/players.py | 60 + app/api/v2/models/scores.py | 36 + app/api/v2/players.py | 137 ++ app/api/v2/scores.py | 67 + app/bg_loops.py | 89 + app/commands.py | 2533 +++++++++++++++++++++++++ app/constants/__init__.py | 8 + app/constants/clientflags.py | 68 + app/constants/gamemodes.py | 75 + app/constants/mods.py | 296 +++ app/constants/privileges.py | 61 + app/constants/regexes.py | 23 + app/discord.py | 173 ++ app/encryption.py | 59 + app/logging.py | 102 + app/objects/__init__.py | 11 + app/objects/achievement.py | 29 + app/objects/beatmap.py | 996 ++++++++++ app/objects/channel.py | 138 ++ app/objects/collections.py | 314 +++ app/objects/match.py | 552 ++++++ app/objects/models.py | 8 + app/objects/player.py | 1017 ++++++++++ app/objects/score.py | 453 +++++ app/packets.py | 1289 +++++++++++++ app/repositories/__init__.py | 15 + app/repositories/achievements.py | 173 ++ app/repositories/channels.py | 184 ++ app/repositories/clans.py | 156 ++ app/repositories/client_hashes.py | 133 ++ 
app/repositories/comments.py | 125 ++ app/repositories/favourites.py | 75 + app/repositories/ingame_logins.py | 128 ++ app/repositories/logs.py | 70 + app/repositories/mail.py | 113 ++ app/repositories/map_requests.py | 97 + app/repositories/maps.py | 370 ++++ app/repositories/ratings.py | 85 + app/repositories/scores.py | 246 +++ app/repositories/stats.py | 237 +++ app/repositories/tourney_pool_maps.py | 137 ++ app/repositories/tourney_pools.py | 104 + app/repositories/user_achievements.py | 79 + app/repositories/users.py | 270 +++ app/settings.py | 73 + app/settings_utils.py | 48 + app/state/__init__.py | 24 + app/state/cache.py | 14 + app/state/services.py | 492 +++++ app/state/sessions.py | 54 + app/timer.py | 27 + app/usecases/__init__.py | 0 app/usecases/achievements.py | 30 + app/usecases/performance.py | 138 ++ app/usecases/user_achievements.py | 27 + app/utils.py | 254 +++ docker-compose.test.yml | 99 + docker-compose.yml | 92 + ext/Caddyfile | 30 + ext/nginx.conf.example | 54 + logging.yaml.example | 36 + main.py | 31 + poetry.lock | 1953 +++++++++++++++++++ pyproject.toml | 92 + 90 files changed, 20938 insertions(+) create mode 100644 CODEOWNERS create mode 100644 Dockerfile create mode 100644 Makefile create mode 100644 README_CN.md create mode 100644 README_DE.MD create mode 100644 app/__init__.py create mode 100644 app/_typing.py create mode 100644 app/adapters/__init__.py create mode 100644 app/adapters/database.py create mode 100644 app/api/__init__.py create mode 100644 app/api/domains/__init__.py create mode 100644 app/api/domains/cho.py create mode 100644 app/api/domains/map.py create mode 100644 app/api/domains/osu.py create mode 100644 app/api/init_api.py create mode 100644 app/api/middlewares.py create mode 100644 app/api/v1/__init__.py create mode 100644 app/api/v1/api.py create mode 100644 app/api/v2/__init__.py create mode 100644 app/api/v2/clans.py create mode 100644 app/api/v2/common/json.py create mode 100644 
app/api/v2/common/responses.py create mode 100644 app/api/v2/maps.py create mode 100644 app/api/v2/models/__init__.py create mode 100644 app/api/v2/models/clans.py create mode 100644 app/api/v2/models/maps.py create mode 100644 app/api/v2/models/players.py create mode 100644 app/api/v2/models/scores.py create mode 100644 app/api/v2/players.py create mode 100644 app/api/v2/scores.py create mode 100644 app/bg_loops.py create mode 100644 app/commands.py create mode 100644 app/constants/__init__.py create mode 100644 app/constants/clientflags.py create mode 100644 app/constants/gamemodes.py create mode 100644 app/constants/mods.py create mode 100644 app/constants/privileges.py create mode 100644 app/constants/regexes.py create mode 100644 app/discord.py create mode 100644 app/encryption.py create mode 100644 app/logging.py create mode 100644 app/objects/__init__.py create mode 100644 app/objects/achievement.py create mode 100644 app/objects/beatmap.py create mode 100644 app/objects/channel.py create mode 100644 app/objects/collections.py create mode 100644 app/objects/match.py create mode 100644 app/objects/models.py create mode 100644 app/objects/player.py create mode 100644 app/objects/score.py create mode 100644 app/packets.py create mode 100644 app/repositories/__init__.py create mode 100644 app/repositories/achievements.py create mode 100644 app/repositories/channels.py create mode 100644 app/repositories/clans.py create mode 100644 app/repositories/client_hashes.py create mode 100644 app/repositories/comments.py create mode 100644 app/repositories/favourites.py create mode 100644 app/repositories/ingame_logins.py create mode 100644 app/repositories/logs.py create mode 100644 app/repositories/mail.py create mode 100644 app/repositories/map_requests.py create mode 100644 app/repositories/maps.py create mode 100644 app/repositories/ratings.py create mode 100644 app/repositories/scores.py create mode 100644 app/repositories/stats.py create mode 100644 
app/repositories/tourney_pool_maps.py create mode 100644 app/repositories/tourney_pools.py create mode 100644 app/repositories/user_achievements.py create mode 100644 app/repositories/users.py create mode 100644 app/settings.py create mode 100644 app/settings_utils.py create mode 100644 app/state/__init__.py create mode 100644 app/state/cache.py create mode 100644 app/state/services.py create mode 100644 app/state/sessions.py create mode 100644 app/timer.py create mode 100644 app/usecases/__init__.py create mode 100644 app/usecases/achievements.py create mode 100644 app/usecases/performance.py create mode 100644 app/usecases/user_achievements.py create mode 100644 app/utils.py create mode 100644 docker-compose.test.yml create mode 100644 docker-compose.yml create mode 100644 ext/Caddyfile create mode 100644 ext/nginx.conf.example create mode 100644 logging.yaml.example create mode 100644 main.py create mode 100644 poetry.lock create mode 100644 pyproject.toml diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 0000000..68edc49 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1 @@ +* @cmyui @kingdom5500 @NiceAesth @tsunyoku @7mochi diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..9cf758a --- /dev/null +++ b/Dockerfile @@ -0,0 +1,22 @@ +FROM python:3.11-slim + +ENV PYTHONUNBUFFERED=1 + +WORKDIR /srv/root + +RUN apt update && apt install --no-install-recommends -y \ + git curl build-essential=12.9 \ + && rm -rf /var/lib/apt/lists/* + +COPY pyproject.toml poetry.lock ./ +RUN pip install -U pip poetry +RUN poetry config virtualenvs.create false +RUN poetry install --no-root + +RUN apt update && \ + apt install -y default-mysql-client redis-tools + +# NOTE: done last to avoid re-run of previous steps +COPY . . 
+ +ENTRYPOINT [ "scripts/start_server.sh" ] diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..ac5ef40 --- /dev/null +++ b/Makefile @@ -0,0 +1,47 @@ +#!/usr/bin/env make + +build: + if [ -d ".dbdata" ]; then sudo chmod -R 755 .dbdata; fi + docker build -t bancho:latest . + +run: + docker compose up bancho mysql redis + +run-bg: + docker compose up -d bancho mysql redis + +run-caddy: + caddy run --envfile .env --config ext/Caddyfile + +last?=1 +logs: + docker compose logs -f bancho mysql redis --tail ${last} + +shell: + poetry shell + +test: + docker compose -f docker-compose.test.yml up -d bancho-test mysql-test redis-test + docker compose -f docker-compose.test.yml exec -T bancho-test /srv/root/scripts/run-tests.sh + +lint: + poetry run pre-commit run --all-files + +type-check: + poetry run mypy . + +install: + POETRY_VIRTUALENVS_IN_PROJECT=1 poetry install --no-root + +install-dev: + POETRY_VIRTUALENVS_IN_PROJECT=1 poetry install --no-root --with dev + poetry run pre-commit install + +uninstall: + poetry env remove python + +# To bump the version number run `make bump version=` +# (DO NOT USE IF YOU DON'T KNOW WHAT YOU'RE DOING) +# https://python-poetry.org/docs/cli/#version +bump: + poetry version $(version) diff --git a/README_CN.md b/README_CN.md new file mode 100644 index 0000000..76d0948 --- /dev/null +++ b/README_CN.md @@ -0,0 +1,23 @@ +# bancho.py - 中文文档 + +[![Python 3.11+](https://img.shields.io/badge/python-3.11+-blue.svg)](https://www.python.org/downloads/) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black) +[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/osuAkatsuki/bancho.py/master.svg)](https://results.pre-commit.ci/latest/github/osuAkatsuki/bancho.py/master) +[![Discord](https://discordapp.com/api/guilds/748687781605408908/widget.png?style=shield)](https://discord.gg/ShEQgUx) + +The English version: 
[[English]](https://github.com/osuAkatsuki/bancho.py/blob/master/README.md) + +这是中文翻译哦~由 [hedgehog-qd](https://github.com/hedgehog-qd) 在根据原英语文档部署成功后翻译的。这里 +我根据我当时遇到的问题补充了一些提示,如有错误请指正,谢谢! + +bancho.py 是一个还在被不断维护的osu!后端项目,不论你的水平如何,都 +可以去使用他来开一个自己的osu!私服! + +这个项目最初是由 [Akatsuki](https://akatsuki.pw/) 团队开发的,我们的目标是创建一个非常容易 +维护并且功能很丰富的osu!私服的服务端! + +注意:bancho.py是一个后端!当你跟着下面的步骤部署完成后你可以正常登录 +并游玩。这个项目自带api,但是没有前端(就是网页),前端的话你也可以去看 +他们团队开发的前端项目。 +api文档(英语): +前端(guweb): diff --git a/README_DE.MD b/README_DE.MD new file mode 100644 index 0000000..3c24f9c --- /dev/null +++ b/README_DE.MD @@ -0,0 +1,14 @@ +# bancho.py + +[![Python 3.11+](https://img.shields.io/badge/python-3.11+-blue.svg)](https://www.python.org/downloads/) +[![Code Stil: schwarz](https://img.shields.io/badge/Code%20Stil-Schwarz-black)](https://github.com/ambv/black) +[![pre-commit.ci Status](https://results.pre-commit.ci/badge/github/osuAkatsuki/bancho.py/master.svg)](https://results.pre-commit.ci/latest/github/osuAkatsuki/bancho.py/master) +[![Discord](https://discordapp.com/api/guilds/748687781605408908/widget.png?style=shield)](https://discord.gg/ShEQgUx) + +bancho.py ist eine in Arbeit befindliche osu!-Server-Implementierung für +Entwickler aller Erfahrungsstufen, die daran interessiert sind, ihre eigene(n) +private(n) osu-Server-Instanz(en) zu hosten + +Das Projekt wird hauptsächlich vom [Akatsuki](https://akatsuki.pw/)-Team entwickelt, +und unser Ziel ist es, die am einfachsten zu wartende, zuverlässigste und +funktionsreichste osu!-Server-Implementierung auf dem Markt zu schaffen. diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..05dc03d --- /dev/null +++ b/app/__init__.py @@ -0,0 +1,13 @@ +# type: ignore +# isort: dont-add-imports + +from . import api +from . import bg_loops +from . import commands +from . import constants +from . import discord +from . import logging +from . import objects +from . import packets +from . import state +from . 
import utils diff --git a/app/_typing.py b/app/_typing.py new file mode 100644 index 0000000..f6ccd74 --- /dev/null +++ b/app/_typing.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +from ipaddress import IPv4Address +from ipaddress import IPv6Address +from typing import Any +from typing import TypeVar + +T = TypeVar("T") + +IPAddress = IPv4Address | IPv6Address + + +class _UnsetSentinel: + def __repr__(self) -> str: + return "Unset" + + def __copy__(self: T) -> T: + return self + + def __reduce__(self) -> str: + return "Unset" + + def __deepcopy__(self: T, _: Any) -> T: + return self + + +UNSET = _UnsetSentinel() diff --git a/app/adapters/__init__.py b/app/adapters/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/adapters/database.py b/app/adapters/database.py new file mode 100644 index 0000000..abec747 --- /dev/null +++ b/app/adapters/database.py @@ -0,0 +1,164 @@ +from __future__ import annotations + +from typing import Any +from typing import cast + +from databases import Database as _Database +from databases.core import Transaction +from sqlalchemy.dialects.mysql.mysqldb import MySQLDialect_mysqldb +from sqlalchemy.sql.compiler import Compiled +from sqlalchemy.sql.expression import ClauseElement + +from app import settings +from app.logging import log +from app.timer import Timer + + +class MySQLDialect(MySQLDialect_mysqldb): + default_paramstyle = "named" + + +DIALECT = MySQLDialect() + +MySQLRow = dict[str, Any] +MySQLParams = dict[str, Any] | None +MySQLQuery = ClauseElement | str + + +class Database: + def __init__(self, url: str) -> None: + self._database = _Database(url) + + async def connect(self) -> None: + await self._database.connect() + + async def disconnect(self) -> None: + await self._database.disconnect() + + def _compile(self, clause_element: ClauseElement) -> tuple[str, MySQLParams]: + compiled: Compiled = clause_element.compile( + dialect=DIALECT, + compile_kwargs={"render_postcompile": True}, + ) + return 
str(compiled), compiled.params + + async def fetch_one( + self, + query: MySQLQuery, + params: MySQLParams = None, + ) -> MySQLRow | None: + if isinstance(query, ClauseElement): + query, params = self._compile(query) + + with Timer() as timer: + row = await self._database.fetch_one(query, params) + + if settings.DEBUG: + time_elapsed = timer.elapsed() + log( + f"Executed SQL query: {query} {params} in {time_elapsed * 1000:.2f} msec.", + extra={ + "query": query, + "params": params, + "time_elapsed": time_elapsed, + }, + ) + + return dict(row._mapping) if row is not None else None + + async def fetch_all( + self, + query: MySQLQuery, + params: MySQLParams = None, + ) -> list[MySQLRow]: + if isinstance(query, ClauseElement): + query, params = self._compile(query) + + with Timer() as timer: + rows = await self._database.fetch_all(query, params) + + if settings.DEBUG: + time_elapsed = timer.elapsed() + log( + f"Executed SQL query: {query} {params} in {time_elapsed * 1000:.2f} msec.", + extra={ + "query": query, + "params": params, + "time_elapsed": time_elapsed, + }, + ) + + return [dict(row._mapping) for row in rows] + + async def fetch_val( + self, + query: MySQLQuery, + params: MySQLParams = None, + column: Any = 0, + ) -> Any: + if isinstance(query, ClauseElement): + query, params = self._compile(query) + + with Timer() as timer: + val = await self._database.fetch_val(query, params, column) + + if settings.DEBUG: + time_elapsed = timer.elapsed() + log( + f"Executed SQL query: {query} {params} in {time_elapsed * 1000:.2f} msec.", + extra={ + "query": query, + "params": params, + "time_elapsed": time_elapsed, + }, + ) + + return val + + async def execute(self, query: MySQLQuery, params: MySQLParams = None) -> int: + if isinstance(query, ClauseElement): + query, params = self._compile(query) + + with Timer() as timer: + rec_id = await self._database.execute(query, params) + + if settings.DEBUG: + time_elapsed = timer.elapsed() + log( + f"Executed SQL query: {query} 
{params} in {time_elapsed * 1000:.2f} msec.", + extra={ + "query": query, + "params": params, + "time_elapsed": time_elapsed, + }, + ) + + return cast(int, rec_id) + + # NOTE: this accepts str since current execute_many uses are not using alchemy. + # alchemy does execute_many in a single query so this method will be unneeded once raw SQL is not in use. + async def execute_many(self, query: str, params: list[MySQLParams]) -> None: + if isinstance(query, ClauseElement): + query, _ = self._compile(query) + + with Timer() as timer: + await self._database.execute_many(query, params) + + if settings.DEBUG: + time_elapsed = timer.elapsed() + log( + f"Executed SQL query: {query} {params} in {time_elapsed * 1000:.2f} msec.", + extra={ + "query": query, + "params": params, + "time_elapsed": time_elapsed, + }, + ) + + def transaction( + self, + *, + force_rollback: bool = False, + **kwargs: Any, + ) -> Transaction: + return self._database.transaction(force_rollback=force_rollback, **kwargs) diff --git a/app/api/__init__.py b/app/api/__init__.py new file mode 100644 index 0000000..bbdf5d3 --- /dev/null +++ b/app/api/__init__.py @@ -0,0 +1,16 @@ +# type: ignore +# isort: dont-add-imports + +from fastapi import APIRouter + +from .v1 import apiv1_router +from .v2 import apiv2_router + +api_router = APIRouter() + +api_router.include_router(apiv1_router) +api_router.include_router(apiv2_router) + +from . import domains +from . import init_api +from . import middlewares diff --git a/app/api/domains/__init__.py b/app/api/domains/__init__.py new file mode 100644 index 0000000..dc62e05 --- /dev/null +++ b/app/api/domains/__init__.py @@ -0,0 +1,5 @@ +# isort: dont-add-imports + +from . import cho +from . import map +from . import osu diff --git a/app/api/domains/cho.py b/app/api/domains/cho.py new file mode 100644 index 0000000..59854a1 --- /dev/null +++ b/app/api/domains/cho.py @@ -0,0 +1,2227 @@ +"""cho: handle cho packets from the osu! 
client""" + +from __future__ import annotations + +import asyncio +import hashlib +import logging +import re +import struct +import time +from collections.abc import Callable +from collections.abc import Mapping +from datetime import date +from datetime import datetime +from pathlib import Path +from typing import Literal +from typing import TypedDict +from zoneinfo import ZoneInfo + +import bcrypt +import databases.core +from fastapi import APIRouter +from fastapi import Response +from fastapi.param_functions import Header +from fastapi.requests import Request +from fastapi.responses import HTMLResponse + +import app.packets +import app.settings +import app.state +import app.usecases.performance +import app.utils +from app import commands +from app._typing import IPAddress +from app.constants import regexes +from app.constants.gamemodes import GameMode +from app.constants.mods import SPEED_CHANGING_MODS +from app.constants.mods import Mods +from app.constants.privileges import ClanPrivileges +from app.constants.privileges import ClientPrivileges +from app.constants.privileges import Privileges +from app.logging import Ansi +from app.logging import get_timestamp +from app.logging import log +from app.logging import magnitude_fmt_time +from app.objects.beatmap import Beatmap +from app.objects.beatmap import ensure_osu_file_is_available +from app.objects.channel import Channel +from app.objects.match import MAX_MATCH_NAME_LENGTH +from app.objects.match import Match +from app.objects.match import MatchTeams +from app.objects.match import MatchTeamTypes +from app.objects.match import MatchWinConditions +from app.objects.match import Slot +from app.objects.match import SlotStatus +from app.objects.player import Action +from app.objects.player import ClientDetails +from app.objects.player import OsuStream +from app.objects.player import OsuVersion +from app.objects.player import Player +from app.objects.player import PresenceFilter +from app.packets import 
BanchoPacketReader +from app.packets import BasePacket +from app.packets import ClientPackets +from app.packets import LoginFailureReason +from app.repositories import client_hashes as client_hashes_repo +from app.repositories import ingame_logins as logins_repo +from app.repositories import mail as mail_repo +from app.repositories import users as users_repo +from app.state import services +from app.usecases.performance import ScoreParams + +OSU_API_V2_CHANGELOG_URL = "https://osu.ppy.sh/api/v2/changelog" + +BEATMAPS_PATH = Path.cwd() / ".data/osu" +DISK_CHAT_LOG_FILE = ".data/logs/chat.log" + +BASE_DOMAIN = app.settings.DOMAIN + +# TODO: dear god +NOW_PLAYING_RGX = re.compile( + r"^\x01ACTION is (?:playing|editing|watching|listening to) " + rf"\[https://osu\.(?:{re.escape(BASE_DOMAIN)}|ppy\.sh)/beatmapsets/(?P\d{{1,10}})#/?(?:osu|taiko|fruits|mania)?/(?P\d{{1,10}})/? .+\]" + r"(?: <(?PTaiko|CatchTheBeat|osu!mania)>)?" + r"(?P(?: (?:-|\+|~|\|)\w+(?:~|\|)?)+)?\x01$", +) + +FIRST_USER_ID = 3 + +router = APIRouter(tags=["Bancho API"]) + + +@router.get("/") +async def bancho_http_handler() -> Response: + """Handle a request from a web browser.""" + new_line = "\n" + matches = [m for m in app.state.sessions.matches if m is not None] + players = [p for p in app.state.sessions.players if not p.is_bot_client] + + packets = app.state.packets["all"] + + return HTMLResponse( + f""" + +Running bancho.py v{app.settings.VERSION} + +{len(players)} online players +{len(matches)} matches + +packets handled ({len(packets)}) +{new_line.join([f"{packet.name} ({packet.value})" for packet in packets])} + +Source code + +""", + ) + + +@router.get("/online") +async def bancho_view_online_users() -> Response: + """see who's online""" + new_line = "\n" + + players: list[Player] = [] + bots: list[Player] = [] + for p in app.state.sessions.players: + if p.is_bot_client: + bots.append(p) + else: + players.append(p) + + id_max_length = len(str(max(p.id for p in app.state.sessions.players))) + + 
return HTMLResponse( + f""" + +back +users: +{new_line.join([f"({p.id:>{id_max_length}}): {p.safe_name}" for p in players])} +bots: +{new_line.join(f"({p.id:>{id_max_length}}): {p.safe_name}" for p in bots)} + +""", + ) + + +@router.get("/matches") +async def bancho_view_matches() -> Response: + """ongoing matches""" + new_line = "\n" + + ON_GOING = "ongoing" + IDLE = "idle" + max_status_length = len(max(ON_GOING, IDLE)) + + BEATMAP = "beatmap" + HOST = "host" + max_properties_length = max(len(BEATMAP), len(HOST)) + + matches = [m for m in app.state.sessions.matches if m is not None] + + match_id_max_length = ( + len(str(max(match.id for match in matches))) if len(matches) else 0 + ) + + return HTMLResponse( + f""" + +back +matches: +{new_line.join( + f'''{(ON_GOING if m.in_progress else IDLE):<{max_status_length}} ({m.id:>{match_id_max_length}}): {m.name} +-- ''' + + f"{new_line}-- ".join([ + f'{BEATMAP:<{max_properties_length}}: {m.map_name}', + f'{HOST:<{max_properties_length}}: <{m.host.id}> {m.host.safe_name}' + ]) for m in matches +)} + +""", + ) + + +@router.post("/") +async def bancho_handler( + request: Request, + osu_token: str | None = Header(None), + user_agent: Literal["osu!"] = Header(...), +) -> Response: + ip = app.state.services.ip_resolver.get_ip(request.headers) + + if osu_token is None: + # the client is performing a login + login_data = await handle_osu_login_request( + request.headers, + await request.body(), + ip, + ) + + return Response( + content=login_data["response_body"], + headers={"cho-token": login_data["osu_token"]}, + ) + + # get the player from the specified osu token. + player = app.state.sessions.players.get(token=osu_token) + + if not player: + # chances are, we just restarted the server + # tell their client to reconnect immediately. 
+ return Response( + content=( + app.packets.notification("Server has restarted.") + + app.packets.restart_server(0) # ms until reconnection + ), + ) + + if player.restricted: + # restricted users may only use certain packet handlers. + packet_map = app.state.packets["restricted"] + else: + packet_map = app.state.packets["all"] + + # bancho connections can be comprised of multiple packets; + # our reader is designed to iterate through them individually, + # allowing logic to be implemented around the actual handler. + # NOTE: any unhandled packets will be ignored internally. + + with memoryview(await request.body()) as body_view: + for packet in BanchoPacketReader(body_view, packet_map): + await packet.handle(player) + + player.last_recv_time = time.time() + + response_data = player.dequeue() + return Response(content=response_data) + + +""" Packet logic """ + + +def register( + packet: ClientPackets, + restricted: bool = False, +) -> Callable[[type[BasePacket]], type[BasePacket]]: + """Register a handler in `app.state.packets`.""" + + def wrapper(cls: type[BasePacket]) -> type[BasePacket]: + app.state.packets["all"][packet] = cls + + if restricted: + app.state.packets["restricted"][packet] = cls + + return cls + + return wrapper + + +@register(ClientPackets.PING, restricted=True) +class Ping(BasePacket): + async def handle(self, player: Player) -> None: + pass # ping be like + + +@register(ClientPackets.CHANGE_ACTION, restricted=True) +class ChangeAction(BasePacket): + def __init__(self, reader: BanchoPacketReader) -> None: + self.action = reader.read_u8() + self.info_text = reader.read_string() + self.map_md5 = reader.read_string() + + self.mods = reader.read_u32() + self.mode = reader.read_u8() + if self.mods & Mods.RELAX: + if self.mode == 3: # rx!mania doesn't exist + self.mods &= ~Mods.RELAX + else: + self.mode += 4 + elif self.mods & Mods.AUTOPILOT: + if self.mode in (1, 2, 3): # ap!catch, taiko and mania don't exist + self.mods &= ~Mods.AUTOPILOT + else: + 
self.mode += 8 + + self.map_id = reader.read_i32() + + async def handle(self, player: Player) -> None: + # update the user's status. + player.status.action = Action(self.action) + player.status.info_text = self.info_text + player.status.map_md5 = self.map_md5 + player.status.mods = Mods(self.mods) + player.status.mode = GameMode(self.mode) + player.status.map_id = self.map_id + + # broadcast it to all online players. + if not player.restricted: + app.state.sessions.players.enqueue(app.packets.user_stats(player)) + + +IGNORED_CHANNELS = ["#highlight", "#userlog"] + + +@register(ClientPackets.SEND_PUBLIC_MESSAGE) +class SendMessage(BasePacket): + def __init__(self, reader: BanchoPacketReader) -> None: + self.msg = reader.read_message() + + async def handle(self, player: Player) -> None: + if player.silenced: + log(f"{player} sent a message while silenced.", Ansi.LYELLOW) + return + + # remove leading/trailing whitespace + msg = self.msg.text.strip() + + if not msg: + return + + recipient = self.msg.recipient + + if recipient in IGNORED_CHANNELS: + return + elif recipient == "#spectator": + if player.spectating: + # we are spectating someone + spec_id = player.spectating.id + elif player.spectators: + # we are being spectated + spec_id = player.id + else: + return + + t_chan = app.state.sessions.channels.get_by_name(f"#spec_{spec_id}") + elif recipient == "#multiplayer": + if not player.match: + # they're not in a match? + return + + t_chan = player.match.chat + else: + t_chan = app.state.sessions.channels.get_by_name(recipient) + + if not t_chan: + log(f"{player} wrote to non-existent {recipient}.", Ansi.LYELLOW) + return + + if player not in t_chan: + log(f"{player} wrote to {recipient} without being in it.") + return + + if not t_chan.can_write(player.priv): + log(f"{player} wrote to {recipient} with insufficient privileges.") + return + + # limit message length to 2k chars + # perhaps this could be dangerous with !py..? 
+ if len(msg) > 2000: + msg = f"{msg[:2000]}... (truncated)" + player.enqueue( + app.packets.notification( + "Your message was truncated\n(exceeded 2000 characters).", + ), + ) + + if msg.startswith(app.settings.COMMAND_PREFIX): + cmd = await commands.process_commands(player, t_chan, msg) + else: + cmd = None + + if cmd: + # a command was triggered. + if not cmd["hidden"]: + t_chan.send(msg, sender=player) + if cmd["resp"] is not None: + t_chan.send_bot(cmd["resp"]) + else: + staff = app.state.sessions.players.staff + t_chan.send_selective( + msg=msg, + sender=player, + recipients=staff - {player}, + ) + if cmd["resp"] is not None: + t_chan.send_selective( + msg=cmd["resp"], + sender=app.state.sessions.bot, + recipients=staff | {player}, + ) + + else: + # no commands were triggered + + # check if the user is /np'ing a map. + # even though this is a public channel, + # we'll update the player's last np stored. + r_match = NOW_PLAYING_RGX.match(msg) + if r_match: + # the player is /np'ing a map. + # save it to their player instance + # so we can use this elsewhere. 
+ bmap = await Beatmap.from_bid(int(r_match["bid"])) + + if bmap: + # parse mode_vn int from regex + if r_match["mode_vn"] is not None: + mode_vn = {"Taiko": 1, "CatchTheBeat": 2, "osu!mania": 3}[ + r_match["mode_vn"] + ] + else: + # use player mode if not specified + mode_vn = player.status.mode.as_vanilla + + # parse the mods from regex + mods = None + if r_match["mods"] is not None: + mods = Mods.from_np(r_match["mods"][1:], mode_vn) + + player.last_np = { + "bmap": bmap, + "mods": mods, + "mode_vn": mode_vn, + "timeout": time.time() + 300, # /np's last 5mins + } + else: + # time out their previous /np + player.last_np = None + + t_chan.send(msg, sender=player) + + player.update_latest_activity_soon() + + log(f"{player} @ {t_chan}: {msg}", Ansi.LCYAN) + + with open(DISK_CHAT_LOG_FILE, "a+") as f: + f.write( + f"[{get_timestamp(full=True, tz=ZoneInfo('GMT'))}] {player} @ {t_chan}: {msg}\n", + ) + + +@register(ClientPackets.LOGOUT, restricted=True) +class Logout(BasePacket): + def __init__(self, reader: BanchoPacketReader) -> None: + reader.read_i32() # reserved + + async def handle(self, player: Player) -> None: + if (time.time() - player.login_time) < 1: + # osu! has a weird tendency to log out immediately after login. + # i've tested the times and they're generally 300-800ms, so + # we'll block any logout request within 1 second from login. + return + + player.logout() + + player.update_latest_activity_soon() + + +@register(ClientPackets.REQUEST_STATUS_UPDATE, restricted=True) +class StatsUpdateRequest(BasePacket): + async def handle(self, player: Player) -> None: + player.enqueue(app.packets.user_stats(player)) + + +# Some messages to send on welcome/restricted/etc. +# TODO: these should probably be moved to the config. 
+WELCOME_MSG = "\n".join( + ( + f"Welcome to {BASE_DOMAIN}.", + "To see a list of commands, use !help.", + "We have a public (Discord)[https://discord.gg/memorial]!", + "Enjoy the server!", + ), +) + +RESTRICTED_MSG = ( + "Your account has been restricted! " + "While restricted, you will be unable to interact with other players " + "and your scores will only be visible to you. " + "If you believe this is a mistake, or have waited longer than 3 months, " + "you can appeal in our Discord." +) + +WELCOME_NOTIFICATION = app.packets.notification( + f"Welcome back to {BASE_DOMAIN}!\nRunning bancho.py v{app.settings.VERSION}.", +) + +OFFLINE_NOTIFICATION = app.packets.notification( + "The server is currently running in offline mode; " + "some features will be unavailable.", +) + + +class LoginResponse(TypedDict): + osu_token: str + response_body: bytes + + +class LoginData(TypedDict): + username: str + password_md5: bytes + osu_version: str + utc_offset: int + display_city: bool + pm_private: bool + osu_path_md5: str + adapters_str: str + adapters_md5: str + uninstall_md5: str + disk_signature_md5: str + + +def parse_login_data(data: bytes) -> LoginData: + """Parse data from the body of a login request.""" + ( + username, + password_md5, + remainder, + ) = data.decode().split("\n", maxsplit=2) + + ( + osu_version, + utc_offset, + display_city, + client_hashes, + pm_private, + ) = remainder.split("|", maxsplit=4) + + ( + osu_path_md5, + adapters_str, + adapters_md5, + uninstall_md5, + disk_signature_md5, + ) = client_hashes[:-1].split(":", maxsplit=4) + + return { + "username": username, + "password_md5": password_md5.encode(), + "osu_version": osu_version, + "utc_offset": int(utc_offset), + "display_city": display_city == "1", + "pm_private": pm_private == "1", + "osu_path_md5": osu_path_md5, + "adapters_str": adapters_str, + "adapters_md5": adapters_md5, + "uninstall_md5": uninstall_md5, + "disk_signature_md5": disk_signature_md5, + } + + +def 
parse_osu_version_string(osu_version_string: str) -> OsuVersion | None:
    """Parse an osu! client version string (e.g. "b20230326.2cuttingedge").

    Returns None when the string does not match the expected format.
    """
    match = regexes.OSU_VERSION.match(osu_version_string)
    if match is None:
        return None

    osu_version = OsuVersion(
        # "date" group is YYYYMMDD; sliced into year/month/day.
        date=date(
            year=int(match["date"][0:4]),
            month=int(match["date"][4:6]),
            day=int(match["date"][6:8]),
        ),
        # optional ".N" hotfix revision suffix
        revision=int(match["revision"]) if match["revision"] else None,
        # no stream suffix means the stable release stream
        stream=OsuStream(match["stream"] or "stable"),
    )
    return osu_version


async def get_allowed_client_versions(osu_stream: OsuStream) -> set[date] | None:
    """
    Return a list of acceptable client versions for the given stream.

    This is used to determine whether a client is too old to connect to the server.

    Returns None if the connection to the osu! api fails.
    """
    osu_stream_str = osu_stream.value
    if osu_stream in (OsuStream.STABLE, OsuStream.BETA):
        osu_stream_str += "40"  # i wonder why this exists

    response = await services.http_client.get(
        OSU_API_V2_CHANGELOG_URL,
        params={"stream": osu_stream_str},
    )
    if not response.is_success:
        # treat any non-2xx as "api unavailable"; caller decides the policy
        return None

    allowed_client_versions: set[date] = set()
    for build in response.json()["builds"]:
        # build["version"] is a YYYYMMDD(.N) string; only the date part is used
        version = date(
            int(build["version"][0:4]),
            int(build["version"][4:6]),
            int(build["version"][6:8]),
        )
        allowed_client_versions.add(version)
        if any(entry["major"] for entry in build["changelog_entries"]):
            # this build is a major iteration to the client
            # don't allow anything older than this
            break

    return allowed_client_versions


def parse_adapters_string(adapters_string: str) -> tuple[list[str], bool]:
    """Split the client's network-adapters string into its parts.

    Returns (adapters, running_under_wine); wine clients send the literal
    string "runningunderwine" instead of a "."-terminated adapter list.
    """
    running_under_wine = adapters_string == "runningunderwine"
    # [:-1] drops the trailing "." separator; for the wine sentinel this
    # mangles the single element, but the boolean flag is what's used then.
    adapters = adapters_string[:-1].split(".")
    return adapters, running_under_wine


async def authenticate(
    username: str,
    untrusted_password: bytes,
) -> users_repo.User | None:
    """Fetch the user by name and verify the given password (md5, bcrypt'ed).

    Returns the full user row on success, or None on unknown user or
    password mismatch.
    """
    user_info = await users_repo.fetch_one(
        name=username,
        fetch_all_fields=True,
    )
    if user_info is None:
        return None

    trusted_hashword = 
user_info["pw_bcrypt"].encode() + + # in-memory bcrypt lookup cache for performance + if trusted_hashword in app.state.cache.bcrypt: # ~0.01 ms + if untrusted_password != app.state.cache.bcrypt[trusted_hashword]: + return None + else: # ~200ms + if not bcrypt.checkpw(untrusted_password, trusted_hashword): + return None + + app.state.cache.bcrypt[trusted_hashword] = untrusted_password + + return user_info + + +async def handle_osu_login_request( + headers: Mapping[str, str], + body: bytes, + ip: IPAddress, +) -> LoginResponse: + """\ + Login has no specific packet, but happens when the osu! + client sends a request without an 'osu-token' header. + + Request format: + username\npasswd_md5\nosu_version|utc_offset|display_city|client_hashes|pm_private\n + + Response format: + Packet 5 (userid), with ID: + -1: authentication failed + -2: old client + -3: banned + -4: banned + -5: error occurred + -6: needs supporter + -7: password reset + -8: requires verification + other: valid id, logged in + """ + + # parse login data + login_data = parse_login_data(body) + + # perform some validation & further parsing on the data + + osu_version = parse_osu_version_string(login_data["osu_version"]) + if osu_version is None: + return { + "osu_token": "invalid-request", + "response_body": ( + app.packets.login_reply(LoginFailureReason.AUTHENTICATION_FAILED) + + app.packets.notification("Please restart your osu! and try again.") + ), + } + + if app.settings.DISALLOW_OLD_CLIENTS: + allowed_client_versions = await get_allowed_client_versions( + osu_version.stream, + ) + # in the case where the osu! 
api fails, we'll allow the client to connect + if ( + allowed_client_versions is not None + and osu_version.date not in allowed_client_versions + ): + return { + "osu_token": "client-too-old", + "response_body": ( + app.packets.version_update() + + app.packets.login_reply(LoginFailureReason.OLD_CLIENT) + ), + } + + adapters, running_under_wine = parse_adapters_string(login_data["adapters_str"]) + if not (running_under_wine or any(adapters)): + return { + "osu_token": "empty-adapters", + "response_body": ( + app.packets.login_reply(LoginFailureReason.AUTHENTICATION_FAILED) + + app.packets.notification("Please restart your osu! and try again.") + ), + } + + ## parsing successful + + login_time = time.time() + + # disallow multiple sessions from a single user + # with the exception of tourney spectator clients + player = app.state.sessions.players.get(name=login_data["username"]) + if player and osu_version.stream != "tourney": + # check if the existing session is still active + if (login_time - player.last_recv_time) < 10: + return { + "osu_token": "user-already-logged-in", + "response_body": ( + app.packets.login_reply(LoginFailureReason.AUTHENTICATION_FAILED) + + app.packets.notification("User already logged in.") + ), + } + else: + # session is not active; replace it + player.logout() + del player + + user_info = await authenticate(login_data["username"], login_data["password_md5"]) + if user_info is None: + return { + "osu_token": "incorrect-credentials", + "response_body": ( + app.packets.notification(f"{BASE_DOMAIN}: Incorrect credentials") + + app.packets.login_reply(LoginFailureReason.AUTHENTICATION_FAILED) + ), + } + + if osu_version.stream is OsuStream.TOURNEY and not ( + user_info["priv"] & Privileges.DONATOR + and user_info["priv"] & Privileges.UNRESTRICTED + ): + # trying to use tourney client with insufficient privileges. 
+ return { + "osu_token": "no", + "response_body": app.packets.login_reply( + LoginFailureReason.AUTHENTICATION_FAILED, + ), + } + + """ login credentials verified """ + + await logins_repo.create( + user_id=user_info["id"], + ip=str(ip), + osu_ver=osu_version.date, + osu_stream=osu_version.stream, + ) + + await client_hashes_repo.create( + userid=user_info["id"], + osupath=login_data["osu_path_md5"], + adapters=login_data["adapters_md5"], + uninstall_id=login_data["uninstall_md5"], + disk_serial=login_data["disk_signature_md5"], + ) + + # TODO: store adapters individually + + # Some disk manufacturers set constant/shared ids for their products. + # In these cases, there's not a whole lot we can do -- we'll allow them thru. + INACTIONABLE_DISK_SIGNATURE_MD5S: list[str] = [ + hashlib.md5(b"0").hexdigest(), # "0" is likely the most common variant + ] + + if login_data["disk_signature_md5"] not in INACTIONABLE_DISK_SIGNATURE_MD5S: + disk_signature_md5 = login_data["disk_signature_md5"] + else: + disk_signature_md5 = None + + hw_matches = await client_hashes_repo.fetch_any_hardware_matches_for_user( + userid=user_info["id"], + running_under_wine=running_under_wine, + adapters=login_data["adapters_md5"], + uninstall_id=login_data["uninstall_md5"], + disk_serial=disk_signature_md5, + ) + + if hw_matches: + # we have other accounts with matching hashes + if user_info["priv"] & Privileges.VERIFIED: + # this is a normal, registered & verified player. + # TODO: this user already has a registered hwid. + # they may be multi-accounting; + # there may be some desirable behavior to implement here in the future. + ... + else: + # this player is not verified yet, this is their first + # time connecting in-game and submitting their hwid set. + # we will not allow any banned matches; if there are any, + # then ask the user to contact staff and resolve manually. 
+ if not all( + [hw_match["priv"] & Privileges.UNRESTRICTED for hw_match in hw_matches], + ): + return { + "osu_token": "contact-staff", + "response_body": ( + app.packets.notification( + "Please contact staff directly to create an account.", + ) + + app.packets.login_reply( + LoginFailureReason.AUTHENTICATION_FAILED, + ) + ), + } + + """ All checks passed, player is safe to login """ + + # get clan & clan priv if we're in a clan + clan_id: int | None = None + clan_priv: ClanPrivileges | None = None + if user_info["clan_id"] != 0: + clan_id = user_info["clan_id"] + clan_priv = ClanPrivileges(user_info["clan_priv"]) + + db_country = user_info["country"] + + geoloc = await app.state.services.fetch_geoloc(ip, headers) + + if geoloc is None: + return { + "osu_token": "login-failed", + "response_body": ( + app.packets.notification( + f"{BASE_DOMAIN}: Login failed. Please contact an admin.", + ) + + app.packets.login_reply(LoginFailureReason.AUTHENTICATION_FAILED) + ), + } + + if db_country == "xx": + # bugfix for old bancho.py versions when + # country wasn't stored on registration. 
+ log(f"Fixing {login_data['username']}'s country.", Ansi.LGREEN) + + await users_repo.partial_update( + id=user_info["id"], + country=geoloc["country"]["acronym"], + ) + + client_details = ClientDetails( + osu_version=osu_version, + osu_path_md5=login_data["osu_path_md5"], + adapters_md5=login_data["adapters_md5"], + uninstall_md5=login_data["uninstall_md5"], + disk_signature_md5=login_data["disk_signature_md5"], + adapters=adapters, + ip=ip, + ) + + player = Player( + id=user_info["id"], + name=user_info["name"], + priv=Privileges(user_info["priv"]), + pw_bcrypt=user_info["pw_bcrypt"].encode(), + token=Player.generate_token(), + clan_id=clan_id, + clan_priv=clan_priv, + geoloc=geoloc, + utc_offset=login_data["utc_offset"], + pm_private=login_data["pm_private"], + silence_end=user_info["silence_end"], + donor_end=user_info["donor_end"], + client_details=client_details, + login_time=login_time, + is_tourney_client=osu_version.stream == "tourney", + api_key=user_info["api_key"], + ) + + data = bytearray(app.packets.protocol_version(19)) + data += app.packets.login_reply(player.id) + + # *real* client privileges are sent with this packet, + # then the user's apparent privileges are sent in the + # userPresence packets to other players. we'll send + # supporter along with the user's privileges here, + # but not in userPresence (so that only donators + # show up with the yellow name in-game, but everyone + # gets osu!direct & other in-game perks). + data += app.packets.bancho_privileges( + player.bancho_priv | ClientPrivileges.SUPPORTER, + ) + + data += WELCOME_NOTIFICATION + + # send all appropriate channel info to our player. + # the osu! client will attempt to join the channels. 
+ for channel in app.state.sessions.channels: + if ( + not channel.auto_join + or not channel.can_read(player.priv) + or channel._name == "#lobby" # (can't be in mp lobby @ login) + ): + continue + + # send chan info to all players who can see + # the channel (to update their playercounts) + chan_info_packet = app.packets.channel_info( + channel._name, + channel.topic, + len(channel.players), + ) + + data += chan_info_packet + + for o in app.state.sessions.players: + if channel.can_read(o.priv): + o.enqueue(chan_info_packet) + + # tells osu! to reorder channels based on config. + data += app.packets.channel_info_end() + + # fetch some of the player's + # information from sql to be cached. + await player.stats_from_sql_full() + await player.relationships_from_sql() + + # TODO: fetch player.recent_scores from sql + + data += app.packets.main_menu_icon( + icon_url=app.settings.MENU_ICON_URL, + onclick_url=app.settings.MENU_ONCLICK_URL, + ) + data += app.packets.friends_list(player.friends) + data += app.packets.silence_end(player.remaining_silence) + + # update our new player's stats, and broadcast them. + user_data = app.packets.user_presence(player) + app.packets.user_stats(player) + + data += user_data + + if not player.restricted: + # player is unrestricted, two way data + for o in app.state.sessions.players: + # enqueue us to them + o.enqueue(user_data) + + # enqueue them to us. + if not o.restricted: + if o is app.state.sessions.bot: + # optimization for bot since it's + # the most frequently requested user + data += app.packets.bot_presence(o) + data += app.packets.bot_stats(o) + else: + data += app.packets.user_presence(o) + data += app.packets.user_stats(o) + + # the player may have been sent mail while offline, + # enqueue any messages from their respective authors. 
+ mail_rows = await mail_repo.fetch_all_mail_to_user( + user_id=player.id, + read=False, + ) + + if mail_rows: + sent_to: set[int] = set() + + for msg in mail_rows: + # Add "Unread messages" header as the first message + # for any given sender, to make it clear that the + # messages are coming from the mail system. + if msg["from_id"] not in sent_to: + data += app.packets.send_message( + sender=msg["from_name"], + msg="Unread messages", + recipient=msg["to_name"], + sender_id=msg["from_id"], + ) + sent_to.add(msg["from_id"]) + + msg_time = datetime.fromtimestamp(msg["time"]) + data += app.packets.send_message( + sender=msg["from_name"], + msg=f'[{msg_time:%a %b %d @ %H:%M%p}] {msg["msg"]}', + recipient=msg["to_name"], + sender_id=msg["from_id"], + ) + + if not player.priv & Privileges.VERIFIED: + # this is the player's first login, verify their + # account & send info about the server/its usage. + await player.add_privs(Privileges.VERIFIED) + + if player.id == FIRST_USER_ID: + # this is the first player registering on + # the server, grant them full privileges. + await player.add_privs( + Privileges.STAFF + | Privileges.NOMINATOR + | Privileges.WHITELISTED + | Privileges.TOURNEY_MANAGER + | Privileges.DONATOR + | Privileges.ALUMNI, + ) + + data += app.packets.send_message( + sender=app.state.sessions.bot.name, + msg=WELCOME_MSG, + recipient=player.name, + sender_id=app.state.sessions.bot.id, + ) + + else: + # player is restricted, one way data + for o in app.state.sessions.players.unrestricted: + # enqueue them to us. 
+ if o is app.state.sessions.bot: + # optimization for bot since it's + # the most frequently requested user + data += app.packets.bot_presence(o) + data += app.packets.bot_stats(o) + else: + data += app.packets.user_presence(o) + data += app.packets.user_stats(o) + + data += app.packets.account_restricted() + data += app.packets.send_message( + sender=app.state.sessions.bot.name, + msg=RESTRICTED_MSG, + recipient=player.name, + sender_id=app.state.sessions.bot.id, + ) + + # add `p` to the global player list, + # making them officially logged in. + app.state.sessions.players.append(player) + + if app.state.services.datadog: + if not player.restricted: + app.state.services.datadog.increment("bancho.online_players") + + time_taken = time.time() - login_time + app.state.services.datadog.histogram("bancho.login_time", time_taken) + + user_os = "unix (wine)" if running_under_wine else "win32" + country_code = player.geoloc["country"]["acronym"].upper() + + log( + f"{player} logged in from {country_code} using {login_data['osu_version']} on {user_os}", + Ansi.LCYAN, + ) + + player.update_latest_activity_soon() + + return {"osu_token": player.token, "response_body": bytes(data)} + + +@register(ClientPackets.START_SPECTATING) +class StartSpectating(BasePacket): + def __init__(self, reader: BanchoPacketReader) -> None: + self.target_id = reader.read_i32() + + async def handle(self, player: Player) -> None: + new_host = app.state.sessions.players.get(id=self.target_id) + if not new_host: + log( + f"{player} tried to spectate nonexistant id {self.target_id}.", + Ansi.LYELLOW, + ) + return + + current_host = player.spectating + if current_host: + if current_host == new_host: + # host hasn't changed, they didn't have + # the map but have downloaded it. + + if not player.stealth: + # NOTE: `player` would have already received the other + # fellow spectators, so no need to resend them. 
+ new_host.enqueue(app.packets.spectator_joined(player.id)) + + player_joined = app.packets.fellow_spectator_joined(player.id) + for spec in new_host.spectators: + if spec is not player: + spec.enqueue(player_joined) + + return + + current_host.remove_spectator(player) + + new_host.add_spectator(player) + + +@register(ClientPackets.STOP_SPECTATING) +class StopSpectating(BasePacket): + async def handle(self, player: Player) -> None: + host = player.spectating + + if not host: + log(f"{player} tried to stop spectating when they're not..?", Ansi.LRED) + return + + host.remove_spectator(player) + + +@register(ClientPackets.SPECTATE_FRAMES) +class SpectateFrames(BasePacket): + def __init__(self, reader: BanchoPacketReader) -> None: + self.frame_bundle = reader.read_replayframe_bundle() + + async def handle(self, player: Player) -> None: + # ANTICHEAT: TODO: perform validations on the parsed frame bundle + # to ensure it's not being tamperated with or weaponized. + + # NOTE: this is given a fastpath here for efficiency due to the + # sheer rate of usage of these packets in spectator mode. 
+ + # data = app.packets.spectateFrames(self.frame_bundle.raw_data) + data = ( + struct.pack(" None: + if not player.spectating: + log(f"{player} sent can't spectate while not spectating?", Ansi.LRED) + return + + if not player.stealth: + data = app.packets.spectator_cant_spectate(player.id) + + host = player.spectating + host.enqueue(data) + + for t in host.spectators: + t.enqueue(data) + + +@register(ClientPackets.SEND_PRIVATE_MESSAGE) +class SendPrivateMessage(BasePacket): + def __init__(self, reader: BanchoPacketReader) -> None: + self.msg = reader.read_message() + + async def handle(self, player: Player) -> None: + if player.silenced: + if app.settings.DEBUG: + log(f"{player} tried to send a DM while silenced.", Ansi.LYELLOW) + return + + # remove leading/trailing whitespace + msg = self.msg.text.strip() + + if not msg: + return + + target_name = self.msg.recipient + + # allow this to get from sql - players can receive + # messages offline, due to the mail system. B) + target = await app.state.sessions.players.from_cache_or_sql(name=target_name) + if not target: + if app.settings.DEBUG: + log( + f"{player} tried to write to non-existent user {target_name}.", + Ansi.LYELLOW, + ) + return + + if player.id in target.blocks: + player.enqueue(app.packets.user_dm_blocked(target_name)) + + if app.settings.DEBUG: + log(f"{player} tried to message {target}, but they have them blocked.") + return + + if target.pm_private and player.id not in target.friends: + player.enqueue(app.packets.user_dm_blocked(target_name)) + + if app.settings.DEBUG: + log(f"{player} tried to message {target}, but they are blocking dms.") + return + + if target.silenced: + # if target is silenced, inform player. + player.enqueue(app.packets.target_silenced(target_name)) + + if app.settings.DEBUG: + log(f"{player} tried to message {target}, but they are silenced.") + return + + # limit message length to 2k chars + # perhaps this could be dangerous with !py..? 
+ if len(msg) > 2000: + msg = f"{msg[:2000]}... (truncated)" + player.enqueue( + app.packets.notification( + "Your message was truncated\n(exceeded 2000 characters).", + ), + ) + + if target.status.action == Action.Afk and target.away_msg: + # send away message if target is afk and has one set. + player.send(target.away_msg, sender=target) + + if target is not app.state.sessions.bot: + # target is not bot, send the message normally if online + if target.is_online: + target.send(msg, sender=player) + else: + # inform user they're offline, but + # will receive the mail @ next login. + player.enqueue( + app.packets.notification( + f"{target.name} is currently offline, but will " + "receive your messsage on their next login.", + ), + ) + + # insert mail into db, marked as unread. + await mail_repo.create( + from_id=player.id, + to_id=target.id, + msg=msg, + ) + else: + # messaging the bot, check for commands & /np. + if msg.startswith(app.settings.COMMAND_PREFIX): + cmd = await commands.process_commands(player, target, msg) + else: + cmd = None + + if cmd: + # command triggered, send response if any. + if cmd["resp"] is not None: + player.send(cmd["resp"], sender=target) + else: + # no commands triggered. + r_match = NOW_PLAYING_RGX.match(msg) + if r_match: + # user is /np'ing a map. + # save it to their player instance + # so we can use this elsewhere. 
+ bmap = await Beatmap.from_bid(int(r_match["bid"])) + + if bmap: + # parse mode_vn int from regex + if r_match["mode_vn"] is not None: + mode_vn = {"Taiko": 1, "CatchTheBeat": 2, "osu!mania": 3}[ + r_match["mode_vn"] + ] + else: + # use player mode if not specified + mode_vn = player.status.mode.as_vanilla + + # parse the mods from regex + mods = None + if r_match["mods"] is not None: + mods = Mods.from_np(r_match["mods"][1:], mode_vn) + + player.last_np = { + "bmap": bmap, + "mode_vn": mode_vn, + "mods": mods, + "timeout": time.time() + 300, # /np's last 5mins + } + + # calculate generic pp values from their /np + + osu_file_available = await ensure_osu_file_is_available( + bmap.id, + expected_md5=bmap.md5, + ) + if not osu_file_available: + resp_msg = ( + "Mapfile could not be found; " + "this incident has been reported." + ) + else: + # calculate pp for common generic values + pp_calc_st = time.time_ns() + + mods = None + if r_match["mods"] is not None: + # [1:] to remove leading whitespace + mods_str = r_match["mods"][1:] + mods = Mods.from_np(mods_str, mode_vn) + + scores = [ + ScoreParams( + mode=mode_vn, + mods=int(mods) if mods else None, + acc=acc, + ) + for acc in app.settings.PP_CACHED_ACCURACIES + ] + + results = app.usecases.performance.calculate_performances( + osu_file_path=str(BEATMAPS_PATH / f"{bmap.id}.osu"), + scores=scores, + ) + + resp_msg = " | ".join( + f"{acc}%: {result['performance']['pp']:,.2f}pp" + for acc, result in zip( + app.settings.PP_CACHED_ACCURACIES, + results, + ) + ) + + elapsed = time.time_ns() - pp_calc_st + resp_msg += f" | Elapsed: {magnitude_fmt_time(elapsed)}" + else: + resp_msg = "Could not find map." 
+ + # time out their previous /np + player.last_np = None + + player.send(resp_msg, sender=target) + + player.update_latest_activity_soon() + + log(f"{player} @ {target}: {msg}", Ansi.LCYAN) + with open(DISK_CHAT_LOG_FILE, "a+") as f: + f.write( + f"[{get_timestamp(full=True, tz=ZoneInfo('GMT'))}] {player} @ {target}: {msg}\n", + ) + + +@register(ClientPackets.PART_LOBBY) +class LobbyPart(BasePacket): + async def handle(self, player: Player) -> None: + player.in_lobby = False + + +@register(ClientPackets.JOIN_LOBBY) +class LobbyJoin(BasePacket): + async def handle(self, player: Player) -> None: + player.in_lobby = True + + for match in app.state.sessions.matches: + if match is not None: + try: + player.enqueue(app.packets.new_match(match)) + except ValueError: + log( + f"Failed to send match {match.id} to player joining lobby; likely due to missing host", + Ansi.LYELLOW, + ) + stacktrace = app.utils.get_appropriate_stacktrace() + await app.state.services.log_strange_occurrence(stacktrace) + continue + + +def validate_match_data( + untrusted_match_data: app.packets.MultiplayerMatch, + expected_host_id: int, +) -> bool: + return all( + ( + untrusted_match_data.host_id == expected_host_id, + len(untrusted_match_data.name) <= MAX_MATCH_NAME_LENGTH, + ), + ) + + +@register(ClientPackets.CREATE_MATCH) +class MatchCreate(BasePacket): + def __init__(self, reader: BanchoPacketReader) -> None: + self.match_data = reader.read_match() + + async def handle(self, player: Player) -> None: + if not validate_match_data(self.match_data, expected_host_id=player.id): + log(f"{player} tried to create a match with invalid data.", Ansi.LYELLOW) + return + + if player.restricted: + player.enqueue( + app.packets.match_join_fail() + + app.packets.notification( + "Multiplayer is not available while restricted.", + ), + ) + return + + if player.silenced: + player.enqueue( + app.packets.match_join_fail() + + app.packets.notification( + "Multiplayer is not available while silenced.", + ), + ) + 
return + + match_id = app.state.sessions.matches.get_free() + + if match_id is None: + # failed to create match (match slots full). + player.send_bot("Failed to create match (no slots available).") + player.enqueue(app.packets.match_join_fail()) + return + + # create the channel and add it + # to the global channel list as + # an instanced channel. + chat_channel = Channel( + name=f"#multi_{match_id}", + topic=f"MID {match_id}'s multiplayer channel.", + auto_join=False, + instance=True, + ) + + match = Match( + id=match_id, + name=self.match_data.name, + password=self.match_data.passwd.removesuffix("//private"), + has_public_history=not self.match_data.passwd.endswith("//private"), + map_name=self.match_data.map_name, + map_id=self.match_data.map_id, + map_md5=self.match_data.map_md5, + host_id=self.match_data.host_id, + mode=GameMode(self.match_data.mode), + mods=Mods(self.match_data.mods), + win_condition=MatchWinConditions(self.match_data.win_condition), + team_type=MatchTeamTypes(self.match_data.team_type), + freemods=bool(self.match_data.freemods), + seed=self.match_data.seed, + chat_channel=chat_channel, + ) + + app.state.sessions.matches[match_id] = match + app.state.sessions.channels.append(chat_channel) + match.chat = chat_channel + + player.update_latest_activity_soon() + player.join_match(match, self.match_data.passwd) + + match.chat.send_bot(f"Match created by {player.name}.") + log(f"{player} created a new multiplayer match.") + + +@register(ClientPackets.JOIN_MATCH) +class MatchJoin(BasePacket): + def __init__(self, reader: BanchoPacketReader) -> None: + self.match_id = reader.read_i32() + self.match_passwd = reader.read_string() + + async def handle(self, player: Player) -> None: + match = app.state.sessions.matches[self.match_id] + if not match: + log(f"{player} tried to join a non-existant mp lobby?") + player.enqueue(app.packets.match_join_fail()) + return + + if player.restricted: + player.enqueue( + app.packets.match_join_fail() + + 
app.packets.notification( + "Multiplayer is not available while restricted.", + ), + ) + return + + if player.silenced: + player.enqueue( + app.packets.match_join_fail() + + app.packets.notification( + "Multiplayer is not available while silenced.", + ), + ) + return + + player.update_latest_activity_soon() + player.join_match(match, self.match_passwd) + + +@register(ClientPackets.PART_MATCH) +class MatchPart(BasePacket): + async def handle(self, player: Player) -> None: + player.update_latest_activity_soon() + player.leave_match() + + +@register(ClientPackets.MATCH_CHANGE_SLOT) +class MatchChangeSlot(BasePacket): + def __init__(self, reader: BanchoPacketReader) -> None: + self.slot_id = reader.read_i32() + + async def handle(self, player: Player) -> None: + if player.match is None: + return + + # read new slot ID + if not 0 <= self.slot_id < 16: + return + + if player.match.slots[self.slot_id].status != SlotStatus.open: + log(f"{player} tried to move into non-open slot.", Ansi.LYELLOW) + return + + # swap with current slot. + slot = player.match.get_slot(player) + assert slot is not None + + player.match.slots[self.slot_id].copy_from(slot) + slot.reset() + + player.match.enqueue_state() # technically not needed for host? 
+ + +@register(ClientPackets.MATCH_READY) +class MatchReady(BasePacket): + async def handle(self, player: Player) -> None: + if player.match is None: + return + + slot = player.match.get_slot(player) + assert slot is not None + + slot.status = SlotStatus.ready + player.match.enqueue_state(lobby=False) + + +@register(ClientPackets.MATCH_LOCK) +class MatchLock(BasePacket): + def __init__(self, reader: BanchoPacketReader) -> None: + self.slot_id = reader.read_i32() + + async def handle(self, player: Player) -> None: + if player.match is None: + return + + if player is not player.match.host: + log(f"{player} attempted to lock match as non-host.", Ansi.LYELLOW) + return + + # read new slot ID + if not 0 <= self.slot_id < 16: + return + + slot = player.match.slots[self.slot_id] + + if slot.status == SlotStatus.locked: + slot.status = SlotStatus.open + else: + if slot.player is player.match.host: + # don't allow the match host to kick + # themselves by clicking their crown + return + + if slot.player: + # uggggggh i hate trusting the osu! client + # man why is it designed like this + # TODO: probably going to end up changing + ... # slot.reset() + + slot.status = SlotStatus.locked + + player.match.enqueue_state() + + +@register(ClientPackets.MATCH_CHANGE_SETTINGS) +class MatchChangeSettings(BasePacket): + def __init__(self, reader: BanchoPacketReader) -> None: + self.match_data = reader.read_match() + + async def handle(self, player: Player) -> None: + if not validate_match_data(self.match_data, expected_host_id=player.id): + log( + f"{player} tried to change match settings with invalid data.", + Ansi.LYELLOW, + ) + return + + if player.match is None: + return + + if player is not player.match.host: + log(f"{player} attempted to change settings as non-host.", Ansi.LYELLOW) + return + + if self.match_data.freemods != player.match.freemods: + # freemods status has been changed. 
+ player.match.freemods = self.match_data.freemods + + if self.match_data.freemods: + # match mods -> active slot mods. + for slot in player.match.slots: + if slot.player is not None: + # the slot takes any non-speed + # changing mods from the match. + slot.mods = player.match.mods & ~SPEED_CHANGING_MODS + + # keep only speed-changing mods. + player.match.mods &= SPEED_CHANGING_MODS + else: + # host mods -> match mods. + host = player.match.get_host_slot() # should always exist + assert host is not None + + # the match keeps any speed-changing mods, + # and also takes any mods the host has enabled. + player.match.mods &= SPEED_CHANGING_MODS + player.match.mods |= host.mods + + for slot in player.match.slots: + if slot.player is not None: + slot.mods = Mods.NOMOD + + if self.match_data.map_id == -1: + # map being changed, unready players. + player.match.unready_players(expected=SlotStatus.ready) + player.match.prev_map_id = player.match.map_id + + player.match.map_id = -1 + player.match.map_md5 = "" + player.match.map_name = "" + elif player.match.map_id == -1: + if player.match.prev_map_id != self.match_data.map_id: + # new map has been chosen, send to match chat. + map_url = ( + f"https://osu.{app.settings.DOMAIN}/b/{self.match_data.map_id}" + ) + map_embed = f"[{map_url} {self.match_data.map_name}]" + player.match.chat.send_bot(f"Selected: {map_embed}.") + + # use our serverside version if we have it, but + # still allow for users to pick unknown maps. 
+ bmap = await Beatmap.from_md5(self.match_data.map_md5) + + if bmap: + player.match.map_id = bmap.id + player.match.map_md5 = bmap.md5 + player.match.map_name = bmap.full_name + player.match.mode = GameMode(player.match.host.status.mode.as_vanilla) + else: + player.match.map_id = self.match_data.map_id + player.match.map_md5 = self.match_data.map_md5 + player.match.map_name = self.match_data.map_name + player.match.mode = GameMode(self.match_data.mode) + + if player.match.team_type != self.match_data.team_type: + # if theres currently a scrim going on, only allow + # team type to change by using the !mp teams command. + if player.match.is_scrimming: + _team = ("head-to-head", "tag-coop", "team-vs", "tag-team-vs")[ + self.match_data.team_type + ] + + msg = ( + "Changing team type while scrimming will reset " + "the overall score - to do so, please use the " + f"!mp teams {_team} command." + ) + player.match.chat.send_bot(msg) + else: + # find the new appropriate default team. + # defaults are (ffa: neutral, teams: red). + if self.match_data.team_type in ( + MatchTeamTypes.head_to_head, + MatchTeamTypes.tag_coop, + ): + new_t = MatchTeams.neutral + else: + new_t = MatchTeams.red + + # change each active slots team to + # fit the correspoding team type. + for slot in player.match.slots: + if slot.player is not None: + slot.team = new_t + + # change the matches'. + player.match.team_type = MatchTeamTypes(self.match_data.team_type) + + if player.match.win_condition != self.match_data.win_condition: + # win condition changing; if `use_pp_scoring` + # is enabled, disable it. always use new cond. 
+ if player.match.use_pp_scoring: + player.match.use_pp_scoring = False + + player.match.win_condition = MatchWinConditions( + self.match_data.win_condition, + ) + + player.match.name = self.match_data.name + + player.match.enqueue_state() + + +@register(ClientPackets.MATCH_START) +class MatchStart(BasePacket): + async def handle(self, player: Player) -> None: + if player.match is None: + return + + if player is not player.match.host: + log(f"{player} attempted to start match as non-host.", Ansi.LYELLOW) + return + + player.match.start() + + +@register(ClientPackets.MATCH_SCORE_UPDATE) +class MatchScoreUpdate(BasePacket): + def __init__(self, reader: BanchoPacketReader) -> None: + self.play_data = reader.read_raw() + + async def handle(self, player: Player) -> None: + # this runs very frequently in matches, + # so it's written to run pretty quick. + + if player.match is None: + return + + slot_id = player.match.get_slot_id(player) + assert slot_id is not None + + # if scorev2 is enabled, read an extra 8 bytes. + buf = bytearray(b"0\x00\x00") + buf += len(self.play_data).to_bytes(4, "little") + buf += self.play_data + buf[11] = slot_id + + player.match.enqueue(bytes(buf), lobby=False) + + +@register(ClientPackets.MATCH_COMPLETE) +class MatchComplete(BasePacket): + async def handle(self, player: Player) -> None: + if player.match is None: + return + + slot = player.match.get_slot(player) + assert slot is not None + + slot.status = SlotStatus.complete + + # check if there are any players that haven't finished. + if any([s.status == SlotStatus.playing for s in player.match.slots]): + return + + # find any players just sitting in the multi room + # that have not been playing the map; they don't + # need to know all the players have completed, only + # the ones who are playing (just new match info). 
@register(ClientPackets.MATCH_CHANGE_MODS)
class MatchChangeMods(BasePacket):
    """Apply a player's mod selection change within a multiplayer match."""

    def __init__(self, reader: BanchoPacketReader) -> None:
        self.mods = reader.read_i32()

    async def handle(self, player: Player) -> None:
        multi = player.match
        if multi is None:
            return

        if not multi.freemods:
            # with freemods disabled, only the host may change match mods.
            if player is not multi.host:
                log(f"{player} attempted to change mods as non-host.", Ansi.LYELLOW)
                return

            multi.mods = Mods(self.mods)
        else:
            # freemods: the host controls speed-changing mods for the whole
            # lobby, while each player keeps their own remaining mods on
            # their slot.
            if player is multi.host:
                multi.mods = Mods(self.mods & SPEED_CHANGING_MODS)

            player_slot = multi.get_slot(player)
            assert player_slot is not None

            player_slot.mods = Mods(self.mods & ~SPEED_CHANGING_MODS)

        multi.enqueue_state()
@register(ClientPackets.MATCH_FAILED)
class MatchFailed(BasePacket):
    """Broadcast to the match that this player has failed the current map."""

    async def handle(self, player: Player) -> None:
        multi = player.match
        if multi is None:
            return

        # resolve which slot the player occupies so the other clients
        # know whose fail indicator to display.
        failed_slot_id = multi.get_slot_id(player)
        assert failed_slot_id is not None

        multi.enqueue(
            app.packets.match_player_failed(failed_slot_id),
            lobby=False,
        )
@register(ClientPackets.MATCH_TRANSFER_HOST)
class MatchTransferHost(BasePacket):
    """Hand off match host privileges to the player in a given slot."""

    def __init__(self, reader: BanchoPacketReader) -> None:
        self.slot_id = reader.read_i32()

    async def handle(self, player: Player) -> None:
        multi = player.match
        if multi is None:
            return

        # only the current host may transfer host privileges.
        if player is not multi.host:
            log(f"{player} attempted to transfer host as non-host.", Ansi.LYELLOW)
            return

        # reject out-of-range slot ids (matches hold 16 slots).
        if not 0 <= self.slot_id < 16:
            return

        new_host = multi.slots[self.slot_id].player
        if not new_host:
            log(f"{player} tried to transfer host to an empty slot?")
            return

        multi.host_id = new_host.id
        multi.host.enqueue(app.packets.match_transfer_host())
        multi.enqueue_state()
@register(ClientPackets.TOURNAMENT_LEAVE_MATCH_CHANNEL)
class TourneyMatchLeaveChannel(BasePacket):
    """Detach a tournament client from a match's chat channel."""

    def __init__(self, reader: BanchoPacketReader) -> None:
        self.match_id = reader.read_i32()

    async def handle(self, player: Player) -> None:
        if not 0 <= self.match_id < 64:
            return  # invalid match id

        if not player.priv & Privileges.DONATOR:
            return  # insufficient privs

        multi = app.state.sessions.matches[self.match_id]
        if not (multi and player.id in multi.tourney_clients):
            return  # match not found

        # leave the match chat & drop the tourney-client registration.
        player.leave_channel(multi.chat)
        multi.tourney_clients.remove(player.id)
@register(ClientPackets.MATCH_CHANGE_TEAM)
class MatchChangeTeam(BasePacket):
    """Toggle the requesting player's team within their match."""

    async def handle(self, player: Player) -> None:
        if player.match is None:
            return

        player_slot = player.match.get_slot(player)
        assert player_slot is not None

        # blue flips to red; anything else (red or neutral) flips to blue.
        player_slot.team = (
            MatchTeams.red
            if player_slot.team == MatchTeams.blue
            else MatchTeams.blue
        )

        player.match.enqueue_state(lobby=False)
@register(ClientPackets.USER_STATS_REQUEST, restricted=True)
class StatsRequest(BasePacket):
    """Send the requester the current stats of the requested online users."""

    def __init__(self, reader: BanchoPacketReader) -> None:
        self.user_ids = reader.read_i32_list_i16l()

    async def handle(self, player: Player) -> None:
        # build a set for O(1) membership tests (was a list, O(n) per id,
        # and misspelled "unrestrcted"); also replaces the non-idiomatic
        # lambda-assignment filter with a plain guard in the loop.
        unrestricted_ids = {p.id for p in app.state.sessions.players.unrestricted}

        for user_id in self.user_ids:
            # skip the requester themself and anyone restricted/offline.
            if user_id == player.id or user_id not in unrestricted_ids:
                continue

            target = app.state.sessions.players.get(id=user_id)
            if not target:
                continue

            if target is app.state.sessions.bot:
                # optimization for bot since it's
                # the most frequently requested user
                packet = app.packets.bot_stats(target)
            else:
                packet = app.packets.user_stats(target)

            player.enqueue(packet)
@register(ClientPackets.USER_PRESENCE_REQUEST_ALL)
class UserPresenceRequestAll(BasePacket):
    """Send the requester presence info for every unrestricted player."""

    def __init__(self, reader: BanchoPacketReader) -> None:
        self.ingame_time = reader.read_i32()

    async def handle(self, player: Player) -> None:
        # NOTE: this packet is only used when there
        # are >256 players visible to the client.

        buffer = bytearray()

        # BUG FIX: the loop variable was previously named `player`, shadowing
        # the handler's parameter — the combined presence data ended up being
        # enqueued to the *last* unrestricted player instead of the requester.
        for other in app.state.sessions.players.unrestricted:
            buffer += app.packets.user_presence(other)

        player.enqueue(bytes(buffer))
@router.get("/{file_path:path}")
async def everything(request: Request) -> RedirectResponse:
    """Permanently redirect any beatmap-asset request to osu!'s b.ppy.sh."""
    upstream_url = f"https://b.ppy.sh{request['path']}"
    return RedirectResponse(
        url=upstream_url,
        status_code=status.HTTP_301_MOVED_PERMANENTLY,
    )
app.constants.mods import Mods +from app.constants.privileges import Privileges +from app.logging import Ansi +from app.logging import log +from app.objects import models +from app.objects.beatmap import Beatmap +from app.objects.beatmap import RankedStatus +from app.objects.beatmap import ensure_osu_file_is_available +from app.objects.player import Player +from app.objects.score import Grade +from app.objects.score import Score +from app.objects.score import SubmissionStatus +from app.repositories import clans as clans_repo +from app.repositories import comments as comments_repo +from app.repositories import favourites as favourites_repo +from app.repositories import mail as mail_repo +from app.repositories import maps as maps_repo +from app.repositories import ratings as ratings_repo +from app.repositories import scores as scores_repo +from app.repositories import stats as stats_repo +from app.repositories import users as users_repo +from app.repositories.achievements import Achievement +from app.usecases import achievements as achievements_usecases +from app.usecases import user_achievements as user_achievements_usecases +from app.utils import escape_enum +from app.utils import pymysql_encode + +BEATMAPS_PATH = SystemPath.cwd() / ".data/osu" +REPLAYS_PATH = SystemPath.cwd() / ".data/osr" +SCREENSHOTS_PATH = SystemPath.cwd() / ".data/ss" + + +router = APIRouter( + tags=["osu! 
@cache
def authenticate_player_session(
    param_function: Callable[..., Any],
    username_alias: str = "u",
    pw_md5_alias: str = "p",
    err: Any | None = None,
) -> Callable[[str, str], Awaitable[Player]]:
    """Build a FastAPI dependency that authenticates an osu! client session.

    Args:
        param_function: the FastAPI parameter constructor used to pull
            credentials from the request (e.g. ``Query`` or ``Form``).
        username_alias: request parameter name carrying the username.
        pw_md5_alias: request parameter name carrying the password's md5.
        err: response detail returned on failed authentication.

    Returns:
        An async dependency resolving to the logged-in ``Player``; raises
        ``HTTPException`` with 401 when the credentials match no session.

    ``@cache`` memoizes one wrapper per unique argument combination, so
    routes sharing the same aliases reuse a single dependency callable.
    """

    async def wrapper(
        username: str = param_function(..., alias=username_alias),
        pw_md5: str = param_function(..., alias=pw_md5_alias),
    ) -> Player:
        # usernames may arrive url-encoded (e.g. spaces as %20); decode first.
        player = await app.state.sessions.players.from_login(
            name=unquote(username),
            pw_md5=pw_md5,
        )
        if player:
            return player

        # player login incorrect
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=err,
        )

    return wrapper
def bancho_to_osuapi_status(bancho_status: int) -> int:
    """Translate a bancho ranked-status value to its osu!api equivalent.

    Raises KeyError for statuses with no osu!api counterpart (e.g. 1).
    """
    # bancho statuses (0, 2, 3, 4, 5) map onto osu!api's 0-4 in order.
    conversion = {bancho: osuapi for osuapi, bancho in enumerate((0, 2, 3, 4, 5))}
    return conversion[bancho_status]
@router.get("/web/osu-addfavourite.php")
async def osuAddFavourite(
    player: Player = Depends(authenticate_player_session(Query, "u", "h")),
    map_set_id: int = Query(..., alias="a"),
) -> Response:
    """Add a beatmap set to the player's favourites (osu! client endpoint)."""
    # check if they already have this favourited.
    # NOTE(review): arguments are passed positionally here but by keyword in
    # the create() call below — confirm fetch_one's signature is
    # (userid, setid) in app/repositories/favourites.py.
    if await favourites_repo.fetch_one(player.id, map_set_id):
        return Response(b"You've already favourited this beatmap!")

    # add favourite
    await favourites_repo.create(
        userid=player.id,
        setid=map_set_id,
    )

    return Response(b"Added favourite!")
+ ), + alias="b", + ), + player: Player = Depends(authenticate_player_session(Query, "us", "ha")), +) -> Response: + if beatmap_id_or_hidden_flag[0] != "a": + # not anticheat related, tell the + # client not to send any more for now. + return Response(b"-3") + + flags = LastFMFlags(int(beatmap_id_or_hidden_flag[1:])) + + if flags & (LastFMFlags.HQ_ASSEMBLY | LastFMFlags.HQ_FILE): + # Player is currently running hq!osu; could possibly + # be a separate client, but it's very unlikely. + + await player.restrict( + admin=app.state.sessions.bot, + reason=f"hq!osu running ({flags})", + ) + + # refresh their client state + if player.is_online: + player.logout() + + #Ban the player + return Response(b"-3") + + if flags & LastFMFlags.REGISTRY_EDITS: + # Player has registry edits left from + # hq!osu's multiaccounting tool. This + # does not necessarily mean they are + # using it now, but they have in the past. + + if random.randrange(32) == 0: + # Random chance (1/32) for a ban. + await player.restrict( + admin=app.state.sessions.bot, + reason="hq!osu relife 1/32", + ) + + # refresh their client state + if player.is_online: + player.logout() + + # Ban the player. + return Response(b"-3") + + player.enqueue( + app.packets.notification( + "\n".join( + [ + "Hey!", + "It appears you have hq!osu's multiaccounting tool (relife) enabled.", + "This tool leaves a change in your registry that the osu! client can detect.", + "Please re-install relife and disable the program to avoid any restrictions.", + ], + ), + ), + ) + + player.logout() + + return Response(b"-3") + + """ These checks only worked for ~5 hours from release. rumoi's quick! 
@router.get("/web/osu-search.php")
async def osuSearchHandler(
    player: Player = Depends(authenticate_player_session(Query, "u", "h")),
    ranked_status: int = Query(..., alias="r", ge=0, le=8),
    query: str = Query(..., alias="q"),
    mode: int = Query(..., alias="m", ge=-1, le=3),  # -1 for all
    page_num: int = Query(..., alias="p"),
) -> Response:
    """Serve osu!direct search results by querying the configured mirror.

    Returns a newline-separated osu!direct listing; the first line is the
    result count ("101" signals the client that more pages exist).
    """

    def handle_invalid_characters(s: str) -> str:
        # XXX: this is a bug that exists on official servers (lmao)
        # | is used to delimit the set data, so the difficulty name
        # cannot contain this or it will be ignored. we fix it here
        # by using a different character.
        # (hoisted to function scope — it was previously re-defined on
        # every iteration of the result loop.)
        return s.replace("|", "I")

    params: dict[str, Any] = {"amount": 100, "offset": page_num * 100}

    # eventually we could try supporting these,
    # but it mostly depends on the mirror.
    if query not in ("Newest", "Top+Rated", "Most+Played"):
        params["query"] = query

    if mode != -1:  # -1 for all
        params["mode"] = mode

    if ranked_status != 4:  # 4 for all
        # convert to osu!api status
        params["status"] = RankedStatus.from_osudirect(ranked_status).osu_api

    response = await app.state.services.http_client.get(
        app.settings.MIRROR_SEARCH_ENDPOINT,
        params=params,
    )
    if response.status_code != status.HTTP_200_OK:
        return Response(b"-1\nFailed to retrieve data from the beatmap mirror.")

    result = response.json()

    lresult = len(result)  # send over 100 if we receive
    # 100 matches, so the client
    # knows there are more to get
    ret = [f"{'101' if lresult == 100 else lresult}"]
    for bmapset in result:
        if bmapset["ChildrenBeatmaps"] is None:
            continue

        # some mirrors use a true/false instead of 0 or 1
        bmapset["HasVideo"] = int(bmapset["HasVideo"])

        diff_sorted_maps = sorted(
            bmapset["ChildrenBeatmaps"],
            key=lambda m: m["DifficultyRating"],
        )

        diffs_str = ",".join(
            [
                DIRECT_MAP_INFO_FMTSTR.format(
                    DifficultyRating=row["DifficultyRating"],
                    DiffName=handle_invalid_characters(row["DiffName"]),
                    CS=row["CS"],
                    OD=row["OD"],
                    AR=row["AR"],
                    HP=row["HP"],
                    Mode=row["Mode"],
                )
                for row in diff_sorted_maps
            ],
        )

        ret.append(
            DIRECT_SET_INFO_FMTSTR.format(
                Artist=handle_invalid_characters(bmapset["Artist"]),
                Title=handle_invalid_characters(bmapset["Title"]),
                Creator=bmapset["Creator"],
                RankedStatus=bmapset["RankedStatus"],
                LastUpdate=bmapset["LastUpdate"],
                SetID=bmapset["SetID"],
                HasVideo=bmapset["HasVideo"],
                diffs=diffs_str,
            ),
        )

    return Response("\n".join(ret).encode())
+ return Response(b"") + + rating = 10.0 # TODO: real data + + return Response( + ( + "{set_id}.osz|{artist}|{title}|{creator}|" + "{status}|{rating:.1f}|{last_update}|{set_id}|" + "0|0|0|0|0" + ) + .format(**bmapset, rating=rating) + .encode(), + ) + # 0s are threadid, has_vid, has_story, filesize, filesize_novid + + +def chart_entry(name: str, before: float | None, after: float | None) -> str: + return f"{name}Before:{before or ''}|{name}After:{after or ''}" + + +def format_achievement_string(file: str, name: str, description: str) -> str: + return f"{file}+{name}+{description}" + + +def parse_form_data_score_params( + score_data: FormData, +) -> tuple[bytes, StarletteUploadFile] | None: + """Parse the score data, and replay file + from the form data's 'score' parameters.""" + try: + score_parts = score_data.getlist("score") + assert len(score_parts) == 2, "Invalid score data" + + score_data_b64 = score_data.getlist("score")[0] + assert isinstance(score_data_b64, str), "Invalid score data" + replay_file = score_data.getlist("score")[1] + assert isinstance(replay_file, StarletteUploadFile), "Invalid replay data" + except AssertionError as exc: + log(f"Failed to validate score multipart data: ({exc.args[0]})", Ansi.LRED) + return None + else: + return ( + score_data_b64.encode(), + replay_file, + ) + + +@router.post("/web/osu-submit-modular-selector.php") +async def osuSubmitModularSelector( + request: Request, + # TODO: should token be allowed + # through but ac'd if not found? + # TODO: validate token format + # TODO: save token in the database + token: str = Header(...), + # TODO: do ft & st contain pauses? 
+ exited_out: bool = Form(..., alias="x"), + fail_time: int = Form(..., alias="ft"), + visual_settings_b64: bytes = Form(..., alias="fs"), + updated_beatmap_hash: str = Form(..., alias="bmk"), + storyboard_md5: str | None = Form(None, alias="sbk"), + iv_b64: bytes = Form(..., alias="iv"), + unique_ids: str = Form(..., alias="c1"), + score_time: int = Form(..., alias="st"), + pw_md5: str = Form(..., alias="pass"), + osu_version: str = Form(..., alias="osuver"), + client_hash_b64: bytes = Form(..., alias="s"), + fl_cheat_screenshot: bytes | None = File(None, alias="i"), +) -> Response: + """Handle a score submission from an osu! client with an active session.""" + + if fl_cheat_screenshot: + stacktrace = app.utils.get_appropriate_stacktrace() + await app.state.services.log_strange_occurrence(stacktrace) + + # NOTE: the bancho protocol uses the "score" parameter name for both + # the base64'ed score data, and the replay file in the multipart + # starlette/fastapi do not support this, so we've moved it out + score_parameters = parse_form_data_score_params(await request.form()) + if score_parameters is None: + return Response(b"") + + # extract the score data and replay file from the score data + score_data_b64, replay_file = score_parameters + + # decrypt the score data (aes) + score_data, client_hash_decoded = encryption.decrypt_score_aes_data( + score_data_b64, + client_hash_b64, + iv_b64, + osu_version, + ) + + # fetch map & player + + bmap_md5 = score_data[0] + bmap = await Beatmap.from_md5(bmap_md5) + if not bmap: + # Map does not exist, most likely unsubmitted. 
+ return Response(b"error: beatmap") + + # if the client has supporter, a space is appended + # but usernames may also end with a space, which must be preserved + username = score_data[1] + if username[-1] == " ": + username = username[:-1] + + player = await app.state.sessions.players.from_login(username, pw_md5) + if not player: + # Player is not online, return nothing so that their + # client will retry submission when they log in. + return Response(b"") + + # parse the score from the remaining data + score = Score.from_submission(score_data[2:]) + + # attach bmap & player + score.bmap = bmap + score.player = player + + ## perform checksum validation + + unique_id1, unique_id2 = unique_ids.split("|", maxsplit=1) + unique_id1_md5 = hashlib.md5(unique_id1.encode()).hexdigest() + unique_id2_md5 = hashlib.md5(unique_id2.encode()).hexdigest() + + try: + assert player.client_details is not None + + if osu_version != f"{player.client_details.osu_version.date:%Y%m%d}": + raise ValueError("osu! 
version mismatch") + + if client_hash_decoded != player.client_details.client_hash: + raise ValueError("client hash mismatch") + # assert unique ids (c1) are correct and match login params + if unique_id1_md5 != player.client_details.uninstall_md5: + raise ValueError( + f"unique_id1 mismatch ({unique_id1_md5} != {player.client_details.uninstall_md5})", + ) + + if unique_id2_md5 != player.client_details.disk_signature_md5: + raise ValueError( + f"unique_id2 mismatch ({unique_id2_md5} != {player.client_details.disk_signature_md5})", + ) + + # assert online checksums match + server_score_checksum = score.compute_online_checksum( + osu_version=osu_version, + osu_client_hash=client_hash_decoded, + storyboard_checksum=storyboard_md5 or "", + ) + if score.client_checksum != server_score_checksum: + raise ValueError( + f"online score checksum mismatch ({server_score_checksum} != {score.client_checksum})", + ) + + # assert beatmap hashes match + if bmap_md5 != updated_beatmap_hash: + raise ValueError( + f"beatmap hash mismatch ({bmap_md5} != {updated_beatmap_hash})", + ) + + except (ValueError, AssertionError): + # NOTE: this is undergoing a temporary trial period, + # after which, it will be enabled & perform restrictions. + stacktrace = app.utils.get_appropriate_stacktrace() + await app.state.services.log_strange_occurrence(stacktrace) + + # await player.restrict( + # admin=app.state.sessions.bot, + # reason="mismatching hashes on score submission", + # ) + + # refresh their client state + # if player.online: + # player.logout() + + # return b"error: ban" + + # we should update their activity no matter + # what the result of the score submission is. 
+ score.player.update_latest_activity_soon() + + # make sure the player's client displays the correct mode's stats + if score.mode != score.player.status.mode: + score.player.status.mods = score.mods + score.player.status.mode = score.mode + + if not score.player.restricted: + app.state.sessions.players.enqueue(app.packets.user_stats(score.player)) + + # hold a lock around (check if submitted, submission) to ensure no duplicates + # are submitted to the database, and potentially award duplicate score/pp/etc. + async with app.state.score_submission_locks[score.client_checksum]: + # stop here if this is a duplicate score + if await app.state.services.database.fetch_one( + "SELECT 1 FROM scores WHERE online_checksum = :checksum", + {"checksum": score.client_checksum}, + ): + log(f"{score.player} submitted a duplicate score.", Ansi.LYELLOW) + return Response(b"error: no") + + # all data read from submission. + # now we can calculate things based on our data. + score.acc = score.calculate_accuracy() + + osu_file_available = await ensure_osu_file_is_available( + bmap.id, + expected_md5=bmap.md5, + ) + if osu_file_available: + score.pp, score.sr = score.calculate_performance(bmap.id) + + if score.passed: + await score.calculate_status() + + if score.bmap.status != RankedStatus.Pending: + score.rank = await score.calculate_placement() + else: + score.status = SubmissionStatus.FAILED + + score.time_elapsed = score_time if score.passed else fail_time + + # TODO: re-implement pp caps for non-whitelisted players? + + """ Score submission checks completed; submit the score. 
""" + + if app.state.services.datadog: + app.state.services.datadog.increment("bancho.submitted_scores") + + if score.status == SubmissionStatus.BEST: + if app.state.services.datadog: + app.state.services.datadog.increment("bancho.submitted_scores_best") + + if score.bmap.has_leaderboard: + if score.bmap.status == RankedStatus.Loved and score.mode in ( + GameMode.VANILLA_OSU, + GameMode.VANILLA_TAIKO, + GameMode.VANILLA_CATCH, + GameMode.VANILLA_MANIA, + ): + performance = f"{score.score:,} score" + else: + performance = f"{score.pp:,.2f}pp" + + score.player.enqueue( + app.packets.notification( + f"You achieved #{score.rank}! ({performance})", + ), + ) + + if score.rank == 1 and not score.player.restricted: + announce_chan = app.state.sessions.channels.get_by_name("#announce") + + ann = [ + f"\x01ACTION achieved #1 on {score.bmap.embed}", + f"with {score.acc:.2f}% for {performance}.", + ] + + if score.mods: + ann.insert(1, f"+{score.mods!r}") + + scoring_metric = ( + "pp" if score.mode >= GameMode.RELAX_OSU else "score" + ) + + # If there was previously a score on the map, add old #1. + prev_n1 = await app.state.services.database.fetch_one( + "SELECT u.id, name FROM users u " + "INNER JOIN scores s ON u.id = s.userid " + "WHERE s.map_md5 = :map_md5 AND s.mode = :mode " + "AND s.status = 2 AND u.priv & 1 " + f"ORDER BY s.{scoring_metric} DESC LIMIT 1", + {"map_md5": score.bmap.md5, "mode": score.mode}, + ) + + if prev_n1: + if score.player.id != prev_n1["id"]: + ann.append( + f"(Previous #1: [https://{app.settings.DOMAIN}/u/" + "{id} {name}])".format( + id=prev_n1["id"], + name=prev_n1["name"], + ), + ) + + assert announce_chan is not None + announce_chan.send(" ".join(ann), sender=score.player, to_self=True) + + # this score is our best score. + # update any preexisting personal best + # records with SubmissionStatus.SUBMITTED. 
+ await app.state.services.database.execute( + "UPDATE scores SET status = 1 " + "WHERE status = 2 AND map_md5 = :map_md5 " + "AND userid = :user_id AND mode = :mode", + { + "map_md5": score.bmap.md5, + "user_id": score.player.id, + "mode": score.mode, + }, + ) + + score.id = await app.state.services.database.execute( + "INSERT INTO scores " + "VALUES (NULL, " + ":map_md5, :score, :pp, :acc, " + ":max_combo, :mods, :n300, :n100, " + ":n50, :nmiss, :ngeki, :nkatu, " + ":grade, :status, :mode, :play_time, " + ":time_elapsed, :client_flags, :user_id, :perfect, " + ":checksum)", + { + "map_md5": score.bmap.md5, + "score": score.score, + "pp": score.pp, + "acc": score.acc, + "max_combo": score.max_combo, + "mods": score.mods, + "n300": score.n300, + "n100": score.n100, + "n50": score.n50, + "nmiss": score.nmiss, + "ngeki": score.ngeki, + "nkatu": score.nkatu, + "grade": score.grade.name, + "status": score.status, + "mode": score.mode, + "play_time": score.server_time, + "time_elapsed": score.time_elapsed, + "client_flags": score.client_flags, + "user_id": score.player.id, + "perfect": score.perfect, + "checksum": score.client_checksum, + }, + ) + + if score.passed: + replay_data = await replay_file.read() + + MIN_REPLAY_SIZE = 24 + + if len(replay_data) >= MIN_REPLAY_SIZE: + replay_disk_file = REPLAYS_PATH / f"{score.id}.osr" + replay_disk_file.write_bytes(replay_data) + else: + log(f"{score.player} submitted a score without a replay!", Ansi.LRED) + + #Restrict a player if a score without replay gets submitted + if not score.player.restricted: + await score.player.restrict( + admin=app.state.sessions.bot, + reason="submitted score with no replay. Most likely submitter cheat.", + ) + if score.player.is_online: + score.player.logout() + + """ Update the user's & beatmap's stats """ + + # get the current stats, and take a + # shallow copy for the response charts. 
+ stats = score.player.stats[score.mode]
+ prev_stats = copy.copy(stats)
+
+ # stats updates for all submitted scores
+ stats.playtime += score.time_elapsed // 1000
+ stats.plays += 1
+ stats.tscore += score.score
+ stats.total_hits += score.n300 + score.n100 + score.n50
+
+ if score.mode.as_vanilla in (1, 3):
+ # taiko uses geki & katu for hitting big notes with 2 keys
+ # mania uses geki & katu for rainbow 300 & 200
+ stats.total_hits += score.ngeki + score.nkatu
+
+ stats_updates: dict[str, Any] = {
+ "plays": stats.plays,
+ "playtime": stats.playtime,
+ "tscore": stats.tscore,
+ "total_hits": stats.total_hits,
+ }
+
+ if score.passed and score.bmap.has_leaderboard:
+ # player passed & map is ranked, approved, or loved.
+
+ if score.max_combo > stats.max_combo:
+ stats.max_combo = score.max_combo
+ stats_updates["max_combo"] = stats.max_combo
+
+ if score.bmap.awards_ranked_pp and score.status == SubmissionStatus.BEST:
+ # map is ranked or approved, and it's our (new)
+ # best score on the map. update the player's
+ # ranked score, grades, pp, acc and global rank.
+
+ additional_rscore = score.score
+ if score.prev_best:
+ # we previously had a score, so remove
+ # its score from our ranked score. 
+ additional_rscore -= score.prev_best.score + + if score.grade != score.prev_best.grade: + if score.grade >= Grade.A: + stats.grades[score.grade] += 1 + grade_col = format(score.grade, "stats_column") + stats_updates[grade_col] = stats.grades[score.grade] + + if score.prev_best.grade >= Grade.A: + stats.grades[score.prev_best.grade] -= 1 + grade_col = format(score.prev_best.grade, "stats_column") + stats_updates[grade_col] = stats.grades[score.prev_best.grade] + else: + # this is our first submitted score on the map + if score.grade >= Grade.A: + stats.grades[score.grade] += 1 + grade_col = format(score.grade, "stats_column") + stats_updates[grade_col] = stats.grades[score.grade] + + stats.rscore += additional_rscore + stats_updates["rscore"] = stats.rscore + + # fetch scores sorted by pp for total acc/pp calc + # NOTE: we select all plays (and not just top100) + # because bonus pp counts the total amount of ranked + # scores. I'm aware this scales horribly, and it'll + # likely be split into two queries in the future. 
+ best_scores = await app.state.services.database.fetch_all( + "SELECT s.pp, s.acc FROM scores s " + "INNER JOIN maps m ON s.map_md5 = m.md5 " + "WHERE s.userid = :user_id AND s.mode = :mode " + "AND s.status = 2 AND m.status IN (2, 3) " # ranked, approved + "ORDER BY s.pp DESC", + {"user_id": score.player.id, "mode": score.mode}, + ) + + # calculate new total weighted accuracy + weighted_acc = sum( + row["acc"] * 0.95**i for i, row in enumerate(best_scores) + ) + bonus_acc = 100.0 / (20 * (1 - 0.95 ** len(best_scores))) + stats.acc = (weighted_acc * bonus_acc) / 100 + stats_updates["acc"] = stats.acc + + # calculate new total weighted pp + weighted_pp = sum(row["pp"] * 0.95**i for i, row in enumerate(best_scores)) + bonus_pp = 416.6667 * (1 - 0.9994 ** len(best_scores)) + stats.pp = round(weighted_pp + bonus_pp) + stats_updates["pp"] = stats.pp + + # update global & country ranking + stats.rank = await score.player.update_rank(score.mode) + + await stats_repo.partial_update( + score.player.id, + score.mode.value, + plays=stats_updates.get("plays", UNSET), + playtime=stats_updates.get("playtime", UNSET), + tscore=stats_updates.get("tscore", UNSET), + total_hits=stats_updates.get("total_hits", UNSET), + max_combo=stats_updates.get("max_combo", UNSET), + xh_count=stats_updates.get("xh_count", UNSET), + x_count=stats_updates.get("x_count", UNSET), + sh_count=stats_updates.get("sh_count", UNSET), + s_count=stats_updates.get("s_count", UNSET), + a_count=stats_updates.get("a_count", UNSET), + rscore=stats_updates.get("rscore", UNSET), + acc=stats_updates.get("acc", UNSET), + pp=stats_updates.get("pp", UNSET), + ) + + if not score.player.restricted: + # enqueue new stats info to all other users + app.state.sessions.players.enqueue(app.packets.user_stats(score.player)) + + # update beatmap with new stats + score.bmap.plays += 1 + if score.passed: + score.bmap.passes += 1 + + await app.state.services.database.execute( + "UPDATE maps SET plays = :plays, passes = :passes 
WHERE md5 = :map_md5", + { + "plays": score.bmap.plays, + "passes": score.bmap.passes, + "map_md5": score.bmap.md5, + }, + ) + + # update their recent score + score.player.recent_scores[score.mode] = score + + """ score submission charts """ + + # charts are only displayed for passes vanilla gamemodes. + if not score.passed: # TODO: check if this is correct + response = b"error: no" + else: + # construct and send achievements & ranking charts to the client + if score.bmap.awards_ranked_pp and not score.player.restricted: + unlocked_achievements: list[Achievement] = [] + + server_achievements = await achievements_usecases.fetch_many() + player_achievements = await user_achievements_usecases.fetch_many( + user_id=score.player.id, + ) + + for server_achievement in server_achievements: + player_unlocked_achievement = any( + player_achievement + for player_achievement in player_achievements + if player_achievement["achid"] == server_achievement["id"] + ) + if player_unlocked_achievement: + # player already has this achievement. + continue + + achievement_condition = server_achievement["cond"] + if achievement_condition(score, score.mode.as_vanilla): + await user_achievements_usecases.create( + score.player.id, + server_achievement["id"], + ) + unlocked_achievements.append(server_achievement) + + achievements_str = "/".join( + format_achievement_string(a["file"], a["name"], a["desc"]) + for a in unlocked_achievements + ) + else: + achievements_str = "" + + # create score submission charts for osu! 
client to display + + if score.prev_best: + beatmap_ranking_chart_entries = ( + chart_entry("rank", score.prev_best.rank, score.rank), + chart_entry("rankedScore", score.prev_best.score, score.score), + chart_entry("totalScore", score.prev_best.score, score.score), + chart_entry("maxCombo", score.prev_best.max_combo, score.max_combo), + chart_entry( + "accuracy", + round(score.prev_best.acc, 2), + round(score.acc, 2), + ), + chart_entry("pp", score.prev_best.pp, score.pp), + ) + else: + # no previous best score + beatmap_ranking_chart_entries = ( + chart_entry("rank", None, score.rank), + chart_entry("rankedScore", None, score.score), + chart_entry("totalScore", None, score.score), + chart_entry("maxCombo", None, score.max_combo), + chart_entry("accuracy", None, round(score.acc, 2)), + chart_entry("pp", None, score.pp), + ) + + overall_ranking_chart_entries = ( + chart_entry("rank", prev_stats.rank, stats.rank), + chart_entry("rankedScore", prev_stats.rscore, stats.rscore), + chart_entry("totalScore", prev_stats.tscore, stats.tscore), + chart_entry("maxCombo", prev_stats.max_combo, stats.max_combo), + chart_entry("accuracy", round(prev_stats.acc, 2), round(stats.acc, 2)), + chart_entry("pp", prev_stats.pp, stats.pp), + ) + + submission_charts = [ + # beatmap info chart + f"beatmapId:{score.bmap.id}", + f"beatmapSetId:{score.bmap.set_id}", + f"beatmapPlaycount:{score.bmap.plays}", + f"beatmapPasscount:{score.bmap.passes}", + f"approvedDate:{score.bmap.last_update}", + "\n", + # beatmap ranking chart + "chartId:beatmap", + f"chartUrl:{score.bmap.set.url}", + "chartName:Beatmap Ranking", + *beatmap_ranking_chart_entries, + f"onlineScoreId:{score.id}", + "\n", + # overall ranking chart + "chartId:overall", + f"chartUrl:https://{app.settings.DOMAIN}/u/{score.player.id}", + "chartName:Overall Ranking", + *overall_ranking_chart_entries, + f"achievements-new:{achievements_str}", + ] + + response = "|".join(submission_charts).encode() + + log( + f"[{score.mode!r}] 
{score.player} submitted a score! " + f"({score.status!r}, {score.pp:,.2f}pp / {stats.pp:,}pp)", + Ansi.LGREEN, + ) + + return Response(response) + + +@router.get("/web/osu-getreplay.php") +async def getReplay( + player: Player = Depends(authenticate_player_session(Query, "u", "h")), + mode: int = Query(..., alias="m", ge=0, le=3), + score_id: int = Query(..., alias="c", min=0, max=9_223_372_036_854_775_807), +) -> Response: + score = await Score.from_sql(score_id) + if not score: + return Response(b"", status_code=404) + + file = REPLAYS_PATH / f"{score_id}.osr" + if not file.exists(): + return Response(b"", status_code=404) + + # increment replay views for this score + if score.player is not None and player.id != score.player.id: + app.state.loop.create_task(score.increment_replay_views()) + + return FileResponse(file) + + +@router.get("/web/osu-rate.php") +async def osuRate( + player: Player = Depends( + authenticate_player_session(Query, "u", "p", err=b"auth fail"), + ), + map_md5: str = Query(..., alias="c", min_length=32, max_length=32), + rating: int | None = Query(None, alias="v", ge=1, le=10), +) -> Response: + if rating is None: + # check if we have the map in our cache; + # if not, the map probably doesn't exist. + if map_md5 not in app.state.cache.beatmap: + return Response(b"no exist") + + cached = app.state.cache.beatmap[map_md5] + + # only allow rating on maps with a leaderboard. + if cached.status < RankedStatus.Ranked: + return Response(b"not ranked") + + # osu! client is checking whether we can rate the map or not. + # the client hasn't rated the map, so simply + # tell them that they can submit a rating. + if not await ratings_repo.fetch_one(map_md5=map_md5, userid=player.id): + return Response(b"ok") + else: + # the client is submitting a rating for the map. 
+ await ratings_repo.create(userid=player.id, map_md5=map_md5, rating=rating) + + map_ratings = await ratings_repo.fetch_many(map_md5=map_md5) + ratings = [row["rating"] for row in map_ratings] + + # send back the average rating + avg = sum(ratings) / len(ratings) + return Response(f"alreadyvoted\n{avg}".encode()) + + +@unique +@pymysql_encode(escape_enum) +class LeaderboardType(IntEnum): + Local = 0 + Top = 1 + Mods = 2 + Friends = 3 + Country = 4 + + +async def get_leaderboard_scores( + leaderboard_type: LeaderboardType | int, + map_md5: str, + mode: int, + mods: Mods, + player: Player, + scoring_metric: Literal["pp", "score"], +) -> tuple[list[dict[str, Any]], dict[str, Any] | None]: + query = [ + f"SELECT s.id, s.{scoring_metric} AS _score, " + "s.max_combo, s.n50, s.n100, s.n300, " + "s.nmiss, s.nkatu, s.ngeki, s.perfect, s.mods, " + "UNIX_TIMESTAMP(s.play_time) time, u.id userid, " + "COALESCE(CONCAT('[', c.tag, '] ', u.name), u.name) AS name " + "FROM scores s " + "INNER JOIN users u ON u.id = s.userid " + "LEFT JOIN clans c ON c.id = u.clan_id " + "WHERE s.map_md5 = :map_md5 AND s.status = 2 " # 2: =best score + "AND (u.priv & 1 OR u.id = :user_id) AND mode = :mode", + ] + + params: dict[str, Any] = { + "map_md5": map_md5, + "user_id": player.id, + "mode": mode, + } + + if leaderboard_type == LeaderboardType.Mods: + query.append("AND s.mods = :mods") + params["mods"] = mods + elif leaderboard_type == LeaderboardType.Friends: + query.append("AND s.userid IN :friends") + params["friends"] = player.friends | {player.id} + elif leaderboard_type == LeaderboardType.Country: + query.append("AND u.country = :country") + params["country"] = player.geoloc["country"]["acronym"] + + # TODO: customizability of the number of scores + query.append("ORDER BY _score DESC LIMIT 50") + + score_rows = await app.state.services.database.fetch_all( + " ".join(query), + params, + ) + + if score_rows: # None or [] + # fetch player's personal best score + personal_best_score_row = 
await app.state.services.database.fetch_one( + f"SELECT id, {scoring_metric} AS _score, " + "max_combo, n50, n100, n300, " + "nmiss, nkatu, ngeki, perfect, mods, " + "UNIX_TIMESTAMP(play_time) time " + "FROM scores " + "WHERE map_md5 = :map_md5 AND mode = :mode " + "AND userid = :user_id AND status = 2 " + "ORDER BY _score DESC LIMIT 1", + {"map_md5": map_md5, "mode": mode, "user_id": player.id}, + ) + + if personal_best_score_row is not None: + # calculate the rank of the score. + p_best_rank = 1 + await app.state.services.database.fetch_val( + "SELECT COUNT(*) FROM scores s " + "INNER JOIN users u ON u.id = s.userid " + "WHERE s.map_md5 = :map_md5 AND s.mode = :mode " + "AND s.status = 2 AND u.priv & 1 " + f"AND s.{scoring_metric} > :score", + { + "map_md5": map_md5, + "mode": mode, + "score": personal_best_score_row["_score"], + }, + column=0, # COUNT(*) + ) + + # attach rank to personal best row + personal_best_score_row["rank"] = p_best_rank + else: + score_rows = [] + personal_best_score_row = None + + return score_rows, personal_best_score_row + + +SCORE_LISTING_FMTSTR = ( + "{id}|{name}|{score}|{max_combo}|" + "{n50}|{n100}|{n300}|{nmiss}|{nkatu}|{ngeki}|" + "{perfect}|{mods}|{userid}|{rank}|{time}|{has_replay}" +) + + +@router.get("/web/osu-osz2-getscores.php") +async def getScores( + player: Player = Depends(authenticate_player_session(Query, "us", "ha")), + requesting_from_editor_song_select: bool = Query(..., alias="s"), + leaderboard_version: int = Query(..., alias="vv"), + leaderboard_type: int = Query(..., alias="v", ge=0, le=4), + map_md5: str = Query(..., alias="c", min_length=32, max_length=32), + map_filename: str = Query(..., alias="f"), + mode_arg: int = Query(..., alias="m", ge=0, le=3), + map_set_id: int = Query(..., alias="i", ge=-1, le=2_147_483_647), + mods_arg: int = Query(..., alias="mods", ge=0, le=2_147_483_647), + map_package_hash: str = Query(..., alias="h"), # TODO: further validation + aqn_files_found: bool = Query(..., alias="a"), 
+) -> Response: + if aqn_files_found: + stacktrace = app.utils.get_appropriate_stacktrace() + await app.state.services.log_strange_occurrence(stacktrace) + + # check if this md5 has already been cached as + # unsubmitted/needs update to reduce osu!api spam + if map_md5 in app.state.cache.unsubmitted: + return Response(b"-1|false") + if map_md5 in app.state.cache.needs_update: + return Response(b"1|false") + + if mods_arg & Mods.RELAX: + if mode_arg == 3: # rx!mania doesn't exist + mods_arg &= ~Mods.RELAX + else: + mode_arg += 4 + elif mods_arg & Mods.AUTOPILOT: + if mode_arg in (1, 2, 3): # ap!catch, taiko and mania don't exist + mods_arg &= ~Mods.AUTOPILOT + else: + mode_arg += 8 + + mods = Mods(mods_arg) + mode = GameMode(mode_arg) + + # attempt to update their stats if their + # gm/gm-affecting-mods change at all. + if mode != player.status.mode: + player.status.mods = mods + player.status.mode = mode + + if not player.restricted: + app.state.sessions.players.enqueue(app.packets.user_stats(player)) + + scoring_metric: Literal["pp", "score"] = ( + "pp" if mode >= GameMode.RELAX_OSU else "score" + ) + + bmap = await Beatmap.from_md5(map_md5, set_id=map_set_id) + has_set_id = map_set_id > 0 + + if not bmap: + # map not found, figure out whether it needs an + # update or isn't submitted using its filename. + + if has_set_id and map_set_id not in app.state.cache.beatmapset: + # set not cached, it doesn't exist + app.state.cache.unsubmitted.add(map_md5) + return Response(b"-1|false") + + map_filename = unquote_plus(map_filename) # TODO: is unquote needed? + + if has_set_id: + # we can look it up in the specific set from cache + for bmap in app.state.cache.beatmapset[map_set_id].maps: + if map_filename == bmap.filename: + map_exists = True + break + else: + map_exists = False + else: + # we can't find it on the osu!api by md5, + # and we don't have the set id, so we must + # look it up in sql from the filename. 
+ map_exists = ( + await maps_repo.fetch_one( + filename=map_filename, + ) + is not None + ) + + if map_exists: + # map can be updated. + app.state.cache.needs_update.add(map_md5) + return Response(b"1|false") + else: + # map is unsubmitted. + # add this map to the unsubmitted cache, so + # that we don't have to make this request again. + app.state.cache.unsubmitted.add(map_md5) + return Response(b"-1|false") + + # we've found a beatmap for the request. + + if app.state.services.datadog: + app.state.services.datadog.increment("bancho.leaderboards_served") + + if bmap.status < RankedStatus.Ranked: + # only show leaderboards for ranked, + # approved, qualified, or loved maps. + return Response(f"{int(bmap.status)}|false".encode()) + + # fetch scores & personal best + # TODO: create a leaderboard cache + if not requesting_from_editor_song_select: + score_rows, personal_best_score_row = await get_leaderboard_scores( + leaderboard_type, + bmap.md5, + mode, + mods, + player, + scoring_metric, + ) + else: + score_rows = [] + personal_best_score_row = None + + # fetch beatmap rating + map_ratings = await ratings_repo.fetch_many( + map_md5=bmap.md5, + page=None, + page_size=None, + ) + ratings = [row["rating"] for row in map_ratings] + map_avg_rating = sum(ratings) / len(ratings) if ratings else 0.0 + + ## construct response for osu! 
client + + response_lines: list[str] = [ + # NOTE: fa stands for featured artist (for the ones that may not know) + # {ranked_status}|{serv_has_osz2}|{bid}|{bsid}|{len(scores)}|{fa_track_id}|{fa_license_text} + f"{int(bmap.status)}|false|{bmap.id}|{bmap.set_id}|{len(score_rows)}|0|", + # {offset}\n{beatmap_name}\n{rating} + # TODO: server side beatmap offsets + f"0\n{bmap.full_name}\n{map_avg_rating}", + ] + + if not score_rows: + response_lines.extend(("", "")) # no scores, no personal best + return Response("\n".join(response_lines).encode()) + + if personal_best_score_row is not None: + user_clan = ( + await clans_repo.fetch_one(id=player.clan_id) + if player.clan_id is not None + else None + ) + display_name = ( + f"[{user_clan['tag']}] {player.name}" + if user_clan is not None + else player.name + ) + response_lines.append( + SCORE_LISTING_FMTSTR.format( + **personal_best_score_row, + name=display_name, + userid=player.id, + score=int(round(personal_best_score_row["_score"])), + has_replay="1", + ), + ) + else: + response_lines.append("") + + response_lines.extend( + [ + SCORE_LISTING_FMTSTR.format( + **s, + score=int(round(s["_score"])), + has_replay="1", + rank=idx + 1, + ) + for idx, s in enumerate(score_rows) + ], + ) + + return Response("\n".join(response_lines).encode()) + + +@router.post("/web/osu-comment.php") +async def osuComment( + player: Player = Depends(authenticate_player_session(Form, "u", "p")), + map_id: int = Form(..., alias="b"), + map_set_id: int = Form(..., alias="s"), + score_id: int = Form(..., alias="r", ge=0, le=9_223_372_036_854_775_807), + mode_vn: int = Form(..., alias="m", ge=0, le=3), + action: Literal["get", "post"] = Form(..., alias="a"), + # only sent for post + target: Literal["song", "map", "replay"] | None = Form(None), + colour: str | None = Form(None, alias="f", min_length=6, max_length=6), + start_time: int | None = Form(None, alias="starttime"), + comment: str | None = Form(None, min_length=1, max_length=80), +) -> 
Response: + if action == "get": + # client is requesting all comments + comments = await comments_repo.fetch_all_relevant_to_replay( + score_id=score_id, + map_set_id=map_set_id, + map_id=map_id, + ) + + ret: list[str] = [] + + for cmt in comments: + # note: this implementation does not support + # "player" or "creator" comment colours + if cmt["priv"] & Privileges.NOMINATOR: + fmt = "bat" + elif cmt["priv"] & Privileges.DONATOR: + fmt = "supporter" + else: + fmt = "" + + if cmt["colour"]: + fmt += f'|{cmt["colour"]}' + + ret.append( + "{time}\t{target_type}\t{fmt}\t{comment}".format(fmt=fmt, **cmt), + ) + + player.update_latest_activity_soon() + return Response("\n".join(ret).encode()) + + elif action == "post": + # client is submitting a new comment + + # validate all required params are provided + assert target is not None + assert start_time is not None + assert comment is not None + + # get the corresponding id from the request + if target == "song": + target_id = map_set_id + elif target == "map": + target_id = map_id + else: # target == "replay" + target_id = score_id + + if colour and not player.priv & Privileges.DONATOR: + # only supporters can use colours. + colour = None + + log( + f"User {player} attempted to use a coloured comment without " + "supporter status. Submitting comment without a colour.", + ) + + # insert into sql + await comments_repo.create( + target_id=target_id, + target_type=comments_repo.TargetType(target), + userid=player.id, + time=start_time, + comment=comment, + colour=colour, + ) + + player.update_latest_activity_soon() + + return Response(b"") # empty resp is fine + + +@router.get("/web/osu-markasread.php") +async def osuMarkAsRead( + player: Player = Depends(authenticate_player_session(Query, "u", "h")), + channel: str = Query(..., min_length=0, max_length=32), +) -> Response: + target_name = unquote(channel) # TODO: unquote needed? 
+ if not target_name: + log( + f"User {player} attempted to mark a channel as read without a target.", + Ansi.LYELLOW, + ) + return Response(b"") # no channel specified + + target = await app.state.sessions.players.from_cache_or_sql(name=target_name) + if target: + # mark any unread mail from this user as read. + await mail_repo.mark_conversation_as_read( + to_id=player.id, + from_id=target.id, + ) + + return Response(b"") + + +@router.get("/web/osu-getseasonal.php") +async def osuSeasonal() -> Response: + return ORJSONResponse(app.settings.SEASONAL_BGS) + + +@router.get("/web/bancho_connect.php") +async def banchoConnect( + # NOTE: this is disabled as this endpoint can be called + # before a player has been granted a session + # player: Player = Depends(authenticate_player_session(Query, "u", "h")), + osu_ver: str = Query(..., alias="v"), + active_endpoint: str | None = Query(None, alias="fail"), + net_framework_vers: str | None = Query(None, alias="fx"), # delimited by | + client_hash: str | None = Query(None, alias="ch"), + retrying: bool | None = Query(None, alias="retry"), # '0' or '1' +) -> Response: + return Response(b"") + + +_checkupdates_cache = { # default timeout is 1h, set on request. + "cuttingedge": {"check": None, "path": None, "timeout": 0}, + "stable40": {"check": None, "path": None, "timeout": 0}, + "beta40": {"check": None, "path": None, "timeout": 0}, + "stable": {"check": None, "path": None, "timeout": 0}, +} + + +@router.get("/web/check-updates.php") +async def checkUpdates( + request: Request, + action: Literal["check", "path", "error"], + stream: Literal["cuttingedge", "stable40", "beta40", "stable"], +) -> Response: + return Response(b"") + + +""" Misc handlers """ + + +if app.settings.REDIRECT_OSU_URLS: + # NOTE: this will likely be removed with the addition of a frontend. 
+ async def osu_redirect(request: Request, _: int = Path(...)) -> Response: + return RedirectResponse( + url=f"https://osu.ppy.sh{request['path']}", + status_code=status.HTTP_301_MOVED_PERMANENTLY, + ) + + for pattern in ( + "/beatmapsets/{_}", + "/beatmaps/{_}", + "/beatmapsets/{_}/discussion", + "/community/forums/topics/{_}", + ): + router.get(pattern)(osu_redirect) + + +@router.get("/ss/{screenshot_id}.{extension}") +async def get_screenshot( + screenshot_id: str = Path(..., pattern=r"[a-zA-Z0-9-_]{8}"), + extension: Literal["jpg", "jpeg", "png"] = Path(...), +) -> Response: + """Serve a screenshot from the server, by filename.""" + screenshot_path = SCREENSHOTS_PATH / f"{screenshot_id}.{extension}" + + if not screenshot_path.exists(): + return ORJSONResponse( + content={"status": "Screenshot not found."}, + status_code=status.HTTP_404_NOT_FOUND, + ) + + if extension in ("jpg", "jpeg"): + media_type = "image/jpeg" + elif extension == "png": + media_type = "image/png" + else: + media_type = None + + return FileResponse( + path=screenshot_path, + media_type=media_type, + ) + + +@router.get("/d/{map_set_id}") +async def get_osz( + map_set_id: str = Path(...), +) -> Response: + """Handle a map download request (osu.ppy.sh/d/*).""" + no_video = map_set_id[-1] == "n" + if no_video: + map_set_id = map_set_id[:-1] + + query_str = f"{map_set_id}?n={int(not no_video)}" + + return RedirectResponse( + url=f"{app.settings.MIRROR_DOWNLOAD_ENDPOINT}/{query_str}", + status_code=status.HTTP_301_MOVED_PERMANENTLY, + ) + + +@router.get("/web/maps/{map_filename}") +async def get_updated_beatmap( + request: Request, + map_filename: str, + host: str = Header(...), +) -> Response: + """Send the latest .osu file the server has for a given map.""" + if host == "osu.ppy.sh": + return Response("bancho.py only supports the -devserver connection method") + + return RedirectResponse( + url=f"https://osu.ppy.sh{request['raw_path'].decode()}", + status_code=status.HTTP_301_MOVED_PERMANENTLY, 
@router.get("/p/doyoureallywanttoaskpeppy")
async def peppyDMHandler() -> Response:
    """Explain why the osu! client blocks DMs to this user id."""
    return Response(
        content=(
            b"This user's ID is usually peppy's (when on bancho), "
            b"and is blocked from being messaged by the osu! client."
        ),
    )


""" ingame registration """

INGAME_REGISTRATION_DISALLOWED_ERROR = {
    "form_error": {
        "user": {
            "password": [
                "In-game registration is disabled. Please register on the website.",
            ],
        },
    },
}


@router.post("/users")
async def register_account(
    request: Request,
    username: str = Form(..., alias="user[username]"),
    email: str = Form(..., alias="user[user_email]"),
    pw_plaintext: str = Form(..., alias="user[password]"),
    check: int = Form(...),
    # XXX: require/validate these headers; they are used later
    # on in the registration process for resolving geolocation
    forwarded_ip: str = Header(..., alias="X-Forwarded-For"),
    real_ip: str = Header(..., alias="X-Real-IP"),
) -> Response:
    """Handle the osu! client's in-game account registration flow.

    The client first POSTs with a non-zero `check` to validate the form
    values, then again with `check=0` to actually create the account.
    Validation failures are returned as a `form_error` JSON structure
    that the client renders in-game.
    """
    if not all((username, email, pw_plaintext)):
        return Response(
            content=b"Missing required params",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    # Disable in-game registration if enabled
    if app.settings.DISALLOW_INGAME_REGISTRATION:
        return ORJSONResponse(
            content=INGAME_REGISTRATION_DISALLOWED_ERROR,
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    # ensure all args passed are safe for registration.
    # BUG FIX: previously annotated as `Mapping[str, list[str]]`, an
    # immutable view type, despite being mutated via item-append below
    # and later rebound to a plain dict -- use the real mutable type.
    errors: defaultdict[str, list[str]] = defaultdict(list)

    # Usernames must:
    # - be within 2-15 characters in length
    # - not contain both ' ' and '_', one is fine
    # - not be in the config's `disallowed_names` list
    # - not already be taken by another player
    if not regexes.USERNAME.match(username):
        errors["username"].append("Must be 2-15 characters in length.")

    if "_" in username and " " in username:
        errors["username"].append('May contain "_" and " ", but not both.')

    if username in app.settings.DISALLOWED_NAMES:
        errors["username"].append("This username isn't allowed. Please pick another.")

    # only hit the db for uniqueness if the name passed the cheap checks.
    if "username" not in errors:
        if await users_repo.fetch_one(name=username):
            errors["username"].append("Username already taken by another player.")

    # Emails must:
    # - match the regex `^[^@\s]{1,200}@[^@\s\.]{1,30}\.[^@\.\s]{1,24}$`
    # - not already be taken by another player
    if not regexes.EMAIL.match(email):
        errors["user_email"].append("Invalid email syntax.")
    else:
        if await users_repo.fetch_one(email=email):
            errors["user_email"].append("Email already taken by another player.")

    # Passwords must:
    # - be within 8-32 characters in length
    # - have more than 3 unique characters
    # - not be in the config's `disallowed_passwords` list
    if not 8 <= len(pw_plaintext) <= 32:
        errors["password"].append("Must be 8-32 characters in length.")

    if len(set(pw_plaintext)) <= 3:
        errors["password"].append("Must have more than 3 unique characters.")

    if pw_plaintext.lower() in app.settings.DISALLOWED_PASSWORDS:
        errors["password"].append("That password was deemed too simple.")

    if errors:
        # we have errors to send back, send them back delimited by newlines.
        joined_errors = {k: ["\n".join(v)] for k, v in errors.items()}
        errors_full = {"form_error": {"user": joined_errors}}
        return ORJSONResponse(
            content=errors_full,
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    if check == 0:
        # the client isn't just checking values,
        # they want to register the account now.
        # make the md5 & bcrypt the md5 for sql.
        pw_md5 = hashlib.md5(pw_plaintext.encode()).hexdigest().encode()
        pw_bcrypt = bcrypt.hashpw(pw_md5, bcrypt.gensalt())
        app.state.cache.bcrypt[pw_bcrypt] = pw_md5  # cache result for login

        ip = app.state.services.ip_resolver.get_ip(request.headers)

        # resolve country from geolocation; "XX" is the unknown-country code.
        geoloc = await app.state.services.fetch_geoloc(ip, request.headers)
        country = geoloc["country"]["acronym"] if geoloc is not None else "XX"

        async with app.state.services.database.transaction():
            # add to `users` table.
            player = await users_repo.create(
                name=username,
                email=email,
                pw_bcrypt=pw_bcrypt,
                country=country,
            )

            # add to `stats` table.
            await stats_repo.create_all_modes(player_id=player["id"])

        if app.state.services.datadog:
            app.state.services.datadog.increment("bancho.registrations")

        log(f"<{username} ({player['id']})> has registered!", Ansi.LGREEN)

    return Response(content=b"ok")  # success


@router.post("/difficulty-rating")
async def difficultyRatingHandler(request: Request) -> Response:
    """Proxy difficulty-rating requests through to osu.ppy.sh."""
    return RedirectResponse(
        url=f"https://osu.ppy.sh{request['path']}",
        status_code=status.HTTP_307_TEMPORARY_REDIRECT,
    )
@asynccontextmanager
async def lifespan(asgi_app: BanchoAPI) -> AsyncIterator[None]:
    """ASGI lifespan context: everything before `yield` runs at startup,
    everything after runs at shutdown. Statement order is significant --
    services must be connected before caches and background tasks that
    use them are started."""
    # force utf-8 output regardless of the terminal's locale.
    if isinstance(sys.stdout, io.TextIOWrapper):
        sys.stdout.reconfigure(encoding="utf-8")

    app.utils.ensure_persistent_volumes_are_available()

    app.state.loop = asyncio.get_running_loop()

    if app.utils.is_running_as_admin():
        log(
            "Running the server with root privileges is not recommended.",
            Ansi.LYELLOW,
        )

    # connect core services (db & redis) before anything that depends on them.
    await app.state.services.database.connect()
    await app.state.services.redis.initialize()

    # datadog is optional; only start metrics if it was configured.
    if app.state.services.datadog is not None:
        app.state.services.datadog.start(
            flush_in_thread=True,
            flush_interval=15,
        )
        app.state.services.datadog.gauge("bancho.online_players", 0)

    app.state.services.ip_resolver = app.state.services.IPResolver()

    await app.state.services.run_sql_migrations()

    # warm in-memory caches from sql, then start periodic background jobs.
    await collections.initialize_ram_caches()

    await app.bg_loops.initialize_housekeeping_tasks()

    log("Startup process complete.", Ansi.LGREEN)
    log(
        f"Listening @ {app.settings.APP_HOST}:{app.settings.APP_PORT}",
        Ansi.LMAGENTA,
    )

    yield

    # we want to attempt to gracefully finish any ongoing connections
    # and shut down any of the housekeeping tasks running in the background.
    await app.state.sessions.cancel_housekeeping_tasks()

    # shutdown services

    await app.state.services.http_client.aclose()
    await app.state.services.database.disconnect()
    await app.state.services.redis.aclose()

    # flush after stop so buffered metrics are not lost on shutdown.
    if app.state.services.datadog is not None:
        app.state.services.datadog.stop()
        app.state.services.datadog.flush()
class MetricsMiddleware(BaseHTTPMiddleware):
    """Log each request's method, status, url & wall-clock duration, and
    attach a `process-time` response header (milliseconds)."""

    async def dispatch(
        self,
        request: Request,
        call_next: RequestResponseEndpoint,
    ) -> Response:
        start_time = time.perf_counter_ns()
        response = await call_next(request)
        end_time = time.perf_counter_ns()

        time_elapsed = end_time - start_time  # nanoseconds (int)

        # colour successful (<400) responses green, errors red.
        col = Ansi.LGREEN if response.status_code < 400 else Ansi.LRED

        url = f"{request.headers['host']}{request['path']}"

        log(
            f"[{request.method}] {response.status_code} {url}{Ansi.RESET!r} | {Ansi.LBLUE!r}Request took: {magnitude_fmt_time(time_elapsed)}",
            col,
        )

        # BUG FIX: the original wrote `str(round(time_elapsed) / 1e6)` --
        # `round()` on the already-integral nanosecond count is a no-op
        # (a misplaced parenthesis); the header value itself is unchanged.
        response.headers["process-time"] = str(time_elapsed / 1e6)
        return response
diff --git a/app/api/v1/__init__.py b/app/api/v1/__init__.py new file mode 100644 index 0000000..e4557f6 --- /dev/null +++ b/app/api/v1/__init__.py @@ -0,0 +1,10 @@ +# type: ignore +# isort: dont-add-imports + +from fastapi import APIRouter + +from .api import router + +apiv1_router = APIRouter(tags=["API v1"], prefix="/v1") + +apiv1_router.include_router(router) diff --git a/app/api/v1/api.py b/app/api/v1/api.py new file mode 100644 index 0000000..2fe1db8 --- /dev/null +++ b/app/api/v1/api.py @@ -0,0 +1,1080 @@ +"""api: bancho.py's developer api for interacting with server state""" + +from __future__ import annotations + +import hashlib +import struct +from pathlib import Path as SystemPath +from typing import Literal + +from fastapi import APIRouter +from fastapi import Depends +from fastapi import status +from fastapi.param_functions import Query +from fastapi.responses import ORJSONResponse +from fastapi.responses import Response +from fastapi.security import HTTPAuthorizationCredentials as HTTPCredentials +from fastapi.security import HTTPBearer + +import app.packets +import app.state +import app.usecases.performance +from app.constants import regexes +from app.constants.gamemodes import GameMode +from app.constants.mods import Mods +from app.objects.beatmap import Beatmap +from app.objects.beatmap import ensure_osu_file_is_available +from app.repositories import clans as clans_repo +from app.repositories import scores as scores_repo +from app.repositories import stats as stats_repo +from app.repositories import tourney_pool_maps as tourney_pool_maps_repo +from app.repositories import tourney_pools as tourney_pools_repo +from app.repositories import users as users_repo +from app.usecases.performance import ScoreParams + +AVATARS_PATH = SystemPath.cwd() / ".data/avatars" +BEATMAPS_PATH = SystemPath.cwd() / ".data/osu" +REPLAYS_PATH = SystemPath.cwd() / ".data/osr" +SCREENSHOTS_PATH = SystemPath.cwd() / ".data/ss" + + +router = APIRouter() +oauth2_scheme = 
HTTPBearer(auto_error=False) + +# NOTE: the api is still under design and is subject to change. +# to keep up with breaking changes, please either join our discord, +# or keep up with changes to https://github.com/JKBGL/gulag-api-docs. + +# Unauthorized (no api key required) +# GET /search_players: returns a list of matching users, based on a passed string, sorted by ascending ID. +# GET /get_player_count: return total registered & online player counts. +# GET /get_player_info: return info or stats for a given player. +# GET /get_player_status: return a player's current status, if online. +# GET /get_player_scores: return a list of best or recent scores for a given player. +# GET /get_player_most_played: return a list of maps most played by a given player. +# GET /get_map_info: return information about a given beatmap. +# GET /get_map_scores: return the best scores for a given beatmap & mode. +# GET /get_score_info: return information about a given score. +# GET /get_replay: return the file for a given replay (with or without headers). +# GET /get_match: return information for a given multiplayer match. +# GET /get_leaderboard: return the top players for a given mode & sort condition + +# Authorized (requires valid api key, passed as 'Authorization' header) +# NOTE: authenticated handlers may have privilege requirements. + +# [Normal] +# GET /calculate_pp: calculate & return pp for a given beatmap. +# POST/PUT /set_avatar: Update the tokenholder's avatar to a given file. 
# NOTE(review): looks like the .NET "ticks" epoch offset used when packing
# replay timestamps in api_get_replay -- confirm against that handler.
DATETIME_OFFSET = 0x89F7FF5F7B58000


@router.get("/calculate_pp")
async def api_calculate_pp(
    token: HTTPCredentials = Depends(oauth2_scheme),
    # BUG FIX (here & below): `min=`/`max=` are not FastAPI validation
    # kwargs and were silently ignored; numeric bounds are `ge`/`le`.
    beatmap_id: int = Query(None, alias="id", ge=0, le=2_147_483_647),
    nkatu: int = Query(None, le=2_147_483_647),
    ngeki: int = Query(None, le=2_147_483_647),
    n100: int = Query(None, le=2_147_483_647),
    n50: int = Query(None, le=2_147_483_647),
    misses: int = Query(0, le=2_147_483_647),
    mods: int = Query(0, ge=0, le=2_147_483_647),
    mode: int = Query(0, ge=0, le=11),
    combo: int = Query(None, le=2_147_483_647),
    acclist: list[float] = Query([100, 99, 98, 95], alias="acc"),
) -> Response:
    """Calculates the PP of a specified map with specified score parameters.

    If no judgement counts (ngeki/nkatu/n100/n50) are given, returns a list
    of results, one per accuracy in `acclist`; otherwise a single result.
    """
    if token is None or app.state.sessions.api_keys.get(token.credentials) is None:
        return ORJSONResponse(
            {"status": "Invalid API key."},
            status_code=status.HTTP_401_UNAUTHORIZED,
        )

    beatmap = await Beatmap.from_bid(beatmap_id)
    if not beatmap:
        return ORJSONResponse(
            {"status": "Beatmap not found."},
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    osu_file_available = await ensure_osu_file_is_available(
        beatmap.id,
        expected_md5=beatmap.md5,
    )
    if not osu_file_available:
        return ORJSONResponse(
            {"status": "Beatmap file could not be fetched."},
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    # compute once; also decides the output shape (list vs single object).
    use_acc_list = all(x is None for x in (ngeki, nkatu, n100, n50))

    if use_acc_list:
        scores = [
            ScoreParams(GameMode(mode).as_vanilla, mods, combo, acc, nmiss=misses)
            for acc in acclist
        ]
    else:
        scores = [
            ScoreParams(
                GameMode(mode).as_vanilla,
                mods,
                combo,
                ngeki=ngeki or 0,
                nkatu=nkatu or 0,
                n100=n100 or 0,
                n50=n50 or 0,
                nmiss=misses,
            ),
        ]

    results = app.usecases.performance.calculate_performances(
        str(BEATMAPS_PATH / f"{beatmap.id}.osu"),
        scores,
    )

    # "Inject" the accuracy into the list of results
    final_results = [
        performance_result | {"accuracy": score.acc}
        for performance_result, score in zip(results, scores)
    ]

    return ORJSONResponse(
        # XXX: change the output type based on the inputs from user:
        # a list via the acclist parameter, or a single score via
        # the explicit judgement counts.
        final_results if use_acc_list else final_results[0],
        status_code=status.HTTP_200_OK,
    )


@router.get("/search_players")
async def api_search_players(
    # BUG FIX: string constraints are `min_length`/`max_length`
    # (`min`/`max` were silently ignored).
    search: str | None = Query(None, alias="q", min_length=2, max_length=32),
) -> Response:
    """Search for users on the server by name."""
    rows = await app.state.services.database.fetch_all(
        "SELECT id, name "
        "FROM users "
        "WHERE name LIKE COALESCE(:name, name) "
        "AND priv & 3 = 3 "
        "ORDER BY id ASC",
        {"name": f"%{search}%" if search is not None else None},
    )

    return ORJSONResponse(
        {
            "status": "success",
            "results": len(rows),
            "result": [dict(row) for row in rows],
        },
    )


@router.get("/get_player_count")
async def api_get_player_count() -> Response:
    """Get the current amount of online players."""
    return ORJSONResponse(
        {
            "status": "success",
            "counts": {
                # -1 for the bot, who is always online
                "online": len(app.state.sessions.players.unrestricted) - 1,
                "total": await users_repo.fetch_count(),
            },
        },
    )
@router.get("/get_player_status")
async def api_get_player_status(
    user_id: int | None = Query(None, alias="id", ge=3, le=2_147_483_647),
    username: str | None = Query(None, alias="name", pattern=regexes.USERNAME.pattern),
) -> Response:
    """Return a players current status, if they are online."""
    if username and user_id:
        return ORJSONResponse(
            {"status": "Must provide either id OR name!"},
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    if username:
        session_player = app.state.sessions.players.get(name=username)
    elif user_id:
        session_player = app.state.sessions.players.get(id=user_id)
    else:
        return ORJSONResponse(
            {"status": "Must provide either id OR name!"},
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    if not session_player:
        # no such player online, return their last seen time if they exist in sql
        row = await (
            users_repo.fetch_one(name=username)
            if username
            else users_repo.fetch_one(id=user_id)
        )
        if not row:
            return ORJSONResponse(
                {"status": "Player not found."},
                status_code=status.HTTP_404_NOT_FOUND,
            )

        return ORJSONResponse(
            {
                "status": "success",
                "player_status": {
                    "online": False,
                    "last_seen": row["latest_activity"],
                },
            },
        )

    # player is online; resolve the map they're currently on, if any.
    bmap = (
        await Beatmap.from_md5(session_player.status.map_md5)
        if session_player.status.map_md5
        else None
    )

    return ORJSONResponse(
        {
            "status": "success",
            "player_status": {
                "online": True,
                "login_time": session_player.login_time,
                "status": {
                    "action": int(session_player.status.action),
                    "info_text": session_player.status.info_text,
                    "mode": int(session_player.status.mode),
                    "mods": int(session_player.status.mods),
                    "beatmap": bmap.as_dict if bmap else None,
                },
            },
        },
    )
scores.""" + if mode_arg in ( + GameMode.RELAX_MANIA, + GameMode.AUTOPILOT_CATCH, + GameMode.AUTOPILOT_TAIKO, + GameMode.AUTOPILOT_MANIA, + ): + return ORJSONResponse( + {"status": "Invalid gamemode."}, + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if username and user_id: + return ORJSONResponse( + {"status": "Must provide either id OR name!"}, + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if username: + player = await app.state.sessions.players.from_cache_or_sql(name=username) + elif user_id: + player = await app.state.sessions.players.from_cache_or_sql(id=user_id) + else: + return ORJSONResponse( + {"status": "Must provide either id OR name!"}, + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if not player: + return ORJSONResponse( + {"status": "Player not found."}, + status_code=status.HTTP_404_NOT_FOUND, + ) + + # parse args (scope, mode, mods, limit) + + mode = GameMode(mode_arg) + + strong_equality = True + if mods_arg is not None: + if mods_arg[0] in ("~", "="): # weak/strong equality + strong_equality = mods_arg[0] == "=" + mods_arg = mods_arg[1:] + + if mods_arg.isdecimal(): + # parse from int form + mods = Mods(int(mods_arg)) + else: + # parse from string form + mods = Mods.from_modstr(mods_arg) + else: + mods = None + + # build sql query & fetch info + + query = [ + "SELECT t.id, t.map_md5, t.score, t.pp, t.acc, t.max_combo, " + "t.mods, t.n300, t.n100, t.n50, t.nmiss, t.ngeki, t.nkatu, t.grade, " + "t.status, t.mode, t.play_time, t.time_elapsed, t.perfect " + "FROM scores t " + "INNER JOIN maps b ON t.map_md5 = b.md5 " + "WHERE t.userid = :user_id AND t.mode = :mode", + ] + + params: dict[str, object] = { + "user_id": player.id, + "mode": mode, + } + + if mods is not None: + if strong_equality: + query.append("AND t.mods & :mods = :mods") + else: + query.append("AND t.mods & :mods != 0") + + params["mods"] = mods + + if scope == "best": + allowed_statuses = [2, 3] + + if include_loved: + allowed_statuses.append(5) + + query.append("AND 
t.status = 2 AND b.status IN :statuses") + params["statuses"] = allowed_statuses + sort = "t.pp" + else: + if not include_failed: + query.append("AND t.status != 0") + + sort = "t.play_time" + + query.append(f"ORDER BY {sort} DESC LIMIT :limit") + params["limit"] = limit + + rows = [ + dict(row) + for row in await app.state.services.database.fetch_all(" ".join(query), params) + ] + + # fetch & return info from sql + for row in rows: + bmap = await Beatmap.from_md5(row.pop("map_md5")) + row["beatmap"] = bmap.as_dict if bmap else None + + clan: clans_repo.Clan | None = None + if player.clan_id: + clan = await clans_repo.fetch_one(id=player.clan_id) + + player_info = { + "id": player.id, + "name": player.name, + "clan": ( + { + "id": clan["id"], + "name": clan["name"], + "tag": clan["tag"], + } + if clan is not None + else None + ), + } + + return ORJSONResponse( + { + "status": "success", + "scores": rows, + "player": player_info, + }, + ) + + +@router.get("/get_player_most_played") +async def api_get_player_most_played( + user_id: int | None = Query(None, alias="id", ge=3, le=2_147_483_647), + username: str | None = Query(None, alias="name", pattern=regexes.USERNAME.pattern), + mode_arg: int = Query(0, alias="mode", ge=0, le=11), + limit: int = Query(25, ge=1, le=100), +) -> Response: + """Return the most played beatmaps of a given player.""" + # NOTE: this will almost certainly not scale well, lol. 
+ if mode_arg in ( + GameMode.RELAX_MANIA, + GameMode.AUTOPILOT_CATCH, + GameMode.AUTOPILOT_TAIKO, + GameMode.AUTOPILOT_MANIA, + ): + return ORJSONResponse( + {"status": "Invalid gamemode."}, + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if user_id is not None: + player = await app.state.sessions.players.from_cache_or_sql(id=user_id) + elif username is not None: + player = await app.state.sessions.players.from_cache_or_sql(name=username) + else: + return ORJSONResponse( + {"status": "Must provide either id or name."}, + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if not player: + return ORJSONResponse( + {"status": "Player not found."}, + status_code=status.HTTP_404_NOT_FOUND, + ) + + # parse args (mode, limit) + + mode = GameMode(mode_arg) + + # fetch & return info from sql + rows = await app.state.services.database.fetch_all( + "SELECT m.md5, m.id, m.set_id, m.status, " + "m.artist, m.title, m.version, m.creator, COUNT(*) plays " + "FROM scores s " + "INNER JOIN maps m ON m.md5 = s.map_md5 " + "WHERE s.userid = :user_id " + "AND s.mode = :mode " + "GROUP BY s.map_md5 " + "ORDER BY plays DESC " + "LIMIT :limit", + {"user_id": player.id, "mode": mode, "limit": limit}, + ) + + return ORJSONResponse( + { + "status": "success", + "maps": [dict(row) for row in rows], + }, + ) + + +@router.get("/get_map_info") +async def api_get_map_info( + map_id: int | None = Query(None, alias="id", ge=3, le=2_147_483_647), + md5: str | None = Query(None, alias="md5", min_length=32, max_length=32), +) -> Response: + """Return information about a given beatmap.""" + if map_id is not None: + bmap = await Beatmap.from_bid(map_id) + elif md5 is not None: + bmap = await Beatmap.from_md5(md5) + else: + return ORJSONResponse( + {"status": "Must provide either id or md5!"}, + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if not bmap: + return ORJSONResponse( + {"status": "Map not found."}, + status_code=status.HTTP_404_NOT_FOUND, + ) + + return ORJSONResponse( + { + "status": 
"success", + "map": bmap.as_dict, + }, + ) + + +@router.get("/get_map_scores") +async def api_get_map_scores( + scope: Literal["recent", "best"], + map_id: int | None = Query(None, alias="id", ge=0, le=2_147_483_647), + map_md5: str | None = Query(None, alias="md5", min_length=32, max_length=32), + mods_arg: str | None = Query(None, alias="mods"), + mode_arg: int = Query(0, alias="mode", ge=0, le=11), + limit: int = Query(50, ge=1, le=100), +) -> Response: + """Return the top n scores on a given beatmap.""" + if mode_arg in ( + GameMode.RELAX_MANIA, + GameMode.AUTOPILOT_CATCH, + GameMode.AUTOPILOT_TAIKO, + GameMode.AUTOPILOT_MANIA, + ): + return ORJSONResponse( + {"status": "Invalid gamemode."}, + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if map_id is not None: + bmap = await Beatmap.from_bid(map_id) + elif map_md5 is not None: + bmap = await Beatmap.from_md5(map_md5) + else: + return ORJSONResponse( + {"status": "Must provide either id or md5!"}, + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if not bmap: + return ORJSONResponse( + {"status": "Map not found."}, + status_code=status.HTTP_404_NOT_FOUND, + ) + + # parse args (scope, mode, mods, limit) + + mode = GameMode(mode_arg) + + strong_equality = True + if mods_arg is not None: + if mods_arg[0] in ("~", "="): + strong_equality = mods_arg[0] == "=" + mods_arg = mods_arg[1:] + + if mods_arg.isdecimal(): + # parse from int form + mods = Mods(int(mods_arg)) + else: + # parse from string form + mods = Mods.from_modstr(mods_arg) + else: + mods = None + + query = [ + "SELECT s.map_md5, s.score, s.pp, s.acc, s.max_combo, s.mods, " + "s.n300, s.n100, s.n50, s.nmiss, s.ngeki, s.nkatu, s.grade, s.status, " + "s.mode, s.play_time, s.time_elapsed, s.userid, s.perfect, " + "u.name player_name, u.country player_country, " + "c.id clan_id, c.name clan_name, c.tag clan_tag " + "FROM scores s " + "INNER JOIN users u ON u.id = s.userid " + "LEFT JOIN clans c ON c.id = u.clan_id " + "WHERE s.map_md5 = :map_md5 " + "AND 
@router.get("/get_score_info")
async def api_get_score_info(
    score_id: int = Query(..., alias="id", ge=0, le=9_223_372_036_854_775_807),
) -> Response:
    """Return information about a given score."""
    score = await scores_repo.fetch_one(score_id)

    # success path first; unknown ids fall through to a 404.
    if score is not None:
        return ORJSONResponse({"status": "success", "score": score})

    return ORJSONResponse(
        {"status": "Score not found."},
        status_code=status.HTTP_404_NOT_FOUND,
    )
+ """ + # fetch replay file & make sure it exists + replay_file = REPLAYS_PATH / f"{score_id}.osr" + if not replay_file.exists(): + return ORJSONResponse( + {"status": "Replay not found."}, + status_code=status.HTTP_404_NOT_FOUND, + ) + # read replay frames from file + raw_replay_data = replay_file.read_bytes() + if not include_headers: + return Response( + bytes(raw_replay_data), + media_type="application/octet-stream", + headers={ + "Content-Description": "File Transfer", + # TODO: should we include a Content-Disposition? + }, + ) + # add replay headers from sql + # TODO: osu_version & life graph in scores tables? + row = await app.state.services.database.fetch_one( + "SELECT u.name username, m.md5 map_md5, " + "m.artist, m.title, m.version, " + "s.mode, s.n300, s.n100, s.n50, s.ngeki, " + "s.nkatu, s.nmiss, s.score, s.max_combo, " + "s.perfect, s.mods, s.play_time " + "FROM scores s " + "INNER JOIN users u ON u.id = s.userid " + "INNER JOIN maps m ON m.md5 = s.map_md5 " + "WHERE s.id = :score_id", + {"score_id": score_id}, + ) + if not row: + # score not found in sql + return ORJSONResponse( + {"status": "Score not found."}, + status_code=status.HTTP_404_NOT_FOUND, + ) # but replay was? + # generate the replay's hash + replay_md5 = hashlib.md5( + "{}p{}o{}o{}t{}a{}r{}e{}y{}o{}u{}{}{}".format( + row["n100"] + row["n300"], + row["n50"], + row["ngeki"], + row["nkatu"], + row["nmiss"], + row["map_md5"], + row["max_combo"], + str(row["perfect"] == 1), + row["username"], + row["score"], + 0, # TODO: rank + row["mods"], + "True", # TODO: ?? + ).encode(), + ).hexdigest() + # create a buffer to construct the replay output + replay_data = bytearray() + # pack first section of headers. 
+ replay_data += struct.pack( + " Response: + """Return information of a given multiplayer match.""" + match = app.state.sessions.matches[match_id] + if not match: + return ORJSONResponse( + {"status": "Match not found."}, + status_code=status.HTTP_404_NOT_FOUND, + ) + + return ORJSONResponse( + { + "status": "success", + "match": { + "name": match.name, + "mode": match.mode, + "mods": int(match.mods), + "seed": match.seed, + "host": {"id": match.host.id, "name": match.host.name}, + "refs": [ + {"id": player.id, "name": player.name} for player in match.refs + ], + "in_progress": match.in_progress, + "is_scrimming": match.is_scrimming, + "map": { + "id": match.map_id, + "md5": match.map_md5, + "name": match.map_name, + }, + "active_slots": { + str(idx): { + "loaded": slot.loaded, + "mods": int(slot.mods), + "player": {"id": slot.player.id, "name": slot.player.name}, + "skipped": slot.skipped, + "status": int(slot.status), + "team": int(slot.team), + } + for idx, slot in enumerate(match.slots) + if slot.player + }, + }, + }, + ) + + +@router.get("/get_leaderboard") +async def api_get_global_leaderboard( + sort: Literal["tscore", "rscore", "pp", "acc", "plays", "playtime"] = "pp", + mode_arg: int = Query(0, alias="mode", ge=0, le=11), + limit: int = Query(50, ge=1, le=100), + offset: int = Query(0, min=0, max=2_147_483_647), + country: str | None = Query(None, min_length=2, max_length=2), +) -> Response: + if mode_arg in ( + GameMode.RELAX_MANIA, + GameMode.AUTOPILOT_CATCH, + GameMode.AUTOPILOT_TAIKO, + GameMode.AUTOPILOT_MANIA, + ): + return ORJSONResponse( + {"status": "Invalid gamemode."}, + status_code=status.HTTP_400_BAD_REQUEST, + ) + + mode = GameMode(mode_arg) + + query_conditions = ["s.mode = :mode", "u.priv & 1", f"s.{sort} > 0"] + query_parameters: dict[str, object] = {"mode": mode} + + if country is not None: + query_conditions.append("u.country = :country") + query_parameters["country"] = country + + rows = await app.state.services.database.fetch_all( + 
"SELECT u.id as player_id, u.name, u.country, s.tscore, s.rscore, " + "s.pp, s.plays, s.playtime, s.acc, s.max_combo, " + "s.xh_count, s.x_count, s.sh_count, s.s_count, s.a_count, " + "c.id as clan_id, c.name as clan_name, c.tag as clan_tag " + "FROM stats s " + "LEFT JOIN users u USING (id) " + "LEFT JOIN clans c ON u.clan_id = c.id " + f"WHERE {' AND '.join(query_conditions)} " + f"ORDER BY s.{sort} DESC LIMIT :offset, :limit", + query_parameters | {"offset": offset, "limit": limit}, + ) + + return ORJSONResponse( + {"status": "success", "leaderboard": [dict(row) for row in rows]}, + ) + + +@router.get("/get_clan") +async def api_get_clan( + clan_id: int = Query(..., alias="id", ge=1, le=2_147_483_647), +) -> Response: + """Return information of a given clan.""" + clan = await clans_repo.fetch_one(id=clan_id) + if not clan: + return ORJSONResponse( + {"status": "Clan not found."}, + status_code=status.HTTP_404_NOT_FOUND, + ) + + clan_members = await users_repo.fetch_many(clan_id=clan["id"]) + + owner = await app.state.sessions.players.from_cache_or_sql(id=clan["owner"]) + assert owner is not None + + return ORJSONResponse( + { + "id": clan["id"], + "name": clan["name"], + "tag": clan["tag"], + "members": [ + { + "id": member["id"], + "name": member["name"], + "country": member["country"], + "rank": ("Member", "Officer", "Owner")[member["clan_priv"] - 1], + } + for member in clan_members + ], + "owner": { + "id": owner.id, + "name": owner.name, + "country": owner.geoloc["country"]["acronym"], + "rank": "Owner", + }, + }, + ) + + +@router.get("/get_mappool") +async def api_get_pool( + pool_id: int = Query(..., alias="id", ge=1, le=2_147_483_647), +) -> Response: + """Return information of a given mappool.""" + + tourney_pool = await tourney_pools_repo.fetch_by_id(id=pool_id) + if tourney_pool is None: + return ORJSONResponse( + {"status": "Pool not found."}, + status_code=status.HTTP_404_NOT_FOUND, + ) + + tourney_pool_maps: dict[tuple[int, int], Beatmap] = {} + 
for pool_map in await tourney_pool_maps_repo.fetch_many(pool_id=pool_id): + bmap = await Beatmap.from_bid(pool_map["map_id"]) + if bmap is not None: + tourney_pool_maps[(pool_map["mods"], pool_map["slot"])] = bmap + + pool_creator = app.state.sessions.players.get(id=tourney_pool["created_by"]) + + if pool_creator is None: + return ORJSONResponse( + {"status": "Pool creator not found."}, + status_code=status.HTTP_404_NOT_FOUND, + ) + + pool_creator_clan = ( + await clans_repo.fetch_one(id=pool_creator.clan_id) + if pool_creator.clan_id is not None + else None + ) + pool_creator_clan_members: list[users_repo.User] = [] + if pool_creator_clan is not None: + pool_creator_clan_members = await users_repo.fetch_many( + clan_id=pool_creator.clan_id, + ) + + return ORJSONResponse( + { + "id": tourney_pool["id"], + "name": tourney_pool["name"], + "created_at": tourney_pool["created_at"], + "created_by": { + "id": pool_creator.id, + "name": pool_creator.name, + "country": pool_creator.geoloc["country"]["acronym"], + "clan": ( + { + "id": pool_creator_clan["id"], + "name": pool_creator_clan["name"], + "tag": pool_creator_clan["tag"], + "members": len(pool_creator_clan_members), + } + if pool_creator_clan is not None + else None + ), + "online": pool_creator.is_online, + }, + "maps": { + f"{mods!r}{slot}": { + "id": bmap.id, + "md5": bmap.md5, + "set_id": bmap.set_id, + "artist": bmap.artist, + "title": bmap.title, + "version": bmap.version, + "creator": bmap.creator, + "last_update": bmap.last_update, + "total_length": bmap.total_length, + "max_combo": bmap.max_combo, + "status": bmap.status, + "plays": bmap.plays, + "passes": bmap.passes, + "mode": bmap.mode, + "bpm": bmap.bpm, + "cs": bmap.cs, + "od": bmap.od, + "ar": bmap.ar, + "hp": bmap.hp, + "diff": bmap.diff, + } + for (mods, slot), bmap in tourney_pool_maps.items() + }, + }, + ) + + +# def requires_api_key(f: Callable) -> Callable: +# @wraps(f) +# async def wrapper(conn: Connection) -> HTTPResponse: +# 
conn.resp_headers["Content-Type"] = "application/json" +# if "Authorization" not in conn.headers: +# return (400, JSON({"status": "Must provide authorization token."})) + +# api_key = conn.headers["Authorization"] + +# if api_key not in app.state.sessions.api_keys: +# return (401, JSON({"status": "Unknown authorization token."})) + +# # get player from api token +# player_id = app.state.sessions.api_keys[api_key] +# player = await app.state.sessions.players.from_cache_or_sql(id=player_id) + +# return await f(conn, player) + +# return wrapper + + +# NOTE: `Content-Type = application/json` is applied in the above decorator +# for the following api handlers. + + +# @domain.route("/set_avatar", methods=["POST", "PUT"]) +# @requires_api_key +# async def api_set_avatar(conn: Connection, player: Player) -> HTTPResponse: +# """Update the tokenholder's avatar to a given file.""" +# if "avatar" not in conn.files: +# return (400, JSON({"status": "must provide avatar file."})) + +# ava_file = conn.files["avatar"] + +# # block files over 4MB +# if len(ava_file) > (4 * 1024 * 1024): +# return (400, JSON({"status": "avatar file too large (max 4MB)."})) + +# if ava_file[6:10] in (b"JFIF", b"Exif"): +# ext = "jpeg" +# elif ava_file.startswith(b"\211PNG\r\n\032\n"): +# ext = "png" +# else: +# return (400, JSON({"status": "invalid file type."})) + +# # write to the avatar file +# (AVATARS_PATH / f"{player.id}.{ext}").write_bytes(ava_file) +# return JSON({"status": "success."}) diff --git a/app/api/v2/__init__.py b/app/api/v2/__init__.py new file mode 100644 index 0000000..13faca6 --- /dev/null +++ b/app/api/v2/__init__.py @@ -0,0 +1,15 @@ +# isort: dont-add-imports + +from fastapi import APIRouter + +from . import clans +from . import maps +from . import players +from . 
import scores + +apiv2_router = APIRouter(tags=["API v2"], prefix="/v2") + +apiv2_router.include_router(clans.router) +apiv2_router.include_router(maps.router) +apiv2_router.include_router(players.router) +apiv2_router.include_router(scores.router) diff --git a/app/api/v2/clans.py b/app/api/v2/clans.py new file mode 100644 index 0000000..35fb803 --- /dev/null +++ b/app/api/v2/clans.py @@ -0,0 +1,50 @@ +"""bancho.py's v2 apis for interacting with clans""" + +from __future__ import annotations + +from fastapi import APIRouter +from fastapi import status +from fastapi.param_functions import Query + +from app.api.v2.common import responses +from app.api.v2.common.responses import Failure +from app.api.v2.common.responses import Success +from app.api.v2.models.clans import Clan +from app.repositories import clans as clans_repo + +router = APIRouter() + + +@router.get("/clans") +async def get_clans( + page: int = Query(1, ge=1), + page_size: int = Query(50, ge=1, le=100), +) -> Success[list[Clan]] | Failure: + clans = await clans_repo.fetch_many( + page=page, + page_size=page_size, + ) + total_clans = await clans_repo.fetch_count() + + response = [Clan.from_mapping(rec) for rec in clans] + return responses.success( + content=response, + meta={ + "total": total_clans, + "page": page, + "page_size": page_size, + }, + ) + + +@router.get("/clans/{clan_id}") +async def get_clan(clan_id: int) -> Success[Clan] | Failure: + data = await clans_repo.fetch_one(id=clan_id) + if data is None: + return responses.failure( + message="Clan not found.", + status_code=status.HTTP_404_NOT_FOUND, + ) + + response = Clan.from_mapping(data) + return responses.success(response) diff --git a/app/api/v2/common/json.py b/app/api/v2/common/json.py new file mode 100644 index 0000000..e567991 --- /dev/null +++ b/app/api/v2/common/json.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from typing import Any + +import orjson +from fastapi.responses import JSONResponse +from pydantic import 
BaseModel + + +def _default_processor(data: Any) -> Any: + if isinstance(data, BaseModel): + return _default_processor(data.dict()) + elif isinstance(data, dict): + return {k: _default_processor(v) for k, v in data.items()} + elif isinstance(data, list): + return [_default_processor(v) for v in data] + else: + return data + + +def dumps(data: Any) -> bytes: + return orjson.dumps(data, default=_default_processor) + + +class ORJSONResponse(JSONResponse): + media_type = "application/json" + + def render(self, content: Any) -> bytes: + return dumps(content) diff --git a/app/api/v2/common/responses.py b/app/api/v2/common/responses.py new file mode 100644 index 0000000..42f8364 --- /dev/null +++ b/app/api/v2/common/responses.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from typing import Any +from typing import Generic +from typing import Literal +from typing import TypeVar +from typing import cast + +from pydantic import BaseModel + +from app.api.v2.common import json + +T = TypeVar("T") + + +class Success(BaseModel, Generic[T]): + status: Literal["success"] + data: T + meta: dict[str, Any] + + +def success( + content: T, + status_code: int = 200, + headers: dict[str, Any] | None = None, + meta: dict[str, Any] | None = None, +) -> Success[T]: + if meta is None: + meta = {} + data = {"status": "success", "data": content, "meta": meta} + # XXX:HACK to make typing work + return cast(Success[T], json.ORJSONResponse(data, status_code, headers)) + + +class Failure(BaseModel): + status: Literal["error"] + error: str + + +def failure( + message: str, + status_code: int = 400, + headers: dict[str, Any] | None = None, +) -> Failure: + data = {"status": "error", "error": message} + # XXX:HACK to make typing work + return cast(Failure, json.ORJSONResponse(data, status_code, headers)) diff --git a/app/api/v2/maps.py b/app/api/v2/maps.py new file mode 100644 index 0000000..3489191 --- /dev/null +++ b/app/api/v2/maps.py @@ -0,0 +1,76 @@ +"""bancho.py's v2 apis for 
interacting with maps""" + +from __future__ import annotations + +from fastapi import APIRouter +from fastapi import status +from fastapi.param_functions import Query + +from app.api.v2.common import responses +from app.api.v2.common.responses import Failure +from app.api.v2.common.responses import Success +from app.api.v2.models.maps import Map +from app.repositories import maps as maps_repo + +router = APIRouter() + + +@router.get("/maps") +async def get_maps( + set_id: int | None = None, + server: str | None = None, + status: int | None = None, + artist: str | None = None, + creator: str | None = None, + filename: str | None = None, + mode: int | None = None, + frozen: bool | None = None, + page: int = Query(1, ge=1), + page_size: int = Query(50, ge=1, le=100), +) -> Success[list[Map]] | Failure: + maps = await maps_repo.fetch_many( + server=server, + set_id=set_id, + status=status, + artist=artist, + creator=creator, + filename=filename, + mode=mode, + frozen=frozen, + page=page, + page_size=page_size, + ) + total_maps = await maps_repo.fetch_count( + server=server, + set_id=set_id, + status=status, + artist=artist, + creator=creator, + filename=filename, + mode=mode, + frozen=frozen, + ) + + response = [Map.from_mapping(rec) for rec in maps] + + return responses.success( + content=response, + meta={ + "total": total_maps, + "page": page, + "page_size": page_size, + }, + ) + + +@router.get("/maps/{map_id}") +async def get_map(map_id: int) -> Success[Map] | Failure: + data = await maps_repo.fetch_one(id=map_id) + if data is None: + return responses.failure( + message="Map not found.", + status_code=status.HTTP_404_NOT_FOUND, + ) + + response = Map.from_mapping(data) + return responses.success(response) diff --git a/app/api/v2/models/__init__.py b/app/api/v2/models/__init__.py new file mode 100644 index 0000000..d7c2bcf --- /dev/null +++ b/app/api/v2/models/__init__.py @@ -0,0 +1,18 @@ +# isort: dont-add-imports + +from collections.abc import Mapping +from typing 
import Any +from typing import TypeVar + +from pydantic import BaseModel as _pydantic_BaseModel +from pydantic import ConfigDict + +T = TypeVar("T", bound="BaseModel") + + +class BaseModel(_pydantic_BaseModel): + model_config = ConfigDict(str_strip_whitespace=True) + + @classmethod + def from_mapping(cls: type[T], mapping: Mapping[str, Any]) -> T: + return cls(**{k: mapping[k] for k in cls.model_fields}) diff --git a/app/api/v2/models/clans.py b/app/api/v2/models/clans.py new file mode 100644 index 0000000..ebadb39 --- /dev/null +++ b/app/api/v2/models/clans.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from datetime import datetime + +from . import BaseModel + +# input models + + +# output models + + +class Clan(BaseModel): + id: int + name: str + tag: str + owner: int + created_at: datetime diff --git a/app/api/v2/models/maps.py b/app/api/v2/models/maps.py new file mode 100644 index 0000000..36d3b2a --- /dev/null +++ b/app/api/v2/models/maps.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from datetime import datetime + +from . import BaseModel + +# input models + + +# output models + + +class Map(BaseModel): + id: int + server: str + set_id: int + status: int + md5: str + artist: str + title: str + version: str + creator: str + filename: str + last_update: datetime + total_length: int + max_combo: int + frozen: bool + plays: int + passes: int + mode: int + bpm: float + cs: float + ar: float + od: float + hp: float + diff: float diff --git a/app/api/v2/models/players.py b/app/api/v2/models/players.py new file mode 100644 index 0000000..3eb9cc6 --- /dev/null +++ b/app/api/v2/models/players.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +from . 
import BaseModel + +# input models + + +# output models + + +class Player(BaseModel): + id: int + name: str + safe_name: str + + priv: int + country: str + silence_end: int + donor_end: int + creation_time: int + latest_activity: int + + clan_id: int + clan_priv: int + + preferred_mode: int + play_style: int + + custom_badge_name: str | None + custom_badge_icon: str | None + + userpage_content: str | None + + +class PlayerStatus(BaseModel): + login_time: int + action: int + info_text: str + mode: int + mods: int + beatmap_id: int + + +class PlayerStats(BaseModel): + id: int + mode: int + tscore: int + rscore: int + pp: float + plays: int + playtime: int + acc: float + max_combo: int + total_hits: int + replay_views: int + xh_count: int + x_count: int + sh_count: int + s_count: int + a_count: int diff --git a/app/api/v2/models/scores.py b/app/api/v2/models/scores.py new file mode 100644 index 0000000..7ccfe21 --- /dev/null +++ b/app/api/v2/models/scores.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from datetime import datetime + +from . 
import BaseModel + +# input models + + +# output models + + +class Score(BaseModel): + id: int + map_md5: str + userid: int + + score: int + pp: float + acc: float + max_combo: int + mods: int + + n300: int + n100: int + n50: int + nmiss: int + nkatu: int + + grade: str + status: int + mode: int + + play_time: datetime + time_elapsed: int + perfect: bool diff --git a/app/api/v2/players.py b/app/api/v2/players.py new file mode 100644 index 0000000..a145715 --- /dev/null +++ b/app/api/v2/players.py @@ -0,0 +1,137 @@ +"""bancho.py's v2 apis for interacting with players""" + +from __future__ import annotations + +from fastapi import APIRouter +from fastapi import status +from fastapi.param_functions import Query + +import app.state.sessions +from app.api.v2.common import responses +from app.api.v2.common.responses import Failure +from app.api.v2.common.responses import Success +from app.api.v2.models.players import Player +from app.api.v2.models.players import PlayerStats +from app.api.v2.models.players import PlayerStatus +from app.repositories import stats as stats_repo +from app.repositories import users as users_repo + +router = APIRouter() + + +@router.get("/players") +async def get_players( + priv: int | None = None, + country: str | None = None, + clan_id: int | None = None, + clan_priv: int | None = None, + preferred_mode: int | None = None, + play_style: int | None = None, + page: int = Query(1, ge=1), + page_size: int = Query(50, ge=1, le=100), +) -> Success[list[Player]] | Failure: + players = await users_repo.fetch_many( + priv=priv, + country=country, + clan_id=clan_id, + clan_priv=clan_priv, + preferred_mode=preferred_mode, + play_style=play_style, + page=page, + page_size=page_size, + ) + total_players = await users_repo.fetch_count( + priv=priv, + country=country, + clan_id=clan_id, + clan_priv=clan_priv, + preferred_mode=preferred_mode, + play_style=play_style, + ) + + response = [Player.from_mapping(rec) for rec in players] + + return 
responses.success( + content=response, + meta={ + "total": total_players, + "page": page, + "page_size": page_size, + }, + ) + + +@router.get("/players/{player_id}") +async def get_player(player_id: int) -> Success[Player] | Failure: + data = await users_repo.fetch_one(id=player_id) + if data is None: + return responses.failure( + message="Player not found.", + status_code=status.HTTP_404_NOT_FOUND, + ) + + response = Player.from_mapping(data) + return responses.success(response) + + +@router.get("/players/{player_id}/status") +async def get_player_status(player_id: int) -> Success[PlayerStatus] | Failure: + player = app.state.sessions.players.get(id=player_id) + + if not player: + return responses.failure( + message="Player status not found.", + status_code=status.HTTP_404_NOT_FOUND, + ) + + response = PlayerStatus( + login_time=int(player.login_time), + action=int(player.status.action), + info_text=player.status.info_text, + mode=int(player.status.mode), + mods=int(player.status.mods), + beatmap_id=player.status.map_id, + ) + return responses.success(response) + + +@router.get("/players/{player_id}/stats/{mode}") +async def get_player_mode_stats( + player_id: int, + mode: int, +) -> Success[PlayerStats] | Failure: + data = await stats_repo.fetch_one(player_id, mode) + if data is None: + return responses.failure( + message="Player stats not found.", + status_code=status.HTTP_404_NOT_FOUND, + ) + + response = PlayerStats.from_mapping(data) + return responses.success(response) + + +@router.get("/players/{player_id}/stats") +async def get_player_stats( + player_id: int, + page: int = Query(1, ge=1), + page_size: int = Query(50, ge=1, le=100), +) -> Success[list[PlayerStats]] | Failure: + data = await stats_repo.fetch_many( + player_id=player_id, + page=page, + page_size=page_size, + ) + total_stats = await stats_repo.fetch_count( + player_id=player_id, + ) + + response = [PlayerStats.from_mapping(rec) for rec in data] + return responses.success( + response, + meta={ 
+ "total": total_stats, + "page": page, + "page_size": page_size, + }, + ) diff --git a/app/api/v2/scores.py b/app/api/v2/scores.py new file mode 100644 index 0000000..b2d056a --- /dev/null +++ b/app/api/v2/scores.py @@ -0,0 +1,67 @@ +"""bancho.py's v2 apis for interacting with scores""" + +from __future__ import annotations + +from fastapi import APIRouter +from fastapi import status +from fastapi.param_functions import Query + +from app.api.v2.common import responses +from app.api.v2.common.responses import Failure +from app.api.v2.common.responses import Success +from app.api.v2.models.scores import Score +from app.repositories import scores as scores_repo + +router = APIRouter() + + +@router.get("/scores") +async def get_all_scores( + map_md5: str | None = None, + mods: int | None = None, + status: int | None = None, + mode: int | None = None, + user_id: int | None = None, + page: int = Query(1, ge=1), + page_size: int = Query(50, ge=1, le=100), +) -> Success[list[Score]] | Failure: + scores = await scores_repo.fetch_many( + map_md5=map_md5, + mods=mods, + status=status, + mode=mode, + user_id=user_id, + page=page, + page_size=page_size, + ) + total_scores = await scores_repo.fetch_count( + map_md5=map_md5, + mods=mods, + status=status, + mode=mode, + user_id=user_id, + ) + + response = [Score.from_mapping(rec) for rec in scores] + + return responses.success( + content=response, + meta={ + "total": total_scores, + "page": page, + "page_size": page_size, + }, + ) + + +@router.get("/scores/{score_id}") +async def get_score(score_id: int) -> Success[Score] | Failure: + data = await scores_repo.fetch_one(id=score_id) + if data is None: + return responses.failure( + message="Score not found.", + status_code=status.HTTP_404_NOT_FOUND, + ) + + response = Score.from_mapping(data) + return responses.success(response) diff --git a/app/bg_loops.py b/app/bg_loops.py new file mode 100644 index 0000000..02f4a8a --- /dev/null +++ b/app/bg_loops.py @@ -0,0 +1,89 @@ +from 
__future__ import annotations + +import asyncio +import time + +import app.packets +import app.settings +import app.state +from app.constants.privileges import Privileges +from app.logging import Ansi +from app.logging import log + +OSU_CLIENT_MIN_PING_INTERVAL = 300000 // 1000 # defined by osu! + + +async def initialize_housekeeping_tasks() -> None: + """Create tasks for each housekeeping tasks.""" + log("Initializing housekeeping tasks.", Ansi.LCYAN) + + loop = asyncio.get_running_loop() + + app.state.sessions.housekeeping_tasks.update( + { + loop.create_task(task) + for task in ( + _remove_expired_donation_privileges(interval=30 * 60), + _update_bot_status(interval=5 * 60), + _disconnect_ghosts(interval=OSU_CLIENT_MIN_PING_INTERVAL // 3), + ) + }, + ) + + +async def _remove_expired_donation_privileges(interval: int) -> None: + """Remove donation privileges from users with expired sessions.""" + while True: + if app.settings.DEBUG: + log("Removing expired donation privileges.", Ansi.LMAGENTA) + + expired_donors = await app.state.services.database.fetch_all( + "SELECT id FROM users " + "WHERE donor_end <= UNIX_TIMESTAMP() " + "AND priv & :donor_priv", + {"donor_priv": Privileges.DONATOR.value}, + ) + + for expired_donor in expired_donors: + player = await app.state.sessions.players.from_cache_or_sql( + id=expired_donor["id"], + ) + + assert player is not None + + # TODO: perhaps make a `revoke_donor` method? + await player.remove_privs(Privileges.DONATOR) + player.donor_end = 0 + await app.state.services.database.execute( + "UPDATE users SET donor_end = 0 WHERE id = :id", + {"id": player.id}, + ) + + if player.is_online: + player.enqueue( + app.packets.notification("Your supporter status has expired."), + ) + + log(f"{player}'s supporter status has expired.", Ansi.LMAGENTA) + + await asyncio.sleep(interval) + + +async def _disconnect_ghosts(interval: int) -> None: + """Actively disconnect users above the + disconnection time threshold on the osu! 
server.""" + while True: + await asyncio.sleep(interval) + current_time = time.time() + + for player in app.state.sessions.players: + if current_time - player.last_recv_time > OSU_CLIENT_MIN_PING_INTERVAL: + log(f"Auto-dced {player}.", Ansi.LMAGENTA) + player.logout() + + +async def _update_bot_status(interval: int) -> None: + """Re roll the bot status, every `interval`.""" + while True: + await asyncio.sleep(interval) + app.packets.bot_stats.cache_clear() diff --git a/app/commands.py b/app/commands.py new file mode 100644 index 0000000..7c3db52 --- /dev/null +++ b/app/commands.py @@ -0,0 +1,2533 @@ +from __future__ import annotations + +import importlib.metadata +import os +import pprint +import random +import secrets +import signal +import time +import traceback +import uuid +from collections.abc import Awaitable +from collections.abc import Callable +from collections.abc import Mapping +from collections.abc import Sequence +from dataclasses import dataclass +from datetime import datetime +from datetime import timedelta +from functools import wraps +from pathlib import Path +from time import perf_counter_ns as clock_ns +from typing import TYPE_CHECKING +from typing import Any +from typing import NamedTuple +from typing import NoReturn +from typing import Optional +from typing import TypedDict +from urllib.parse import urlparse + +import cpuinfo +import psutil +import timeago +from pytimeparse.timeparse import timeparse + +import app.logging +import app.packets +import app.settings +import app.state +import app.usecases.performance +import app.utils +from app.constants import regexes +from app.constants.gamemodes import GAMEMODE_REPR_LIST +from app.constants.mods import SPEED_CHANGING_MODS +from app.constants.mods import Mods +from app.constants.privileges import ClanPrivileges +from app.constants.privileges import Privileges +from app.logging import Ansi +from app.logging import log +from app.objects.beatmap import Beatmap +from app.objects.beatmap import 
RankedStatus +from app.objects.beatmap import ensure_osu_file_is_available +from app.objects.match import Match +from app.objects.match import MatchTeams +from app.objects.match import MatchTeamTypes +from app.objects.match import MatchWinConditions +from app.objects.match import SlotStatus +from app.objects.player import Player +from app.objects.score import SubmissionStatus +from app.repositories import clans as clans_repo +from app.repositories import logs as logs_repo +from app.repositories import map_requests as map_requests_repo +from app.repositories import maps as maps_repo +from app.repositories import tourney_pool_maps as tourney_pool_maps_repo +from app.repositories import tourney_pools as tourney_pools_repo +from app.repositories import users as users_repo +from app.usecases.performance import ScoreParams + +if TYPE_CHECKING: + from app.objects.channel import Channel + + +BEATMAPS_PATH = Path.cwd() / ".data/osu" + + +@dataclass +class Context: + player: Player + trigger: str + args: Sequence[str] + + recipient: Channel | Player + + +Callback = Callable[[Context], Awaitable[Optional[str]]] + + +class Command(NamedTuple): + triggers: list[str] + callback: Callback + priv: Privileges + hidden: bool + doc: str | None + + +class CommandSet: + def __init__(self, trigger: str, doc: str) -> None: + self.trigger = trigger + self.doc = doc + + self.commands: list[Command] = [] + + def add( + self, + priv: Privileges, + aliases: list[str] = [], + hidden: bool = False, + ) -> Callable[[Callback], Callback]: + def wrapper(f: Callback) -> Callback: + self.commands.append( + Command( + # NOTE: this method assumes that functions without any + # triggers will be named like '{self.trigger}_{trigger}'. 
+ triggers=( + [f.__name__.removeprefix(f"{self.trigger}_").strip()] + aliases + ), + callback=f, + priv=priv, + hidden=hidden, + doc=f.__doc__, + ), + ) + + return f + + return wrapper + + +mp_commands = CommandSet("mp", "Multiplayer commands.") +pool_commands = CommandSet("pool", "Mappool commands.") +clan_commands = CommandSet("clan", "Clan commands.") + +regular_commands = [] +command_sets = [ + mp_commands, + pool_commands, + clan_commands, +] + + +def command( + priv: Privileges, + aliases: list[str] = [], + hidden: bool = False, +) -> Callable[[Callback], Callback]: + def wrapper(f: Callback) -> Callback: + regular_commands.append( + Command( + callback=f, + priv=priv, + hidden=hidden, + triggers=[f.__name__.strip("_")] + aliases, + doc=f.__doc__, + ), + ) + + return f + + return wrapper + + +""" User commands +# The commands below are not considered dangerous, +# and are granted to any unbanned players. +""" + + +@command(Privileges.UNRESTRICTED, aliases=["", "h"], hidden=True) +async def _help(ctx: Context) -> str | None: + """Show all documented commands the player can access.""" + prefix = app.settings.COMMAND_PREFIX + l = ["Individual commands", "-----------"] + + for cmd in regular_commands: + if not cmd.doc or ctx.player.priv & cmd.priv != cmd.priv: + # no doc, or insufficient permissions. + continue + + l.append(f"{prefix}{cmd.triggers[0]}: {cmd.doc}") + + l.append("") # newline + l.extend(["Command sets", "-----------"]) + + for cmd_set in command_sets: + l.append(f"{prefix}{cmd_set.trigger}: {cmd_set.doc}") + + return "\n".join(l) + + +@command(Privileges.UNRESTRICTED) +async def roll(ctx: Context) -> str | None: + """Roll an n-sided die where n is the number you write (100 default).""" + if ctx.args and ctx.args[0].isdecimal(): + max_roll = min(int(ctx.args[0]), 0x7FFF) + else: + max_roll = 100 + + if max_roll == 0: + return "Roll what?" + + points = random.randrange(0, max_roll) + return f"{ctx.player.name} rolls {points} points!" 
+ + +@command(Privileges.UNRESTRICTED, hidden=True) +async def block(ctx: Context) -> str | None: + """Block another user from communicating with you.""" + target = await app.state.sessions.players.from_cache_or_sql(name=" ".join(ctx.args)) + + if not target: + return "User not found." + + if target is app.state.sessions.bot or target is ctx.player: + return "What?" + + if target.id in ctx.player.blocks: + return f"{target.name} already blocked!" + + if target.id in ctx.player.friends: + ctx.player.friends.remove(target.id) + + await ctx.player.add_block(target) + return f"Added {target.name} to blocked users." + + +@command(Privileges.UNRESTRICTED, hidden=True) +async def unblock(ctx: Context) -> str | None: + """Unblock another user from communicating with you.""" + target = await app.state.sessions.players.from_cache_or_sql(name=" ".join(ctx.args)) + + if not target: + return "User not found." + + if target is app.state.sessions.bot or target is ctx.player: + return "What?" + + if target.id not in ctx.player.blocks: + return f"{target.name} not blocked!" + + await ctx.player.remove_block(target) + return f"Removed {target.name} from blocked users." + + +@command(Privileges.UNRESTRICTED) +async def reconnect(ctx: Context) -> str | None: + """Disconnect and reconnect a given player (or self) to the server.""" + if ctx.args: + # !reconnect + if not ctx.player.priv & Privileges.ADMINISTRATOR: + return None # requires admin + + target = app.state.sessions.players.get(name=" ".join(ctx.args)) + if not target: + return "Player not found" + else: + # !reconnect + target = ctx.player + + target.logout() + + return None + + +@command(Privileges.SUPPORTER) +async def changename(ctx: Context) -> str | None: + """Change your username.""" + name = " ".join(ctx.args).strip() + + if not regexes.USERNAME.match(name): + return "Must be 2-15 characters in length." + + if "_" in name and " " in name: + return 'May contain "_" and " ", but not both.' 
+ + if name in app.settings.DISALLOWED_NAMES: + return "Disallowed username; pick another." + + if await users_repo.fetch_one(name=name): + return "Username already taken by another player." + + # all checks passed, update their name + await users_repo.partial_update(ctx.player.id, name=name) + + ctx.player.enqueue( + app.packets.notification(f"Your username has been changed to {name}!"), + ) + ctx.player.logout() + + return None + + +@command(Privileges.UNRESTRICTED, aliases=["bloodcat", "beatconnect", "chimu", "q"]) +async def maplink(ctx: Context) -> str | None: + """Return a download link to the user's current map (situation dependant).""" + bmap = None + + # priority: multiplayer -> spectator -> last np + match = ctx.player.match + spectating = ctx.player.spectating + + if match and match.map_id: + bmap = await Beatmap.from_md5(match.map_md5) + elif spectating and spectating.status.map_id: + bmap = await Beatmap.from_md5(spectating.status.map_md5) + elif ctx.player.last_np is not None and time.time() < ctx.player.last_np["timeout"]: + bmap = ctx.player.last_np["bmap"] + + if bmap is None: + return "No map found!" + + return f"[{app.settings.MIRROR_DOWNLOAD_ENDPOINT}/{bmap.set_id} {bmap.full_name}]" + + +@command(Privileges.UNRESTRICTED, aliases=["last", "r"]) +async def recent(ctx: Context) -> str | None: + """Show information about a player's most recent score.""" + if ctx.args: + target = app.state.sessions.players.get(name=" ".join(ctx.args)) + if not target: + return "Player not found." + else: + target = ctx.player + + score = target.recent_score + if not score: + return "No scores found (only saves per play session)." + + if score.bmap is None: + return "We don't have a beatmap on file for your recent score." 
+ + l = [f"[{score.mode!r}] {score.bmap.embed}", f"{score.acc:.2f}%"] + + if score.mods: + l.insert(1, f"+{score.mods!r}") + + l = [" ".join(l)] + + if score.passed: + rank = score.rank if score.status == SubmissionStatus.BEST else "NA" + l.append(f"PASS {{{score.pp:.2f}pp #{rank}}}") + else: + # XXX: prior to v3.2.0, bancho.py didn't parse total_length from + # the osu!api, and thus this can do some zerodivision moments. + # this can probably be removed in the future, or better yet + # replaced with a better system to fix the maps. + if score.bmap.total_length != 0: + completion = score.time_elapsed / (score.bmap.total_length * 1000) + l.append(f"FAIL {{{completion * 100:.2f}% complete}})") + else: + l.append("FAIL") + + return " | ".join(l) + + +TOP_SCORE_FMTSTR = "{idx}. ({pp:.2f}pp) [https://osu.{domain}/b/{map_id} {artist} - {title} [{version}]]" + + +@command(Privileges.UNRESTRICTED, hidden=True) +async def top(ctx: Context) -> str | None: + """Show information about a player's top 10 scores.""" + # !top (player) + args_len = len(ctx.args) + if args_len not in (1, 2): + return "Invalid syntax: !top (player)" + + if ctx.args[0] not in GAMEMODE_REPR_LIST: + return f'Valid gamemodes: {", ".join(GAMEMODE_REPR_LIST)}.' + + if ctx.args[0] in ( + "rx!mania", + "ap!taiko", + "ap!catch", + "ap!mania", + ): + return "Impossible gamemode combination." + + if args_len == 2: + if not regexes.USERNAME.match(ctx.args[1]): + return "Invalid username." + + # specific player provided + player = app.state.sessions.players.get(name=ctx.args[1]) + if not player: + return "Player not found." 
+ else: + # no player provided, use self + player = ctx.player + + # !top rx!std + mode = GAMEMODE_REPR_LIST.index(ctx.args[0]) + + scores = await app.state.services.database.fetch_all( + "SELECT s.pp, b.artist, b.title, b.version, b.set_id map_set_id, b.id map_id " + "FROM scores s " + "LEFT JOIN maps b ON b.md5 = s.map_md5 " + "WHERE s.userid = :user_id " + "AND s.mode = :mode " + "AND s.status = 2 " + "AND b.status in (2, 3) " + "ORDER BY s.pp DESC LIMIT 10", + {"user_id": player.id, "mode": mode}, + ) + if not scores: + return "No scores" + + return "\n".join( + [f"Top 10 scores for {player.embed} ({ctx.args[0]})."] + + [ + TOP_SCORE_FMTSTR.format(idx=idx + 1, domain=app.settings.DOMAIN, **s) + for idx, s in enumerate(scores) + ], + ) + + +class ParsingError(str): ... + + +def parse__with__command_args( + mode: int, + args: Sequence[str], +) -> Mapping[str, Any] | ParsingError: + """Parse arguments for the !with command.""" + + if not args or len(args) > 4: + return ParsingError("Invalid syntax: !with ") + + # !with 95% 1m 429x hddt + acc = mods = combo = nmiss = None + + # parse acc, misses, combo and mods from arguments. + # tried to balance complexity vs correctness here + for arg in (str.lower(arg) for arg in args): + # mandatory suffix, combo & nmiss + if combo is None and arg.endswith("x") and arg[:-1].isdecimal(): + combo = int(arg[:-1]) + # if combo > bmap.max_combo: + # return "Invalid combo." + elif nmiss is None and arg.endswith("m") and arg[:-1].isdecimal(): + nmiss = int(arg[:-1]) + # TODO: store nobjects? + # if nmiss > bmap.combo: + # return "Invalid misscount." 
+ else: + # optional prefix/suffix, mods & accuracy + arg_stripped = arg.removeprefix("+").removesuffix("%") + if mods is None and arg_stripped.isalpha() and len(arg_stripped) % 2 == 0: + mods = Mods.from_modstr(arg_stripped) + mods = mods.filter_invalid_combos(mode) + elif acc is None and arg_stripped.replace(".", "", 1).isdecimal(): + acc = float(arg_stripped) + if not 0 <= acc <= 100: + return ParsingError("Invalid accuracy.") + else: + return ParsingError(f"Unknown argument: {arg}") + + return { + "acc": acc, + "mods": mods, + "combo": combo, + "nmiss": nmiss, + } + + +@command(Privileges.UNRESTRICTED, aliases=["w"], hidden=True) +async def _with(ctx: Context) -> str | None: + """Specify custom accuracy & mod combinations with `/np`.""" + if ctx.recipient is not app.state.sessions.bot: + return "This command can only be used in DM with bot." + + if ctx.player.last_np is None or time.time() >= ctx.player.last_np["timeout"]: + return "Please /np a map first!" + + bmap: Beatmap = ctx.player.last_np["bmap"] + + osu_file_available = await ensure_osu_file_is_available( + bmap.id, + expected_md5=bmap.md5, + ) + if not osu_file_available: + return "Mapfile could not be found; this incident has been reported." 
+ + mode_vn = ctx.player.last_np["mode_vn"] + + command_args = parse__with__command_args(mode_vn, ctx.args) + if isinstance(command_args, ParsingError): + return str(command_args) + + msg_fields = [] + + score_args = ScoreParams(mode=mode_vn) + + mods = command_args["mods"] + if mods is not None: + score_args.mods = mods + msg_fields.append(f"{mods!r}") + + nmiss = command_args["nmiss"] + if nmiss: + score_args.nmiss = nmiss + msg_fields.append(f"{nmiss}m") + + combo = command_args["combo"] + if combo is not None: + score_args.combo = combo + msg_fields.append(f"{combo}x") + + acc = command_args["acc"] + if acc is not None: + score_args.acc = acc + msg_fields.append(f"{acc:.2f}%") + + result = app.usecases.performance.calculate_performances( + osu_file_path=str(BEATMAPS_PATH / f"{bmap.id}.osu"), + scores=[score_args], # calculate one score + ) + + return "{msg}: {pp:.2f}pp ({stars:.2f}*)".format( + msg=" ".join(msg_fields), + pp=result[0]["performance"]["pp"], + stars=result[0]["difficulty"]["stars"], # (first score result) + ) + + +@command(Privileges.UNRESTRICTED, aliases=["req"]) +async def request(ctx: Context) -> str | None: + """Request a beatmap for nomination.""" + if ctx.args: + return "Invalid syntax: !request" + + if ctx.player.last_np is None or time.time() >= ctx.player.last_np["timeout"]: + return "Please /np a map first!" + + bmap = ctx.player.last_np["bmap"] + + if bmap.status != RankedStatus.Pending: + return "Only pending maps may be requested for status change." + + map_requests = await map_requests_repo.fetch_all( + map_id=bmap.id, + player_id=ctx.player.id, + active=True, + ) + if map_requests: + return "You already have an active nomination request for that map." + + await map_requests_repo.create(map_id=bmap.id, player_id=ctx.player.id, active=True) + + return "Request submitted." 
+ + +@command(Privileges.UNRESTRICTED) +async def apikey(ctx: Context) -> str | None: + """Generate a new api key & assign it to the player.""" + if ctx.recipient is not app.state.sessions.bot: + return f"Command only available in DMs with {app.state.sessions.bot.name}." + + # remove old token + if ctx.player.api_key: + app.state.sessions.api_keys.pop(ctx.player.api_key) + + # generate new token + ctx.player.api_key = str(uuid.uuid4()) + + await users_repo.partial_update(ctx.player.id, api_key=ctx.player.api_key) + app.state.sessions.api_keys[ctx.player.api_key] = ctx.player.id + + return f"API key generated. Copy your api key from (this url)[http://{ctx.player.api_key}]." + + +""" Nominator commands +# The commands below allow users to +# manage the server's state of beatmaps. +""" + + +@command(Privileges.NOMINATOR, aliases=["reqs"], hidden=True) +async def requests(ctx: Context) -> str | None: + """Check the nomination request queue.""" + if ctx.args: + return "Invalid syntax: !requests" + + rows = await map_requests_repo.fetch_all(active=True) + + if not rows: + return "The queue is clean! (0 map request(s))" + + # group rows into {map_id: [map_request, ...]} + grouped: dict[int, list[map_requests_repo.MapRequest]] = {} + for row in rows: + if row["map_id"] not in grouped: + grouped[row["map_id"]] = [] + grouped[row["map_id"]].append(row) + + if not grouped: + return "The queue is clean! 
(0 map request(s))" + + l = [f"Total requested beatmaps: {len(grouped)}"] + for map_id, reviews in grouped.items(): + assert len(reviews) != 0 + + bmap = await Beatmap.from_bid(map_id) + if not bmap: + log(f"Failed to find requested map ({map_id})?", Ansi.LYELLOW) + continue + + first_review = min(reviews, key=lambda r: r["datetime"]) + + l.append( + f"{len(reviews)}x request(s) starting {first_review['datetime']:%Y-%m-%d}: {bmap.embed}", + ) + + return "\n".join(l) + + +_status_str_to_int_map = {"unrank": 0, "rank": 2, "love": 5} + + +def status_to_id(s: str) -> int: + return _status_str_to_int_map[s] + + +@command(Privileges.NOMINATOR) +async def _map(ctx: Context) -> str | None: + """Changes the ranked status of the most recently /np'ed map.""" + if ( + len(ctx.args) != 2 + or ctx.args[0] not in ("rank", "unrank", "love") + or ctx.args[1] not in ("set", "map") + ): + return "Invalid syntax: !map " + + if ctx.player.last_np is None or time.time() >= ctx.player.last_np["timeout"]: + return "Please /np a map first!" + + bmap = ctx.player.last_np["bmap"] + new_status = RankedStatus(status_to_id(ctx.args[0])) + + if ctx.args[1] == "map": + if bmap.status == new_status: + return f"{bmap.embed} is already {new_status!s}!" + else: # ctx.args[1] == "set" + if all(map.status == new_status for map in bmap.set.maps): + return f"All maps from the set are already {new_status!s}!" + + # update sql & cache based on scope + # XXX: not sure if getting md5s from sql + # for updating cache would be faster? + # surely this will not scale as well... 
+ + async with app.state.services.database.transaction(): + if ctx.args[1] == "set": + # update all maps in the set + for _bmap in bmap.set.maps: + await maps_repo.partial_update(_bmap.id, status=new_status, frozen=True) + + # make sure cache and db are synced about the newest change + for _bmap in app.state.cache.beatmapset[bmap.set_id].maps: + _bmap.status = new_status + _bmap.frozen = True + + # select all map ids for clearing map requests. + modified_beatmap_ids = [ + row["id"] + for row in await maps_repo.fetch_many( + set_id=bmap.set_id, + ) + ] + + else: + # update only map + await maps_repo.partial_update(bmap.id, status=new_status, frozen=True) + + # make sure cache and db are synced about the newest change + if bmap.md5 in app.state.cache.beatmap: + app.state.cache.beatmap[bmap.md5].status = new_status + app.state.cache.beatmap[bmap.md5].frozen = True + + modified_beatmap_ids = [bmap.id] + + # deactivate rank requests for all ids + await map_requests_repo.mark_batch_as_inactive(map_ids=modified_beatmap_ids) + + return f"{bmap.embed} updated to {new_status!s}." + + +""" Mod commands +# The commands below are somewhat dangerous, +# and are generally for managing players. +""" + +ACTION_STRINGS = { + "restrict": "Restricted for", + "unrestrict": "Unrestricted for", + "silence": "Silenced for", + "unsilence": "Unsilenced for", + "note": "Note added:", +} + + +@command(Privileges.MODERATOR, hidden=True) +async def notes(ctx: Context) -> str | None: + """Retrieve the logs of a specified player by name.""" + if len(ctx.args) != 2 or not ctx.args[1].isdecimal(): + return "Invalid syntax: !notes " + + target = await app.state.sessions.players.from_cache_or_sql(name=ctx.args[0]) + if not target: + return f'"{ctx.args[0]}" not found.' + + days = int(ctx.args[1]) + + if days > 365: + return "Please contact a developer to fetch >365 day old information." 
+ elif days <= 0: + return "Invalid syntax: !notes " + + res = await app.state.services.database.fetch_all( + "SELECT `action`, `msg`, `time`, `from` " + "FROM `logs` WHERE `to` = :to " + "AND UNIX_TIMESTAMP(`time`) >= UNIX_TIMESTAMP(NOW()) - :seconds " + "ORDER BY `time` ASC", + {"to": target.id, "seconds": days * 86400}, + ) + + if not res: + return f"No notes found on {target} in the past {days} days." + + notes = [] + for row in res: + logger = await app.state.sessions.players.from_cache_or_sql(id=row["from"]) + if not logger: + continue + + action_str = ACTION_STRINGS.get(row["action"], "Unknown action:") + time_str = row["time"] + note = row["msg"] + + notes.append(f"[{time_str}] {action_str} {note} by {logger.name}") + + return "\n".join(notes) + + +@command(Privileges.MODERATOR, hidden=True) +async def addnote(ctx: Context) -> str | None: + """Add a note to a specified player by name.""" + if len(ctx.args) < 2: + return "Invalid syntax: !addnote " + + target = await app.state.sessions.players.from_cache_or_sql(name=ctx.args[0]) + if not target: + return f'"{ctx.args[0]}" not found.' + + await logs_repo.create( + _from=ctx.player.id, + to=target.id, + action="note", + msg=" ".join(ctx.args[1:]), + ) + + return f"Added note to {target}." + + +# some shorthands that can be used as +# reasons in many moderative commands. +SHORTHAND_REASONS = { + "aa": "having their appeal accepted", + "cc": "using a modified osu! client", + "3p": "using 3rd party programs", + "rx": "using 3rd party programs (relax)", + "tw": "using 3rd party programs (timewarp)", + "au": "using 3rd party programs (auto play)", +} + + +@command(Privileges.MODERATOR, hidden=True) +async def silence(ctx: Context) -> str | None: + """Silence a specified player with a specified duration & reason.""" + if len(ctx.args) < 3: + return "Invalid syntax: !silence " + + target = await app.state.sessions.players.from_cache_or_sql(name=ctx.args[0]) + if not target: + return f'"{ctx.args[0]}" not found.' 
+ + if target.priv & Privileges.STAFF and not ctx.player.priv & Privileges.DEVELOPER: + return "Only developers can manage staff members." + + duration = timeparse(ctx.args[1]) + if not duration: + return "Invalid timespan." + + reason = " ".join(ctx.args[2:]) + + if reason in SHORTHAND_REASONS: + reason = SHORTHAND_REASONS[reason] + + await target.silence(ctx.player, duration, reason) + return f"{target} was silenced." + + +@command(Privileges.MODERATOR, hidden=True) +async def unsilence(ctx: Context) -> str | None: + """Unsilence a specified player.""" + if len(ctx.args) < 2: + return "Invalid syntax: !unsilence " + + target = await app.state.sessions.players.from_cache_or_sql(name=ctx.args[0]) + if not target: + return f'"{ctx.args[0]}" not found.' + + if not target.silenced: + return f"{target} is not silenced." + + if target.priv & Privileges.STAFF and not ctx.player.priv & Privileges.DEVELOPER: + return "Only developers can manage staff members." + + reason = " ".join(ctx.args[1:]) + + await target.unsilence(ctx.player, reason) + return f"{target} was unsilenced." + + +""" Admin commands +# The commands below are relatively dangerous, +# and are generally for managing players. +""" + + +@command(Privileges.ADMINISTRATOR, aliases=["u"], hidden=True) +async def user(ctx: Context) -> str | None: + """Return general information about a given user.""" + if not ctx.args: + # no username specified, use ctx.player + player = ctx.player + else: + # username given, fetch the player + maybe_player = await app.state.sessions.players.from_cache_or_sql( + name=" ".join(ctx.args), + ) + + if maybe_player is None: + return "Player not found." 
+ + player = maybe_player + + priv_list = [ + priv.name + for priv in Privileges + if player.priv & priv and bin(priv).count("1") == 1 + ][::-1] + if player.last_np is not None and time.time() < player.last_np["timeout"]: + last_np = player.last_np["bmap"].embed + else: + last_np = None + + if player.is_online and player.client_details is not None: + osu_version = player.client_details.osu_version.date.isoformat() + else: + osu_version = "Unknown" + + donator_info = ( + f"True (ends {timeago.format(player.donor_end)})" + if player.priv & Privileges.DONATOR != 0 + else "False" + ) + + user_clan = ( + await clans_repo.fetch_one(id=player.clan_id) + if player.clan_id is not None + else None + ) + display_name = ( + f"[{user_clan['tag']}] {player.name}" if user_clan is not None else player.name + ) + + return "\n".join( + ( + f'[{"Bot" if player.is_bot_client else "Player"}] {display_name} ({player.id})', + f"Privileges: {priv_list}", + f"Donator: {donator_info}", + f"Channels: {[c._name for c in player.channels]}", + f"Logged in: {timeago.format(player.login_time)}", + f"Last server interaction: {timeago.format(player.last_recv_time)}", + f"osu! build: {osu_version} | Tourney: {player.is_tourney_client}", + f"Silenced: {player.silenced} | Spectating: {player.spectating}", + f"Last /np: {last_np}", + f"Recent score: {player.recent_score}", + f"Match: {player.match}", + f"Spectators: {player.spectators}", + ), + ) + + +@command(Privileges.ADMINISTRATOR, hidden=True) +async def restrict(ctx: Context) -> str | None: + """Restrict a specified player's account, with a reason.""" + if len(ctx.args) < 2: + return "Invalid syntax: !restrict " + + # find any user matching (including offline). + target = await app.state.sessions.players.from_cache_or_sql(name=ctx.args[0]) + if not target: + return f'"{ctx.args[0]}" not found.' + + if target.priv & Privileges.STAFF and not ctx.player.priv & Privileges.DEVELOPER: + return "Only developers can manage staff members." 
+ + if target.restricted: + return f"{target} is already restricted!" + + reason = " ".join(ctx.args[1:]) + + if reason in SHORTHAND_REASONS: + reason = SHORTHAND_REASONS[reason] + + await target.restrict(admin=ctx.player, reason=reason) + + # refresh their client state + if target.is_online: + target.logout() + + return f"{target} was restricted." + + +@command(Privileges.ADMINISTRATOR, hidden=True) +async def unrestrict(ctx: Context) -> str | None: + """Unrestrict a specified player's account, with a reason.""" + if len(ctx.args) < 2: + return "Invalid syntax: !unrestrict " + + # find any user matching (including offline). + target = await app.state.sessions.players.from_cache_or_sql(name=ctx.args[0]) + if not target: + return f'"{ctx.args[0]}" not found.' + + if target.priv & Privileges.STAFF and not ctx.player.priv & Privileges.DEVELOPER: + return "Only developers can manage staff members." + + if not target.restricted: + return f"{target} is not restricted!" + + reason = " ".join(ctx.args[1:]) + + if reason in SHORTHAND_REASONS: + reason = SHORTHAND_REASONS[reason] + + await target.unrestrict(ctx.player, reason) + + # refresh their client state + if target.is_online: + target.logout() + + return f"{target} was unrestricted." + + +@command(Privileges.ADMINISTRATOR, hidden=True) +async def alert(ctx: Context) -> str | None: + """Send a notification to all players.""" + if len(ctx.args) < 1: + return "Invalid syntax: !alert " + + notif_txt = " ".join(ctx.args) + + app.state.sessions.players.enqueue(app.packets.notification(notif_txt)) + return "Alert sent." + + +@command(Privileges.ADMINISTRATOR, aliases=["alertu"], hidden=True) +async def alertuser(ctx: Context) -> str | None: + """Send a notification to a specified player by name.""" + if len(ctx.args) < 2: + return "Invalid syntax: !alertu " + + target = app.state.sessions.players.get(name=ctx.args[0]) + if not target: + return "Could not find a user by that name." 
+ + notif_txt = " ".join(ctx.args[1:]) + + target.enqueue(app.packets.notification(notif_txt)) + return "Alert sent." + + +# NOTE: this is pretty useless since it doesn't switch anything other +# than the c[e4].ppy.sh domains; it exists on bancho as a tournament +# server switch mechanism, perhaps we could leverage this in the future. +@command(Privileges.ADMINISTRATOR, hidden=True) +async def switchserv(ctx: Context) -> str | None: + """Switch your client's internal endpoints to a specified IP address.""" + if len(ctx.args) != 1: + return "Invalid syntax: !switch " + + new_bancho_ip = ctx.args[0] + + ctx.player.enqueue(app.packets.switch_tournament_server(new_bancho_ip)) + return "Have a nice journey.." + + +@command(Privileges.ADMINISTRATOR) +async def shutdown(ctx: Context) -> str | None | NoReturn: + """Gracefully shutdown the server.""" + if ctx.args: # shutdown after a delay + delay = timeparse(ctx.args[0]) + if not delay: + return "Invalid timespan." + + if delay < 15: + return "Minimum delay is 15 seconds." + + if len(ctx.args) > 1: + # alert all online players of the reboot. + alert_msg = ( + f"The server will {ctx.trigger} in {ctx.args[0]}.\n\n" + f'Reason: {" ".join(ctx.args[1:])}' + ) + + app.state.sessions.players.enqueue(app.packets.notification(alert_msg)) + + app.state.loop.call_later(delay, os.kill, os.getpid(), signal.SIGTERM) + return f"Enqueued {ctx.trigger}." + else: # shutdown immediately + os.kill(os.getpid(), signal.SIGTERM) + return "Process killed" + + +""" Developer commands +# The commands below are either dangerous or +# simply not useful for any other roles. +""" + + +@command(Privileges.DEVELOPER) +async def stealth(ctx: Context) -> str | None: + """Toggle the developer's stealth, allowing them to be hidden.""" + # NOTE: this command is a large work in progress and currently + # half works; eventually it will be moved to the Admin level. 
+ ctx.player.stealth = not ctx.player.stealth + + return f'Stealth {"enabled" if ctx.player.stealth else "disabled"}.' + + +@command(Privileges.DEVELOPER) +async def recalc(ctx: Context) -> str | None: + """Recalculate pp for a given map, or all maps.""" + return ( + "Please use tools/recalc.py instead.\n" + "If you need any support, join our Discord @ https://discord.gg/ShEQgUx." + ) + + +@command(Privileges.DEVELOPER, hidden=True) +async def debug(ctx: Context) -> str | None: + """Toggle the console's debug setting.""" + app.settings.DEBUG = not app.settings.DEBUG + return f"Toggled {'on' if app.settings.DEBUG else 'off'}." + + +# NOTE: these commands will likely be removed +# with the addition of a good frontend. +str_priv_dict = { + "normal": Privileges.UNRESTRICTED, + "verified": Privileges.VERIFIED, + "whitelisted": Privileges.WHITELISTED, + "supporter": Privileges.SUPPORTER, + "premium": Privileges.PREMIUM, + "alumni": Privileges.ALUMNI, + "tournament": Privileges.TOURNEY_MANAGER, + "nominator": Privileges.NOMINATOR, + "mod": Privileges.MODERATOR, + "admin": Privileges.ADMINISTRATOR, + "developer": Privileges.DEVELOPER, +} + + +@command(Privileges.DEVELOPER, hidden=True) +async def addpriv(ctx: Context) -> str | None: + """Set privileges for a specified player (by name).""" + if len(ctx.args) < 2: + return "Invalid syntax: !addpriv " + + bits = Privileges(0) + + for m in [m.lower() for m in ctx.args[1:]]: + if m not in str_priv_dict: + return f"Not found: {m}." + + bits |= str_priv_dict[m] + + target = await app.state.sessions.players.from_cache_or_sql(name=ctx.args[0]) + if not target: + return "Could not find user." + + if bits & Privileges.DONATOR != 0: + return "Please use the !givedonator command to assign donator privileges to players." + + await target.add_privs(bits) + return f"Updated {target}'s privileges." 
+ + +@command(Privileges.DEVELOPER, hidden=True) +async def rmpriv(ctx: Context) -> str | None: + """Set privileges for a specified player (by name).""" + if len(ctx.args) < 2: + return "Invalid syntax: !rmpriv " + + bits = Privileges(0) + + for m in [m.lower() for m in ctx.args[1:]]: + if m not in str_priv_dict: + return f"Not found: {m}." + + bits |= str_priv_dict[m] + + target = await app.state.sessions.players.from_cache_or_sql(name=ctx.args[0]) + if not target: + return "Could not find user." + + await target.remove_privs(bits) + + if bits & Privileges.DONATOR != 0: + target.donor_end = 0 + await app.state.services.database.execute( + "UPDATE users SET donor_end = 0 WHERE id = :user_id", + {"user_id": target.id}, + ) + + return f"Updated {target}'s privileges." + + +@command(Privileges.DEVELOPER, hidden=True) +async def givedonator(ctx: Context) -> str | None: + """Give donator status to a specified player for a specified duration.""" + if len(ctx.args) < 2: + return "Invalid syntax: !givedonator " + + target = await app.state.sessions.players.from_cache_or_sql(name=ctx.args[0]) + if not target: + return "Could not find user." + + timespan = timeparse(ctx.args[1]) + if not timespan: + return "Invalid timespan." + + if target.donor_end < time.time(): + timespan += time.time() + else: + timespan += target.donor_end + + target.donor_end = int(timespan) + await app.state.services.database.execute( + "UPDATE users SET donor_end = :end WHERE id = :user_id", + {"end": timespan, "user_id": target.id}, + ) + + await target.add_privs(Privileges.SUPPORTER) + + return f"Added {ctx.args[1]} of donator status to {target}." + + +@command(Privileges.DEVELOPER) +async def wipemap(ctx: Context) -> str | None: + # (intentionally no docstring) + if ctx.args: + return "Invalid syntax: !wipemap" + + if ctx.player.last_np is None or time.time() >= ctx.player.last_np["timeout"]: + return "Please /np a map first!" 
+ + map_md5 = ctx.player.last_np["bmap"].md5 + + # delete scores from all tables + await app.state.services.database.execute( + "DELETE FROM scores WHERE map_md5 = :map_md5", + {"map_md5": map_md5}, + ) + + return "Scores wiped." + + +@command(Privileges.DEVELOPER, aliases=["re"]) +async def reload(ctx: Context) -> str | None: + """Reload a python module.""" + if len(ctx.args) != 1: + return "Invalid syntax: !reload " + + parent, *children = ctx.args[0].split(".") + + try: + mod = __import__(parent) + except ModuleNotFoundError: + return "Module not found." + + child = None + try: + for child in children: + mod = getattr(mod, child) + except AttributeError: + return f"Failed at {child}." + + try: + mod = importlib.reload(mod) + except TypeError as exc: + return f"{exc.args[0]}." + + return f"Reloaded {mod.__name__}" + + +@command(Privileges.UNRESTRICTED) +async def server(ctx: Context) -> str | None: + """Retrieve performance data about the server.""" + + build_str = f"bancho.py v{app.settings.VERSION} ({app.settings.DOMAIN})" + + # get info about this process + proc = psutil.Process(os.getpid()) + uptime = int(time.time() - proc.create_time()) + + # get info about our cpu + cpu_info = cpuinfo.get_cpu_info() + + # list of all cpus installed with thread count + thread_count = cpu_info["count"] + cpu_name = cpu_info["brand_raw"] + + cpu_info_str = f"{thread_count}x {cpu_name}" + + # get system-wide ram usage + sys_ram = psutil.virtual_memory() + + # output ram usage as `{bancho_used}MB / {sys_used}MB / {sys_total}MB` + bancho_ram = proc.memory_info()[0] + ram_values = (bancho_ram, sys_ram.used, sys_ram.total) + ram_info = " / ".join([f"{v // 1024 ** 2}MB" for v in ram_values]) + + # current state of settings + mirror_search_url = urlparse(app.settings.MIRROR_SEARCH_ENDPOINT).netloc + mirror_download_url = urlparse(app.settings.MIRROR_DOWNLOAD_ENDPOINT).netloc + using_osuapi = bool(app.settings.OSU_API_KEY) + advanced_mode = app.settings.DEVELOPER_MODE + auto_logging 
= app.settings.AUTOMATICALLY_REPORT_PROBLEMS + + # package versioning info + # divide up pkg versions, 3 displayed per line, e.g. + # aiohttp v3.6.3 | aiomysql v0.0.21 | bcrypt v3.2.0 + # cmyui v1.7.3 | datadog v0.40.1 | geoip2 v4.1.0 + # maniera v1.0.0 | mysql-connector-python v8.0.23 | orjson v3.5.1 + # psutil v5.8.0 | py3rijndael v0.3.3 | uvloop v0.15.2 + requirements = [] + + for dist in importlib.metadata.distributions(): + requirements.append(f"{dist.name} v{dist.version}") + requirements.sort(key=lambda x: x.casefold()) + + requirements_info = "\n".join( + " | ".join(section) + for section in (requirements[i : i + 3] for i in range(0, len(requirements), 3)) + ) + + return "\n".join( + ( + f"{build_str} | uptime: {timedelta(seconds=uptime)}", + f"cpu: {cpu_info_str}", + f"ram: {ram_info}", + f"search mirror: {mirror_search_url} | download mirror: {mirror_download_url}", + f"osu!api connection: {using_osuapi}", + f"advanced mode: {advanced_mode} | auto logging: {auto_logging}", + "", + "requirements", + requirements_info, + ), + ) + + +if app.settings.DEVELOPER_MODE: + """Advanced (& potentially dangerous) commands""" + + # NOTE: some of these commands are potentially dangerous, and only + # really intended for advanced users looking for access to lower level + # utilities. Some may give direct access to utilties that could perform + # harmful tasks to the underlying machine, so use at your own risk. 
+ + from sys import modules as installed_mods + + __py_namespace: dict[str, Any] = globals() | { + mod: importlib.import_module(mod) + for mod in ( + "asyncio", + "dis", + "os", + "sys", + "struct", + "discord", + "datetime", + "time", + "inspect", + "math", + "importlib", + ) + if mod in installed_mods + } + + @command(Privileges.DEVELOPER) + async def py(ctx: Context) -> str | None: + """Allow for (async) access to the python interpreter.""" + # This can be very good for getting used to bancho.py's API; just look + # around the codebase and find things to play with in your server. + # Ex: !py return (await app.state.sessions.players.get(name='cmyui')).status.action + if not ctx.args: + return "owo" + + # turn our input args into a coroutine definition string. + definition = "\n ".join(["async def __py(ctx):", " ".join(ctx.args)]) + + try: # def __py(ctx) + exec(definition, __py_namespace) # add to namespace + ret = await __py_namespace["__py"](ctx) # await it's return + except Exception as exc: # return exception in osu! chat + ret = f"{exc.__class__}: {exc}" + + if "__py" in __py_namespace: + del __py_namespace["__py"] + + if not isinstance(ret, str): + ret = pprint.pformat(ret, compact=True) + + return str(ret) + + +""" Multiplayer commands +# The commands below for multiplayer match management. +# Most commands are open to player usage. +""" + + +def ensure_match( + f: Callable[[Context, Match], Awaitable[str | None]], +) -> Callable[[Context], Awaitable[str | None]]: + @wraps(f) + async def wrapper(ctx: Context) -> str | None: + match = ctx.player.match + + # multi set is a bit of a special case, + # as we do some additional checks. 
+ if match is None: + # player not in a match + return None + + if ctx.recipient is not match.chat: + # message not in match channel + return None + + if not ( + ctx.player in match.refs + or ctx.player.priv & Privileges.TOURNEY_MANAGER + or f is mp_help.__wrapped__ # type: ignore[attr-defined] + ): + return None + + return await f(ctx, match) + + return wrapper + + +@mp_commands.add(Privileges.UNRESTRICTED, aliases=["h"]) +@ensure_match +async def mp_help(ctx: Context, match: Match) -> str | None: + """Show all documented multiplayer commands the player can access.""" + prefix = app.settings.COMMAND_PREFIX + cmds = [] + + for cmd in mp_commands.commands: + if not cmd.doc or ctx.player.priv & cmd.priv != cmd.priv: + # no doc, or insufficient permissions. + continue + + cmds.append(f"{prefix}mp {cmd.triggers[0]}: {cmd.doc}") + + return "\n".join(cmds) + + +@mp_commands.add(Privileges.UNRESTRICTED, aliases=["st"]) +@ensure_match +async def mp_start(ctx: Context, match: Match) -> str | None: + """Start the current multiplayer match, with any players ready.""" + if len(ctx.args) > 1: + return "Invalid syntax: !mp start " + + # this command can be used in a few different ways; + # !mp start: start the match now (make sure all players are ready) + # !mp start force: start the match now (don't check for ready) + # !mp start N: start the match in N seconds (don't check for ready) + # !mp start cancel: cancel the current match start timer + + if not ctx.args: + # !mp start + if match.starting is not None: + time_remaining = int(match.starting["time"] - time.time()) + return f"Match starting in {time_remaining} seconds." + + if any([s.status == SlotStatus.not_ready for s in match.slots]): + return "Not all players are ready (`!mp start force` to override)." + else: + if ctx.args[0].isdecimal(): + # !mp start N + if match.starting is not None: + time_remaining = int(match.starting["time"] - time.time()) + return f"Match starting in {time_remaining} seconds." 
+ + # !mp start + duration = int(ctx.args[0]) + if not 0 < duration <= 300: + return "Timer range is 1-300 seconds." + + def _start() -> None: + """Remove any pending timers & start the match.""" + # remove start & alert timers + match.starting = None + + # make sure player didn't leave the + # match since queueing this start lol... + if ctx.player not in {slot.player for slot in match.slots}: + match.chat.send_bot("Player left match? (cancelled)") + return + + match.start() + match.chat.send_bot("Starting match.") + + def _alert_start(t: int) -> None: + """Alert the match of the impending start.""" + match.chat.send_bot(f"Match starting in {t} seconds.") + + # add timers to our match object, + # so we can cancel them if needed. + match.starting = { + "start": app.state.loop.call_later(duration, _start), + "alerts": [ + app.state.loop.call_later(duration - t, lambda t=t: _alert_start(t)) + for t in (60, 30, 10, 5, 4, 3, 2, 1) + if t < duration + ], + "time": time.time() + duration, + } + + return f"Match will start in {duration} seconds." + elif ctx.args[0] in ("cancel", "c"): + # !mp start cancel + if match.starting is None: + return "Match timer not active!" + + match.starting["start"].cancel() + for alert in match.starting["alerts"]: + alert.cancel() + + match.starting = None + + return "Match timer cancelled." + elif ctx.args[0] not in ("force", "f"): + return "Invalid syntax: !mp start " + # !mp start force simply passes through + + match.start() + return "Good luck!" + + +@mp_commands.add(Privileges.UNRESTRICTED, aliases=["a"]) +@ensure_match +async def mp_abort(ctx: Context, match: Match) -> str | None: + """Abort the current in-progress multiplayer match.""" + if not match.in_progress: + return "Abort what?" + + match.unready_players(expected=SlotStatus.playing) + match.reset_players_loaded_status() + + match.in_progress = False + match.enqueue(app.packets.match_abort()) + match.enqueue_state() + return "Match aborted." 
+ + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_map(ctx: Context, match: Match) -> str | None: + """Set the current match's current map by id.""" + if len(ctx.args) != 1 or not ctx.args[0].isdecimal(): + return "Invalid syntax: !mp map " + + map_id = int(ctx.args[0]) + + if map_id == match.map_id: + return "Map already selected." + + bmap = await Beatmap.from_bid(map_id) + if not bmap: + return "Beatmap not found." + + match.map_id = bmap.id + match.map_md5 = bmap.md5 + match.map_name = bmap.full_name + + match.mode = bmap.mode + + match.enqueue_state() + return f"Selected: {bmap.embed}." + + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_mods(ctx: Context, match: Match) -> str | None: + """Set the current match's mods, from string form.""" + if len(ctx.args) != 1 or len(ctx.args[0]) % 2 != 0: + return "Invalid syntax: !mp mods " + + mods = Mods.from_modstr(ctx.args[0]) + mods = mods.filter_invalid_combos(match.mode.as_vanilla) + + if match.freemods: + if ctx.player is match.host: + # allow host to set speed-changing mods. + match.mods = mods & SPEED_CHANGING_MODS + + # set slot mods + slot = match.get_slot(ctx.player) + assert slot is not None + + slot.mods = mods & ~SPEED_CHANGING_MODS + else: + # not freemods, set match mods. + match.mods = mods + + match.enqueue_state() + return "Match mods updated." + + +@mp_commands.add(Privileges.UNRESTRICTED, aliases=["fm", "fmods"]) +@ensure_match +async def mp_freemods(ctx: Context, match: Match) -> str | None: + """Toggle freemods status for the match.""" + if len(ctx.args) != 1 or ctx.args[0] not in ("on", "off"): + return "Invalid syntax: !mp freemods " + + if ctx.args[0] == "on": + # central mods -> all players mods. + match.freemods = True + + for s in match.slots: + if s.player is not None: + # the slot takes any non-speed + # changing mods from the match. 
+ s.mods = match.mods & ~SPEED_CHANGING_MODS + + match.mods &= SPEED_CHANGING_MODS + else: + # host mods -> central mods. + match.freemods = False + + host_slot = match.get_host_slot() + assert host_slot is not None + + # the match keeps any speed-changing mods, + # and also takes any mods the host has enabled. + match.mods &= SPEED_CHANGING_MODS + match.mods |= host_slot.mods + + for s in match.slots: + if s.player is not None: + s.mods = Mods.NOMOD + + match.enqueue_state() + return "Match freemod status updated." + + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_host(ctx: Context, match: Match) -> str | None: + """Set the current match's current host by id.""" + if len(ctx.args) != 1: + return "Invalid syntax: !mp host " + + target = app.state.sessions.players.get(name=ctx.args[0]) + if not target: + return "Could not find a user by that name." + + if target is match.host: + return "They're already host, silly!" + + if target not in {slot.player for slot in match.slots}: + return "Found no such player in the match." + + match.host_id = target.id + + match.host.enqueue(app.packets.match_transfer_host()) + match.enqueue_state(lobby=True) + return "Match host updated." + + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_randpw(ctx: Context, match: Match) -> str | None: + """Randomize the current match's password.""" + match.passwd = secrets.token_hex(8) + return "Match password randomized." + + +@mp_commands.add(Privileges.UNRESTRICTED, aliases=["inv"]) +@ensure_match +async def mp_invite(ctx: Context, match: Match) -> str | None: + """Invite a player to the current match by name.""" + if len(ctx.args) != 1: + return "Invalid syntax: !mp invite " + + target = app.state.sessions.players.get(name=ctx.args[0]) + if not target: + return "Could not find a user by that name." + + if target is app.state.sessions.bot: + return "I'm too busy!" + + if target is ctx.player: + return "You can't invite yourself!" 
+ + target.enqueue(app.packets.match_invite(ctx.player, target.name)) + return f"Invited {target} to the match." + + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_addref(ctx: Context, match: Match) -> str | None: + """Add a referee to the current match by name.""" + if len(ctx.args) != 1: + return "Invalid syntax: !mp addref " + + target = app.state.sessions.players.get(name=ctx.args[0]) + if not target: + return "Could not find a user by that name." + + if target not in {slot.player for slot in match.slots}: + return "User must be in the current match!" + + if target in match.refs: + return f"{target} is already a match referee!" + + match._refs.add(target) + return f"{target.name} added to match referees." + + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_rmref(ctx: Context, match: Match) -> str | None: + """Remove a referee from the current match by name.""" + if len(ctx.args) != 1: + return "Invalid syntax: !mp addref " + + target = app.state.sessions.players.get(name=ctx.args[0]) + if not target: + return "Could not find a user by that name." + + if target not in match.refs: + return f"{target} is not a match referee!" + + if target is match.host: + return "The host is always a referee!" + + match._refs.remove(target) + return f"{target.name} removed from match referees." + + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_listref(ctx: Context, match: Match) -> str | None: + """List all referees from the current match.""" + return ", ".join(map(str, match.refs)) + "." + + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_lock(ctx: Context, match: Match) -> str | None: + """Lock all unused slots in the current match.""" + for slot in match.slots: + if slot.status == SlotStatus.open: + slot.status = SlotStatus.locked + + match.enqueue_state() + return "All unused slots locked." 
+ + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_unlock(ctx: Context, match: Match) -> str | None: + """Unlock locked slots in the current match.""" + for slot in match.slots: + if slot.status == SlotStatus.locked: + slot.status = SlotStatus.open + + match.enqueue_state() + return "All locked slots unlocked." + + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_teams(ctx: Context, match: Match) -> str | None: + """Change the team type for the current match.""" + if len(ctx.args) != 1: + return "Invalid syntax: !mp teams " + + team_type = ctx.args[0] + + if team_type in ("ffa", "freeforall", "head-to-head"): + match.team_type = MatchTeamTypes.head_to_head + elif team_type in ("tag", "coop", "co-op", "tag-coop"): + match.team_type = MatchTeamTypes.tag_coop + elif team_type in ("teams", "team-vs", "teams-vs"): + match.team_type = MatchTeamTypes.team_vs + elif team_type in ("tag-teams", "tag-team-vs", "tag-teams-vs"): + match.team_type = MatchTeamTypes.tag_team_vs + else: + return "Unknown team type. (ffa, tag, teams, tag-teams)" + + # find the new appropriate default team. + # defaults are (ffa: neutral, teams: red). + if match.team_type in (MatchTeamTypes.head_to_head, MatchTeamTypes.tag_coop): + new_t = MatchTeams.neutral + else: + new_t = MatchTeams.red + + # change each active slots team to + # fit the correspoding team type. + for s in match.slots: + if s.player is not None: + s.team = new_t + + if match.is_scrimming: + # reset score if scrimming. + match.reset_scrim() + + match.enqueue_state() + return "Match team type updated." 
+ + +@mp_commands.add(Privileges.UNRESTRICTED, aliases=["cond"]) +@ensure_match +async def mp_condition(ctx: Context, match: Match) -> str | None: + """Change the win condition for the match.""" + if len(ctx.args) != 1: + return "Invalid syntax: !mp condition " + + cond = ctx.args[0] + + if cond == "pp": + # special case - pp can't actually be used as an ingame + # win condition, but bancho.py allows it to be passed into + # this command during a scrims to use pp as a win cond. + if not match.is_scrimming: + return "PP is only useful as a win condition during scrims." + if match.use_pp_scoring: + return "PP scoring already enabled." + + match.use_pp_scoring = True + else: + if match.use_pp_scoring: + match.use_pp_scoring = False + + if cond == "score": + match.win_condition = MatchWinConditions.score + elif cond in ("accuracy", "acc"): + match.win_condition = MatchWinConditions.accuracy + elif cond == "combo": + match.win_condition = MatchWinConditions.combo + elif cond in ("scorev2", "v2"): + match.win_condition = MatchWinConditions.scorev2 + else: + return "Invalid win condition. (score, acc, combo, scorev2, *pp)" + + match.enqueue_state(lobby=False) + return "Match win condition updated." + + +@mp_commands.add(Privileges.UNRESTRICTED, aliases=["autoref"]) +@ensure_match +async def mp_scrim(ctx: Context, match: Match) -> str | None: + """Start a scrim in the current match.""" + if len(ctx.args) != 1: + return "Invalid syntax: !mp scrim " + + r_match = regexes.BEST_OF.fullmatch(ctx.args[0]) + if not r_match: + return "Invalid syntax: !mp scrim " + + best_of = int(r_match[1]) + if not 0 <= best_of < 16: + return "Best of must be in range 0-15." + + winning_pts = (best_of // 2) + 1 + + if winning_pts != 0: + # setting to real num + if match.is_scrimming: + return "Already scrimming!" + + if best_of % 2 == 0: + return "Best of must be an odd number!" 
+ + match.is_scrimming = True + msg = ( + f"A scrimmage has been started by {ctx.player.name}; " + f"first to {winning_pts} points wins. Best of luck!" + ) + else: + # setting to 0 + if not match.is_scrimming: + return "Not currently scrimming!" + + match.is_scrimming = False + match.reset_scrim() + msg = "Scrimming cancelled." + + match.winning_pts = winning_pts + return msg + + +@mp_commands.add(Privileges.UNRESTRICTED, aliases=["end"]) +@ensure_match +async def mp_endscrim(ctx: Context, match: Match) -> str | None: + """End the current matches ongoing scrim.""" + if not match.is_scrimming: + return "Not currently scrimming!" + + match.is_scrimming = False + match.reset_scrim() + return "Scrimmage ended." # TODO: final score (get_score method?) + + +@mp_commands.add(Privileges.UNRESTRICTED, aliases=["rm"]) +@ensure_match +async def mp_rematch(ctx: Context, match: Match) -> str | None: + """Restart a scrim, or roll back previous match point.""" + if ctx.args: + return "Invalid syntax: !mp rematch" + + if ctx.player is not match.host: + return "Only available to the host." + + if not match.is_scrimming: + if match.winning_pts == 0: + msg = "No scrim to rematch; to start one, use !mp scrim." + else: + # re-start scrimming with old points + match.is_scrimming = True + msg = ( + f"A rematch has been started by {ctx.player.name}; " + f"first to {match.winning_pts} points wins. Best of luck!" + ) + else: + # reset the last match point awarded + if not match.winners: + return "No match points have yet been awarded!" + + recent_winner = match.winners[-1] + if recent_winner is None: + return "The last point was a tie!" + + match.match_points[recent_winner] -= 1 # TODO: team name + match.winners.pop() + + msg = f"A point has been deducted from {recent_winner}." 
+ + return msg + + +@mp_commands.add(Privileges.ADMINISTRATOR, aliases=["f"], hidden=True) +@ensure_match +async def mp_force(ctx: Context, match: Match) -> str | None: + """Force a player into the current match by name.""" + # NOTE: this overrides any limits such as silences or passwd. + if len(ctx.args) != 1: + return "Invalid syntax: !mp force " + + target = app.state.sessions.players.get(name=ctx.args[0]) + if not target: + return "Could not find a user by that name." + + target.join_match(match, match.passwd) + return "Welcome." + + +# mappool-related mp commands + + +@mp_commands.add(Privileges.UNRESTRICTED, aliases=["lp"]) +@ensure_match +async def mp_loadpool(ctx: Context, match: Match) -> str | None: + """Load a mappool into the current match.""" + if len(ctx.args) != 1: + return "Invalid syntax: !mp loadpool " + + if ctx.player is not match.host: + return "Only available to the host." + + name = ctx.args[0] + + tourney_pool = await tourney_pools_repo.fetch_by_name(name) + if tourney_pool is None: + return "Could not find a pool by that name!" + + if ( + match.tourney_pool is not None + and match.tourney_pool["id"] == tourney_pool["id"] + ): + return f"{tourney_pool['name']} already selected!" + + match.tourney_pool = tourney_pool + return f"{tourney_pool['name']} selected." + + +@mp_commands.add(Privileges.UNRESTRICTED, aliases=["ulp"]) +@ensure_match +async def mp_unloadpool(ctx: Context, match: Match) -> str | None: + """Unload the current matches mappool.""" + if ctx.args: + return "Invalid syntax: !mp unloadpool" + + if ctx.player is not match.host: + return "Only available to the host." + + if not match.tourney_pool: + return "No mappool currently selected!" + + match.tourney_pool = None + return "Mappool unloaded." 
+ + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_ban(ctx: Context, match: Match) -> str | None: + """Ban a pick in the currently loaded mappool.""" + if len(ctx.args) != 1: + return "Invalid syntax: !mp ban " + + if not match.tourney_pool: + return "No pool currently selected!" + + mods_slot = ctx.args[0] + + # separate mods & slot + r_match = regexes.MAPPOOL_PICK.fullmatch(mods_slot) + if not r_match: + return "Invalid pick syntax; correct example: HD2" + + # not calling mods.filter_invalid_combos here intentionally. + mods = Mods.from_modstr(r_match[1]) + slot = int(r_match[2]) + + map_pick = await tourney_pool_maps_repo.fetch_by_pool_and_pick( + pool_id=match.tourney_pool["id"], + mods=mods, + slot=slot, + ) + if map_pick is None: + return f"Found no {mods_slot} pick in the pool." + + if (mods, slot) in match.bans: + return "That pick is already banned!" + + match.bans.add((mods, slot)) + return f"{mods_slot} banned." + + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_unban(ctx: Context, match: Match) -> str | None: + """Unban a pick in the currently loaded mappool.""" + if len(ctx.args) != 1: + return "Invalid syntax: !mp unban " + + if not match.tourney_pool: + return "No pool currently selected!" + + mods_slot = ctx.args[0] + + # separate mods & slot + r_match = regexes.MAPPOOL_PICK.fullmatch(mods_slot) + if not r_match: + return "Invalid pick syntax; correct example: HD2" + + # not calling mods.filter_invalid_combos here intentionally. + mods = Mods.from_modstr(r_match[1]) + slot = int(r_match[2]) + + map_pick = await tourney_pool_maps_repo.fetch_by_pool_and_pick( + pool_id=match.tourney_pool["id"], + mods=mods, + slot=slot, + ) + if map_pick is None: + return f"Found no {mods_slot} pick in the pool." + + if (mods, slot) not in match.bans: + return "That pick is not currently banned!" + + match.bans.remove((mods, slot)) + return f"{mods_slot} unbanned." 
+ + +@mp_commands.add(Privileges.UNRESTRICTED) +@ensure_match +async def mp_pick(ctx: Context, match: Match) -> str | None: + """Pick a map from the currently loaded mappool.""" + if len(ctx.args) != 1: + return "Invalid syntax: !mp pick " + + if not match.tourney_pool: + return "No pool currently loaded!" + + mods_slot = ctx.args[0] + + # separate mods & slot + r_match = regexes.MAPPOOL_PICK.fullmatch(mods_slot) + if not r_match: + return "Invalid pick syntax; correct example: HD2" + + # not calling mods.filter_invalid_combos here intentionally. + mods = Mods.from_modstr(r_match[1]) + slot = int(r_match[2]) + + map_pick = await tourney_pool_maps_repo.fetch_by_pool_and_pick( + pool_id=match.tourney_pool["id"], + mods=mods, + slot=slot, + ) + if map_pick is None: + return f"Found no {mods_slot} pick in the pool." + + if (mods, slot) in match.bans: + return f"{mods_slot} has been banned from being picked." + + bmap = await Beatmap.from_bid(map_pick["map_id"]) + if not bmap: + return f"Found no beatmap for {mods_slot} pick." + + match.map_md5 = bmap.md5 + match.map_id = bmap.id + match.map_name = bmap.full_name + + # TODO: some kind of abstraction allowing + # for something like !mp pick fm. + if match.freemods: + # if freemods are enabled, disable them. + match.freemods = False + + for s in match.slots: + if s.player is not None: + s.mods = Mods.NOMOD + + # update match mods to the picked map. + match.mods = mods + + match.enqueue_state() + + return f"Picked {bmap.embed}. ({mods_slot})" + + +""" Mappool management commands +# The commands below are for event managers +# and tournament hosts/referees to help automate +# tedious processes of running tournaments. 
+""" + + +@pool_commands.add(Privileges.TOURNEY_MANAGER, aliases=["h"], hidden=True) +async def pool_help(ctx: Context) -> str | None: + """Show all documented mappool commands the player can access.""" + prefix = app.settings.COMMAND_PREFIX + cmds = [] + + for cmd in pool_commands.commands: + if not cmd.doc or ctx.player.priv & cmd.priv != cmd.priv: + # no doc, or insufficient permissions. + continue + + cmds.append(f"{prefix}pool {cmd.triggers[0]}: {cmd.doc}") + + return "\n".join(cmds) + + +@pool_commands.add(Privileges.TOURNEY_MANAGER, aliases=["c"], hidden=True) +async def pool_create(ctx: Context) -> str | None: + """Add a new mappool to the database.""" + if len(ctx.args) != 1: + return "Invalid syntax: !pool create " + + name = ctx.args[0] + + existing_pool = await tourney_pools_repo.fetch_by_name(name) + if existing_pool is not None: + return "Pool already exists by that name!" + + tourney_pool = await tourney_pools_repo.create( + name=name, + created_by=ctx.player.id, + ) + + return f"{name} created." + + +@pool_commands.add(Privileges.TOURNEY_MANAGER, aliases=["del", "d"], hidden=True) +async def pool_delete(ctx: Context) -> str | None: + """Remove a mappool from the database.""" + if len(ctx.args) != 1: + return "Invalid syntax: !pool delete " + + name = ctx.args[0] + + existing_pool = await tourney_pools_repo.fetch_by_name(name) + if existing_pool is None: + return "Could not find a pool by that name!" + + await tourney_pools_repo.delete_by_id(existing_pool["id"]) + await tourney_pool_maps_repo.delete_all_in_pool(pool_id=existing_pool["id"]) + + return f"{name} deleted." + + +@pool_commands.add(Privileges.TOURNEY_MANAGER, aliases=["a"], hidden=True) +async def pool_add(ctx: Context) -> str | None: + """Add a new map to a mappool in the database.""" + if len(ctx.args) != 2: + return "Invalid syntax: !pool add " + + if ctx.player.last_np is None or time.time() >= ctx.player.last_np["timeout"]: + return "Please /np a map first!" 
+ + name, mods_slot = ctx.args + mods_slot = mods_slot.upper() # ocd + bmap = ctx.player.last_np["bmap"] + + # separate mods & slot + r_match = regexes.MAPPOOL_PICK.fullmatch(mods_slot) + if not r_match: + return "Invalid pick syntax; correct example: HD2" + + if len(r_match[1]) % 2 != 0: + return "Invalid mods." + + # not calling mods.filter_invalid_combos here intentionally. + mods = Mods.from_modstr(r_match[1]) + slot = int(r_match[2]) + + tourney_pool = await tourney_pools_repo.fetch_by_name(name) + if tourney_pool is None: + return "Could not find a pool by that name!" + + tourney_pool_maps = await tourney_pool_maps_repo.fetch_many( + pool_id=tourney_pool["id"], + ) + for pool_map in tourney_pool_maps: + if mods == pool_map["mods"] and slot == pool_map["slot"]: + pool_beatmap = await Beatmap.from_bid(pool_map["map_id"]) + assert pool_beatmap is not None + return f"{mods_slot} is already {pool_beatmap.embed}!" + + if pool_map["map_id"] == bmap.id: + return f"{bmap.embed} is already in the pool!" + + await tourney_pool_maps_repo.create( + map_id=bmap.id, + pool_id=tourney_pool["id"], + mods=mods, + slot=slot, + ) + + return f"{bmap.embed} added to {name} as {mods_slot}." + + +@pool_commands.add(Privileges.TOURNEY_MANAGER, aliases=["rm", "r"], hidden=True) +async def pool_remove(ctx: Context) -> str | None: + """Remove a map from a mappool in the database.""" + if len(ctx.args) != 2: + return "Invalid syntax: !pool remove " + + name, mods_slot = ctx.args + mods_slot = mods_slot.upper() # ocd + + # separate mods & slot + r_match = regexes.MAPPOOL_PICK.fullmatch(mods_slot) + if not r_match: + return "Invalid pick syntax; correct example: HD2" + + # not calling mods.filter_invalid_combos here intentionally. + mods = Mods.from_modstr(r_match[1]) + slot = int(r_match[2]) + + tourney_pool = await tourney_pools_repo.fetch_by_name(name) + if tourney_pool is None: + return "Could not find a pool by that name!" 
+ + map_pick = await tourney_pool_maps_repo.fetch_by_pool_and_pick( + pool_id=tourney_pool["id"], + mods=mods, + slot=slot, + ) + if map_pick is None: + return f"Found no {mods_slot} pick in the pool." + + await tourney_pool_maps_repo.delete_map_from_pool( + map_pick["pool_id"], + map_pick["map_id"], + ) + + return f"{mods_slot} removed from {name}." + + +@pool_commands.add(Privileges.TOURNEY_MANAGER, aliases=["l"], hidden=True) +async def pool_list(ctx: Context) -> str | None: + """List all existing mappools information.""" + tourney_pools = await tourney_pools_repo.fetch_many(page=None, page_size=None) + if not tourney_pools: + return "There are currently no pools!" + + l = [f"Mappools ({len(tourney_pools)})"] + + for pool in tourney_pools: + created_by = await users_repo.fetch_one(id=pool["created_by"]) + if created_by is None: + log(f"Could not find pool creator (Id {pool['created_by']}).", Ansi.LRED) + continue + + l.append( + f"[{pool['created_at']:%Y-%m-%d}] " + f"{pool['name']}, by {created_by['name']}.", + ) + + return "\n".join(l) + + +@pool_commands.add(Privileges.TOURNEY_MANAGER, aliases=["i"], hidden=True) +async def pool_info(ctx: Context) -> str | None: + """Get all information for a specific mappool.""" + if len(ctx.args) != 1: + return "Invalid syntax: !pool info " + + name = ctx.args[0] + + tourney_pool = await tourney_pools_repo.fetch_by_name(name) + if tourney_pool is None: + return "Could not find a pool by that name!" + + _time = tourney_pool["created_at"].strftime("%H:%M:%S%p") + _date = tourney_pool["created_at"].strftime("%Y-%m-%d") + datetime_fmt = f"Created at {_time} on {_date}" + l = [ + f"{tourney_pool['id']}. 
{tourney_pool['name']}, by {tourney_pool['created_by']} | {datetime_fmt}.", + ] + + for tourney_map in sorted( + await tourney_pool_maps_repo.fetch_many(pool_id=tourney_pool["id"]), + key=lambda x: (repr(Mods(x["mods"])), x["slot"]), + ): + bmap = await Beatmap.from_bid(tourney_map["map_id"]) + if bmap is None: + log(f"Could not find beatmap {tourney_map['map_id']}.", Ansi.LRED) + continue + l.append(f"{Mods(tourney_map['mods'])!r}{tourney_map['slot']}: {bmap.embed}") + + return "\n".join(l) + + +""" Clan managment commands +# The commands below are for managing bancho.py +# clans, for users, clan staff, and server staff. +""" + + +@clan_commands.add(Privileges.UNRESTRICTED, aliases=["h"]) +async def clan_help(ctx: Context) -> str | None: + """Show all documented clan commands the player can access.""" + prefix = app.settings.COMMAND_PREFIX + cmds = [] + + for cmd in clan_commands.commands: + if not cmd.doc or ctx.player.priv & cmd.priv != cmd.priv: + # no doc, or insufficient permissions. + continue + + cmds.append(f"{prefix}clan {cmd.triggers[0]}: {cmd.doc}") + + return "\n".join(cmds) + + +@clan_commands.add(Privileges.UNRESTRICTED, aliases=["c"]) +async def clan_create(ctx: Context) -> str | None: + """Create a clan with a given tag & name.""" + if len(ctx.args) < 2: + return "Invalid syntax: !clan create " + + tag = ctx.args[0].upper() + if not 1 <= len(tag) <= 6: + return "Clan tag may be 1-6 characters long." + + name = " ".join(ctx.args[1:]) + if not 2 <= len(name) <= 16: + return "Clan name may be 2-16 characters long." + + if ctx.player.clan_id: + clan = await clans_repo.fetch_one(id=ctx.player.clan_id) + if clan: + clan_display_name = f"[{clan['tag']}] {clan['name']}" + return f"You're already a member of {clan_display_name}!" + + if await clans_repo.fetch_one(name=name): + return "That name has already been claimed by another clan." + + if await clans_repo.fetch_one(tag=tag): + return "That tag has already been claimed by another clan." 
+ + # add clan to sql + new_clan = await clans_repo.create( + name=name, + tag=tag, + owner=ctx.player.id, + ) + + # set owner's clan & clan priv (cache & sql) + ctx.player.clan_id = new_clan["id"] + ctx.player.clan_priv = ClanPrivileges.Owner + + await users_repo.partial_update( + ctx.player.id, + clan_id=new_clan["id"], + clan_priv=ClanPrivileges.Owner, + ) + + # announce clan creation + announce_chan = app.state.sessions.channels.get_by_name("#announce") + clan_display_name = f"[{new_clan['tag']}] {new_clan['name']}" + if announce_chan: + msg = f"\x01ACTION founded {clan_display_name}." + announce_chan.send(msg, sender=ctx.player, to_self=True) + + return f"{clan_display_name} founded." + + +@clan_commands.add(Privileges.UNRESTRICTED, aliases=["delete", "d"]) +async def clan_disband(ctx: Context) -> str | None: + """Disband a clan (admins may disband others clans).""" + if ctx.args: + # disband a specified clan by tag + if ctx.player not in app.state.sessions.players.staff: + return "Only staff members may disband the clans of others." + + clan = await clans_repo.fetch_one(tag=" ".join(ctx.args).upper()) + if not clan: + return "Could not find a clan by that tag." + else: + if ctx.player.clan_id is None: + return "You're not a member of a clan!" + + # disband the player's clan + clan = await clans_repo.fetch_one(id=ctx.player.clan_id) + if not clan: + return "You're not a member of a clan!" 
+ + await clans_repo.delete_one(clan["id"]) + + # remove all members from the clan + clan_member_ids = [ + clan_member["id"] + for clan_member in await users_repo.fetch_many(clan_id=clan["id"]) + ] + for member_id in clan_member_ids: + await users_repo.partial_update(member_id, clan_id=0, clan_priv=0) + + member = app.state.sessions.players.get(id=member_id) + if member: + member.clan_id = None + member.clan_priv = None + + # announce clan disbanding + announce_chan = app.state.sessions.channels.get_by_name("#announce") + clan_display_name = f"[{clan['tag']}] {clan['name']}" + if announce_chan: + msg = f"\x01ACTION disbanded {clan_display_name}." + announce_chan.send(msg, sender=ctx.player, to_self=True) + + return f"{clan_display_name} disbanded." + + +@clan_commands.add(Privileges.UNRESTRICTED, aliases=["i"]) +async def clan_info(ctx: Context) -> str | None: + """Lookup information of a clan by tag.""" + if not ctx.args: + return "Invalid syntax: !clan info " + + clan = await clans_repo.fetch_one(tag=" ".join(ctx.args).upper()) + if not clan: + return "Could not find a clan by that tag." + + clan_display_name = f"[{clan['tag']}] {clan['name']}" + msg = [f"{clan_display_name} | Founded {clan['created_at']:%b %d, %Y}."] + + # get members privs from sql + clan_members = await users_repo.fetch_many(clan_id=clan["id"]) + for member in sorted(clan_members, key=lambda m: m["clan_priv"], reverse=True): + priv_str = ("Member", "Officer", "Owner")[member["clan_priv"] - 1] + msg.append(f"[{priv_str}] {member['name']}") + + return "\n".join(msg) + + +@clan_commands.add(Privileges.UNRESTRICTED) +async def clan_leave(ctx: Context) -> str | None: + """Leaves the clan you're in.""" + if not ctx.player.clan_id: + return "You're not in a clan." + elif ctx.player.clan_priv == ClanPrivileges.Owner: + return "You must transfer your clan's ownership before leaving it. Alternatively, you can use !clan disband." 
+ + clan = await clans_repo.fetch_one(id=ctx.player.clan_id) + if not clan: + return "You're not in a clan." + + clan_members = await users_repo.fetch_many(clan_id=clan["id"]) + + await users_repo.partial_update(ctx.player.id, clan_id=0, clan_priv=0) + ctx.player.clan_id = None + ctx.player.clan_priv = None + + clan_display_name = f"[{clan['tag']}] {clan['name']}" + + if not clan_members: + # no members left, disband clan + await clans_repo.delete_one(clan["id"]) + + # announce clan disbanding + announce_chan = app.state.sessions.channels.get_by_name("#announce") + if announce_chan: + msg = f"\x01ACTION disbanded {clan_display_name}." + announce_chan.send(msg, sender=ctx.player, to_self=True) + + return f"You have successfully left {clan_display_name}." + + +# TODO: !clan inv, !clan join, !clan leave + + +@clan_commands.add(Privileges.UNRESTRICTED, aliases=["l"]) +async def clan_list(ctx: Context) -> str | None: + """List all existing clans' information.""" + if ctx.args: + if len(ctx.args) != 1 or not ctx.args[0].isdecimal(): + return "Invalid syntax: !clan list (page)" + else: + offset = 25 * int(ctx.args[0]) + else: + offset = 0 + + all_clans = await clans_repo.fetch_many(page=None, page_size=None) + num_clans = len(all_clans) + if offset >= num_clans: + return "No clans found." + + msg = [f"bancho.py clans listing ({num_clans} total)."] + + for idx, clan in enumerate(all_clans, offset): + clan_display_name = f"[{clan['tag']}] {clan['name']}" + msg.append(f"{idx + 1}. {clan_display_name}") + + return "\n".join(msg) + + +class CommandResponse(TypedDict): + resp: str | None + hidden: bool + + +async def process_commands( + player: Player, + target: Channel | Player, + msg: str, +) -> CommandResponse | None: + # response is either a CommandResponse if we hit a command, + # or simply False if we don't have any command hits. 
+ start_time = clock_ns() + + prefix_len = len(app.settings.COMMAND_PREFIX) + trigger, *args = msg[prefix_len:].strip().split(" ") + + # case-insensitive triggers + trigger = trigger.lower() + + # check if any command sets match. + for cmd_set in command_sets: + if trigger == cmd_set.trigger: + if not args: + args = ["help"] + + trigger, *args = args # get subcommand + + # case-insensitive triggers + trigger = trigger.lower() + + commands = cmd_set.commands + break + else: + # no set commands matched, check normal commands. + commands = regular_commands + + for cmd in commands: + if trigger in cmd.triggers and player.priv & cmd.priv == cmd.priv: + # found matching trigger with sufficient privs + try: + res = await cmd.callback( + Context( + player=player, + trigger=trigger, + args=args, + recipient=target, + ), + ) + except Exception: + # print exception info to the console, + # but do not break the player's session. + traceback.print_exc() + + res = "An exception occurred when running the command." + + if res is not None: + # we have a message to return, include elapsed time + elapsed = app.logging.magnitude_fmt_time(clock_ns() - start_time) + return {"resp": f"{res} | Elapsed: {elapsed}", "hidden": cmd.hidden} + else: + # no message to return + return {"resp": None, "hidden": False} + + return None diff --git a/app/constants/__init__.py b/app/constants/__init__.py new file mode 100644 index 0000000..a80e9d3 --- /dev/null +++ b/app/constants/__init__.py @@ -0,0 +1,8 @@ +# type: ignore +# isort: dont-add-imports + +from . import clientflags +from . import gamemodes +from . import mods +from . import privileges +from . 
@unique
@pymysql_encode(escape_enum)
class ClientFlags(IntFlag):
    """osu! anticheat <= 2016 (unsure of age)"""

    # NOTE: many of these flags are quite outdated and/or
    # broken and are even known to false positive quite often.
    # they can be helpful; just take them with a grain of salt.

    # NOTE(review): no flag is defined for bit 1 << 0 — the client
    # appears to never assign it; confirm against the osu! client.

    CLEAN = 0  # no flags sent

    # flags for timing errors or desync.
    SPEED_HACK_DETECTED = 1 << 1

    # this is to be ignored by server implementations. osu! team trolling hard
    INCORRECT_MOD_VALUE = 1 << 2

    MULTIPLE_OSU_CLIENTS = 1 << 3
    CHECKSUM_FAILURE = 1 << 4
    FLASHLIGHT_CHECKSUM_INCORRECT = 1 << 5

    # these are only used on the osu!bancho official server.
    OSU_EXECUTABLE_CHECKSUM = 1 << 6
    MISSING_PROCESSES_IN_LIST = 1 << 7  # also deprecated as of 2018

    # flags for either:
    # 1. pixels that should be outside the visible radius
    # (and thus black) being brighter than they should be.
    # 2. from an internal alpha value being incorrect.
    FLASHLIGHT_IMAGE_HACK = 1 << 8

    SPINNER_HACK = 1 << 9
    TRANSPARENT_WINDOW = 1 << 10

    # (mania) flags for consistently low press intervals.
    FAST_PRESS = 1 << 11

    # from my experience, pretty decent
    # for detecting autobotted scores.
    RAW_MOUSE_DISCREPANCY = 1 << 12
    RAW_KEYBOARD_DISCREPANCY = 1 << 13
GAMEMODE_REPR_LIST = (
    "vn!std",
    "vn!taiko",
    "vn!catch",
    "vn!mania",
    "rx!std",
    "rx!taiko",
    "rx!catch",
    "rx!mania",  # unused
    "ap!std",
    "ap!taiko",  # unused
    "ap!catch",  # unused
    "ap!mania",  # unused
)


@unique
@pymysql_encode(escape_enum)
class GameMode(IntEnum):
    """A server-side game mode: the 4 vanilla modes crossed with the
    vanilla/relax/autopilot variants (value = vanilla mode + 4 * variant)."""

    VANILLA_OSU = 0
    VANILLA_TAIKO = 1
    VANILLA_CATCH = 2
    VANILLA_MANIA = 3

    RELAX_OSU = 4
    RELAX_TAIKO = 5
    RELAX_CATCH = 6
    RELAX_MANIA = 7  # unused

    AUTOPILOT_OSU = 8
    AUTOPILOT_TAIKO = 9  # unused
    AUTOPILOT_CATCH = 10  # unused
    AUTOPILOT_MANIA = 11  # unused

    @classmethod
    def from_params(cls, mode_vn: int, mods: Mods) -> GameMode:
        """Resolve a server-side mode from a vanilla mode id and a mod set.

        Autopilot takes precedence over relax when both bits are present.
        """
        if mods & Mods.AUTOPILOT:
            variant_offset = 8
        elif mods & Mods.RELAX:
            variant_offset = 4
        else:
            variant_offset = 0

        return cls(mode_vn + variant_offset)

    @classmethod
    @functools.cache
    def valid_gamemodes(cls) -> list[GameMode]:
        """Return the modes actually playable on the server (unused
        relax/autopilot combinations excluded)."""
        unused_modes = (
            cls.RELAX_MANIA,
            cls.AUTOPILOT_TAIKO,
            cls.AUTOPILOT_CATCH,
            cls.AUTOPILOT_MANIA,
        )
        return [mode for mode in cls if mode not in unused_modes]

    @property
    def as_vanilla(self) -> int:
        """The underlying vanilla (osu!api) mode id, 0-3."""
        return self.value % 4

    def __repr__(self) -> str:
        return GAMEMODE_REPR_LIST[self.value]
@unique
@pymysql_encode(escape_enum)
class Mods(IntFlag):
    """Bitwise flags for osu!'s in-game score modifiers ("mods")."""

    NOMOD = 0
    NOFAIL = 1 << 0
    EASY = 1 << 1
    TOUCHSCREEN = 1 << 2  # old: 'NOVIDEO'
    HIDDEN = 1 << 3
    HARDROCK = 1 << 4
    SUDDENDEATH = 1 << 5
    DOUBLETIME = 1 << 6
    RELAX = 1 << 7
    HALFTIME = 1 << 8
    NIGHTCORE = 1 << 9
    FLASHLIGHT = 1 << 10
    AUTOPLAY = 1 << 11
    SPUNOUT = 1 << 12
    AUTOPILOT = 1 << 13
    PERFECT = 1 << 14
    KEY4 = 1 << 15
    KEY5 = 1 << 16
    KEY6 = 1 << 17
    KEY7 = 1 << 18
    KEY8 = 1 << 19
    FADEIN = 1 << 20
    RANDOM = 1 << 21
    CINEMA = 1 << 22
    TARGET = 1 << 23
    KEY9 = 1 << 24
    KEYCOOP = 1 << 25
    KEY1 = 1 << 26
    KEY3 = 1 << 27
    KEY2 = 1 << 28
    SCOREV2 = 1 << 29
    MIRROR = 1 << 30

    # Render as concatenated two-letter acronyms, e.g. "HDDTRX".
    # NOTE(review): functools.cache on an instance method normally pins
    # instances alive (ruff B019); flag members are interned singletons,
    # so the cache should stay bounded here — worth confirming on 3.11+.
    @functools.cache
    def __repr__(self) -> str:
        if self.value == Mods.NOMOD:
            return "NM"

        mod_str = []
        _dict = mod2modstr_dict  # global

        for mod in Mods:
            if self.value & mod:
                mod_str.append(_dict[mod])

        return "".join(mod_str)

    def filter_invalid_combos(self, mode_vn: int) -> Mods:
        """Remove any invalid mod combinations.

        `mode_vn` is the vanilla mode id (0=osu!, 1=taiko, 2=catch, 3=mania).
        Returns a new Mods value; `self` is not mutated in place (the `&=`
        below rebinds the local, as flags are immutable ints).
        """

        # 1. mode-inspecific mod conflictions
        _dtnc = self & (Mods.DOUBLETIME | Mods.NIGHTCORE)
        if _dtnc == (Mods.DOUBLETIME | Mods.NIGHTCORE):
            self &= ~Mods.DOUBLETIME  # DTNC
        elif _dtnc and self & Mods.HALFTIME:
            self &= ~Mods.HALFTIME  # (DT|NC)HT

        if self & Mods.EASY and self & Mods.HARDROCK:
            self &= ~Mods.HARDROCK  # EZHR

        if self & (Mods.NOFAIL | Mods.RELAX | Mods.AUTOPILOT):
            if self & Mods.SUDDENDEATH:
                self &= ~Mods.SUDDENDEATH  # (NF|RX|AP)SD
            if self & Mods.PERFECT:
                self &= ~Mods.PERFECT  # (NF|RX|AP)PF

        if self & (Mods.RELAX | Mods.AUTOPILOT):
            if self & Mods.NOFAIL:
                self &= ~Mods.NOFAIL  # (RX|AP)NF

        if self & Mods.PERFECT and self & Mods.SUDDENDEATH:
            self &= ~Mods.SUDDENDEATH  # PFSD

        # 2. remove mode-unique mods from incorrect gamemodes
        if mode_vn != 0:  # osu! specific
            self &= ~OSU_SPECIFIC_MODS

        # ctb & taiko have no unique mods

        if mode_vn != 3:  # mania specific
            self &= ~MANIA_SPECIFIC_MODS

        # 3. mode-specific mod conflictions
        if mode_vn == 0:
            if self & Mods.AUTOPILOT:
                if self & (Mods.SPUNOUT | Mods.RELAX):
                    self &= ~Mods.AUTOPILOT  # (SO|RX)AP

        if mode_vn == 3:
            self &= ~Mods.RELAX  # rx is std/taiko/ctb common
            if self & Mods.HIDDEN and self & Mods.FADEIN:
                self &= ~Mods.FADEIN  # HDFI

        # 4 remove multiple keymods
        # (iterating a flag composite yields its set members; "first" here
        # means lowest bit value, i.e. KEY4 before KEY5 etc.)
        keymods_used = self & KEY_MODS

        if bin(keymods_used).count("1") > 1:
            # keep only the first
            first_keymod = None
            for mod in KEY_MODS:
                if keymods_used & mod:
                    first_keymod = mod
                    break

            assert first_keymod is not None

            # remove all but the first keymod.
            self &= ~(keymods_used & ~first_keymod)

        return self

    @classmethod
    @functools.lru_cache(maxsize=64)
    def from_modstr(cls, s: str) -> Mods:
        """Parse mods from an acronym string; unknown chunks are ignored."""
        # from fmt: `HDDTRX`
        mods = cls.NOMOD
        _dict = modstr2mod_dict  # global

        # split into 2 character chunks
        mod_strs = [s[idx : idx + 2].upper() for idx in range(0, len(s), 2)]

        # find matching mods
        for mod in mod_strs:
            if mod not in _dict:
                continue

            mods |= _dict[mod]

        return mods

    @classmethod
    @functools.lru_cache(maxsize=64)
    def from_np(cls, s: str, mode_vn: int) -> Mods:
        """Parse mods from an in-game /np (now playing) message."""
        mods = cls.NOMOD
        _dict = npstr2mod_dict  # global

        for mod in s.split(" "):
            if mod not in _dict:
                continue

            mods |= _dict[mod]

        # NOTE: for fetching from /np, we automatically
        # call cls.filter_invalid_combos as we assume
        # the input string is from user input.
        return mods.filter_invalid_combos(mode_vn)


# two-letter acronym -> flag, as used in score submission & commands.
modstr2mod_dict = {
    "NF": Mods.NOFAIL,
    "EZ": Mods.EASY,
    "TD": Mods.TOUCHSCREEN,
    "HD": Mods.HIDDEN,
    "HR": Mods.HARDROCK,
    "SD": Mods.SUDDENDEATH,
    "DT": Mods.DOUBLETIME,
    "RX": Mods.RELAX,
    "HT": Mods.HALFTIME,
    "NC": Mods.NIGHTCORE,
    "FL": Mods.FLASHLIGHT,
    "AU": Mods.AUTOPLAY,
    "SO": Mods.SPUNOUT,
    "AP": Mods.AUTOPILOT,
    "PF": Mods.PERFECT,
    "FI": Mods.FADEIN,
    "RN": Mods.RANDOM,
    "CN": Mods.CINEMA,
    "TP": Mods.TARGET,
    "V2": Mods.SCOREV2,
    "MR": Mods.MIRROR,
    "1K": Mods.KEY1,
    "2K": Mods.KEY2,
    "3K": Mods.KEY3,
    "4K": Mods.KEY4,
    "5K": Mods.KEY5,
    "6K": Mods.KEY6,
    "7K": Mods.KEY7,
    "8K": Mods.KEY8,
    "9K": Mods.KEY9,
    "CO": Mods.KEYCOOP,
}

# /np message token -> flag (tokens exactly as the osu! client emits them).
npstr2mod_dict = {
    "-NoFail": Mods.NOFAIL,
    "-Easy": Mods.EASY,
    "+Hidden": Mods.HIDDEN,
    "+HardRock": Mods.HARDROCK,
    "+SuddenDeath": Mods.SUDDENDEATH,
    "+DoubleTime": Mods.DOUBLETIME,
    "~Relax~": Mods.RELAX,
    "-HalfTime": Mods.HALFTIME,
    "+Nightcore": Mods.NIGHTCORE,
    "+Flashlight": Mods.FLASHLIGHT,
    "|Autoplay|": Mods.AUTOPLAY,
    "-SpunOut": Mods.SPUNOUT,
    "~Autopilot~": Mods.AUTOPILOT,
    "+Perfect": Mods.PERFECT,
    "|Cinema|": Mods.CINEMA,
    "~Target~": Mods.TARGET,
    # perhaps could modify regex
    # to only allow these once,
    # and only at the end of str?
    "|1K|": Mods.KEY1,
    "|2K|": Mods.KEY2,
    "|3K|": Mods.KEY3,
    "|4K|": Mods.KEY4,
    "|5K|": Mods.KEY5,
    "|6K|": Mods.KEY6,
    "|7K|": Mods.KEY7,
    "|8K|": Mods.KEY8,
    "|9K|": Mods.KEY9,
    # XXX: kinda mood that there's no way
    # to tell K1-K4 co-op from /np, but
    # scores won't submit or anything, so
    # it's not ultimately a problem.
    "|10K|": Mods.KEY5 | Mods.KEYCOOP,
    "|12K|": Mods.KEY6 | Mods.KEYCOOP,
    "|14K|": Mods.KEY7 | Mods.KEYCOOP,
    "|16K|": Mods.KEY8 | Mods.KEYCOOP,
    "|18K|": Mods.KEY9 | Mods.KEYCOOP,
}

# flag -> two-letter acronym; the inverse of modstr2mod_dict
# (used by Mods.__repr__).
mod2modstr_dict = {
    Mods.NOFAIL: "NF",
    Mods.EASY: "EZ",
    Mods.TOUCHSCREEN: "TD",
    Mods.HIDDEN: "HD",
    Mods.HARDROCK: "HR",
    Mods.SUDDENDEATH: "SD",
    Mods.DOUBLETIME: "DT",
    Mods.RELAX: "RX",
    Mods.HALFTIME: "HT",
    Mods.NIGHTCORE: "NC",
    Mods.FLASHLIGHT: "FL",
    Mods.AUTOPLAY: "AU",
    Mods.SPUNOUT: "SO",
    Mods.AUTOPILOT: "AP",
    Mods.PERFECT: "PF",
    Mods.FADEIN: "FI",
    Mods.RANDOM: "RN",
    Mods.CINEMA: "CN",
    Mods.TARGET: "TP",
    Mods.SCOREV2: "V2",
    Mods.MIRROR: "MR",
    Mods.KEY1: "1K",
    Mods.KEY2: "2K",
    Mods.KEY3: "3K",
    Mods.KEY4: "4K",
    Mods.KEY5: "5K",
    Mods.KEY6: "6K",
    Mods.KEY7: "7K",
    Mods.KEY8: "8K",
    Mods.KEY9: "9K",
    Mods.KEYCOOP: "CO",
}

# mask of all mania key-count mods.
KEY_MODS = (
    Mods.KEY1
    | Mods.KEY2
    | Mods.KEY3
    | Mods.KEY4
    | Mods.KEY5
    | Mods.KEY6
    | Mods.KEY7
    | Mods.KEY8
    | Mods.KEY9
)

# FREE_MOD_ALLOWED = (
#     Mods.NOFAIL | Mods.EASY | Mods.HIDDEN | Mods.HARDROCK |
#     Mods.SUDDENDEATH | Mods.FLASHLIGHT | Mods.FADEIN |
#     Mods.RELAX | Mods.AUTOPILOT | Mods.SPUNOUT | KEY_MODS
# )

SCORE_INCREASE_MODS = (
    Mods.HIDDEN | Mods.HARDROCK | Mods.FADEIN | Mods.DOUBLETIME | Mods.FLASHLIGHT
)

SPEED_CHANGING_MODS = Mods.DOUBLETIME | Mods.NIGHTCORE | Mods.HALFTIME

OSU_SPECIFIC_MODS = Mods.AUTOPILOT | Mods.SPUNOUT | Mods.TARGET
# taiko & catch have no specific mods
MANIA_SPECIFIC_MODS = Mods.MIRROR | Mods.RANDOM | Mods.FADEIN | KEY_MODS
# Compiled patterns shared across the server.
#
# BUG FIX: the named-group names had been stripped from these patterns
# (e.g. `(?P\d{8})` instead of `(?P<date>\d{8})`), which is invalid
# regex syntax — `re.compile` raises `re.error` at import time and the
# callers access matches by group name. Restored the group names.

# osu! client version string, e.g. "b20220406.3cuttingedge".
OSU_VERSION = re.compile(
    r"^b(?P<date>\d{8})(?:\.(?P<revision>\d))?"
    r"(?P<stream>beta|cuttingedge|dev|tourney)?$",
)

# 2-15 chars: alphanumerics, underscore, space, brackets, hyphen.
USERNAME = re.compile(r"^[\w \[\]-]{2,15}$")
EMAIL = re.compile(r"^[^@\s]{1,200}@[^@\s\.]{1,30}(?:\.[^@\.\s]{2,24})+$")

# tournament match title, e.g. "OWC2020: (Team A) vs. (Team B)".
TOURNEY_MATCHNAME = re.compile(
    r"^(?P<name>[a-zA-Z0-9_ ]+): "
    r"\((?P<T1>[a-zA-Z0-9_ ]+)\)"
    r" vs\.? "
    r"\((?P<T2>[a-zA-Z0-9_ ]+)\)$",
    flags=re.IGNORECASE,
)

# mappool pick, e.g. "NM1" -> ("NM", "1").
MAPPOOL_PICK = re.compile(r"^([a-zA-Z]+)([0-9]+)$")

# best-of specifier, e.g. "bo7" or "7".
BEST_OF = re.compile(r"^(?:bo)?(\d{1,2})$")
class Webhook:
    """A class to represent a single-use Discord webhook."""

    def __init__(self, url: str, **kwargs: Any) -> None:
        self.url = url
        self.content = kwargs.get("content")
        self.username = kwargs.get("username")
        self.avatar_url = kwargs.get("avatar_url")
        self.tts = kwargs.get("tts")
        self.file = kwargs.get("file")
        self.embeds = kwargs.get("embeds", [])

    def add_embed(self, embed: Embed) -> None:
        """Attach an embed to the payload."""
        self.embeds.append(embed)

    @property
    def json(self) -> Any:
        """Serialize the webhook into a Discord-compatible payload dict."""
        if not any([self.content, self.file, self.embeds]):
            raise Exception(
                "Webhook must contain at least one of (content, file, embeds).",
            )

        if self.content and len(self.content) > 2000:
            raise Exception("Webhook content must be under 2000 characters.")

        payload: dict[str, Any] = {"embeds": []}

        # top-level scalar fields, included only when set.
        for attr_name in ("content", "username", "avatar_url", "tts", "file"):
            attr_value = getattr(self, attr_name)
            if attr_value is not None:
                payload[attr_name] = attr_value

        for embed in self.embeds:
            # scalar embed fields, included only when set.
            serialized = {
                key: value
                for key in ("title", "type", "description", "url", "timestamp", "color")
                if (value := getattr(embed, key)) is not None
            }

            # component objects are flattened into plain dicts.
            for key in ("footer", "image", "thumbnail", "video", "provider", "author"):
                component = getattr(embed, key)
                if component is not None:
                    serialized[key] = component.__dict__

            if embed.fields:
                serialized["fields"] = [field.__dict__ for field in embed.fields]

            payload["embeds"].append(serialized)

        return payload

    @retry(
        stop=stop_after_attempt(10),
        wait=wait_exponential(multiplier=1, min=4, max=10),
    )
    async def post(self) -> None:
        """Post the webhook in JSON format."""
        # TODO: if `self.file is not None`, then we should
        # use multipart/form-data instead of json payload.
        headers = {"Content-Type": "application/json"}
        response = await services.http_client.post(
            self.url,
            json=self.json,
            headers=headers,
        )
        response.raise_for_status()
def decrypt_score_aes_data(
    # to decode
    score_data_b64: bytes,
    client_hash_b64: bytes,
    # used for decoding
    iv_b64: bytes,
    osu_version: str,
) -> tuple[list[str], str]:
    """Decrypt the base64'ed score data."""
    # TODO: perhaps this should return TypedDict?

    # the AES key is derived from the submitting client's version string.
    cipher = RijndaelCbc(
        key=f"osu!-scoreburgr---------{osu_version}".encode(),
        iv=b64decode(iv_b64),
        padding=Pkcs7Padding(32),
        block_size=32,
    )

    def _decode(payload_b64: bytes) -> str:
        return cipher.decrypt(b64decode(payload_b64)).decode()

    # score data is delimited by colons (:).
    score_data = _decode(score_data_b64).split(":")
    client_hash_decoded = _decode(client_hash_b64)

    return score_data, client_hash_decoded
+ return score_data, client_hash_decoded diff --git a/app/logging.py b/app/logging.py new file mode 100644 index 0000000..bffed18 --- /dev/null +++ b/app/logging.py @@ -0,0 +1,102 @@ +from __future__ import annotations + +import datetime +import logging.config +import re +from collections.abc import Mapping +from enum import IntEnum +from zoneinfo import ZoneInfo + +import yaml + +from app import settings + + +def configure_logging() -> None: + with open("logging.yaml") as f: + config = yaml.safe_load(f.read()) + logging.config.dictConfig(config) + + +class Ansi(IntEnum): + # Default colours + BLACK = 30 + RED = 31 + GREEN = 32 + YELLOW = 33 + BLUE = 34 + MAGENTA = 35 + CYAN = 36 + WHITE = 37 + + # Light colours + GRAY = 90 + LRED = 91 + LGREEN = 92 + LYELLOW = 93 + LBLUE = 94 + LMAGENTA = 95 + LCYAN = 96 + LWHITE = 97 + + RESET = 0 + + def __repr__(self) -> str: + return f"\x1b[{self.value}m" + + +def get_timestamp(full: bool = False, tz: ZoneInfo | None = None) -> str: + fmt = "%d/%m/%Y %I:%M:%S%p" if full else "%I:%M:%S%p" + return f"{datetime.datetime.now(tz=tz):{fmt}}" + + +ANSI_ESCAPE_REGEX = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -\/]*[@-~]") + + +def escape_ansi(line: str) -> str: + return ANSI_ESCAPE_REGEX.sub("", line) + + +ROOT_LOGGER = logging.getLogger() + + +def log( + msg: str, + start_color: Ansi | None = None, + extra: Mapping[str, object] | None = None, +) -> None: + """\ + A thin wrapper around the stdlib logging module to handle mostly + backwards-compatibility for colours during our migration to the + standard library logging module. + """ + + # TODO: decouple colors from the base logging function; move it to + # be a formatter-specific concern such that we can log without color. 
+ if start_color is Ansi.LYELLOW: + log_level = logging.WARNING + elif start_color is Ansi.LRED: + log_level = logging.ERROR + else: + log_level = logging.INFO + + if settings.LOG_WITH_COLORS: + color_prefix = f"{start_color!r}" if start_color is not None else "" + color_suffix = f"{Ansi.RESET!r}" if start_color is not None else "" + else: + msg = escape_ansi(msg) + color_prefix = color_suffix = "" + + ROOT_LOGGER.log(log_level, f"{color_prefix}{msg}{color_suffix}", extra=extra) + + +TIME_ORDER_SUFFIXES = ["nsec", "μsec", "msec", "sec"] + + +def magnitude_fmt_time(nanosec: int | float) -> str: + suffix = None + for suffix in TIME_ORDER_SUFFIXES: + if nanosec < 1000: + break + nanosec /= 1000 + return f"{nanosec:.2f} {suffix}" diff --git a/app/objects/__init__.py b/app/objects/__init__.py new file mode 100644 index 0000000..761dd98 --- /dev/null +++ b/app/objects/__init__.py @@ -0,0 +1,11 @@ +# type: ignore +# isort: dont-add-imports + +from . import achievement +from . import beatmap +from . import channel +from . import collections +from . import match +from . import models +from . import player +from . import score diff --git a/app/objects/achievement.py b/app/objects/achievement.py new file mode 100644 index 0000000..df76723 --- /dev/null +++ b/app/objects/achievement.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from collections.abc import Callable +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from app.objects.score import Score + + +class Achievement: + """A class to represent a single osu! 
class Achievement:
    """A class to represent a single osu! achievement."""

    def __init__(
        self,
        id: int,
        file: str,
        name: str,
        desc: str,
        cond: "Callable[[Score, int], bool]",  # (score, mode) -> unlocked
    ) -> None:
        # identity & display metadata
        self.id = id
        self.file = file
        self.name = name
        self.desc = desc

        # predicate deciding whether a score unlocks this achievement.
        self.cond = cond

    def __repr__(self) -> str:
        # the client-facing wire format: "file+name+desc".
        return "+".join((self.file, self.name, self.desc))
+ """ + if app.settings.DEBUG: + log(f"Doing api (getbeatmaps) request {params}", Ansi.LMAGENTA) + + if app.settings.OSU_API_KEY: + # https://github.com/ppy/osu-api/wiki#apiget_beatmaps + url = "https://old.ppy.sh/api/get_beatmaps" + params["k"] = str(app.settings.OSU_API_KEY) + else: + # https://osu.direct/doc + url = "https://osu.direct/api/get_beatmaps" + + response = await app.state.services.http_client.get(url, params=params) + response_data = response.json() + if response.status_code == 200 and response_data: # (data may be []) + return {"data": response_data, "status_code": response.status_code} + + return {"data": None, "status_code": response.status_code} + + +@retry(reraise=True, stop=stop_after_attempt(3)) +async def api_get_osu_file(beatmap_id: int) -> bytes: + url = f"https://old.ppy.sh/osu/{beatmap_id}" + response = await app.state.services.http_client.get(url) + response.raise_for_status() + return response.read() + + +def disk_has_expected_osu_file( + beatmap_id: int, + expected_md5: str | None = None, +) -> bool: + osu_file_path = BEATMAPS_PATH / f"{beatmap_id}.osu" + file_exists = osu_file_path.exists() + if file_exists and expected_md5 is not None: + osu_file_md5 = hashlib.md5(osu_file_path.read_bytes()).hexdigest() + return osu_file_md5 == expected_md5 + return file_exists + + +def write_osu_file_to_disk(beatmap_id: int, data: bytes) -> None: + osu_file_path = BEATMAPS_PATH / f"{beatmap_id}.osu" + osu_file_path.write_bytes(data) + + +async def ensure_osu_file_is_available( + beatmap_id: int, + expected_md5: str | None = None, +) -> bool: + """\ + Download the .osu file for a beatmap if it's not already present. + + If `expected_md5` is provided, the file will be downloaded if it + does not match the expected md5 hash -- this is typically used for + ensuring a file is the latest expected version. + + Returns whether the file is available for use. 
+ """ + if disk_has_expected_osu_file(beatmap_id, expected_md5): + return True + + try: + latest_osu_file = await api_get_osu_file(beatmap_id) + except httpx.HTTPStatusError: + return False + except Exception: + log(f"Failed to fetch osu file for {beatmap_id}", Ansi.LRED) + return False + + write_osu_file_to_disk(beatmap_id, latest_osu_file) + return True + + +# for some ungodly reason, different values are used to +# represent different ranked statuses all throughout osu! +# This drives me and probably everyone else pretty insane, +# but we have nothing to do but deal with it B). + + +@unique +@pymysql_encode(escape_enum) +class RankedStatus(IntEnum): + """Server side osu! beatmap ranked statuses. + Same as used in osu!'s /web/getscores.php. + """ + + NotSubmitted = -1 + Pending = 0 + UpdateAvailable = 1 + Ranked = 2 + Approved = 3 + Qualified = 4 + Loved = 5 + + def __str__(self) -> str: + return { + self.NotSubmitted: "Unsubmitted", + self.Pending: "Unranked", + self.UpdateAvailable: "Outdated", + self.Ranked: "Ranked", + self.Approved: "Approved", + self.Qualified: "Qualified", + self.Loved: "Loved", + }[self] + + @functools.cached_property + def osu_api(self) -> int: + """Convert the value to osu!api status.""" + # XXX: only the ones that exist are mapped. 
+ return { + self.Pending: 0, + self.Ranked: 1, + self.Approved: 2, + self.Qualified: 3, + self.Loved: 4, + }[self] + + @classmethod + @functools.cache + def from_osuapi(cls, osuapi_status: int) -> RankedStatus: + """Convert from osu!api status.""" + mapping: Mapping[int, RankedStatus] = defaultdict( + lambda: cls.UpdateAvailable, + { + -2: cls.Pending, # graveyard + -1: cls.Pending, # wip + 0: cls.Pending, + 1: cls.Ranked, + 2: cls.Approved, + 3: cls.Qualified, + 4: cls.Loved, + }, + ) + return mapping[osuapi_status] + + @classmethod + @functools.cache + def from_osudirect(cls, osudirect_status: int) -> RankedStatus: + """Convert from osu!direct status.""" + mapping: Mapping[int, RankedStatus] = defaultdict( + lambda: cls.UpdateAvailable, + { + 0: cls.Ranked, + 2: cls.Pending, + 3: cls.Qualified, + # 4: all ranked statuses lol + 5: cls.Pending, # graveyard + 7: cls.Ranked, # played before + 8: cls.Loved, + }, + ) + return mapping[osudirect_status] + + @classmethod + @functools.cache + def from_str(cls, status_str: str) -> RankedStatus: + """Convert from string value.""" # could perhaps have `'unranked': cls.Pending`? + mapping: Mapping[str, RankedStatus] = defaultdict( + lambda: cls.UpdateAvailable, + { + "pending": cls.Pending, + "ranked": cls.Ranked, + "approved": cls.Approved, + "qualified": cls.Qualified, + "loved": cls.Loved, + }, + ) + return mapping[status_str] + + +# @dataclass +# class BeatmapInfoRequest: +# filenames: Sequence[str] +# ids: Sequence[int] + +# @dataclass +# class BeatmapInfo: +# id: int # i16 +# map_id: int # i32 +# set_id: int # i32 +# thread_id: int # i32 +# status: int # u8 +# osu_rank: int # u8 +# fruits_rank: int # u8 +# taiko_rank: int # u8 +# mania_rank: int # u8 +# map_md5: str + + +class Beatmap: + """A class representing an osu! beatmap. + + This class provides a high level api which should always be the + preferred method of fetching beatmaps due to its housekeeping. 
+ It will perform caching & invalidation, handle map updates while + minimizing osu!api requests, and always use the most efficient + method available to fetch the beatmap's information, while + maintaining a low overhead. + + The only methods you should need are: + await Beatmap.from_md5(md5: str, set_id: int = -1) -> Beatmap | None + await Beatmap.from_bid(bid: int) -> Beatmap | None + + Properties: + Beatmap.full -> str # Artist - Title [Version] + Beatmap.url -> str # https://osu.cmyui.xyz/b/321 + Beatmap.embed -> str # [{url} {full}] + + Beatmap.has_leaderboard -> bool + Beatmap.awards_ranked_pp -> bool + Beatmap.as_dict -> dict[str, object] + + Lower level API: + Beatmap._from_md5_cache(md5: str, check_updates: bool = True) -> Beatmap | None + Beatmap._from_bid_cache(bid: int, check_updates: bool = True) -> Beatmap | None + + Beatmap._from_md5_sql(md5: str) -> Beatmap | None + Beatmap._from_bid_sql(bid: int) -> Beatmap | None + + Beatmap._parse_from_osuapi_resp(osuapi_resp: dict[str, object]) -> None + + Note that the BeatmapSet class also provides a similar API. + + Possibly confusing attributes + ----------- + frozen: `bool` + Whether the beatmap's status is to be kept when a newer + version is found in the osu!api. + # XXX: This is set when a map's status is manually changed. 
+ """ + + def __init__( + self, + map_set: BeatmapSet, + md5: str = "", + id: int = 0, + set_id: int = 0, + artist: str = "", + title: str = "", + version: str = "", + creator: str = "", + last_update: datetime = DEFAULT_LAST_UPDATE, + total_length: int = 0, + max_combo: int = 0, + status: RankedStatus = RankedStatus.Pending, + frozen: bool = False, + plays: int = 0, + passes: int = 0, + mode: GameMode = GameMode.VANILLA_OSU, + bpm: float = 0.0, + cs: float = 0.0, + od: float = 0.0, + ar: float = 0.0, + hp: float = 0.0, + diff: float = 0.0, + filename: str = "", + ) -> None: + self.set = map_set + + self.md5 = md5 + self.id = id + self.set_id = set_id + self.artist = artist + self.title = title + self.version = version + self.creator = creator + self.last_update = last_update + self.total_length = total_length + self.max_combo = max_combo + self.status = status + self.frozen = frozen + self.plays = plays + self.passes = passes + self.mode = mode + self.bpm = bpm + self.cs = cs + self.od = od + self.ar = ar + self.hp = hp + self.diff = diff + self.filename = filename + + def __repr__(self) -> str: + return self.full_name + + @property + def full_name(self) -> str: + """The full osu! formatted name `self`.""" + return f"{self.artist} - {self.title} [{self.version}]" + + @property + def url(self) -> str: + """The osu! beatmap url for `self`.""" + return f"https://osu.{app.settings.DOMAIN}/b/{self.id}" + + @property + def embed(self) -> str: + """An osu! chat embed to `self`'s osu! 
beatmap page.""" + return f"[{self.url} {self.full_name}]" + + @property + def has_leaderboard(self) -> bool: + """Return whether the map has a ranked leaderboard.""" + return self.status in ( + RankedStatus.Ranked, + RankedStatus.Approved, + RankedStatus.Loved, + ) + + @property + def awards_ranked_pp(self) -> bool: + """Return whether the map's status awards ranked pp for scores.""" + return self.status in (RankedStatus.Ranked, RankedStatus.Approved) + + @property # perhaps worth caching some of? + def as_dict(self) -> dict[str, object]: + return { + "md5": self.md5, + "id": self.id, + "set_id": self.set_id, + "artist": self.artist, + "title": self.title, + "version": self.version, + "creator": self.creator, + "last_update": self.last_update, + "total_length": self.total_length, + "max_combo": self.max_combo, + "status": self.status, + "plays": self.plays, + "passes": self.passes, + "mode": self.mode, + "bpm": self.bpm, + "cs": self.cs, + "od": self.od, + "ar": self.ar, + "hp": self.hp, + "diff": self.diff, + } + + """ High level API """ + # There are three levels of storage used for beatmaps, + # the cache (ram), the db (disk), and the osu!api (web). + # Going down this list gets exponentially slower, so + # we always prioritze what's fastest when possible. + # These methods will keep beatmaps reasonably up to + # date and use the fastest storage available, while + # populating the higher levels of the cache with new maps. 
    @classmethod
    async def from_md5(cls, md5: str, set_id: int = -1) -> Beatmap | None:
        """Fetch a map from the cache, database, or osuapi by md5.

        `set_id` may be passed when it is already known, skipping the
        md5 -> set id lookup; values <= 0 mean "unknown" and trigger
        a database (and, failing that, osu!api) lookup.
        Returns None when the map cannot be found anywhere.
        """
        bmap = await cls._from_md5_cache(md5)

        if not bmap:
            # map not found in cache

            # to be efficient, we want to cache the whole set
            # at once rather than caching the individual map

            if set_id <= 0:
                # set id not provided - fetch it from the map md5
                rec = await maps_repo.fetch_one(md5=md5)

                if rec is not None:
                    # set found in db
                    set_id = rec["set_id"]
                else:
                    # set not found in db, try api
                    api_data = await api_get_beatmaps(h=md5)

                    if api_data["data"] is None:
                        return None

                    api_response = api_data["data"]
                    set_id = int(api_response[0]["beatmapset_id"])

            # fetch (and cache) beatmap set
            beatmap_set = await BeatmapSet.from_bsid(set_id)

            if beatmap_set is not None:
                # the beatmap set has been cached - fetch beatmap from cache
                bmap = await cls._from_md5_cache(md5)

                # XXX:HACK in this case, BeatmapSet.from_bsid will have
                # ensured the map is up to date, so we can just return it
                return bmap

        # cache hit path: refresh the owning set if its cache window expired
        if bmap is not None:
            if bmap.set._cache_expired():
                await bmap.set._update_if_available()

        return bmap

    @classmethod
    async def from_bid(cls, bid: int) -> Beatmap | None:
        """Fetch a map from the cache, database, or osuapi by id.

        Returns None when the map cannot be found anywhere.
        """
        bmap = await cls._from_bid_cache(bid)

        if not bmap:
            # map not found in cache

            # to be efficient, we want to cache the whole set
            # at once rather than caching the individual map

            rec = await maps_repo.fetch_one(id=bid)

            if rec is not None:
                # set found in db
                set_id = rec["set_id"]
            else:
                # set not found in db, try getting via api
                api_data = await api_get_beatmaps(b=bid)

                if api_data["data"] is None:
                    return None

                api_response = api_data["data"]
                set_id = int(api_response[0]["beatmapset_id"])

            # fetch (and cache) beatmap set
            beatmap_set = await BeatmapSet.from_bsid(set_id)

            if beatmap_set is not None:
                # the beatmap set has been cached - fetch beatmap from cache
                bmap = await cls._from_bid_cache(bid)

                # XXX:HACK in this case, BeatmapSet.from_bsid will have
                # ensured the map is up to date, so we can just return it
                return bmap

        # cache hit path: refresh the owning set if its cache window expired
        if bmap is not None:
            if bmap.set._cache_expired():
                await bmap.set._update_if_available()

        return bmap

    """ Lower level API """
    # These functions are meant for internal use under
    # all normal circumstances and should only be used
    # if you're really modifying bancho.py by adding new
    # features, or perhaps optimizing parts of the code.

    def _parse_from_osuapi_resp(self, osuapi_resp: dict[str, Any]) -> None:
        """Change internal data with the data in osu!api format.

        # assumes `osuapi_resp` is one element of the legacy osu!api v1
        # get_beatmaps response (keys: file_md5, beatmapset_id, artist,
        # title, version, creator, last_update, ...) — TODO confirm
        """
        # NOTE: `self` is not guaranteed to have any attributes
        # initialized when this is called.
        self.md5 = osuapi_resp["file_md5"]
        # self.id = int(osuapi_resp['beatmap_id'])
        self.set_id = int(osuapi_resp["beatmapset_id"])

        self.artist, self.title, self.version, self.creator = (
            osuapi_resp["artist"],
            osuapi_resp["title"],
            osuapi_resp["version"],
            osuapi_resp["creator"],
        )

        # build the on-disk .osu filename, stripping characters the
        # server considers invalid (IGNORED_BEATMAP_CHARS).
        self.filename = (
            ("{artist} - {title} ({creator}) [{version}].osu")
            .format(**osuapi_resp)
            .translate(IGNORED_BEATMAP_CHARS)
        )

        # quite a bit faster than using dt.strptime.
        # (slices a fixed-width 'YYYY-MM-DD HH:MM:SS' string)
        _last_update = osuapi_resp["last_update"]
        self.last_update = datetime(
            year=int(_last_update[0:4]),
            month=int(_last_update[5:7]),
            day=int(_last_update[8:10]),
            hour=int(_last_update[11:13]),
            minute=int(_last_update[14:16]),
            second=int(_last_update[17:19]),
        )

        self.total_length = int(osuapi_resp["total_length"])

        # max_combo is null for some modes/maps in the api response
        if osuapi_resp["max_combo"] is not None:
            self.max_combo = int(osuapi_resp["max_combo"])
        else:
            self.max_combo = 0

        # if a map is 'frozen', we keep its status
        # even after an update from the osu!api.
+ if not getattr(self, "frozen", False): + osuapi_status = int(osuapi_resp["approved"]) + self.status = RankedStatus.from_osuapi(osuapi_status) + + self.mode = GameMode(int(osuapi_resp["mode"])) + + if osuapi_resp["bpm"] is not None: + self.bpm = float(osuapi_resp["bpm"]) + else: + self.bpm = 0.0 + + self.cs = float(osuapi_resp["diff_size"]) + self.od = float(osuapi_resp["diff_overall"]) + self.ar = float(osuapi_resp["diff_approach"]) + self.hp = float(osuapi_resp["diff_drain"]) + + self.diff = float(osuapi_resp["difficultyrating"]) + + @staticmethod + async def _from_md5_cache(md5: str) -> Beatmap | None: + """Fetch a map from the cache by md5.""" + return app.state.cache.beatmap.get(md5, None) + + @staticmethod + async def _from_bid_cache(bid: int) -> Beatmap | None: + """Fetch a map from the cache by id.""" + return app.state.cache.beatmap.get(bid, None) + + +class BeatmapSet: + """A class to represent an osu! beatmap set. + + Like the Beatmap class, this class provides a high level api + which should always be the preferred method of fetching beatmaps + due to its housekeeping. It will perform caching & invalidation, + handle map updates while minimizing osu!api requests, and always + use the most efficient method available to fetch the beatmap's + information, while maintaining a low overhead. 
+ + The only methods you should need are: + await BeatmapSet.from_bsid(bsid: int) -> BeatmapSet | None + + BeatmapSet.all_officially_ranked_or_approved() -> bool + BeatmapSet.all_officially_loved() -> bool + + Properties: + BeatmapSet.url -> str # https://osu.cmyui.xyz/s/123 + + Lower level API: + await BeatmapSet._from_bsid_cache(bsid: int) -> BeatmapSet | None + await BeatmapSet._from_bsid_sql(bsid: int) -> BeatmapSet | None + await BeatmapSet._from_bsid_osuapi(bsid: int) -> BeatmapSet | None + + BeatmapSet._cache_expired() -> bool + await BeatmapSet._update_if_available() -> None + await BeatmapSet._save_to_sql() -> None + """ + + def __init__( + self, + id: int, + last_osuapi_check: datetime, + maps: list[Beatmap] | None = None, + ) -> None: + self.id = id + + self.maps = maps or [] + self.last_osuapi_check = last_osuapi_check + + def __repr__(self) -> str: + map_names = [] + for bmap in self.maps: + name = f"{bmap.artist} - {bmap.title}" + if name not in map_names: + map_names.append(name) + return ", ".join(map_names) + + @property + def url(self) -> str: + """The online url for this beatmap set.""" + return f"https://osu.{app.settings.DOMAIN}/s/{self.id}" + + def any_beatmaps_have_official_leaderboards(self) -> bool: + """Whether all the maps in the set have leaderboards on official servers.""" + leaderboard_having_statuses = ( + RankedStatus.Loved, + RankedStatus.Ranked, + RankedStatus.Approved, + ) + return any(bmap.status in leaderboard_having_statuses for bmap in self.maps) + + def _cache_expired(self) -> bool: + """Whether the cached version of the set is + expired and needs an update from the osu!api.""" + current_datetime = datetime.now() + + if not self.maps: + return True + + # the delta between cache invalidations will increase depending + # on how long it's been since the map was last updated on osu! 
        last_map_update = max(bmap.last_update for bmap in self.maps)
        update_delta = current_datetime - last_map_update

        # with a minimum of 2 hours, add 5 hours per year since its update.
        # the formula for this is subject to adjustment in the future.
        check_delta = timedelta(hours=2 + ((5 / 365) * update_delta.days))

        # it's much less likely that a beatmapset who has beatmaps with
        # leaderboards on official servers will be updated.
        if self.any_beatmaps_have_official_leaderboards():
            check_delta *= 4

        # we'll cache for an absolute maximum of 1 day.
        check_delta = min(check_delta, timedelta(days=1))

        return current_datetime > (self.last_osuapi_check + check_delta)

    async def _update_if_available(self) -> None:
        """Fetch the newest data from the api, check for differences
        and propagate any update into our cache & database."""

        try:
            api_data = await api_get_beatmaps(s=self.id)
        except (httpx.TransportError, httpx.DecodingError):
            # NOTE: TransportError is directly caused by the API being unavailable

            # NOTE: DecodingError is caused by the API returning HTML and
            # normally happens when CF protection is enabled while
            # osu! recovers from a DDOS attack

            # we do not want to delete the beatmap in this case, so we simply return
            # but do not set the last check, as we would like to retry these ASAP

            return

        if api_data["data"] is not None:
            api_response = api_data["data"]

            # index current (local) and incoming (api) maps by beatmap id
            old_maps = {bmap.id: bmap for bmap in self.maps}
            new_maps = {int(api_map["beatmap_id"]): api_map for api_map in api_response}

            self.last_osuapi_check = datetime.now()

            # delete maps from old_maps where old.id not in new_maps
            # update maps from old_maps where old.md5 != new.md5
            # add maps to old_maps where new.id not in old_maps

            updated_maps: list[Beatmap] = []
            map_md5s_to_delete: set[str] = set()

            # temp value for building the new beatmap
            bmap: Beatmap

            # find maps in our current state that've been deleted, or need updates
            for old_id, old_map in old_maps.items():
                if old_id not in new_maps:
                    # delete map from old_maps
                    map_md5s_to_delete.add(old_map.md5)
                else:
                    new_map = new_maps[old_id]
                    new_ranked_status = RankedStatus.from_osuapi(
                        int(new_map["approved"]),
                    )
                    # a changed md5 means a new map version; a changed
                    # ranked status alone also warrants a re-parse
                    if (
                        old_map.md5 != new_map["file_md5"]
                        or old_map.status != new_ranked_status
                    ):
                        # update map from old_maps
                        bmap = old_maps[old_id]
                        bmap._parse_from_osuapi_resp(new_map)
                        updated_maps.append(bmap)
                    else:
                        # map is the same, make no changes
                        updated_maps.append(old_map)  # (this line is _maybe_ needed?)
            # find maps that aren't in our current state, and add them
            for new_id, new_map in new_maps.items():
                if new_id not in old_maps:
                    # new map we don't have locally, add it
                    bmap = Beatmap.__new__(Beatmap)
                    bmap.id = new_id

                    bmap._parse_from_osuapi_resp(new_map)

                    # (some implementation-specific stuff not given by api)
                    bmap.frozen = False
                    bmap.passes = 0
                    bmap.plays = 0

                    bmap.set = self
                    updated_maps.append(bmap)

            # save changes to cache
            self.maps = updated_maps

            # save changes to sql

            if map_md5s_to_delete:
                # delete maps
                # NOTE(review): passing a set for `IN :map_md5s` relies on
                # the db driver expanding the collection — confirm the
                # adapter supports this for all set sizes
                await app.state.services.database.execute(
                    "DELETE FROM maps WHERE md5 IN :map_md5s",
                    {"map_md5s": map_md5s_to_delete},
                )

                # delete scores on the maps
                await app.state.services.database.execute(
                    "DELETE FROM scores WHERE map_md5 IN :map_md5s",
                    {"map_md5s": map_md5s_to_delete},
                )

            # update last_osuapi_check
            await app.state.services.database.execute(
                "REPLACE INTO mapsets "
                "(id, server, last_osuapi_check) "
                "VALUES (:id, :server, :last_osuapi_check)",
                {
                    "id": self.id,
                    "server": "osu!",
                    "last_osuapi_check": self.last_osuapi_check,
                },
            )

            # update maps in sql
            await self._save_to_sql()
        elif api_data["status_code"] in (404, 200):
            # NOTE: 200 can return an empty array of beatmaps,
            # so we still delete in this case if the beatmap data is None

            # TODO: a couple of open questions here:
            # - should we delete the beatmap from the database if it's not in the osu!api?
            # - are 404 and 200 the only cases where we should delete the beatmap?
+ if self.maps: + map_md5s_to_delete = {bmap.md5 for bmap in self.maps} + + # delete maps + await app.state.services.database.execute( + "DELETE FROM maps WHERE md5 IN :map_md5s", + {"map_md5s": map_md5s_to_delete}, + ) + + # delete scores on the maps + await app.state.services.database.execute( + "DELETE FROM scores WHERE map_md5 IN :map_md5s", + {"map_md5s": map_md5s_to_delete}, + ) + + # delete set + await app.state.services.database.execute( + "DELETE FROM mapsets WHERE id = :set_id", + {"set_id": self.id}, + ) + + async def _save_to_sql(self) -> None: + """Save the object's attributes into the database.""" + await app.state.services.database.execute_many( + "REPLACE INTO maps (" + "md5, id, server, set_id, " + "artist, title, version, creator, " + "filename, last_update, total_length, " + "max_combo, status, frozen, " + "plays, passes, mode, bpm, " + "cs, od, ar, hp, diff" + ") VALUES (" + ":md5, :id, :server, :set_id, " + ":artist, :title, :version, :creator, " + ":filename, :last_update, :total_length, " + ":max_combo, :status, :frozen, " + ":plays, :passes, :mode, :bpm, " + ":cs, :od, :ar, :hp, :diff" + ")", + [ + { + "md5": bmap.md5, + "id": bmap.id, + "server": "osu!", + "set_id": bmap.set_id, + "artist": bmap.artist, + "title": bmap.title, + "version": bmap.version, + "creator": bmap.creator, + "filename": bmap.filename, + "last_update": bmap.last_update, + "total_length": bmap.total_length, + "max_combo": bmap.max_combo, + "status": bmap.status, + "frozen": bmap.frozen, + "plays": bmap.plays, + "passes": bmap.passes, + "mode": bmap.mode, + "bpm": bmap.bpm, + "cs": bmap.cs, + "od": bmap.od, + "ar": bmap.ar, + "hp": bmap.hp, + "diff": bmap.diff, + } + for bmap in self.maps + ], + ) + + @staticmethod + async def _from_bsid_cache(bsid: int) -> BeatmapSet | None: + """Fetch a mapset from the cache by set id.""" + return app.state.cache.beatmapset.get(bsid, None) + + @classmethod + async def _from_bsid_sql(cls, bsid: int) -> BeatmapSet | None: + """Fetch a 
mapset from the database by set id.""" + last_osuapi_check = await app.state.services.database.fetch_val( + "SELECT last_osuapi_check FROM mapsets WHERE id = :set_id", + {"set_id": bsid}, + column=0, # last_osuapi_check + ) + + if last_osuapi_check is None: + return None + + bmap_set = cls(id=bsid, last_osuapi_check=last_osuapi_check) + + for row in await maps_repo.fetch_many(set_id=bsid): + bmap = Beatmap( + md5=row["md5"], + id=row["id"], + set_id=row["set_id"], + artist=row["artist"], + title=row["title"], + version=row["version"], + creator=row["creator"], + last_update=row["last_update"], + total_length=row["total_length"], + max_combo=row["max_combo"], + status=RankedStatus(row["status"]), + frozen=row["frozen"], + plays=row["plays"], + passes=row["passes"], + mode=GameMode(row["mode"]), + bpm=row["bpm"], + cs=row["cs"], + od=row["od"], + ar=row["ar"], + hp=row["hp"], + diff=row["diff"], + filename=row["filename"], + map_set=bmap_set, + ) + + # XXX: tempfix for bancho.py BeatmapSet | None: + """Fetch a mapset from the osu!api by set id.""" + api_data = await api_get_beatmaps(s=bsid) + if api_data["data"] is not None: + api_response = api_data["data"] + + self = cls(id=bsid, last_osuapi_check=datetime.now()) + + # XXX: pre-mapset bancho.py support + # select all current beatmaps + # that're frozen in the db + res = await app.state.services.database.fetch_all( + "SELECT id, status FROM maps WHERE set_id = :set_id AND frozen = 1", + {"set_id": bsid}, + ) + + current_maps = {row["id"]: row["status"] for row in res} + + for api_bmap in api_response: + # newer version available for this map + bmap: Beatmap = Beatmap.__new__(Beatmap) + bmap.id = int(api_bmap["beatmap_id"]) + + if bmap.id in current_maps: + # map is currently frozen, keep it's status. 
+ bmap.status = RankedStatus(current_maps[bmap.id]) + bmap.frozen = True + else: + bmap.frozen = False + + bmap._parse_from_osuapi_resp(api_bmap) + + # (some implementation-specific stuff not given by api) + bmap.passes = 0 + bmap.plays = 0 + + bmap.set = self + self.maps.append(bmap) + + await app.state.services.database.execute( + "REPLACE INTO mapsets " + "(id, server, last_osuapi_check) " + "VALUES (:id, :server, :last_osuapi_check)", + { + "id": self.id, + "server": "osu!", + "last_osuapi_check": self.last_osuapi_check, + }, + ) + + await self._save_to_sql() + return self + + return None + + @classmethod + async def from_bsid(cls, bsid: int) -> BeatmapSet | None: + """Cache all maps in a set from the osuapi, optionally + returning beatmaps by their md5 or id.""" + bmap_set = await cls._from_bsid_cache(bsid) + did_api_request = False + + if not bmap_set: + bmap_set = await cls._from_bsid_sql(bsid) + + if not bmap_set: + bmap_set = await cls._from_bsid_osuapi(bsid) + + if not bmap_set: + return None + + did_api_request = True + + # TODO: this can be done less often for certain types of maps, + # such as ones that're ranked on bancho and won't be updated, + # and perhaps ones that haven't been updated in a long time. 
+ if not did_api_request and bmap_set._cache_expired(): + await bmap_set._update_if_available() + + # cache the beatmap set, and beatmaps + # to be efficient in future requests + cache_beatmap_set(bmap_set) + + return bmap_set + + +def cache_beatmap(beatmap: Beatmap) -> None: + """Add the beatmap to the cache.""" + app.state.cache.beatmap[beatmap.md5] = beatmap + app.state.cache.beatmap[beatmap.id] = beatmap + + +def cache_beatmap_set(beatmap_set: BeatmapSet) -> None: + """Add the beatmap set, and each beatmap to the cache.""" + app.state.cache.beatmapset[beatmap_set.id] = beatmap_set + + for beatmap in beatmap_set.maps: + cache_beatmap(beatmap) diff --git a/app/objects/channel.py b/app/objects/channel.py new file mode 100644 index 0000000..41dbfa8 --- /dev/null +++ b/app/objects/channel.py @@ -0,0 +1,138 @@ +from __future__ import annotations + +from collections.abc import Sequence +from typing import TYPE_CHECKING + +import app.packets +import app.state +from app.constants.privileges import Privileges + +if TYPE_CHECKING: + from app.objects.player import Player + + +class Channel: + """An osu! chat channel. + + Possibly confusing attributes + ----------- + _name: `str` + A name string of the channel. + The cls.`name` property wraps handling for '#multiplayer' and + '#spectator' when communicating with the osu! client; only use + this attr when you need the channel's true name; otherwise you + should use the `name` property described below. + + instance: `bool` + Instanced channels are deleted when all players have left; + this is useful for things like multiplayer, spectator, etc. 
+ """ + + def __init__( + self, + name: str, + topic: str, + read_priv: Privileges = Privileges.UNRESTRICTED, + write_priv: Privileges = Privileges.UNRESTRICTED, + auto_join: bool = True, + instance: bool = False, + ) -> None: + # TODO: think of better names than `_name` and `name` + self._name = name # 'real' name ('#{multi/spec}_{id}') + + if self._name.startswith("#spec_"): + self.name = "#spectator" + elif self._name.startswith("#multi_"): + self.name = "#multiplayer" + else: + self.name = self._name + + self.topic = topic + self.read_priv = read_priv + self.write_priv = write_priv + self.auto_join = auto_join + self.instance = instance + + self.players: list[Player] = [] + + def __repr__(self) -> str: + return f"<{self._name}>" + + def __contains__(self, player: Player) -> bool: + return player in self.players + + # XXX: should this be cached differently? + + def can_read(self, priv: Privileges) -> bool: + if not self.read_priv: + return True + + return priv & self.read_priv != 0 + + def can_write(self, priv: Privileges) -> bool: + if not self.write_priv: + return True + + return priv & self.write_priv != 0 + + def send(self, msg: str, sender: Player, to_self: bool = False) -> None: + """Enqueue `msg` to all appropriate clients from `sender`.""" + data = app.packets.send_message( + sender=sender.name, + msg=msg, + recipient=self.name, + sender_id=sender.id, + ) + + for player in self.players: + if sender.id not in player.blocks and (to_self or player.id != sender.id): + player.enqueue(data) + + def send_bot(self, msg: str) -> None: + """Enqueue `msg` to all connected clients from bot.""" + bot = app.state.sessions.bot + + msg_len = len(msg) + + if msg_len >= 31979: # TODO ?????????? 
+ msg = f"message would have crashed games ({msg_len} chars)" + + self.enqueue( + app.packets.send_message( + sender=bot.name, + msg=msg, + recipient=self.name, + sender_id=bot.id, + ), + ) + + def send_selective( + self, + msg: str, + sender: Player, + recipients: set[Player], + ) -> None: + """Enqueue `sender`'s `msg` to `recipients`.""" + for player in recipients: + if player in self: + player.send(msg, sender=sender, chan=self) + + def append(self, player: Player) -> None: + """Add `player` to the channel's players.""" + self.players.append(player) + + def remove(self, player: Player) -> None: + """Remove `player` from the channel's players.""" + self.players.remove(player) + + if not self.players and self.instance: + # if it's an instance channel and this + # is the last member leaving, just remove + # the channel from the global list. + app.state.sessions.channels.remove(self) + + def enqueue(self, data: bytes, immune: Sequence[int] = []) -> None: + """Enqueue `data` to all connected clients not in `immune`.""" + for player in self.players: + if player.id not in immune: + player.enqueue(data) diff --git a/app/objects/collections.py b/app/objects/collections.py new file mode 100644 index 0000000..252a3c4 --- /dev/null +++ b/app/objects/collections.py @@ -0,0 +1,314 @@ +from __future__ import annotations + +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Sequence +from typing import Any + +import databases.core + +import app.settings +import app.state +import app.utils +from app.constants.privileges import ClanPrivileges +from app.constants.privileges import Privileges +from app.logging import Ansi +from app.logging import log +from app.objects.channel import Channel +from app.objects.match import Match +from app.objects.player import Player +from app.repositories import channels as channels_repo +from app.repositories import clans as clans_repo +from app.repositories import users as users_repo +from 
app.utils import make_safe_name + + +class Channels(list[Channel]): + """The currently active chat channels on the server.""" + + def __iter__(self) -> Iterator[Channel]: + return super().__iter__() + + def __contains__(self, o: object) -> bool: + """Check whether internal list contains `o`.""" + # Allow string to be passed to compare vs. name. + if isinstance(o, str): + return o in (chan.name for chan in self) + else: + return super().__contains__(o) + + def __repr__(self) -> str: + # XXX: we use the "real" name, aka + # #multi_1 instead of #multiplayer + # #spect_1 instead of #spectator. + return f'[{", ".join(c._name for c in self)}]' + + def get_by_name(self, name: str) -> Channel | None: + """Get a channel from the list by `name`.""" + for channel in self: + if channel._name == name: + return channel + + return None + + def append(self, channel: Channel) -> None: + """Append `channel` to the list.""" + super().append(channel) + + if app.settings.DEBUG: + log(f"{channel} added to channels list.") + + def extend(self, channels: Iterable[Channel]) -> None: + """Extend the list with `channels`.""" + super().extend(channels) + + if app.settings.DEBUG: + log(f"{channels} added to channels list.") + + def remove(self, channel: Channel) -> None: + """Remove `channel` from the list.""" + super().remove(channel) + + if app.settings.DEBUG: + log(f"{channel} removed from channels list.") + + async def prepare(self) -> None: + """Fetch data from sql & return; preparing to run the server.""" + log("Fetching channels from sql.", Ansi.LCYAN) + for row in await channels_repo.fetch_many(): + self.append( + Channel( + name=row["name"], + topic=row["topic"], + read_priv=Privileges(row["read_priv"]), + write_priv=Privileges(row["write_priv"]), + auto_join=row["auto_join"] == 1, + ), + ) + + +class Matches(list[Match | None]): + """The currently active multiplayer matches on the server.""" + + def __init__(self) -> None: + MAX_MATCHES = 64 # TODO: refactor this out of existence + 
        # pre-size the list; slot index == match id
        super().__init__([None] * MAX_MATCHES)

    def __iter__(self) -> Iterator[Match | None]:
        return super().__iter__()

    def __repr__(self) -> str:
        return f'[{", ".join(match.name for match in self if match)}]'

    def get_free(self) -> int | None:
        """Return the first free match id from `self`."""
        for idx, match in enumerate(self):
            if match is None:
                return idx

        return None

    def remove(self, match: Match | None) -> None:
        """Remove `match` from the list."""
        # identity comparison: clear the slot holding this exact object
        for i, _m in enumerate(self):
            if match is _m:
                self[i] = None
                break

        if app.settings.DEBUG:
            log(f"{match} removed from matches list.")


class Players(list[Player]):
    """The currently active players on the server."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)

    def __iter__(self) -> Iterator[Player]:
        return super().__iter__()

    def __contains__(self, player: object) -> bool:
        # allow us to either pass in the player
        # obj, or the player name as a string.
        # NOTE(review): string comparison uses the raw `name`, while
        # `get(name=...)` compares safe_name — confirm this asymmetry
        # is intended
        if isinstance(player, str):
            return player in (player.name for player in self)
        else:
            return super().__contains__(player)

    def __repr__(self) -> str:
        return f'[{", ".join(map(repr, self))}]'

    @property
    def ids(self) -> set[int]:
        """Return a set of the current ids in the list."""
        return {p.id for p in self}

    @property
    def staff(self) -> set[Player]:
        """Return a set of the current staff online."""
        return {p for p in self if p.priv & Privileges.STAFF}

    @property
    def restricted(self) -> set[Player]:
        """Return a set of the current restricted players."""
        return {p for p in self if not p.priv & Privileges.UNRESTRICTED}

    @property
    def unrestricted(self) -> set[Player]:
        """Return a set of the current unrestricted players."""
        return {p for p in self if p.priv & Privileges.UNRESTRICTED}

    def enqueue(self, data: bytes, immune: Sequence[Player] = []) -> None:
        """Enqueue `data` to all players, except for those in `immune`."""
        # (the default [] is never mutated here, so sharing it is safe)
        for player in self:
            if player not in immune:
                player.enqueue(data)

    def get(
        self,
        token: str | None = None,
        id: int | None = None,
        name: str | None = None,
    ) -> Player | None:
        """Get a player by token, id, or name from cache.

        Exactly one lookup key is used, preferring token, then id,
        then name (compared via safe_name).
        """
        for player in self:
            if token is not None:
                if player.token == token:
                    return player
            elif id is not None:
                if player.id == id:
                    return player
            elif name is not None:
                if player.safe_name == make_safe_name(name):
                    return player

        return None

    async def get_sql(
        self,
        id: int | None = None,
        name: str | None = None,
    ) -> Player | None:
        """Get a player by token, id, or name from sql."""
        # try to get from sql.
+ player = await users_repo.fetch_one( + id=id, + name=name, + fetch_all_fields=True, + ) + if player is None: + return None + + clan_id: int | None = None + clan_priv: ClanPrivileges | None = None + if player["clan_id"] != 0: + clan_id = player["clan_id"] + clan_priv = ClanPrivileges(player["clan_priv"]) + + return Player( + id=player["id"], + name=player["name"], + priv=Privileges(player["priv"]), + pw_bcrypt=player["pw_bcrypt"].encode(), + token=Player.generate_token(), + clan_id=clan_id, + clan_priv=clan_priv, + geoloc={ + "latitude": 0.0, + "longitude": 0.0, + "country": { + "acronym": player["country"], + "numeric": app.state.services.country_codes[player["country"]], + }, + }, + silence_end=player["silence_end"], + donor_end=player["donor_end"], + api_key=player["api_key"], + ) + + async def from_cache_or_sql( + self, + id: int | None = None, + name: str | None = None, + ) -> Player | None: + """Try to get player from cache, or sql as fallback.""" + player = self.get(id=id, name=name) + if player is not None: + return player + player = await self.get_sql(id=id, name=name) + if player is not None: + return player + + return None + + async def from_login( + self, + name: str, + pw_md5: str, + sql: bool = False, + ) -> Player | None: + """Return a player with a given name & pw_md5, from cache or sql.""" + player = self.get(name=name) + if not player: + if not sql: + return None + + player = await self.get_sql(name=name) + if not player: + return None + + assert player.pw_bcrypt is not None + + if app.state.cache.bcrypt[player.pw_bcrypt] == pw_md5.encode(): + return player + + return None + + def append(self, player: Player) -> None: + """Append `p` to the list.""" + if player in self: + if app.settings.DEBUG: + log(f"{player} double-added to global player list?") + return + + super().append(player) + + def remove(self, player: Player) -> None: + """Remove `p` from the list.""" + if player not in self: + if app.settings.DEBUG: + log(f"{player} removed from 
player list when not online?") + return + + super().remove(player) + + +async def initialize_ram_caches() -> None: + """Setup & cache the global collections before listening for connections.""" + # fetch channels, clans and pools from db + await app.state.sessions.channels.prepare() + + bot = await users_repo.fetch_one(id=1) + if bot is None: + raise RuntimeError("Bot account not found in database.") + + # create bot & add it to online players + app.state.sessions.bot = Player( + id=1, + name=bot["name"], + priv=Privileges.UNRESTRICTED, + pw_bcrypt=None, + token=Player.generate_token(), + login_time=float(0x7FFFFFFF), # (never auto-dc) + is_bot_client=True, + ) + app.state.sessions.players.append(app.state.sessions.bot) + + # static api keys + app.state.sessions.api_keys = { + row["api_key"]: row["id"] + for row in await app.state.services.database.fetch_all( + "SELECT id, api_key FROM users WHERE api_key IS NOT NULL", + ) + } diff --git a/app/objects/match.py b/app/objects/match.py new file mode 100644 index 0000000..69b4b16 --- /dev/null +++ b/app/objects/match.py @@ -0,0 +1,552 @@ +from __future__ import annotations + +import asyncio +from collections import defaultdict +from collections.abc import Sequence +from datetime import datetime as datetime +from datetime import timedelta as timedelta +from enum import IntEnum +from enum import unique +from typing import TYPE_CHECKING +from typing import TypedDict + +import app.packets +import app.settings +import app.state +from app.constants import regexes +from app.constants.gamemodes import GameMode +from app.constants.mods import Mods +from app.objects.beatmap import Beatmap +from app.repositories.tourney_pools import TourneyPool +from app.utils import escape_enum +from app.utils import pymysql_encode + +if TYPE_CHECKING: + from asyncio import TimerHandle + + from app.objects.channel import Channel + from app.objects.player import Player + + +MAX_MATCH_NAME_LENGTH = 50 + + +@unique +@pymysql_encode(escape_enum) 
class SlotStatus(IntEnum):
    # power-of-two values; combined client-side as bitmasks
    open = 1
    locked = 2
    not_ready = 4
    ready = 8
    no_map = 16
    playing = 32
    complete = 64
    quit = 128

    # has_player = not_ready | ready | no_map | playing | complete


@unique
@pymysql_encode(escape_enum)
class MatchTeams(IntEnum):
    neutral = 0
    blue = 1
    red = 2


"""
# implemented by osu! and send between client/server,
# quite frequently even, but seems useless??
@unique
@pymysql_encode(escape_enum)
class MatchTypes(IntEnum):
    standard = 0
    powerplay = 1  # literally no idea what this is for
"""


@unique
@pymysql_encode(escape_enum)
class MatchWinConditions(IntEnum):
    score = 0
    accuracy = 1
    combo = 2
    scorev2 = 3


@unique
@pymysql_encode(escape_enum)
class MatchTeamTypes(IntEnum):
    head_to_head = 0
    tag_coop = 1
    team_vs = 2
    tag_team_vs = 3


class Slot:
    """An individual player slot in an osu! multiplayer match."""

    def __init__(self) -> None:
        self.player: Player | None = None
        self.status = SlotStatus.open
        self.team = MatchTeams.neutral
        self.mods = Mods.NOMOD
        self.loaded = False   # client has loaded the map for play
        self.skipped = False  # client has voted to skip the intro

    def empty(self) -> bool:
        """Whether no player occupies this slot."""
        return self.player is None

    def copy_from(self, other: Slot) -> None:
        """Copy another slot's occupancy state into this one.

        # NOTE(review): `loaded` and `skipped` are not copied — looks
        # intentional (they're per-play transient state), but confirm
        """
        self.player = other.player
        self.status = other.status
        self.team = other.team
        self.mods = other.mods

    def reset(self, new_status: SlotStatus = SlotStatus.open) -> None:
        """Clear the slot back to an unoccupied state."""
        self.player = None
        self.status = new_status
        self.team = MatchTeams.neutral
        self.mods = Mods.NOMOD
        self.loaded = False
        self.skipped = False


class StartingTimers(TypedDict):
    # timer that fires the actual match start
    start: TimerHandle
    # countdown chat-alert timers
    alerts: list[TimerHandle]
    # unix timestamp the match will start at
    time: float


class Match:
    """\
    An osu! multiplayer match.

    Possibly confusing attributes
    -----------
    _refs: set[`Player`]
        A set of players who have access to mp commands in the match.
        These can be used with the !mp commands.

    slots: list[`Slot`]
        A list of 16 `Slot` objects representing the match's slots.

    starting: dict[str, `TimerHandle`] | None
        Used when the match is started with !mp start <seconds>.
        It stores both the starting timer, and the chat alert timers.

    seed: `int`
        The seed used for osu!mania's random mod.

    use_pp_scoring: `bool`
        Whether pp should be used as a win condition override during scrims.
    """

    def __init__(
        self,
        id: int,
        name: str,
        password: str,
        has_public_history: bool,
        map_name: str,
        map_id: int,
        map_md5: str,
        host_id: int,
        mode: GameMode,
        mods: Mods,
        win_condition: MatchWinConditions,
        team_type: MatchTeamTypes,
        freemods: bool,
        seed: int,
        chat_channel: Channel,
    ) -> None:
        self.id = id
        self.name = name
        self.passwd = password
        self.has_public_history = has_public_history

        self.host_id = host_id
        self._refs: set[Player] = set()

        self.map_id = map_id
        self.map_md5 = map_md5
        self.map_name = map_name
        self.prev_map_id = 0  # previously chosen map

        self.mods = mods
        self.mode = mode
        self.freemods = freemods

        self.chat = chat_channel
        # osu! multi lobbies always have exactly 16 slots.
        self.slots = [Slot() for _ in range(16)]

        # self.type = MatchTypes.standard
        self.team_type = team_type
        self.win_condition = win_condition

        self.in_progress = False
        self.starting: StartingTimers | None = None
        self.seed = seed  # used for mania random mod

        self.tourney_pool: TourneyPool | None = None

        # scrimmage stuff
        self.is_scrimming = False
        # keyed by Player in ffa modes, by MatchTeams in team modes.
        self.match_points: dict[MatchTeams | Player, int] = defaultdict(int)
        self.bans: set[tuple[Mods, int]] = set()
        self.winners: list[Player | MatchTeams | None] = []  # none for tie
        self.winning_pts = 0
        self.use_pp_scoring = False  # only for scrims

        self.tourney_clients: set[int] = set()  # player ids

    @property
    def host(self) -> Player:
        """The host's Player object; raises if they are not online.

        Raises:
            ValueError: if no online session exists for `host_id`.
        """
        player = app.state.sessions.players.get(id=self.host_id)
        if player is None:
            raise ValueError(
                f"Host with id {self.host_id} not found for match {self!r}",
            )
        return player

    @property
    def url(self) -> str:
        """The match's invitation url (includes the password)."""
        return f"osump://{self.id}/{self.passwd}"

    @property
    def map_url(self) -> str:
        """The osu! beatmap url for `self`'s map."""
        return f"https://osu.{app.settings.DOMAIN}/b/{self.map_id}"

    @property
    def embed(self) -> str:
        """An osu! chat embed for `self`."""
        return f"[{self.url} {self.name}]"

    @property
    def map_embed(self) -> str:
        """An osu! chat embed for `self`'s map."""
        return f"[{self.map_url} {self.map_name}]"

    @property
    def refs(self) -> set[Player]:
        """Return all players with referee permissions.

        NOTE(review): this returns (and mutates) `_refs` itself rather
        than a copy — each access permanently adds the current host to
        `_refs`. Callers elsewhere check membership of `_refs`, so
        changing this would alter behavior; flagged for review only.
        """
        refs = self._refs

        if self.host is not None:
            refs.add(self.host)

        return refs

    def __repr__(self) -> str:
        return f"<{self.name} ({self.id})>"

    def get_slot(self, player: Player) -> Slot | None:
        """Return the slot containing a given player."""
        for s in self.slots:
            if player is s.player:
                return s

        return None

    def get_slot_id(self, player: Player) -> int | None:
        """Return the slot index containing a given player."""
        for idx, s in enumerate(self.slots):
            if player is s.player:
                return idx

        return None

    def get_free(self) -> int | None:
        """Return the first unoccupied slot in multi, if any."""
        for idx, s in enumerate(self.slots):
            if s.status == SlotStatus.open:
                return idx

        return None

    def get_host_slot(self) -> Slot | None:
        """Return the slot containing the host."""
        for s in self.slots:
            if s.player is not None and s.player is self.host:
                return s

        return None

    def copy(self, m: Match) -> None:
        """Fully copy the data of another match obj.

        Copies map/mode/ruleset configuration and the name; does not
        copy slots, host, password, or scrim state.
        """
        self.map_id = m.map_id
        self.map_md5 = m.map_md5
        self.map_name = m.map_name
        self.freemods = m.freemods
        self.mode = m.mode
        self.team_type = m.team_type
        self.win_condition = m.win_condition
        self.mods = m.mods
        self.name = m.name

    def enqueue(
        self,
        data: bytes,
        lobby: bool = True,
        # NOTE(review): mutable default argument — safe only because the
        # body never mutates `immune`; flagged rather than changed.
        immune: Sequence[int] = [],
    ) -> None:
        """Add data to be sent to all clients in the match."""
        # relay through the match's chat channel (reaches everyone in
        # the match), skipping any player ids listed in `immune`.
        self.chat.enqueue(data, immune)

        # optionally also relay to #lobby so browsing players see updates.
        lchan = app.state.sessions.channels.get_by_name("#lobby")
        if lobby and lchan and lchan.players:
            lchan.enqueue(data)

    def enqueue_state(self, lobby: bool = True) -> None:
        """Enqueue `self`'s state to players in the match & lobby.

        The packet sent to players inside the match includes the match
        password; the copy sent to #lobby omits it.
        """
        # TODO: hmm this is pretty bad, writes twice

        # send password only to users currently in the match.
        self.chat.enqueue(app.packets.update_match(self, send_pw=True))

        lchan = app.state.sessions.channels.get_by_name("#lobby")
        if lobby and lchan and lchan.players:
            lchan.enqueue(app.packets.update_match(self, send_pw=False))

    def unready_players(self, expected: SlotStatus = SlotStatus.ready) -> None:
        """Unready any players in the `expected` state."""
        for s in self.slots:
            if s.status == expected:
                s.status = SlotStatus.not_ready

    def reset_players_loaded_status(self) -> None:
        """Reset every slot's in-round loaded & skip-request flags."""
        for s in self.slots:
            s.loaded = False
            s.skipped = False

    def start(self) -> None:
        """Start the match for all ready players with the map."""
        # ids of players lacking the map; they're excluded ("immune")
        # from the match-start packet sent below.
        no_map: list[int] = []

        for s in self.slots:
            # start each player who has the map.
            if s.player is not None:
                if s.status != SlotStatus.no_map:
                    s.status = SlotStatus.playing
                else:
                    no_map.append(s.player.id)

        self.in_progress = True
        self.enqueue(app.packets.match_start(self), immune=no_map, lobby=False)
        self.enqueue_state()

    def reset_scrim(self) -> None:
        """Reset the current scrim's winning points & bans."""
        self.match_points.clear()
        self.winners.clear()
        self.bans.clear()

    async def await_submissions(
        self,
        was_playing: Sequence[Slot],
    ) -> tuple[dict[MatchTeams | Player, int], Sequence[Player]]:
        """Await score submissions from all players in completed state.

        Polls each player's most recent score (0.5s intervals, up to 10s
        total across all players) until a score on the current map newer
        than the round start is seen.

        Returns:
            (scores, didnt_submit) where `scores` maps Player (ffa) or
            MatchTeams (team modes) to summed score values, and
            `didnt_submit` lists players who timed out. Returns an empty
            mapping if the beatmap cannot be found.
        """
        scores: dict[MatchTeams | Player, int] = defaultdict(int)
        didnt_submit: list[Player] = []
        time_waited = 0.0  # allow up to 10s (total, not per player)

        ffa = self.team_type in (MatchTeamTypes.head_to_head, MatchTeamTypes.tag_coop)

        # attribute name on the Score object to compare; indexed by
        # MatchWinConditions (score/accuracy/combo/scorev2), so scorev2
        # falls back to the plain "score" attribute.
        if self.use_pp_scoring:
            win_cond = "pp"
        else:
            win_cond = ("score", "acc", "max_combo", "score")[self.win_condition]

        bmap = await Beatmap.from_md5(self.map_md5)

        if not bmap:
            # map isn't submitted
            return {}, ()

        for s in was_playing:
            # continue trying to fetch each player's
            # scores until they've all been submitted.
            while True:
                assert s.player is not None
                rc_score = s.player.recent_score

                # only accept scores set after this round began
                # (map length + total time waited, with 0.5s slack).
                max_age = datetime.now() - timedelta(
                    seconds=bmap.total_length + time_waited + 0.5,
                )

                if (
                    rc_score
                    and rc_score.bmap
                    and rc_score.bmap.md5 == self.map_md5
                    and rc_score.server_time > max_age
                ):
                    # score found, add to our scores dict if != 0.
                    score: int = getattr(rc_score, win_cond)
                    if score:
                        key: MatchTeams | Player = s.player if ffa else s.team
                        scores[key] += score

                    break

                # wait 0.5s and try again
                await asyncio.sleep(0.5)
                time_waited += 0.5

                if time_waited > 10:
                    # inform the match this user didn't
                    # submit a score in time, and skip them.
                    didnt_submit.append(s.player)
                    break

        # all scores retrieved, update the match.
+ return scores, didnt_submit + + async def update_matchpoints(self, was_playing: Sequence[Slot]) -> None: + """\ + Determine the winner from `scores`, increment & inform players. + + This automatically works with the match settings (such as + win condition, teams, & co-op) to determine the appropriate + winner, and will use any team names included in the match name, + along with the match name (fmt: OWC2020: (Team1) vs. (Team2)). + + For the examples, we'll use accuracy as a win condition. + + Teams, match title: `OWC2015: (United States) vs. (China)`. + United States takes the point! (293.32% vs 292.12%) + Total Score: United States | 7 - 2 | China + United States takes the match, finishing OWC2015 with a score of 7 - 2! + + FFA, the top <=3 players will be listed for the total score. + Justice takes the point! (94.32% [Match avg. 91.22%]) + Total Score: Justice - 3 | cmyui - 2 | FrostiDrinks - 2 + Justice takes the match, finishing with a score of 4 - 2! + """ + + scores, didnt_submit = await self.await_submissions(was_playing) + + for player in didnt_submit: + self.chat.send_bot(f"{player} didn't submit a score (timeout: 10s).") + + if not scores: + self.chat.send_bot("Scores could not be calculated.") + return None + + ffa = self.team_type in ( + MatchTeamTypes.head_to_head, + MatchTeamTypes.tag_coop, + ) + + # all scores are equal, it was a tie. + if len(scores) != 1 and len(set(scores.values())) == 1: + self.winners.append(None) + self.chat.send_bot("The point has ended in a tie!") + return None + + # Find the winner & increment their matchpoints. 
+ winner: Player | MatchTeams = max(scores, key=lambda k: scores[k]) + self.winners.append(winner) + self.match_points[winner] += 1 + + msg: list[str] = [] + + def add_suffix(score: int | float) -> str | int | float: + if self.use_pp_scoring: + return f"{score:.2f}pp" + elif self.win_condition == MatchWinConditions.accuracy: + return f"{score:.2f}%" + elif self.win_condition == MatchWinConditions.combo: + return f"{score}x" + else: + return str(score) + + if ffa: + from app.objects.player import Player + + assert isinstance(winner, Player) + + msg.append( + f"{winner.name} takes the point! ({add_suffix(scores[winner])} " + f"[Match avg. {add_suffix(sum(scores.values()) / len(scores))}])", + ) + + wmp = self.match_points[winner] + + # check if match point #1 has enough points to win. + if self.winning_pts and wmp == self.winning_pts: + # we have a champion, announce & reset our match. + self.is_scrimming = False + self.reset_scrim() + self.bans.clear() + + m = f"{winner.name} takes the match! Congratulations!" + else: + # no winner, just announce the match points so far. + # for ffa, we'll only announce the top <=3 players. + m_points = sorted(self.match_points.items(), key=lambda x: x[1]) + m = f"Total Score: {' | '.join([f'{k.name} - {v}' for k, v in m_points])}" + + msg.append(m) + del m + + else: # teams + assert isinstance(winner, MatchTeams) + + r_match = regexes.TOURNEY_MATCHNAME.match(self.name) + if r_match: + match_name = r_match["name"] + team_names = { + MatchTeams.blue: r_match["T1"], + MatchTeams.red: r_match["T2"], + } + else: + match_name = self.name + team_names = {MatchTeams.blue: "Blue", MatchTeams.red: "Red"} + + # teams are binary, so we have a loser. + if winner is MatchTeams.blue: + loser = MatchTeams.red + else: + loser = MatchTeams.blue + + # from match name if available, else blue/red. 
+ wname = team_names[winner] + lname = team_names[loser] + + # scores from the recent play + # (according to win condition) + ws = add_suffix(scores[winner]) + ls = add_suffix(scores[loser]) + + # total win/loss score in the match. + wmp = self.match_points[winner] + lmp = self.match_points[loser] + + # announce the score for the most recent play. + msg.append(f"{wname} takes the point! ({ws} vs. {ls})") + + # check if the winner has enough match points to win the match. + if self.winning_pts and wmp == self.winning_pts: + # we have a champion, announce & reset our match. + self.is_scrimming = False + self.reset_scrim() + + msg.append( + f"{wname} takes the match, finishing {match_name} " + f"with a score of {wmp} - {lmp}! Congratulations!", + ) + else: + # no winner, just announce the match points so far. + msg.append(f"Total Score: {wname} | {wmp} - {lmp} | {lname}") + + if didnt_submit: + self.chat.send_bot( + "If you'd like to perform a rematch, " + "please use the `!mp rematch` command.", + ) + + for line in msg: + self.chat.send_bot(line) diff --git a/app/objects/models.py b/app/objects/models.py new file mode 100644 index 0000000..b9a852a --- /dev/null +++ b/app/objects/models.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +from pydantic import BaseModel + + +class OsuBeatmapRequestForm(BaseModel): + Filenames: list[str] + Ids: list[int] diff --git a/app/objects/player.py b/app/objects/player.py new file mode 100644 index 0000000..ce42644 --- /dev/null +++ b/app/objects/player.py @@ -0,0 +1,1017 @@ +from __future__ import annotations + +import asyncio +import time +import uuid +from dataclasses import dataclass +from datetime import date +from enum import IntEnum +from enum import StrEnum +from enum import unique +from functools import cached_property +from typing import TYPE_CHECKING +from typing import TypedDict +from typing import cast + +import databases.core + +import app.packets +import app.settings +import app.state +from app._typing import 
IPAddress +from app.constants.gamemodes import GameMode +from app.constants.mods import Mods +from app.constants.privileges import ClientPrivileges +from app.constants.privileges import Privileges +from app.discord import Webhook +from app.logging import Ansi +from app.logging import log +from app.objects.channel import Channel +from app.objects.match import Match +from app.objects.match import MatchTeams +from app.objects.match import MatchTeamTypes +from app.objects.match import Slot +from app.objects.match import SlotStatus +from app.objects.score import Grade +from app.objects.score import Score +from app.repositories import clans as clans_repo +from app.repositories import logs as logs_repo +from app.repositories import stats as stats_repo +from app.repositories import users as users_repo +from app.state.services import Geolocation +from app.utils import escape_enum +from app.utils import make_safe_name +from app.utils import pymysql_encode + +if TYPE_CHECKING: + from app.constants.privileges import ClanPrivileges + from app.objects.beatmap import Beatmap + from app.objects.score import Score + + +@unique +@pymysql_encode(escape_enum) +class PresenceFilter(IntEnum): + """osu! 
client side filter for which users the player can see.""" + + Nil = 0 + All = 1 + Friends = 2 + + +@unique +@pymysql_encode(escape_enum) +class Action(IntEnum): + """The client's current app.state.""" + + Idle = 0 + Afk = 1 + Playing = 2 + Editing = 3 + Modding = 4 + Multiplayer = 5 + Watching = 6 + Unknown = 7 + Testing = 8 + Submitting = 9 + Paused = 10 + Lobby = 11 + Multiplaying = 12 + OsuDirect = 13 + + +@dataclass +class ModeData: + """A player's stats in a single gamemode.""" + + tscore: int + rscore: int + pp: int + acc: float + plays: int + playtime: int + max_combo: int + total_hits: int + rank: int # global + + grades: dict[Grade, int] # XH, X, SH, S, A + + +@dataclass +class Status: + """The current status of a player.""" + + action: Action = Action.Idle + info_text: str = "" + map_md5: str = "" + mods: Mods = Mods.NOMOD + mode: GameMode = GameMode.VANILLA_OSU + map_id: int = 0 + + +class LastNp(TypedDict): + bmap: Beatmap + mode_vn: int + mods: Mods | None + timeout: float + + +class OsuStream(StrEnum): + STABLE = "stable" + BETA = "beta" + CUTTINGEDGE = "cuttingedge" + TOURNEY = "tourney" + DEV = "dev" + + +class OsuVersion: + # b20200201.2cuttingedge + # date = 2020/02/01 + # revision = 2 + # stream = cuttingedge + def __init__( + self, + date: date, + revision: int | None, # TODO: should this be optional? + stream: OsuStream, + ) -> None: + self.date = date + self.revision = revision + self.stream = stream + + +class ClientDetails: + def __init__( + self, + osu_version: OsuVersion, + osu_path_md5: str, + adapters_md5: str, + uninstall_md5: str, + disk_signature_md5: str, + adapters: list[str], + ip: IPAddress, + ) -> None: + self.osu_version = osu_version + self.osu_path_md5 = osu_path_md5 + self.adapters_md5 = adapters_md5 + self.uninstall_md5 = uninstall_md5 + self.disk_signature_md5 = disk_signature_md5 + + self.adapters = adapters + self.ip = ip + + @cached_property + def client_hash(self) -> str: + return ( + # NOTE the extra '.' 
and ':' appended to ends + f"{self.osu_path_md5}:{'.'.join(self.adapters)}." + f":{self.adapters_md5}:{self.uninstall_md5}:{self.disk_signature_md5}:" + ) + + # TODO: __str__ to pack like osu! hashes? + + +class Player: + """\ + Server side representation of a player; not necessarily online. + + Possibly confusing attributes + ----------- + token: `str` + The player's unique token; used to + communicate with the osu! client. + + safe_name: `str` + The player's username (safe). + XXX: Equivalent to `cls.name.lower().replace(' ', '_')`. + + pm_private: `bool` + Whether the player is blocking pms from non-friends. + + silence_end: `int` + The UNIX timestamp the player's silence will end at. + + pres_filter: `PresenceFilter` + The scope of users the client can currently see. + + is_bot_client: `bool` + Whether this is a bot account. + + is_tourney_client: `bool` + Whether this is a management/spectator tourney client. + + _packet_queue: `bytearray` + Bytes enqueued to the player which will be transmitted + at the tail end of their next connection to the server. + XXX: cls.enqueue() will add data to this queue, and + cls.dequeue() will return the data, and remove it. 
+ """ + + def __init__( + self, + id: int, + name: str, + priv: Privileges, + pw_bcrypt: bytes | None, + token: str, + clan_id: int | None = None, + clan_priv: ClanPrivileges | None = None, + geoloc: Geolocation | None = None, + utc_offset: int = 0, + pm_private: bool = False, + silence_end: int = 0, + donor_end: int = 0, + client_details: ClientDetails | None = None, + login_time: float = 0.0, + is_bot_client: bool = False, + is_tourney_client: bool = False, + api_key: str | None = None, + ) -> None: + if geoloc is None: + geoloc = { + "latitude": 0.0, + "longitude": 0.0, + "country": {"acronym": "xx", "numeric": 0}, + } + + self.id = id + self.name = name + self.priv = priv + self.pw_bcrypt = pw_bcrypt + self.token = token + self.clan_id = clan_id + self.clan_priv = clan_priv + self.geoloc = geoloc + self.utc_offset = utc_offset + self.pm_private = pm_private + self.silence_end = silence_end + self.donor_end = donor_end + self.client_details = client_details + self.login_time = login_time + self.last_recv_time = login_time + self.is_bot_client = is_bot_client + self.is_tourney_client = is_tourney_client + self.api_key = api_key + + # avoid enqueuing packets to bot accounts. + if self.is_bot_client: + + def _noop_enqueue(data: bytes) -> None: + pass + + self.enqueue = _noop_enqueue # type: ignore[method-assign] + + self.away_msg: str | None = None + self.in_lobby = False + + self.stats: dict[GameMode, ModeData] = {} + self.status = Status() + + # userids, not player objects + self.friends: set[int] = set() + self.blocks: set[int] = set() + + self.channels: list[Channel] = [] + self.spectators: list[Player] = [] + self.spectating: Player | None = None + self.match: Match | None = None + self.stealth = False + + self.pres_filter = PresenceFilter.Nil + + # store most recent score for each gamemode. + self.recent_scores: dict[GameMode, Score | None] = { + mode: None for mode in GameMode + } + + # store the last beatmap /np'ed by the user. 
        # the last beatmap /np'ed by the user (None until they /np).
        self.last_np: LastNp | None = None

        # bytes queued for the client's next bancho connection.
        self._packet_queue = bytearray()

    def __repr__(self) -> str:
        return f"<{self.name} ({self.id})>"

    @property
    def safe_name(self) -> str:
        """The player's username in 'safe' form (lowercased, underscored)."""
        return make_safe_name(self.name)

    @property
    def is_online(self) -> bool:
        """Whether the player has an active session (non-empty token)."""
        return bool(self.token != "")

    @property
    def url(self) -> str:
        """The url to the player's profile."""
        return f"https://{app.settings.DOMAIN}/u/{self.id}"

    @property
    def embed(self) -> str:
        """An osu! chat embed to the player's profile."""
        return f"[{self.url} {self.name}]"

    @property
    def avatar_url(self) -> str:
        """The url to the player's avatar."""
        return f"https://a.{app.settings.DOMAIN}/{self.id}"

    # TODO: chat embed with clan tag hyperlinked?

    @property
    def remaining_silence(self) -> int:
        """The remaining time of the players silence, in seconds (>= 0)."""
        return max(0, int(self.silence_end - time.time()))

    @property
    def silenced(self) -> bool:
        """Whether or not the player is silenced."""
        return self.remaining_silence != 0

    @cached_property
    def bancho_priv(self) -> ClientPrivileges:
        """The player's privileges according to the client.

        Maps server-side Privileges bits onto the client-side
        ClientPrivileges flags. Cached; callers that change `priv`
        delete this attribute to invalidate (see update_privs).
        """
        ret = ClientPrivileges(0)
        if self.priv & Privileges.UNRESTRICTED:
            ret |= ClientPrivileges.PLAYER
        if self.priv & Privileges.DONATOR:
            ret |= ClientPrivileges.SUPPORTER
        if self.priv & Privileges.MODERATOR:
            ret |= ClientPrivileges.MODERATOR
        if self.priv & Privileges.ADMINISTRATOR:
            ret |= ClientPrivileges.DEVELOPER
        if self.priv & Privileges.DEVELOPER:
            ret |= ClientPrivileges.OWNER
        return ret

    @property
    def restricted(self) -> bool:
        """Return whether the player is restricted."""
        return not self.priv & Privileges.UNRESTRICTED

    @property
    def gm_stats(self) -> ModeData:
        """The player's stats in their currently selected mode."""
        return self.stats[self.status.mode]

    @property
    def recent_score(self) -> Score | None:
        """The player's most recently submitted score.

        Scans the per-gamemode cache and returns the score with the
        latest server_time, or None if no score has been cached.
        """
        score = None
        for s in self.recent_scores.values():
            if not s:
                continue

            if not score:
                score = s
                continue

            if s.server_time > score.server_time:
                score = s

        return score

    @staticmethod
    def generate_token() -> str:
        """Generate a random uuid as a token."""
        return str(uuid.uuid4())

    def logout(self) -> None:
        """Log `self` out of the server."""
        # invalidate the user's token.
        self.token = ""

        # leave multiplayer.
        if self.match:
            self.leave_match()

        # stop spectating.
        host = self.spectating
        if host:
            host.remove_spectator(self)

        # leave channels
        while self.channels:
            self.leave_channel(self.channels[0], kick=False)

        # remove from playerlist and
        # enqueue logout to all users.
        app.state.sessions.players.remove(self)

        # restricted players are invisible to others, so their logout
        # is neither counted nor broadcast.
        if not self.restricted:
            if app.state.services.datadog:
                app.state.services.datadog.decrement("bancho.online_players")

            app.state.sessions.players.enqueue(app.packets.logout(self.id))

        log(f"{self} logged out.")

    async def update_privs(self, new: Privileges) -> None:
        """Update `self`'s privileges to `new`."""

        self.priv = new
        if "bancho_priv" in vars(self):
            del self.bancho_priv  # wipe cached_property

        await users_repo.partial_update(
            id=self.id,
            priv=self.priv,
        )

    async def add_privs(self, bits: Privileges) -> None:
        """Update `self`'s privileges, adding `bits`."""

        self.priv |= bits
        if "bancho_priv" in vars(self):
            del self.bancho_priv  # wipe cached_property

        await users_repo.partial_update(
            id=self.id,
            priv=self.priv,
        )

        if self.is_online:
            # if they're online, send a packet
            # to update their client-side privileges
            self.enqueue(app.packets.bancho_privileges(self.bancho_priv))

    async def remove_privs(self, bits: Privileges) -> None:
        """Update `self`'s privileges, removing `bits`."""

        self.priv &= ~bits
        if "bancho_priv" in vars(self):
            del self.bancho_priv  # wipe cached_property

        await users_repo.partial_update(
            id=self.id,
            priv=self.priv,
        )
+ if self.is_online: + # if they're online, send a packet + # to update their client-side privileges + self.enqueue(app.packets.bancho_privileges(self.bancho_priv)) + + async def restrict(self, admin: Player, reason: str) -> None: + """Restrict `self` for `reason`, and log to sql.""" + await self.remove_privs(Privileges.UNRESTRICTED) + + await logs_repo.create( + _from=admin.id, + to=self.id, + action="restrict", + msg=reason, + ) + + for mode in (0, 1, 2, 3, 4, 5, 6, 8): + await app.state.services.redis.zrem( + f"bancho:leaderboard:{mode}", + self.id, + ) + await app.state.services.redis.zrem( + f'bancho:leaderboard:{mode}:{self.geoloc["country"]["acronym"]}', + self.id, + ) + + log_msg = f"{admin} restricted {self} for: {reason}." + + log(log_msg, Ansi.LRED) + + webhook_url = app.settings.DISCORD_AUDIT_LOG_WEBHOOK + if webhook_url: + webhook = Webhook(webhook_url, content=log_msg) + asyncio.create_task(webhook.post()) + + # refresh their client state + if self.is_online: + self.logout() + + async def unrestrict(self, admin: Player, reason: str) -> None: + """Restrict `self` for `reason`, and log to sql.""" + await self.add_privs(Privileges.UNRESTRICTED) + + await logs_repo.create( + _from=admin.id, + to=self.id, + action="unrestrict", + msg=reason, + ) + + if not self.is_online: + await self.stats_from_sql_full() + + for mode, stats in self.stats.items(): + await app.state.services.redis.zadd( + f"bancho:leaderboard:{mode.value}", + {str(self.id): stats.pp}, + ) + await app.state.services.redis.zadd( + f"bancho:leaderboard:{mode.value}:{self.geoloc['country']['acronym']}", + {str(self.id): stats.pp}, + ) + + log_msg = f"{admin} unrestricted {self} for: {reason}." 

        log(log_msg, Ansi.LRED)

        webhook_url = app.settings.DISCORD_AUDIT_LOG_WEBHOOK
        if webhook_url:
            webhook = Webhook(webhook_url, content=log_msg)
            asyncio.create_task(webhook.post())

        if self.is_online:
            # log the user out if they're online; this simply relogs
            # them and refreshes their client-side state.
            self.logout()

    async def silence(self, admin: Player, duration: float, reason: str) -> None:
        """Silence `self` for `duration` seconds, and log to sql."""
        self.silence_end = int(time.time() + duration)

        await users_repo.partial_update(
            id=self.id,
            silence_end=self.silence_end,
        )

        await logs_repo.create(
            _from=admin.id,
            to=self.id,
            action="silence",
            msg=reason,
        )

        # inform the user's client.
        self.enqueue(app.packets.silence_end(int(duration)))

        # wipe their messages from any channels.
        app.state.sessions.players.enqueue(app.packets.user_silenced(self.id))

        # remove them from multiplayer match (if any).
        if self.match:
            self.leave_match()

        log(f"Silenced {self}.", Ansi.LCYAN)

    async def unsilence(self, admin: Player, reason: str) -> None:
        """Unsilence `self`, and log to sql."""
        # ending the silence now (rather than clearing it) keeps the
        # timestamp comparison in `remaining_silence` well-defined.
        self.silence_end = int(time.time())

        await users_repo.partial_update(
            id=self.id,
            silence_end=self.silence_end,
        )

        await logs_repo.create(
            _from=admin.id,
            to=self.id,
            action="unsilence",
            msg=reason,
        )

        # inform the user's client
        self.enqueue(app.packets.silence_end(0))

        log(f"Unsilenced {self}.", Ansi.LCYAN)

    def join_match(self, match: Match, passwd: str) -> bool:
        """Attempt to add `self` to `match`.

        Returns True on success; on failure a match-join-fail packet is
        sent to the client and False is returned.
        """
        if self.match:
            log(f"{self} tried to join multiple matches?")
            self.enqueue(app.packets.match_join_fail())
            return False

        if self.id in match.tourney_clients:
            # the user is already in the match with a tourney client.
            # users cannot spectate themselves so this is not possible.
+ self.enqueue(app.packets.match_join_fail()) + return False + + if self is not match.host: + # match already exists, we're simply joining. + # NOTE: staff members have override to pw and can + # simply use any to join a pw protected match. + if passwd != match.passwd and self not in app.state.sessions.players.staff: + log(f"{self} tried to join {match} w/ incorrect pw.", Ansi.LYELLOW) + self.enqueue(app.packets.match_join_fail()) + return False + slot_id = match.get_free() + if slot_id is None: + log(f"{self} tried to join a full match.", Ansi.LYELLOW) + self.enqueue(app.packets.match_join_fail()) + return False + + else: + # match is being created + slot_id = 0 + + if not self.join_channel(match.chat): + log(f"{self} failed to join {match.chat}.", Ansi.LYELLOW) + return False + + lobby = app.state.sessions.channels.get_by_name("#lobby") + if lobby in self.channels: + self.leave_channel(lobby) + + slot: Slot = match.slots[0 if slot_id == -1 else slot_id] + + # if in a teams-vs mode, switch team from neutral to red. + if match.team_type in (MatchTeamTypes.team_vs, MatchTeamTypes.tag_team_vs): + slot.team = MatchTeams.red + + slot.status = SlotStatus.not_ready + slot.player = self + self.match = match + + self.enqueue(app.packets.match_join_success(match)) + match.enqueue_state() + + return True + + def leave_match(self) -> None: + """Attempt to remove `self` from their match.""" + if not self.match: + if app.settings.DEBUG: + log(f"{self} tried leaving a match they're not in?", Ansi.LYELLOW) + return + + slot = self.match.get_slot(self) + assert slot is not None + + if slot.status == SlotStatus.locked: + # player was kicked, keep the slot locked. + new_status = SlotStatus.locked + else: + # player left, open the slot for new players to join. + new_status = SlotStatus.open + + slot.reset(new_status=new_status) + + self.leave_channel(self.match.chat) + + if all(s.empty() for s in self.match.slots): + # multi is now empty, chat has been removed. 
            # remove the multi from the channels list.
            log(f"Match {self.match} finished.")

            # cancel any pending start timers
            if self.match.starting is not None:
                self.match.starting["start"].cancel()
                for alert in self.match.starting["alerts"]:
                    alert.cancel()

                self.match.starting = None

            app.state.sessions.matches.remove(self.match)

            lobby = app.state.sessions.channels.get_by_name("#lobby")
            if lobby:
                lobby.enqueue(app.packets.dispose_match(self.match.id))

        else:  # multi is not empty
            if self is self.match.host:
                # player was host, transfer to first occupied slot
                for s in self.match.slots:
                    if s.player is not None:
                        self.match.host_id = s.player.id
                        self.match.host.enqueue(app.packets.match_transfer_host())
                        break

            if self in self.match._refs:
                self.match._refs.remove(self)
                self.match.chat.send_bot(f"{self.name} removed from match referees.")

            # notify others of our departure
            self.match.enqueue_state()

        self.match = None

    def join_channel(self, channel: Channel) -> bool:
        """Attempt to add `self` to `channel`.

        Returns False if already in the channel, lacking read
        privileges, or trying to join #lobby while not in the
        multiplayer lobby; True once joined.
        """
        # NOTE: `and` binds tighter than `or`, so the last two lines
        # form a single "#lobby while not in the mp lobby" condition.
        if (
            self in channel  # player already in channel
            or not channel.can_read(self.priv)  # no read privs
            or channel._name == "#lobby"  # not in mp lobby
            and not self.in_lobby
        ):
            return False

        channel.append(self)  # add to channel.players
        self.channels.append(channel)  # add to player.channels

        self.enqueue(app.packets.channel_join(channel.name))

        chan_info_packet = app.packets.channel_info(
            channel.name,
            channel.topic,
            len(channel.players),
        )

        if channel.instance:
            # instanced channel, only send the players
            # who are currently inside the instance
            for player in channel.players:
                player.enqueue(chan_info_packet)
        else:
            # normal channel, send to all players who
            # have access to see the channel's usercount.
            for player in app.state.sessions.players:
                if channel.can_read(player.priv):
                    player.enqueue(chan_info_packet)

        if app.settings.DEBUG:
            log(f"{self} joined {channel}.")

        return True

    def leave_channel(self, channel: Channel, kick: bool = True) -> None:
        """Attempt to remove `self` from `channel`.

        When `kick` is True the client is sent a channel-kick packet;
        pass False when the client already knows it left (e.g. logout).
        """
        # ensure they're in the chan.
        if self not in channel:
            return

        channel.remove(self)  # remove from c.players
        self.channels.remove(channel)  # remove from player.channels

        if kick:
            self.enqueue(app.packets.channel_kick(channel.name))

        # broadcast the updated user count.
        chan_info_packet = app.packets.channel_info(
            channel.name,
            channel.topic,
            len(channel.players),
        )

        if channel.instance:
            # instanced channel, only send the players
            # who are currently inside the instance
            for player in channel.players:
                player.enqueue(chan_info_packet)
        else:
            # normal channel, send to all players who
            # have access to see the channel's usercount.
            for player in app.state.sessions.players:
                if channel.can_read(player.priv):
                    player.enqueue(chan_info_packet)

        if app.settings.DEBUG:
            log(f"{self} left {channel}.")

    def add_spectator(self, player: Player) -> None:
        """Attempt to add `player` to `self`'s spectators."""
        chan_name = f"#spec_{self.id}"

        spec_chan = app.state.sessions.channels.get_by_name(chan_name)
        if not spec_chan:
            # spectator chan doesn't exist, create it.
            spec_chan = Channel(
                name=chan_name,
                topic=f"{self.name}'s spectator channel.",
                auto_join=False,
                instance=True,
            )

            self.join_channel(spec_chan)
            app.state.sessions.channels.append(spec_chan)

        # attempt to join their spectator channel.
        if not player.join_channel(spec_chan):
            log(f"{self} failed to join {spec_chan}?", Ansi.LYELLOW)
            return

        if not player.stealth:
            player_joined = app.packets.fellow_spectator_joined(player.id)
            for spectator in self.spectators:
                spectator.enqueue(player_joined)
                player.enqueue(app.packets.fellow_spectator_joined(spectator.id))

            self.enqueue(app.packets.spectator_joined(player.id))
        else:
            # player is admin in stealth, only give
            # other players data to us, not vice-versa.
            for spectator in self.spectators:
                player.enqueue(app.packets.fellow_spectator_joined(spectator.id))

        self.spectators.append(player)
        player.spectating = self

        log(f"{player} is now spectating {self}.")

    def remove_spectator(self, player: Player) -> None:
        """Attempt to remove `player` from `self`'s spectators."""
        self.spectators.remove(player)
        player.spectating = None

        channel = app.state.sessions.channels.get_by_name(f"#spec_{self.id}")
        assert channel is not None

        player.leave_channel(channel)

        if not self.spectators:
            # remove host from channel, deleting it.
+ self.leave_channel(channel) + else: + # send new playercount + channel_info = app.packets.channel_info( + channel.name, + channel.topic, + len(channel.players), + ) + fellow = app.packets.fellow_spectator_left(player.id) + + self.enqueue(channel_info) + + for spectator in self.spectators: + spectator.enqueue(fellow + channel_info) + + self.enqueue(app.packets.spectator_left(player.id)) + log(f"{player} is no longer spectating {self}.") + + async def add_friend(self, player: Player) -> None: + """Attempt to add `player` to `self`'s friends.""" + if player.id in self.friends: + log( + f"{self} tried to add {player}, who is already their friend!", + Ansi.LYELLOW, + ) + return + + self.friends.add(player.id) + await app.state.services.database.execute( + "REPLACE INTO relationships (user1, user2, type) VALUES (:user1, :user2, 'friend')", + {"user1": self.id, "user2": player.id}, + ) + + log(f"{self} friended {player}.") + + async def remove_friend(self, player: Player) -> None: + """Attempt to remove `player` from `self`'s friends.""" + if player.id not in self.friends: + log( + f"{self} tried to unfriend {player}, who is not their friend!", + Ansi.LYELLOW, + ) + return + + self.friends.remove(player.id) + await app.state.services.database.execute( + "DELETE FROM relationships WHERE user1 = :user1 AND user2 = :user2", + {"user1": self.id, "user2": player.id}, + ) + + log(f"{self} unfriended {player}.") + + async def add_block(self, player: Player) -> None: + """Attempt to add `player` to `self`'s blocks.""" + if player.id in self.blocks: + log( + f"{self} tried to block {player}, who they've already blocked!", + Ansi.LYELLOW, + ) + return + + self.blocks.add(player.id) + await app.state.services.database.execute( + "REPLACE INTO relationships VALUES (:user1, :user2, 'block')", + {"user1": self.id, "user2": player.id}, + ) + + log(f"{self} blocked {player}.") + + async def remove_block(self, player: Player) -> None: + """Attempt to remove `player` from `self`'s 
blocks.""" + if player.id not in self.blocks: + log( + f"{self} tried to unblock {player}, who they haven't blocked!", + Ansi.LYELLOW, + ) + return + + self.blocks.remove(player.id) + await app.state.services.database.execute( + "DELETE FROM relationships WHERE user1 = :user1 AND user2 = :user2", + {"user1": self.id, "user2": player.id}, + ) + + log(f"{self} unblocked {player}.") + + async def relationships_from_sql(self) -> None: + """Retrieve `self`'s relationships from sql.""" + for row in await app.state.services.database.fetch_all( + "SELECT user2, type FROM relationships WHERE user1 = :user1", + {"user1": self.id}, + ): + if row["type"] == "friend": + self.friends.add(row["user2"]) + else: + self.blocks.add(row["user2"]) + + # always have bot added to friends. + self.friends.add(1) + + async def get_global_rank(self, mode: GameMode) -> int: + if self.restricted: + return 0 + + rank = await app.state.services.redis.zrevrank( + f"bancho:leaderboard:{mode.value}", + str(self.id), + ) + return cast(int, rank) + 1 if rank is not None else 0 + + async def get_country_rank(self, mode: GameMode) -> int: + if self.restricted: + return 0 + + country = self.geoloc["country"]["acronym"] + rank = await app.state.services.redis.zrevrank( + f"bancho:leaderboard:{mode.value}:{country}", + str(self.id), + ) + + return cast(int, rank) + 1 if rank is not None else 0 + + async def update_rank(self, mode: GameMode) -> int: + country = self.geoloc["country"]["acronym"] + stats = self.stats[mode] + + if not self.restricted: + # global rank + await app.state.services.redis.zadd( + f"bancho:leaderboard:{mode.value}", + {str(self.id): stats.pp}, + ) + + # country rank + await app.state.services.redis.zadd( + f"bancho:leaderboard:{mode.value}:{country}", + {str(self.id): stats.pp}, + ) + + return await self.get_global_rank(mode) + + async def stats_from_sql_full(self) -> None: + """Retrieve `self`'s stats (all modes) from sql.""" + for row in await 
            game_mode = GameMode(row["mode"])
            self.stats[game_mode] = ModeData(
                tscore=row["tscore"],
                rscore=row["rscore"],
                pp=row["pp"],
                acc=row["acc"],
                plays=row["plays"],
                playtime=row["playtime"],
                max_combo=row["max_combo"],
                total_hits=row["total_hits"],
                rank=await self.get_global_rank(game_mode),
                grades={
                    Grade.XH: row["xh_count"],
                    Grade.X: row["x_count"],
                    Grade.SH: row["sh_count"],
                    Grade.S: row["s_count"],
                    Grade.A: row["a_count"],
                },
            )

    def update_latest_activity_soon(self) -> None:
        """Update the player's latest activity in the database.

        Fire-and-forget: the update runs as a background task.
        """
        task = users_repo.partial_update(
            id=self.id,
            latest_activity=int(time.time()),
        )
        # NOTE(review): no reference to the created task is retained;
        # asyncio only keeps weak refs to tasks, so it could in theory
        # be garbage-collected before completing — consider holding a
        # strong reference. TODO confirm.
        app.state.loop.create_task(task)

    def enqueue(self, data: bytes) -> None:
        """Add data to be sent to the client."""
        self._packet_queue += data

    def dequeue(self) -> bytes | None:
        """Get data from the queue to send to the client.

        Drains the queue; returns None when nothing is pending.
        """
        if self._packet_queue:
            data = bytes(self._packet_queue)
            self._packet_queue.clear()
            return data

        return None

    def send(self, msg: str, sender: Player, chan: Channel | None = None) -> None:
        """Enqueue `sender`'s `msg` to `self`. Sent in `chan`, or dm."""
        self.enqueue(
            app.packets.send_message(
                sender=sender.name,
                msg=msg,
                # when no channel is given, the recipient is the player
                # themselves (a direct message).
                recipient=(chan or self).name,
                sender_id=sender.id,
            ),
        )

    def send_bot(self, msg: str) -> None:
        """Enqueue `msg` to `self` from bot."""
        bot = app.state.sessions.bot

        self.enqueue(
            app.packets.send_message(
                sender=bot.name,
                msg=msg,
                recipient=self.name,
                sender_id=bot.id,
            ),
        )
diff --git a/app/objects/score.py b/app/objects/score.py
new file mode 100644
index 0000000..c9c7260
--- /dev/null
+++ b/app/objects/score.py
@@ -0,0 +1,453 @@
from __future__ import annotations

import functools
import hashlib
from datetime import datetime
from enum import IntEnum
from enum import unique
from pathlib import Path
from typing import TYPE_CHECKING

import app.state
import app.usecases.performance
import app.utils
from app.constants.clientflags import ClientFlags
from app.constants.gamemodes import GameMode
from app.constants.mods import Mods
from app.objects.beatmap import Beatmap
from app.repositories import scores as scores_repo
from app.usecases.performance import ScoreParams
from app.utils import escape_enum
from app.utils import pymysql_encode

if TYPE_CHECKING:
    from app.objects.player import Player

# on-disk directory where .osu beatmap files are stored/cached.
BEATMAPS_PATH = Path.cwd() / ".data/osu"


@unique
class Grade(IntEnum):
    # NOTE: these are implemented in the opposite order
    # as osu! to make more sense with <> operators.
+ N = 0 + F = 1 + D = 2 + C = 3 + B = 4 + A = 5 + S = 6 # S + SH = 7 # HD S + X = 8 # SS + XH = 9 # HD SS + + @classmethod + @functools.cache + def from_str(cls, s: str) -> Grade: + return { + "xh": Grade.XH, + "x": Grade.X, + "sh": Grade.SH, + "s": Grade.S, + "a": Grade.A, + "b": Grade.B, + "c": Grade.C, + "d": Grade.D, + "f": Grade.F, + "n": Grade.N, + }[s.lower()] + + def __format__(self, format_spec: str) -> str: + if format_spec == "stats_column": + return f"{self.name.lower()}_count" + else: + raise ValueError(f"Invalid format specifier {format_spec}") + + +@unique +@pymysql_encode(escape_enum) +class SubmissionStatus(IntEnum): + # TODO: make a system more like bancho's? + FAILED = 0 + SUBMITTED = 1 + BEST = 2 + + def __repr__(self) -> str: + return { + self.FAILED: "Failed", + self.SUBMITTED: "Submitted", + self.BEST: "Best", + }[self] + + +class Score: + """\ + Server side representation of an osu! score; any gamemode. + + Possibly confusing attributes + ----------- + bmap: `Beatmap | None` + A beatmap obj representing the osu map. + + player: `Player | None` + A player obj of the player who submitted the score. + + grade: `Grade` + The letter grade in the score. + + rank: `int` + The leaderboard placement of the score. + + perfect: `bool` + Whether the score is a full-combo. + + time_elapsed: `int` + The total elapsed time of the play (in milliseconds). + + client_flags: `int` + osu!'s old anticheat flags. + + prev_best: `Score | None` + The previous best score before this play was submitted. + NOTE: just because a score has a `prev_best` attribute does + mean the score is our best score on the map! the `status` + value will always be accurate for any score. 
+ """ + + def __init__(self) -> None: + # TODO: check whether the reamining Optional's should be + self.id: int | None = None + self.bmap: Beatmap | None = None + self.player: Player | None = None + + self.mode: GameMode + self.mods: Mods + + self.pp: float + self.sr: float + self.score: int + self.max_combo: int + self.acc: float + + # TODO: perhaps abstract these differently + # since they're mode dependant? feels weird.. + self.n300: int + self.n100: int # n150 for taiko + self.n50: int + self.nmiss: int + self.ngeki: int + self.nkatu: int + + self.grade: Grade + + self.passed: bool + self.perfect: bool + self.status: SubmissionStatus + + self.client_time: datetime + self.server_time: datetime + self.time_elapsed: int + + self.client_flags: ClientFlags + self.client_checksum: str + + self.rank: int | None = None + self.prev_best: Score | None = None + + def __repr__(self) -> str: + # TODO: i really need to clean up my reprs + try: + assert self.bmap is not None + return ( + f"<{self.acc:.2f}% {self.max_combo}x {self.nmiss}M " + f"#{self.rank} on {self.bmap.full_name} for {self.pp:,.2f}pp>" + ) + except: + return super().__repr__() + + """Classmethods to fetch a score object from various data types.""" + + @classmethod + async def from_sql(cls, score_id: int) -> Score | None: + """Create a score object from sql using its scoreid.""" + rec = await scores_repo.fetch_one(score_id) + + if rec is None: + return None + + s = cls() + + s.id = rec["id"] + s.bmap = await Beatmap.from_md5(rec["map_md5"]) + s.player = await app.state.sessions.players.from_cache_or_sql(id=rec["userid"]) + + s.sr = 0.0 # TODO + + s.pp = rec["pp"] + s.score = rec["score"] + s.max_combo = rec["max_combo"] + s.mods = Mods(rec["mods"]) + s.acc = rec["acc"] + s.n300 = rec["n300"] + s.n100 = rec["n100"] + s.n50 = rec["n50"] + s.nmiss = rec["nmiss"] + s.ngeki = rec["ngeki"] + s.nkatu = rec["nkatu"] + s.grade = Grade.from_str(rec["grade"]) + s.perfect = rec["perfect"] == 1 + s.status = 
        s.passed = s.status != SubmissionStatus.FAILED
        s.mode = GameMode(rec["mode"])
        s.server_time = rec["play_time"]
        s.time_elapsed = rec["time_elapsed"]
        s.client_flags = ClientFlags(rec["client_flags"])
        s.client_checksum = rec["online_checksum"]

        # placement can only be computed when the beatmap is known.
        if s.bmap:
            s.rank = await s.calculate_placement()

        return s

    @classmethod
    def from_submission(cls, data: list[str]) -> Score:
        """Create a score object from an osu! submission string."""
        s = cls()

        """ parse the following format
        # 0  online_checksum
        # 1  n300
        # 2  n100
        # 3  n50
        # 4  ngeki
        # 5  nkatu
        # 6  nmiss
        # 7  score
        # 8  max_combo
        # 9  perfect
        # 10 grade
        # 11 mods
        # 12 passed
        # 13 gamemode
        # 14 play_time # yyMMddHHmmss
        # 15 osu_version + (" " * client_flags)
        """

        s.client_checksum = data[0]
        s.n300 = int(data[1])
        s.n100 = int(data[2])
        s.n50 = int(data[3])
        s.ngeki = int(data[4])
        s.nkatu = int(data[5])
        s.nmiss = int(data[6])
        s.score = int(data[7])
        s.max_combo = int(data[8])
        s.perfect = data[9] == "True"
        s.grade = Grade.from_str(data[10])
        s.mods = Mods(int(data[11]))
        s.passed = data[12] == "True"
        s.mode = GameMode.from_params(int(data[13]), s.mods)
        s.client_time = datetime.strptime(data[14], "%y%m%d%H%M%S")
        # client flags are smuggled as trailing-space padding on the
        # version string; `& ~4` clears one bit — presumably a known
        # false-positive flag. TODO confirm which.
        s.client_flags = ClientFlags(data[15].count(" ") & ~4)

        s.server_time = datetime.now()

        return s

    def compute_online_checksum(
        self,
        osu_version: str,
        osu_client_hash: str,
        storyboard_checksum: str,
    ) -> str:
        """Validate the online checksum of the score.

        Reproduces the osu! client's salted md5 over the score fields;
        NOTE: the format indices are intentionally out of order
        ({13}{15}{14}) — they must match the client exactly.
        """
        assert self.player is not None
        assert self.bmap is not None

        return hashlib.md5(
            "chickenmcnuggets{0}o15{1}{2}smustard{3}{4}uu{5}{6}{7}{8}{9}{10}{11}Q{12}{13}{15}{14:%y%m%d%H%M%S}{16}{17}".format(
                self.n100 + self.n300,
                self.n50,
                self.ngeki,
                self.nkatu,
                self.nmiss,
                self.bmap.md5,
                self.max_combo,
                self.perfect,
                self.player.name,
                self.score,
                self.grade.name,
                int(self.mods),
                self.passed,
                self.mode.as_vanilla,
                self.client_time,
                osu_version,  # 20210520
                osu_client_hash,
                storyboard_checksum,
                # yyMMddHHmmss
            ).encode(),
        ).hexdigest()

    """Methods to calculate internal data for a score."""

    async def calculate_placement(self) -> int:
        """Return this score's 1-based leaderboard placement on its map."""
        assert self.bmap is not None

        # relax+ modes rank by pp; vanilla modes rank by score.
        if self.mode >= GameMode.RELAX_OSU:
            scoring_metric = "pp"
            score = self.pp
        else:
            scoring_metric = "score"
            score = self.score

        # NOTE: scoring_metric is code-controlled ("pp"/"score"), so the
        # f-string interpolation below is not an injection risk.
        num_better_scores: int | None = await app.state.services.database.fetch_val(
            "SELECT COUNT(*) AS c FROM scores s "
            "INNER JOIN users u ON u.id = s.userid "
            "WHERE s.map_md5 = :map_md5 AND s.mode = :mode "
            "AND s.status = 2 AND u.priv & 1 "
            f"AND s.{scoring_metric} > :score",
            {
                "map_md5": self.bmap.md5,
                "mode": self.mode,
                "score": score,
            },
            column=0,  # COUNT(*)
        )
        assert num_better_scores is not None
        return num_better_scores + 1

    def calculate_performance(self, beatmap_id: int) -> tuple[float, float]:
        """Calculate PP and star rating for our score."""
        mode_vn = self.mode.as_vanilla

        score_args = ScoreParams(
            mode=mode_vn,
            mods=int(self.mods),
            combo=self.max_combo,
            ngeki=self.ngeki,
            n300=self.n300,
            nkatu=self.nkatu,
            n100=self.n100,
            n50=self.n50,
            nmiss=self.nmiss,
        )

        result = app.usecases.performance.calculate_performances(
            osu_file_path=str(BEATMAPS_PATH / f"{beatmap_id}.osu"),
            scores=[score_args],
        )

        return result[0]["performance"]["pp"], result[0]["difficulty"]["stars"]

    async def calculate_status(self) -> None:
        """Calculate the submission status of a submitted score."""
        assert self.player is not None
        assert self.bmap is not None

        recs = await scores_repo.fetch_many(
            user_id=self.player.id,
            map_md5=self.bmap.md5,
            mode=self.mode,
            status=SubmissionStatus.BEST,
        )

        if recs:
            rec = recs[0]

            # we have a score on the map.
            # save it as our previous best score.
+ self.prev_best = await Score.from_sql(rec["id"]) + assert self.prev_best is not None + + # if our new score is better, update + # both of our score's submission statuses. + # NOTE: this will be updated in sql later on in submission + if self.pp > rec["pp"]: + self.status = SubmissionStatus.BEST + self.prev_best.status = SubmissionStatus.SUBMITTED + else: + self.status = SubmissionStatus.SUBMITTED + else: + # this is our first score on the map. + self.status = SubmissionStatus.BEST + + def calculate_accuracy(self) -> float: + """Calculate the accuracy of our score.""" + mode_vn = self.mode.as_vanilla + + if mode_vn == 0: # osu! + total = self.n300 + self.n100 + self.n50 + self.nmiss + + if total == 0: + return 0.0 + + return ( + 100.0 + * ((self.n300 * 300.0) + (self.n100 * 100.0) + (self.n50 * 50.0)) + / (total * 300.0) + ) + + elif mode_vn == 1: # osu!taiko + total = self.n300 + self.n100 + self.nmiss + + if total == 0: + return 0.0 + + return 100.0 * ((self.n100 * 0.5) + self.n300) / total + + elif mode_vn == 2: # osu!catch + total = self.n300 + self.n100 + self.n50 + self.nkatu + self.nmiss + + if total == 0: + return 0.0 + + return 100.0 * (self.n300 + self.n100 + self.n50) / total + + elif mode_vn == 3: # osu!mania + total = ( + self.n300 + self.n100 + self.n50 + self.ngeki + self.nkatu + self.nmiss + ) + + if total == 0: + return 0.0 + + if self.mods & Mods.SCOREV2: + return ( + 100.0 + * ( + (self.n50 * 50.0) + + (self.n100 * 100.0) + + (self.nkatu * 200.0) + + (self.n300 * 300.0) + + (self.ngeki * 305.0) + ) + / (total * 305.0) + ) + + return ( + 100.0 + * ( + (self.n50 * 50.0) + + (self.n100 * 100.0) + + (self.nkatu * 200.0) + + ((self.n300 + self.ngeki) * 300.0) + ) + / (total * 300.0) + ) + else: + raise Exception(f"Invalid vanilla mode {mode_vn}") + + """ Methods for updating a score. 
""" + + async def increment_replay_views(self) -> None: + # TODO: move replay views to be per-score rather than per-user + assert self.player is not None + + # TODO: apparently cached stats don't store replay views? + # need to refactor that to be able to use stats_repo here + await app.state.services.database.execute( + f"UPDATE stats " + "SET replay_views = replay_views + 1 " + "WHERE id = :user_id AND mode = :mode", + {"user_id": self.player.id, "mode": self.mode}, + ) diff --git a/app/packets.py b/app/packets.py new file mode 100644 index 0000000..94bfbbd --- /dev/null +++ b/app/packets.py @@ -0,0 +1,1289 @@ +from __future__ import annotations + +import random +import struct +from abc import ABC +from abc import abstractmethod +from collections.abc import Callable +from collections.abc import Collection +from collections.abc import Iterator +from dataclasses import dataclass +from dataclasses import field +from enum import IntEnum +from enum import unique +from functools import cache +from functools import lru_cache +from typing import TYPE_CHECKING +from typing import Any +from typing import NamedTuple +from typing import cast + +# from app.objects.beatmap import BeatmapInfo + +if TYPE_CHECKING: + from app.objects.match import Match + from app.objects.player import Player + +# packets are comprised of 3 parts: +# - a unique identifier (the packet id), representing the type of request +# - the length of the request data +# - request data; specific to the packet id + +# the packet id is sent over the wire as an unsigned short (2 bytes, u16) +# the packet data length is sent as an unsigned long (4 bytes, u32) +# the packet data +# - is of variable length +# - may comprise of multiple objects +# - is specific to the request type (packet id) +# - types can vary, but are from a fixed set of possibilities (u8, u16, u32, u64, i8, i16, i32, i64, f32, f64, string, and some higher level types comprising of these primitives) + +# osu! 
# little endian: [2, 0, 0, 0] == 2
# big endian: [0, 0, 0, 2] == 2


@unique
class ClientPackets(IntEnum):
    """Packet ids sent from the osu! client to the server."""

    CHANGE_ACTION = 0
    SEND_PUBLIC_MESSAGE = 1
    LOGOUT = 2
    REQUEST_STATUS_UPDATE = 3
    PING = 4
    START_SPECTATING = 16
    STOP_SPECTATING = 17
    SPECTATE_FRAMES = 18
    ERROR_REPORT = 20
    CANT_SPECTATE = 21
    SEND_PRIVATE_MESSAGE = 25
    PART_LOBBY = 29
    JOIN_LOBBY = 30
    CREATE_MATCH = 31
    JOIN_MATCH = 32
    PART_MATCH = 33
    MATCH_CHANGE_SLOT = 38
    MATCH_READY = 39
    MATCH_LOCK = 40
    MATCH_CHANGE_SETTINGS = 41
    MATCH_START = 44
    MATCH_SCORE_UPDATE = 47
    MATCH_COMPLETE = 49
    MATCH_CHANGE_MODS = 51
    MATCH_LOAD_COMPLETE = 52
    MATCH_NO_BEATMAP = 54
    MATCH_NOT_READY = 55
    MATCH_FAILED = 56
    MATCH_HAS_BEATMAP = 59
    MATCH_SKIP_REQUEST = 60
    CHANNEL_JOIN = 63
    BEATMAP_INFO_REQUEST = 68
    MATCH_TRANSFER_HOST = 70
    FRIEND_ADD = 73
    FRIEND_REMOVE = 74
    MATCH_CHANGE_TEAM = 77
    CHANNEL_PART = 78
    RECEIVE_UPDATES = 79
    SET_AWAY_MESSAGE = 82
    IRC_ONLY = 84
    USER_STATS_REQUEST = 85
    MATCH_INVITE = 87
    MATCH_CHANGE_PASSWORD = 90
    TOURNAMENT_MATCH_INFO_REQUEST = 93
    USER_PRESENCE_REQUEST = 97
    USER_PRESENCE_REQUEST_ALL = 98
    TOGGLE_BLOCK_NON_FRIEND_DMS = 99
    TOURNAMENT_JOIN_MATCH_CHANNEL = 108
    TOURNAMENT_LEAVE_MATCH_CHANNEL = 109

    def __repr__(self) -> str:
        return f"<{self.name} ({self.value})>"


@unique
class ServerPackets(IntEnum):
    """Packet ids sent from the server to the osu! client."""

    USER_ID = 5
    SEND_MESSAGE = 7
    PONG = 8
    HANDLE_IRC_CHANGE_USERNAME = 9  # unused
    HANDLE_IRC_QUIT = 10
    USER_STATS = 11
    USER_LOGOUT = 12
    SPECTATOR_JOINED = 13
    SPECTATOR_LEFT = 14
    SPECTATE_FRAMES = 15
    VERSION_UPDATE = 19
    SPECTATOR_CANT_SPECTATE = 22
    GET_ATTENTION = 23
    NOTIFICATION = 24
    UPDATE_MATCH = 26
    NEW_MATCH = 27
    DISPOSE_MATCH = 28
    TOGGLE_BLOCK_NON_FRIEND_DMS = 34
    MATCH_JOIN_SUCCESS = 36
    MATCH_JOIN_FAIL = 37
    FELLOW_SPECTATOR_JOINED = 42
    FELLOW_SPECTATOR_LEFT = 43
    ALL_PLAYERS_LOADED = 45
    MATCH_START = 46
    MATCH_SCORE_UPDATE = 48
    MATCH_TRANSFER_HOST = 50
    MATCH_ALL_PLAYERS_LOADED = 53
    MATCH_PLAYER_FAILED = 57
    MATCH_COMPLETE = 58
    MATCH_SKIP = 61
    UNAUTHORIZED = 62  # unused
    CHANNEL_JOIN_SUCCESS = 64
    CHANNEL_INFO = 65
    CHANNEL_KICK = 66
    CHANNEL_AUTO_JOIN = 67
    BEATMAP_INFO_REPLY = 69
    PRIVILEGES = 71
    FRIENDS_LIST = 72
    PROTOCOL_VERSION = 75
    MAIN_MENU_ICON = 76
    MONITOR = 80  # unused
    MATCH_PLAYER_SKIPPED = 81
    USER_PRESENCE = 83
    RESTART = 86
    MATCH_INVITE = 88
    CHANNEL_INFO_END = 89
    MATCH_CHANGE_PASSWORD = 91
    SILENCE_END = 92
    USER_SILENCED = 94
    USER_PRESENCE_SINGLE = 95
    USER_PRESENCE_BUNDLE = 96
    USER_DM_BLOCKED = 100
    TARGET_IS_SILENCED = 101
    VERSION_UPDATE_FORCED = 102
    SWITCH_SERVER = 103
    ACCOUNT_RESTRICTED = 104
    RTX = 105  # unused
    MATCH_ABORT = 106
    SWITCH_TOURNAMENT_SERVER = 107

    def __repr__(self) -> str:
        return f"<{self.name} ({self.value})>"


@unique
class osuTypes(IntEnum):
    """Wire types used when (de)serializing packet data."""

    # integral
    i8 = 0
    u8 = 1
    i16 = 2
    u16 = 3
    i32 = 4
    u32 = 5
    f32 = 6
    i64 = 7
    u64 = 8
    f64 = 9

    # osu
    message = 11
    channel = 12
    match = 13
    scoreframe = 14
    mapInfoRequest = 15
    mapInfoReply = 16
    replayFrameBundle = 17

    # misc
    i32_list = 18  # 2 bytes len
    i32_list4l = 19  # 4 bytes len
    string = 20
    raw = 21


class Message(NamedTuple):
    # an osu! chat message as sent over the wire.
    sender: str
    text: str
    recipient: str
    sender_id: int


class Channel(NamedTuple):
    # an osu! chat channel as sent over the wire.
    name: str
    topic: str
    players: int


class ReplayAction(IntEnum):
    """Replay/spectator stream state, as reported by the client."""

    Standard = 0
    NewSong = 1
    Skip = 2
    Completion = 3
    Fail = 4
    Pause = 5
    Unpause = 6
    SongSelect = 7
    WatchingOther = 8


@dataclass
class ScoreFrame:
    """A single in-play score snapshot from the client."""

    time: int
    id: int
    num300: int
    num100: int
    num50: int
    num_geki: int
    num_katu: int
    num_miss: int
    total_score: int
    max_combo: int
    current_combo: int
    perfect: bool
    current_hp: int
    tag_byte: int

    score_v2: bool

    # if score_v2:
    combo_portion: float | None = None
    bonus_portion: float | None = None
+class ReplayFrame(NamedTuple): + button_state: int + taiko_byte: int # pre-taiko support (<=2008) + x: float + y: float + time: int + + +class ReplayFrameBundle(NamedTuple): + replay_frames: list[ReplayFrame] + score_frame: ScoreFrame + action: ReplayAction + extra: int + sequence: int + + raw_data: memoryview # readonly + + +@dataclass +class MultiplayerMatch: + id: int = 0 + in_progress: bool = False + + powerplay: int = 0 # i8 + mods: int = 0 # i32 + name: str = "" + passwd: str = "" + + map_name: str = "" + map_id: int = 0 # i32 + map_md5: str = "" + + slot_statuses: list[int] = field(default_factory=list) # i8 + slot_teams: list[int] = field(default_factory=list) # i8 + slot_ids: list[int] = field(default_factory=list) # i8 + + host_id: int = 0 # i32 + + mode: int = 0 # i8 + win_condition: int = 0 # i8 + team_type: int = 0 # i8 + + freemods: bool = False # i8 + slot_mods: list[int] = field(default_factory=list) # i32 + + seed: int = 0 # i32 + + +class BasePacket(ABC): + def __init__(self, reader: BanchoPacketReader) -> None: ... + + @abstractmethod + async def handle(self, player: Player) -> None: ... + + +PacketMap = dict[ClientPackets, type[BasePacket]] + + +class BanchoPacketReader: + """\ + A class for reading bancho packets + from the osu! client's request body. + + Attributes + ----------- + body_view: `memoryview` + A readonly view of the request's body. + + packet_map: `dict[ClientPackets, BasePacket]` + The map of registered packets the reader may handle. + + current_length: int + The length in bytes of the packet currently being handled. + + Intended Usage: + >>> with memoryview(await request.body()) as body_view: + ... for packet in BanchoPacketReader(body_view): + ... 
await packet.handle() + """ + + def __init__(self, body_view: memoryview, packet_map: PacketMap) -> None: + self.body_view = body_view # readonly + self.packet_map = packet_map + + self.current_len = 0 # last read packet's length + + def __iter__(self) -> Iterator[BasePacket]: + return self + + def __next__(self) -> BasePacket: + # do not break until we've read the + # header of a packet we can handle. + while self.body_view: # len(self.view) < 7? + p_type, p_len = self._read_header() + + if p_type not in self.packet_map: + # packet type not handled, remove + # from internal buffer and continue. + if p_len != 0: + self.body_view = self.body_view[p_len:] + else: + # we can handle this one. + break + else: + raise StopIteration + + # we have a packet handler for this. + packet_cls = self.packet_map[p_type] + self.current_len = p_len + + return packet_cls(self) + + def _read_header(self) -> tuple[ClientPackets, int]: + """Read the header of an osu! packet (id & length).""" + # read type & length from the body + data = struct.unpack(" memoryview: + val = self.body_view[: self.current_len] + self.body_view = self.body_view[self.current_len :] + return val + + # integral types + + def read_i8(self) -> int: + val = self.body_view[0] + self.body_view = self.body_view[1:] + return val - 256 if val > 127 else val + + def read_u8(self) -> int: + val = self.body_view[0] + self.body_view = self.body_view[1:] + return val + + def read_i16(self) -> int: + val = int.from_bytes(self.body_view[:2], "little", signed=True) + self.body_view = self.body_view[2:] + return val + + def read_u16(self) -> int: + val = int.from_bytes(self.body_view[:2], "little", signed=False) + self.body_view = self.body_view[2:] + return val + + def read_i32(self) -> int: + val = int.from_bytes(self.body_view[:4], "little", signed=True) + self.body_view = self.body_view[4:] + return val + + def read_u32(self) -> int: + val = int.from_bytes(self.body_view[:4], "little", signed=False) + self.body_view = 
self.body_view[4:] + return val + + def read_i64(self) -> int: + val = int.from_bytes(self.body_view[:8], "little", signed=True) + self.body_view = self.body_view[8:] + return val + + def read_u64(self) -> int: + val = int.from_bytes(self.body_view[:8], "little", signed=False) + self.body_view = self.body_view[8:] + return val + + # floating-point types + + def read_f16(self) -> float: + (val,) = struct.unpack_from(" float: + (val,) = struct.unpack_from(" float: + (val,) = struct.unpack_from(" tuple[int, ...]: + length = int.from_bytes(self.body_view[:2], "little") + self.body_view = self.body_view[2:] + + val = struct.unpack(f'<{"I" * length}', self.body_view[: length * 4]) + self.body_view = self.body_view[length * 4 :] + return val + + def read_i32_list_i32l(self) -> tuple[int, ...]: + length = int.from_bytes(self.body_view[:4], "little") + self.body_view = self.body_view[4:] + + val = struct.unpack(f'<{"I" * length}', self.body_view[: length * 4]) + self.body_view = self.body_view[length * 4 :] + return val + + def read_string(self) -> str: + exists = self.body_view[0] == 0x0B + self.body_view = self.body_view[1:] + + if not exists: + # no string sent. + return "" + + # non-empty string, decode str length (uleb128) + length = shift = 0 + + while True: + byte = self.body_view[0] + self.body_view = self.body_view[1:] + + length |= (byte & 0x7F) << shift + if (byte & 0x80) == 0: + break + + shift += 7 + + val = self.body_view[:length].tobytes().decode() # copy + self.body_view = self.body_view[length:] + return val + + # custom osu! types + + def read_message(self) -> Message: + """Read an osu! message from the internal buffer.""" + return Message( + sender=self.read_string(), + text=self.read_string(), + recipient=self.read_string(), + sender_id=self.read_i32(), + ) + + def read_channel(self) -> Channel: + """Read an osu! 
channel from the internal buffer.""" + return Channel( + name=self.read_string(), + topic=self.read_string(), + players=self.read_i32(), + ) + + def read_match(self) -> MultiplayerMatch: + """Read an osu! match from the internal buffer.""" + match = MultiplayerMatch( + id=self.read_i16(), + in_progress=self.read_i8() == 1, + powerplay=self.read_i8(), + mods=self.read_i32(), + name=self.read_string(), + passwd=self.read_string(), + map_name=self.read_string(), + map_id=self.read_i32(), + map_md5=self.read_string(), + slot_statuses=[self.read_i8() for _ in range(16)], + slot_teams=[self.read_i8() for _ in range(16)], + # ^^ up to slot_ids, as it relies on slot_statuses ^^ + ) + + for status in match.slot_statuses: + if status & 124 != 0: # slot has a player + match.slot_ids.append(self.read_i32()) + + match.host_id = self.read_i32() + match.mode = self.read_i8() + match.win_condition = self.read_i8() + match.team_type = self.read_i8() + match.freemods = self.read_i8() == 1 + + if match.freemods: + match.slot_mods = [self.read_i32() for _ in range(16)] + + match.seed = self.read_i32() # used for mania random mod + + return match + + def read_scoreframe(self) -> ScoreFrame: + sf = ScoreFrame(*SCOREFRAME_FMT.unpack_from(self.body_view[:29])) + self.body_view = self.body_view[29:] + + if sf.score_v2: + sf.combo_portion = self.read_f64() + sf.bonus_portion = self.read_f64() + + return sf + + def read_replayframe(self) -> ReplayFrame: + return ReplayFrame( + button_state=self.read_u8(), + taiko_byte=self.read_u8(), # pre-taiko support (<=2008) + x=self.read_f32(), + y=self.read_f32(), + time=self.read_i32(), + ) + + def read_replayframe_bundle(self) -> ReplayFrameBundle: + # save raw format to distribute to the other clients + raw_data = self.body_view[: self.current_len] + + extra = self.read_i32() # bancho proto >= 18 + framecount = self.read_u16() + frames = [self.read_replayframe() for _ in range(framecount)] + action = ReplayAction(self.read_u8()) + scoreframe = 
self.read_scoreframe() + sequence = self.read_u16() + + return ReplayFrameBundle(frames, scoreframe, action, extra, sequence, raw_data) + + +# write functions + + +def write_uleb128(num: int) -> bytes | bytearray: + """Write `num` into an unsigned LEB128.""" + if num == 0: + return b"\x00" + + ret = bytearray() + + while num != 0: + ret.append(num & 0x7F) + num >>= 7 + if num != 0: + ret[-1] |= 0x80 + + return ret + + +def write_string(s: str) -> bytes: + """Write `s` into bytes (ULEB128 & string).""" + if s: + encoded = s.encode() + ret = b"\x0b" + write_uleb128(len(encoded)) + encoded + else: + ret = b"\x00" + + return ret + + +def write_i32_list(l: Collection[int]) -> bytearray: + """Write `l` into bytes (int32 list).""" + ret = bytearray(len(l).to_bytes(2, "little")) + + for i in l: + ret += i.to_bytes(4, "little", signed=True) + + return ret + + +def write_message(sender: str, msg: str, recipient: str, sender_id: int) -> bytearray: + """Write params into bytes (osu! message).""" + ret = bytearray(write_string(sender)) + ret += write_string(msg) + ret += write_string(recipient) + ret += sender_id.to_bytes(4, "little", signed=True) + return ret + + +def write_channel(name: str, topic: str, count: int) -> bytearray: + """Write params into bytes (osu! channel).""" + ret = bytearray(write_string(name)) + ret += write_string(topic) + ret += count.to_bytes(2, "little") + return ret + + +# XXX: deprecated +# def write_mapInfoReply(maps: Sequence[BeatmapInfo]) -> bytearray: +# """ Write `maps` into bytes (osu! map info). """ +# ret = bytearray(len(maps).to_bytes(4, 'little')) +# +# # Write files +# for map in maps: +# ret += struct.pack(' bytearray: + """Write `m` into bytes (osu! match).""" + # 0 is for match type + ret = bytearray(struct.pack(" bytes: + """Write `s` into bytes (osu! 
scoreframe).""" + return SCOREFRAME_FMT.pack( + s.time, + s.id, + s.num300, + s.num100, + s.num50, + s.num_geki, + s.num_katu, + s.num_miss, + s.total_score, + s.max_combo, + s.current_combo, + s.perfect, + s.current_hp, + s.tag_byte, + s.score_v2, + ) + + +_noexpand_types: dict[osuTypes, Callable[..., bytes]] = { + # base + osuTypes.i8: struct.Struct(" bytes: + """Write `args` into bytes.""" + ret = bytearray(struct.pack(" bytes: + """\ + Construct a login reply packet. + + In successful cases, we'll send the user's ID. + + In failure cases, we'll send a negative integer of type `LoginFailureReason`. + """ + return write(ServerPackets.USER_ID, (user_id, osuTypes.i32)) + + +# packet id: 7 +def send_message(sender: str, msg: str, recipient: str, sender_id: int) -> bytes: + return write( + ServerPackets.SEND_MESSAGE, + ((sender, msg, recipient, sender_id), osuTypes.message), + ) + + +# packet id: 8 +@cache +def pong() -> bytes: + return write(ServerPackets.PONG) + + +# packet id: 9 +# NOTE: deprecated +def change_username(old: str, new: str) -> bytes: + return write( + ServerPackets.HANDLE_IRC_CHANGE_USERNAME, + (f"{old}>>>>{new}", osuTypes.string), + ) + + +BOT_STATUSES = ( + (3, "the source code.."), # editing + (6, "geohot livestreams.."), # watching + (6, "asottile tutorials.."), # watching + (6, "over the server.."), # watching + (8, "out new features.."), # testing + (9, "a pull request.."), # submitting +) + +# since the bot is always online and is +# also automatically added to all player's +# friends list, their stats are requested +# *very* frequently, and should be cached. +# NOTE: this is cleared once in a while by +# `bg_loops.reroll_bot_status` to keep fresh. + + +@cache +def bot_stats(player: Player) -> bytes: + # pick at random from list of potential statuses. 
# packet id: 11
def _user_stats(
    user_id: int,
    action: int,
    info_text: str,
    map_md5: str,
    mods: int,
    mode: int,
    map_id: int,
    ranked_score: int,
    accuracy: float,
    plays: int,
    total_score: int,
    global_rank: int,
    pp: int,
) -> bytes:
    """Construct a USER_STATS packet from raw stat values."""
    # HACK: the client stores pp in a u16; past that cap we smuggle the
    # value through the ranked-score field so it's still shown in-game.
    if pp > 0xFFFF:
        ranked_score, pp = pp, 0

    return write(
        ServerPackets.USER_STATS,
        (user_id, osuTypes.i32),
        (action, osuTypes.u8),
        (info_text, osuTypes.string),
        (map_md5, osuTypes.string),
        (mods, osuTypes.i32),
        (mode, osuTypes.u8),
        (map_id, osuTypes.i32),
        (ranked_score, osuTypes.i64),
        (accuracy / 100.0, osuTypes.f32),
        (plays, osuTypes.i32),
        (total_score, osuTypes.i64),
        (global_rank, osuTypes.i32),
        (pp, osuTypes.u16),
    )
# packet id: 15
def spectate_frames(data: bytes) -> bytes:
    """Construct a SPECTATE_FRAMES packet by wrapping `data` unmodified."""
    # NOTE: this is left as unvalidated (raw) for efficiency due to the
    # sheer rate of usage of these packets in spectator mode.

    # spectator frames *received* by the server are always validated.

    return write(ServerPackets.SPECTATE_FRAMES, (data, osuTypes.raw))
# packet id: 65
@lru_cache(maxsize=8)
def channel_info(name: str, topic: str, p_count: int) -> bytes:
    """Construct a CHANNEL_INFO packet describing one chat channel."""
    channel = (name, topic, p_count)
    return write(ServerPackets.CHANNEL_INFO, (channel, osuTypes.channel))
# packet id: 76
@cache
def main_menu_icon(icon_url: str, onclick_url: str) -> bytes:
    """Construct a MAIN_MENU_ICON packet ("image|click-target" string)."""
    return write(
        ServerPackets.MAIN_MENU_ICON,
        (f"{icon_url}|{onclick_url}", osuTypes.string),
    )
# packet id: 83
def _user_presence(
    user_id: int,
    name: str,
    utc_offset: int,
    country_code: int,
    bancho_privileges: int,
    mode: int,
    latitude: int,
    longitude: int,
    global_rank: int,
) -> bytes:
    """Construct a USER_PRESENCE packet from raw presence values."""
    # the vanilla mode is packed into the upper bits of the privileges
    # byte; utc offset is shifted into an unsigned range for the u8.
    packed_priv = bancho_privileges | (mode << 5)
    return write(
        ServerPackets.USER_PRESENCE,
        (user_id, osuTypes.i32),
        (name, osuTypes.string),
        (utc_offset + 24, osuTypes.u8),
        (country_code, osuTypes.u8),
        (packed_priv, osuTypes.u8),
        (longitude, osuTypes.f32),
        (latitude, osuTypes.f32),
        (global_rank, osuTypes.i32),
    )
# packet id: 101
def target_silenced(target: str) -> bytes:
    """Construct a TARGET_IS_SILENCED packet (empty message shell naming `target`)."""
    empty_msg = ("", "", target, 0)
    return write(ServerPackets.TARGET_IS_SILENCED, (empty_msg, osuTypes.message))
class Base(metaclass=DeclarativeMeta):
    """Declarative base shared by all table classes in `app.repositories`.

    Ties every subclass to the module-level `mapper_registry` so all
    table metadata is collected in one place.
    """

    # abstract: no table is created for this base class itself
    __abstract__ = True

    registry = mapper_registry
    metadata = mapper_registry.metadata

    __init__ = mapper_registry.constructor
async def fetch_one(
    id: int | None = None,
    name: str | None = None,
) -> Achievement | None:
    """Fetch a single achievement by `id` and/or `name`.

    Raises ValueError when neither filter is given; returns None when
    no row matches.
    """
    if id is None and name is None:
        raise ValueError("Must provide at least one parameter.")

    select_stmt = select(*READ_PARAMS)

    if id is not None:
        select_stmt = select_stmt.where(AchievementsTable.id == id)
    if name is not None:
        select_stmt = select_stmt.where(AchievementsTable.name == name)

    achievement = await app.state.services.database.fetch_one(select_stmt)
    if achievement is None:
        return None

    # NOTE(review): the stored `cond` column is eval'd into a callable
    # `(score, mode_vn) -> bool`. This is only safe while the column is
    # written exclusively by trusted operators — never by user input.
    achievement["cond"] = eval(f'lambda score, mode_vn: {achievement["cond"]}')
    return cast(Achievement, achievement)
async def partial_update(
    id: int,
    file: str | _UnsetSentinel = UNSET,
    name: str | _UnsetSentinel = UNSET,
    desc: str | _UnsetSentinel = UNSET,
    cond: str | _UnsetSentinel = UNSET,
) -> Achievement | None:
    """Update an existing achievement; UNSET fields are left untouched."""
    new_values = {
        column: value
        for column, value in (
            ("file", file),
            ("name", name),
            ("desc", desc),
            ("cond", cond),
        )
        if not isinstance(value, _UnsetSentinel)
    }

    update_stmt = update(AchievementsTable).where(AchievementsTable.id == id)
    if new_values:
        update_stmt = update_stmt.values(**new_values)

    await app.state.services.database.execute(update_stmt)

    select_stmt = select(*READ_PARAMS).where(AchievementsTable.id == id)
    achievement = await app.state.services.database.fetch_one(select_stmt)
    if achievement is None:
        return None

    achievement["cond"] = eval(f'lambda score, mode_vn: {achievement["cond"]}')
    return cast(Achievement, achievement)
class ChannelsTable(Base):
    """ORM mapping for the `channels` table (chat channels)."""

    __tablename__ = "channels"

    id = Column("id", Integer, primary_key=True, nullable=False, autoincrement=True)
    name = Column("name", String(32), nullable=False)
    topic = Column("topic", String(256), nullable=False)
    # privilege values required to read/write the channel — presumably
    # bitmasks from app.constants.privileges; TODO confirm semantics
    read_priv = Column("read_priv", Integer, nullable=False, server_default="1")
    write_priv = Column("write_priv", Integer, nullable=False, server_default="2")
    # 1 if clients should be joined to this channel automatically
    auto_join = Column("auto_join", TINYINT(1), nullable=False, server_default="0")

    __table_args__ = (
        Index("channels_name_uindex", name, unique=True),
        Index("channels_auto_join_index", auto_join),
    )
async def fetch_one(
    id: int | None = None,
    name: str | None = None,
) -> Channel | None:
    """Fetch a single channel by `id` and/or `name`."""
    filters = []
    if id is not None:
        filters.append(ChannelsTable.id == id)
    if name is not None:
        filters.append(ChannelsTable.name == name)

    if not filters:
        raise ValueError("Must provide at least one parameter.")

    select_stmt = select(*READ_PARAMS).where(*filters)
    row = await app.state.services.database.fetch_one(select_stmt)
    return cast(Channel | None, row)
async def partial_update(
    name: str,
    topic: str | _UnsetSentinel = UNSET,
    read_priv: int | _UnsetSentinel = UNSET,
    write_priv: int | _UnsetSentinel = UNSET,
    auto_join: bool | _UnsetSentinel = UNSET,
) -> Channel | None:
    """Update a channel in the database; UNSET fields are left untouched."""
    changes = {
        field: value
        for field, value in (
            ("topic", topic),
            ("read_priv", read_priv),
            ("write_priv", write_priv),
            ("auto_join", auto_join),
        )
        if not isinstance(value, _UnsetSentinel)
    }

    update_stmt = update(ChannelsTable).where(ChannelsTable.name == name)
    if changes:
        update_stmt = update_stmt.values(**changes)

    await app.state.services.database.execute(update_stmt)

    select_stmt = select(*READ_PARAMS).where(ChannelsTable.name == name)
    row = await app.state.services.database.fetch_one(select_stmt)
    return cast(Channel | None, row)
class ClansTable(Base):
    """ORM mapping for the `clans` table."""

    __tablename__ = "clans"

    id = Column("id", Integer, primary_key=True, nullable=False, autoincrement=True)
    name = Column("name", String(16, collation="utf8"), nullable=False)
    tag = Column("tag", String(6, collation="utf8"), nullable=False)
    # user id of the clan's owner; one clan per owner (unique index below)
    owner = Column("owner", Integer, nullable=False)
    created_at = Column("created_at", DateTime, nullable=False)

    __table_args__ = (
        # NOTE(review): named "uindex" but declared unique=False — either
        # the name or the uniqueness flag looks wrong; confirm intended schema.
        Index("clans_name_uindex", name, unique=False),
        Index("clans_owner_uindex", owner, unique=True),
        Index("clans_tag_uindex", tag, unique=True),
    )
async def fetch_many(
    page: int | None = None,
    page_size: int | None = None,
) -> list[Clan]:
    """Fetch many clans from the database, optionally paginated (1-based `page`)."""
    select_stmt = select(*READ_PARAMS)

    paginating = page is not None and page_size is not None
    if paginating:
        offset = (page - 1) * page_size
        select_stmt = select_stmt.offset(offset).limit(page_size)

    rows = await app.state.services.database.fetch_all(select_stmt)
    return cast(list[Clan], rows)
async def delete_one(id: int) -> Clan | None:
    """Delete a clan from the database, returning it (None if absent)."""
    select_stmt = select(*READ_PARAMS).where(ClansTable.id == id)
    row = await app.state.services.database.fetch_one(select_stmt)
    if row is None:
        return None

    await app.state.services.database.execute(
        delete(ClansTable).where(ClansTable.id == id),
    )
    return cast(Clan, row)
async def create(
    userid: int,
    osupath: str,
    adapters: str,
    uninstall_id: str,
    disk_serial: str,
) -> ClientHash:
    """Create a new client hash entry in the database.

    Upserts on the composite primary key (userid, osupath, adapters,
    uninstall_id, disk_serial): a repeat login with identical hashes
    bumps `occurrences` and refreshes `latest_time` instead of
    inserting a duplicate row.
    """
    insert_stmt: MysqlInsert = (
        mysql_insert(ClientHashesTable)
        .values(
            userid=userid,
            osupath=osupath,
            adapters=adapters,
            uninstall_id=uninstall_id,
            disk_serial=disk_serial,
            latest_time=func.now(),
            occurrences=1,
        )
        .on_duplicate_key_update(
            latest_time=func.now(),
            occurrences=ClientHashesTable.occurrences + 1,
        )
    )

    await app.state.services.database.execute(insert_stmt)

    # re-fetch by the full composite key to return the stored row
    select_stmt = (
        select(*READ_PARAMS)
        .where(ClientHashesTable.userid == userid)
        .where(ClientHashesTable.osupath == osupath)
        .where(ClientHashesTable.adapters == adapters)
        .where(ClientHashesTable.uninstall_id == uninstall_id)
        .where(ClientHashesTable.disk_serial == disk_serial)
    )
    client_hash = await app.state.services.database.fetch_one(select_stmt)

    assert client_hash is not None
    return cast(ClientHash, client_hash)
class TargetType(StrEnum):
    """What a comment is attached to (values match the `target_type` DB enum)."""

    REPLAY = "replay"
    BEATMAP = "map"
    SONG = "song"
= Column("id", Integer, nullable=False, primary_key=True, autoincrement=True)
+    target_id = Column("target_id", Integer, nullable=False)  # typed Integer to match Comment.target_id and sibling columns
+    target_type = Column(Enum(TargetType, name="target_type"), nullable=False)
+    userid = Column("userid", Integer, nullable=False)
+    time = Column("time", FLOAT(precision=6, scale=3), nullable=False)
+    comment = Column("comment", String(80, collation="utf8"), nullable=False)
+    colour = Column("colour", CHAR(6), nullable=True)
+
+
+READ_PARAMS = (
+    CommentsTable.id,
+    CommentsTable.target_id,
+    CommentsTable.target_type,
+    CommentsTable.userid,
+    CommentsTable.time,
+    CommentsTable.comment,
+    CommentsTable.colour,
+)
+
+
+class Comment(TypedDict):
+    id: int
+    target_id: int
+    target_type: TargetType
+    userid: int
+    time: float
+    comment: str
+    colour: str | None
+
+
+async def create(
+    target_id: int,
+    target_type: TargetType,
+    userid: int,
+    time: float,
+    comment: str,
+    colour: str | None,
+) -> Comment:
+    """Create a new comment entry in the database."""
+    insert_stmt = insert(CommentsTable).values(
+        target_id=target_id,
+        target_type=target_type,
+        userid=userid,
+        time=time,
+        comment=comment,
+        colour=colour,
+    )
+    rec_id = await app.state.services.database.execute(insert_stmt)
+
+    select_stmt = select(*READ_PARAMS).where(CommentsTable.id == rec_id)
+    _comment = await app.state.services.database.fetch_one(select_stmt)
+
+    assert _comment is not None
+    return cast(Comment, _comment)
+
+
+class CommentWithUserPrivileges(Comment):
+    priv: int
+
+
+async def fetch_all_relevant_to_replay(
+    score_id: int | None = None,
+    map_set_id: int | None = None,
+    map_id: int | None = None,
+) -> list[CommentWithUserPrivileges]:
+    """\
+    Fetch all comments from the database where any of the following match:
+    - `score_id`
+    - `map_set_id`
+    - `map_id`
+    """
+    select_stmt = (
+        select(*READ_PARAMS, UsersTable.priv)  # unpack columns; passing the tuple raises under SQLAlchemy 2.0
+        .join(UsersTable, CommentsTable.userid == UsersTable.id)
+        .where(
+            or_(
+                and_(
+                    CommentsTable.target_type == TargetType.REPLAY,
+ CommentsTable.target_id == score_id, + ), + and_( + CommentsTable.target_type == TargetType.SONG, + CommentsTable.target_id == map_set_id, + ), + and_( + CommentsTable.target_type == TargetType.BEATMAP, + CommentsTable.target_id == map_id, + ), + ), + ) + ) + + comments = await app.state.services.database.fetch_all(select_stmt) + return cast(list[CommentWithUserPrivileges], comments) diff --git a/app/repositories/favourites.py b/app/repositories/favourites.py new file mode 100644 index 0000000..1c34547 --- /dev/null +++ b/app/repositories/favourites.py @@ -0,0 +1,75 @@ +from __future__ import annotations + +from typing import TypedDict +from typing import cast + +from sqlalchemy import Column +from sqlalchemy import Integer +from sqlalchemy import func +from sqlalchemy import insert +from sqlalchemy import select + +import app.state.services +from app.repositories import Base + + +class FavouritesTable(Base): + __tablename__ = "favourites" + + userid = Column("userid", Integer, nullable=False, primary_key=True) + setid = Column("setid", Integer, nullable=False, primary_key=True) + created_at = Column("created_at", Integer, nullable=False, server_default="0") + + +READ_PARAMS = ( + FavouritesTable.userid, + FavouritesTable.setid, + FavouritesTable.created_at, +) + + +class Favourite(TypedDict): + userid: int + setid: int + created_at: int + + +async def create( + userid: int, + setid: int, +) -> Favourite: + """Create a new favourite mapset entry in the database.""" + insert_stmt = insert(FavouritesTable).values( + userid=userid, + setid=setid, + created_at=func.unix_timestamp(), + ) + await app.state.services.database.execute(insert_stmt) + + select_stmt = ( + select(*READ_PARAMS) + .where(FavouritesTable.userid == userid) + .where(FavouritesTable.setid == setid) + ) + favourite = await app.state.services.database.fetch_one(select_stmt) + + assert favourite is not None + return cast(Favourite, favourite) + + +async def fetch_all(userid: int) -> list[Favourite]: + 
"""Fetch all favourites from a player."""
+    select_stmt = select(*READ_PARAMS).where(FavouritesTable.userid == userid)
+    favourites = await app.state.services.database.fetch_all(select_stmt)
+    return cast(list[Favourite], favourites)
+
+
+async def fetch_one(userid: int, setid: int) -> Favourite | None:
+    """Check if a mapset is already a favourite."""
+    select_stmt = (
+        select(*READ_PARAMS)
+        .where(FavouritesTable.userid == userid)
+        .where(FavouritesTable.setid == setid)
+    )
+    favourite = await app.state.services.database.fetch_one(select_stmt)
+    return cast(Favourite | None, favourite)
diff --git a/app/repositories/ingame_logins.py b/app/repositories/ingame_logins.py
new file mode 100644
index 0000000..0a44983
--- /dev/null
+++ b/app/repositories/ingame_logins.py
@@ -0,0 +1,128 @@
+from __future__ import annotations
+
+from datetime import date
+from datetime import datetime
+from typing import TypedDict
+from typing import cast
+
+from sqlalchemy import Column
+from sqlalchemy import Date
+from sqlalchemy import DateTime
+from sqlalchemy import Integer
+from sqlalchemy import String
+from sqlalchemy import func
+from sqlalchemy import insert
+from sqlalchemy import select
+
+import app.state.services
+from app.repositories import Base
+
+
+class IngameLoginsTable(Base):
+    __tablename__ = "ingame_logins"
+
+    id = Column("id", Integer, nullable=False, primary_key=True, autoincrement=True)
+    userid = Column("userid", Integer, nullable=False)
+    ip = Column("ip", String(45), nullable=False)
+    osu_ver = Column("osu_ver", Date, nullable=False)
+    osu_stream = Column("osu_stream", String(11), nullable=False)
+    datetime = Column("datetime", DateTime, nullable=False)
+
+
+READ_PARAMS = (
+    IngameLoginsTable.id,
+    IngameLoginsTable.userid,
+    IngameLoginsTable.ip,
+    IngameLoginsTable.osu_ver,
+    IngameLoginsTable.osu_stream,
+    IngameLoginsTable.datetime,
+)
+
+
+class IngameLogin(TypedDict):
+    id: int
+    userid: int  # int to match the Integer `userid` column and create()'s user_id: int
+    ip: str
+    osu_ver: date
+    osu_stream: str
+    datetime: datetime
+
+
+class InGameLoginUpdateFields(TypedDict, total=False):
+    userid: int  # int to match the Integer `userid` column
+    ip: str
+    osu_ver: date
+    osu_stream: str
+
+
+async def create(
+    user_id: int,
+    ip: str,
+    osu_ver: date,
+    osu_stream: str,
+) -> IngameLogin:
+    """Create a new login entry in the database."""
+    insert_stmt = insert(IngameLoginsTable).values(
+        userid=user_id,
+        ip=ip,
+        osu_ver=osu_ver,
+        osu_stream=osu_stream,
+        datetime=func.now(),
+    )
+    rec_id = await app.state.services.database.execute(insert_stmt)
+
+    select_stmt = select(*READ_PARAMS).where(IngameLoginsTable.id == rec_id)
+    ingame_login = await app.state.services.database.fetch_one(select_stmt)
+
+    assert ingame_login is not None
+    return cast(IngameLogin, ingame_login)
+
+
+async def fetch_one(id: int) -> IngameLogin | None:
+    """Fetch a login entry from the database."""
+    select_stmt = select(*READ_PARAMS).where(IngameLoginsTable.id == id)
+    ingame_login = await app.state.services.database.fetch_one(select_stmt)
+    return cast(IngameLogin | None, ingame_login)
+
+
+async def fetch_count(
+    user_id: int | None = None,
+    ip: str | None = None,
+) -> int:
+    """Fetch the number of logins in the database."""
+    select_stmt = select(func.count().label("count")).select_from(IngameLoginsTable)
+    if user_id is not None:
+        select_stmt = select_stmt.where(IngameLoginsTable.userid == user_id)
+    if ip is not None:
+        select_stmt = select_stmt.where(IngameLoginsTable.ip == ip)
+
+    rec = await app.state.services.database.fetch_one(select_stmt)
+    assert rec is not None
+    return cast(int, rec["count"])
+
+
+async def fetch_many(
+    user_id: int | None = None,
+    ip: str | None = None,
+    osu_ver: date | None = None,
+    osu_stream: str | None = None,
+    page: int | None = None,
+    page_size: int | None = None,
+) -> list[IngameLogin]:
+    """Fetch a list of logins from the database."""
+    select_stmt = select(*READ_PARAMS)
+
+    if user_id is not None:
+        select_stmt = select_stmt.where(IngameLoginsTable.userid == user_id)
+    if ip is not None:
select_stmt = select_stmt.where(IngameLoginsTable.ip == ip)
+    if osu_ver is not None:
+        select_stmt = select_stmt.where(IngameLoginsTable.osu_ver == osu_ver)
+    if osu_stream is not None:
+        select_stmt = select_stmt.where(IngameLoginsTable.osu_stream == osu_stream)
+
+    if page is not None and page_size is not None:
+        select_stmt = select_stmt.limit(page_size).offset((page - 1) * page_size)  # reassign: statements are generative
+
+    ingame_logins = await app.state.services.database.fetch_all(select_stmt)
+    return cast(list[IngameLogin], ingame_logins)
diff --git a/app/repositories/logs.py b/app/repositories/logs.py
new file mode 100644
index 0000000..a533fcb
--- /dev/null
+++ b/app/repositories/logs.py
@@ -0,0 +1,70 @@
+from __future__ import annotations
+
+from datetime import datetime
+from typing import TypedDict
+from typing import cast
+
+from sqlalchemy import Column
+from sqlalchemy import DateTime
+from sqlalchemy import Integer
+from sqlalchemy import String
+from sqlalchemy import func
+from sqlalchemy import insert
+from sqlalchemy import select
+
+import app.state.services
+from app.repositories import Base
+
+
+class LogTable(Base):
+    __tablename__ = "logs"
+
+    id = Column("id", Integer, nullable=False, primary_key=True, autoincrement=True)
+    _from = Column("from", Integer, nullable=False)
+    to = Column("to", Integer, nullable=False)
+    action = Column("action", String(32), nullable=False)
+    msg = Column("msg", String(2048, collation="utf8"), nullable=True)
+    time = Column("time", DateTime, nullable=False, onupdate=func.now())
+
+
+READ_PARAMS = (
+    LogTable.id,
+    LogTable._from.label("from"),
+    LogTable.to,
+    LogTable.action,
+    LogTable.msg,
+    LogTable.time,
+)
+
+
+class Log(TypedDict):
+    id: int
+    _from: int
+    to: int
+    action: str
+    msg: str | None
+    time: datetime
+
+
+async def create(
+    _from: int,
+    to: int,
+    action: str,
+    msg: str,
+) -> Log:
+    """Create a new log entry in the database."""
+    insert_stmt = insert(LogTable).values(
+        {
+            "from": _from,
+            "to": to,
+            "action": action,
+            "msg": msg,
+ "time": func.now(), + }, + ) + rec_id = await app.state.services.database.execute(insert_stmt) + + select_stmt = select(*READ_PARAMS).where(LogTable.id == rec_id) + log = await app.state.services.database.fetch_one(select_stmt) + assert log is not None + return cast(Log, log) diff --git a/app/repositories/mail.py b/app/repositories/mail.py new file mode 100644 index 0000000..6ce18e6 --- /dev/null +++ b/app/repositories/mail.py @@ -0,0 +1,113 @@ +from __future__ import annotations + +from typing import TypedDict +from typing import cast + +from sqlalchemy import Column +from sqlalchemy import Integer +from sqlalchemy import String +from sqlalchemy import func +from sqlalchemy import insert +from sqlalchemy import select +from sqlalchemy import update +from sqlalchemy.dialects.mysql import TINYINT + +import app.state.services +from app.repositories import Base + + +class MailTable(Base): + __tablename__ = "mail" + + id = Column("id", Integer, nullable=False, primary_key=True, autoincrement=True) + from_id = Column("from_id", Integer, nullable=False) + to_id = Column("to_id", Integer, nullable=False) + msg = Column("msg", String(2048, collation="utf8"), nullable=False) + time = Column("time", Integer, nullable=True) + read = Column("read", TINYINT(1), nullable=False, server_default="0") + + +READ_PARAMS = ( + MailTable.id, + MailTable.from_id, + MailTable.to_id, + MailTable.msg, + MailTable.time, + MailTable.read, +) + + +class Mail(TypedDict): + id: int + from_id: int + to_id: int + msg: str + time: int + read: bool + + +class MailWithUsernames(Mail): + from_name: str + to_name: str + + +async def create(from_id: int, to_id: int, msg: str) -> Mail: + """Create a new mail entry in the database.""" + insert_stmt = insert(MailTable).values( + from_id=from_id, + to_id=to_id, + msg=msg, + time=func.unix_timestamp(), + ) + rec_id = await app.state.services.database.execute(insert_stmt) + + select_stmt = select(*READ_PARAMS).where(MailTable.id == rec_id) + mail = await 
app.state.services.database.fetch_one(select_stmt) + assert mail is not None + return cast(Mail, mail) + + +from app.repositories.users import UsersTable + + +async def fetch_all_mail_to_user( + user_id: int, + read: bool | None = None, +) -> list[MailWithUsernames]: + """Fetch all of mail to a given target from the database.""" + from_subquery = select(UsersTable.name).where(UsersTable.id == MailTable.from_id) + to_subquery = select(UsersTable.name).where(UsersTable.id == MailTable.to_id) + + select_stmt = select( + *READ_PARAMS, + from_subquery.label("from_name"), + to_subquery.label("to_name"), + ).where(MailTable.to_id == user_id) + + if read is not None: + select_stmt = select_stmt.where(MailTable.read == read) + + mail = await app.state.services.database.fetch_all(select_stmt) + return cast(list[MailWithUsernames], mail) + + +async def mark_conversation_as_read(to_id: int, from_id: int) -> list[Mail]: + """Mark any mail in a user's conversation with another user as read.""" + select_stmt = select(*READ_PARAMS).where( + MailTable.to_id == to_id, + MailTable.from_id == from_id, + MailTable.read == False, + ) + mail = await app.state.services.database.fetch_all(select_stmt) + if not mail: + return [] + + update_stmt = ( + update(MailTable) + .where(MailTable.to_id == to_id) + .where(MailTable.from_id == from_id) + .where(MailTable.read == False) + .values(read=True) + ) + await app.state.services.database.execute(update_stmt) + return cast(list[Mail], mail) diff --git a/app/repositories/map_requests.py b/app/repositories/map_requests.py new file mode 100644 index 0000000..1b193f7 --- /dev/null +++ b/app/repositories/map_requests.py @@ -0,0 +1,97 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Any +from typing import TypedDict +from typing import cast + +from sqlalchemy import Column +from sqlalchemy import DateTime +from sqlalchemy import Integer +from sqlalchemy import func +from sqlalchemy import insert +from 
sqlalchemy import select
+from sqlalchemy import update
+from sqlalchemy.dialects.mysql import TINYINT
+
+import app.state.services
+from app.repositories import Base
+
+
+class MapRequestsTable(Base):
+    __tablename__ = "map_requests"
+
+    id = Column("id", Integer, nullable=False, primary_key=True, autoincrement=True)
+    map_id = Column("map_id", Integer, nullable=False)
+    player_id = Column("player_id", Integer, nullable=False)
+    datetime = Column("datetime", DateTime, nullable=False)
+    active = Column("active", TINYINT(1), nullable=False)
+
+
+READ_PARAMS = (
+    MapRequestsTable.id,
+    MapRequestsTable.map_id,
+    MapRequestsTable.player_id,
+    MapRequestsTable.datetime,
+    MapRequestsTable.active,  # required: MapRequest declares `active`, so fetched rows must carry it
+)
+
+
+class MapRequest(TypedDict):
+    id: int
+    map_id: int
+    player_id: int
+    datetime: datetime
+    active: bool
+
+
+async def create(
+    map_id: int,
+    player_id: int,
+    active: bool,
+) -> MapRequest:
+    """Create a new map request entry in the database."""
+    insert_stmt = insert(MapRequestsTable).values(
+        map_id=map_id,
+        player_id=player_id,
+        datetime=func.now(),
+        active=active,
+    )
+    rec_id = await app.state.services.database.execute(insert_stmt)
+
+    select_stmt = select(*READ_PARAMS).where(MapRequestsTable.id == rec_id)
+    map_request = await app.state.services.database.fetch_one(select_stmt)
+    assert map_request is not None
+    return cast(MapRequest, map_request)
+
+
+async def fetch_all(
+    map_id: int | None = None,
+    player_id: int | None = None,
+    active: bool | None = None,
+) -> list[MapRequest]:
+    """Fetch a list of map requests from the database."""
+    select_stmt = select(*READ_PARAMS)
+    if map_id is not None:
+        select_stmt = select_stmt.where(MapRequestsTable.map_id == map_id)
+    if player_id is not None:
+        select_stmt = select_stmt.where(MapRequestsTable.player_id == player_id)
+    if active is not None:
+        select_stmt = select_stmt.where(MapRequestsTable.active == active)
+
+    map_requests = await app.state.services.database.fetch_all(select_stmt)
+    return cast(list[MapRequest],
map_requests) + + +async def mark_batch_as_inactive(map_ids: list[Any]) -> list[MapRequest]: + """Mark a map request as inactive.""" + update_stmt = ( + update(MapRequestsTable) + .where(MapRequestsTable.map_id.in_(map_ids)) + .values(active=False) + ) + await app.state.services.database.execute(update_stmt) + + select_stmt = select(*READ_PARAMS).where(MapRequestsTable.map_id.in_(map_ids)) + map_requests = await app.state.services.database.fetch_all(select_stmt) + return cast(list[MapRequest], map_requests) diff --git a/app/repositories/maps.py b/app/repositories/maps.py new file mode 100644 index 0000000..a870b74 --- /dev/null +++ b/app/repositories/maps.py @@ -0,0 +1,370 @@ +from __future__ import annotations + +from datetime import datetime +from enum import StrEnum +from typing import TypedDict +from typing import cast + +from sqlalchemy import Column +from sqlalchemy import DateTime +from sqlalchemy import Enum +from sqlalchemy import Index +from sqlalchemy import Integer +from sqlalchemy import String +from sqlalchemy import delete +from sqlalchemy import func +from sqlalchemy import insert +from sqlalchemy import select +from sqlalchemy import update +from sqlalchemy.dialects.mysql import FLOAT +from sqlalchemy.dialects.mysql import TINYINT + +import app.state.services +from app._typing import UNSET +from app._typing import _UnsetSentinel +from app.repositories import Base + + +class MapServer(StrEnum): + OSU = "osu!" 
+ PRIVATE = "private" + + +class MapsTable(Base): + __tablename__ = "maps" + + server = Column( + Enum(MapServer, name="server"), + nullable=False, + server_default="osu!", + primary_key=True, + ) + id = Column(Integer, nullable=False, primary_key=True) + set_id = Column(Integer, nullable=False) + status = Column(Integer, nullable=False) + md5 = Column(String(32), nullable=False) + artist = Column(String(128, collation="utf8"), nullable=False) + title = Column(String(128, collation="utf8"), nullable=False) + version = Column(String(128, collation="utf8"), nullable=False) + creator = Column(String(19, collation="utf8"), nullable=False) + filename = Column(String(256, collation="utf8"), nullable=False) + last_update = Column(DateTime, nullable=False) + total_length = Column(Integer, nullable=False) + max_combo = Column(Integer, nullable=False) + frozen = Column(TINYINT(1), nullable=False, server_default="0") + plays = Column(Integer, nullable=False, server_default="0") + passes = Column(Integer, nullable=False, server_default="0") + mode = Column(TINYINT(1), nullable=False, server_default="0") + bpm = Column(FLOAT(12, 2), nullable=False, server_default="0.00") + cs = Column(FLOAT(4, 2), nullable=False, server_default="0.00") + ar = Column(FLOAT(4, 2), nullable=False, server_default="0.00") + od = Column(FLOAT(4, 2), nullable=False, server_default="0.00") + hp = Column(FLOAT(4, 2), nullable=False, server_default="0.00") + diff = Column(FLOAT(6, 3), nullable=False, server_default="0.000") + + __table_args__ = ( + Index("maps_set_id_index", "set_id"), + Index("maps_status_index", "status"), + Index("maps_filename_index", "filename"), + Index("maps_plays_index", "plays"), + Index("maps_mode_index", "mode"), + Index("maps_frozen_index", "frozen"), + Index("maps_md5_uindex", "md5", unique=True), + Index("maps_id_uindex", "id", unique=True), + ) + + +READ_PARAMS = ( + MapsTable.id, + MapsTable.server, + MapsTable.set_id, + MapsTable.status, + MapsTable.md5, + 
MapsTable.artist, + MapsTable.title, + MapsTable.version, + MapsTable.creator, + MapsTable.filename, + MapsTable.last_update, + MapsTable.total_length, + MapsTable.max_combo, + MapsTable.frozen, + MapsTable.plays, + MapsTable.passes, + MapsTable.mode, + MapsTable.bpm, + MapsTable.cs, + MapsTable.ar, + MapsTable.od, + MapsTable.hp, + MapsTable.diff, +) + + +class Map(TypedDict): + id: int + server: str + set_id: int + status: int + md5: str + artist: str + title: str + version: str + creator: str + filename: str + last_update: datetime + total_length: int + max_combo: int + frozen: bool + plays: int + passes: int + mode: int + bpm: float + cs: float + ar: float + od: float + hp: float + diff: float + + +async def create( + id: int, + server: str, + set_id: int, + status: int, + md5: str, + artist: str, + title: str, + version: str, + creator: str, + filename: str, + last_update: datetime, + total_length: int, + max_combo: int, + frozen: bool, + plays: int, + passes: int, + mode: int, + bpm: float, + cs: float, + ar: float, + od: float, + hp: float, + diff: float, +) -> Map: + """Create a new beatmap entry in the database.""" + insert_stmt = insert(MapsTable).values( + id=id, + server=server, + set_id=set_id, + status=status, + md5=md5, + artist=artist, + title=title, + version=version, + creator=creator, + filename=filename, + last_update=last_update, + total_length=total_length, + max_combo=max_combo, + frozen=frozen, + plays=plays, + passes=passes, + mode=mode, + bpm=bpm, + cs=cs, + ar=ar, + od=od, + hp=hp, + diff=diff, + ) + rec_id = await app.state.services.database.execute(insert_stmt) + + select_stmt = select(*READ_PARAMS).where(MapsTable.id == rec_id) + map = await app.state.services.database.fetch_one(select_stmt) + assert map is not None + return cast(Map, map) + + +async def fetch_one( + id: int | None = None, + md5: str | None = None, + filename: str | None = None, +) -> Map | None: + """Fetch a beatmap entry from the database.""" + if id is None and md5 
is None and filename is None: + raise ValueError("Must provide at least one parameter.") + + select_stmt = select(*READ_PARAMS) + if id is not None: + select_stmt = select_stmt.where(MapsTable.id == id) + if md5 is not None: + select_stmt = select_stmt.where(MapsTable.md5 == md5) + if filename is not None: + select_stmt = select_stmt.where(MapsTable.filename == filename) + + map = await app.state.services.database.fetch_one(select_stmt) + return cast(Map | None, map) + + +async def fetch_count( + server: str | None = None, + set_id: int | None = None, + status: int | None = None, + artist: str | None = None, + creator: str | None = None, + filename: str | None = None, + mode: int | None = None, + frozen: bool | None = None, +) -> int: + """Fetch the number of maps in the database.""" + select_stmt = select(func.count().label("count")).select_from(MapsTable) + if server is not None: + select_stmt = select_stmt.where(MapsTable.server == server) + if set_id is not None: + select_stmt = select_stmt.where(MapsTable.set_id == set_id) + if status is not None: + select_stmt = select_stmt.where(MapsTable.status == status) + if artist is not None: + select_stmt = select_stmt.where(MapsTable.artist == artist) + if creator is not None: + select_stmt = select_stmt.where(MapsTable.creator == creator) + if filename is not None: + select_stmt = select_stmt.where(MapsTable.filename == filename) + if mode is not None: + select_stmt = select_stmt.where(MapsTable.mode == mode) + if frozen is not None: + select_stmt = select_stmt.where(MapsTable.frozen == frozen) + + rec = await app.state.services.database.fetch_one(select_stmt) + assert rec is not None + return cast(int, rec["count"]) + + +async def fetch_many( + server: str | None = None, + set_id: int | None = None, + status: int | None = None, + artist: str | None = None, + creator: str | None = None, + filename: str | None = None, + mode: int | None = None, + frozen: bool | None = None, + page: int | None = None, + page_size: int 
| None = None, +) -> list[Map]: + """Fetch a list of maps from the database.""" + select_stmt = select(*READ_PARAMS) + if server is not None: + select_stmt = select_stmt.where(MapsTable.server == server) + if set_id is not None: + select_stmt = select_stmt.where(MapsTable.set_id == set_id) + if status is not None: + select_stmt = select_stmt.where(MapsTable.status == status) + if artist is not None: + select_stmt = select_stmt.where(MapsTable.artist == artist) + if creator is not None: + select_stmt = select_stmt.where(MapsTable.creator == creator) + if filename is not None: + select_stmt = select_stmt.where(MapsTable.filename == filename) + if mode is not None: + select_stmt = select_stmt.where(MapsTable.mode == mode) + if frozen is not None: + select_stmt = select_stmt.where(MapsTable.frozen == frozen) + + if page is not None and page_size is not None: + select_stmt = select_stmt.limit(page_size).offset((page - 1) * page_size) + + maps = await app.state.services.database.fetch_all(select_stmt) + return cast(list[Map], maps) + + +async def partial_update( + id: int, + server: str | _UnsetSentinel = UNSET, + set_id: int | _UnsetSentinel = UNSET, + status: int | _UnsetSentinel = UNSET, + md5: str | _UnsetSentinel = UNSET, + artist: str | _UnsetSentinel = UNSET, + title: str | _UnsetSentinel = UNSET, + version: str | _UnsetSentinel = UNSET, + creator: str | _UnsetSentinel = UNSET, + filename: str | _UnsetSentinel = UNSET, + last_update: datetime | _UnsetSentinel = UNSET, + total_length: int | _UnsetSentinel = UNSET, + max_combo: int | _UnsetSentinel = UNSET, + frozen: bool | _UnsetSentinel = UNSET, + plays: int | _UnsetSentinel = UNSET, + passes: int | _UnsetSentinel = UNSET, + mode: int | _UnsetSentinel = UNSET, + bpm: float | _UnsetSentinel = UNSET, + cs: float | _UnsetSentinel = UNSET, + ar: float | _UnsetSentinel = UNSET, + od: float | _UnsetSentinel = UNSET, + hp: float | _UnsetSentinel = UNSET, + diff: float | _UnsetSentinel = UNSET, +) -> Map | None: + 
"""Update a beatmap entry in the database.""" + update_stmt = update(MapsTable).where(MapsTable.id == id) + if not isinstance(server, _UnsetSentinel): + update_stmt = update_stmt.values(server=server) + if not isinstance(set_id, _UnsetSentinel): + update_stmt = update_stmt.values(set_id=set_id) + if not isinstance(status, _UnsetSentinel): + update_stmt = update_stmt.values(status=status) + if not isinstance(md5, _UnsetSentinel): + update_stmt = update_stmt.values(md5=md5) + if not isinstance(artist, _UnsetSentinel): + update_stmt = update_stmt.values(artist=artist) + if not isinstance(title, _UnsetSentinel): + update_stmt = update_stmt.values(title=title) + if not isinstance(version, _UnsetSentinel): + update_stmt = update_stmt.values(version=version) + if not isinstance(creator, _UnsetSentinel): + update_stmt = update_stmt.values(creator=creator) + if not isinstance(filename, _UnsetSentinel): + update_stmt = update_stmt.values(filename=filename) + if not isinstance(last_update, _UnsetSentinel): + update_stmt = update_stmt.values(last_update=last_update) + if not isinstance(total_length, _UnsetSentinel): + update_stmt = update_stmt.values(total_length=total_length) + if not isinstance(max_combo, _UnsetSentinel): + update_stmt = update_stmt.values(max_combo=max_combo) + if not isinstance(frozen, _UnsetSentinel): + update_stmt = update_stmt.values(frozen=frozen) + if not isinstance(plays, _UnsetSentinel): + update_stmt = update_stmt.values(plays=plays) + if not isinstance(passes, _UnsetSentinel): + update_stmt = update_stmt.values(passes=passes) + if not isinstance(mode, _UnsetSentinel): + update_stmt = update_stmt.values(mode=mode) + if not isinstance(bpm, _UnsetSentinel): + update_stmt = update_stmt.values(bpm=bpm) + if not isinstance(cs, _UnsetSentinel): + update_stmt = update_stmt.values(cs=cs) + if not isinstance(ar, _UnsetSentinel): + update_stmt = update_stmt.values(ar=ar) + if not isinstance(od, _UnsetSentinel): + update_stmt = update_stmt.values(od=od) + if 
not isinstance(hp, _UnsetSentinel): + update_stmt = update_stmt.values(hp=hp) + if not isinstance(diff, _UnsetSentinel): + update_stmt = update_stmt.values(diff=diff) + + await app.state.services.database.execute(update_stmt) + + select_stmt = select(*READ_PARAMS).where(MapsTable.id == id) + map = await app.state.services.database.fetch_one(select_stmt) + return cast(Map | None, map) + + +async def delete_one(id: int) -> Map | None: + """Delete a beatmap entry from the database.""" + select_stmt = select(*READ_PARAMS).where(MapsTable.id == id) + map = await app.state.services.database.fetch_one(select_stmt) + if map is None: + return None + + delete_stmt = delete(MapsTable).where(MapsTable.id == id) + await app.state.services.database.execute(delete_stmt) + return cast(Map, map) diff --git a/app/repositories/ratings.py b/app/repositories/ratings.py new file mode 100644 index 0000000..a603555 --- /dev/null +++ b/app/repositories/ratings.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from typing import TypedDict +from typing import cast + +from sqlalchemy import Column +from sqlalchemy import Integer +from sqlalchemy import String +from sqlalchemy import insert +from sqlalchemy import select +from sqlalchemy.dialects.mysql import TINYINT + +import app.state.services +from app.repositories import Base + + +class RatingsTable(Base): + __tablename__ = "ratings" + + userid = Column("userid", Integer, nullable=False, primary_key=True) + map_md5 = Column("map_md5", String(32), nullable=False, primary_key=True) + rating = Column("rating", TINYINT(2), nullable=False) + + +READ_PARAMS = ( + RatingsTable.userid, + RatingsTable.map_md5, + RatingsTable.rating, +) + + +class Rating(TypedDict): + userid: int + map_md5: str + rating: int + + +async def create(userid: int, map_md5: str, rating: int) -> Rating: + """Create a new rating.""" + insert_stmt = insert(RatingsTable).values( + userid=userid, + map_md5=map_md5, + rating=rating, + ) + await 
app.state.services.database.execute(insert_stmt) + + select_stmt = ( + select(*READ_PARAMS) + .where(RatingsTable.userid == userid) + .where(RatingsTable.map_md5 == map_md5) + ) + _rating = await app.state.services.database.fetch_one(select_stmt) + assert _rating is not None + return cast(Rating, _rating) + + +async def fetch_many( + userid: int | None = None, + map_md5: str | None = None, + page: int | None = 1, + page_size: int | None = 50, +) -> list[Rating]: + """Fetch multiple ratings, optionally with filter params and pagination.""" + select_stmt = select(*READ_PARAMS) + if userid is not None: + select_stmt = select_stmt.where(RatingsTable.userid == userid) + if map_md5 is not None: + select_stmt = select_stmt.where(RatingsTable.map_md5 == map_md5) + + if page is not None and page_size is not None: + select_stmt = select_stmt.limit(page_size).offset((page - 1) * page_size) + + ratings = await app.state.services.database.fetch_all(select_stmt) + return cast(list[Rating], ratings) + + +async def fetch_one(userid: int, map_md5: str) -> Rating | None: + """Fetch a single rating for a given user and map.""" + select_stmt = ( + select(*READ_PARAMS) + .where(RatingsTable.userid == userid) + .where(RatingsTable.map_md5 == map_md5) + ) + rating = await app.state.services.database.fetch_one(select_stmt) + return cast(Rating | None, rating) diff --git a/app/repositories/scores.py b/app/repositories/scores.py new file mode 100644 index 0000000..94868e8 --- /dev/null +++ b/app/repositories/scores.py @@ -0,0 +1,246 @@ +from __future__ import annotations + +from datetime import datetime +from typing import TypedDict +from typing import cast + +from sqlalchemy import Column +from sqlalchemy import DateTime +from sqlalchemy import Index +from sqlalchemy import Integer +from sqlalchemy import String +from sqlalchemy import func +from sqlalchemy import insert +from sqlalchemy import select +from sqlalchemy import update +from sqlalchemy.dialects.mysql import FLOAT +from 
sqlalchemy.dialects.mysql import TINYINT + +import app.state.services +from app._typing import UNSET +from app._typing import _UnsetSentinel +from app.repositories import Base + + +class ScoresTable(Base): + __tablename__ = "scores" + + id = Column("id", Integer, nullable=False, primary_key=True, autoincrement=True) + map_md5 = Column("map_md5", String(32), nullable=False) + score = Column("score", Integer, nullable=False) + pp = Column("pp", FLOAT(precision=6, scale=3), nullable=False) + acc = Column("acc", FLOAT(precision=6, scale=3), nullable=False) + max_combo = Column("max_combo", Integer, nullable=False) + mods = Column("mods", Integer, nullable=False) + n300 = Column("n300", Integer, nullable=False) + n100 = Column("n100", Integer, nullable=False) + n50 = Column("n50", Integer, nullable=False) + nmiss = Column("nmiss", Integer, nullable=False) + ngeki = Column("ngeki", Integer, nullable=False) + nkatu = Column("nkatu", Integer, nullable=False) + grade = Column("grade", String(2), nullable=False, server_default="N") + status = Column("status", Integer, nullable=False) + mode = Column("mode", Integer, nullable=False) + play_time = Column("play_time", DateTime, nullable=False) + time_elapsed = Column("time_elapsed", Integer, nullable=False) + client_flags = Column("client_flags", Integer, nullable=False) + userid = Column("userid", Integer, nullable=False) + perfect = Column("perfect", TINYINT(1), nullable=False) + online_checksum = Column("online_checksum", String(32), nullable=False) + + __table_args__ = ( + Index("scores_map_md5_index", map_md5), + Index("scores_score_index", score), + Index("scores_pp_index", pp), + Index("scores_mods_index", mods), + Index("scores_status_index", status), + Index("scores_mode_index", mode), + Index("scores_play_time_index", play_time), + Index("scores_userid_index", userid), + Index("scores_online_checksum_index", online_checksum), + ) + + +READ_PARAMS = ( + ScoresTable.id, + ScoresTable.map_md5, + ScoresTable.score, + 
ScoresTable.pp, + ScoresTable.acc, + ScoresTable.max_combo, + ScoresTable.mods, + ScoresTable.n300, + ScoresTable.n100, + ScoresTable.n50, + ScoresTable.nmiss, + ScoresTable.ngeki, + ScoresTable.nkatu, + ScoresTable.grade, + ScoresTable.status, + ScoresTable.mode, + ScoresTable.play_time, + ScoresTable.time_elapsed, + ScoresTable.client_flags, + ScoresTable.userid, + ScoresTable.perfect, + ScoresTable.online_checksum, +) + + +class Score(TypedDict): + id: int + map_md5: str + score: int + pp: float + acc: float + max_combo: int + mods: int + n300: int + n100: int + n50: int + nmiss: int + ngeki: int + nkatu: int + grade: str + status: int + mode: int + play_time: datetime + time_elapsed: int + client_flags: int + userid: int + perfect: int + online_checksum: str + + +async def create( + map_md5: str, + score: int, + pp: float, + acc: float, + max_combo: int, + mods: int, + n300: int, + n100: int, + n50: int, + nmiss: int, + ngeki: int, + nkatu: int, + grade: str, + status: int, + mode: int, + play_time: datetime, + time_elapsed: int, + client_flags: int, + user_id: int, + perfect: int, + online_checksum: str, +) -> Score: + insert_stmt = insert(ScoresTable).values( + map_md5=map_md5, + score=score, + pp=pp, + acc=acc, + max_combo=max_combo, + mods=mods, + n300=n300, + n100=n100, + n50=n50, + nmiss=nmiss, + ngeki=ngeki, + nkatu=nkatu, + grade=grade, + status=status, + mode=mode, + play_time=play_time, + time_elapsed=time_elapsed, + client_flags=client_flags, + userid=user_id, + perfect=perfect, + online_checksum=online_checksum, + ) + rec_id = await app.state.services.database.execute(insert_stmt) + + select_stmt = select(*READ_PARAMS).where(ScoresTable.id == rec_id) + _score = await app.state.services.database.fetch_one(select_stmt) + assert _score is not None + return cast(Score, _score) + + +async def fetch_one(id: int) -> Score | None: + select_stmt = select(*READ_PARAMS).where(ScoresTable.id == id) + _score = await 
app.state.services.database.fetch_one(select_stmt) + return cast(Score | None, _score) + + +async def fetch_count( + map_md5: str | None = None, + mods: int | None = None, + status: int | None = None, + mode: int | None = None, + user_id: int | None = None, +) -> int: + select_stmt = select(func.count().label("count")).select_from(ScoresTable) + if map_md5 is not None: + select_stmt = select_stmt.where(ScoresTable.map_md5 == map_md5) + if mods is not None: + select_stmt = select_stmt.where(ScoresTable.mods == mods) + if status is not None: + select_stmt = select_stmt.where(ScoresTable.status == status) + if mode is not None: + select_stmt = select_stmt.where(ScoresTable.mode == mode) + if user_id is not None: + select_stmt = select_stmt.where(ScoresTable.userid == user_id) + + rec = await app.state.services.database.fetch_one(select_stmt) + assert rec is not None + return cast(int, rec["count"]) + + +async def fetch_many( + map_md5: str | None = None, + mods: int | None = None, + status: int | None = None, + mode: int | None = None, + user_id: int | None = None, + page: int | None = None, + page_size: int | None = None, +) -> list[Score]: + select_stmt = select(*READ_PARAMS) + if map_md5 is not None: + select_stmt = select_stmt.where(ScoresTable.map_md5 == map_md5) + if mods is not None: + select_stmt = select_stmt.where(ScoresTable.mods == mods) + if status is not None: + select_stmt = select_stmt.where(ScoresTable.status == status) + if mode is not None: + select_stmt = select_stmt.where(ScoresTable.mode == mode) + if user_id is not None: + select_stmt = select_stmt.where(ScoresTable.userid == user_id) + + if page is not None and page_size is not None: + select_stmt = select_stmt.limit(page_size).offset((page - 1) * page_size) + + scores = await app.state.services.database.fetch_all(select_stmt) + return cast(list[Score], scores) + + +async def partial_update( + id: int, + pp: float | _UnsetSentinel = UNSET, + status: int | _UnsetSentinel = UNSET, +) -> Score | 
None: + """Update an existing score.""" + update_stmt = update(ScoresTable).where(ScoresTable.id == id) + if not isinstance(pp, _UnsetSentinel): + update_stmt = update_stmt.values(pp=pp) + if not isinstance(status, _UnsetSentinel): + update_stmt = update_stmt.values(status=status) + + await app.state.services.database.execute(update_stmt) + + select_stmt = select(*READ_PARAMS).where(ScoresTable.id == id) + _score = await app.state.services.database.fetch_one(select_stmt) + return cast(Score | None, _score) + + +# TODO: delete diff --git a/app/repositories/stats.py b/app/repositories/stats.py new file mode 100644 index 0000000..ee8a9f7 --- /dev/null +++ b/app/repositories/stats.py @@ -0,0 +1,237 @@ +from __future__ import annotations + +from typing import TypedDict +from typing import cast + +from sqlalchemy import Column +from sqlalchemy import Index +from sqlalchemy import Integer +from sqlalchemy import func +from sqlalchemy import insert +from sqlalchemy import select +from sqlalchemy import update +from sqlalchemy.dialects.mysql import FLOAT +from sqlalchemy.dialects.mysql import TINYINT + +import app.state.services +from app._typing import UNSET +from app._typing import _UnsetSentinel +from app.repositories import Base + + +class StatsTable(Base): + __tablename__ = "stats" + + id = Column("id", Integer, nullable=False, primary_key=True, autoincrement=True) + mode = Column("mode", TINYINT(1), primary_key=True) + tscore = Column("tscore", Integer, nullable=False, server_default="0") + rscore = Column("rscore", Integer, nullable=False, server_default="0") + pp = Column("pp", Integer, nullable=False, server_default="0") + plays = Column("plays", Integer, nullable=False, server_default="0") + playtime = Column("playtime", Integer, nullable=False, server_default="0") + acc = Column( + "acc", + FLOAT(precision=6, scale=3), + nullable=False, + server_default="0.000", + ) + max_combo = Column("max_combo", Integer, nullable=False, server_default="0") + total_hits = 
Column("total_hits", Integer, nullable=False, server_default="0") + replay_views = Column("replay_views", Integer, nullable=False, server_default="0") + xh_count = Column("xh_count", Integer, nullable=False, server_default="0") + x_count = Column("x_count", Integer, nullable=False, server_default="0") + sh_count = Column("sh_count", Integer, nullable=False, server_default="0") + s_count = Column("s_count", Integer, nullable=False, server_default="0") + a_count = Column("a_count", Integer, nullable=False, server_default="0") + + __table_args__ = ( + Index("stats_mode_index", mode), + Index("stats_pp_index", pp), + Index("stats_tscore_index", tscore), + Index("stats_rscore_index", rscore), + ) + + +READ_PARAMS = ( + StatsTable.id, + StatsTable.mode, + StatsTable.tscore, + StatsTable.rscore, + StatsTable.pp, + StatsTable.plays, + StatsTable.playtime, + StatsTable.acc, + StatsTable.max_combo, + StatsTable.total_hits, + StatsTable.replay_views, + StatsTable.xh_count, + StatsTable.x_count, + StatsTable.sh_count, + StatsTable.s_count, + StatsTable.a_count, +) + + +class Stat(TypedDict): + id: int + mode: int + tscore: int + rscore: int + pp: int + plays: int + playtime: int + acc: float + max_combo: int + total_hits: int + replay_views: int + xh_count: int + x_count: int + sh_count: int + s_count: int + a_count: int + + +async def create(player_id: int, mode: int) -> Stat: + """Create a new player stats entry in the database.""" + insert_stmt = insert(StatsTable).values(id=player_id, mode=mode) + rec_id = await app.state.services.database.execute(insert_stmt) + + select_stmt = select(*READ_PARAMS).where(StatsTable.id == rec_id) + stat = await app.state.services.database.fetch_one(select_stmt) + assert stat is not None + return cast(Stat, stat) + + +async def create_all_modes(player_id: int) -> list[Stat]: + """Create new player stats entries for each game mode in the database.""" + insert_stmt = insert(StatsTable).values( + [ + {"id": player_id, "mode": mode} + for mode 
in ( + 0, # vn!std + 1, # vn!taiko + 2, # vn!catch + 3, # vn!mania + 4, # rx!std + 5, # rx!taiko + 6, # rx!catch + 8, # ap!std + ) + ], + ) + await app.state.services.database.execute(insert_stmt) + + select_stmt = select(*READ_PARAMS).where(StatsTable.id == player_id) + stats = await app.state.services.database.fetch_all(select_stmt) + return cast(list[Stat], stats) + + +async def fetch_one(player_id: int, mode: int) -> Stat | None: + """Fetch a player stats entry from the database.""" + select_stmt = ( + select(*READ_PARAMS) + .where(StatsTable.id == player_id) + .where(StatsTable.mode == mode) + ) + stat = await app.state.services.database.fetch_one(select_stmt) + return cast(Stat | None, stat) + + +async def fetch_count( + player_id: int | None = None, + mode: int | None = None, +) -> int: + select_stmt = select(func.count().label("count")).select_from(StatsTable) + if player_id is not None: + select_stmt = select_stmt.where(StatsTable.id == player_id) + if mode is not None: + select_stmt = select_stmt.where(StatsTable.mode == mode) + + rec = await app.state.services.database.fetch_one(select_stmt) + assert rec is not None + return cast(int, rec["count"]) + + +async def fetch_many( + player_id: int | None = None, + mode: int | None = None, + page: int | None = None, + page_size: int | None = None, +) -> list[Stat]: + select_stmt = select(*READ_PARAMS) + if player_id is not None: + select_stmt = select_stmt.where(StatsTable.id == player_id) + if mode is not None: + select_stmt = select_stmt.where(StatsTable.mode == mode) + if page is not None and page_size is not None: + select_stmt = select_stmt.limit(page_size).offset((page - 1) * page_size) + + stats = await app.state.services.database.fetch_all(select_stmt) + return cast(list[Stat], stats) + + +async def partial_update( + player_id: int, + mode: int, + tscore: int | _UnsetSentinel = UNSET, + rscore: int | _UnsetSentinel = UNSET, + pp: int | _UnsetSentinel = UNSET, + plays: int | _UnsetSentinel = UNSET, + 
playtime: int | _UnsetSentinel = UNSET, + acc: float | _UnsetSentinel = UNSET, + max_combo: int | _UnsetSentinel = UNSET, + total_hits: int | _UnsetSentinel = UNSET, + replay_views: int | _UnsetSentinel = UNSET, + xh_count: int | _UnsetSentinel = UNSET, + x_count: int | _UnsetSentinel = UNSET, + sh_count: int | _UnsetSentinel = UNSET, + s_count: int | _UnsetSentinel = UNSET, + a_count: int | _UnsetSentinel = UNSET, +) -> Stat | None: + """Update a player stats entry in the database.""" + update_stmt = ( + update(StatsTable) + .where(StatsTable.id == player_id) + .where(StatsTable.mode == mode) + ) + if not isinstance(tscore, _UnsetSentinel): + update_stmt = update_stmt.values(tscore=tscore) + if not isinstance(rscore, _UnsetSentinel): + update_stmt = update_stmt.values(rscore=rscore) + if not isinstance(pp, _UnsetSentinel): + update_stmt = update_stmt.values(pp=pp) + if not isinstance(plays, _UnsetSentinel): + update_stmt = update_stmt.values(plays=plays) + if not isinstance(playtime, _UnsetSentinel): + update_stmt = update_stmt.values(playtime=playtime) + if not isinstance(acc, _UnsetSentinel): + update_stmt = update_stmt.values(acc=acc) + if not isinstance(max_combo, _UnsetSentinel): + update_stmt = update_stmt.values(max_combo=max_combo) + if not isinstance(total_hits, _UnsetSentinel): + update_stmt = update_stmt.values(total_hits=total_hits) + if not isinstance(replay_views, _UnsetSentinel): + update_stmt = update_stmt.values(replay_views=replay_views) + if not isinstance(xh_count, _UnsetSentinel): + update_stmt = update_stmt.values(xh_count=xh_count) + if not isinstance(x_count, _UnsetSentinel): + update_stmt = update_stmt.values(x_count=x_count) + if not isinstance(sh_count, _UnsetSentinel): + update_stmt = update_stmt.values(sh_count=sh_count) + if not isinstance(s_count, _UnsetSentinel): + update_stmt = update_stmt.values(s_count=s_count) + if not isinstance(a_count, _UnsetSentinel): + update_stmt = update_stmt.values(a_count=a_count) + + await 
app.state.services.database.execute(update_stmt) + + select_stmt = ( + select(*READ_PARAMS) + .where(StatsTable.id == player_id) + .where(StatsTable.mode == mode) + ) + stat = await app.state.services.database.fetch_one(select_stmt) + return cast(Stat | None, stat) + + +# TODO: delete? diff --git a/app/repositories/tourney_pool_maps.py b/app/repositories/tourney_pool_maps.py new file mode 100644 index 0000000..476da1d --- /dev/null +++ b/app/repositories/tourney_pool_maps.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +from typing import TypedDict +from typing import cast + +from sqlalchemy import Column +from sqlalchemy import Index +from sqlalchemy import Integer +from sqlalchemy import delete +from sqlalchemy import insert +from sqlalchemy import select + +import app.state.services +from app.repositories import Base + + +class TourneyPoolMapsTable(Base): + __tablename__ = "tourney_pool_maps" + + map_id = Column("map_id", Integer, nullable=False, primary_key=True) + pool_id = Column("pool_id", Integer, nullable=False, primary_key=True) + mods = Column("mods", Integer, nullable=False) + slot = Column("slot", Integer, nullable=False) + + __table_args__ = ( + Index("tourney_pool_maps_mods_slot_index", mods, slot), + Index("tourney_pool_maps_tourney_pools_id_fk", pool_id), + ) + + +READ_PARAMS = ( + TourneyPoolMapsTable.map_id, + TourneyPoolMapsTable.pool_id, + TourneyPoolMapsTable.mods, + TourneyPoolMapsTable.slot, +) + + +class TourneyPoolMap(TypedDict): + map_id: int + pool_id: int + mods: int + slot: int + + +async def create(map_id: int, pool_id: int, mods: int, slot: int) -> TourneyPoolMap: + """Create a new map pool entry in the database.""" + insert_stmt = insert(TourneyPoolMapsTable).values( + map_id=map_id, + pool_id=pool_id, + mods=mods, + slot=slot, + ) + await app.state.services.database.execute(insert_stmt) + + select_stmt = ( + select(*READ_PARAMS) + .where(TourneyPoolMapsTable.map_id == map_id) + .where(TourneyPoolMapsTable.pool_id == 
pool_id) + ) + tourney_pool_map = await app.state.services.database.fetch_one(select_stmt) + assert tourney_pool_map is not None + return cast(TourneyPoolMap, tourney_pool_map) + + +async def fetch_many( + pool_id: int | None = None, + mods: int | None = None, + slot: int | None = None, + page: int | None = 1, + page_size: int | None = 50, +) -> list[TourneyPoolMap]: + """Fetch a list of map pool entries from the database.""" + select_stmt = select(*READ_PARAMS) + if pool_id is not None: + select_stmt = select_stmt.where(TourneyPoolMapsTable.pool_id == pool_id) + if mods is not None: + select_stmt = select_stmt.where(TourneyPoolMapsTable.mods == mods) + if slot is not None: + select_stmt = select_stmt.where(TourneyPoolMapsTable.slot == slot) + if page and page_size: + select_stmt = select_stmt.limit(page_size).offset((page - 1) * page_size) + + tourney_pool_maps = await app.state.services.database.fetch_all(select_stmt) + return cast(list[TourneyPoolMap], tourney_pool_maps) + + +async def fetch_by_pool_and_pick( + pool_id: int, + mods: int, + slot: int, +) -> TourneyPoolMap | None: + """Fetch a map pool entry by pool and pick from the database.""" + select_stmt = ( + select(*READ_PARAMS) + .where(TourneyPoolMapsTable.pool_id == pool_id) + .where(TourneyPoolMapsTable.mods == mods) + .where(TourneyPoolMapsTable.slot == slot) + ) + tourney_pool_map = await app.state.services.database.fetch_one(select_stmt) + return cast(TourneyPoolMap | None, tourney_pool_map) + + +async def delete_map_from_pool(pool_id: int, map_id: int) -> TourneyPoolMap | None: + """Delete a map pool entry from a given tourney pool from the database.""" + select_stmt = ( + select(*READ_PARAMS) + .where(TourneyPoolMapsTable.pool_id == pool_id) + .where(TourneyPoolMapsTable.map_id == map_id) + ) + + tourney_pool_map = await app.state.services.database.fetch_one(select_stmt) + if tourney_pool_map is None: + return None + + delete_stmt = ( + delete(TourneyPoolMapsTable) + 
.where(TourneyPoolMapsTable.pool_id == pool_id) + .where(TourneyPoolMapsTable.map_id == map_id) + ) + + await app.state.services.database.execute(delete_stmt) + return cast(TourneyPoolMap, tourney_pool_map) + + +async def delete_all_in_pool(pool_id: int) -> list[TourneyPoolMap]: + """Delete all map pool entries from a given tourney pool from the database.""" + select_stmt = select(*READ_PARAMS).where(TourneyPoolMapsTable.pool_id == pool_id) + tourney_pool_maps = await app.state.services.database.fetch_all(select_stmt) + if not tourney_pool_maps: + return [] + + delete_stmt = delete(TourneyPoolMapsTable).where( + TourneyPoolMapsTable.pool_id == pool_id, + ) + await app.state.services.database.execute(delete_stmt) + return cast(list[TourneyPoolMap], tourney_pool_maps) diff --git a/app/repositories/tourney_pools.py b/app/repositories/tourney_pools.py new file mode 100644 index 0000000..af11127 --- /dev/null +++ b/app/repositories/tourney_pools.py @@ -0,0 +1,104 @@ +from __future__ import annotations + +from datetime import datetime +from typing import TypedDict +from typing import cast + +from sqlalchemy import Column +from sqlalchemy import DateTime +from sqlalchemy import Index +from sqlalchemy import Integer +from sqlalchemy import String +from sqlalchemy import delete +from sqlalchemy import func +from sqlalchemy import insert +from sqlalchemy import select + +import app.state.services +from app.repositories import Base + + +class TourneyPoolsTable(Base): + __tablename__ = "tourney_pools" + + id = Column("id", Integer, nullable=False, primary_key=True, autoincrement=True) + name = Column("name", String(16), nullable=False) + created_at = Column("created_at", DateTime, nullable=False) + created_by = Column("created_by", Integer, nullable=False) + + __table_args__ = (Index("tourney_pools_users_id_fk", created_by),) + + +class TourneyPool(TypedDict): + id: int + name: str + created_at: datetime + created_by: int + + +READ_PARAMS = ( + TourneyPoolsTable.id, + 
TourneyPoolsTable.name, + TourneyPoolsTable.created_at, + TourneyPoolsTable.created_by, +) + + +async def create(name: str, created_by: int) -> TourneyPool: + """Create a new tourney pool entry in the database.""" + insert_stmt = insert(TourneyPoolsTable).values( + name=name, + created_at=func.now(), + created_by=created_by, + ) + rec_id = await app.state.services.database.execute(insert_stmt) + + select_stmt = select(*READ_PARAMS).where(TourneyPoolsTable.id == rec_id) + tourney_pool = await app.state.services.database.fetch_one(select_stmt) + assert tourney_pool is not None + return cast(TourneyPool, tourney_pool) + + +async def fetch_many( + id: int | None = None, + created_by: int | None = None, + page: int | None = 1, + page_size: int | None = 50, +) -> list[TourneyPool]: + """Fetch many tourney pools from the database.""" + select_stmt = select(*READ_PARAMS) + if id is not None: + select_stmt = select_stmt.where(TourneyPoolsTable.id == id) + if created_by is not None: + select_stmt = select_stmt.where(TourneyPoolsTable.created_by == created_by) + if page and page_size: + select_stmt = select_stmt.limit(page_size).offset((page - 1) * page_size) + + tourney_pools = await app.state.services.database.fetch_all(select_stmt) + return cast(list[TourneyPool], tourney_pools) + + +async def fetch_by_name(name: str) -> TourneyPool | None: + """Fetch a tourney pool by name from the database.""" + select_stmt = select(*READ_PARAMS).where(TourneyPoolsTable.name == name) + tourney_pool = await app.state.services.database.fetch_one(select_stmt) + return cast(TourneyPool | None, tourney_pool) + + +async def fetch_by_id(id: int) -> TourneyPool | None: + """Fetch a tourney pool by id from the database.""" + select_stmt = select(*READ_PARAMS).where(TourneyPoolsTable.id == id) + tourney_pool = await app.state.services.database.fetch_one(select_stmt) + return cast(TourneyPool | None, tourney_pool) + + +async def delete_by_id(id: int) -> TourneyPool | None: + """Delete a tourney 
pool by id from the database.""" + select_stmt = select(*READ_PARAMS).where(TourneyPoolsTable.id == id) + tourney_pool = await app.state.services.database.fetch_one(select_stmt) + if tourney_pool is None: + return None + + delete_stmt = delete(TourneyPoolsTable).where(TourneyPoolsTable.id == id) + await app.state.services.database.execute(delete_stmt) + return cast(TourneyPool, tourney_pool) diff --git a/app/repositories/user_achievements.py b/app/repositories/user_achievements.py new file mode 100644 index 0000000..1c3065a --- /dev/null +++ b/app/repositories/user_achievements.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +from typing import TypedDict +from typing import cast + +from sqlalchemy import Column +from sqlalchemy import Index +from sqlalchemy import Integer +from sqlalchemy import insert +from sqlalchemy import select + +import app.state.services +from app._typing import UNSET +from app._typing import _UnsetSentinel +from app.repositories import Base + + +class UserAchievementsTable(Base): + __tablename__ = "user_achievements" + + userid = Column("userid", Integer, nullable=False, primary_key=True) + achid = Column("achid", Integer, nullable=False, primary_key=True) + + __table_args__ = ( + Index("user_achievements_achid_index", achid), + Index("user_achievements_userid_index", userid), + ) + + +READ_PARAMS = ( + UserAchievementsTable.userid, + UserAchievementsTable.achid, +) + + +class UserAchievement(TypedDict): + userid: int + achid: int + + +async def create(user_id: int, achievement_id: int) -> UserAchievement: + """Creates a new user achievement entry.""" + insert_stmt = insert(UserAchievementsTable).values( + userid=user_id, + achid=achievement_id, + ) + await app.state.services.database.execute(insert_stmt) + + select_stmt = ( + select(*READ_PARAMS) + .where(UserAchievementsTable.userid == user_id) + .where(UserAchievementsTable.achid == achievement_id) + ) + user_achievement = await 
app.state.services.database.fetch_one(select_stmt) + assert user_achievement is not None + return cast(UserAchievement, user_achievement) + + +async def fetch_many( + user_id: int | _UnsetSentinel = UNSET, + achievement_id: int | _UnsetSentinel = UNSET, + page: int | None = None, + page_size: int | None = None, +) -> list[UserAchievement]: + """Fetch a list of user achievements.""" + select_stmt = select(*READ_PARAMS) + if not isinstance(user_id, _UnsetSentinel): + select_stmt = select_stmt.where(UserAchievementsTable.userid == user_id) + if not isinstance(achievement_id, _UnsetSentinel): + select_stmt = select_stmt.where(UserAchievementsTable.achid == achievement_id) + + if page and page_size: + select_stmt = select_stmt.limit(page_size).offset((page - 1) * page_size) + + user_achievements = await app.state.services.database.fetch_all(select_stmt) + return cast(list[UserAchievement], user_achievements) + + +# TODO: delete? diff --git a/app/repositories/users.py b/app/repositories/users.py new file mode 100644 index 0000000..67c6705 --- /dev/null +++ b/app/repositories/users.py @@ -0,0 +1,270 @@ +from __future__ import annotations + +from typing import TypedDict +from typing import cast + +from sqlalchemy import Column +from sqlalchemy import Index +from sqlalchemy import Integer +from sqlalchemy import String +from sqlalchemy import func +from sqlalchemy import insert +from sqlalchemy import select +from sqlalchemy import update +from sqlalchemy.dialects.mysql import TINYINT + +import app.state.services +from app._typing import UNSET +from app._typing import _UnsetSentinel +from app.repositories import Base +from app.utils import make_safe_name + + +class UsersTable(Base): + __tablename__ = "users" + + id = Column(Integer, primary_key=True, nullable=False, autoincrement=True) + name = Column(String(32, collation="utf8"), nullable=False) + safe_name = Column(String(32, collation="utf8"), nullable=False) + email = Column(String(254), nullable=False) + priv = 
Column(Integer, nullable=False, server_default="1") + pw_bcrypt = Column(String(60), nullable=False) + country = Column(String(2), nullable=False, server_default="xx") + silence_end = Column(Integer, nullable=False, server_default="0") + donor_end = Column(Integer, nullable=False, server_default="0") + creation_time = Column(Integer, nullable=False, server_default="0") + latest_activity = Column(Integer, nullable=False, server_default="0") + clan_id = Column(Integer, nullable=False, server_default="0") + clan_priv = Column(TINYINT, nullable=False, server_default="0") + preferred_mode = Column(Integer, nullable=False, server_default="0") + play_style = Column(Integer, nullable=False, server_default="0") + custom_badge_name = Column(String(16, collation="utf8")) + custom_badge_icon = Column(String(64)) + userpage_content = Column(String(2048, collation="utf8")) + api_key = Column(String(36)) + + __table_args__ = ( + Index("users_priv_index", priv), + Index("users_clan_id_index", clan_id), + Index("users_clan_priv_index", clan_priv), + Index("users_country_index", country), + Index("users_api_key_uindex", api_key, unique=True), + Index("users_email_uindex", email, unique=True), + Index("users_name_uindex", name, unique=True), + Index("users_safe_name_uindex", safe_name, unique=True), + ) + + +READ_PARAMS = ( + UsersTable.id, + UsersTable.name, + UsersTable.safe_name, + UsersTable.priv, + UsersTable.country, + UsersTable.silence_end, + UsersTable.donor_end, + UsersTable.creation_time, + UsersTable.latest_activity, + UsersTable.clan_id, + UsersTable.clan_priv, + UsersTable.preferred_mode, + UsersTable.play_style, + UsersTable.custom_badge_name, + UsersTable.custom_badge_icon, + UsersTable.userpage_content, +) + + +class User(TypedDict): + id: int + name: str + safe_name: str + priv: int + pw_bcrypt: str + country: str + silence_end: int + donor_end: int + creation_time: int + latest_activity: int + clan_id: int + clan_priv: int + preferred_mode: int + play_style: int + 
custom_badge_name: str | None + custom_badge_icon: str | None + userpage_content: str | None + api_key: str | None + + +async def create( + name: str, + email: str, + pw_bcrypt: bytes, + country: str, +) -> User: + """Create a new user in the database.""" + insert_stmt = insert(UsersTable).values( + name=name, + safe_name=make_safe_name(name), + email=email, + pw_bcrypt=pw_bcrypt, + country=country, + creation_time=func.unix_timestamp(), + latest_activity=func.unix_timestamp(), + ) + rec_id = await app.state.services.database.execute(insert_stmt) + + select_stmt = select(*READ_PARAMS).where(UsersTable.id == rec_id) + user = await app.state.services.database.fetch_one(select_stmt) + assert user is not None + return cast(User, user) + + +async def fetch_one( + id: int | None = None, + name: str | None = None, + email: str | None = None, + fetch_all_fields: bool = False, # TODO: probably remove this if possible +) -> User | None: + """Fetch a single user from the database.""" + if id is None and name is None and email is None: + raise ValueError("Must provide at least one parameter.") + + if fetch_all_fields: + select_stmt = select(UsersTable) + else: + select_stmt = select(*READ_PARAMS) + + if id is not None: + select_stmt = select_stmt.where(UsersTable.id == id) + if name is not None: + select_stmt = select_stmt.where(UsersTable.safe_name == make_safe_name(name)) + if email is not None: + select_stmt = select_stmt.where(UsersTable.email == email) + + user = await app.state.services.database.fetch_one(select_stmt) + return cast(User | None, user) + + +async def fetch_count( + priv: int | None = None, + country: str | None = None, + clan_id: int | None = None, + clan_priv: int | None = None, + preferred_mode: int | None = None, + play_style: int | None = None, +) -> int: + """Fetch the number of users in the database.""" + select_stmt = select(func.count().label("count")).select_from(UsersTable) + if priv is not None: + select_stmt = select_stmt.where(UsersTable.priv 
== priv) + if country is not None: + select_stmt = select_stmt.where(UsersTable.country == country) + if clan_id is not None: + select_stmt = select_stmt.where(UsersTable.clan_id == clan_id) + if clan_priv is not None: + select_stmt = select_stmt.where(UsersTable.clan_priv == clan_priv) + if preferred_mode is not None: + select_stmt = select_stmt.where(UsersTable.preferred_mode == preferred_mode) + if play_style is not None: + select_stmt = select_stmt.where(UsersTable.play_style == play_style) + + rec = await app.state.services.database.fetch_one(select_stmt) + assert rec is not None + return cast(int, rec["count"]) + + +async def fetch_many( + priv: int | None = None, + country: str | None = None, + clan_id: int | None = None, + clan_priv: int | None = None, + preferred_mode: int | None = None, + play_style: int | None = None, + page: int | None = None, + page_size: int | None = None, +) -> list[User]: + """Fetch multiple users from the database.""" + select_stmt = select(*READ_PARAMS) + if priv is not None: + select_stmt = select_stmt.where(UsersTable.priv == priv) + if country is not None: + select_stmt = select_stmt.where(UsersTable.country == country) + if clan_id is not None: + select_stmt = select_stmt.where(UsersTable.clan_id == clan_id) + if clan_priv is not None: + select_stmt = select_stmt.where(UsersTable.clan_priv == clan_priv) + if preferred_mode is not None: + select_stmt = select_stmt.where(UsersTable.preferred_mode == preferred_mode) + if play_style is not None: + select_stmt = select_stmt.where(UsersTable.play_style == play_style) + + if page is not None and page_size is not None: + select_stmt = select_stmt.limit(page_size).offset((page - 1) * page_size) + + users = await app.state.services.database.fetch_all(select_stmt) + return cast(list[User], users) + + +async def partial_update( + id: int, + name: str | _UnsetSentinel = UNSET, + email: str | _UnsetSentinel = UNSET, + priv: int | _UnsetSentinel = UNSET, + country: str | _UnsetSentinel = 
UNSET, + silence_end: int | _UnsetSentinel = UNSET, + donor_end: int | _UnsetSentinel = UNSET, + creation_time: _UnsetSentinel | _UnsetSentinel = UNSET, + latest_activity: int | _UnsetSentinel = UNSET, + clan_id: int | _UnsetSentinel = UNSET, + clan_priv: int | _UnsetSentinel = UNSET, + preferred_mode: int | _UnsetSentinel = UNSET, + play_style: int | _UnsetSentinel = UNSET, + custom_badge_name: str | None | _UnsetSentinel = UNSET, + custom_badge_icon: str | None | _UnsetSentinel = UNSET, + userpage_content: str | None | _UnsetSentinel = UNSET, + api_key: str | None | _UnsetSentinel = UNSET, +) -> User | None: + """Update a user in the database.""" + update_stmt = update(UsersTable).where(UsersTable.id == id) + if not isinstance(name, _UnsetSentinel): + update_stmt = update_stmt.values(name=name, safe_name=make_safe_name(name)) + if not isinstance(email, _UnsetSentinel): + update_stmt = update_stmt.values(email=email) + if not isinstance(priv, _UnsetSentinel): + update_stmt = update_stmt.values(priv=priv) + if not isinstance(country, _UnsetSentinel): + update_stmt = update_stmt.values(country=country) + if not isinstance(silence_end, _UnsetSentinel): + update_stmt = update_stmt.values(silence_end=silence_end) + if not isinstance(donor_end, _UnsetSentinel): + update_stmt = update_stmt.values(donor_end=donor_end) + if not isinstance(creation_time, _UnsetSentinel): + update_stmt = update_stmt.values(creation_time=creation_time) + if not isinstance(latest_activity, _UnsetSentinel): + update_stmt = update_stmt.values(latest_activity=latest_activity) + if not isinstance(clan_id, _UnsetSentinel): + update_stmt = update_stmt.values(clan_id=clan_id) + if not isinstance(clan_priv, _UnsetSentinel): + update_stmt = update_stmt.values(clan_priv=clan_priv) + if not isinstance(preferred_mode, _UnsetSentinel): + update_stmt = update_stmt.values(preferred_mode=preferred_mode) + if not isinstance(play_style, _UnsetSentinel): + update_stmt = update_stmt.values(play_style=play_style) 
+ if not isinstance(custom_badge_name, _UnsetSentinel): + update_stmt = update_stmt.values(custom_badge_name=custom_badge_name) + if not isinstance(custom_badge_icon, _UnsetSentinel): + update_stmt = update_stmt.values(custom_badge_icon=custom_badge_icon) + if not isinstance(userpage_content, _UnsetSentinel): + update_stmt = update_stmt.values(userpage_content=userpage_content) + if not isinstance(api_key, _UnsetSentinel): + update_stmt = update_stmt.values(api_key=api_key) + + await app.state.services.database.execute(update_stmt) + + select_stmt = select(*READ_PARAMS).where(UsersTable.id == id) + user = await app.state.services.database.fetch_one(select_stmt) + return cast(User | None, user) + + +# TODO: delete? diff --git a/app/settings.py b/app/settings.py new file mode 100644 index 0000000..85e9bfc --- /dev/null +++ b/app/settings.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +import os +import tomllib +from urllib.parse import quote + +from dotenv import load_dotenv + +from app.settings_utils import read_bool +from app.settings_utils import read_list + +load_dotenv() + +APP_HOST = os.environ["APP_HOST"] +APP_PORT = int(os.environ["APP_PORT"]) + +DB_HOST = os.environ["DB_HOST"] +DB_PORT = int(os.environ["DB_PORT"]) +DB_USER = os.environ["DB_USER"] +DB_PASS = quote(os.environ["DB_PASS"]) +DB_NAME = os.environ["DB_NAME"] +DB_DSN = f"mysql://{DB_USER}:{DB_PASS}@{DB_HOST}:{DB_PORT}/{DB_NAME}" + +REDIS_HOST = os.environ["REDIS_HOST"] +REDIS_PORT = int(os.environ["REDIS_PORT"]) +REDIS_USER = os.environ["REDIS_USER"] +REDIS_PASS = quote(os.environ["REDIS_PASS"]) +REDIS_DB = int(os.environ["REDIS_DB"]) + +REDIS_AUTH_STRING = f"{REDIS_USER}:{REDIS_PASS}@" if REDIS_USER and REDIS_PASS else "" +REDIS_DSN = f"redis://{REDIS_AUTH_STRING}{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}" + +OSU_API_KEY = os.environ.get("OSU_API_KEY") or None + +DOMAIN = os.environ["DOMAIN"] +MIRROR_SEARCH_ENDPOINT = os.environ["MIRROR_SEARCH_ENDPOINT"] +MIRROR_DOWNLOAD_ENDPOINT = 
os.environ["MIRROR_DOWNLOAD_ENDPOINT"] + +COMMAND_PREFIX = os.environ["COMMAND_PREFIX"] + +SEASONAL_BGS = read_list(os.environ["SEASONAL_BGS"]) + +MENU_ICON_URL = os.environ["MENU_ICON_URL"] +MENU_ONCLICK_URL = os.environ["MENU_ONCLICK_URL"] + +DATADOG_API_KEY = os.environ["DATADOG_API_KEY"] +DATADOG_APP_KEY = os.environ["DATADOG_APP_KEY"] + +DEBUG = read_bool(os.environ["DEBUG"]) +REDIRECT_OSU_URLS = read_bool(os.environ["REDIRECT_OSU_URLS"]) + +PP_CACHED_ACCURACIES = [int(acc) for acc in read_list(os.environ["PP_CACHED_ACCS"])] + +DISALLOWED_NAMES = read_list(os.environ["DISALLOWED_NAMES"]) +DISALLOWED_PASSWORDS = read_list(os.environ["DISALLOWED_PASSWORDS"]) +DISALLOW_OLD_CLIENTS = read_bool(os.environ["DISALLOW_OLD_CLIENTS"]) +DISALLOW_INGAME_REGISTRATION = read_bool(os.environ["DISALLOW_INGAME_REGISTRATION"]) + +DISCORD_AUDIT_LOG_WEBHOOK = os.environ["DISCORD_AUDIT_LOG_WEBHOOK"] + +AUTOMATICALLY_REPORT_PROBLEMS = read_bool(os.environ["AUTOMATICALLY_REPORT_PROBLEMS"]) + +LOG_WITH_COLORS = read_bool(os.environ["LOG_WITH_COLORS"]) + +# advanced dev settings + +## WARNING touch this once you've +## read through what it enables. +## you could put your server at risk. 
import os
from datetime import date


def read_bool(value: str) -> bool:
    """Parse a boolean env var: "true", "1" or "yes" (any case) are truthy."""
    return value.lower() in ("true", "1", "yes")


def read_list(value: str) -> list[str]:
    """Parse a comma-separated env var into a list of stripped strings."""
    return [v.strip() for v in value.split(",")]


def support_deprecated_vars(
    new_name: str,
    deprecated_name: str,
    *,
    until: date,
    allow_empty_string: bool = False,
) -> str:
    """Read an env var by `new_name`, falling back to `deprecated_name`.

    Using the deprecated name logs a warning until `until`; past that date
    it raises ValueError. Raises KeyError if neither variable is set. An
    empty string counts as unset unless `allow_empty_string` is True.
    """

    def _use_deprecated(value: str) -> str:
        # Single deprecation gate: hard error past the cutoff date,
        # loud warning before it.
        # Imported lazily so this helper module stays importable
        # without the app package initialized.
        from app.logging import Ansi
        from app.logging import log

        if until < date.today():
            raise ValueError(
                f'The "{deprecated_name}" config option has been deprecated as of {until.isoformat()} and is no longer supported. Use {new_name} instead.',
            )

        log(
            f'The "{deprecated_name}" config option has been deprecated and will be supported until {until.isoformat()}. Use {new_name} instead.',
            Ansi.LYELLOW,
        )
        return value

    new_val = os.getenv(new_name)
    if new_val:
        return new_val

    old_val = os.getenv(deprecated_name)
    if old_val:
        return _use_deprecated(old_val)

    if allow_empty_string:
        if new_val is not None:
            return new_val
        if old_val is not None:
            # fix: an empty-but-set deprecated value previously bypassed
            # the deprecation warning and the past-cutoff error entirely.
            return _use_deprecated(old_val)

    raise KeyError(f"{new_name} is not set in the environment")
# Module-level in-memory caches shared across the application.
# Populated/read elsewhere; presumably keyed for O(1) lookups on hot
# request paths — entries are never expired here.

# bcrypt hash -> the md5 it corresponds to
bcrypt: dict[bytes, bytes] = {}  # {bcrypt: md5, ...}
# beatmap md5 OR beatmap id -> Beatmap (both key forms map to the same object)
beatmap: dict[str | int, Beatmap] = {}  # {md5: map, id: map, ...}
# beatmap set id -> BeatmapSet
beatmapset: dict[int, BeatmapSet] = {}  # {bsid: map_set}
# beatmap md5s flagged as unsubmitted
unsubmitted: set[str] = set()  # {md5, ...}
# beatmap md5s flagged as needing an update
needs_update: set[str] = set()  # {md5, ...}
class Country(TypedDict):
    """A country as resolved during geolocation."""

    # lowercase two-letter country code (e.g. "us"), as keyed in the
    # module-level `country_codes` mapping below
    acronym: str
    # numeric id for the acronym, taken from the `country_codes` mapping
    numeric: int


class Geolocation(TypedDict):
    """Geolocation data resolved from proxy headers or the ip-api service."""

    latitude: float
    longitude: float
    country: Country
class IPResolver:
    """Resolves and caches client IP addresses from reverse-proxy headers."""

    def __init__(self) -> None:
        # raw header string -> parsed address; avoids re-parsing per request
        self.cache: MutableMapping[str, IPAddress] = {}

    def get_ip(self, headers: Mapping[str, str]) -> IPAddress:
        """Resolve the IP address from the headers.

        Prefers Cloudflare's CF-Connecting-IP; otherwise falls back to the
        first X-Forwarded-For hop (when several are present) or X-Real-IP.
        Raises KeyError when no recognized header is present, and
        ValueError when the header value is not a valid IP address.
        """
        ip_str = headers.get("CF-Connecting-IP")
        if ip_str is None:
            forwards = headers["X-Forwarded-For"].split(",")

            if len(forwards) != 1:
                # fix: forwarded entries may carry surrounding whitespace
                # (e.g. "a, b" splits to " b"), which
                # ipaddress.ip_address() rejects — strip before parsing.
                ip_str = forwards[0].strip()
            else:
                ip_str = headers["X-Real-IP"]

        ip = self.cache.get(ip_str)
        if ip is None:
            ip = ipaddress.ip_address(ip_str)
            self.cache[ip_str] = ip

        return ip
async def fetch_geoloc(
    ip: IPAddress,
    headers: Mapping[str, str] | None = None,
) -> Geolocation | None:
    """Attempt to fetch geolocation data by any means necessary.

    Header-provided data (from a reverse proxy) is preferred; the
    ip-api web service is the fallback.
    """
    if headers is not None:
        header_geoloc = _fetch_geoloc_from_headers(headers)
        if header_geoloc is not None:
            return header_geoloc

    return await _fetch_geoloc_from_ip(ip)


def _fetch_geoloc_from_headers(headers: Mapping[str, str]) -> Geolocation | None:
    """Attempt to fetch geolocation data from http headers."""
    # cloudflare headers win; nginx geoip headers are the fallback
    return __fetch_geoloc_cloudflare(headers) or __fetch_geoloc_nginx(headers)


def __fetch_geoloc_cloudflare(headers: Mapping[str, str]) -> Geolocation | None:
    """Attempt to fetch geolocation data from cloudflare headers."""
    wanted = ("CF-IPCountry", "CF-IPLatitude", "CF-IPLongitude")
    if any(header not in headers for header in wanted):
        return None

    acronym = headers["CF-IPCountry"].lower()
    return {
        "latitude": float(headers["CF-IPLatitude"]),
        "longitude": float(headers["CF-IPLongitude"]),
        "country": {
            "acronym": acronym,
            "numeric": country_codes[acronym],
        },
    }


def __fetch_geoloc_nginx(headers: Mapping[str, str]) -> Geolocation | None:
    """Attempt to fetch geolocation data from nginx headers."""
    wanted = ("X-Country-Code", "X-Latitude", "X-Longitude")
    if any(header not in headers for header in wanted):
        return None

    acronym = headers["X-Country-Code"].lower()
    return {
        "latitude": float(headers["X-Latitude"]),
        "longitude": float(headers["X-Longitude"]),
        "country": {
            "acronym": acronym,
            "numeric": country_codes[acronym],
        },
    }
async def _fetch_geoloc_from_ip(ip: IPAddress) -> Geolocation | None:
    """Fetch geolocation data based on ip (using ip-api)."""
    if not ip.is_private:
        url = f"http://ip-api.com/line/{ip}"
    else:
        # private/LAN address: query without an ip so ip-api geolocates
        # the server's own public address instead
        url = "http://ip-api.com/line/"

    response = await http_client.get(
        url,
        params={
            # "line" format: one requested field per response line,
            # returned in exactly this order
            "fields": ",".join(("status", "message", "countryCode", "lat", "lon")),
        },
    )
    if response.status_code != 200:
        log("Failed to get geoloc data: request failed.", Ansi.LRED)
        return None

    status, *lines = response.read().decode().split("\n")

    if status != "success":
        # on failure the first remaining line is the error message
        err_msg = lines[0]
        if err_msg == "invalid query":
            err_msg += f" ({url})"

        log(f"Failed to get geoloc data: {err_msg} for ip {ip}.", Ansi.LRED)
        return None

    # on success the remaining lines are countryCode, lat, lon
    country_acronym = lines[0].lower()

    return {
        "latitude": float(lines[1]),
        "longitude": float(lines[2]),
        "country": {
            "acronym": country_acronym,
            # NOTE(review): this raises KeyError if ip-api returns a code
            # missing from `country_codes` — confirm the mapping is complete
            "numeric": country_codes[country_acronym],
        },
    }
class Version:
    """A three-part (major.minor.micro) version with tuple-based ordering."""

    def __init__(self, major: int, minor: int, micro: int) -> None:
        self.major = major
        self.minor = minor
        self.micro = micro

    @property
    def as_tuple(self) -> tuple[int, int, int]:
        """The version as a (major, minor, micro) tuple."""
        return (self.major, self.minor, self.micro)

    def __repr__(self) -> str:
        return f"{self.major}.{self.minor}.{self.micro}"

    def __hash__(self) -> int:
        return hash(self.as_tuple)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Version):
            return NotImplemented
        return self.as_tuple == other.as_tuple

    def __lt__(self, other: Version) -> bool:
        return self.as_tuple < other.as_tuple

    def __le__(self, other: Version) -> bool:
        return self.as_tuple <= other.as_tuple

    def __gt__(self, other: Version) -> bool:
        return self.as_tuple > other.as_tuple

    def __ge__(self, other: Version) -> bool:
        return self.as_tuple >= other.as_tuple

    @classmethod
    def from_str(cls, s: str) -> Version | None:
        """Parse "X.Y.Z" into a Version; return None for any other shape."""
        parts = s.split(".")
        if len(parts) != 3:
            return None

        major, minor, micro = (int(part) for part in parts)
        return cls(major=major, minor=minor, micro=micro)
async def check_for_dependency_updates() -> None:
    """Notify the developer of any dependency updates available."""
    updates_available = False

    # fix: _get_latest_dependency_versions yields (name, latest, current);
    # the previous unpacking reversed the two versions, which both inverted
    # the update check and swapped the old -> new direction in the log line.
    async for module, latest_ver, current_ver in _get_latest_dependency_versions():
        if latest_ver > current_ver:
            updates_available = True
            log(
                f"{module} has an update available "
                f"[{current_ver!r} -> {latest_ver!r}]",
                Ansi.LMAGENTA,
            )

    if updates_available:
        log(
            "Python modules can be updated with "
            "`python3.11 -m pip install -U `.",
            Ansi.LMAGENTA,
        )


# sql migrations


async def _get_current_sql_structure_version() -> Version | None:
    """Get the last launched version of the server, from the startups table."""
    res = await app.state.services.database.fetch_one(
        "SELECT ver_major, ver_minor, ver_micro "
        "FROM startups ORDER BY datetime DESC LIMIT 1",
    )

    if res:
        return Version(res["ver_major"], res["ver_minor"], res["ver_micro"])

    return None


async def run_sql_migrations() -> None:
    """Update the sql structure, if it has changed.

    Reads the migrations file, collects statements tagged with versions
    between the last recorded startup and the current software version,
    executes them one by one, and records the new version on success.
    """
    software_version = Version.from_str(app.settings.VERSION)
    if software_version is None:
        raise RuntimeError(f"Invalid bancho.py version '{app.settings.VERSION}'")

    last_run_migration_version = await _get_current_sql_structure_version()
    if not last_run_migration_version:
        # Migrations have never run before - this is the first time starting the server.
        # We'll insert the current version into the database, so future versions know to migrate.
        await app.state.services.database.execute(
            "INSERT INTO startups (ver_major, ver_minor, ver_micro, datetime) "
            "VALUES (:major, :minor, :micro, NOW())",
            {
                "major": software_version.major,
                "minor": software_version.minor,
                "micro": software_version.micro,
            },
        )
        return  # already up to date (server has never run before)

    if software_version == last_run_migration_version:
        return  # already up to date

    # version changed; there may be sql changes.
    content = SQL_UPDATES_FILE.read_text()

    queries: list[str] = []
    q_lines: list[str] = []  # accumulator for multi-line statements

    update_ver = None  # version tag of the section currently being read

    for line in content.splitlines():
        if not line:
            continue

        if line.startswith("#"):
            # may be normal comment or new version
            r_match = VERSION_RGX.fullmatch(line)
            if r_match:
                update_ver = Version.from_str(r_match["ver"])

            continue
        elif not update_ver:
            # statements before the first version tag are ignored
            continue

        # we only need the updates between the
        # previous and new version of the server.
        if last_run_migration_version < update_ver <= software_version:
            if line.endswith(";"):
                if q_lines:
                    # closes a statement that spanned multiple lines
                    q_lines.append(line)
                    queries.append(" ".join(q_lines))
                    q_lines = []
                else:
                    queries.append(line)
            else:
                q_lines.append(line)

    if queries:
        log(
            f"Updating mysql structure (v{last_run_migration_version!r} -> v{software_version!r}).",
            Ansi.LMAGENTA,
        )

    # XXX: we can't use a transaction here with mysql as structural changes to
    # tables implicitly commit: https://dev.mysql.com/doc/refman/5.7/en/implicit-commit.html
    for query in queries:
        try:
            await app.state.services.database.execute(query)
        except pymysql.err.MySQLError as exc:
            log(f"Failed: {query}", Ansi.GRAY)
            log(repr(exc))
            log(
                "SQL failed to update - unless you've been "
                "modifying sql and know what caused this, "
                "please contact @cmyui on Discord.",
                Ansi.LRED,
            )
            raise KeyboardInterrupt from exc
    else:
        # for/else: reached only when the loop completes without raising
        # (there is no `break`); all queries executed successfully, so
        # record the new version.
        await app.state.services.database.execute(
            "INSERT INTO startups (ver_major, ver_minor, ver_micro, datetime) "
            "VALUES (:major, :minor, :micro, NOW())",
            {
                "major": software_version.major,
                "minor": software_version.minor,
                "micro": software_version.micro,
            },
        )
async def cancel_housekeeping_tasks() -> None:
    """Cancel all background housekeeping tasks and surface their errors.

    Awaits every task so cancellation completes, then hands any
    non-cancellation exception to the event loop's exception handler.
    """
    log(
        f"-> Cancelling {len(housekeeping_tasks)} housekeeping tasks.",
        Ansi.LMAGENTA,
    )

    # cancel housekeeping tasks
    for task in housekeeping_tasks:
        task.cancel()

    # return_exceptions=True: collect results instead of raising here,
    # so every task gets awaited even if some failed
    await asyncio.gather(*housekeeping_tasks, return_exceptions=True)

    loop = asyncio.get_running_loop()

    for task in housekeeping_tasks:
        if not task.cancelled():
            # task finished (with a result or an exception) before the
            # cancellation landed; report any exception it raised
            exception = task.exception()
            if exception:
                loop.call_exception_handler(
                    {
                        "message": "unhandled exception during loop shutdown",
                        "exception": exception,
                        "task": task,
                    },
                )
class Timer:
    """Context manager measuring the time spent inside a `with` block.

    Usage:
        with Timer() as t:
            ...
        seconds = t.elapsed()
    """

    def __init__(self) -> None:
        self.start_time: float | None = None
        self.end_time: float | None = None

    def __enter__(self) -> Timer:
        # fix: use perf_counter() rather than time() — wall-clock time can
        # jump (e.g. NTP adjustments), yielding wrong or negative
        # durations; perf_counter is monotonic and made for intervals.
        self.start_time = time.perf_counter()
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        self.end_time = time.perf_counter()

    def elapsed(self) -> float:
        """Return the elapsed seconds between enter and exit.

        Raises ValueError if the timer was never started or stopped.
        """
        if self.start_time is None or self.end_time is None:
            raise ValueError("Timer has not been started or stopped.")
        return self.end_time - self.start_time
class PerformanceRating(TypedDict):
    """Performance (pp) values produced for a single score."""

    pp: float
    # per-component breakdowns; presumably None when the calculator does
    # not produce that component for the mode — TODO confirm against
    # akatsuki_pp_py
    pp_acc: float | None
    pp_aim: float | None
    pp_speed: float | None
    pp_flashlight: float | None
    effective_miss_count: float | None
    pp_difficulty: float | None


class DifficultyRating(TypedDict):
    """Difficulty attributes calculated for a beatmap (with mods applied)."""

    stars: float
    # presumably mode-specific attributes, None when not applicable —
    # TODO confirm against akatsuki_pp_py
    aim: float | None
    speed: float | None
    flashlight: float | None
    slider_factor: float | None
    speed_note_count: float | None
    stamina: float | None
    color: float | None
    rhythm: float | None
    peak: float | None


class PerformanceResult(TypedDict):
    """The combined result of one performance calculation."""

    performance: PerformanceRating
    difficulty: DifficultyRating
def calculate_performances(
    osu_file_path: str,
    scores: Iterable[ScoreParams],
) -> list[PerformanceResult]:
    """\
    Calculate performance for multiple scores on a single beatmap.

    Typically most useful for mass-recalculation situations.

    TODO: Some level of error handling & returning to caller should be
    implemented here to handle cases where e.g. the beatmap file is invalid
    or there an issue during calculation.
    """
    # parse the beatmap once; it is reused for every score below
    calc_bmap = Beatmap(path=osu_file_path)

    results: list[PerformanceResult] = []

    for score in scores:
        # NOTE(review): nmiss is absent from this conflict check, so
        # acc + nmiss can be passed together — confirm this is intended,
        # since the ScoreParams comment says miss conflicts with acc too.
        if score.acc and (
            score.n300 or score.n100 or score.n50 or score.ngeki or score.nkatu
        ):
            raise ValueError(
                "Must not specify accuracy AND 300/100/50/geki/katu. Only one or the other.",
            )

        # rosupp ignores NC and requires DT
        if score.mods is not None:
            if score.mods & Mods.NIGHTCORE:
                score.mods |= Mods.DOUBLETIME

        calculator = Calculator(
            mode=score.mode,
            mods=score.mods or 0,
            combo=score.combo,
            acc=score.acc,
            n300=score.n300,
            n100=score.n100,
            n50=score.n50,
            n_geki=score.ngeki,
            n_katu=score.nkatu,
            n_misses=score.nmiss,
        )
        result = calculator.performance(calc_bmap)

        pp = result.pp

        if math.isnan(pp) or math.isinf(pp):
            # TODO: report to logserver
            # non-finite pp is clamped to 0 rather than propagated
            pp = 0.0
        else:
            pp = round(pp, 3)

        results.append(
            {
                "performance": {
                    "pp": pp,
                    "pp_acc": result.pp_acc,
                    "pp_aim": result.pp_aim,
                    "pp_speed": result.pp_speed,
                    "pp_flashlight": result.pp_flashlight,
                    "effective_miss_count": result.effective_miss_count,
                    "pp_difficulty": result.pp_difficulty,
                },
                "difficulty": {
                    "stars": result.difficulty.stars,
                    "aim": result.difficulty.aim,
                    "speed": result.difficulty.speed,
                    "flashlight": result.difficulty.flashlight,
                    "slider_factor": result.difficulty.slider_factor,
                    "speed_note_count": result.difficulty.speed_note_count,
                    "stamina": result.difficulty.stamina,
                    "color": result.difficulty.color,
                    "rhythm": result.difficulty.rhythm,
                    "peak": result.difficulty.peak,
                },
            },
        )

    return results
def make_safe_name(name: str) -> str:
    """Return a name safe for usage in sql."""
    return name.lower().replace(" ", "_")


def determine_highest_ranking_clan_member(members: list[User]) -> User:
    """Return the member with the highest clan privilege level.

    Ties resolve to the earliest such member in the input, matching the
    previous stable-sort behaviour. Raises ValueError on an empty list
    (previously a bare StopIteration escaped from next()).
    """
    # fix: previously sorted the entire list (O(n log n)) just to take its
    # first element; max() is O(n) and returns the same member.
    return max(members, key=lambda member: member["clan_priv"])
def _download_achievement_images_osu(achievements_path: Path) -> bool:
    """Download all used achievement images (one by one, from osu!).

    Returns False as soon as any single download fails; files downloaded
    before the failure are left in `achievements_path`.
    """
    achs: list[str] = []

    for resolution in ("", "@2x"):
        for mode in ("osu", "taiko", "fruits", "mania"):
            # only osu!std has 9 & 10 star pass/fc medals.
            for star_rating in range(1, 1 + (10 if mode == "osu" else 8)):
                achs.append(f"{mode}-skill-pass-{star_rating}{resolution}.png")
                achs.append(f"{mode}-skill-fc-{star_rating}{resolution}.png")

        for combo in (500, 750, 1000, 2000):
            achs.append(f"osu-combo-{combo}{resolution}.png")

        for mod in (
            "suddendeath",
            "hidden",
            "perfect",
            "hardrock",
            "doubletime",
            "flashlight",
            "easy",
            "nofail",
            "nightcore",
            "halftime",
            "spunout",
        ):
            achs.append(f"all-intro-{mod}{resolution}.png")

    log("Downloading achievement images from osu!.", Ansi.LCYAN)

    for ach in achs:
        resp = httpx.get(f"https://assets.ppy.sh/medals/client/{ach}")
        if resp.status_code != 200:
            return False

        log(f"Saving achievement: {ach}", Ansi.LCYAN)
        (achievements_path / ach).write_bytes(resp.content)

    return True


def download_achievement_images(achievements_path: Path) -> None:
    """Download all used achievement images (using the best available source)."""

    # download individual files from the official osu! servers
    downloaded = _download_achievement_images_osu(achievements_path)

    if downloaded:
        log("Downloaded all achievement images.", Ansi.LGREEN)
    else:
        # TODO: make the code safe in this state
        log("Failed to download achievement images.", Ansi.LRED)

        # fix: a failed run may leave partial downloads behind, and
        # Path.rmdir() raises OSError on a non-empty directory — which
        # would hard-crash the very path that's meant to be best-effort.
        import shutil  # local import; only needed on this failure path

        shutil.rmtree(achievements_path, ignore_errors=True)

        # allow passthrough (don't hard crash).
        # the server will *mostly* work in this state.
def has_internet_connectivity(timeout: float = 1.0) -> bool:
    """Check for an active internet connection.

    Attempts a TCP connection to port 53 of several well-known public
    DNS servers, returning True on the first success.
    """
    COMMON_DNS_SERVERS = (
        # Cloudflare
        "1.1.1.1",
        "1.0.0.1",
        # Google
        "8.8.8.8",
        "8.8.4.4",
    )
    for host in COMMON_DNS_SERVERS:
        # fix: use a fresh socket per attempt — once connect() fails, a
        # TCP socket generally cannot be reused for another connect, so
        # reusing one socket meant the fallback servers were never
        # genuinely tried.
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
            client.settimeout(timeout)
            try:
                client.connect((host, 53))
            except OSError:
                continue
            else:
                return True

    # all connections failed
    return False
def is_running_as_admin() -> bool:
    """Return whether the process is running with root/admin privileges.

    Raises on platforms exposing neither os.geteuid (POSIX) nor
    ctypes.windll (Windows).
    """
    # POSIX: effective uid 0 means root. geteuid doesn't exist elsewhere.
    geteuid = getattr(os, "geteuid", None)
    if geteuid is not None:
        return geteuid() == 0  # type: ignore[no-any-return, unused-ignore]

    # Windows: ask the shell whether we hold an admin token.
    try:
        return ctypes.windll.shell32.IsUserAnAdmin() == 1  # type: ignore[attr-defined, no-any-return, unused-ignore]
    except AttributeError:
        raise Exception(
            f"{sys.platform} is not currently supported on bancho.py, please create a github issue!",
        )
+ if app.settings.DEVELOPER_MODE + else "" + ), + Ansi.LYELLOW, + ) + + if not has_internet_connectivity(): + log("No internet connectivity detected", Ansi.LYELLOW) + + +def has_jpeg_headers_and_trailers(data_view: memoryview) -> bool: + return data_view[:4] == b"\xff\xd8\xff\xe0" and data_view[6:11] == b"JFIF\x00" + + +def has_png_headers_and_trailers(data_view: memoryview) -> bool: + return ( + data_view[:8] == b"\x89PNG\r\n\x1a\n" + and data_view[-8:] == b"\x49END\xae\x42\x60\x82" + ) diff --git a/docker-compose.test.yml b/docker-compose.test.yml new file mode 100644 index 0000000..87125cd --- /dev/null +++ b/docker-compose.test.yml @@ -0,0 +1,99 @@ +services: + ## shared services + + mysql-test: + image: mysql:latest + # ports: + # - ${DB_PORT}:${DB_PORT} + environment: + MYSQL_USER: ${DB_USER} + MYSQL_PASSWORD: ${DB_PASS} + MYSQL_DATABASE: ${DB_NAME} + MYSQL_HOST: ${DB_HOST} + MYSQL_PORT: ${DB_PORT} + MYSQL_ROOT_PASSWORD: ${DB_PASS} + volumes: + - ./migrations/base.sql:/docker-entrypoint-initdb.d/init.sql:ro + - test-db-data:/var/lib/mysql + networks: + - test-network + healthcheck: + test: "/usr/bin/mysql --user=$$MYSQL_USER --password=$$MYSQL_PASSWORD --execute \"SHOW DATABASES;\"" + interval: 2s + timeout: 20s + retries: 10 + + redis-test: + image: bitnami/redis:latest + # ports: + # - ${REDIS_PORT}:${REDIS_PORT} + user: root + volumes: + - test-redis-data:/bitnami/redis/data + networks: + - test-network + environment: + - ALLOW_EMPTY_PASSWORD=yes + - REDIS_PASSWORD=${REDIS_PASS} + + ## application services + + bancho-test: + # we also have a public image: osuakatsuki/bancho.py:latest + image: bancho:latest + depends_on: + mysql-test: + condition: service_healthy + redis-test: + condition: service_started + tty: true + init: true + volumes: + - .:/srv/root + - test-data:/srv/root/.data + networks: + - test-network + environment: + - APP_HOST=${APP_HOST} + - APP_PORT=${APP_PORT} + - DB_USER=${DB_USER} + - DB_PASS=${DB_PASS} + - DB_NAME=${DB_NAME} + - 
DB_HOST=${DB_HOST} + - DB_PORT=${DB_PORT} + - REDIS_USER=${REDIS_USER} + - REDIS_PASS=${REDIS_PASS} + - REDIS_HOST=${REDIS_HOST} + - REDIS_PORT=${REDIS_PORT} + - REDIS_DB=${REDIS_DB} + - OSU_API_KEY=${OSU_API_KEY} + - MIRROR_SEARCH_ENDPOINT=${MIRROR_SEARCH_ENDPOINT} + - MIRROR_DOWNLOAD_ENDPOINT=${MIRROR_DOWNLOAD_ENDPOINT} + - DOMAIN=${DOMAIN} + - COMMAND_PREFIX=${COMMAND_PREFIX} + - SEASONAL_BGS=${SEASONAL_BGS} + - MENU_ICON_URL=${MENU_ICON_URL} + - MENU_ONCLICK_URL=${MENU_ONCLICK_URL} + - DATADOG_API_KEY=${DATADOG_API_KEY} + - DATADOG_APP_KEY=${DATADOG_APP_KEY} + - DEBUG=${DEBUG} + - REDIRECT_OSU_URLS=${REDIRECT_OSU_URLS} + - PP_CACHED_ACCS=${PP_CACHED_ACCS} + - DISALLOWED_NAMES=${DISALLOWED_NAMES} + - DISALLOWED_PASSWORDS=${DISALLOWED_PASSWORDS} + - DISALLOW_OLD_CLIENTS=${DISALLOW_OLD_CLIENTS} + - DISALLOW_INGAME_REGISTRATION=${DISALLOW_INGAME_REGISTRATION} + - DISCORD_AUDIT_LOG_WEBHOOK=${DISCORD_AUDIT_LOG_WEBHOOK} + - AUTOMATICALLY_REPORT_PROBLEMS=${AUTOMATICALLY_REPORT_PROBLEMS} + - LOG_WITH_COLORS=${LOG_WITH_COLORS} + - SSL_CERT_PATH=${SSL_CERT_PATH} + - SSL_KEY_PATH=${SSL_KEY_PATH} + - DEVELOPER_MODE=${DEVELOPER_MODE} + +volumes: + test-data: + test-db-data: + test-redis-data: + +networks: + test-network: diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..6aa1605 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,92 @@ +services: + ## shared services + + mysql: + image: mysql:latest + # ports: + # - ${DB_PORT}:${DB_PORT} + environment: + MYSQL_USER: ${DB_USER} + MYSQL_PASSWORD: ${DB_PASS} + MYSQL_DATABASE: ${DB_NAME} + MYSQL_HOST: ${DB_HOST} + MYSQL_PORT: ${DB_PORT} + MYSQL_RANDOM_ROOT_PASSWORD: "true" + volumes: + - ./migrations/base.sql:/docker-entrypoint-initdb.d/init.sql:ro + - db-data:/var/lib/mysql + healthcheck: + test: "/usr/bin/mysql --user=$$MYSQL_USER --password=$$MYSQL_PASSWORD --execute \"SHOW DATABASES;\"" + interval: 2s + timeout: 20s + retries: 10 + + redis: + image: bitnami/redis:latest + # ports: + # - 
${REDIS_PORT}:${REDIS_PORT} + user: root + volumes: + - redis-data:/bitnami/redis/data + environment: + - ALLOW_EMPTY_PASSWORD=yes + - REDIS_PASSWORD=${REDIS_PASS} + + ## application services + + bancho: + # we also have a public image: osuakatsuki/bancho.py:latest + image: bancho:latest + ports: + - ${APP_PORT}:${APP_PORT} + depends_on: + mysql: + condition: service_healthy + redis: + condition: service_started + tty: true + init: true + volumes: + - .:/srv/root + - data:/srv/root/.data + environment: + - APP_HOST=${APP_HOST} + - APP_PORT=${APP_PORT} + - DB_USER=${DB_USER} + - DB_PASS=${DB_PASS} + - DB_NAME=${DB_NAME} + - DB_HOST=${DB_HOST} + - DB_PORT=${DB_PORT} + - REDIS_USER=${REDIS_USER} + - REDIS_PASS=${REDIS_PASS} + - REDIS_HOST=${REDIS_HOST} + - REDIS_PORT=${REDIS_PORT} + - REDIS_DB=${REDIS_DB} + - OSU_API_KEY=${OSU_API_KEY} + - MIRROR_SEARCH_ENDPOINT=${MIRROR_SEARCH_ENDPOINT} + - MIRROR_DOWNLOAD_ENDPOINT=${MIRROR_DOWNLOAD_ENDPOINT} + - DOMAIN=${DOMAIN} + - COMMAND_PREFIX=${COMMAND_PREFIX} + - SEASONAL_BGS=${SEASONAL_BGS} + - MENU_ICON_URL=${MENU_ICON_URL} + - MENU_ONCLICK_URL=${MENU_ONCLICK_URL} + - DATADOG_API_KEY=${DATADOG_API_KEY} + - DATADOG_APP_KEY=${DATADOG_APP_KEY} + - DEBUG=${DEBUG} + - REDIRECT_OSU_URLS=${REDIRECT_OSU_URLS} + - PP_CACHED_ACCS=${PP_CACHED_ACCS} + - DISALLOWED_NAMES=${DISALLOWED_NAMES} + - DISALLOWED_PASSWORDS=${DISALLOWED_PASSWORDS} + - DISALLOW_OLD_CLIENTS=${DISALLOW_OLD_CLIENTS} + - DISALLOW_INGAME_REGISTRATION=${DISALLOW_INGAME_REGISTRATION} + - DISCORD_AUDIT_LOG_WEBHOOK=${DISCORD_AUDIT_LOG_WEBHOOK} + - AUTOMATICALLY_REPORT_PROBLEMS=${AUTOMATICALLY_REPORT_PROBLEMS} + - LOG_WITH_COLORS=${LOG_WITH_COLORS} + - SSL_CERT_PATH=${SSL_CERT_PATH} + - SSL_KEY_PATH=${SSL_KEY_PATH} + - DEVELOPER_MODE=${DEVELOPER_MODE} + +volumes: + data: + db-data: + redis-data: diff --git a/ext/Caddyfile b/ext/Caddyfile new file mode 100644 index 0000000..1eb5b18 --- /dev/null +++ b/ext/Caddyfile @@ -0,0 +1,30 @@ +# Comment this out if you need to 
explicitly +# use self-signed certs. +# NOTE: Not necessary if using a '.local' domain +# +# { +# local_certs +# } + +c.{$DOMAIN}, ce.{$DOMAIN}, c4.{$DOMAIN}, osu.{$DOMAIN}, b.{$DOMAIN}, api.{$DOMAIN} { + encode gzip + reverse_proxy * 127.0.0.1:{$APP_PORT} { + header_up X-Real-IP {remote_host} + } + + request_body { + max_size 20MB + } +} + +assets.{$DOMAIN} { + encode gzip + root * {$DATA_DIRECTORY}/assets + file_server +} + +a.{$DOMAIN} { + encode gzip + root * {$DATA_DIRECTORY}/avatars + try_files {path} {file.base}.png {file.base}.jpg {file.base}.gif {file.base}.jpeg {file.base}.jfif default.jpg =404 +} diff --git a/ext/nginx.conf.example b/ext/nginx.conf.example new file mode 100644 index 0000000..1885382 --- /dev/null +++ b/ext/nginx.conf.example @@ -0,0 +1,54 @@ +# c[e4]?.ppy.sh is used for bancho +# osu.ppy.sh is used for /web, /api, etc. +# a.ppy.sh is used for osu! avatars + +upstream bancho { + server 127.0.0.1:${APP_PORT}; +} + +server { + listen 443 ssl; + server_name c.${DOMAIN} ce.${DOMAIN} c4.${DOMAIN} osu.${DOMAIN} b.${DOMAIN} api.${DOMAIN}; + client_max_body_size 20M; + + ssl_certificate ${SSL_CERT_PATH}; + ssl_certificate_key ${SSL_KEY_PATH}; + ssl_ciphers "EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH:@SECLEVEL=1"; + + location / { + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header Host $http_host; + add_header Access-Control-Allow-Origin *; + proxy_redirect off; + proxy_pass http://bancho; + } +} + +server { + listen 443 ssl; + server_name assets.${DOMAIN}; + + ssl_certificate ${SSL_CERT_PATH}; + ssl_certificate_key ${SSL_KEY_PATH}; + ssl_ciphers "EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH:@SECLEVEL=1"; + + location / { + default_type image/png; + root ${DATA_DIRECTORY}/assets; + } +} + +server { + listen 443 ssl; + server_name a.${DOMAIN}; + + ssl_certificate ${SSL_CERT_PATH}; + ssl_certificate_key ${SSL_KEY_PATH}; + ssl_ciphers 
"EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH:@SECLEVEL=1"; + + location / { + root ${DATA_DIRECTORY}/avatars; + try_files $uri $uri.png $uri.jpg $uri.gif $uri.jpeg $uri.jfif /default.jpg = 404; + } +} diff --git a/logging.yaml.example b/logging.yaml.example new file mode 100644 index 0000000..647b56d --- /dev/null +++ b/logging.yaml.example @@ -0,0 +1,36 @@ +version: 1 +disable_existing_loggers: true +loggers: + httpx: + level: WARNING + handlers: [console] + propagate: no + httpcore: + level: WARNING + handlers: [console] + propagate: no + multipart.multipart: + level: ERROR + handlers: [console] + propagate: no +handlers: + console: + class: logging.StreamHandler + level: INFO + formatter: plaintext + stream: ext://sys.stdout + # file: + # class: logging.FileHandler + # level: INFO + # formatter: json + # filename: logs.log +formatters: + plaintext: + format: '[%(asctime)s] %(levelname)s %(message)s' + datefmt: '%Y-%m-%d %H:%M:%S' + # json: + # class: pythonjsonlogger.jsonlogger.JsonFormatter + # format: '%(asctime)s %(name)s %(levelname)s %(message)s' +root: + level: INFO + handlers: [console] # , file] diff --git a/main.py b/main.py new file mode 100644 index 0000000..fae3396 --- /dev/null +++ b/main.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3.11 +from __future__ import annotations + +import logging + +import uvicorn + +import app.logging +import app.settings +import app.utils + +app.logging.configure_logging() + + +def main() -> int: + app.utils.display_startup_dialog() + uvicorn.run( + "app.api.init_api:asgi_app", + reload=app.settings.DEBUG, + log_level=logging.WARNING, + server_header=False, + date_header=False, + headers=[("bancho-version", app.settings.VERSION)], + host=app.settings.APP_HOST, + port=app.settings.APP_PORT, + ) + return 0 + + +if __name__ == "__main__": + exit(main()) diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..0ec5051 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1953 @@ +# This file is automatically 
@generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "aiomysql" +version = "0.2.0" +description = "MySQL driver for asyncio." +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiomysql-0.2.0-py3-none-any.whl", hash = "sha256:b7c26da0daf23a5ec5e0b133c03d20657276e4eae9b73e040b72787f6f6ade0a"}, + {file = "aiomysql-0.2.0.tar.gz", hash = "sha256:558b9c26d580d08b8c5fd1be23c5231ce3aeff2dadad989540fee740253deb67"}, +] + +[package.dependencies] +PyMySQL = ">=1.0" + +[package.extras] +rsa = ["PyMySQL[rsa] (>=1.0)"] +sa = ["sqlalchemy (>=1.3,<1.4)"] + +[[package]] +name = "akatsuki-pp-py" +version = "1.0.5" +description = "osu! difficulty and pp calculation for all modes" +optional = false +python-versions = ">=3.7" +files = [ + {file = "akatsuki_pp_py-1.0.5-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:6b518b0adf89960b37b52302fda3b8dc20fbf015630de0aa1bbc19d11299d129"}, + {file = "akatsuki_pp_py-1.0.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1495ac93c753699348e30ec1fdeda68cf652103a07a87b445a405d2014064797"}, + {file = "akatsuki_pp_py-1.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89c10f32b638237d720d014fe9f7184a2f5316bcb7276044156483622cad684c"}, + {file = "akatsuki_pp_py-1.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7118aea0a3fc827e0c26b8d3e8436e16945d6f58e6f842e5264e1f06abe96a8"}, + {file = "akatsuki_pp_py-1.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:546ff2202c843e8497816d7dc5f3e98ddf3496ee11930eac535b2a1822048bd0"}, + {file = "akatsuki_pp_py-1.0.5-cp310-none-win32.whl", hash = "sha256:68172e9225a81c344c2ab2e5bd305ec57500d0df7b4ad18f549d881b782965a8"}, + {file = "akatsuki_pp_py-1.0.5-cp310-none-win_amd64.whl", hash = "sha256:7503c0ef2ce8767051039c9ae89e00644fd4ff43eee06734c51ae5dbbbfe9998"}, + {file = 
"akatsuki_pp_py-1.0.5-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:747cd1ebd257a369d32e0db0169e1ee1af06f1e6d59db2d7b64dafa64b4cacc5"}, + {file = "akatsuki_pp_py-1.0.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0272cf6789f3413a9db10b617e4628cf110184b8eec6506d2d60a99ee4b8de31"}, + {file = "akatsuki_pp_py-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1913382468a4ac18a76ab32462ce0a74c03d5e0a6719199f34dbff5cd4ef37be"}, + {file = "akatsuki_pp_py-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e348cb33cb5028512f8d6834c0ffbd113bb17978040664fa067f08f2774d44b7"}, + {file = "akatsuki_pp_py-1.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a5f9b943a6cc97ce041586fdcc87227920fb74ce8a0867076a0a635a2ba3277"}, + {file = "akatsuki_pp_py-1.0.5-cp311-none-win32.whl", hash = "sha256:73ed5b0287f3b4c83e14e83a1238382677b5d185572e45e83474f60ab2d38af2"}, + {file = "akatsuki_pp_py-1.0.5-cp311-none-win_amd64.whl", hash = "sha256:f9951ddc1b79a56f55592f8205c36c687e50c4b5a96fb71e0931d347fad99c23"}, + {file = "akatsuki_pp_py-1.0.5-cp37-cp37m-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:605744ea0d7ea94606a26211756645e37128368878d6dcb698d0efe412816c70"}, + {file = "akatsuki_pp_py-1.0.5-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:99c6cd10cb467415fbff27d4c1bf779f5f7d8af5438ad7b8ee07b9c6bba08b82"}, + {file = "akatsuki_pp_py-1.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7751efd54b3a94b1ccbd332412cdf6145e1a0bf8e3eaf479e0524017ec15f419"}, + {file = "akatsuki_pp_py-1.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16d203212272e58cc179a677124ceef2b021d9f2134183f3f5bdfc2a7590603a"}, + {file = "akatsuki_pp_py-1.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:e35b77b5e4b222af3c4fcdb643f4231af1074dc09a773e501c75f46fe66c94e9"}, + {file = "akatsuki_pp_py-1.0.5-cp37-none-win32.whl", hash = "sha256:d4645c3d20df462fa7775de6a2cd4ff1d25fbbf562a0b21d3dd53eda6227872d"}, + {file = "akatsuki_pp_py-1.0.5-cp37-none-win_amd64.whl", hash = "sha256:c0e28e3767c73960a58904ef3ed1a798a3832bcc88f112387b219778fd828e29"}, + {file = "akatsuki_pp_py-1.0.5-cp38-cp38-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e0024c7bc2b2d7966f7bd33e9cb4f588d2c7698257c6b626adb458bdcc315a04"}, + {file = "akatsuki_pp_py-1.0.5-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:fc59ca5c521c45959284a3d1b41d2ba62fe350ac1df0563d7c9bc989ed44217d"}, + {file = "akatsuki_pp_py-1.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b89782c87a9a623dcdcb4ad59319d12ee0c5308d1ecf9d087e7d50f969a21cc1"}, + {file = "akatsuki_pp_py-1.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b2b14cd8df72811a9bf6c2173601584315131b4a7b54037ac753e0d0a4bd16b"}, + {file = "akatsuki_pp_py-1.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92b211192a6e80f16e0ae46030d1aaa8ec64acee275d3f77ab537e3ceb3ef5ad"}, + {file = "akatsuki_pp_py-1.0.5-cp38-none-win32.whl", hash = "sha256:6a3bbee78b93d99a4fbaecf1a1cfca2f040193ecad1bddcf24b2e052285cf239"}, + {file = "akatsuki_pp_py-1.0.5-cp38-none-win_amd64.whl", hash = "sha256:b74a727c18dd86cc665563a6146b960427c3d25b0b96b9767ad5a274bfef0c4a"}, + {file = "akatsuki_pp_py-1.0.5-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:28e45d694df23d99a770f535b52e8e40972ada2cc4379c6c43df67949504c888"}, + {file = "akatsuki_pp_py-1.0.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:6ccf1d16e19c6f1e8829b7bb86841621ba30881e0bd41b2dd6d8b698495f6f12"}, + {file = "akatsuki_pp_py-1.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:faaa58b19353bff70563b4fc731f51bcd55f55df0b37ec702af669730b0368cb"}, + {file = "akatsuki_pp_py-1.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e6bc462806480cbc1883052f3933746eb1b8631a4acf3ecf3d1802d44165aaf"}, + {file = "akatsuki_pp_py-1.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:334a6e7cb19dd820c6536b971f17be2d23718001fc1675ebdf9a9d83474c0b22"}, + {file = "akatsuki_pp_py-1.0.5-cp39-none-win32.whl", hash = "sha256:a33b2f5954823fab8316ac8b5da8e633b9c944346ba2c441353624c0c884619c"}, + {file = "akatsuki_pp_py-1.0.5-cp39-none-win_amd64.whl", hash = "sha256:1d16cd61dc763ef8f6d6e10427e71c3c923608474969914f376685a0701600af"}, + {file = "akatsuki_pp_py-1.0.5.tar.gz", hash = "sha256:6368629844d9f1edea8cbc86c3310449150ba7af6e1a3c70cc7053cf29021394"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.4.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", 
"psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "asgi-lifespan" +version = "2.1.0" +description = "Programmatic startup/shutdown of ASGI apps." +optional = false +python-versions = ">=3.7" +files = [ + {file = "asgi-lifespan-2.1.0.tar.gz", hash = "sha256:5e2effaf0bfe39829cf2d64e7ecc47c7d86d676a6599f7afba378c31f5e3a308"}, + {file = "asgi_lifespan-2.1.0-py3-none-any.whl", hash = "sha256:ed840706680e28428c01e14afb3875d7d76d3206f3d5b2f2294e059b5c23804f"}, +] + +[package.dependencies] +sniffio = "*" + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "autoflake" +version = "2.2.1" +description = "Removes unused imports and unused variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "autoflake-2.2.1-py3-none-any.whl", hash = "sha256:265cde0a43c1f44ecfb4f30d95b0437796759d07be7706a2f70e4719234c0f79"}, + {file = "autoflake-2.2.1.tar.gz", hash = "sha256:62b7b6449a692c3c9b0c916919bbc21648da7281e8506bcf8d3f8280e431ebc1"}, +] + +[package.dependencies] +pyflakes = ">=3.0.0" + +[[package]] +name = "bcrypt" +version = "4.1.2" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, + 
{file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, + {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, + {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, + {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, + {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, + {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, + {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "black" +version = "24.1.1" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"}, + {file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"}, + {file = "black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"}, + {file = "black-24.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"}, + {file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"}, + {file = "black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"}, + {file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"}, + {file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"}, + {file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"}, + {file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"}, + {file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"}, + {file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"}, + {file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"}, + {file = 
"black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"}, + {file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"}, + {file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"}, + {file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"}, + {file = "black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"}, + {file = "black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"}, + {file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"}, + {file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"}, + {file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "cffi" +version = "1.17.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", 
hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, + {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, + {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, + {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = 
"sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, + {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, + {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, + {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, + {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, + {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, + {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, + {file = 
"cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, + {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, + {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, + {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, + {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, + {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.6.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = 
"coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = 
"sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = 
"sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "42.0.2" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, + {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = 
"sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, + {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, + {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, + {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, + {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, + {file = "cryptography-42.0.2.tar.gz", hash = "sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "cython" +version = "3.0.11" +description = "The Cython compiler for writing C extensions in the Python language." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "Cython-3.0.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:44292aae17524abb4b70a25111fe7dec1a0ad718711d47e3786a211d5408fdaa"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75d45fbc20651c1b72e4111149fed3b33d270b0a4fb78328c54d965f28d55e1"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d89a82937ce4037f092e9848a7bbcc65bc8e9fc9aef2bb74f5c15e7d21a73080"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ea2e7e2d3bc0d8630dafe6c4a5a89485598ff8a61885b74f8ed882597efd5"}, + {file = "Cython-3.0.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cee29846471ce60226b18e931d8c1c66a158db94853e3e79bc2da9bd22345008"}, + {file = "Cython-3.0.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eeb6860b0f4bfa402de8929833fe5370fa34069c7ebacb2d543cb017f21fb891"}, + {file = "Cython-3.0.11-cp310-cp310-win32.whl", hash = "sha256:3699391125ab344d8d25438074d1097d9ba0fb674d0320599316cfe7cf5f002a"}, + {file = "Cython-3.0.11-cp310-cp310-win_amd64.whl", hash = "sha256:d02f4ebe15aac7cdacce1a628e556c1983f26d140fd2e0ac5e0a090e605a2d38"}, + {file = "Cython-3.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75ba1c70b6deeaffbac123856b8d35f253da13552207aa969078611c197377e4"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af91497dc098718e634d6ec8f91b182aea6bb3690f333fc9a7777bc70abe8810"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3999fb52d3328a6a5e8c63122b0a8bd110dfcdb98dda585a3def1426b991cba7"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d566a4e09b8979be8ab9f843bac0dd216c81f5e5f45661a9b25cd162ed80508c"}, + {file = "Cython-3.0.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:46aec30f217bdf096175a1a639203d44ac73a36fe7fa3dd06bd012e8f39eca0f"}, + {file = "Cython-3.0.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ddd1fe25af330f4e003421636746a546474e4ccd8f239f55d2898d80983d20ed"}, + {file = "Cython-3.0.11-cp311-cp311-win32.whl", hash = "sha256:221de0b48bf387f209003508e602ce839a80463522fc6f583ad3c8d5c890d2c1"}, + {file = "Cython-3.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:3ff8ac1f0ecd4f505db4ab051e58e4531f5d098b6ac03b91c3b902e8d10c67b3"}, + {file = "Cython-3.0.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:11996c40c32abf843ba652a6d53cb15944c88d91f91fc4e6f0028f5df8a8f8a1"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63f2c892e9f9c1698ecfee78205541623eb31cd3a1b682668be7ac12de94aa8e"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b14c24f1dc4c4c9d997cca8d1b7fb01187a218aab932328247dcf5694a10102"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8eed5c015685106db15dd103fd040948ddca9197b1dd02222711815ea782a27"}, + {file = "Cython-3.0.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780f89c95b8aec1e403005b3bf2f0a2afa060b3eba168c86830f079339adad89"}, + {file = "Cython-3.0.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a690f2ff460682ea985e8d38ec541be97e0977fa0544aadc21efc116ff8d7579"}, + {file = "Cython-3.0.11-cp312-cp312-win32.whl", hash = "sha256:2252b5aa57621848e310fe7fa6f7dce5f73aa452884a183d201a8bcebfa05a00"}, + {file = "Cython-3.0.11-cp312-cp312-win_amd64.whl", hash = "sha256:da394654c6da15c1d37f0b7ec5afd325c69a15ceafee2afba14b67a5df8a82c8"}, + {file = "Cython-3.0.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:4341d6a64d47112884e0bcf31e6c075268220ee4cd02223047182d4dda94d637"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:351955559b37e6c98b48aecb178894c311be9d731b297782f2b78d111f0c9015"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c02361af9bfa10ff1ccf967fc75159e56b1c8093caf565739ed77a559c1f29f"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6823aef13669a32caf18bbb036de56065c485d9f558551a9b55061acf9c4c27f"}, + {file = "Cython-3.0.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fb68cef33684f8cc97987bee6ae919eee7e18ee6a3ad7ed9516b8386ef95ae6"}, + {file = "Cython-3.0.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:790263b74432cb997740d73665f4d8d00b9cd1cecbdd981d93591ddf993d4f12"}, + {file = "Cython-3.0.11-cp313-cp313-win32.whl", hash = "sha256:e6dd395d1a704e34a9fac00b25f0036dce6654c6b898be6f872ac2bb4f2eda48"}, + {file = "Cython-3.0.11-cp313-cp313-win_amd64.whl", hash = "sha256:52186101d51497519e99b60d955fd5cb3bf747c67f00d742e70ab913f1e42d31"}, + {file = "Cython-3.0.11-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c69d5cad51388522b98a99b4be1b77316de85b0c0523fa865e0ea58bbb622e0a"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8acdc87e9009110adbceb7569765eb0980129055cc954c62f99fe9f094c9505e"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dd47865f4c0a224da73acf83d113f93488d17624e2457dce1753acdfb1cc40c"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:301bde949b4f312a1c70e214b0c3bc51a3f955d466010d2f68eb042df36447b0"}, + {file = "Cython-3.0.11-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = 
"sha256:f3953d2f504176f929862e5579cfc421860c33e9707f585d70d24e1096accdf7"}, + {file = "Cython-3.0.11-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:3f2b062f6df67e8a56c75e500ca330cf62c85ac26dd7fd006f07ef0f83aebfa3"}, + {file = "Cython-3.0.11-cp36-cp36m-win32.whl", hash = "sha256:c3d68751668c66c7a140b6023dba5d5d507f72063407bb609d3a5b0f3b8dfbe4"}, + {file = "Cython-3.0.11-cp36-cp36m-win_amd64.whl", hash = "sha256:bcd29945fafd12484cf37b1d84f12f0e7a33ba3eac5836531c6bd5283a6b3a0c"}, + {file = "Cython-3.0.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4e9a8d92978b15a0c7ca7f98447c6c578dc8923a0941d9d172d0b077cb69c576"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:421017466e9260aca86823974e26e158e6358622f27c0f4da9c682f3b6d2e624"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80a7232938d523c1a12f6b1794ab5efb1ae77ad3fde79de4bb558d8ab261619"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfa550d9ae39e827a6e7198076df763571cb53397084974a6948af558355e028"}, + {file = "Cython-3.0.11-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:aedceb6090a60854b31bf9571dc55f642a3fa5b91f11b62bcef167c52cac93d8"}, + {file = "Cython-3.0.11-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:473d35681d9f93ce380e6a7c8feb2d65fc6333bd7117fbc62989e404e241dbb0"}, + {file = "Cython-3.0.11-cp37-cp37m-win32.whl", hash = "sha256:3379c6521e25aa6cd7703bb7d635eaca75c0f9c7f1b0fdd6dd15a03bfac5f68d"}, + {file = "Cython-3.0.11-cp37-cp37m-win_amd64.whl", hash = "sha256:14701edb3107a5d9305a82d9d646c4f28bfecbba74b26cc1ee2f4be08f602057"}, + {file = "Cython-3.0.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598699165cfa7c6d69513ee1bffc9e1fdd63b00b624409174c388538aa217975"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a0583076c4152b417a3a8a5d81ec02f58c09b67d3f22d5857e64c8734ceada8c"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52205347e916dd65d2400b977df4c697390c3aae0e96275a438cc4ae85dadc08"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:989899a85f0d9a57cebb508bd1f194cb52f0e3f7e22ac259f33d148d6422375c"}, + {file = "Cython-3.0.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53b6072a89049a991d07f42060f65398448365c59c9cb515c5925b9bdc9d71f8"}, + {file = "Cython-3.0.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f988f7f8164a6079c705c39e2d75dbe9967e3dacafe041420d9af7b9ee424162"}, + {file = "Cython-3.0.11-cp38-cp38-win32.whl", hash = "sha256:a1f4cbc70f6b7f0c939522118820e708e0d490edca42d852fa8004ec16780be2"}, + {file = "Cython-3.0.11-cp38-cp38-win_amd64.whl", hash = "sha256:187685e25e037320cae513b8cc4bf9dbc4465c037051aede509cbbf207524de2"}, + {file = "Cython-3.0.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0fc6fdd6fa493be7bdda22355689d5446ac944cd71286f6f44a14b0d67ee3ff5"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b1d1f6f94cc5d42a4591f6d60d616786b9cd15576b112bc92a23131fcf38020"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ab2b92a3e6ed552adbe9350fd2ef3aa0cc7853cf91569f9dbed0c0699bbeab"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:104d6f2f2c827ccc5e9e42c80ef6773a6aa94752fe6bc5b24a4eab4306fb7f07"}, + {file = "Cython-3.0.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:13062ce556a1e98d2821f7a0253b50569fdc98c36efd6653a65b21e3f8bbbf5f"}, + {file = "Cython-3.0.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:525d09b3405534763fa73bd78c8e51ac8264036ce4c16d37dfd1555a7da6d3a7"}, + {file = 
"Cython-3.0.11-cp39-cp39-win32.whl", hash = "sha256:b8c7e514075696ca0f60c337f9e416e61d7ccbc1aa879a56c39181ed90ec3059"}, + {file = "Cython-3.0.11-cp39-cp39-win_amd64.whl", hash = "sha256:8948802e1f5677a673ea5d22a1e7e273ca5f83e7a452786ca286eebf97cee67c"}, + {file = "Cython-3.0.11-py2.py3-none-any.whl", hash = "sha256:0e25f6425ad4a700d7f77cd468da9161e63658837d1bc34861a9861a4ef6346d"}, + {file = "cython-3.0.11.tar.gz", hash = "sha256:7146dd2af8682b4ca61331851e6aebce9fe5158e75300343f80c07ca80b1faff"}, +] + +[[package]] +name = "databases" +version = "0.8.0" +description = "Async database support for Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "databases-0.8.0-py3-none-any.whl", hash = "sha256:0ceb7fd5c740d846e1f4f58c0256d780a6786841ec8e624a21f1eb1b51a9093d"}, + {file = "databases-0.8.0.tar.gz", hash = "sha256:6544d82e9926f233d694ec29cd018403444c7fb6e863af881a8304d1ff5cfb90"}, +] + +[package.dependencies] +aiomysql = {version = "*", optional = true, markers = "extra == \"mysql\""} +sqlalchemy = ">=1.4.42,<1.5" + +[package.extras] +aiomysql = ["aiomysql"] +aiopg = ["aiopg"] +aiosqlite = ["aiosqlite"] +asyncmy = ["asyncmy"] +asyncpg = ["asyncpg"] +mysql = ["aiomysql"] +postgresql = ["asyncpg"] +sqlite = ["aiosqlite"] + +[[package]] +name = "datadog" +version = "0.48.0" +description = "The Datadog Python library" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "datadog-0.48.0-py2.py3-none-any.whl", hash = "sha256:c3f819e2dc632a546a5b4e8d45409e996d4fa18c60df7814c82eda548e0cca59"}, + {file = "datadog-0.48.0.tar.gz", hash = "sha256:d4d661358c3e7f801fbfe15118f5ccf08b9bd9b1f45b8b910605965283edad64"}, +] + +[package.dependencies] +requests = ">=2.6.0" + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = 
"sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "fastapi" +version = "0.109.2" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"}, + {file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.36.3,<0.37.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "filelock" +version = "3.15.4" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = 
"greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = 
"greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "hiredis" +version = "3.0.0" +description = "Python wrapper for hiredis" +optional = false +python-versions = ">=3.8" +files = [ + {file = "hiredis-3.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:4b182791c41c5eb1d9ed736f0ff81694b06937ca14b0d4dadde5dadba7ff6dae"}, + {file = "hiredis-3.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:13c275b483a052dd645eb2cb60d6380f1f5215e4c22d6207e17b86be6dd87ffa"}, + {file = "hiredis-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1018cc7f12824506f165027eabb302735b49e63af73eb4d5450c66c88f47026"}, + {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83a29cc7b21b746cb6a480189e49f49b2072812c445e66a9e38d2004d496b81c"}, + {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e241fab6332e8fb5f14af00a4a9c6aefa22f19a336c069b7ddbf28ef8341e8d6"}, + {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1fb8de899f0145d6c4d5d4bd0ee88a78eb980a7ffabd51e9889251b8f58f1785"}, + {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b23291951959141173eec10f8573538e9349fa27f47a0c34323d1970bf891ee5"}, + {file = 
"hiredis-3.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e421ac9e4b5efc11705a0d5149e641d4defdc07077f748667f359e60dc904420"}, + {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:77c8006c12154c37691b24ff293c077300c22944018c3ff70094a33e10c1d795"}, + {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:41afc0d3c18b59eb50970479a9c0e5544fb4b95e3a79cf2fbaece6ddefb926fe"}, + {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:04ccae6dcd9647eae6025425ab64edb4d79fde8b9e6e115ebfabc6830170e3b2"}, + {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fe91d62b0594db5ea7d23fc2192182b1a7b6973f628a9b8b2e0a42a2be721ac6"}, + {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:99516d99316062824a24d145d694f5b0d030c80da693ea6f8c4ecf71a251d8bb"}, + {file = "hiredis-3.0.0-cp310-cp310-win32.whl", hash = "sha256:562eaf820de045eb487afaa37e6293fe7eceb5b25e158b5a1974b7e40bf04543"}, + {file = "hiredis-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a1c81c89ed765198da27412aa21478f30d54ef69bf5e4480089d9c3f77b8f882"}, + {file = "hiredis-3.0.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:4664dedcd5933364756d7251a7ea86d60246ccf73a2e00912872dacbfcef8978"}, + {file = "hiredis-3.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:47de0bbccf4c8a9f99d82d225f7672b9dd690d8fd872007b933ef51a302c9fa6"}, + {file = "hiredis-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e43679eca508ba8240d016d8cca9d27342d70184773c15bea78a23c87a1922f1"}, + {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13c345e7278c210317e77e1934b27b61394fee0dec2e8bd47e71570900f75823"}, + {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00018f22f38530768b73ea86c11f47e8d4df65facd4e562bd78773bd1baef35e"}, + {file = 
"hiredis-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ea3a86405baa8eb0d3639ced6926ad03e07113de54cb00fd7510cb0db76a89d"}, + {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c073848d2b1d5561f3903879ccf4e1a70c9b1e7566c7bdcc98d082fa3e7f0a1d"}, + {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a8dffb5f5b3415a4669d25de48b617fd9d44b0bccfc4c2ab24b06406ecc9ecb"}, + {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:22c17c96143c2a62dfd61b13803bc5de2ac526b8768d2141c018b965d0333b66"}, + {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c3ece960008dab66c6b8bb3a1350764677ee7c74ccd6270aaf1b1caf9ccebb46"}, + {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f75999ae00a920f7dce6ecae76fa5e8674a3110e5a75f12c7a2c75ae1af53396"}, + {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e069967cbd5e1900aafc4b5943888f6d34937fc59bf8918a1a546cb729b4b1e4"}, + {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0aacc0a78e1d94d843a6d191f224a35893e6bdfeb77a4a89264155015c65f126"}, + {file = "hiredis-3.0.0-cp311-cp311-win32.whl", hash = "sha256:719c32147ba29528cb451f037bf837dcdda4ff3ddb6cdb12c4216b0973174718"}, + {file = "hiredis-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:bdc144d56333c52c853c31b4e2e52cfbdb22d3da4374c00f5f3d67c42158970f"}, + {file = "hiredis-3.0.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:484025d2eb8f6348f7876fc5a2ee742f568915039fcb31b478fd5c242bb0fe3a"}, + {file = "hiredis-3.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:fcdb552ffd97151dab8e7bc3ab556dfa1512556b48a367db94b5c20253a35ee1"}, + {file = "hiredis-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bb6f9fd92f147ba11d338ef5c68af4fd2908739c09e51f186e1d90958c68cc1"}, + {file = 
"hiredis-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa86bf9a0ed339ec9e8a9a9d0ae4dccd8671625c83f9f9f2640729b15e07fbfd"}, + {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e194a0d5df9456995d8f510eab9f529213e7326af6b94770abf8f8b7952ddcaa"}, + {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a1df39d74ec507d79c7a82c8063eee60bf80537cdeee652f576059b9cdd15c"}, + {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f91456507427ba36fd81b2ca11053a8e112c775325acc74e993201ea912d63e9"}, + {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9862db92ef67a8a02e0d5370f07d380e14577ecb281b79720e0d7a89aedb9ee5"}, + {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d10fcd9e0eeab835f492832b2a6edb5940e2f1230155f33006a8dfd3bd2c94e4"}, + {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:48727d7d405d03977d01885f317328dc21d639096308de126c2c4e9950cbd3c9"}, + {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e0bb6102ebe2efecf8a3292c6660a0e6fac98176af6de67f020bea1c2343717"}, + {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:df274e3abb4df40f4c7274dd3e587dfbb25691826c948bc98d5fead019dfb001"}, + {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:034925b5fb514f7b11aac38cd55b3fd7e9d3af23bd6497f3f20aa5b8ba58e232"}, + {file = "hiredis-3.0.0-cp312-cp312-win32.whl", hash = "sha256:120f2dda469b28d12ccff7c2230225162e174657b49cf4cd119db525414ae281"}, + {file = "hiredis-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:e584fe5f4e6681d8762982be055f1534e0170f6308a7a90f58d737bab12ff6a8"}, + {file = "hiredis-3.0.0-cp38-cp38-macosx_10_15_universal2.whl", hash = 
"sha256:122171ff47d96ed8dd4bba6c0e41d8afaba3e8194949f7720431a62aa29d8895"}, + {file = "hiredis-3.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:ba9fc605ac558f0de67463fb588722878641e6fa1dabcda979e8e69ff581d0bd"}, + {file = "hiredis-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a631e2990b8be23178f655cae8ac6c7422af478c420dd54e25f2e26c29e766f1"}, + {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63482db3fadebadc1d01ad33afa6045ebe2ea528eb77ccaabd33ee7d9c2bad48"}, + {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f669212c390eebfbe03c4e20181f5970b82c5d0a0ad1df1785f7ffbe7d61150"}, + {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a49ef161739f8018c69b371528bdb47d7342edfdee9ddc75a4d8caddf45a6e"}, + {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98a152052b8878e5e43a2e3a14075218adafc759547c98668a21e9485882696c"}, + {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50a196af0ce657fcde9bf8a0bbe1032e22c64d8fcec2bc926a35e7ff68b3a166"}, + {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f2f312eef8aafc2255e3585dcf94d5da116c43ef837db91db9ecdc1bc930072d"}, + {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:6ca41fa40fa019cde42c21add74aadd775e71458051a15a352eabeb12eb4d084"}, + {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:6eecb343c70629f5af55a8b3e53264e44fa04e155ef7989de13668a0cb102a90"}, + {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:c3fdad75e7837a475900a1d3a5cc09aa024293c3b0605155da2d42f41bc0e482"}, + {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8854969e7480e8d61ed7549eb232d95082a743e94138d98d7222ba4e9f7ecacd"}, + {file = 
"hiredis-3.0.0-cp38-cp38-win32.whl", hash = "sha256:f114a6c86edbf17554672b050cce72abf489fe58d583c7921904d5f1c9691605"}, + {file = "hiredis-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:7d99b91e42217d7b4b63354b15b41ce960e27d216783e04c4a350224d55842a4"}, + {file = "hiredis-3.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:4c6efcbb5687cf8d2aedcc2c3ed4ac6feae90b8547427d417111194873b66b06"}, + {file = "hiredis-3.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5b5cff42a522a0d81c2ae7eae5e56d0ee7365e0c4ad50c4de467d8957aff4414"}, + {file = "hiredis-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:82f794d564f4bc76b80c50b03267fe5d6589e93f08e66b7a2f674faa2fa76ebc"}, + {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7a4c1791d7aa7e192f60fe028ae409f18ccdd540f8b1e6aeb0df7816c77e4a4"}, + {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2537b2cd98192323fce4244c8edbf11f3cac548a9d633dbbb12b48702f379f4"}, + {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fed69bbaa307040c62195a269f82fc3edf46b510a17abb6b30a15d7dab548df"}, + {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:869f6d5537d243080f44253491bb30aa1ec3c21754003b3bddeadedeb65842b0"}, + {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d435ae89073d7cd51e6b6bf78369c412216261c9c01662e7008ff00978153729"}, + {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:204b79b30a0e6be0dc2301a4d385bb61472809f09c49f400497f1cdd5a165c66"}, + {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3ea635101b739c12effd189cc19b2671c268abb03013fd1f6321ca29df3ca625"}, + {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f359175197fd833c8dd7a8c288f1516be45415bb5c939862ab60c2918e1e1943"}, + {file 
= "hiredis-3.0.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ac6d929cb33dd12ad3424b75725975f0a54b5b12dbff95f2a2d660c510aa106d"}, + {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:100431e04d25a522ef2c3b94f294c4219c4de3bfc7d557b6253296145a144c11"}, + {file = "hiredis-3.0.0-cp39-cp39-win32.whl", hash = "sha256:e1a9c14ae9573d172dc050a6f63a644457df5d01ec4d35a6a0f097f812930f83"}, + {file = "hiredis-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:54a6dd7b478e6eb01ce15b3bb5bf771e108c6c148315bf194eb2ab776a3cac4d"}, + {file = "hiredis-3.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:50da7a9edf371441dfcc56288d790985ee9840d982750580710a9789b8f4a290"}, + {file = "hiredis-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b285ef6bf1581310b0d5e8f6ce64f790a1c40e89c660e1320b35f7515433672"}, + {file = "hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dcfa684966f25b335072115de2f920228a3c2caf79d4bfa2b30f6e4f674a948"}, + {file = "hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a41be8af1fd78ca97bc948d789a09b730d1e7587d07ca53af05758f31f4b985d"}, + {file = "hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:038756db735e417ab36ee6fd7725ce412385ed2bd0767e8179a4755ea11b804f"}, + {file = "hiredis-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fcecbd39bd42cef905c0b51c9689c39d0cc8b88b1671e7f40d4fb213423aef3a"}, + {file = "hiredis-3.0.0-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a131377493a59fb0f5eaeb2afd49c6540cafcfba5b0b3752bed707be9e7c4eaf"}, + {file = "hiredis-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d22c53f0ec5c18ecb3d92aa9420563b1c5d657d53f01356114978107b00b860"}, + {file = "hiredis-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c8a91e9520fbc65a799943e5c970ffbcd67905744d8becf2e75f9f0a5e8414f0"}, + {file = "hiredis-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dc8043959b50141df58ab4f398e8ae84c6f9e673a2c9407be65fc789138f4a6"}, + {file = "hiredis-3.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51b99cfac514173d7b8abdfe10338193e8a0eccdfe1870b646009d2fb7cbe4b5"}, + {file = "hiredis-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:fa1fcad89d8a41d8dc10b1e54951ec1e161deabd84ed5a2c95c3c7213bdb3514"}, + {file = "hiredis-3.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:898636a06d9bf575d2c594129085ad6b713414038276a4bfc5db7646b8a5be78"}, + {file = "hiredis-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:466f836dbcf86de3f9692097a7a01533dc9926986022c6617dc364a402b265c5"}, + {file = "hiredis-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23142a8af92a13fc1e3f2ca1d940df3dcf2af1d176be41fe8d89e30a837a0b60"}, + {file = "hiredis-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:793c80a3d6b0b0e8196a2d5de37a08330125668c8012922685e17aa9108c33ac"}, + {file = "hiredis-3.0.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:467d28112c7faa29b7db743f40803d927c8591e9da02b6ce3d5fadc170a542a2"}, + {file = "hiredis-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:dc384874a719c767b50a30750f937af18842ee5e288afba95a5a3ed703b1515a"}, + {file = "hiredis-3.0.0.tar.gz", hash = "sha256:fed8581ae26345dea1f1e0d1a96e05041a727a45e7d8d459164583e23c6ac441"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httpx" +version = "0.26.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "identify" +version = "2.6.0" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.8" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = 
"sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "mypy" +version = "1.8.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + 
{file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "orjson" +version = "3.9.13" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.9.13-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fa6b67f8bef277c2a4aadd548d58796854e7d760964126c3209b19bccc6a74f1"}, + {file = "orjson-3.9.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b812417199eeb169c25f67815cfb66fd8de7ff098bf57d065e8c1943a7ba5c8f"}, + {file = "orjson-3.9.13-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ccd5bd222e5041069ad9d9868ab59e6dbc53ecde8d8c82b919954fbba43b46b"}, + {file = "orjson-3.9.13-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaaf80957c38e9d3f796f355a80fad945e72cd745e6b64c210e635b7043b673e"}, + {file = "orjson-3.9.13-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:60da7316131185d0110a1848e9ad15311e6c8938ee0b5be8cbd7261e1d80ee8f"}, + {file = "orjson-3.9.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5b98cd948372f0eb219bc309dee4633db1278687161e3280d9e693b6076951d2"}, + {file = "orjson-3.9.13-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3869d65561f10071d3e7f35ae58fd377056f67d7aaed5222f318390c3ad30339"}, + {file = "orjson-3.9.13-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43fd6036b16bb6742d03dae62f7bdf8214d06dea47e4353cde7e2bd1358d186f"}, + {file = "orjson-3.9.13-cp310-none-win32.whl", hash = "sha256:0d3ba9d88e20765335260d7b25547d7c571eee2b698200f97afa7d8c7cd668fc"}, + {file = "orjson-3.9.13-cp310-none-win_amd64.whl", hash = "sha256:6e47153db080f5e87e8ba638f1a8b18995eede6b0abb93964d58cf11bcea362f"}, + {file = "orjson-3.9.13-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4584e8eb727bc431baaf1bf97e35a1d8a0109c924ec847395673dfd5f4ef6d6f"}, + {file = "orjson-3.9.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f37f0cdd026ef777a4336e599d8194c8357fc14760c2a5ddcfdf1965d45504b"}, + {file = "orjson-3.9.13-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d714595d81efab11b42bccd119977d94b25d12d3a806851ff6bfd286a4bce960"}, + {file = "orjson-3.9.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9171e8e1a1f221953e38e84ae0abffe8759002fd8968106ee379febbb5358b33"}, + {file = "orjson-3.9.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ab9dbdec3f13f3ea6f937564ce21651844cfbf2725099f2f490426acf683c23"}, + {file = "orjson-3.9.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811ac076855e33e931549340288e0761873baf29276ad00f221709933c644330"}, + {file = "orjson-3.9.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:860d0f5b42d0c0afd73fa4177709f6e1b966ba691fcd72175affa902052a81d6"}, + {file = "orjson-3.9.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:838b898e8c1f26eb6b8d81b180981273f6f5110c76c22c384979aca854194f1b"}, + {file = 
"orjson-3.9.13-cp311-none-win32.whl", hash = "sha256:d3222db9df629ef3c3673124f2e05fb72bc4a320c117e953fec0d69dde82e36d"}, + {file = "orjson-3.9.13-cp311-none-win_amd64.whl", hash = "sha256:978117122ca4cc59b28af5322253017f6c5fc03dbdda78c7f4b94ae984c8dd43"}, + {file = "orjson-3.9.13-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:031df1026c7ea8303332d78711f180231e3ae8b564271fb748a03926587c5546"}, + {file = "orjson-3.9.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fd9a2101d04e85086ea6198786a3f016e45475f800712e6833e14bf9ce2832f"}, + {file = "orjson-3.9.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:446d9ad04204e79229ae19502daeea56479e55cbc32634655d886f5a39e91b44"}, + {file = "orjson-3.9.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b57c0954a9fdd2b05b9cec0f5a12a0bdce5bf021a5b3b09323041613972481ab"}, + {file = "orjson-3.9.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:266e55c83f81248f63cc93d11c5e3a53df49a5d2598fa9e9db5f99837a802d5d"}, + {file = "orjson-3.9.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31372ba3a9fe8ad118e7d22fba46bbc18e89039e3bfa89db7bc8c18ee722dca8"}, + {file = "orjson-3.9.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e3b0c4da61f39899561e08e571f54472a09fa71717d9797928af558175ae5243"}, + {file = "orjson-3.9.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cc03a35bfc71c8ebf96ce49b82c2a7be6af4b3cd3ac34166fdb42ac510bbfff"}, + {file = "orjson-3.9.13-cp312-none-win_amd64.whl", hash = "sha256:49b7e3fe861cb246361825d1a238f2584ed8ea21e714bf6bb17cebb86772e61c"}, + {file = "orjson-3.9.13-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:62e9a99879c4d5a04926ac2518a992134bfa00d546ea5a4cae4b9be454d35a22"}, + {file = "orjson-3.9.13-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d92a3e835a5100f1d5b566fff79217eab92223ca31900dba733902a182a35ab0"}, + {file = "orjson-3.9.13-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23f21faf072ed3b60b5954686f98157e073f6a8068eaa58dbde83e87212eda84"}, + {file = "orjson-3.9.13-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:828c502bb261588f7de897e06cb23c4b122997cb039d2014cb78e7dabe92ef0c"}, + {file = "orjson-3.9.13-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16946d095212a3dec552572c5d9bca7afa40f3116ad49695a397be07d529f1fa"}, + {file = "orjson-3.9.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3deadd8dc0e9ff844b5b656fa30a48dbee1c3b332d8278302dd9637f6b09f627"}, + {file = "orjson-3.9.13-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9b1b5adc5adf596c59dca57156b71ad301d73956f5bab4039b0e34dbf50b9fa0"}, + {file = "orjson-3.9.13-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ddc089315d030c54f0f03fb38286e2667c05009a78d659f108a8efcfbdf2e585"}, + {file = "orjson-3.9.13-cp38-none-win32.whl", hash = "sha256:ae77275a28667d9c82d4522b681504642055efa0368d73108511647c6499b31c"}, + {file = "orjson-3.9.13-cp38-none-win_amd64.whl", hash = "sha256:730385fdb99a21fce9bb84bb7fcbda72c88626facd74956bda712834b480729d"}, + {file = "orjson-3.9.13-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7e8e4a571d958910272af8d53a9cbe6599f9f5fd496a1bc51211183bb2072cbd"}, + {file = "orjson-3.9.13-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfad553a36548262e7da0f3a7464270e13900b898800fb571a5d4b298c3f8356"}, + {file = "orjson-3.9.13-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0d691c44604941945b00e0a13b19a7d9c1a19511abadf0080f373e98fdeb6b31"}, + {file = "orjson-3.9.13-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8c83718346de08d68b3cb1105c5d91e5fc39885d8610fdda16613d4e3941459"}, + {file = 
"orjson-3.9.13-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63ef57a53bfc2091a7cd50a640d9ae866bd7d92a5225a1bab6baa60ef62583f2"}, + {file = "orjson-3.9.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9156b96afa38db71344522f5517077eaedf62fcd2c9148392ff93d801128809c"}, + {file = "orjson-3.9.13-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31fb66b41fb2c4c817d9610f0bc7d31345728d7b5295ac78b63603407432a2b2"}, + {file = "orjson-3.9.13-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8a730bf07feacb0863974e67b206b7c503a62199de1cece2eb0d4c233ec29c11"}, + {file = "orjson-3.9.13-cp39-none-win32.whl", hash = "sha256:5ef58869f3399acbbe013518d8b374ee9558659eef14bca0984f67cb1fbd3c37"}, + {file = "orjson-3.9.13-cp39-none-win_amd64.whl", hash = "sha256:9bcf56efdb83244cde070e82a69c0f03c47c235f0a5cb6c81d9da23af7fbaae4"}, + {file = "orjson-3.9.13.tar.gz", hash = "sha256:fc6bc65b0cf524ee042e0bc2912b9206ef242edfba7426cf95763e4af01f527a"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "3.6.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.6.1-py2.py3-none-any.whl", hash = "sha256:9fe989afcf095d2c4796ce7c553cf28d4d4a9b9346de3cda079bcf40748454a4"}, + {file = "pre_commit-3.6.1.tar.gz", hash = "sha256:c90961d8aa706f75d60935aba09469a6b0bcb8345f127c3fbee4bdc5f114cf4b"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "psutil" +version = "5.9.8" +description = "Cross-platform lib for process and system monitoring in Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "py-cpuinfo" +version = "9.0.0" +description = "Get CPU info with pure Python" +optional = false +python-versions = "*" +files = [ + {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, + {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, +] + +[[package]] +name = "py3rijndael" +version = "0.3.3" +description = "Rijndael algorithm library for Python3." 
+optional = false +python-versions = "*" +files = [ + {file = "py3rijndael-0.3.3-py2-none-any.whl", hash = "sha256:08b017cb3a8c4dffc2fbd9e8664f380e6472752f2c0f347ef42f9c86f979e5aa"}, + {file = "py3rijndael-0.3.3.tar.gz", hash = "sha256:b6655a3ebff3a10540eaed049e8788eea3ac93d6b71b3a6ac2b00224bbece9a8"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "2.6.1" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.16.2" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.16.2" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash 
= "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = 
"pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + 
{file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = 
"pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file 
= "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pymysql" +version = "1.1.1" +description = "Pure Python MySQL Driver" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c"}, + {file = "pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"}, +] + +[package.extras] +ed25519 = ["PyNaCl (>=1.4.0)"] +rsa = ["cryptography"] + +[[package]] +name = "pytest" +version = "8.0.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, + {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.3.0,<2.0" + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" 
+version = "0.23.5" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-asyncio-0.23.5.tar.gz", hash = "sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675"}, + {file = "pytest_asyncio-0.23.5-py3-none-any.whl", hash = "sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-json-logger" +version = "2.0.7" +description = "A python library adding a json log formatter" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] + +[[package]] +name = "python-multipart" +version = "0.0.9" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"}, + {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"}, +] + +[package.extras] +dev = 
["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatch", "invoke (==2.2.0)", "more-itertools (==10.2.0)", "pbr (==6.0.0)", "pluggy (==1.4.0)", "py (==1.11.0)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.2.0)", "pyyaml (==6.0.1)", "ruff (==0.2.1)"] + +[[package]] +name = "pytimeparse" +version = "1.1.8" +description = "Time expression parser" +optional = false +python-versions = "*" +files = [ + {file = "pytimeparse-1.1.8-py2.py3-none-any.whl", hash = "sha256:04b7be6cc8bd9f5647a6325444926c3ac34ee6bc7e69da4367ba282f076036bd"}, + {file = "pytimeparse-1.1.8.tar.gz", hash = "sha256:e86136477be924d7e670646a98561957e8ca7308d44841e21f5ddea757556a0a"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = 
"sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "redis" +version = "5.0.1" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.7" +files = [ + {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, + {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} +hiredis = {version = ">=1.0.0", optional = true, markers = "extra == \"hiredis\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "respx" +version = "0.20.2" +description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." +optional = false +python-versions = ">=3.7" +files = [ + {file = "respx-0.20.2-py2.py3-none-any.whl", hash = "sha256:ab8e1cf6da28a5b2dd883ea617f8130f77f676736e6e9e4a25817ad116a172c9"}, + {file = "respx-0.20.2.tar.gz", hash = "sha256:07cf4108b1c88b82010f67d3c831dae33a375c7b436e54d87737c7f9f99be643"}, +] + +[package.dependencies] +httpx = ">=0.21.0" + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "1.4.53" +description = "Database Abstraction Library" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "SQLAlchemy-1.4.53-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b61ac5457d91b5629a3dea2b258deb4cdd35ac8f6fa2031d2b9b2fff5b3396da"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1a96aa8d425047551676b0e178ddb0683421e78eda879ab55775128b2e612cae"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e10ac36f0b994235c13388b39598bf27219ec8bdea5be99bdac612b01cbe525"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:437592b341a3229dd0443c9c803b0bf0a466f8f539014fef6cdb9c06b7edb7f9"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:784272ceb5eb71421fea9568749bcbe8bd019261a0e2e710a7efa76057af2499"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-win32.whl", hash = "sha256:122d7b5722df1a24402c6748bbb04687ef981493bb559d0cc0beffe722e0e6ed"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-win_amd64.whl", hash = "sha256:4604d42b2abccba266d3f5bbe883684b5df93e74054024c70d3fbb5eea45e530"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fb8e15dfa47f5de11ab073e12aadd6b502cfb7ac4bafd18bd18cfd1c7d13dbbc"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc8be4df55e8fde3006d9cb1f6b3df2ba26db613855dc4df2c0fcd5ec15cb3b7"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b11640251f9a9789fd96cd6e5d176b1c230230c70ad40299bcbcc568451b4c"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-win32.whl", hash = "sha256:cd534c716f86bdf95b7b984a34ee278c91d1b1d7d183e7e5ff878600b1696046"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-win_amd64.whl", hash = "sha256:6dd06572872ca13ef5a90306a3e5af787498ddaa17fb00109b1243642646cd69"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2774c24c405136c3ef472e2352bdca7330659d481fbf2283f996c0ef9eb90f22"}, + {file = 
"SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68a614765197b3d13a730d631a78c3bb9b3b72ba58ed7ab295d58d517464e315"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d13d4dfbc6e52363886b47cf02cf68c5d2a37c468626694dc210d7e97d4ad330"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-win32.whl", hash = "sha256:197065b91456574d70b6459bfa62bc0b52a4960a29ef923c375ec427274a3e05"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-win_amd64.whl", hash = "sha256:421306c4b936b0271a3ce2dc074928d5ece4a36f9c482daa5770f44ecfc3a883"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:13fc34b35d8ddb3fbe3f8fcfdf6c2546e676187f0fb20f5774da362ddaf8fa2d"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626be971ff89541cfd3e70b54be00b57a7f8557204decb6223ce0428fec058f3"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:991e42fdfec561ebc6a4fae7161a86d129d6069fa14210b96b8dd752afa7059c"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:95123f3a1e0e8020848fd32ba751db889a01a44e4e4fef7e58c87ddd0b2fca59"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c58e011e9e6373b3a091d83f20601fb335a3b4bace80bfcb914ac168aad3b70d"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:670c7769bf5dcae9aff331247b5d82fe635c63731088a46ce68ba2ba519ef36e"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:07ba54f09033d387ae9df8d62cbe211ed7304e0bfbece1f8c55e21db9fae5c11"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a38834b4c183c33daf58544281395aad2e985f0b47cca1e88ea5ada88344e63"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:616492f5315128a847f293a7c552f3561ac7e996d2aa5dc46bef4fb0d3781f1d"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0cf8c0af9563892c6632f7343bc393dfce6eeef8e4d10c5fadba9c0390520bd"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-win32.whl", hash = "sha256:c05fe05941424c2f3747a8952381b7725e24cba2ca00141380e54789d5b616b6"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-win_amd64.whl", hash = "sha256:93e90aa3e3b2f8e8cbae4d5509f8e0cf82972378d323c740a8df1c1e9f484172"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:9d7368df54d3ed45a18955f6cec38ebe075290594ac0d5c87a8ddaff7e10de27"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d8ac4158ef68eea8bb0f6dd0583127d9aa8720606964ba8eee20b254f9c83a"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16bb9fa4d00b4581b14d9f0e2224dc7745b854aa4687738279af0f48f7056c98"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4fe5168d0249c23f537950b6d75935ff2709365a113e29938a979aec36668ecf"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8608d162d3bd29d807aab32c3fb6e2f8e225a43d1c54c917fed38513785380"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-win32.whl", hash = 
"sha256:a9d4d132198844bd6828047135ce7b887687c92925049a2468a605fc775c7a1a"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-win_amd64.whl", hash = "sha256:c15d1f1fcf1f9bec0499ae1d9132b950fcc7730f2d26d10484c8808b4e077816"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:edf094a20a386ff2ec73de65ef18014b250259cb860edc61741e240ca22d6981"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a9c3514ff19d9d30d8a8d378b24cd1dfa5528d20891481cb5f196117db6a48"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaaeedbceb4dfd688fff2faf25a9a87a391f548811494f7bff7fa701b639abc3"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d021699b9007deb7aa715629078830c99a5fec2753d9bdd5ff33290d363ef755"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0465b8a68f8f4de754c1966c45b187ac784ad97bc9747736f913130f0e1adea0"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-win32.whl", hash = "sha256:5f67b9e9dcac3241781e96575468d55a42332157dee04bdbf781df573dff5f85"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-win_amd64.whl", hash = "sha256:a8c2f2a0b2c4e3b86eb58c9b6bb98548205eea2fba9dae4edfd29dc6aebbe95a"}, + {file = "SQLAlchemy-1.4.53.tar.gz", hash = "sha256:5e6ab710c4c064755fd92d1a417bef360228a19bdf0eee32b03aa0f5f8e9fe0d"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", 
"greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)", "mariadb (>=1.0.1,!=1.1.2)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql", "pymssql"] +mssql-pyodbc = ["pyodbc", "pyodbc"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql-connector = ["mysql-connector-python", "mysql-connector-python"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "asyncpg", "greenlet (!=0.4.17)", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)", "pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sqlalchemy2-stubs" +version = "0.0.2a38" +description = "Typing Stubs for SQLAlchemy 1.4" +optional = false +python-versions = ">=3.6" +files = [ + {file = "sqlalchemy2-stubs-0.0.2a38.tar.gz", hash = "sha256:861d722abeb12f13eacd775a9f09379b11a5a9076f469ccd4099961b95800f9e"}, + {file = "sqlalchemy2_stubs-0.0.2a38-py3-none-any.whl", hash = "sha256:b62aa46943807287550e2033dafe07564b33b6a815fbaa3c144e396f9cc53bcb"}, +] + +[package.dependencies] +typing-extensions = ">=3.7.4" + +[[package]] +name = "starlette" +version = "0.36.3" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"}, + {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "timeago" +version = "1.0.16" +description = "A very simple python library, used to format datetime with `*** time ago` statement. eg: \"3 hours ago\"." 
+optional = false +python-versions = "*" +files = [ + {file = "timeago-1.0.16-py3-none-any.whl", hash = "sha256:9b8cb2e3102b329f35a04aa4531982d867b093b19481cfbb1dac7845fa2f79b0"}, +] + +[[package]] +name = "types-psutil" +version = "5.9.5.20240205" +description = "Typing stubs for psutil" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-psutil-5.9.5.20240205.tar.gz", hash = "sha256:51df36a361aa597bf483dcc5b58f2ab7aa87452a36d2da97c90994d6a81ef743"}, + {file = "types_psutil-5.9.5.20240205-py3-none-any.whl", hash = "sha256:3ec9bd8b95a64fe1269241d3ffb74b94a45df2d0391da1402423cd33f29745ca"}, +] + +[[package]] +name = "types-pymysql" +version = "1.1.0.1" +description = "Typing stubs for PyMySQL" +optional = false +python-versions = "*" +files = [ + {file = "types-PyMySQL-1.1.0.1.tar.gz", hash = "sha256:72bdaecb88de4a30bc3e1842e1d4522ceb3c4b2e883a6a2a7a7162775dd27b93"}, + {file = "types_PyMySQL-1.1.0.1-py3-none-any.whl", hash = "sha256:9aec9ee0453314d477ef26e5832b4a992bc4cc3557358d62b0fe4af760a7728f"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20240808" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-PyYAML-6.0.12.20240808.tar.gz", hash = "sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af"}, + {file = "types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = "sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35"}, +] + +[[package]] +name = "types-requests" +version = "2.31.0.20240125" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.31.0.20240125.tar.gz", hash = "sha256:03a28ce1d7cd54199148e043b2079cdded22d6795d19a2c2a6791a4b2b5e2eb5"}, + {file = "types_requests-2.31.0.20240125-py3-none-any.whl", hash = "sha256:9592a9a4cb92d6d75d9b491a41477272b710e021011a2a3061157e2fb1f1a5d1"}, +] + +[package.dependencies] +urllib3 = ">=2" + +[[package]] +name = 
"typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvicorn" +version = "0.27.1" +description = "The lightning-fast ASGI server." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.27.1-py3-none-any.whl", hash = "sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4"}, + {file = "uvicorn-0.27.1.tar.gz", hash = "sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.19.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, + {file = 
"uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, + {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, +] + +[package.extras] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + +[[package]] +name = "virtualenv" +version = "20.26.3" +description = "Virtual Python 
Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "winloop" +version = "0.1.1" +description = "An alternative library for uvloop compatibility with Windows" +optional = false +python-versions = "*" +files = [ + {file = "winloop-0.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:1d4bd7bf1abbc5e473bc45924f78c9668288137dc453fba79cddabc64d673492"}, + {file = "winloop-0.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:6e092e31e7eb2946c86ad4d2968ebbd5fc236a90a00b3a2552d7d2f6995b9592"}, + {file = "winloop-0.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:43239bfaa65e94992c7819163bfa7859f876a40ff32518fd971f89e45b29eed9"}, + {file = "winloop-0.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3df42803f47618b4ced79279f2227cae23e8dbfd89aa1accb9bb751954e4e5a0"}, + {file = "winloop-0.1.1.tar.gz", hash = "sha256:0702af7e2811ba07af10f5ce4d9f6e374d8952bc760a6b0ce1d90e0fb42ca517"}, +] + +[package.dependencies] +cython = "*" + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "4bb41f79b35f536a962e582a9aec83d0f4ec9941bf5d6cf8a3f77f8dd6a54e21" diff --git 
a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..adb03a0 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,92 @@ +[tool.mypy] +plugins = ["pydantic.mypy"] +strict = true +disallow_untyped_calls = false + +[[tool.mypy.overrides]] +module = "tests.*" +disable_error_code = ["var-annotated", "has-type"] +disallow_untyped_defs = false + +[[tool.mypy.overrides]] +module = [ + "aiomysql.*", + "mitmproxy.*", + "py3rijndael.*", + "timeago.*", + "pytimeparse.*", + "cpuinfo.*", +] +ignore_missing_imports = true + +[tool.pydantic-mypy] +init_forbid_extra = true +init_typed = true +warn_required_dynamic_aliases = true + +[tool.pytest.ini_options] +asyncio_mode = "auto" + +[tool.isort] +add_imports = ["from __future__ import annotations"] +force_single_line = true +profile = "black" + +[tool.poetry] +name = "bancho-py" +version = "5.2.2" +description = "An osu! server implementation optimized for maintainability in modern python" +authors = ["Akatsuki Team"] +license = "MIT" +readme = "README.md" +packages = [{ include = "bancho" }] + +[tool.poetry.dependencies] +python = "^3.11" +async-timeout = "4.0.3" +bcrypt = "4.1.2" +datadog = "0.48.0" +fastapi = "0.109.2" +orjson = "3.9.13" +psutil = "5.9.8" +python-dotenv = "1.0.1" +python-multipart = "0.0.9" +requests = "2.31.0" +timeago = "1.0.16" +uvicorn = "0.27.1" +uvloop = { markers = "sys_platform != 'win32'", version = "0.19.0" } +winloop = { platform = "win32", version = "0.1.1" } +py3rijndael = "0.3.3" +pytimeparse = "1.1.8" +pydantic = "2.6.1" +redis = { extras = ["hiredis"], version = "5.0.1" } +sqlalchemy = ">=1.4.42,<1.5" +akatsuki-pp-py = "1.0.5" +cryptography = "42.0.2" +tenacity = "8.2.3" +httpx = "0.26.0" +py-cpuinfo = "9.0.0" +pytest = "8.0.0" +pytest-asyncio = "0.23.5" +asgi-lifespan = "2.1.0" +respx = "0.20.2" +tzdata = "2024.1" +coverage = "^7.4.1" +databases = { version = "^0.8.0", extras = ["mysql"] } +python-json-logger = "^2.0.7" + +[tool.poetry.group.dev.dependencies] +pre-commit = "3.6.1" 
+black = "24.1.1" +isort = "5.13.2" +autoflake = "2.2.1" +types-psutil = "5.9.5.20240205" +types-pymysql = "1.1.0.1" +types-requests = "2.31.0.20240125" +mypy = "1.8.0" +types-pyyaml = "^6.0.12.12" +sqlalchemy2-stubs = "^0.0.2a38" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api"