From 5f036756057e5725cd4f721950b49b70e9348f65 Mon Sep 17 00:00:00 2001 From: "Laura F. D" Date: Thu, 24 Jan 2019 16:34:06 +0000 Subject: [PATCH] Initial rewrite commit. --- Pipfile | 23 --- owapi/app.py | 271 ++++++++------------------ owapi/blizz_interface.py | 217 --------------------- owapi/{v3 => }/parsing.py | 79 ++++++-- owapi/prestige.py | 271 +++++++++----------------- owapi/util.py | 60 +----- owapi/v3/__init__.py | 285 --------------------------- owapi/v3/v3_util.py | 128 ------------- poetry.lock | 391 ++++++++++++++++++++++++++++++++++++++ 9 files changed, 614 insertions(+), 1111 deletions(-) delete mode 100644 Pipfile delete mode 100644 owapi/blizz_interface.py rename owapi/{v3 => }/parsing.py (86%) delete mode 100644 owapi/v3/__init__.py delete mode 100644 owapi/v3/v3_util.py create mode 100644 poetry.lock diff --git a/Pipfile b/Pipfile deleted file mode 100644 index e2e9e98..0000000 --- a/Pipfile +++ /dev/null @@ -1,23 +0,0 @@ -[[source]] - -url = "https://pypi.python.org/simple" -verify_ssl = true -name = "pypi" - - -[packages] - -aioredis = ">=0.3.0,<1.0.0" -aiohttp = "<=2.2.5,>=2.2.0" -asphalt = {git = "https://github.com/asphalt-framework/asphalt.git"} -asphalt-redis = "*" -asyncio-extras = "<=1.4.0,>=1.3.0" -"html5-parser" = "==0.3.3" -unidecode = "==0.04.19" -kyoukai = {git = "https://github.com/SuNDwarf/Kyoukai.git"} -yarl = "<1.2" -uvloop = "==0.9.1" - - -[dev-packages] - diff --git a/owapi/app.py b/owapi/app.py index c6b45fb..55df139 100644 --- a/owapi/app.py +++ b/owapi/app.py @@ -1,228 +1,105 @@ -""" -Main OWAPI App. -""" -import cProfile -import datetime -import io import json -import logging -import os -import pstats -import traceback -from email.utils import format_datetime -import aiohttp -from aiohttp import ClientSession -from asphalt.core import ContainerComponent -from kyoukai import Blueprint -from kyoukai import Kyoukai -from kyoukai.asphalt import KyoukaiComponent, HTTPRequestContext -from werkzeug.exceptions import HTTPException, InternalServerError -from werkzeug.routing import RequestRedirect -from werkzeug.wrappers import Response +import asks +import multio +import trio +from asks.response_objects import Response as ar +from html5_parser import parse +from quart import request +from quart_trio import QuartTrio -from owapi.v3 import api_v3 +from owapi import parsing -# Fuck your logging config. +multio.init("trio") -logging.basicConfig(filename='/dev/null', level=logging.INFO) +app = QuartTrio(__name__) -formatter = logging.Formatter('%(asctime)s - [%(levelname)s] %(name)s -> %(message)s') -root = logging.getLogger() -root.handlers = [] +B_BASE_URL = "https://playoverwatch.com/en-us/" +B_PAGE_URL = B_BASE_URL + "career/{platform}/{btag}" +B_HEROES_URL = B_BASE_URL + "heroes" +B_HERO_URL = B_HEROES_URL + "/{hero}" -consoleHandler = logging.StreamHandler() -consoleHandler.setFormatter(formatter) -root.addHandler(consoleHandler) -logger = logging.getLogger("OWAPI") +class ProfileNotFound(Exception): + pass -# Fucking aiohttp +class StatusError(Exception): + pass -class AiohttpHackyClientRequest(aiohttp.ClientRequest): - def __init__(self, *args, **kwargs): - kwargs["proxy_from_env"] = True - super().__init__(*args, **kwargs) - -# here's some more hotpatches, this api is a massive piece of garbage -async def handle_httpexception(self, ctx: HTTPRequestContext, exception: HTTPException, - environ: dict = None) -> Response: +async def get_user_page(tag: str, platform: str): + """ + Downloads a user page and parses it with LXML. 
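+    Raises StatusError with the HTTP status code on a non-200 response, and
+    ProfileNotFound when the returned page reports "Profile Not Found".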
""" - Handle a HTTP Exception. + built_url = B_PAGE_URL.format(btag=tag.replace("#", "-"), platform=platform) + result: ar = await asks.get(built_url) - :param ctx: The context of the request. - :param exception: The HTTPException to handle. - :param environ: The fake WSGI environment. + if result.status_code != 200: + raise StatusError(result.status_code) - :return: A :class:`werkzeug.wrappers.Response` that handles this response. - """ - # Try and load the error handler recursively from the ctx.route.blueprint. - bp = ctx.bp or self.root - - if environ is None: - environ = ctx.environ - - cbl = lambda environ: Response("Internal server error during processing. Report this.", - status=500) - - error_handler = bp.get_errorhandler(exception) - if not error_handler: - # Try the root Blueprint. This may happen if the blueprint requested isn't registered - # properly in the root, for some reason. - error_handler = self.root.get_errorhandler(exception) - if not error_handler: - # Just return the Exception's get_response. - cbl = exception.get_response - - else: - # Try and invoke the error handler to get the Response. - # Wrap it in the try/except, so we can handle a default one. - try: - res = await error_handler.invoke(ctx, args=(exception,)) - # hacky way of unifying everything - cbl = lambda environ: res - except HTTPException as e: - # why tho? - logger.warning("Error handler function raised another error, using the " - "response from that...") - cbl = e.get_response - except Exception as e: - logger.exception("Error in error handler!") - cbl = InternalServerError(e).get_response - # else: - # result = wrap_response(result, self.response_class) + def _parse(content: str): + res = parse(content) + node = res.findall(".//section[@class='u-nav-offset']//h1[@class='u-align-center']") + for nodes in node: + if nodes.text.strip() == "Profile Not Found": + raise ProfileNotFound() - try: - result = cbl(environ=environ) - except Exception: - # ok - logger.critical("Whilst handling a {}, response.get_response ({}) raised exception" - .format(exception.code, cbl), exc_info=True) - result = Response("Critical server error. Your application is broken.", - status=500) + return res - if result.status_code != exception.code: - logger.warning("Error handler {} returned code {} when exception was code {}..." - .format(error_handler.callable_repr, result.status_code, - exception.code)) + parsed = await trio.run_sync_in_worker_thread(_parse, result.content) + return parsed - return result -Kyoukai.handle_httpexception = handle_httpexception +def jsonify(result: dict, code: int = 200): + return json.dumps(result), code, {"Content-Type": "application/json"} -class APIComponent(ContainerComponent): +# noinspection PyCallingNonCallable +@app.route("/api/v4/u//blob") +async def get_blob(tag: str): """ - Container for other components. I think. + Gets the blob of data for a specified user. 
""" - - def __init__(self, components, use_redis=True, do_profiling=False, disable_ratelimits=False, - cache_time: int = None): - super().__init__(components) - app.config["owapi_use_redis"] = use_redis - app.config["owapi_do_profiling"] = do_profiling - app.config["owapi_disable_ratelimits"] = disable_ratelimits - app.config["owapi_cache_time"] = cache_time - - async def start(self, ctx): - self.add_component('kyoukai', KyoukaiComponent, ip="127.0.0.1", port=4444, - app="app:app", template_renderer=None) - ctx.session = ClientSession(headers={"User-Agent": "owapi scraper/1.0.1"}, - request_class=AiohttpHackyClientRequest) - if app.config["owapi_use_redis"]: - from asphalt.redis.component import RedisComponent - self.add_component('redis', RedisComponent) - else: - logger.warning('redis is disabled by config, rate limiting and caching not available') - await super().start(ctx) - - logger.info("Started OWAPI server.") - - -app = Kyoukai("owapi") - - -@app.route("/") -async def root(ctx: HTTPRequestContext): - raise RequestRedirect("https://github.com/SunDwarf/OWAPI/blob/master/api.md") - - -@app.root.errorhandler(500) -async def e500(ctx: HTTPRequestContext, exc: HTTPException): - obb = { - "error": 500, - "msg": "please report this!", - "exc": repr(exc.__cause__) + try: + result = await get_user_page(tag, platform=request.args.get("platform", "pc")) + except StatusError as e: + return jsonify({"code": e.args[0]}, e.args[0]) + + status = result.xpath(".//p[@class='masthead-permission-level-text']")[0].text + if status == "Private Profile": + return jsonify({"error": "Private"}, 403) + + # noinspection PyDictCreation + d = { + "heroes": { + "playtime": { + "competitive": {}, + "quickplay": {} + }, + "stats": { + "competitive": {}, "quickplay": {} + } + }, + "stats": {}, + "achievements": {} } - logger.error("Unhandled exception - Blizzard format probably changed!") - traceback.print_exc() - return json.dumps(obb), 500, {"Content-Type": "application/json"} - -@app.root.errorhandler(404) -async def e404(ctx: HTTPRequestContext, exc: HTTPException): - return json.dumps({"error": 404}), 404, {"Content-Type": "application/json"} + d["stats"]["quickplay"] = parsing.bl_parse_stats(result, status=status) + d["stats"]["competitive"] = parsing.bl_parse_stats(result, mode="competitive", + status=status) + d["heroes"]["stats"]["quickplay"] = parsing.bl_parse_hero_data(result) + d["heroes"]["playtime"]["quickplay"] = parsing.bl_parse_all_heroes(result) -@app.root.before_request -async def start_profiling(ctx: HTTPRequestContext): - if ctx.app.config["owapi_do_profiling"]: - pr = cProfile.Profile() - ctx.app.config['owapi_profiling_obj'] = pr - pr.enable() - return ctx + d["heroes"]["stats"]["competitive"] = parsing.bl_parse_hero_data(result, mode="competitive") + d["heroes"]["playtime"]["competitive"] = parsing.bl_parse_all_heroes(result, + mode="competitive") + d["achievements"] = parsing.bl_parse_achievement_data(result) -@app.root.after_request -async def stop_profiling(ctx: HTTPRequestContext, response: Response): - if ctx.app.config["owapi_do_profiling"]: - pr = ctx.app.config['owapi_profiling_obj'] - pr.disable() - s = io.StringIO() - ps = pstats.Stats(pr, stream=s).sort_stats('cumulative') - # print into s, with regex filter - ps.print_stats("owapi") - # strip useless part of path infos and print with logger - logger.info(s.getvalue().replace( - os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + "/", "" - )) - return response + return jsonify(d, 200) -# Create the api blueprint and 
add children -api_bp = Blueprint("api", prefix="/api") - - -@api_bp.after_request -async def jsonify(ctx, response: Response): - """ - JSONify the response. - """ - if not isinstance(response.response, dict): - return response - - # json.dump the body. - status_code = response.status_code - if not any(response.response.values()): - status_code = 404 - if ctx.request.args.get("format", "json") in ["json_pretty", "pretty"]: - d = json.dumps(response.response, sort_keys=True, indent=4, separators=(',', ': ')) - else: - d = json.dumps(response.response) - response.set_data(d) - response.headers["Content-Type"] = "application/json" - - # 261 - response.headers["Cache-Control"] = "public, max-age=300" - expires = (datetime.datetime.utcnow() + datetime.timedelta(seconds=300))\ - .replace(tzinfo=datetime.timezone.utc) - response.headers["Expires"] = format_datetime(expires, usegmt=True) - response.status_code = status_code - return response - - -api_bp.add_child(api_v3) - -app.register_blueprint(api_bp) +if __name__ == "__main__": + app.run() diff --git a/owapi/blizz_interface.py b/owapi/blizz_interface.py deleted file mode 100644 index a941219..0000000 --- a/owapi/blizz_interface.py +++ /dev/null @@ -1,217 +0,0 @@ -""" -Interface that uses Blizzard's pages as the source. -""" -import asyncio -import functools -import logging -import traceback - -import aiohttp -from kyoukai.asphalt import HTTPRequestContext -from lxml import etree - -try: - from html5_parser import parse - _has_html5_parser = True -except ImportError: - _has_html5_parser = False -from werkzeug.exceptions import HTTPException, NotFound, InternalServerError - -from owapi import util - -B_BASE_URL = "https://playoverwatch.com/en-us/" -B_PAGE_URL = B_BASE_URL + "career/{platform}{region}/{btag}" -B_HEROES_URL = B_BASE_URL + "heroes" -B_HERO_URL = B_HEROES_URL + "/{hero}" - -# The currently available specific regions. -AVAILABLE_REGIONS = ["/eu", "/us", "/kr"] - -logger = logging.getLogger("OWAPI") - - -async def get_page_body(ctx: HTTPRequestContext, url: str, cache_time=300, cache_404=False) -> str: - """ - Downloads page body from PlayOverwatch and caches it. - """ - - async def _real_get_body(_, url: str): - # Real function. - logger.info("GET => {}".format(url)) - async with ctx.session.get(url) as req: - assert isinstance(req, aiohttp.ClientResponse) - logger.info("GET => {} => {}".format(url, req.status)) - if req.status != 200: - return None - return (await req.read()).decode() - - result = await util.with_cache(ctx, _real_get_body, url, expires=cache_time, - cache_404=cache_404) - return result - - -def _parse_page_html5(content: str) -> etree._Element: - """ - Internal function to parse a page and return the data. - - This uses html5_parser. - """ - if content and content.lower() != 'none': - data = parse(content) - return data - - -def _parse_page_lxml(content: str) -> etree._Element: - """ - Internal function to parse a page and return the data. - - This uses raw LXML. - """ - if content and content.lower() != 'none': - data = etree.HTML(content) - return data - - -async def get_user_page(ctx: HTTPRequestContext, battletag: str, platform: str = "pc", - region: str = "us", - cache_time=300, cache_404=False) -> etree._Element: - """ - Downloads the BZ page for a user, and parses it. 
- """ - if platform != "pc": - region = "" - built_url = B_PAGE_URL.format( - region=region, btag=battletag.replace("#", "-"), platform=platform) - page_body = await get_page_body(ctx, built_url, cache_time=cache_time, cache_404=cache_404) - - if not page_body: - return None - - # parse the page - parse_partial = functools.partial(_parse_page, page_body) - loop = asyncio.get_event_loop() - parsed = await loop.run_in_executor(None, parse_partial) - - # sanity check - node = parsed.findall(".//section[@class='u-nav-offset']//h1[@class='u-align-center']") - for nodes in node: - if nodes.text.strip() == "Profile Not Found": - return None - - return parsed - - -async def fetch_all_user_pages(ctx: HTTPRequestContext, battletag: str, *, - platform="pc"): - """ - Fetches all user pages for a specified user. - - Returns a dictionary in the format of `{region: etree._Element | None}`. - """ - if platform != "pc": - coro = get_user_page(ctx, battletag, region="", platform=platform, cache_404=True) - result = await coro - if isinstance(result, etree._Element): - return {"any": result, - "eu": None, "us": None, "kr": None} - else: - # Raise a 404. - raise NotFound() - - futures = [] - for region in AVAILABLE_REGIONS: - # Add the get_user_page coroutine. - coro = get_user_page(ctx, battletag, region=region, platform=platform, cache_404=True) - futures.append(coro) - - # Gather all the futures to download paralellely. - results = await asyncio.gather(*futures, return_exceptions=True) - d = {"any": None} - error = None - for region, result in zip(AVAILABLE_REGIONS, results): - # Remove the `/` from the front of the region. - # This is used internally to make building the URL to get simpler. - region = region[1:] - # Make sure it's either a None or an element. - if isinstance(result, etree._Element): - d[region] = result - elif isinstance(result, Exception): - logger.error("Failed to fetch user page!\n{}".format( - ''.join(traceback.format_exception(type(result), result, result.__traceback__)) - )) - error = result - d[region] = None - else: - d[region] = None - - # Check if we should raise or return. - if not any(d[i[1:]] is not None for i in AVAILABLE_REGIONS): - if error is not None: - e = InternalServerError() - e.__cause__ = error - e.__context__ = error - raise e - raise NotFound() - - return d - - -async def region_helper_v2(ctx: HTTPRequestContext, battletag: str, platform="pc", region=None, - extra=""): - """ - Downloads the correct page for a user in the right region. - - This will return either (etree._Element, region) or (None, None). - """ - if region is None: - reg_l = ["/eu", "/us", "/kr"] - else: - if not region.startswith("/"): - # ugh - region = "/" + region - reg_l = [region] - - for reg in reg_l: - # Get the user page. - page = await get_user_page(ctx, battletag, platform=platform, region=reg) - # Check if the page was returned successfully. - # If it was, return it. - if page is not None: - return page, reg[1:] - else: - # Since we continued without returning, give back the None, None. 
- return None, None - - -async def get_hero_data(ctx: HTTPRequestContext, hero: str): - built_url = B_HERO_URL.format(hero=hero) - page_body = await get_page_body(ctx, built_url) - - if not page_body: - raise HTTPException(404) - - parse_partial = functools.partial(_parse_page, page_body) - loop = asyncio.get_event_loop() - parsed = await loop.run_in_executor(None, parse_partial) - - return parsed - - -async def get_all_heroes(ctx: HTTPRequestContext): - built_url = B_HEROES_URL - page_body = await get_page_body(ctx, built_url) - - if not page_body: - raise HTTPException(404) - - parse_partial = functools.partial(_parse_page, page_body) - loop = asyncio.get_event_loop() - parsed = await loop.run_in_executor(None, parse_partial) - - return parsed - - -if _has_html5_parser: - _parse_page = _parse_page_html5 -else: - _parse_page = _parse_page_lxml diff --git a/owapi/v3/parsing.py b/owapi/parsing.py similarity index 86% rename from owapi/v3/parsing.py rename to owapi/parsing.py index 2fc8a25..8eaa45a 100644 --- a/owapi/v3/parsing.py +++ b/owapi/parsing.py @@ -4,7 +4,7 @@ from lxml import etree from owapi import util -from owapi.prestige import PRESTIGE +from owapi.prestige import PRESTIGE_BORDERS, PRESTIGE_STARS hero_data_div_ids = { "reaper": "0x02E0000000000002", @@ -34,7 +34,8 @@ "doomfist": "0x02E000000000012F", "moira": "0x02E00000000001A2", "brigitte": "0x02E0000000000195", - "wrecking_ball": "0x02E00000000001CA" + "wrecking_ball": "0x02E00000000001CA", + "ashe": "0x02E0000000000200" } tier_data_img_src = { @@ -78,10 +79,30 @@ def bl_parse_stats(parsed, mode="quickplay", status=None): mast_head = parsed.xpath(".//div[@class='masthead-player']")[0] - # Get the prestige. - prestige = mast_head.xpath(".//div[@class='player-level']")[0] + # Rank images are now based on 2 separate images. Prestige now also relies on 'player-rank' element + prestige_stars = 0 + try: + prestige_rank = mast_head.xpath(".//div[@class='player-rank']")[0] + bg_image = [x for x in prestige_rank.values() if 'background-image' in x][0] + except IndexError: + # No stars + prestige_stars = 0 + else: + for key, val in PRESTIGE_STARS.items(): + if key in bg_image: + prestige_stars = val + # Adds a new dict key called "prestige_image". Left the old name below of "rank_image" for compatibility + built_dict["overall_stats"]["prestige_image"] = bg_image.split("(")[1][:-1] + break + else: + # Unknown prestige image + prestige_stars = None + # Extract the background-image from the styles. + prestige_num = 0 try: + # Get the player-level base (border). + prestige = mast_head.xpath(".//div[@class='player-level']")[0] bg_image = [x for x in prestige.values() if 'background-image' in x][0] except IndexError: # Cannot find background-image. @@ -89,15 +110,20 @@ def bl_parse_stats(parsed, mode="quickplay", status=None): # Don't set a prestige. built_dict["overall_stats"]["prestige"] = 0 else: - for key, val in PRESTIGE.items(): + for key, val in PRESTIGE_BORDERS.items(): if key in bg_image: prestige_num = val built_dict["overall_stats"]["rank_image"] = bg_image.split("(")[1][:-1] break else: - # Unknown. + # Unknown rank image prestige_num = None - built_dict["overall_stats"]["prestige"] = prestige_num + + # If we have prestige values, return them. Otherwise, return None + if prestige_num is not None or prestige_stars is not None: + built_dict["overall_stats"]["prestige"] = prestige_num + prestige_stars + else: + built_dict["overall_stats"]["prestige"] = None # Parse out the HTML. 
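+    # The level number shown inside the border is the text of the first
+    # inner div of the player-level element read above.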
level = int(prestige.findall(".//div")[0].text) @@ -105,29 +131,36 @@ def bl_parse_stats(parsed, mode="quickplay", status=None): # Get and parse out endorsement level. endorsement = mast_head.xpath(".//div[@class='endorsement-level']")[0] - built_dict["overall_stats"]["endorsement_level"] = int(endorsement.findall(".//div[@class='u-center']")[0].text) + built_dict["overall_stats"]["endorsement_level"] = int( + endorsement.findall(".//div[@class='u-center']")[0].text) # Get endorsement circle. - endorsement_icon_inner = mast_head.xpath(".//div[@class='endorsement-level']/div[@class='EndorsementIcon']/div[@class='EndorsementIcon-inner']")[0] + endorsement_icon_inner = mast_head.xpath( + ".//div[@class='endorsement-level']/div[@class='EndorsementIcon']/div[" + "@class='EndorsementIcon-inner']")[ + 0] # Get individual endorsement segments. try: - endorsement_shotcaller_image = endorsement_icon_inner.findall(".//svg[@class='EndorsementIcon-border EndorsementIcon-border--shotcaller']")[0] + endorsement_shotcaller_image = endorsement_icon_inner.findall( + ".//svg[@class='EndorsementIcon-border EndorsementIcon-border--shotcaller']")[0] endorsement_shotcaller_level = endorsement_shotcaller_image.get('data-value') except: endorsement_shotcaller_level = 0 try: - endorsement_teammate_image = endorsement_icon_inner.findall(".//svg[@class='EndorsementIcon-border EndorsementIcon-border--teammate']")[0] + endorsement_teammate_image = endorsement_icon_inner.findall( + ".//svg[@class='EndorsementIcon-border EndorsementIcon-border--teammate']")[0] endorsement_teammate_level = endorsement_teammate_image.get('data-value') except: endorsement_teammate_level = 0 try: - endorsement_sportsmanship_image = endorsement_icon_inner.findall(".//svg[@class='EndorsementIcon-border EndorsementIcon-border--sportsmanship']")[0] + endorsement_sportsmanship_image = endorsement_icon_inner.findall( + ".//svg[@class='EndorsementIcon-border EndorsementIcon-border--sportsmanship']")[0] endorsement_sportsmanship_level = endorsement_sportsmanship_image.get('data-value') except: - endorsement_sportsmanship_level = 0 + endorsement_sportsmanship_level = 0 # Parse out endorsement segements. built_dict["overall_stats"]["endorsement_shotcaller"] = endorsement_shotcaller_level @@ -193,7 +226,7 @@ def bl_parse_stats(parsed, mode="quickplay", status=None): game_box = stat_groups[3] except IndexError: try: - game_box = stat_groups[2] # I guess use 2? + game_box = stat_groups[2] # I guess use 2? except IndexError: # edge cases... # we can't really extract any more stats @@ -338,13 +371,18 @@ def bl_parse_all_heroes(parsed, mode="quickplay"): else: _root = parsed - _hero_info = _root.findall(".//div[@data-group-id='comparisons']")[0] + _hero_info = _root.xpath(".//div[@data-group-id='comparisons' and " + "@data-category-id='0x0860000000000021']")[0] hero_info = _hero_info.findall(".//div[@class='ProgressBar-textWrapper']") + print(etree.tostring(_hero_info)) # Loop over each one, extracting the name and hours counted. 
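+    # The first entry carrying a sub-hour ("N minutes") value calibrates
+    # percent_per_second, on the assumption that the progress-bar percentage
+    # scales linearly with playtime; every hero handled after that point has
+    # its playtime reconstructed from its bar percentage rather than the
+    # coarse text label.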
percent_per_second = None for child in reversed(hero_info): name, played = child.getchildren() + if not name.text: + continue + name, played = util.sanitize_string(name.text), played.text.lower() time = 0 @@ -355,12 +393,12 @@ def bl_parse_all_heroes(parsed, mode="quickplay"): # Requires reversing hero_info category_item = child.getparent().getparent() percent = float(category_item.attrib['data-overwatch-progress-percent']) - if percent_per_second is None and time < 1 and time > 0: + if percent_per_second is None and 1 > time > 0: seconds = 3600 * time percent_per_second = percent / seconds built_dict[name] = time - if percent_per_second != None: + if percent_per_second is not None: built_dict[name] = (percent / percent_per_second) / float(3600) return built_dict @@ -407,10 +445,15 @@ def bl_parse_hero_data(parsed: etree._Element, mode="quickplay"): subbox_offset = 0 # .find on the assumption hero box is the *first* item + hbtitle = None try: hbtitle = stat_groups.find(".//span[@class='stat-title']").text except AttributeError: - hbtitle = stat_groups.find(".//h5[@class='stat-title']").text + try: + hbtitle = stat_groups.find(".//h5[@class='stat-title']").text + except AttributeError: + # Unable to parse stat boxes. This is likely due to 0 playtime on a hero, so there are no stats + pass if hbtitle == "Hero Specific": subbox_offset = 1 hero_specific_box = stat_groups[0] diff --git a/owapi/prestige.py b/owapi/prestige.py index 5c7c672..72a3e47 100644 --- a/owapi/prestige.py +++ b/owapi/prestige.py @@ -5,188 +5,91 @@ The dict is used to map them as appropriate. """ -PRESTIGE = { - # Bronze 0 - 5 - "0x0250000000000918": 0, - "0x0250000000000919": 0, - "0x025000000000091A": 0, - "0x025000000000091B": 0, - "0x025000000000091C": 0, - "0x025000000000091D": 0, - "0x025000000000091E": 0, - "0x025000000000091F": 0, - "0x0250000000000920": 0, - "0x0250000000000921": 0, - "0x0250000000000922": 1, - "0x0250000000000924": 1, - "0x0250000000000925": 1, - "0x0250000000000926": 1, - "0x025000000000094C": 1, - "0x0250000000000927": 1, - "0x0250000000000928": 1, - "0x0250000000000929": 1, - "0x025000000000092B": 1, - "0x0250000000000950": 1, - "0x025000000000092A": 2, - "0x025000000000092C": 2, - "0x0250000000000937": 2, - "0x025000000000093B": 2, - "0x0250000000000933": 2, - "0x0250000000000923": 2, - "0x0250000000000944": 2, - "0x0250000000000948": 2, - "0x025000000000093F": 2, - "0x0250000000000951": 2, - "0x025000000000092D": 3, - "0x0250000000000930": 3, - "0x0250000000000934": 3, - "0x0250000000000938": 3, - "0x0250000000000940": 3, - "0x0250000000000949": 3, - "0x0250000000000952": 3, - "0x025000000000094D": 3, - "0x0250000000000945": 3, - "0x025000000000093C": 3, - "0x025000000000092E": 4, - "0x0250000000000931": 4, - "0x0250000000000935": 4, - "0x025000000000093D": 4, - "0x0250000000000946": 4, - "0x025000000000094A": 4, - "0x0250000000000953": 4, - "0x025000000000094E": 4, - "0x0250000000000939": 4, - "0x0250000000000941": 4, - "0x025000000000092F": 5, - "0x0250000000000932": 5, - "0x025000000000093E": 5, - "0x0250000000000936": 5, - "0x025000000000093A": 5, - "0x0250000000000942": 5, - "0x0250000000000947": 5, - "0x025000000000094F": 5, - "0x025000000000094B": 5, - "0x0250000000000954": 5, - # Silver 6 - 11 - "0x0250000000000956": 6, - "0x025000000000095C": 6, - "0x025000000000095D": 6, - "0x025000000000095E": 6, - "0x025000000000095F": 6, - "0x0250000000000960": 6, - "0x0250000000000961": 6, - "0x0250000000000962": 6, - "0x0250000000000963": 6, - "0x0250000000000964": 6, - "0x0250000000000957": 
7, - "0x0250000000000965": 7, - "0x0250000000000966": 7, - "0x0250000000000967": 7, - "0x0250000000000968": 7, - "0x0250000000000969": 7, - "0x025000000000096A": 7, - "0x025000000000096B": 7, - "0x025000000000096C": 7, - "0x025000000000096D": 7, - "0x0250000000000958": 8, - "0x025000000000096E": 8, - "0x025000000000096F": 8, - "0x0250000000000970": 8, - "0x0250000000000971": 8, - "0x0250000000000972": 8, - "0x0250000000000973": 8, - "0x0250000000000974": 8, - "0x0250000000000975": 8, - "0x0250000000000976": 8, - "0x0250000000000959": 9, - "0x0250000000000977": 9, - "0x0250000000000978": 9, - "0x0250000000000979": 9, - "0x025000000000097A": 9, - "0x025000000000097B": 9, - "0x025000000000097C": 9, - "0x025000000000097D": 9, - "0x025000000000097E": 9, - "0x025000000000097F": 9, - "0x025000000000095A": 10, - "0x0250000000000980": 10, - "0x0250000000000981": 10, - "0x0250000000000982": 10, - "0x0250000000000983": 10, - "0x0250000000000984": 10, - "0x0250000000000985": 10, - "0x0250000000000986": 10, - "0x0250000000000987": 10, - "0x0250000000000988": 10, - "0x025000000000095B": 11, - "0x0250000000000989": 11, - "0x025000000000098A": 11, - "0x025000000000098B": 11, - "0x025000000000098C": 11, - "0x025000000000098D": 11, - "0x025000000000098E": 11, - "0x025000000000098F": 11, - "0x0250000000000991": 11, - "0x0250000000000990": 11, - # Gold 12 - 17 - "0x0250000000000992": 12, - "0x0250000000000993": 12, - "0x0250000000000994": 12, - "0x0250000000000995": 12, - "0x0250000000000996": 12, - "0x0250000000000997": 12, - "0x0250000000000998": 12, - "0x0250000000000999": 12, - "0x025000000000099A": 12, - "0x025000000000099B": 12, - "0x025000000000099C": 13, - "0x025000000000099D": 13, - "0x025000000000099E": 13, - "0x025000000000099F": 13, - "0x02500000000009A0": 13, - "0x02500000000009A1": 13, - "0x02500000000009A2": 13, - "0x02500000000009A3": 13, - "0x02500000000009A4": 13, - "0x02500000000009A5": 13, - "0x02500000000009A6": 14, - "0x02500000000009A7": 14, - "0x02500000000009A8": 14, - "0x02500000000009A9": 14, - "0x02500000000009AA": 14, - "0x02500000000009AB": 14, - "0x02500000000009AC": 14, - "0x02500000000009AD": 14, - "0x02500000000009AE": 14, - "0x02500000000009AF": 14, - "0x02500000000009B0": 15, - "0x02500000000009B1": 15, - "0x02500000000009B2": 15, - "0x02500000000009B3": 15, - "0x02500000000009B4": 15, - "0x02500000000009B5": 15, - "0x02500000000009B6": 15, - "0x02500000000009B7": 15, - "0x02500000000009B8": 15, - "0x02500000000009B9": 15, - "0x02500000000009BA": 16, - "0x02500000000009BB": 16, - "0x02500000000009BC": 16, - "0x02500000000009BD": 16, - "0x02500000000009BE": 16, - "0x02500000000009BF": 16, - "0x02500000000009C0": 16, - "0x02500000000009C1": 16, - "0x02500000000009C2": 16, - "0x02500000000009C3": 16, - "0x02500000000009C4": 17, - "0x02500000000009C5": 17, - "0x02500000000009C6": 17, - "0x02500000000009C7": 17, - "0x02500000000009C8": 17, - "0x02500000000009C9": 17, - "0x02500000000009CA": 17, - "0x02500000000009CB": 17, - "0x02500000000009CC": 17, - "0x02500000000009CD": 17 +PRESTIGE_BORDERS = { + # Base level Bronze = 0 + "1055f5ae3a84b7bd8afa9fcbd2baaf9a412c63e8fe5411025b3264db12927771": 0, # Bronze Lv 1 + "69c2c1aff0db8429a980bad7db76a3388003e43f0034097dc4cfa7f13c5de7d7": 0, # Bronze Lv 11 + "4d63c2aadf536e87c84bdb7157c7b688cffb286e17a5362d2fa5c5281f4fc2a2": 0, # Bronze Lv 21 + "78ebb45dd26b0050404305fdc1cb9ddc311d2c7e62400fd6348a3a488c69eee7": 0, # Bronze Lv 31 + "888c84f2dfd211cde0c595036574040ca96b1698578daab90ce6822d89f7fe0e": 0, # Bronze Lv 41 + 
"3fdfdd16c34ab7cdc9b7be3c04197e900928b368285ce639c1d3e1c0619eea6d": 0, # Bronze Lv 51 + "e8b7df4b88998380658d49d00e7bc483c740432ac417218e94fab4137bec4ae0": 0, # Bronze Lv 61 + "45cc69ca29f3981fa085b5337d2303a4eb555853daae1c29351b7ba46b27bbcd": 0, # Bronze Lv 71 + "8b4be1017beff0bcd1f7a48d8cdf7faf9f22c1ffd2bdeaaff2684da5cddeaa76": 0, # Bronze Lv 81 + "1b00b8cab530e98c378de2f3e8834d92ee41b4cd7b118179a8ecbccee83c8104": 0, # Bronze Lv 91 + + # Base level Silver = 6 + "f5d80c8b7370cda9a491bdf89e02bcd8c6ba1708189d907c7e4f55a719030264": 6, # Silver Lv 1 + "ddb6f3f79241b8af2fa77b52910f60a2332db5d8347b3039d1328ae6d1272a59": 6, # Silver Lv 11 + "c59072a340e6187116f5ae7456674dd6e1cba4b15781922d63fb94f56d9539c0": 6, # Silver Lv 21 + "624461e537900ce98e3178d1a298cba4830c14f6a81a8b36319da6273bed255a": 6, # Silver Lv 31 + "ba68d2c0f1b55e1991161cb1f88f369b97311452564b200ea1da226eb493e2e8": 6, # Silver Lv 41 + "3c078f588353feeb3f52b0198fade12a78573a01c53050aca890969a395ff66a": 6, # Silver Lv 51 + "f9bc9c6bb95f07f4e882b9e003ba7fa5ca6552fb8e0c27473a8b031714670116": 6, # Silver Lv 61 + "8aa9f56cdd250579dd8b0ce6bd835934fffe8c27b9ce609f046c19a4a81591f8": 6, # Silver Lv 71 + "32f84a58719318fa0aeee530ed3240952ba9945b998cd9e8150ebb583db0d4f6": 6, # Silver Lv 81 + "c95fa44c02a1eae89a7c8d503026f181f1cc565da93d47c6254fab2c3d8793ef": 6, # Silver Lv 91 + + # Base level Gold = 12 + "5ab5c29e0e1e33f338ae9afc37f51917b151016aef42d10d361baac3e0965df1": 12, # Gold Lv 1 + "7fd73e680007054dbb8ac5ea8757a565858b9d7dba19f389228101bda18f36b0": 12, # Gold Lv 11 + "0ada1b8721830853d3fbcfabf616e1841f2100279cff15b386093f69cc6c09ad": 12, # Gold Lv 21 + "7095ee84fc0a3aaac172120ffe0daa0d9abca33112e878cd863cd925cd8404b6": 12, # Gold Lv 31 + "fa410247dd3f5b7bf2eb1a65583f3b0a3c8800bcd6b512ab1c1c4d9dd81675ae": 12, # Gold Lv 41 + "a938ef37b673a240c4ade00d5a95f330b1e1ba93da9f0d3754bdb8a77bbbd7a1": 12, # Gold Lv 51 + "49afee29dc05547ceebe6c1f61a54f7105a0e1b7f2c8509ff2b4aeaf4d384c8e": 12, # Gold Lv 61 + "2c1464fb96d38839281c0bdb6e1a0cd06769782a5130609c13f6ca76fa358bcf": 12, # Gold Lv 71 + "98f6eea1a2a10576251d6c690c13d52aaac19b06811ed2b684b43e7a9318f622": 12, # Gold Lv 81 + "6e1036eab98de41694d785e076c32dbabe66962d38325117436b31210b003ad4": 12, # Gold Lv 91 + + # Base level Platinum = 18 + "69fde7abebb0bb5aa870e62362e84984cae13e441aec931a5e2c9dc5d22a56dc": 18, # Platinum Lv 1 + "9c84055f9d91a297ccd1bac163c144e52bcce981dc385ff9e2957c5bd4433452": 18, # Platinum Lv 11 + "97c803711cddc691bc458ec83dec73c570b0cc07219632c274bb5c5534786984": 18, # Platinum Lv 21 + "c562ec882ababf2030e40ad3ce27e38176899f732166a1b335fd8f83735261f3": 18, # Platinum Lv 31 + "da2cb4ab3281329c367cea51f9438c3d20d29ee07f55fa65762481777663f7f9": 18, # Platinum Lv 41 + "460670e4d61b9bf0bcde6d93a52e50f01541177a20aaf69bbda91fe4353ed2b0": 18, # Platinum Lv 51 + "5a019024b384de73f4348ed981ae58ec458a7ae6db68e0c44cda4d7062521b04": 18, # Platinum Lv 61 + "1d5a458ecaf00fe0ef494b4159412d30a4b58ee76b9f0ff44b1db14ed211273c": 18, # Platinum Lv 71 + "f1d43d87bbe5868cb99062ac02099001dd9f8215831347d8978e895468e81ef6": 18, # Platinum Lv 81 + "27b2d05f97179aae72c8f72b69978777e1c5022f77e84f28e5943be8e9cd1d49": 18, # Platinum Lv 91 + + # Base level Diamond = 24 + "5c83959aa079f9ed9fd633411289920568e616c5117b2a7bb280dd8c857f8406": 24, # Diamond Lv 1 + "ac14208753baf77110880020450fa4aa0121df0c344c32a2d20f77c18ba75db5": 24, # Diamond Lv 11 + "a42bcb3339e1b3c999fc2799b0787fd862e163ec504d7541fa3ea8893b83957a": 24, # Diamond Lv 21 + "7f1cc30ed6981974b6950666bb8236a6aa7b5a8579b14969394212dd7fa2951d": 24, 
# Diamond Lv 31 + "efe3ab1c85c6266199ac7539566d4c811b0ee17bc5fb3e3e7a48e9bc2473cf50": 24, # Diamond Lv 41 + "c7b9df20c91b10dc25bfdc847d069318ed9e8e69c5cad760803470caa9576e48": 24, # Diamond Lv 51 + "413bdc1e11f9b190ed2c6257a9f7ea021fd9fcef577d50efcf30a5ea8df989a4": 24, # Diamond Lv 61 + "625645c3c9af49eb315b504dba32137bb4081d348ec5b9750196b0ec0c9bb6a6": 24, # Diamond Lv 71 + "f9813603e19350bb6d458bbee3c8c2a177b6503e6ff54868e8d176fa424a0191": 24, # Diamond Lv 81 + "9e8600f97ea4a84d822d8b336f2b1dbfe7372fb9f2b6bf1d0336193567f6f943": 24, # Diamond Lv 91 / Max } + +PRESTIGE_STARS = { + # Prestige modifiers + "8de2fe5d938256a5725abe4b3655ee5e9067b7a1f4d5ff637d974eb9c2e4a1ea": 1, # 1 Bronze star + "755825d4a6768a22de17b48cfbe66ad85a54310ba5a8f8ab1e9c9a606b389354": 2, # 2 Bronze stars + "4a2c852a16043f613b7bfac33c8536dd9f9621a3d567174cb4ad9a80e3b13102": 3, # 3 Bronze stars + "bc80149bbd78d2f940984712485bce23ddaa6f2bd0edd1c0494464ef55251eef": 4, # 4 Bronze stars + "d35d380b7594b8f6af2d01040d80a5bfb6621553406c0905d4764bdc92a4ede8": 5, # 5 Bronze stars + + "426c754c76cd12e6aacd30293a67363571341eea37880df549d3e02015a588fe": 1, # 1 Silver star + "c137dd97008328ed94efc5a9ec446e024c9ac92fce89fa5b825c5b1d7ff8d807": 2, # 2 Silver stars + "9a7c57aee22733a47c2b562000861d687d0423a74eb5e609c425f10db5528ed9": 3, # 3 Silver stars + "b944cf1de6653b629c951fd14583069bc91b1f1b7efdb171203448b2dbc39917": 4, # 4 Silver stars + "9b838b75065248ec14360723e4caf523239128ff8c13bda36cfd0b59ef501c1e": 5, # 5 Silver stars + + "1858704e180db3578839aefdb83b89054f380fbb3d4c46b3ee12d34ed8af8712": 1, # 1 Gold/Platinum star + "e8568b9f9f5cac7016955f57c7b192ccd70f7b38504c7849efa8b1e3f7a1b077": 2, # 2 Gold/Platinum stars + "a25388825a0e00c946a23f5dd74c5b63f77f564231e0fd01e42ff2d1c9f10d38": 3, # 3 Gold/Platinum stars + "cff520765f143c521b25ad19e560abde9a90eeae79890b14146a60753d7baff8": 4, # 4 Gold/Platinum stars + "35fd7b9b98f57389c43e5a8e7ca989ca593c9f530985adf4670845bb598e1a9d": 5, # 5 Gold/Platinum stars + + "8033fa55e3de5e7655cd694340870da851cdef348d7dcb76411f3a9c2c93002c": 1, # 1 Diamond star + "605c201cf3f0d24b318f643acb812084ff284e660f2bb5d62b487847d33fad29": 2, # 2 Diamond stars + "1c8c752d0f2757dc0bcc9e3db76f81c3802c874164a3b661475e1c7bd67c571f": 3, # 3 Diamond stars + "58b1323ab2eb1298fa6be649a8d4d7f0e623523bd01964ed8fefd5175d9073c0": 4, # 4 Diamond stars + "cd877430ccc400c10e24507dba972e24a4543edc05628045300f1349cf003f3a": 5, # 5 Diamond stars +} \ No newline at end of file diff --git a/owapi/util.py b/owapi/util.py index 8b30b37..d73142e 100644 --- a/owapi/util.py +++ b/owapi/util.py @@ -1,69 +1,11 @@ -""" -Useful utilities. -""" -import logging import re import unidecode -from kyoukai.asphalt import HTTPRequestContext - -logger = logging.getLogger("OWAPI") HOUR_REGEX = re.compile(r"([0-9]*) hours?") MINUTE_REGEX = re.compile(r"([0-9]*) minutes?") SECOND_REGEX = re.compile(r"([0-9]*\.?[0-9]*) seconds?") -PERCENT_REGEX = re.compile(r"([0-9]{1,3})\s?\%") - - -async def with_cache(ctx: HTTPRequestContext, func, *args, expires: int = None, cache_404=False): - """ - Run a coroutine with cache. - - Stores the result in redis. - - Unless we don't have redis. 
- """ - if expires is None: - expires = 300 - - if ctx.app.config["owapi_cache_time"] is not None: - expires = ctx.app.config["owapi_cache_time"] - - if not ctx.app.config["owapi_use_redis"]: - # no caching without redis, just call the function - logger.info("Loading `{}` with disabled cache".format(repr(args))) - result = await func(ctx, *args) - return result - else: - import aioredis - assert isinstance(ctx.redis, aioredis.Redis) - built = func.__name__ + repr(args) - # Check for the key. - # Uses a simple func name + repr(args) as the key to use. - got = await ctx.redis.get(built) - if got and got != "None": - if await ctx.redis.ttl(built) == -1: - logger.info("Caching `{}` for `{}` seconds".format(built, expires)) - await ctx.redis.expire(built, expires) - - logger.info("Cache hit for `{}`".format(built)) - return got.decode() - - logger.info("Cache miss for `{}`".format(built)) - - # Call the function. - result = await func(ctx, *args) - if result is None and not cache_404: - # return None, no caching for 404s. - return None - - # Store the result as cached. - to_set = result if result else "None" - logger.info("Storing {} with expiration {}".format(built, expires)) - await ctx.redis.set(built, to_set, expire=expires) - if to_set == "None": - return None - return result +PERCENT_REGEX = re.compile(r"([0-9]{1,3})\s?%") def int_or_string(val: str): diff --git a/owapi/v3/__init__.py b/owapi/v3/__init__.py deleted file mode 100644 index dda6e68..0000000 --- a/owapi/v3/__init__.py +++ /dev/null @@ -1,285 +0,0 @@ -""" -api_v3 routes. -""" -import json - -from kyoukai import Blueprint -from kyoukai.asphalt import HTTPRequestContext -from werkzeug.wrappers import Response - -from owapi.blizz_interface import fetch_all_user_pages -from owapi.blizz_interface import get_all_heroes -from owapi.blizz_interface import get_hero_data -from owapi.v3 import parsing -from owapi.v3.v3_util import with_ratelimit - -api_v3 = Blueprint("api_v3", prefix="/v3") - - -@api_v3.after_request -async def add__request(ctx: HTTPRequestContext, r: Response): - # Edit the body, and add a _request. - if isinstance(r.response, list): - h = r.response[0] - else: - h = r.response - if isinstance(h, dict): - # Add a _request var to the body. - h["_request"] = { - "api_ver": 3, - "route": ctx.request.path - } - - return r - - -@api_v3.errorhandler(404) -async def e404(ctx: HTTPRequestContext, exc): - return json.dumps({"error": 404, "msg": "profile not found"}), \ - 404, \ - {"Retry-After": 300, - "Content-Type": "application/json"} - - -@api_v3.route("/u//blob", reverse_hooks=True) -@with_ratelimit("blob", timelimit=5, max_reqs=1) -async def get_blob(ctx: HTTPRequestContext, battletag: str): - """ - Returns a giant blob of data. 
- """ - pages = await fetch_all_user_pages(ctx, battletag, - platform=ctx.request.args.get("platform", "pc")) - - built_dict = {} - for region, result in pages.items(): - if result is None: - built_dict[region] = None - continue - - status = result.xpath(".//p[@class='masthead-permission-level-text']")[0].text - if status == "Private Profile": - return {"error": "Private"}, 403 - - d = {"heroes": {"playtime": {"competitive": {}, "quickplay": {}}, - "stats": {"competitive": {}, "quickplay": {}}}, - "stats": {}, - "achievements": {}} - - d["stats"]["quickplay"] = parsing.bl_parse_stats(result, status=status) - d["stats"]["competitive"] = parsing.bl_parse_stats(result, mode="competitive", - status=status) - - d["heroes"]["stats"]["quickplay"] = parsing.bl_parse_hero_data(result) - d["heroes"]["playtime"]["quickplay"] = parsing.bl_parse_all_heroes(result) - - d["heroes"]["stats"]["competitive"] = parsing.bl_parse_hero_data(result, mode="competitive") - d["heroes"]["playtime"]["competitive"] = parsing.bl_parse_all_heroes(result, - mode="competitive") - - d["achievements"] = parsing.bl_parse_achievement_data(result) - - built_dict[region] = d - - return built_dict - - -@api_v3.route("/u//stats", reverse_hooks=True) -@with_ratelimit("stats") -async def get_stats(ctx: HTTPRequestContext, battletag: str): - """ - Fetches stats about the user. - """ - pages = await fetch_all_user_pages(ctx, battletag, - platform=ctx.request.args.get("platform", "pc")) - - built_dict = {} - for region, result in pages.items(): - if result is None: - built_dict[region] = None - continue - - status = result.xpath(".//p[@class='masthead-permission-level-text']")[0].text - # if status == "Private Profile": - # return {"error": "Private"}, 403 - - d = { - "stats": {}, - } - - d["stats"]["quickplay"] = parsing.bl_parse_stats(result, status=status) - d["stats"]["competitive"] = parsing.bl_parse_stats(result, mode="competitive", status=status) - - built_dict[region] = d - - return built_dict - - -@api_v3.route("/u//heroes", reverse_hooks=True) -@with_ratelimit("stats") -async def get_heroes(ctx: HTTPRequestContext, battletag: str): - """ - Fetches hero stats, in one big blob. - """ - pages = await fetch_all_user_pages(ctx, battletag, - platform=ctx.request.args.get("platform", "pc")) - - built_dict = {} - for region, result in pages.items(): - if result is None: - built_dict[region] = None - continue - - status = result.xpath(".//p[@class='masthead-permission-level-text']")[0].text - if status == "Private Profile": - return {"error": "Private"}, 403 - - d = { - "heroes": {"playtime": {"competitive": {}, "quickplay": {}}, - "stats": {"competitive": {}, "quickplay": {}}}, - } - - d["heroes"]["stats"]["quickplay"] = parsing.bl_parse_hero_data(result) - d["heroes"]["playtime"]["quickplay"] = parsing.bl_parse_all_heroes(result) - - d["heroes"]["stats"]["competitive"] = parsing.bl_parse_hero_data(result, mode="competitive") - d["heroes"]["playtime"]["competitive"] = parsing.bl_parse_all_heroes(result, - mode="competitive") - - built_dict[region] = d - - return built_dict - - -# Separate routes. -@api_v3.route("/u//heroes/quickplay", reverse_hooks=True) -@with_ratelimit("stats") -async def get_heroes_qp(ctx: HTTPRequestContext, battletag: str): - """ - Fetches hero stats, for quick-play. 
- """ - pages = await fetch_all_user_pages(ctx, battletag, - platform=ctx.request.args.get("platform", "pc")) - - built_dict = {} - for region, result in pages.items(): - if result is None: - built_dict[region] = None - continue - - status = result.xpath(".//p[@class='masthead-permission-level-text']")[0].text - if status == "Private Profile": - return {"error": "Private"}, 403 - - d = { - "heroes": {"playtime": {"competitive": {}, "quickplay": {}}, - "stats": {"competitive": {}, "quickplay": {}}}, - } - - d["heroes"]["stats"]["quickplay"] = parsing.bl_parse_hero_data(result) - - d["heroes"]["playtime"]["quickplay"] = parsing.bl_parse_all_heroes(result) - - built_dict[region] = d - - return built_dict - - -@api_v3.route("/u//heroes/competitive", reverse_hooks=True) -@with_ratelimit("stats") -async def get_heroes_comp(ctx: HTTPRequestContext, battletag: str): - """ - Fetches hero stats, for competitive. - """ - pages = await fetch_all_user_pages(ctx, battletag, - platform=ctx.request.args.get("platform", "pc")) - - built_dict = {} - for region, result in pages.items(): - if result is None: - built_dict[region] = None - continue - - status = result.xpath(".//p[@class='masthead-permission-level-text']")[0].text - if status == "Private Profile": - return {"error": "Private"}, 403 - - d = { - "heroes": { - "playtime": - { - "competitive": {}, - "quickplay": {} - }, - "stats": - { - "competitive": {}, - "quickplay": {} - } - }, - } - - d["heroes"]["stats"]["competitive"] = parsing.bl_parse_hero_data(result, mode="competitive") - - d["heroes"]["playtime"]["competitive"] = parsing.bl_parse_all_heroes(result, - mode="competitive") - - built_dict[region] = d - - return built_dict - - -@api_v3.route("/u//achievements", reverse_hooks=True) -@with_ratelimit("stats") -async def get_achievements(ctx: HTTPRequestContext, battletag: str): - """ - Fetches hero stats, for competitive. - """ - pages = await fetch_all_user_pages(ctx, battletag, - platform=ctx.request.args.get("platform", "pc")) - - built_dict = {} - for region, result in pages.items(): - if result is None: - built_dict[region] = None - continue - - status = result.xpath(".//p[@class='masthead-permission-level-text']")[0].text - if status == "Private Profile": - return {"error": "Private"}, 403 - - d = {"achievements": parsing.bl_parse_achievement_data(result)} - - built_dict[region] = d - - return built_dict - - -@api_v3.route("/heroes", reverse_hooks=True) -async def get_hero_list(ctx: HTTPRequestContext): - """ - Send hero list. - """ - parsed = await get_all_heroes(ctx) - heroes = parsing.bl_get_all_heroes(parsed) - - built_dict = {"Offense": {}, "Defense": {}, "Tank": {}, "Support": {}} - for hero in heroes: - _parsed = await get_hero_data(ctx, hero.lower()) - retHero = parsing.bl_find_heroes(_parsed) - built_dict[retHero["role"]][hero] = retHero - - return built_dict - - -@api_v3.route("/heroes/", reverse_hooks=True) -async def get_hero(ctx: HTTPRequestContext, hero: str): - """ - Send hero data for selected hero. - """ - parsed = await get_hero_data(ctx, hero) - _hero = parsing.bl_find_heroes(parsed) - _hero["name"] = hero - return _hero - - -get_achievements.should_convert = False diff --git a/owapi/v3/v3_util.py b/owapi/v3/v3_util.py deleted file mode 100644 index 1e938a6..0000000 --- a/owapi/v3/v3_util.py +++ /dev/null @@ -1,128 +0,0 @@ -""" -v3-specific utilities. 
-""" -import functools -import os -import re -import shutil -from ruamel import yaml - -from kyoukai.asphalt import HTTPRequestContext - -RATES_PATH = os.path.join(os.getcwd(), "rates.yml") - -# No default user agents. -# Customize them, please. -DISALLOW_AGENTS = re.compile(r"(?:.*aiohttp/.*|.*python-requests/.*)") - -# Bad useragent response text. -BAD_USERAGENT = { - "error": 400, - "msg": "Hi! To prevent abuse of this service, it is required that you " - "customize your user agent." - }, 400, {"Content-Type": "application/json"} - -if not os.path.exists(RATES_PATH): - shutil.copy(os.path.join(os.getcwd(), "rates.default.yml"), RATES_PATH) - -with open(RATES_PATH) as r: - ratelimits = yaml.load(r, Loader=yaml.Loader).get("rates") - -compiled = [] - -# Compile the ratelimits. -for key, val in ratelimits.items(): - compiled.append((re.compile(key), val)) - -# Deref as we don't use it anymore -del ratelimits - - -def check_default_useragents(useragent: str): - """ - Checks if the user agent matches a disallowed one. - """ - return DISALLOW_AGENTS.match(useragent) - - -def with_ratelimit(bucket: str, timelimit: int = None, max_reqs: int = 0): - """ - Defines a function to rate limit for. - - Rate limits are stored in `rates.yml`. - """ - - # Compile regular expressions - def _rl_inner1(func): - @functools.wraps(func) - async def _rl_inner2(ctx: HTTPRequestContext, *args, **kwargs): - """ - Inner ratelimit function. - """ - if ctx.app.config["owapi_disable_ratelimits"]: - # Don't bother with ratelimits. - return await func(ctx, *args, **kwargs) - - # only ratelimit if we have redis. Can't make this decision in - # outer functions because they are called before globalsettings are set - if ctx.app.config["owapi_use_redis"]: - import aioredis - assert isinstance(ctx.redis, aioredis.Redis) - # Get the IP. - ip = ctx.request.headers.get("X-Real-IP") or \ - ctx.request.headers.get("X-Forwarded-For") or ctx.request.remote_addr - - # Build the ratelimit string. - built = "{bucket}:{ip}:ratelimit".format(bucket=bucket, ip=ip) - - # Check the user agent before. - user_agent = ctx.request.headers.get("User-Agent") - - if user_agent is None: - return BAD_USERAGENT - - if check_default_useragents(user_agent): - return BAD_USERAGENT - - # Load the rate limit based on the regular expression provided. - for regex, rates in compiled: - if regex.match(user_agent): - break - else: - # UH OH - raise RuntimeError("Failed to match User-Agent - did you wipe rates.yml?") - - _timelimit = timelimit or rates.get("time", 1) - _max_reqs = max_reqs or rates.get("max_reqs", 1) - - # Redis-based ratelimiting. - # First, check if the key even exists. - if not (await ctx.redis.exists(built)): - # LPUSH, and EXPIRE it. - await ctx.redis.lpush(built, _max_reqs) - await ctx.redis.expire(built, _timelimit) - else: - # LLEN it. - tries = await ctx.redis.llen(built) - if tries >= max_reqs: - # 429 You Are Being Ratelimited. - ttl = await ctx.redis.ttl(built) - - if ttl == -1: - # wtf - await ctx.redis.expire(built, _timelimit) - ttl = _timelimit - - return {"error": 429, "msg": "you are being ratelimited", - "retry": ttl}, 429, {"Retry-After": ttl} - - # LPUSH a `1` or something onto the edge of the list. - # The actual value doesn't matter. - await ctx.redis.lpush(built, 1) - - # Now, await the underlying function. 
- return await func(ctx, *args, **kwargs) - - return _rl_inner2 - - return _rl_inner1 diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..3501718 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,391 @@ +[[package]] +category = "main" +description = "File support for asyncio." +name = "aiofiles" +optional = false +python-versions = "*" +version = "0.4.0" + +[[package]] +category = "main" +description = "asks - async http" +name = "asks" +optional = false +python-versions = "*" +version = "2.2.0" + +[package.dependencies] +async_generator = "*" +h11 = "*" +multio = ">=0.2.3" + +[[package]] +category = "main" +description = "Async generators and context managers for Python 3.5+" +name = "async-generator" +optional = false +python-versions = ">=3.5" +version = "1.10" + +[[package]] +category = "main" +description = "Classes Without Boilerplate" +name = "attrs" +optional = false +python-versions = "*" +version = "18.2.0" + +[package.extras] +dev = ["coverage", "hypothesis", "pympler", "pytest", "six", "zope.interface", "sphinx", "zope.interface", "pre-commit"] +docs = ["sphinx", "zope.interface"] +tests = ["coverage", "hypothesis", "pympler", "pytest", "six", "zope.interface"] + +[[package]] +category = "main" +description = "Fast, simple object-to-object and broadcast signaling" +name = "blinker" +optional = false +python-versions = "*" +version = "1.4" + +[[package]] +category = "main" +description = "Foreign Function Interface for Python calling C code." +marker = "os_name == \"nt\"" +name = "cffi" +optional = false +python-versions = "*" +version = "1.11.5" + +[package.dependencies] +pycparser = "*" + +[[package]] +category = "main" +description = "Universal encoding detector for Python 2 and 3" +name = "chardet" +optional = false +python-versions = "*" +version = "3.0.4" + +[[package]] +category = "main" +description = "Composable command line interface toolkit" +name = "click" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "7.0" + +[[package]] +category = "main" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +name = "h11" +optional = false +python-versions = "*" +version = "0.8.1" + +[[package]] +category = "main" +description = "HTTP/2 State-Machine based protocol implementation" +name = "h2" +optional = false +python-versions = "*" +version = "3.1.0" + +[package.dependencies] +hpack = ">=2.3,<4" +hyperframe = ">=5.2.0,<6" + +[[package]] +category = "main" +description = "Pure-Python HPACK header compression" +name = "hpack" +optional = false +python-versions = "*" +version = "3.0.0" + +[[package]] +category = "main" +description = "Fast C based HTML 5 parsing for python" +name = "html5-parser" +optional = false +python-versions = "*" +version = "0.4.5" + +[package.dependencies] +chardet = "*" +lxml = ">=3.8.0" + +[package.extras] +soup = ["beautifulsoup4"] + +[[package]] +category = "main" +description = "A ASGI Server based on Hyper libraries and inspired by Gunicorn." 
+name = "hypercorn" +optional = false +python-versions = ">=3.6.1" +version = "0.4.6" + +[package.dependencies] +h11 = "*" +h2 = "*" +pytoml = "*" +typing-extensions = "*" +wsproto = ">=0.12.0" + +[package.extras] +trio = ["trio (>=0.9.0)"] +uvloop = ["uvloop"] + +[[package]] +category = "main" +description = "HTTP/2 framing layer for Python" +name = "hyperframe" +optional = false +python-versions = "*" +version = "5.2.0" + +[[package]] +category = "main" +description = "Internationalized Domain Names in Applications (IDNA)" +name = "idna" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.8" + +[[package]] +category = "main" +description = "Various helpers to pass data to untrusted environments and back." +name = "itsdangerous" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.1.0" + +[[package]] +category = "main" +description = "A small but fast and easy to use stand-alone template engine written in pure python." +name = "jinja2" +optional = false +python-versions = "*" +version = "2.10" + +[package.dependencies] +MarkupSafe = ">=0.23" + +[package.extras] +i18n = ["Babel (>=0.8)"] + +[[package]] +category = "main" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +name = "lxml" +optional = false +python-versions = "*" +version = "4.3.0" + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["beautifulsoup4"] +source = ["Cython (>=0.29.1)"] + +[[package]] +category = "main" +description = "Safely add untrusted strings to HTML/XML markup." +name = "markupsafe" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +version = "1.1.0" + +[[package]] +category = "main" +description = "multidict implementation" +name = "multidict" +optional = false +python-versions = ">=3.4.1" +version = "4.5.2" + +[[package]] +category = "main" +description = "multio - an unified async library for curio and trio" +name = "multio" +optional = false +python-versions = ">=3.5.2" +version = "0.2.4" + +[package.extras] +curio = ["curio (>=0.7.0)"] +trio = ["trio (>=0.2.0)"] + +[[package]] +category = "main" +description = "Capture the outcome of Python function calls." +name = "outcome" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.0.0" + +[package.dependencies] +attrs = "*" + +[[package]] +category = "main" +description = "C parser in Python" +marker = "os_name == \"nt\"" +name = "pycparser" +optional = false +python-versions = "*" +version = "2.19" + +[[package]] +category = "main" +description = "A parser for TOML-0.4.0" +name = "pytoml" +optional = false +python-versions = "*" +version = "0.1.20" + +[[package]] +category = "main" +description = "A Python ASGI web microframework with the same API as Flask" +name = "quart" +optional = false +python-versions = ">=3.7.0" +version = "0.7.2" + +[package.dependencies] +aiofiles = "*" +blinker = "*" +click = "*" +hypercorn = ">=0.4" +itsdangerous = "*" +jinja2 = "*" +multidict = "*" +sortedcontainers = "*" + +[[package]] +category = "main" +description = "A Quart extension to provide trio support." 
+name = "quart-trio" +optional = false +python-versions = ">=3.7.0" +version = "0.1.0" + +[package.dependencies] +quart = ">=0.7.0" +trio = ">=0.9.0" + +[package.dependencies.hypercorn] +extras = ["trio"] +version = "*" + +[[package]] +category = "main" +description = "Sniff out which async library your code is running under" +name = "sniffio" +optional = false +python-versions = ">=3.5" +version = "1.0.0" + +[[package]] +category = "main" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +name = "sortedcontainers" +optional = false +python-versions = "*" +version = "2.1.0" + +[[package]] +category = "main" +description = "An async/await-native I/O library for humans and snake people" +name = "trio" +optional = false +python-versions = ">=3.5" +version = "0.10.0" + +[package.dependencies] +async_generator = ">=1.9" +attrs = ">=18.1.0" +cffi = "*" +idna = "*" +outcome = "*" +sniffio = "*" +sortedcontainers = "*" + +[[package]] +category = "main" +description = "Type Hints for Python" +name = "typing" +optional = false +python-versions = "*" +version = "3.6.6" + +[[package]] +category = "main" +description = "Backported and Experimental Type Hints for Python 3.5+" +name = "typing-extensions" +optional = false +python-versions = "*" +version = "3.7.2" + +[package.dependencies] +typing = ">=3.6.2" + +[[package]] +category = "main" +description = "ASCII transliterations of Unicode text" +name = "unidecode" +optional = false +python-versions = "*" +version = "1.0.23" + +[[package]] +category = "main" +description = "WebSockets state-machine based protocol implementation" +name = "wsproto" +optional = false +python-versions = "*" +version = "0.12.0" + +[package.dependencies] +h11 = ">=0.8.0,<0.9.0" + +[metadata] +content-hash = "bc77b1449d23de6234596a15067006faf2727f3d4b0baf7174a61ee9e4e019e5" +python-versions = "^3.7" + +[metadata.hashes] +aiofiles = ["021ea0ba314a86027c166ecc4b4c07f2d40fc0f4b3a950d1868a0f2571c2bbee", "1e644c2573f953664368de28d2aa4c89dfd64550429d0c27c4680ccd3aa4985d"] +asks = ["1679e5bd1dfa6c5d2220bdf2b8921c9c0d063d08370a7c66b9e167113681406f"] +async-generator = ["01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b", "6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"] +attrs = ["10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69", "ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb"] +blinker = ["471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6"] +cffi = ["151b7eefd035c56b2b2e1eb9963c90c6302dc15fbd8c1c0a83a163ff2c7d7743", "1553d1e99f035ace1c0544050622b7bc963374a00c467edafac50ad7bd276aef", "1b0493c091a1898f1136e3f4f991a784437fac3673780ff9de3bcf46c80b6b50", "2ba8a45822b7aee805ab49abfe7eec16b90587f7f26df20c71dd89e45a97076f", "3bb6bd7266598f318063e584378b8e27c67de998a43362e8fce664c54ee52d30", "3c85641778460581c42924384f5e68076d724ceac0f267d66c757f7535069c93", "3eb6434197633b7748cea30bf0ba9f66727cdce45117a712b29a443943733257", "495c5c2d43bf6cebe0178eb3e88f9c4aa48d8934aa6e3cddb865c058da76756b", "4c91af6e967c2015729d3e69c2e51d92f9898c330d6a851bf8f121236f3defd3", "57b2533356cb2d8fac1555815929f7f5f14d68ac77b085d2326b571310f34f6e", "770f3782b31f50b68627e22f91cb182c48c47c02eb405fd689472aa7b7aa16dc", "79f9b6f7c46ae1f8ded75f68cf8ad50e5729ed4d590c74840471fc2823457d04", "7a33145e04d44ce95bcd71e522b478d282ad0eafaf34fe1ec5bbd73e662f22b6", "857959354ae3a6fa3da6651b966d13b0a8bed6bbc87a0de7b38a549db1d2a359", "87f37fe5130574ff76c17cab61e7d2538a16f843bb7bca8ebbc4b12de3078596", 
"95d5251e4b5ca00061f9d9f3d6fe537247e145a8524ae9fd30a2f8fbce993b5b", "9d1d3e63a4afdc29bd76ce6aa9d58c771cd1599fbba8cf5057e7860b203710dd", "a36c5c154f9d42ec176e6e620cb0dd275744aa1d804786a71ac37dc3661a5e95", "a6a5cb8809091ec9ac03edde9304b3ad82ad4466333432b16d78ef40e0cce0d5", "ae5e35a2c189d397b91034642cb0eab0e346f776ec2eb44a49a459e6615d6e2e", "b0f7d4a3df8f06cf49f9f121bead236e328074de6449866515cea4907bbc63d6", "b75110fb114fa366b29a027d0c9be3709579602ae111ff61674d28c93606acca", "ba5e697569f84b13640c9e193170e89c13c6244c24400fc57e88724ef610cd31", "be2a9b390f77fd7676d80bc3cdc4f8edb940d8c198ed2d8c0be1319018c778e1", "ca1bd81f40adc59011f58159e4aa6445fc585a32bb8ac9badf7a2c1aa23822f2", "d5d8555d9bfc3f02385c1c37e9f998e2011f0db4f90e250e5bc0c0a85a813085", "e55e22ac0a30023426564b1059b035973ec82186ddddbac867078435801c7801", "e90f17980e6ab0f3c2f3730e56d1fe9bcba1891eeea58966e89d352492cc74f4", "ecbb7b01409e9b782df5ded849c178a0aa7c906cf8c5a67368047daab282b184", "ed01918d545a38998bfa5902c7c00e0fee90e957ce036a4000a88e3fe2264917", "edabd457cd23a02965166026fd9bfd196f4324fe6032e866d0f3bd0301cd486f", "fdf1c1dc5bafc32bc5d08b054f94d659422b05aba244d6be4ddc1c72d9aa70fb"] +chardet = ["84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", "fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"] +click = ["2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"] +h11 = ["acca6a44cb52a32ab442b1779adf0875c443c689e9e028f8d831a3769f9c5208", "f2b1ca39bfed357d1f19ac732913d5f9faa54a5062eca7d2ec3a916cfb7ae4c7"] +h2 = ["c8f387e0e4878904d4978cd688a3195f6b169d49b1ffa572a3d347d7adc5e09f", "fd07e865a3272ac6ef195d8904de92dc7b38dc28297ec39cfa22716b6d62e6eb"] +hpack = ["0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89", "8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"] +html5-parser = ["a903ef8b93b51788a6d1604b3833303e9f2f8db488306ee4241436d2f518bd06"] +hypercorn = ["84c53ef8248a4067c6ec0b7c9a8a850eed2c85e660b1afdaac37f4f8cc8420f2", "d3bd0ec5f6377f3eb1aea85314b74ef6720ab5e28be4afb8cced038efea9c6b9"] +hyperframe = ["5187962cb16dcc078f23cb5a4b110098d546c3f41ff2d4038a9896893bbd0b40", "a9f5c17f2cc3c719b917c4f33ed1c61bd1f8dfac4b1bd23b7c80b3400971b41f"] +idna = ["c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", "ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"] +itsdangerous = ["321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", "b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"] +jinja2 = ["74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", "f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4"] +lxml = ["0dd6589fa75d369ba06d2b5f38dae107f76ea127f212f6a7bee134f6df2d1d21", "1afbac344aa68c29e81ab56c1a9411c3663157b5aee5065b7fa030b398d4f7e0", "1baad9d073692421ad5dbbd81430aba6c7f5fdc347f03537ae046ddf2c9b2297", "1d8736421a2358becd3edf20260e41a06a0bf08a560480d3a5734a6bcbacf591", "1e1d9bddc5afaddf0de76246d3f2152f961697ad7439c559f179002682c45801", "1f179dc8b2643715f020f4d119d5529b02cd794c1c8f305868b73b8674d2a03f", "241fb7bdf97cb1df1edfa8f0bcdfd80525d4023dac4523a241907c8b2f44e541", "2f9765ee5acd3dbdcdc0d0c79309e01f7c16bc8d39b49250bf88de7b46daaf58", "312e1e1b1c3ce0c67e0b8105317323e12807955e8186872affb667dbd67971f6", "3273db1a8055ca70257fd3691c6d2c216544e1a70b673543e15cc077d8e9c730", "34dfaa8c02891f9a246b17a732ca3e99c5e42802416628e740a5d1cb2f50ff49", "3aa3f5288af349a0f3a96448ebf2e57e17332d99f4f30b02093b7948bd9f94cc", 
"51102e160b9d83c1cc435162d90b8e3c8c93b28d18d87b60c56522d332d26879", "56115fc2e2a4140e8994eb9585119a1ae9223b506826089a3ba753a62bd194a6", "69d83de14dbe8fe51dccfd36f88bf0b40f5debeac763edf9f8325180190eba6e", "99fdce94aeaa3ccbdfcb1e23b34273605c5853aa92ec23d84c84765178662c6c", "a7c0cd5b8a20f3093ee4a67374ccb3b8a126743b15a4d759e2a1bf098faac2b2", "abe12886554634ed95416a46701a917784cb2b4c77bfacac6916681d49bbf83d", "b4f67b5183bd5f9bafaeb76ad119e977ba570d2b0e61202f534ac9b5c33b4485", "bdd7c1658475cc1b867b36d5c4ed4bc316be8d3368abe03d348ba906a1f83b0e", "c6f24149a19f611a415a51b9bc5f17b6c2f698e0d6b41ffb3fa9f24d35d05d73", "d1e111b3ab98613115a208c1017f266478b0ab224a67bc8eac670fa0bad7d488", "d6520aa965773bbab6cb7a791d5895b00d02cf9adc93ac2bf4edb9ac1a6addc5", "dd185cde2ccad7b649593b0cda72021bc8a91667417001dbaf24cd746ecb7c11", "de2e5b0828a9d285f909b5d2e9d43f1cf6cf21fe65bc7660bdaa1780c7b58298", "f726444b8e909c4f41b4fde416e1071cf28fa84634bfb4befdf400933b6463af"] +markupsafe = ["048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432", "130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b", "19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9", "1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af", "1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834", "1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd", "1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d", "31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7", "3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b", "4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3", "525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c", "52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2", "52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7", "5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36", "5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1", "5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e", "7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1", "83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c", "857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856", "98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550", "bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492", "d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672", "e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401", "edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6", "efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6", "f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c", "f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd", "fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1"] +multidict = ["024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f", "041e9442b11409be5e4fc8b6a97e4bcead758ab1e11768d1e69160bdde18acc3", "045b4dd0e5f6121e6f314d81759abd2c257db4634260abcfe0d3f7083c4908ef", "047c0a04e382ef8bd74b0de01407e8d8632d7d1b4db6f2561106af812a68741b", "068167c2d7bbeebd359665ac4fff756be5ffac9cda02375b5c5a7c4777038e73", "148ff60e0fffa2f5fad2eb25aae7bef23d8f3b8bdaf947a65cdbe84a978092bc", "1d1c77013a259971a72ddaa83b9f42c80a93ff12df6a4723be99d858fa30bee3", "1d48bc124a6b7a55006d97917f695effa9725d05abe8ee78fd60d6588b8344cd", "31dfa2fc323097f8ad7acd41aa38d7c614dd1960ac6681745b6da124093dc351", 
"34f82db7f80c49f38b032c5abb605c458bac997a6c3142e0d6c130be6fb2b941", "3d5dd8e5998fb4ace04789d1d008e2bb532de501218519d70bb672c4c5a2fc5d", "4a6ae52bd3ee41ee0f3acf4c60ceb3f44e0e3bc52ab7da1c2b2aa6703363a3d1", "4b02a3b2a2f01d0490dd39321c74273fed0568568ea0e7ea23e02bd1fb10a10b", "4b843f8e1dd6a3195679d9838eb4670222e8b8d01bc36c9894d6c3538316fa0a", "5de53a28f40ef3c4fd57aeab6b590c2c663de87a5af76136ced519923d3efbb3", "61b2b33ede821b94fa99ce0b09c9ece049c7067a33b279f343adfe35108a4ea7", "6a3a9b0f45fd75dc05d8e93dc21b18fc1670135ec9544d1ad4acbcf6b86781d0", "76ad8e4c69dadbb31bad17c16baee61c0d1a4a73bed2590b741b2e1a46d3edd0", "7ba19b777dc00194d1b473180d4ca89a054dd18de27d0ee2e42a103ec9b7d014", "7c1b7eab7a49aa96f3db1f716f0113a8a2e93c7375dd3d5d21c4941f1405c9c5", "7fc0eee3046041387cbace9314926aa48b681202f8897f8bff3809967a049036", "8ccd1c5fff1aa1427100ce188557fc31f1e0a383ad8ec42c559aabd4ff08802d", "8e08dd76de80539d613654915a2f5196dbccc67448df291e69a88712ea21e24a", "c18498c50c59263841862ea0501da9f2b3659c00db54abfbf823a80787fde8ce", "c49db89d602c24928e68c0d510f4fcf8989d77defd01c973d6cbe27e684833b1", "ce20044d0317649ddbb4e54dab3c1bcc7483c78c27d3f58ab3d0c7e6bc60d26a", "d1071414dd06ca2eafa90c85a079169bfeb0e5f57fd0b45d44c092546fcd6fd9", "d3be11ac43ab1a3e979dac80843b42226d5d3cccd3986f2e03152720a4297cd7", "db603a1c235d110c860d5f39988ebc8218ee028f07a7cbc056ba6424372ca31b"] +multio = ["e8bce12aa8d2e076d96f4c4b6bfb70c01e0e0af9892f9ffc4ec868854e1b877e"] +outcome = ["7357af9ba2a08fdff8c742818909c5d146fc1fe75aee4bddadaa4f8ad726d262", "9d58c05db36a900ce60c6da0167d76e28869f64b338d60fa3a61841cfa54ac71"] +pycparser = ["a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"] +pytoml = ["ca2d0cb127c938b8b76a9a0d0f855cf930c1d50cc3a0af6d3595b566519a1013"] +quart = ["a07d139a6ac05c27db965f8bd444a4adf47f2d14c475aa08991cd734135aa293", "a918745611572a2ebaad0a5890c9cffdf7db0b8aa87ed18341139aca96f05234"] +quart-trio = ["96d3e15832c3970008d70e6bc1317a5059e9b82eba63c37614a44c8619d7f4e9", "fc77cbce48ad389add4780bbdf0912f1a0728b51fcbb24066fefa0cd55684fdd"] +sniffio = ["2e9b81429e3b7c9e119fcee2673ee3be3229982adc68b3f59317863aba05ebb7", "afb4997584a920e6e378a81ded2b3e71a696b85a68c4bfbe4dadf1ba57a9ef45"] +sortedcontainers = ["974e9a32f56b17c1bac2aebd9dcf197f3eb9cd30553c5852a3187ad162e1a03a", "d9e96492dd51fae31e60837736b38fe42a187b5404c16606ff7ee7cd582d4c60"] +trio = ["d323cc15f6406d15954af91e5e34af2001cc24163fdde29e3f88a227a1b53ab0"] +typing = ["4027c5f6127a6267a435201981ba156de91ad0d1d98e9ddc2aa173453453492d", "57dcf675a99b74d64dacf6fba08fb17cf7e3d5fdff53d4a30ea2a5e7e52543d4", "a4c8473ce11a65999c8f59cb093e70686b6c84c98df58c1dae9b3b196089858a"] +typing-extensions = ["07b2c978670896022a43c4b915df8958bec4a6b84add7f2c87b2b728bda3ba64", "f3f0e67e1d42de47b5c67c32c9b26641642e9170fe7e292991793705cd5fef7c", "fb2cd053238d33a8ec939190f30cfd736c00653a85a2919415cecf7dc3d9da71"] +unidecode = ["092cdf7ad9d1052c50313426a625b717dab52f7ac58f859e09ea020953b1ad8f", "8b85354be8fd0c0e10adbf0675f6dc2310e56fda43fa8fe049123b6c475e52fb"] +wsproto = ["1fcb726d448f1b9bcbea884e26621af5ddd01d2d502941a024f4c727828b6009", "6a51cf18d9de612892b9c1d38a8c1bdadec0cfe15de61cd5c0f09174bf0c7e82"]