From bc6cf7f7848b847e709eb0081fb05c230132eac5 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 8 Apr 2024 15:07:10 -0700 Subject: [PATCH 1/2] update project files * update pre-commit hook * upgrade pip with venv * update description and version * show url in publish environment * update versions * update versions, separate typing job * use dependabot grouped updates ignore upload/download-artifact until slsa updates * use sphinx.ext.extlinks instead of sphinx-issues * update dev dependencies * update editorconfig * update gitignore * update .readthedocs.yaml * license is txt, readme is md * update pyproject.toml add typed classifier add pyright config simplify urls * tox builds docs in place * update min test py version * add tox env to update all dev dependencies * update issue and pr templates * rename security docs page to not conflict with org policy file * simplify matrix * readme is markdown, remove install and links sections --- .devcontainer/on-create-command.sh | 4 +- .editorconfig | 2 +- .github/ISSUE_TEMPLATE/bug-report.md | 4 +- .github/ISSUE_TEMPLATE/config.yml | 14 +- .github/ISSUE_TEMPLATE/feature-request.md | 2 +- .github/dependabot.yml | 29 ++- .github/pull_request_template.md | 25 +-- .github/workflows/lock.yaml | 16 +- .github/workflows/publish.yaml | 29 +-- .github/workflows/tests.yaml | 51 +++--- .gitignore | 10 +- .pre-commit-config.yaml | 4 +- .readthedocs.yaml | 4 +- CHANGES.rst | 4 +- LICENSE.rst => LICENSE.txt | 0 README.rst => README.md | 66 +++---- docs/conf.py | 23 +-- docs/license.rst | 3 +- pyproject.toml | 42 ++--- requirements/build.txt | 13 +- requirements/dev.in | 7 +- requirements/dev.txt | 214 ++++++++++++++++++---- requirements/docs.in | 5 +- requirements/docs.txt | 48 +++-- requirements/tests.in | 5 +- requirements/tests.txt | 43 +++-- requirements/typing.in | 2 + requirements/typing.txt | 42 +++-- tox.ini | 18 +- 29 files changed, 438 insertions(+), 291 deletions(-) rename LICENSE.rst => LICENSE.txt (100%) rename README.rst => README.md (56%) diff --git a/.devcontainer/on-create-command.sh b/.devcontainer/on-create-command.sh index fdf77952f..eaebea618 100755 --- a/.devcontainer/on-create-command.sh +++ b/.devcontainer/on-create-command.sh @@ -1,9 +1,7 @@ #!/bin/bash set -e - -python3 -m venv .venv +python3 -m venv --upgrade-deps .venv . .venv/bin/activate -pip install -U pip pip install -r requirements/dev.txt pip install -e . pre-commit install --install-hooks diff --git a/.editorconfig b/.editorconfig index e32c8029d..2ff985a67 100644 --- a/.editorconfig +++ b/.editorconfig @@ -9,5 +9,5 @@ end_of_line = lf charset = utf-8 max_line_length = 88 -[*.{yml,yaml,json,js,css,html}] +[*.{css,html,js,json,jsx,scss,ts,tsx,yaml,yml}] indent_size = 2 diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md index eb5e22b21..cdbeececf 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.md +++ b/.github/ISSUE_TEMPLATE/bug-report.md @@ -4,8 +4,8 @@ about: Report a bug in Werkzeug (not other projects which depend on Werkzeug) --- diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 9df4cec0e..88a049ead 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,11 +1,11 @@ blank_issues_enabled: false contact_links: - name: Security issue - url: security@palletsprojects.com - about: Do not report security issues publicly. Email our security contact. 
- - name: Questions - url: https://stackoverflow.com/questions/tagged/werkzeug?tab=Frequent - about: Search for and ask questions about your code on Stack Overflow. - - name: Questions and discussions + url: https://github.com/pallets/werkzeug/security/advisories/new + about: Do not report security issues publicly. Create a private advisory. + - name: Questions on Discussions + url: https://github.com/pallets/werkzeug/discussions/ + about: Ask questions about your own code on the Discussions tab. + - name: Questions on Chat url: https://discord.gg/pallets - about: Discuss questions about your code on our Discord chat. + about: Ask questions about your own code on our Discord chat. diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md index 48698798f..18eaef7b5 100644 --- a/.github/ISSUE_TEMPLATE/feature-request.md +++ b/.github/ISSUE_TEMPLATE/feature-request.md @@ -5,7 +5,7 @@ about: Suggest a new feature for Werkzeug -- fixes # +fixes # +--> - -Checklist: - -- [ ] Add tests that demonstrate the correct behavior of the change. Tests should fail without the change. -- [ ] Add or update relevant docs, in the docs folder and in code. -- [ ] Add an entry in `CHANGES.rst` summarizing the change and linking to the issue. -- [ ] Add `.. versionchanged::` entries in any relevant code docs. -- [ ] Run `pre-commit` hooks and fix any issues. -- [ ] Run `pytest` and `tox`, no tests failed. diff --git a/.github/workflows/lock.yaml b/.github/workflows/lock.yaml index e962fd041..22228a1cd 100644 --- a/.github/workflows/lock.yaml +++ b/.github/workflows/lock.yaml @@ -1,25 +1,23 @@ -name: 'Lock threads' -# Lock closed issues that have not received any further activity for -# two weeks. This does not close open issues, only humans may do that. -# We find that it is easier to respond to new issues with fresh examples -# rather than continuing discussions on old issues. +name: Lock inactive closed issues +# Lock closed issues that have not received any further activity for two weeks. +# This does not close open issues, only humans may do that. It is easier to +# respond to new issues with fresh examples rather than continuing discussions +# on old issues. on: schedule: - cron: '0 0 * * *' - permissions: issues: write pull-requests: write - concurrency: group: lock - jobs: lock: runs-on: ubuntu-latest steps: - - uses: dessant/lock-threads@be8aa5be94131386884a6da4189effda9b14aa21 + - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1 with: issue-inactive-days: 14 pr-inactive-days: 14 + discussion-inactive-days: 14 diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index d328c29af..4e8139b79 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,12 +9,12 @@ jobs: outputs: hash: ${{ steps.hash.outputs.hash }} steps: - - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: '3.x' - cache: 'pip' - cache-dependency-path: 'requirements/*.txt' + cache: pip + cache-dependency-path: requirements*/*.txt - run: pip install -r requirements/build.txt # Use the commit date instead of the current date during the build. 
- run: echo "SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct)" >> $GITHUB_ENV @@ -23,11 +23,11 @@ jobs: - name: generate hash id: hash run: cd dist && echo "hash=$(sha256sum * | base64 -w0)" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce + - uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3 with: path: ./dist provenance: - needs: ['build'] + needs: [build] permissions: actions: read id-token: write @@ -39,12 +39,12 @@ jobs: create-release: # Upload the sdist, wheels, and provenance to a GitHub release. They remain # available as build artifacts for a while as well. - needs: ['provenance'] + needs: [provenance] runs-on: ubuntu-latest permissions: contents: write steps: - - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a + - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - name: create release run: > gh release create --draft --repo ${{ github.repository }} @@ -53,20 +53,21 @@ jobs: env: GH_TOKEN: ${{ github.token }} publish-pypi: - needs: ['provenance'] + needs: [provenance] # Wait for approval before attempting to upload to PyPI. This allows reviewing the # files in the draft release. - environment: 'publish' + environment: + name: publish + url: https://pypi.org/project/Werkzeug/${{ github.ref_name }} runs-on: ubuntu-latest permissions: id-token: write steps: - - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a - # Try uploading to Test PyPI first, in case something fails. - - uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e + - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 + - uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # v1.8.14 with: repository-url: https://test.pypi.org/legacy/ packages-dir: artifact/ - - uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e + - uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # v1.8.14 with: packages-dir: artifact/ diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index c1e6ea314..91a02d0ca 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -9,42 +9,49 @@ on: - '*.md' - '*.rst' pull_request: - branches: - - main - - '*.x' paths-ignore: - 'docs/**' - '*.md' - '*.rst' jobs: tests: - name: ${{ matrix.name }} - runs-on: ${{ matrix.os }} + name: ${{ matrix.name || matrix.python }} + runs-on: ${{ matrix.os || 'ubuntu-latest' }} strategy: fail-fast: false matrix: include: - - {name: Linux, python: '3.11', os: ubuntu-latest, tox: py311} - - {name: Windows, python: '3.11', os: windows-latest, tox: py311} - - {name: Mac, python: '3.11', os: macos-latest, tox: py311} - - {name: '3.12-dev', python: '3.12-dev', os: ubuntu-latest, tox: py312} - - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310} - - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39} - - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38} - - {name: 'PyPy', python: 'pypy-3.10', os: ubuntu-latest, tox: pypy310} - - {name: Typing, python: '3.11', os: ubuntu-latest, tox: typing} + - {python: '3.12'} + - {name: Windows, python: '3.12', os: windows-latest} + - {name: Mac, python: '3.12', os: macos-latest} + - {python: '3.11'} + - {python: '3.10'} + - {python: '3.9'} + - {python: '3.8'} + - {name: PyPy, python: 'pypy-3.10', tox: pypy310} steps: - - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - - uses: 
actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: ${{ matrix.python }} - cache: 'pip' - cache-dependency-path: 'requirements/*.txt' + allow-prereleases: true + cache: pip + cache-dependency-path: requirements*/*.txt + - run: pip install tox + - run: tox run -e ${{ matrix.tox || format('py{0}', matrix.python) }} + typing: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 + with: + python-version: '3.x' + cache: pip + cache-dependency-path: requirements*/*.txt - name: cache mypy - uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 with: path: ./.mypy_cache - key: mypy|${{ matrix.python }}|${{ hashFiles('pyproject.toml') }} - if: matrix.tox == 'typing' + key: mypy|${{ hashFiles('pyproject.toml') }} - run: pip install tox - - run: tox run -e ${{ matrix.tox }} + - run: tox run -e typing diff --git a/.gitignore b/.gitignore index cd9550b9e..bbeb14f16 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,11 @@ .idea/ .vscode/ +.venv*/ +venv*/ __pycache__/ +dist/ +.coverage* +htmlcov/ .pytest_cache/ .tox/ -.coverage -.coverage.* -htmlcov/ docs/_build/ -dist/ -venv/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 447fd5869..828916171 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,12 +2,12 @@ ci: autoupdate_schedule: monthly repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.5 + rev: v0.3.5 hooks: - id: ruff - id: ruff-format - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: check-merge-conflict - id: debug-statements diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 346900b20..865c68597 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -1,8 +1,8 @@ version: 2 build: - os: ubuntu-20.04 + os: ubuntu-22.04 tools: - python: "3.10" + python: '3.12' python: install: - requirements: requirements/docs.txt diff --git a/CHANGES.rst b/CHANGES.rst index fda41a89c..401886d36 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -21,7 +21,7 @@ Version 3.0.1 Released 2023-10-24 - Fix slow multipart parsing for large parts potentially enabling DoS - attacks. :cwe:`CWE-407` + attacks. Version 3.0.0 ------------- @@ -44,7 +44,7 @@ Version 2.3.8 Released 2023-11-08 - Fix slow multipart parsing for large parts potentially enabling DoS - attacks. :cwe:`CWE-407` + attacks. Version 2.3.7 diff --git a/LICENSE.rst b/LICENSE.txt similarity index 100% rename from LICENSE.rst rename to LICENSE.txt diff --git a/README.rst b/README.md similarity index 56% rename from README.rst rename to README.md index 220c9979a..011c0c45f 100644 --- a/README.rst +++ b/README.md @@ -1,9 +1,8 @@ -Werkzeug -======== +# Werkzeug *werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") -Werkzeug is a comprehensive `WSGI`_ web application library. It began as +Werkzeug is a comprehensive [WSGI][] web application library. It began as a simple collection of various utilities for WSGI applications and has become one of the most advanced WSGI utility libraries. @@ -31,59 +30,40 @@ choose a template engine, database adapter, and even how to handle requests. 
It can be used to build all sorts of end user applications such as blogs, wikis, or bulletin boards. -`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while +[Flask][] wraps Werkzeug, using it to handle the details of WSGI while providing more structure and patterns for defining powerful applications. -.. _WSGI: https://wsgi.readthedocs.io/en/latest/ -.. _Flask: https://www.palletsprojects.com/p/flask/ +[WSGI]: https://wsgi.readthedocs.io/en/latest/ +[Flask]: https://www.palletsprojects.com/p/flask/ -Installing ----------- +## A Simple Example -Install and update using `pip`_: +```python +# save this as app.py +from werkzeug.wrappers import Request, Response -.. code-block:: text +@Request.application +def application(request: Request) -> Response: + return Response("Hello, World!") - pip install -U Werkzeug +if __name__ == "__main__": + from werkzeug.serving import run_simple + run_simple("127.0.0.1", 5000, application) +``` -.. _pip: https://pip.pypa.io/en/stable/getting-started/ +``` +$ python -m app + * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) +``` -A Simple Example ----------------- - -.. code-block:: python - - from werkzeug.wrappers import Request, Response - - @Request.application - def application(request): - return Response('Hello, World!') - - if __name__ == '__main__': - from werkzeug.serving import run_simple - run_simple('localhost', 4000, application) - - -Donate ------- +## Donate The Pallets organization develops and supports Werkzeug and other popular packages. In order to grow the community of contributors and users, and allow the maintainers to devote more time to the projects, -`please donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ +[please donate today][]. 
-- Documentation: https://werkzeug.palletsprojects.com/ -- Changes: https://werkzeug.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/Werkzeug/ -- Source Code: https://github.com/pallets/werkzeug/ -- Issue Tracker: https://github.com/pallets/werkzeug/issues/ -- Chat: https://discord.gg/pallets +[please donate today]: https://palletsprojects.com/donate diff --git a/docs/conf.py b/docs/conf.py index e09ef8f7b..5e04cb817 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -10,18 +10,25 @@ # General -------------------------------------------------------------- -master_doc = "index" +default_role = "code" extensions = [ "sphinx.ext.autodoc", + "sphinx.ext.extlinks", "sphinx.ext.intersphinx", - "pallets_sphinx_themes", - "sphinx_issues", "sphinxcontrib.log_cabinet", + "pallets_sphinx_themes", ] autoclass_content = "both" +autodoc_member_order = "bysource" autodoc_typehints = "description" -intersphinx_mapping = {"python": ("https://docs.python.org/3/", None)} -issues_github_path = "pallets/werkzeug" +autodoc_preserve_defaults = True +extlinks = { + "issue": ("https://github.com/pallets/werkzeug/issues/%s", "#%s"), + "pr": ("https://github.com/pallets/werkzeug/pull/%s", "#%s"), +} +intersphinx_mapping = { + "python": ("https://docs.python.org/3/", None), +} # HTML ----------------------------------------------------------------- @@ -46,9 +53,3 @@ html_logo = "_static/werkzeug-vertical.png" html_title = f"Werkzeug Documentation ({version})" html_show_sourcelink = False - -# LaTeX ---------------------------------------------------------------- - -latex_documents = [ - (master_doc, f"Werkzeug-{version}.tex", html_title, author, "manual") -] diff --git a/docs/license.rst b/docs/license.rst index a53a98cf3..2a445f9c6 100644 --- a/docs/license.rst +++ b/docs/license.rst @@ -1,4 +1,5 @@ BSD-3-Clause License ==================== -.. include:: ../LICENSE.rst +.. literalinclude:: ../LICENSE.txt + :language: text diff --git a/pyproject.toml b/pyproject.toml index 6b3509a8b..f54060f6e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,8 +2,8 @@ name = "Werkzeug" version = "3.0.2" description = "The comprehensive WSGI web application library." 
-readme = "README.rst" -license = {file = "LICENSE.rst"} +readme = "README.md" +license = {file = "LICENSE.txt"} maintainers = [{name = "Pallets", email = "contact@palletsprojects.com"}] classifiers = [ "Development Status :: 5 - Production/Stable", @@ -17,9 +17,12 @@ classifiers = [ "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", "Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware", "Topic :: Software Development :: Libraries :: Application Frameworks", + "Typing :: Typed", ] requires-python = ">=3.8" -dependencies = ["MarkupSafe>=2.1.1"] +dependencies = [ + "MarkupSafe>=2.1.1", +] [project.urls] Donate = "https://palletsprojects.com/donate" @@ -67,29 +70,10 @@ source = ["werkzeug", "tests"] source = ["src", "*/site-packages"] [tool.mypy] -python_version = "3.8" files = ["src/werkzeug"] show_error_codes = true pretty = true -#strict = true -allow_redefinition = true -disallow_subclassing_any = true -#disallow_untyped_calls = true -disallow_untyped_defs = true -disallow_incomplete_defs = true -no_implicit_optional = true -local_partial_types = true -no_implicit_reexport = true -strict_equality = true -warn_redundant_casts = true -warn_unused_configs = true -warn_unused_ignores = true -warn_return_any = true -#warn_unreachable = True - -[[tool.mypy.overrides]] -module = ["werkzeug.wrappers"] -no_implicit_reexport = false +strict = true [[tool.mypy.overrides]] module = [ @@ -103,24 +87,28 @@ module = [ ] ignore_missing_imports = true +[tool.pyright] +pythonVersion = "3.8" +include = ["src/werkzeug"] + [tool.ruff] extend-exclude = ["examples/"] src = ["src"] -fix = false +fix = true show-fixes = true -show-source = true +output-format = "full" [tool.ruff.lint] select = [ "B", # flake8-bugbear "E", # pycodestyle error "F", # pyflakes - #"I", # isort + "I", # isort "UP", # pyupgrade "W", # pycodestyle warning ] ignore = [ - "E402" # allow circular imports at end of file + "E402", # allow circular imports at end of file ] ignore-init-module-imports = true diff --git a/requirements/build.txt b/requirements/build.txt index 196545d0e..9ecc48952 100644 --- a/requirements/build.txt +++ b/requirements/build.txt @@ -1,13 +1,12 @@ -# SHA1:80754af91bfb6d1073585b046fe0a474ce868509 # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile build.in # -build==0.10.0 - # via -r requirements/build.in -packaging==23.1 +build==1.2.1 + # via -r build.in +packaging==24.0 # via build pyproject-hooks==1.0.0 # via build diff --git a/requirements/dev.in b/requirements/dev.in index 99f5942f8..1efde82b1 100644 --- a/requirements/dev.in +++ b/requirements/dev.in @@ -1,6 +1,5 @@ --r docs.in --r tests.in --r typing.in -pip-compile-multi +-r docs.txt +-r tests.txt +-r typing.txt pre-commit tox diff --git a/requirements/dev.txt b/requirements/dev.txt index ed462080a..186ceda46 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,64 +1,202 @@ -# SHA1:54b5b77ec8c7a0064ffa93b2fd16cb0130ba177c # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile dev.in # --r docs.txt --r tests.txt --r typing.txt -build==0.10.0 - # via pip-tools -cachetools==5.3.1 +alabaster==0.7.16 + # via + # -r docs.txt + # sphinx +babel==2.14.0 + # via + # -r docs.txt + # sphinx +cachetools==5.3.3 # via tox -cfgv==3.3.1 +certifi==2024.2.2 + # via + # 
-r docs.txt + # requests +cffi==1.16.0 + # via + # -r tests.txt + # cryptography +cfgv==3.4.0 # via pre-commit -chardet==5.1.0 +chardet==5.2.0 # via tox -click==8.1.3 +charset-normalizer==3.3.2 # via - # pip-compile-multi - # pip-tools + # -r docs.txt + # requests colorama==0.4.6 # via tox -distlib==0.3.6 +cryptography==42.0.5 + # via -r tests.txt +distlib==0.3.8 # via virtualenv -filelock==3.12.2 +docutils==0.20.1 + # via + # -r docs.txt + # sphinx +ephemeral-port-reserve==1.1.4 + # via -r tests.txt +filelock==3.13.3 # via # tox # virtualenv -identify==2.5.24 +greenlet==3.0.3 + # via -r tests.txt +identify==2.5.35 # via pre-commit +idna==3.6 + # via + # -r docs.txt + # requests +imagesize==1.4.1 + # via + # -r docs.txt + # sphinx +iniconfig==2.0.0 + # via + # -r tests.txt + # -r typing.txt + # pytest +jinja2==3.1.3 + # via + # -r docs.txt + # sphinx +markupsafe==2.1.5 + # via + # -r docs.txt + # jinja2 +mypy==1.9.0 + # via -r typing.txt +mypy-extensions==1.0.0 + # via + # -r typing.txt + # mypy nodeenv==1.8.0 - # via pre-commit -pip-compile-multi==2.6.3 - # via -r requirements/dev.in -pip-tools==6.13.0 - # via pip-compile-multi -platformdirs==3.8.0 + # via + # -r typing.txt + # pre-commit + # pyright +packaging==24.0 + # via + # -r docs.txt + # -r tests.txt + # -r typing.txt + # pallets-sphinx-themes + # pyproject-api + # pytest + # sphinx + # tox +pallets-sphinx-themes==2.1.1 + # via -r docs.txt +platformdirs==4.2.0 # via # tox # virtualenv -pre-commit==3.3.3 - # via -r requirements/dev.in -pyproject-api==1.5.2 +pluggy==1.4.0 + # via + # -r tests.txt + # -r typing.txt + # pytest + # tox +pre-commit==3.7.0 + # via -r dev.in +psutil==5.9.8 + # via + # -r tests.txt + # pytest-xprocess +pycparser==2.22 + # via + # -r tests.txt + # cffi +pygments==2.17.2 + # via + # -r docs.txt + # sphinx +pyproject-api==1.6.1 # via tox -pyproject-hooks==1.0.0 - # via build -pyyaml==6.0 +pyright==1.1.357 + # via -r typing.txt +pytest==8.1.1 + # via + # -r tests.txt + # -r typing.txt + # pytest-timeout + # pytest-xprocess +pytest-timeout==2.3.1 + # via -r tests.txt +pytest-xprocess==0.23.0 + # via -r tests.txt +pyyaml==6.0.1 # via pre-commit -toposort==1.10 - # via pip-compile-multi -tox==4.6.3 - # via -r requirements/dev.in -virtualenv==20.23.1 +requests==2.31.0 + # via + # -r docs.txt + # sphinx +snowballstemmer==2.2.0 + # via + # -r docs.txt + # sphinx +sphinx==7.2.6 + # via + # -r docs.txt + # pallets-sphinx-themes + # sphinxcontrib-log-cabinet +sphinxcontrib-applehelp==1.0.8 + # via + # -r docs.txt + # sphinx +sphinxcontrib-devhelp==1.0.6 + # via + # -r docs.txt + # sphinx +sphinxcontrib-htmlhelp==2.0.5 + # via + # -r docs.txt + # sphinx +sphinxcontrib-jsmath==1.0.1 + # via + # -r docs.txt + # sphinx +sphinxcontrib-log-cabinet==1.0.1 + # via -r docs.txt +sphinxcontrib-qthelp==1.0.7 + # via + # -r docs.txt + # sphinx +sphinxcontrib-serializinghtml==1.1.10 + # via + # -r docs.txt + # sphinx +tox==4.14.2 + # via -r dev.in +types-contextvars==2.4.7.3 + # via -r typing.txt +types-dataclasses==0.6.6 + # via -r typing.txt +types-setuptools==69.2.0.20240317 + # via -r typing.txt +typing-extensions==4.11.0 + # via + # -r typing.txt + # mypy +urllib3==2.2.1 + # via + # -r docs.txt + # requests +virtualenv==20.25.1 # via # pre-commit # tox -wheel==0.40.0 - # via pip-tools +watchdog==4.0.0 + # via + # -r tests.txt + # -r typing.txt # The following packages are considered to be unsafe in a requirements file: -# pip # setuptools diff --git a/requirements/docs.in b/requirements/docs.in index 7ec501b6d..ba3fd7774 100644 
--- a/requirements/docs.in +++ b/requirements/docs.in @@ -1,4 +1,3 @@ -Pallets-Sphinx-Themes -Sphinx -sphinx-issues +pallets-sphinx-themes +sphinx sphinxcontrib-log-cabinet diff --git a/requirements/docs.txt b/requirements/docs.txt index e125c59a4..ed605ea92 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,61 +1,57 @@ -# SHA1:45c590f97fe95b8bdc755eef796e91adf5fbe4ea # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile docs.in # -alabaster==0.7.13 +alabaster==0.7.16 # via sphinx -babel==2.12.1 +babel==2.14.0 # via sphinx -certifi==2023.5.7 +certifi==2024.2.2 # via requests -charset-normalizer==3.1.0 +charset-normalizer==3.3.2 # via requests docutils==0.20.1 # via sphinx -idna==3.4 +idna==3.6 # via requests imagesize==1.4.1 # via sphinx -jinja2==3.1.2 +jinja2==3.1.3 # via sphinx -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 -packaging==23.1 +packaging==24.0 # via # pallets-sphinx-themes # sphinx pallets-sphinx-themes==2.1.1 - # via -r requirements/docs.in -pygments==2.15.1 + # via -r docs.in +pygments==2.17.2 # via sphinx requests==2.31.0 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.0.1 +sphinx==7.2.6 # via - # -r requirements/docs.in + # -r docs.in # pallets-sphinx-themes - # sphinx-issues # sphinxcontrib-log-cabinet -sphinx-issues==3.0.1 - # via -r requirements/docs.in -sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-applehelp==1.0.8 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==1.0.6 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-htmlhelp==2.0.5 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-log-cabinet==1.0.1 - # via -r requirements/docs.in -sphinxcontrib-qthelp==1.0.3 + # via -r docs.in +sphinxcontrib-qthelp==1.0.7 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==1.1.10 # via sphinx -urllib3==2.0.3 +urllib3==2.2.1 # via requests diff --git a/requirements/tests.in b/requirements/tests.in index 3ced491be..8228f8ee6 100644 --- a/requirements/tests.in +++ b/requirements/tests.in @@ -1,7 +1,8 @@ pytest pytest-timeout -pytest-xprocess +# pinned for python 3.8 support +pytest-xprocess<1 cryptography -greenlet ; python_version < "3.11" +greenlet watchdog ephemeral-port-reserve diff --git a/requirements/tests.txt b/requirements/tests.txt index 057d62859..14b67436f 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -1,36 +1,35 @@ -# SHA1:42b4e3e66395275e048d9a92c294b2c650393866 # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile tests.in # -cffi==1.15.1 +cffi==1.16.0 # via cryptography -cryptography==41.0.1 - # via -r requirements/tests.in +cryptography==42.0.5 + # via -r tests.in ephemeral-port-reserve==1.1.4 - # via -r requirements/tests.in + # via -r tests.in +greenlet==3.0.3 + # via -r tests.in iniconfig==2.0.0 # via pytest -packaging==23.1 +packaging==24.0 # via pytest -pluggy==1.2.0 +pluggy==1.4.0 # via pytest -psutil==5.9.5 +psutil==5.9.8 # via pytest-xprocess -py==1.11.0 - # via pytest-xprocess -pycparser==2.21 +pycparser==2.22 # via cffi -pytest==7.4.0 +pytest==8.1.1 # via - # -r requirements/tests.in + # -r tests.in # pytest-timeout # pytest-xprocess -pytest-timeout==2.1.0 - # via -r requirements/tests.in -pytest-xprocess==0.22.2 - # via -r 
requirements/tests.in -watchdog==3.0.0 - # via -r requirements/tests.in +pytest-timeout==2.3.1 + # via -r tests.in +pytest-xprocess==0.23.0 + # via -r tests.in +watchdog==4.0.0 + # via -r tests.in diff --git a/requirements/typing.in b/requirements/typing.in index 23ab1587b..096413b22 100644 --- a/requirements/typing.in +++ b/requirements/typing.in @@ -1,4 +1,6 @@ mypy +pyright +pytest types-contextvars types-dataclasses types-setuptools diff --git a/requirements/typing.txt b/requirements/typing.txt index 99c46d2e0..09c78d711 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,21 +1,35 @@ -# SHA1:162796b1b3ac7a29da65fe0e32278f14b68ed8c8 # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile typing.in # -mypy==1.4.1 - # via -r requirements/typing.in +iniconfig==2.0.0 + # via pytest +mypy==1.9.0 + # via -r typing.in mypy-extensions==1.0.0 # via mypy -types-contextvars==2.4.7.2 - # via -r requirements/typing.in +nodeenv==1.8.0 + # via pyright +packaging==24.0 + # via pytest +pluggy==1.4.0 + # via pytest +pyright==1.1.357 + # via -r typing.in +pytest==8.1.1 + # via -r typing.in +types-contextvars==2.4.7.3 + # via -r typing.in types-dataclasses==0.6.6 - # via -r requirements/typing.in -types-setuptools==68.0.0.0 - # via -r requirements/typing.in -typing-extensions==4.6.3 + # via -r typing.in +types-setuptools==69.2.0.20240317 + # via -r typing.in +typing-extensions==4.11.0 # via mypy -watchdog==3.0.0 - # via -r requirements/typing.in +watchdog==4.0.0 + # via -r typing.in + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/tox.ini b/tox.ini index eca667f84..f7bc0b3b5 100644 --- a/tox.ini +++ b/tox.ini @@ -10,6 +10,8 @@ skip_missing_interpreters = true [testenv] package = wheel wheel_build_env = .pkg +constrain_package_deps = true +use_frozen_constraints = true deps = -r requirements/tests.txt commands = pytest -v --tb=short --basetemp={envtmpdir} {posargs} @@ -24,4 +26,18 @@ commands = mypy [testenv:docs] deps = -r requirements/docs.txt -commands = sphinx-build -W -b html -d {envtmpdir}/doctrees docs {envtmpdir}/html +commands = sphinx-build -E -W -b dirhtml docs docs/_build/dirhtml + +[testenv:update-requirements] +deps = + pip-tools + pre-commit +skip_install = true +change_dir = requirements +commands = + pre-commit autoupdate -j4 + pip-compile -U build.in + pip-compile -U docs.in + pip-compile -U tests.in + pip-compile -U typing.in + pip-compile -U dev.in From f5b71d82b87212bb5468656422f5a7423980432d Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 8 Apr 2024 15:07:43 -0700 Subject: [PATCH 2/2] address mypy strict mode findings --- src/werkzeug/_internal.py | 15 ++--- src/werkzeug/_reloader.py | 18 +++--- src/werkzeug/datastructures/auth.py | 6 +- src/werkzeug/datastructures/cache_control.pyi | 18 ++++-- src/werkzeug/datastructures/file_storage.pyi | 6 +- src/werkzeug/datastructures/structures.pyi | 28 ++++---- src/werkzeug/debug/__init__.py | 3 +- src/werkzeug/debug/console.py | 2 +- src/werkzeug/debug/repr.py | 9 +-- src/werkzeug/debug/tbtools.py | 2 +- src/werkzeug/exceptions.py | 18 +++--- src/werkzeug/formparser.py | 18 +++--- src/werkzeug/http.py | 56 ++++++++-------- src/werkzeug/local.py | 64 +++++++++++-------- src/werkzeug/middleware/dispatcher.py | 1 + src/werkzeug/middleware/http_proxy.py | 1 + src/werkzeug/middleware/lint.py | 11 ++-- 
src/werkzeug/middleware/profiler.py | 1 + src/werkzeug/middleware/proxy_fix.py | 1 + src/werkzeug/middleware/shared_data.py | 6 +- src/werkzeug/routing/__init__.py | 1 + src/werkzeug/routing/converters.py | 16 ++--- src/werkzeug/routing/exceptions.py | 7 +- src/werkzeug/routing/map.py | 14 ++-- src/werkzeug/routing/rules.py | 4 +- src/werkzeug/sansio/http.py | 4 +- src/werkzeug/sansio/request.py | 7 +- src/werkzeug/sansio/response.py | 49 +++++++------- src/werkzeug/security.py | 12 ++-- src/werkzeug/serving.py | 5 +- src/werkzeug/test.py | 24 +++---- src/werkzeug/testapp.py | 27 ++++++-- src/werkzeug/utils.py | 21 +++--- src/werkzeug/wrappers/request.py | 17 ++--- src/werkzeug/wrappers/response.py | 27 ++++---- tests/test_exceptions.py | 3 +- tests/test_utils.py | 1 + 37 files changed, 282 insertions(+), 241 deletions(-) diff --git a/src/werkzeug/_internal.py b/src/werkzeug/_internal.py index 70ab68761..7dd2fbccd 100644 --- a/src/werkzeug/_internal.py +++ b/src/werkzeug/_internal.py @@ -9,6 +9,7 @@ if t.TYPE_CHECKING: from _typeshed.wsgi import WSGIEnvironment + from .wrappers.request import Request _logger: logging.Logger | None = None @@ -60,7 +61,7 @@ def _has_level_handler(logger: logging.Logger) -> bool: return False -class _ColorStreamHandler(logging.StreamHandler): +class _ColorStreamHandler(logging.StreamHandler): # type: ignore[type-arg] """On Windows, wrap stream with Colorama for ANSI style support.""" def __init__(self) -> None: @@ -97,13 +98,11 @@ def _log(type: str, message: str, *args: t.Any, **kwargs: t.Any) -> None: @t.overload -def _dt_as_utc(dt: None) -> None: - ... +def _dt_as_utc(dt: None) -> None: ... @t.overload -def _dt_as_utc(dt: datetime) -> datetime: - ... +def _dt_as_utc(dt: datetime) -> datetime: ... def _dt_as_utc(dt: datetime | None) -> datetime | None: @@ -149,12 +148,10 @@ def lookup(self, instance: t.Any) -> t.MutableMapping[str, t.Any]: @t.overload def __get__( self, instance: None, owner: type - ) -> _DictAccessorProperty[_TAccessorValue]: - ... + ) -> _DictAccessorProperty[_TAccessorValue]: ... @t.overload - def __get__(self, instance: t.Any, owner: type) -> _TAccessorValue: - ... + def __get__(self, instance: t.Any, owner: type) -> _TAccessorValue: ... 
def __get__( self, instance: t.Any | None, owner: type diff --git a/src/werkzeug/_reloader.py b/src/werkzeug/_reloader.py index c8683593f..24c2dab79 100644 --- a/src/werkzeug/_reloader.py +++ b/src/werkzeug/_reloader.py @@ -141,7 +141,7 @@ def _find_watchdog_paths( def _find_common_roots(paths: t.Iterable[str]) -> t.Iterable[str]: - root: dict[str, dict] = {} + root: dict[str, dict[str, t.Any]] = {} for chunks in sorted((PurePath(x).parts for x in paths), key=len, reverse=True): node = root @@ -153,7 +153,7 @@ def _find_common_roots(paths: t.Iterable[str]) -> t.Iterable[str]: rv = set() - def _walk(node: t.Mapping[str, dict], path: tuple[str, ...]) -> None: + def _walk(node: t.Mapping[str, dict[str, t.Any]], path: tuple[str, ...]) -> None: for prefix, child in node.items(): _walk(child, path + (prefix,)) @@ -310,10 +310,10 @@ def run_step(self) -> None: class WatchdogReloaderLoop(ReloaderLoop): def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - from watchdog.observers import Observer - from watchdog.events import PatternMatchingEventHandler from watchdog.events import EVENT_TYPE_OPENED from watchdog.events import FileModifiedEvent + from watchdog.events import PatternMatchingEventHandler + from watchdog.observers import Observer super().__init__(*args, **kwargs) trigger_reload = self.trigger_reload @@ -338,7 +338,7 @@ def on_any_event(self, event: FileModifiedEvent): # type: ignore # the source file (or initial pyc file) as well. Ignore Git and # Mercurial internal changes. extra_patterns = [p for p in self.extra_files if not os.path.isdir(p)] - self.event_handler = EventHandler( + self.event_handler = EventHandler( # type: ignore[no-untyped-call] patterns=["*.py", "*.pyc", "*.zip", *extra_patterns], ignore_patterns=[ *[f"*/{d}/*" for d in _ignore_common_dirs], @@ -356,11 +356,11 @@ def trigger_reload(self, filename: str) -> None: def __enter__(self) -> ReloaderLoop: self.watches: dict[str, t.Any] = {} - self.observer.start() + self.observer.start() # type: ignore[no-untyped-call] return super().__enter__() def __exit__(self, exc_type, exc_val, exc_tb): # type: ignore - self.observer.stop() + self.observer.stop() # type: ignore[no-untyped-call] self.observer.join() def run(self) -> None: @@ -376,7 +376,7 @@ def run_step(self) -> None: for path in _find_watchdog_paths(self.extra_files, self.exclude_patterns): if path not in self.watches: try: - self.watches[path] = self.observer.schedule( + self.watches[path] = self.observer.schedule( # type: ignore[no-untyped-call] self.event_handler, path, recursive=True ) except OSError: @@ -391,7 +391,7 @@ def run_step(self) -> None: watch = self.watches.pop(path, None) if watch is not None: - self.observer.unschedule(watch) + self.observer.unschedule(watch) # type: ignore[no-untyped-call] reloader_loops: dict[str, type[ReloaderLoop]] = { diff --git a/src/werkzeug/datastructures/auth.py b/src/werkzeug/datastructures/auth.py index 830529da7..a3ca0de46 100644 --- a/src/werkzeug/datastructures/auth.py +++ b/src/werkzeug/datastructures/auth.py @@ -172,7 +172,7 @@ def __init__( token: str | None = None, ): self._type = auth_type.lower() - self._parameters: dict[str, str | None] = CallbackDict( # type: ignore[misc] + self._parameters: dict[str, str | None] = CallbackDict( values, lambda _: self._trigger_on_update() ) self._token = token @@ -201,9 +201,7 @@ def parameters(self) -> dict[str, str | None]: @parameters.setter def parameters(self, value: dict[str, str]) -> None: - self._parameters = CallbackDict( # type: ignore[misc] - value, lambda _: 
self._trigger_on_update() - ) + self._parameters = CallbackDict(value, lambda _: self._trigger_on_update()) self._trigger_on_update() @property diff --git a/src/werkzeug/datastructures/cache_control.pyi b/src/werkzeug/datastructures/cache_control.pyi index 06fe667a2..54ec02082 100644 --- a/src/werkzeug/datastructures/cache_control.pyi +++ b/src/werkzeug/datastructures/cache_control.pyi @@ -8,15 +8,19 @@ from .mixins import UpdateDictMixin T = TypeVar("T") _CPT = TypeVar("_CPT", str, int, bool) -_OptCPT = _CPT | None -def cache_control_property(key: str, empty: _OptCPT, type: type[_CPT]) -> property: ... +def cache_control_property( + key: str, empty: _CPT | None, type: type[_CPT] +) -> property: ... -class _CacheControl(UpdateDictMixin[str, _OptCPT], dict[str, _OptCPT]): +class _CacheControl( + UpdateDictMixin[str, str | int | bool | None], dict[str, str | int | bool | None] +): provided: bool def __init__( self, - values: Mapping[str, _OptCPT] | Iterable[tuple[str, _OptCPT]] = (), + values: Mapping[str, str | int | bool | None] + | Iterable[tuple[str, str | int | bool | None]] = (), on_update: Callable[[_CacheControl], None] | None = None, ) -> None: ... @property @@ -48,9 +52,11 @@ class _CacheControl(UpdateDictMixin[str, _OptCPT], dict[str, _OptCPT]): def _del_cache_value(self, key: str) -> None: ... def to_header(self) -> str: ... @staticmethod - def cache_property(key: str, empty: _OptCPT, type: type[_CPT]) -> property: ... + def cache_property(key: str, empty: _CPT | None, type: type[_CPT]) -> property: ... -class RequestCacheControl(ImmutableDictMixin[str, _OptCPT], _CacheControl): +class RequestCacheControl( # type: ignore[misc] + ImmutableDictMixin[str, str | int | bool | None], _CacheControl +): @property def max_stale(self) -> int | None: ... @max_stale.setter diff --git a/src/werkzeug/datastructures/file_storage.pyi b/src/werkzeug/datastructures/file_storage.pyi index 730789e35..36a7ed9f2 100644 --- a/src/werkzeug/datastructures/file_storage.pyi +++ b/src/werkzeug/datastructures/file_storage.pyi @@ -15,7 +15,7 @@ class FileStorage: def __init__( self, stream: IO[bytes] | None = None, - filename: str | PathLike | None = None, + filename: str | PathLike[str] | None = None, name: str | None = None, content_type: str | None = None, content_length: int | None = None, @@ -30,7 +30,9 @@ class FileStorage: def mimetype(self) -> str: ... @property def mimetype_params(self) -> dict[str, str]: ... - def save(self, dst: str | PathLike | IO[bytes], buffer_size: int = ...) -> None: ... + def save( + self, dst: str | PathLike[str] | IO[bytes], buffer_size: int = ... + ) -> None: ... def close(self) -> None: ... def __bool__(self) -> bool: ... def __getattr__(self, name: str) -> Any: ... 
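The stub and annotation updates in this second commit follow one recurring pattern: with `strict = true` now set for mypy in `pyproject.toml`, bare generics such as `PathLike`, `dict`, or `CallbackDict` are rejected by mypy's `--disallow-any-generics` check, so each annotation gains explicit parameters like `PathLike[str]` or `dict[str, t.Any]`, as in the `FileStorage.save` signature just above. The following minimal sketch, which uses made-up function names that are not part of Werkzeug, illustrates the rule:

```python
from __future__ import annotations

import typing as t
from os import PathLike


# Under mypy --strict (which enables --disallow-any-generics), a bare
# generic such as PathLike or dict in an annotation is an error; the
# type parameters have to be spelled out.
def save_to(dst: str | PathLike[str], data: bytes) -> None:
    with open(dst, "wb") as f:
        f.write(data)


# The same rule applies to containers: dict[str, t.Any] rather than
# dict, list[tuple[str, str]] rather than list.
def collect_headers(raw: list[tuple[str, str]]) -> dict[str, t.Any]:
    return dict(raw)
```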
diff --git a/src/werkzeug/datastructures/structures.pyi b/src/werkzeug/datastructures/structures.pyi index 2e7af35be..7086ddae1 100644 --- a/src/werkzeug/datastructures/structures.pyi +++ b/src/werkzeug/datastructures/structures.pyi @@ -9,22 +9,20 @@ from typing import NoReturn from typing import overload from typing import TypeVar -from .mixins import ( - ImmutableDictMixin, - ImmutableListMixin, - ImmutableMultiDictMixin, - UpdateDictMixin, -) +from .mixins import ImmutableDictMixin +from .mixins import ImmutableListMixin +from .mixins import ImmutableMultiDictMixin +from .mixins import UpdateDictMixin D = TypeVar("D") K = TypeVar("K") T = TypeVar("T") V = TypeVar("V") -_CD = TypeVar("_CD", bound="CallbackDict") +_CD = TypeVar("_CD", bound="CallbackDict[Any, Any]") def is_immutable(self: object) -> NoReturn: ... def iter_multi_items( - mapping: Mapping[K, V | Iterable[V]] | Iterable[tuple[K, V]] + mapping: Mapping[K, V | Iterable[V]] | Iterable[tuple[K, V]], ) -> Iterator[tuple[K, V]]: ... class ImmutableList(ImmutableListMixin[V]): ... @@ -41,7 +39,7 @@ class TypeConversionDict(dict[K, V]): class ImmutableTypeConversionDict(ImmutableDictMixin[K, V], TypeConversionDict[K, V]): def copy(self) -> TypeConversionDict[K, V]: ... - def __copy__(self) -> ImmutableTypeConversionDict: ... + def __copy__(self) -> ImmutableTypeConversionDict[K, V]: ... class MultiDict(TypeConversionDict[K, V]): def __init__( @@ -84,16 +82,16 @@ class MultiDict(TypeConversionDict[K, V]): def __deepcopy__(self, memo: Any) -> MultiDict[K, V]: ... class _omd_bucket(Generic[K, V]): - prev: _omd_bucket | None - next: _omd_bucket | None + prev: _omd_bucket[K, V] | None + next: _omd_bucket[K, V] | None key: K value: V - def __init__(self, omd: OrderedMultiDict, key: K, value: V) -> None: ... - def unlink(self, omd: OrderedMultiDict) -> None: ... + def __init__(self, omd: OrderedMultiDict[K, V], key: K, value: V) -> None: ... + def unlink(self, omd: OrderedMultiDict[K, V]) -> None: ... class OrderedMultiDict(MultiDict[K, V]): - _first_bucket: _omd_bucket | None - _last_bucket: _omd_bucket | None + _first_bucket: _omd_bucket[K, V] | None + _last_bucket: _omd_bucket[K, V] | None def __init__(self, mapping: Mapping[K, V] | None = None) -> None: ... def __eq__(self, other: object) -> bool: ... def __getitem__(self, key: K) -> V: ... diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py index f8756d890..a55480aa3 100644 --- a/src/werkzeug/debug/__init__.py +++ b/src/werkzeug/debug/__init__.py @@ -82,7 +82,8 @@ def _generate() -> str | bytes | None: try: # subprocess may not be available, e.g. 
Google App Engine # https://github.com/pallets/werkzeug/issues/925 - from subprocess import Popen, PIPE + from subprocess import PIPE + from subprocess import Popen dump = Popen( ["ioreg", "-c", "IOPlatformExpertDevice", "-d", "2"], stdout=PIPE diff --git a/src/werkzeug/debug/console.py b/src/werkzeug/debug/console.py index 03ddc07f2..4e40475a5 100644 --- a/src/werkzeug/debug/console.py +++ b/src/werkzeug/debug/console.py @@ -13,7 +13,7 @@ from .repr import helper _stream: ContextVar[HTMLStringO] = ContextVar("werkzeug.debug.console.stream") -_ipy: ContextVar = ContextVar("werkzeug.debug.console.ipy") +_ipy: ContextVar[_InteractiveConsole] = ContextVar("werkzeug.debug.console.ipy") class HTMLStringO: diff --git a/src/werkzeug/debug/repr.py b/src/werkzeug/debug/repr.py index 1dcdd67be..2bbd9d546 100644 --- a/src/werkzeug/debug/repr.py +++ b/src/werkzeug/debug/repr.py @@ -4,6 +4,7 @@ Together with the CSS and JavaScript of the debugger this gives a colorful and more compact output. """ + from __future__ import annotations import codecs @@ -95,8 +96,8 @@ def _add_subclass_info(inner: str, obj: object, base: type | tuple[type, ...]) - def _sequence_repr_maker( left: str, right: str, base: type, limit: int = 8 -) -> t.Callable[[DebugReprGenerator, t.Iterable, bool], str]: - def proxy(self: DebugReprGenerator, obj: t.Iterable, recursive: bool) -> str: +) -> t.Callable[[DebugReprGenerator, t.Iterable[t.Any], bool], str]: + def proxy(self: DebugReprGenerator, obj: t.Iterable[t.Any], recursive: bool) -> str: if recursive: return _add_subclass_info(f"{left}...{right}", obj, base) buf = [left] @@ -128,7 +129,7 @@ def __init__(self) -> None: 'collections.deque([', "])", deque ) - def regex_repr(self, obj: t.Pattern) -> str: + def regex_repr(self, obj: t.Pattern[t.AnyStr]) -> str: pattern = repr(obj.pattern) pattern = codecs.decode(pattern, "unicode-escape", "ignore") pattern = f"r{pattern}" @@ -186,7 +187,7 @@ def dict_repr( buf.append("}") return _add_subclass_info("".join(buf), d, dict) - def object_repr(self, obj: type[dict] | t.Callable | type[list] | None) -> str: + def object_repr(self, obj: t.Any) -> str: r = repr(obj) return f'{escape(r)}' diff --git a/src/werkzeug/debug/tbtools.py b/src/werkzeug/debug/tbtools.py index f9be17c42..0574c966b 100644 --- a/src/werkzeug/debug/tbtools.py +++ b/src/werkzeug/debug/tbtools.py @@ -187,7 +187,7 @@ def _process_traceback( if hasattr(fs, "colno"): frame_args["colno"] = fs.colno - frame_args["end_colno"] = fs.end_colno # type: ignore[attr-defined] + frame_args["end_colno"] = fs.end_colno new_stack.append(DebugFrameSummary(**frame_args)) diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py index 253612918..6ce7ef955 100644 --- a/src/werkzeug/exceptions.py +++ b/src/werkzeug/exceptions.py @@ -43,6 +43,7 @@ def application(request): return e """ + from __future__ import annotations import typing as t @@ -56,6 +57,7 @@ def application(request): if t.TYPE_CHECKING: from _typeshed.wsgi import StartResponse from _typeshed.wsgi import WSGIEnvironment + from .datastructures import WWWAuthenticate from .sansio.response import Response from .wrappers.request import Request as WSGIRequest @@ -94,7 +96,7 @@ def name(self) -> str: def get_description( self, environ: WSGIEnvironment | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> str: """Get the description.""" if self.description is None: @@ -108,7 +110,7 @@ def get_description( def get_body( self, environ: WSGIEnvironment | None = None, - scope: dict | None = 
None, + scope: dict[str, t.Any] | None = None, ) -> str: """Get the HTML body.""" return ( @@ -122,7 +124,7 @@ def get_body( def get_headers( self, environ: WSGIEnvironment | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> list[tuple[str, str]]: """Get a list of headers.""" return [("Content-Type", "text/html; charset=utf-8")] @@ -130,7 +132,7 @@ def get_headers( def get_response( self, environ: WSGIEnvironment | WSGIRequest | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> Response: """Get a response object. If one was passed to the exception it's returned directly. @@ -312,7 +314,7 @@ def __init__( def get_headers( self, environ: WSGIEnvironment | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> list[tuple[str, str]]: headers = super().get_headers(environ, scope) if self.www_authenticate: @@ -376,7 +378,7 @@ def __init__( def get_headers( self, environ: WSGIEnvironment | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> list[tuple[str, str]]: headers = super().get_headers(environ, scope) if self.valid_methods: @@ -536,7 +538,7 @@ def __init__( def get_headers( self, environ: WSGIEnvironment | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> list[tuple[str, str]]: headers = super().get_headers(environ, scope) if self.length is not None: @@ -645,7 +647,7 @@ def __init__( def get_headers( self, environ: WSGIEnvironment | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> list[tuple[str, str]]: headers = super().get_headers(environ, scope) diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py index 5117a2673..ba84721e3 100644 --- a/src/werkzeug/formparser.py +++ b/src/werkzeug/formparser.py @@ -30,9 +30,12 @@ if t.TYPE_CHECKING: import typing as te + from _typeshed.wsgi import WSGIEnvironment - t_parse_result = t.Tuple[t.IO[bytes], MultiDict, MultiDict] + t_parse_result = t.Tuple[ + t.IO[bytes], MultiDict[str, str], MultiDict[str, FileStorage] + ] class TStreamFactory(te.Protocol): def __call__( @@ -41,8 +44,7 @@ def __call__( content_type: str | None, filename: str | None, content_length: int | None = None, - ) -> t.IO[bytes]: - ... + ) -> t.IO[bytes]: ... 
F = t.TypeVar("F", bound=t.Callable[..., t.Any]) @@ -69,7 +71,7 @@ def parse_form_data( stream_factory: TStreamFactory | None = None, max_form_memory_size: int | None = None, max_content_length: int | None = None, - cls: type[MultiDict] | None = None, + cls: type[MultiDict[str, t.Any]] | None = None, silent: bool = True, *, max_form_parts: int | None = None, @@ -170,7 +172,7 @@ def __init__( stream_factory: TStreamFactory | None = None, max_form_memory_size: int | None = None, max_content_length: int | None = None, - cls: type[MultiDict] | None = None, + cls: type[MultiDict[str, t.Any]] | None = None, silent: bool = True, *, max_form_parts: int | None = None, @@ -184,7 +186,7 @@ def __init__( self.max_form_parts = max_form_parts if cls is None: - cls = MultiDict + cls = t.cast("type[MultiDict[str, t.Any]]", MultiDict) self.cls = cls self.silent = silent @@ -296,7 +298,7 @@ def __init__( self, stream_factory: TStreamFactory | None = None, max_form_memory_size: int | None = None, - cls: type[MultiDict] | None = None, + cls: type[MultiDict[str, t.Any]] | None = None, buffer_size: int = 64 * 1024, max_form_parts: int | None = None, ) -> None: @@ -309,7 +311,7 @@ def __init__( self.stream_factory = stream_factory if cls is None: - cls = MultiDict + cls = t.cast("type[MultiDict[str, t.Any]]", MultiDict) self.cls = cls self.buffer_size = buffer_size diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py index 8280f51fa..27fa9af90 100644 --- a/src/werkzeug/http.py +++ b/src/werkzeug/http.py @@ -157,19 +157,19 @@ def quote_header_value(value: t.Any, allow_token: bool = True) -> str: .. versionadded:: 0.5 """ - value = str(value) + value_str = str(value) - if not value: + if not value_str: return '""' if allow_token: token_chars = _token_chars - if token_chars.issuperset(value): - return value + if token_chars.issuperset(value_str): + return value_str - value = value.replace("\\", "\\\\").replace('"', '\\"') - return f'"{value}"' + value_str = value_str.replace("\\", "\\\\").replace('"', '\\"') + return f'"{value_str}"' def unquote_header_value(value: str) -> str: @@ -553,13 +553,11 @@ def parse_options_header(value: str | None) -> tuple[str, dict[str, str]]: @t.overload -def parse_accept_header(value: str | None) -> ds.Accept: - ... +def parse_accept_header(value: str | None) -> ds.Accept: ... @t.overload -def parse_accept_header(value: str | None, cls: type[_TAnyAccept]) -> _TAnyAccept: - ... +def parse_accept_header(value: str | None, cls: type[_TAnyAccept]) -> _TAnyAccept: ... def parse_accept_header( @@ -616,26 +614,26 @@ def parse_accept_header( _TAnyCC = t.TypeVar("_TAnyCC", bound="ds.cache_control._CacheControl") -_t_cc_update = t.Optional[t.Callable[[_TAnyCC], None]] @t.overload def parse_cache_control_header( - value: str | None, on_update: _t_cc_update, cls: None = None -) -> ds.RequestCacheControl: - ... + value: str | None, + on_update: t.Callable[[ds.cache_control._CacheControl], None] | None = None, +) -> ds.RequestCacheControl: ... @t.overload def parse_cache_control_header( - value: str | None, on_update: _t_cc_update, cls: type[_TAnyCC] -) -> _TAnyCC: - ... + value: str | None, + on_update: t.Callable[[ds.cache_control._CacheControl], None] | None = None, + cls: type[_TAnyCC] = ..., +) -> _TAnyCC: ... def parse_cache_control_header( value: str | None, - on_update: _t_cc_update = None, + on_update: t.Callable[[ds.cache_control._CacheControl], None] | None = None, cls: type[_TAnyCC] | None = None, ) -> _TAnyCC: """Parse a cache control header. 
The RFC differs between response and @@ -655,7 +653,7 @@ def parse_cache_control_header( :return: a `cls` object. """ if cls is None: - cls = t.cast(t.Type[_TAnyCC], ds.RequestCacheControl) + cls = t.cast("type[_TAnyCC]", ds.RequestCacheControl) if not value: return cls((), on_update) @@ -664,26 +662,26 @@ def parse_cache_control_header( _TAnyCSP = t.TypeVar("_TAnyCSP", bound="ds.ContentSecurityPolicy") -_t_csp_update = t.Optional[t.Callable[[_TAnyCSP], None]] @t.overload def parse_csp_header( - value: str | None, on_update: _t_csp_update, cls: None = None -) -> ds.ContentSecurityPolicy: - ... + value: str | None, + on_update: t.Callable[[ds.ContentSecurityPolicy], None] | None = None, +) -> ds.ContentSecurityPolicy: ... @t.overload def parse_csp_header( - value: str | None, on_update: _t_csp_update, cls: type[_TAnyCSP] -) -> _TAnyCSP: - ... + value: str | None, + on_update: t.Callable[[ds.ContentSecurityPolicy], None] | None = None, + cls: type[_TAnyCSP] = ..., +) -> _TAnyCSP: ... def parse_csp_header( value: str | None, - on_update: _t_csp_update = None, + on_update: t.Callable[[ds.ContentSecurityPolicy], None] | None = None, cls: type[_TAnyCSP] | None = None, ) -> _TAnyCSP: """Parse a Content Security Policy header. @@ -699,7 +697,7 @@ def parse_csp_header( :return: a `cls` object. """ if cls is None: - cls = t.cast(t.Type[_TAnyCSP], ds.ContentSecurityPolicy) + cls = t.cast("type[_TAnyCSP]", ds.ContentSecurityPolicy) if value is None: return cls((), on_update) @@ -1160,7 +1158,7 @@ def is_hop_by_hop_header(header: str) -> bool: def parse_cookie( header: WSGIEnvironment | str | None, - cls: type[ds.MultiDict] | None = None, + cls: type[ds.MultiDict[str, str]] | None = None, ) -> ds.MultiDict[str, str]: """Parse a cookie from a string or WSGI environ. diff --git a/src/werkzeug/local.py b/src/werkzeug/local.py index 525ac0c80..302589bba 100644 --- a/src/werkzeug/local.py +++ b/src/werkzeug/local.py @@ -20,7 +20,7 @@ F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -def release_local(local: Local | LocalStack) -> None: +def release_local(local: Local | LocalStack[t.Any]) -> None: """Release the data for the current context in a :class:`Local` or :class:`LocalStack` without using a :class:`LocalManager`. @@ -64,7 +64,9 @@ def __init__(self, context_var: ContextVar[dict[str, t.Any]] | None = None) -> N def __iter__(self) -> t.Iterator[tuple[str, t.Any]]: return iter(self.__storage.get({}).items()) - def __call__(self, name: str, *, unbound_message: str | None = None) -> LocalProxy: + def __call__( + self, name: str, *, unbound_message: str | None = None + ) -> LocalProxy[t.Any]: """Create a :class:`LocalProxy` that access an attribute on this local namespace. @@ -169,7 +171,7 @@ def top(self) -> T | None: def __call__( self, name: str | None = None, *, unbound_message: str | None = None - ) -> LocalProxy: + ) -> LocalProxy[t.Any]: """Create a :class:`LocalProxy` that accesses the top of this local stack. 
@@ -205,7 +207,8 @@ class LocalManager: def __init__( self, - locals: None | (Local | LocalStack | t.Iterable[Local | LocalStack]) = None, + locals: None + | (Local | LocalStack[t.Any] | t.Iterable[Local | LocalStack[t.Any]]) = None, ) -> None: if locals is None: self.locals = [] @@ -269,23 +272,27 @@ class _ProxyLookup: def __init__( self, - f: t.Callable | None = None, - fallback: t.Callable | None = None, + f: t.Callable[..., t.Any] | None = None, + fallback: t.Callable[[LocalProxy[t.Any]], t.Any] | None = None, class_value: t.Any | None = None, is_attr: bool = False, ) -> None: - bind_f: t.Callable[[LocalProxy, t.Any], t.Callable] | None + bind_f: t.Callable[[LocalProxy[t.Any], t.Any], t.Callable[..., t.Any]] | None if hasattr(f, "__get__"): # A Python function, can be turned into a bound method. - def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable: + def bind_f( + instance: LocalProxy[t.Any], obj: t.Any + ) -> t.Callable[..., t.Any]: return f.__get__(obj, type(obj)) # type: ignore elif f is not None: # A C function, use partial to bind the first argument. - def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable: + def bind_f( + instance: LocalProxy[t.Any], obj: t.Any + ) -> t.Callable[..., t.Any]: return partial(f, obj) else: @@ -297,10 +304,10 @@ def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable: self.class_value = class_value self.is_attr = is_attr - def __set_name__(self, owner: LocalProxy, name: str) -> None: + def __set_name__(self, owner: LocalProxy[t.Any], name: str) -> None: self.name = name - def __get__(self, instance: LocalProxy, owner: type | None = None) -> t.Any: + def __get__(self, instance: LocalProxy[t.Any], owner: type | None = None) -> t.Any: if instance is None: if self.class_value is not None: return self.class_value @@ -330,7 +337,9 @@ def __get__(self, instance: LocalProxy, owner: type | None = None) -> t.Any: def __repr__(self) -> str: return f"proxy {self.name}" - def __call__(self, instance: LocalProxy, *args: t.Any, **kwargs: t.Any) -> t.Any: + def __call__( + self, instance: LocalProxy[t.Any], *args: t.Any, **kwargs: t.Any + ) -> t.Any: """Support calling unbound methods from the class. For example, this happens with ``copy.copy``, which does ``type(x).__copy__(x)``. 
``type(x)`` can't be proxied, so it @@ -347,12 +356,14 @@ class _ProxyIOp(_ProxyLookup): __slots__ = () def __init__( - self, f: t.Callable | None = None, fallback: t.Callable | None = None + self, + f: t.Callable[..., t.Any] | None = None, + fallback: t.Callable[[LocalProxy[t.Any]], t.Any] | None = None, ) -> None: super().__init__(f, fallback) - def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable: - def i_op(self: t.Any, other: t.Any) -> LocalProxy: + def bind_f(instance: LocalProxy[t.Any], obj: t.Any) -> t.Callable[..., t.Any]: + def i_op(self: t.Any, other: t.Any) -> LocalProxy[t.Any]: f(self, other) # type: ignore return instance @@ -520,32 +531,33 @@ def _get_current_object() -> T: object.__setattr__(self, "_LocalProxy__wrapped", local) object.__setattr__(self, "_get_current_object", _get_current_object) - __doc__ = _ProxyLookup( # type: ignore + __doc__ = _ProxyLookup( # type: ignore[assignment] class_value=__doc__, fallback=lambda self: type(self).__doc__, is_attr=True ) __wrapped__ = _ProxyLookup( - fallback=lambda self: self._LocalProxy__wrapped, is_attr=True + fallback=lambda self: self._LocalProxy__wrapped, # type: ignore[attr-defined] + is_attr=True, ) # __del__ should only delete the proxy - __repr__ = _ProxyLookup( # type: ignore + __repr__ = _ProxyLookup( # type: ignore[assignment] repr, fallback=lambda self: f"<{type(self).__name__} unbound>" ) - __str__ = _ProxyLookup(str) # type: ignore + __str__ = _ProxyLookup(str) # type: ignore[assignment] __bytes__ = _ProxyLookup(bytes) - __format__ = _ProxyLookup() # type: ignore + __format__ = _ProxyLookup() # type: ignore[assignment] __lt__ = _ProxyLookup(operator.lt) __le__ = _ProxyLookup(operator.le) - __eq__ = _ProxyLookup(operator.eq) # type: ignore - __ne__ = _ProxyLookup(operator.ne) # type: ignore + __eq__ = _ProxyLookup(operator.eq) # type: ignore[assignment] + __ne__ = _ProxyLookup(operator.ne) # type: ignore[assignment] __gt__ = _ProxyLookup(operator.gt) __ge__ = _ProxyLookup(operator.ge) - __hash__ = _ProxyLookup(hash) # type: ignore + __hash__ = _ProxyLookup(hash) # type: ignore[assignment] __bool__ = _ProxyLookup(bool, fallback=lambda self: False) __getattr__ = _ProxyLookup(getattr) # __getattribute__ triggered through __getattr__ - __setattr__ = _ProxyLookup(setattr) # type: ignore - __delattr__ = _ProxyLookup(delattr) # type: ignore - __dir__ = _ProxyLookup(dir, fallback=lambda self: []) # type: ignore + __setattr__ = _ProxyLookup(setattr) # type: ignore[assignment] + __delattr__ = _ProxyLookup(delattr) # type: ignore[assignment] + __dir__ = _ProxyLookup(dir, fallback=lambda self: []) # type: ignore[assignment] # __get__ (proxying descriptor not supported) # __set__ (descriptor) # __delete__ (descriptor) diff --git a/src/werkzeug/middleware/dispatcher.py b/src/werkzeug/middleware/dispatcher.py index 559fea585..e11bacc52 100644 --- a/src/werkzeug/middleware/dispatcher.py +++ b/src/werkzeug/middleware/dispatcher.py @@ -30,6 +30,7 @@ :copyright: 2007 Pallets :license: BSD-3-Clause """ + from __future__ import annotations import typing as t diff --git a/src/werkzeug/middleware/http_proxy.py b/src/werkzeug/middleware/http_proxy.py index 59ba9b324..5e239156a 100644 --- a/src/werkzeug/middleware/http_proxy.py +++ b/src/werkzeug/middleware/http_proxy.py @@ -7,6 +7,7 @@ :copyright: 2007 Pallets :license: BSD-3-Clause """ + from __future__ import annotations import typing as t diff --git a/src/werkzeug/middleware/lint.py b/src/werkzeug/middleware/lint.py index 8c858673b..de93b526a 100644 --- 
a/src/werkzeug/middleware/lint.py +++ b/src/werkzeug/middleware/lint.py @@ -12,6 +12,7 @@ :copyright: 2007 Pallets :license: BSD-3-Clause """ + from __future__ import annotations import typing as t @@ -340,10 +341,10 @@ def check_start_response( if exc_info is not None and not isinstance(exc_info, tuple): warn("Invalid value for exc_info.", WSGIWarning, stacklevel=3) - headers = Headers(headers) - self.check_headers(headers) + headers_obj = Headers(headers) + self.check_headers(headers_obj) - return status_code, headers + return status_code, headers_obj def check_headers(self, headers: Headers) -> None: etag = headers.get("etag") @@ -424,8 +425,8 @@ def checking_start_response( status: str = args[0] headers: list[tuple[str, str]] = args[1] - exc_info: None | ( - tuple[type[BaseException], BaseException, TracebackType] + exc_info: ( + None | (tuple[type[BaseException], BaseException, TracebackType]) ) = args[2] if len(args) == 3 else None headers_set[:] = self.check_start_response(status, headers, exc_info) diff --git a/src/werkzeug/middleware/profiler.py b/src/werkzeug/middleware/profiler.py index 1120c83ef..112b87776 100644 --- a/src/werkzeug/middleware/profiler.py +++ b/src/werkzeug/middleware/profiler.py @@ -11,6 +11,7 @@ :copyright: 2007 Pallets :license: BSD-3-Clause """ + from __future__ import annotations import os.path diff --git a/src/werkzeug/middleware/proxy_fix.py b/src/werkzeug/middleware/proxy_fix.py index 8dfbb36c0..cbf4e0bae 100644 --- a/src/werkzeug/middleware/proxy_fix.py +++ b/src/werkzeug/middleware/proxy_fix.py @@ -21,6 +21,7 @@ :copyright: 2007 Pallets :license: BSD-3-Clause """ + from __future__ import annotations import typing as t diff --git a/src/werkzeug/middleware/shared_data.py b/src/werkzeug/middleware/shared_data.py index e3ec7cab8..0a0c95675 100644 --- a/src/werkzeug/middleware/shared_data.py +++ b/src/werkzeug/middleware/shared_data.py @@ -8,6 +8,7 @@ :copyright: 2007 Pallets :license: BSD-3-Clause """ + from __future__ import annotations import importlib.util @@ -38,7 +39,6 @@ class SharedDataMiddleware: - """A WSGI middleware which provides static content for development environments or simple server setups. Its usage is quite simple:: @@ -218,9 +218,9 @@ def loader( return loader def generate_etag(self, mtime: datetime, file_size: int, real_filename: str) -> str: - real_filename = os.fsencode(real_filename) + fn_str = os.fsencode(real_filename) timestamp = mtime.timestamp() - checksum = adler32(real_filename) & 0xFFFFFFFF + checksum = adler32(fn_str) & 0xFFFFFFFF return f"wzsdm-{timestamp}-{file_size}-{checksum}" def __call__( diff --git a/src/werkzeug/routing/__init__.py b/src/werkzeug/routing/__init__.py index 84b043fdf..62adc48fb 100644 --- a/src/werkzeug/routing/__init__.py +++ b/src/werkzeug/routing/__init__.py @@ -105,6 +105,7 @@ routing tried to match a ``POST`` request) a ``MethodNotAllowed`` exception is raised. 
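As the docstring notes, matching a path with a method the rule does not accept raises ``MethodNotAllowed``. A small usage sketch (the endpoint and URL rule are made up)::

    from werkzeug.exceptions import MethodNotAllowed
    from werkzeug.routing import Map, Rule

    url_map = Map([Rule("/item/<int:id>", endpoint="item", methods=["GET"])])
    adapter = url_map.bind("example.com")

    print(adapter.match("/item/3"))  # ('item', {'id': 3})

    try:
        adapter.match("/item/3", method="POST")
    except MethodNotAllowed as exc:
        print(sorted(exc.valid_methods))  # typically ['GET', 'HEAD']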
""" + from .converters import AnyConverter as AnyConverter from .converters import BaseConverter as BaseConverter from .converters import FloatConverter as FloatConverter diff --git a/src/werkzeug/routing/converters.py b/src/werkzeug/routing/converters.py index ce01dd1ea..6016a975e 100644 --- a/src/werkzeug/routing/converters.py +++ b/src/werkzeug/routing/converters.py @@ -131,7 +131,7 @@ class NumberConverter(BaseConverter): """ weight = 50 - num_convert: t.Callable = int + num_convert: t.Callable[[t.Any], t.Any] = int def __init__( self, @@ -152,18 +152,18 @@ def __init__( def to_python(self, value: str) -> t.Any: if self.fixed_digits and len(value) != self.fixed_digits: raise ValidationError() - value = self.num_convert(value) - if (self.min is not None and value < self.min) or ( - self.max is not None and value > self.max + value_num = self.num_convert(value) + if (self.min is not None and value_num < self.min) or ( + self.max is not None and value_num > self.max ): raise ValidationError() - return value + return value_num def to_url(self, value: t.Any) -> str: - value = str(self.num_convert(value)) + value_str = str(self.num_convert(value)) if self.fixed_digits: - value = value.zfill(self.fixed_digits) - return value + value_str = value_str.zfill(self.fixed_digits) + return value_str @property def signed_regex(self) -> str: diff --git a/src/werkzeug/routing/exceptions.py b/src/werkzeug/routing/exceptions.py index 9d0a5281b..b63fe5b9c 100644 --- a/src/werkzeug/routing/exceptions.py +++ b/src/werkzeug/routing/exceptions.py @@ -10,10 +10,11 @@ if t.TYPE_CHECKING: from _typeshed.wsgi import WSGIEnvironment - from .map import MapAdapter - from .rules import Rule + from ..wrappers.request import Request from ..wrappers.response import Response + from .map import MapAdapter + from .rules import Rule class RoutingException(Exception): @@ -40,7 +41,7 @@ def __init__(self, new_url: str) -> None: def get_response( self, environ: WSGIEnvironment | Request | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> Response: return redirect(self.new_url, self.code) diff --git a/src/werkzeug/routing/map.py b/src/werkzeug/routing/map.py index 87b83a54c..73671bf94 100644 --- a/src/werkzeug/routing/map.py +++ b/src/werkzeug/routing/map.py @@ -32,9 +32,10 @@ if t.TYPE_CHECKING: from _typeshed.wsgi import WSGIApplication from _typeshed.wsgi import WSGIEnvironment + + from ..wrappers.request import Request from .converters import BaseConverter from .rules import RuleFactory - from ..wrappers.request import Request class Map: @@ -144,9 +145,9 @@ def is_endpoint_expecting(self, endpoint: str, *arguments: str) -> bool: checked. """ self.update() - arguments = set(arguments) + arguments_set = set(arguments) for rule in self._rules_by_endpoint[endpoint]: - if arguments.issubset(rule.arguments): + if arguments_set.issubset(rule.arguments): return True return False @@ -379,7 +380,6 @@ def __repr__(self) -> str: class MapAdapter: - """Returned by :meth:`Map.bind` or :meth:`Map.bind_to_environ` and does the URL matching and building based on runtime information. """ @@ -477,8 +477,7 @@ def match( # type: ignore return_rule: t.Literal[False] = False, query_args: t.Mapping[str, t.Any] | str | None = None, websocket: bool | None = None, - ) -> tuple[str, t.Mapping[str, t.Any]]: - ... + ) -> tuple[str, t.Mapping[str, t.Any]]: ... 
@t.overload def match( @@ -488,8 +487,7 @@ def match( return_rule: t.Literal[True] = True, query_args: t.Mapping[str, t.Any] | str | None = None, websocket: bool | None = None, - ) -> tuple[Rule, t.Mapping[str, t.Any]]: - ... + ) -> tuple[Rule, t.Mapping[str, t.Any]]: ... def match( self, diff --git a/src/werkzeug/routing/rules.py b/src/werkzeug/routing/rules.py index 7029d8bc0..75323357c 100644 --- a/src/werkzeug/routing/rules.py +++ b/src/werkzeug/routing/rules.py @@ -912,6 +912,6 @@ def __repr__(self) -> str: parts.append(f"<{data}>") else: parts.append(data) - parts = "".join(parts).lstrip("|") + parts_str = "".join(parts).lstrip("|") methods = f" ({', '.join(self.methods)})" if self.methods is not None else "" - return f"<{type(self).__name__} {parts!r}{methods} -> {self.endpoint}>" + return f"<{type(self).__name__} {parts_str!r}{methods} -> {self.endpoint}>" diff --git a/src/werkzeug/sansio/http.py b/src/werkzeug/sansio/http.py index e3cd3330c..b2b887799 100644 --- a/src/werkzeug/sansio/http.py +++ b/src/werkzeug/sansio/http.py @@ -122,7 +122,7 @@ def _cookie_unslash_replace(m: t.Match[bytes]) -> bytes: def parse_cookie( cookie: str | None = None, - cls: type[ds.MultiDict] | None = None, + cls: type[ds.MultiDict[str, str]] | None = None, ) -> ds.MultiDict[str, str]: """Parse a cookie from a string. @@ -141,7 +141,7 @@ def parse_cookie( .. versionadded:: 2.2 """ if cls is None: - cls = ds.MultiDict + cls = t.cast("type[ds.MultiDict[str, str]]", ds.MultiDict) if not cookie: return cls() diff --git a/src/werkzeug/sansio/request.py b/src/werkzeug/sansio/request.py index 41c9b18a1..dd0805d71 100644 --- a/src/werkzeug/sansio/request.py +++ b/src/werkzeug/sansio/request.py @@ -1,5 +1,6 @@ from __future__ import annotations +import typing as t from datetime import datetime from urllib.parse import parse_qsl @@ -73,7 +74,7 @@ class Request: #: possible to use mutable structures, but this is not recommended. #: #: .. versionadded:: 0.6 - parameter_storage_class: type[MultiDict] = ImmutableMultiDict + parameter_storage_class: type[MultiDict[str, t.Any]] = ImmutableMultiDict #: The type to be used for dict values from the incoming WSGI #: environment. (For example for :attr:`cookies`.) By default an @@ -83,14 +84,14 @@ class Request: #: Changed to ``ImmutableMultiDict`` to support multiple values. #: #: .. versionadded:: 0.6 - dict_storage_class: type[MultiDict] = ImmutableMultiDict + dict_storage_class: type[MultiDict[str, t.Any]] = ImmutableMultiDict #: the type to be used for list values from the incoming WSGI environment. #: By default an :class:`~werkzeug.datastructures.ImmutableList` is used #: (for example for :attr:`access_list`). #: #: .. 
versionadded:: 0.6 - list_storage_class: type[list] = ImmutableList + list_storage_class: type[list[t.Any]] = ImmutableList user_agent_class: type[UserAgent] = UserAgent """The class used and returned by the :attr:`user_agent` property to diff --git a/src/werkzeug/sansio/response.py b/src/werkzeug/sansio/response.py index 271974ecf..9093b0a8c 100644 --- a/src/werkzeug/sansio/response.py +++ b/src/werkzeug/sansio/response.py @@ -6,32 +6,35 @@ from datetime import timezone from http import HTTPStatus +from ..datastructures import CallbackDict +from ..datastructures import ContentRange +from ..datastructures import ContentSecurityPolicy from ..datastructures import Headers from ..datastructures import HeaderSet +from ..datastructures import ResponseCacheControl +from ..datastructures import WWWAuthenticate +from ..http import COEP +from ..http import COOP +from ..http import dump_age from ..http import dump_cookie +from ..http import dump_header +from ..http import dump_options_header +from ..http import http_date from ..http import HTTP_STATUS_CODES +from ..http import parse_age +from ..http import parse_cache_control_header +from ..http import parse_content_range_header +from ..http import parse_csp_header +from ..http import parse_date +from ..http import parse_options_header +from ..http import parse_set_header +from ..http import quote_etag +from ..http import unquote_etag from ..utils import get_content_type -from werkzeug.datastructures import CallbackDict -from werkzeug.datastructures import ContentRange -from werkzeug.datastructures import ContentSecurityPolicy -from werkzeug.datastructures import ResponseCacheControl -from werkzeug.datastructures import WWWAuthenticate -from werkzeug.http import COEP -from werkzeug.http import COOP -from werkzeug.http import dump_age -from werkzeug.http import dump_header -from werkzeug.http import dump_options_header -from werkzeug.http import http_date -from werkzeug.http import parse_age -from werkzeug.http import parse_cache_control_header -from werkzeug.http import parse_content_range_header -from werkzeug.http import parse_csp_header -from werkzeug.http import parse_date -from werkzeug.http import parse_options_header -from werkzeug.http import parse_set_header -from werkzeug.http import quote_etag -from werkzeug.http import unquote_etag -from werkzeug.utils import header_property +from ..utils import header_property + +if t.TYPE_CHECKING: + from ..datastructures.cache_control import _CacheControl def _set_property(name: str, doc: str | None = None) -> property: @@ -305,7 +308,7 @@ def mimetype_params(self) -> dict[str, str]: .. versionadded:: 0.5 """ - def on_update(d: CallbackDict) -> None: + def on_update(d: CallbackDict[str, str]) -> None: self.headers["Content-Type"] = dump_options_header(self.mimetype, d) d = parse_options_header(self.headers.get("content-type", ""))[1] @@ -480,7 +483,7 @@ def cache_control(self) -> ResponseCacheControl: request/response chain. 
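The ``on_update`` hook re-annotated above is what writes mutations on the returned ``ResponseCacheControl`` back into the ``Cache-Control`` header. A short usage sketch::

    from werkzeug.wrappers import Response

    resp = Response("cacheable body")
    resp.cache_control.max_age = 300
    resp.cache_control.public = True
    print(resp.headers["Cache-Control"])  # e.g. "max-age=300, public"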
""" - def on_update(cache_control: ResponseCacheControl) -> None: + def on_update(cache_control: _CacheControl) -> None: if not cache_control and "cache-control" in self.headers: del self.headers["cache-control"] elif cache_control: diff --git a/src/werkzeug/security.py b/src/werkzeug/security.py index 587a3cc74..9999509d1 100644 --- a/src/werkzeug/security.py +++ b/src/werkzeug/security.py @@ -24,8 +24,8 @@ def gen_salt(length: int) -> str: def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]: method, *args = method.split(":") - salt = salt.encode() - password = password.encode() + salt_bytes = salt.encode() + password_bytes = password.encode() if method == "scrypt": if not args: @@ -40,7 +40,9 @@ def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]: maxmem = 132 * n * r * p # ideally 128, but some extra seems needed return ( - hashlib.scrypt(password, salt=salt, n=n, r=r, p=p, maxmem=maxmem).hex(), + hashlib.scrypt( + password_bytes, salt=salt_bytes, n=n, r=r, p=p, maxmem=maxmem + ).hex(), f"scrypt:{n}:{r}:{p}", ) elif method == "pbkdf2": @@ -59,7 +61,9 @@ def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]: raise ValueError("'pbkdf2' takes 2 arguments.") return ( - hashlib.pbkdf2_hmac(hash_name, password, salt, iterations).hex(), + hashlib.pbkdf2_hmac( + hash_name, password_bytes, salt_bytes, iterations + ).hex(), f"pbkdf2:{hash_name}:{iterations}", ) else: diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py index ff5eb8c66..ad6bf911b 100644 --- a/src/werkzeug/serving.py +++ b/src/werkzeug/serving.py @@ -11,6 +11,7 @@ from myapp import create_app from werkzeug import run_simple """ + from __future__ import annotations import errno @@ -496,10 +497,10 @@ def generate_adhoc_ssl_pair( ) -> tuple[Certificate, RSAPrivateKeyWithSerialization]: try: from cryptography import x509 - from cryptography.x509.oid import NameOID from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import rsa + from cryptography.x509.oid import NameOID except ImportError: raise TypeError( "Using ad-hoc certificates requires the cryptography library." 
@@ -583,8 +584,8 @@ def make_ssl_devcert( def generate_adhoc_ssl_context() -> ssl.SSLContext: """Generates an adhoc SSL context for the development server.""" - import tempfile import atexit + import tempfile cert, pkey = generate_adhoc_ssl_pair() diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py index 694e5d8e5..38f69bfb9 100644 --- a/src/werkzeug/test.py +++ b/src/werkzeug/test.py @@ -46,9 +46,9 @@ from .wsgi import get_current_url if t.TYPE_CHECKING: + import typing_extensions as te from _typeshed.wsgi import WSGIApplication from _typeshed.wsgi import WSGIEnvironment - import typing_extensions as te def stream_encode_multipart( @@ -172,7 +172,7 @@ def _iter_data(data: t.Mapping[str, t.Any]) -> t.Iterator[tuple[str, t.Any]]: yield key, value -_TAnyMultiDict = t.TypeVar("_TAnyMultiDict", bound=MultiDict) +_TAnyMultiDict = t.TypeVar("_TAnyMultiDict", bound="MultiDict[t.Any, t.Any]") class EnvironBuilder: @@ -289,10 +289,10 @@ class EnvironBuilder: json_dumps = staticmethod(json.dumps) del json - _args: MultiDict | None + _args: MultiDict[str, str] | None _query_string: str | None _input_stream: t.IO[bytes] | None - _form: MultiDict | None + _form: MultiDict[str, str] | None _files: FileMultiDict | None def __init__( @@ -506,7 +506,7 @@ def mimetype_params(self) -> t.Mapping[str, str]: .. versionadded:: 0.14 """ - def on_update(d: CallbackDict) -> None: + def on_update(d: CallbackDict[str, str]) -> None: self.headers["Content-Type"] = dump_options_header(self.mimetype, d) d = parse_options_header(self.headers.get("content-type", ""))[1] @@ -545,7 +545,7 @@ def _get_form(self, name: str, storage: type[_TAnyMultiDict]) -> _TAnyMultiDict: return rv # type: ignore - def _set_form(self, name: str, value: MultiDict) -> None: + def _set_form(self, name: str, value: MultiDict[str, t.Any]) -> None: """Common behavior for setting the :attr:`form` and :attr:`files` properties. @@ -556,12 +556,12 @@ def _set_form(self, name: str, value: MultiDict) -> None: setattr(self, name, value) @property - def form(self) -> MultiDict: + def form(self) -> MultiDict[str, str]: """A :class:`MultiDict` of form values.""" return self._get_form("_form", MultiDict) @form.setter - def form(self, value: MultiDict) -> None: + def form(self, value: MultiDict[str, str]) -> None: self._set_form("_form", value) @property @@ -607,7 +607,7 @@ def query_string(self, value: str | None) -> None: self._args = None @property - def args(self) -> MultiDict: + def args(self) -> MultiDict[str, str]: """The URL arguments as :class:`MultiDict`.""" if self._query_string is not None: raise AttributeError("a query string is defined") @@ -616,7 +616,7 @@ def args(self) -> MultiDict: return self._args @args.setter - def args(self, value: MultiDict | None) -> None: + def args(self, value: MultiDict[str, str] | None) -> None: self._query_string = None self._args = value @@ -1113,8 +1113,8 @@ def open( finally: builder.close() - response = self.run_wsgi_app(request.environ, buffered=buffered) - response = self.response_wrapper(*response, request=request) + response_parts = self.run_wsgi_app(request.environ, buffered=buffered) + response = self.response_wrapper(*response_parts, request=request) redirects = set() history: list[TestResponse] = [] diff --git a/src/werkzeug/testapp.py b/src/werkzeug/testapp.py index 57f1f6fdf..cdf7fac1a 100644 --- a/src/werkzeug/testapp.py +++ b/src/werkzeug/testapp.py @@ -1,8 +1,10 @@ """A small application that can be used to test a WSGI server and check it for WSGI compliance. 
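The test application below can be served directly with the development server; the host and port here are arbitrary::

    from werkzeug.serving import run_simple
    from werkzeug.testapp import test_app

    # Renders an HTML page describing the WSGI environment at
    # http://localhost:5000/ until interrupted.
    run_simple("localhost", 5000, test_app)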
""" + from __future__ import annotations +import importlib.metadata import os import sys import typing as t @@ -10,7 +12,6 @@ from markupsafe import escape -from . import __version__ as _werkzeug_version from .wrappers.request import Request from .wrappers.response import Response @@ -153,13 +154,13 @@ def test_app(req: Request) -> Response: sys_path = [] for item, virtual, expanded in iter_sys_path(): - class_ = [] + css = [] if virtual: - class_.append("virtual") + css.append("virtual") if expanded: - class_.append("exp") - class_ = f' class="{" ".join(class_)}"' if class_ else "" - sys_path.append(f"{escape(item)}") + css.append("exp") + class_str = f' class="{" ".join(css)}"' if css else "" + sys_path.append(f"{escape(item)}") context = { "python_version": "
".join(escape(sys.version).splitlines()), @@ -167,7 +168,7 @@ def test_app(req: Request) -> Response: "os": escape(os.name), "api_version": sys.api_version, "byteorder": sys.byteorder, - "werkzeug_version": _werkzeug_version, + "werkzeug_version": _get_werkzeug_version(), "python_eggs": "\n".join(python_eggs), "wsgi_env": "\n".join(wsgi_env), "sys_path": "\n".join(sys_path), @@ -175,6 +176,18 @@ def test_app(req: Request) -> Response: return Response(TEMPLATE % context, mimetype="text/html") +_werkzeug_version = "" + + +def _get_werkzeug_version() -> str: + global _werkzeug_version + + if not _werkzeug_version: + _werkzeug_version = importlib.metadata.version("werkzeug") + + return _werkzeug_version + + if __name__ == "__main__": from .serving import run_simple diff --git a/src/werkzeug/utils.py b/src/werkzeug/utils.py index 32ca9dad6..59b97b732 100644 --- a/src/werkzeug/utils.py +++ b/src/werkzeug/utils.py @@ -26,6 +26,7 @@ if t.TYPE_CHECKING: from _typeshed.wsgi import WSGIEnvironment + from .wrappers.request import Request from .wrappers.response import Response @@ -316,7 +317,7 @@ def append_slash_redirect(environ: WSGIEnvironment, code: int = 308) -> Response def send_file( - path_or_file: os.PathLike | str | t.IO[bytes], + path_or_file: os.PathLike[str] | str | t.IO[bytes], environ: WSGIEnvironment, mimetype: str | None = None, as_attachment: bool = False, @@ -327,7 +328,7 @@ def send_file( max_age: None | (int | t.Callable[[str | None], int | None]) = None, use_x_sendfile: bool = False, response_class: type[Response] | None = None, - _root_path: os.PathLike | str | None = None, + _root_path: os.PathLike[str] | str | None = None, ) -> Response: """Send the contents of a file to the client. @@ -415,7 +416,7 @@ def send_file( if isinstance(path_or_file, (os.PathLike, str)) or hasattr( path_or_file, "__fspath__" ): - path_or_file = t.cast(t.Union[os.PathLike, str], path_or_file) + path_or_file = t.cast("t.Union[os.PathLike[str], str]", path_or_file) # Flask will pass app.root_path, allowing its send_file wrapper # to not have to deal with paths. @@ -535,8 +536,8 @@ def send_file( def send_from_directory( - directory: os.PathLike | str, - path: os.PathLike | str, + directory: os.PathLike[str] | str, + path: os.PathLike[str] | str, environ: WSGIEnvironment, **kwargs: t.Any, ) -> Response: @@ -560,20 +561,20 @@ def send_from_directory( .. versionadded:: 2.0 Adapted from Flask's implementation. """ - path = safe_join(os.fspath(directory), os.fspath(path)) + path_str = safe_join(os.fspath(directory), os.fspath(path)) - if path is None: + if path_str is None: raise NotFound() # Flask will pass app.root_path, allowing its send_from_directory # wrapper to not have to deal with paths. 
if "_root_path" in kwargs: - path = os.path.join(kwargs["_root_path"], path) + path_str = os.path.join(kwargs["_root_path"], path_str) - if not os.path.isfile(path): + if not os.path.isfile(path_str): raise NotFound() - return send_file(path, environ, **kwargs) + return send_file(path_str, environ, **kwargs) def import_string(import_name: str, silent: bool = False) -> t.Any: diff --git a/src/werkzeug/wrappers/request.py b/src/werkzeug/wrappers/request.py index 25b091691..38053c210 100644 --- a/src/werkzeug/wrappers/request.py +++ b/src/werkzeug/wrappers/request.py @@ -1,5 +1,6 @@ from __future__ import annotations +import collections.abc as cabc import functools import json import typing as t @@ -182,13 +183,13 @@ def my_wsgi_app(request): from ..exceptions import HTTPException @functools.wraps(f) - def application(*args): # type: ignore + def application(*args: t.Any) -> cabc.Iterable[bytes]: request = cls(args[-2]) with request: try: resp = f(*args[:-2] + (request,)) except HTTPException as e: - resp = e.get_response(args[-2]) + resp = t.cast("WSGIApplication", e.get_response(args[-2])) return resp(*args[-2:]) return t.cast("WSGIApplication", application) @@ -374,8 +375,7 @@ def get_data( # type: ignore cache: bool = True, as_text: t.Literal[False] = False, parse_form_data: bool = False, - ) -> bytes: - ... + ) -> bytes: ... @t.overload def get_data( @@ -383,8 +383,7 @@ def get_data( cache: bool = True, as_text: t.Literal[True] = ..., parse_form_data: bool = False, - ) -> str: - ... + ) -> str: ... def get_data( self, cache: bool = True, as_text: bool = False, parse_form_data: bool = False @@ -564,14 +563,12 @@ def json(self) -> t.Any | None: @t.overload def get_json( self, force: bool = ..., silent: t.Literal[False] = ..., cache: bool = ... - ) -> t.Any: - ... + ) -> t.Any: ... @t.overload def get_json( self, force: bool = ..., silent: bool = ..., cache: bool = ... - ) -> t.Any | None: - ... + ) -> t.Any | None: ... def get_json( self, force: bool = False, silent: bool = False, cache: bool = True diff --git a/src/werkzeug/wrappers/response.py b/src/werkzeug/wrappers/response.py index ee5c69478..7b666e3e8 100644 --- a/src/werkzeug/wrappers/response.py +++ b/src/werkzeug/wrappers/response.py @@ -5,26 +5,27 @@ from http import HTTPStatus from urllib.parse import urljoin +from .._internal import _get_environ from ..datastructures import Headers +from ..http import generate_etag +from ..http import http_date +from ..http import is_resource_modified +from ..http import parse_etags +from ..http import parse_range_header from ..http import remove_entity_headers from ..sansio.response import Response as _SansIOResponse from ..urls import _invalid_iri_to_uri from ..urls import iri_to_uri from ..utils import cached_property +from ..wsgi import _RangeWrapper from ..wsgi import ClosingIterator from ..wsgi import get_current_url -from werkzeug._internal import _get_environ -from werkzeug.http import generate_etag -from werkzeug.http import http_date -from werkzeug.http import is_resource_modified -from werkzeug.http import parse_etags -from werkzeug.http import parse_range_header -from werkzeug.wsgi import _RangeWrapper if t.TYPE_CHECKING: from _typeshed.wsgi import StartResponse from _typeshed.wsgi import WSGIApplication from _typeshed.wsgi import WSGIEnvironment + from .request import Request @@ -260,12 +261,10 @@ def from_app( return cls(*run_wsgi_app(app, environ, buffered)) @t.overload - def get_data(self, as_text: t.Literal[False] = False) -> bytes: - ... 
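Taken together, the ``Request.application`` decorator and ``send_from_directory`` cover a tiny static-file app; the directory name here is made up, and the ``NotFound`` raised for missing or traversal paths is turned into a 404 response by the decorator::

    from werkzeug.utils import send_from_directory
    from werkzeug.wrappers import Request, Response

    @Request.application
    def app(request: Request) -> Response:
        # safe_join() inside send_from_directory rejects paths escaping
        # the "static" directory by raising NotFound.
        return send_from_directory("static", request.path.lstrip("/"),
                                    request.environ)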
+ def get_data(self, as_text: t.Literal[False] = False) -> bytes: ... @t.overload - def get_data(self, as_text: t.Literal[True]) -> str: - ... + def get_data(self, as_text: t.Literal[True]) -> str: ... def get_data(self, as_text: bool = False) -> bytes | str: """The string representation of the response body. Whenever you call @@ -595,12 +594,10 @@ def json(self) -> t.Any | None: return self.get_json() @t.overload - def get_json(self, force: bool = ..., silent: t.Literal[False] = ...) -> t.Any: - ... + def get_json(self, force: bool = ..., silent: t.Literal[False] = ...) -> t.Any: ... @t.overload - def get_json(self, force: bool = ..., silent: bool = ...) -> t.Any | None: - ... + def get_json(self, force: bool = ..., silent: bool = ...) -> t.Any | None: ... def get_json(self, force: bool = False, silent: bool = False) -> t.Any | None: """Parse :attr:`data` as JSON. Useful during testing. diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 91ad1a7ce..ad20b3f8b 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -7,7 +7,8 @@ from werkzeug import exceptions from werkzeug.datastructures import Headers from werkzeug.datastructures import WWWAuthenticate -from werkzeug.exceptions import default_exceptions, HTTPException +from werkzeug.exceptions import default_exceptions +from werkzeug.exceptions import HTTPException from werkzeug.wrappers import Response diff --git a/tests/test_utils.py b/tests/test_utils.py index b7f1bcb1a..c48eba556 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -176,6 +176,7 @@ def test_assign(): def test_import_string(): from datetime import date + from werkzeug.debug import DebuggedApplication assert utils.import_string("datetime.date") is date
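For completeness, the ``Literal`` overloads joined onto single lines above only affect type checking; at runtime ``as_text`` still selects between ``bytes`` and ``str``, and ``get_json`` parses the body when the mimetype is JSON. A quick sketch against a hand-built response (the body is made up)::

    import json

    from werkzeug.wrappers import Response

    resp = Response(json.dumps({"ok": True}), mimetype="application/json")
    raw = resp.get_data()               # bytes
    text = resp.get_data(as_text=True)  # str
    assert raw.decode() == text
    print(resp.get_json())              # {'ok': True}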