From c0ab71c4a371697cca2d35f2f7511785cdd16c1f Mon Sep 17 00:00:00 2001 From: XueFeng <77104643+Lns-XueFeng@users.noreply.github.com> Date: Mon, 1 May 2023 00:19:38 +0800 Subject: [PATCH 001/159] fix example in docs (#2677) --- docs/test.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/test.rst b/docs/test.rst index 704eb5f59..d31ac5938 100644 --- a/docs/test.rst +++ b/docs/test.rst @@ -18,8 +18,8 @@ requests. >>> response = c.get("/") >>> response.status_code 200 ->>> resp.headers -Headers([('Content-Type', 'text/html; charset=utf-8'), ('Content-Length', '6658')]) +>>> response.headers +Headers([('Content-Type', 'text/html; charset=utf-8'), ('Content-Length', '5211')]) >>> response.get_data(as_text=True) '...' From 7a33be5edacfe823067a90fae5cbb19551895d9c Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 1 May 2023 08:26:44 -0700 Subject: [PATCH 002/159] release version 2.3.3 --- CHANGES.rst | 2 +- src/werkzeug/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 091aa553b..3e46b1ec7 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,7 @@ Version 2.3.3 ------------- -Unreleased +Released 2023-05-01 - Fix parsing of large multipart bodies. Remove invalid leading newline, and restore parsing speed. :issue:`2658, 2675` diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py index 64640b0ea..a448e0f32 100644 --- a/src/werkzeug/__init__.py +++ b/src/werkzeug/__init__.py @@ -3,4 +3,4 @@ from .wrappers import Request as Request from .wrappers import Response as Response -__version__ = "2.3.3.dev" +__version__ = "2.3.3" From a2b1560bf325f54c88314490f041c011aee5ca4c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 May 2023 16:57:12 +0000 Subject: [PATCH 003/159] Bump actions/setup-python from 4.5.0 to 4.6.0 Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4.5.0 to 4.6.0. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435...57ded4d7d5e986d7296eab16560982c6dd7c923b) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-minor ... 
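For context on the docs/test.rst fix in the first patch above: the corrected doctest assumes a WSGI application is already bound to the test client. A minimal sketch of such a setup, where the trivial ``Response`` app and its body are illustrative assumptions rather than part of the patch, would be::

    >>> from werkzeug.test import Client
    >>> from werkzeug.wrappers import Response
    >>> c = Client(Response("Hello, World!", mimetype="text/html"))
    >>> response = c.get("/")
    >>> response.status_code
    200
    >>> response.get_data(as_text=True)
    'Hello, World!'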
Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 2 +- .github/workflows/tests.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 45a9c51b3..9fd54feb3 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -10,7 +10,7 @@ jobs: hash: ${{ steps.hash.outputs.hash }} steps: - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 - - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 + - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b with: python-version: '3.x' cache: 'pip' diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 682e07440..272b39f79 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -35,7 +35,7 @@ jobs: - {name: Typing, python: '3.11', os: ubuntu-latest, tox: typing} steps: - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 - - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 + - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b with: python-version: ${{ matrix.python }} cache: 'pip' From 3507c70be9a7e3e22076bd98847f4947af203ac7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 May 2023 16:57:18 +0000 Subject: [PATCH 004/159] Bump pypa/gh-action-pypi-publish from 1.8.4 to 1.8.5 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.4 to 1.8.5. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/29930c9cf57955dc1b98162d0d8bc3ec80d9e75c...0bf742be3ebe032c25dd15117957dc15d0cfc38d) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 45a9c51b3..73303f3d8 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -63,10 +63,10 @@ jobs: steps: - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # Try uploading to Test PyPI first, in case something fails. - - uses: pypa/gh-action-pypi-publish@29930c9cf57955dc1b98162d0d8bc3ec80d9e75c + - uses: pypa/gh-action-pypi-publish@0bf742be3ebe032c25dd15117957dc15d0cfc38d with: repository_url: https://test.pypi.org/legacy/ packages_dir: artifact/ - - uses: pypa/gh-action-pypi-publish@29930c9cf57955dc1b98162d0d8bc3ec80d9e75c + - uses: pypa/gh-action-pypi-publish@0bf742be3ebe032c25dd15117957dc15d0cfc38d with: packages_dir: artifact/ From dd3b231fd347837d13da00dcce2c17707c9896c3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 May 2023 17:17:39 +0000 Subject: [PATCH 005/159] Bump actions/checkout from 3.5.0 to 3.5.2 Bumps [actions/checkout](https://github.com/actions/checkout) from 3.5.0 to 3.5.2. 
- [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/8f4b7f84864484a7bf31766abe9204da3cbe65b3...8e5e7e5ab8b370d6c329ec480221332ada57f0ab) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 2 +- .github/workflows/tests.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 9fd54feb3..437ec9e6b 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,7 +9,7 @@ jobs: outputs: hash: ${{ steps.hash.outputs.hash }} steps: - - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b with: python-version: '3.x' diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 272b39f79..130b0ddb7 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -34,7 +34,7 @@ jobs: - {name: 'PyPy', python: 'pypy-3.9', os: ubuntu-latest, tox: pypy39} - {name: Typing, python: '3.11', os: ubuntu-latest, tox: typing} steps: - - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b with: python-version: ${{ matrix.python }} From 486e298f4f7736d9711c5f5ffa1f50a17b079ac0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Jun 2023 16:56:57 +0000 Subject: [PATCH 006/159] Bump actions/setup-python from 4.6.0 to 4.6.1 Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4.6.0 to 4.6.1. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/57ded4d7d5e986d7296eab16560982c6dd7c923b...bd6b4b6205c4dbad673328db7b31b7fab9e241c0) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 2 +- .github/workflows/tests.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 82285542d..254eb533b 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -10,7 +10,7 @@ jobs: hash: ${{ steps.hash.outputs.hash }} steps: - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab - - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b + - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 with: python-version: '3.x' cache: 'pip' diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 18fb9c899..318bafdc1 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -35,7 +35,7 @@ jobs: - {name: Typing, python: '3.11', os: ubuntu-latest, tox: typing} steps: - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab - - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b + - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 with: python-version: ${{ matrix.python }} cache: 'pip' From b3dfe6502c93fc50d82f14add67ccdfe732cae09 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Jun 2023 16:57:01 +0000 Subject: [PATCH 007/159] Bump slsa-framework/slsa-github-generator from 1.5.0 to 1.6.0 Bumps [slsa-framework/slsa-github-generator](https://github.com/slsa-framework/slsa-github-generator) from 1.5.0 to 1.6.0. - [Release notes](https://github.com/slsa-framework/slsa-github-generator/releases) - [Changelog](https://github.com/slsa-framework/slsa-github-generator/blob/main/CHANGELOG.md) - [Commits](https://github.com/slsa-framework/slsa-github-generator/compare/v1.5.0...v1.6.0) --- updated-dependencies: - dependency-name: slsa-framework/slsa-github-generator dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 82285542d..5238bed69 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -33,7 +33,7 @@ jobs: id-token: write contents: write # Can't pin with hash due to how this workflow works. - uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.5.0 + uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.6.0 with: base64-subjects: ${{ needs.build.outputs.hash }} create-release: From 603fc6f1558d5cc4af778aac65f3047d13f6cf84 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Jun 2023 16:57:06 +0000 Subject: [PATCH 008/159] Bump pypa/gh-action-pypi-publish from 1.8.5 to 1.8.6 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.5 to 1.8.6. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/0bf742be3ebe032c25dd15117957dc15d0cfc38d...a56da0b891b3dc519c7ee3284aff1fad93cc8598) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 82285542d..e660aa599 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -63,10 +63,10 @@ jobs: steps: - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # Try uploading to Test PyPI first, in case something fails. - - uses: pypa/gh-action-pypi-publish@0bf742be3ebe032c25dd15117957dc15d0cfc38d + - uses: pypa/gh-action-pypi-publish@a56da0b891b3dc519c7ee3284aff1fad93cc8598 with: repository-url: https://test.pypi.org/legacy/ packages-dir: artifact/ - - uses: pypa/gh-action-pypi-publish@0bf742be3ebe032c25dd15117957dc15d0cfc38d + - uses: pypa/gh-action-pypi-publish@a56da0b891b3dc519c7ee3284aff1fad93cc8598 with: packages-dir: artifact/ From ed495185e46e0b87b9f0d0d2d45ad7cdba5bfdee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 1 Jul 2023 16:38:07 +0000 Subject: [PATCH 009/159] Bump dessant/lock-threads from 4.0.0 to 4.0.1 Bumps [dessant/lock-threads](https://github.com/dessant/lock-threads) from 4.0.0 to 4.0.1. - [Release notes](https://github.com/dessant/lock-threads/releases) - [Changelog](https://github.com/dessant/lock-threads/blob/main/CHANGELOG.md) - [Commits](https://github.com/dessant/lock-threads/compare/c1b35aecc5cdb1a34539d14196df55838bb2f836...be8aa5be94131386884a6da4189effda9b14aa21) --- updated-dependencies: - dependency-name: dessant/lock-threads dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/lock.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lock.yaml b/.github/workflows/lock.yaml index c790fae5c..e962fd041 100644 --- a/.github/workflows/lock.yaml +++ b/.github/workflows/lock.yaml @@ -19,7 +19,7 @@ jobs: lock: runs-on: ubuntu-latest steps: - - uses: dessant/lock-threads@c1b35aecc5cdb1a34539d14196df55838bb2f836 + - uses: dessant/lock-threads@be8aa5be94131386884a6da4189effda9b14aa21 with: issue-inactive-days: 14 pr-inactive-days: 14 From 9b5eaf09913a083c30a1737f0d4fe104ff849471 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 1 Jul 2023 16:38:11 +0000 Subject: [PATCH 010/159] Bump slsa-framework/slsa-github-generator from 1.6.0 to 1.7.0 Bumps [slsa-framework/slsa-github-generator](https://github.com/slsa-framework/slsa-github-generator) from 1.6.0 to 1.7.0. - [Release notes](https://github.com/slsa-framework/slsa-github-generator/releases) - [Changelog](https://github.com/slsa-framework/slsa-github-generator/blob/main/CHANGELOG.md) - [Commits](https://github.com/slsa-framework/slsa-github-generator/compare/v1.6.0...v1.7.0) --- updated-dependencies: - dependency-name: slsa-framework/slsa-github-generator dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 4523d071d..aed998b3b 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -33,7 +33,7 @@ jobs: id-token: write contents: write # Can't pin with hash due to how this workflow works. 
- uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.6.0 + uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.7.0 with: base64-subjects: ${{ needs.build.outputs.hash }} create-release: From 753163234ebff57a3ca595e6948c1cb0255bb9e2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 1 Jul 2023 16:38:15 +0000 Subject: [PATCH 011/159] Bump actions/checkout from 3.5.2 to 3.5.3 Bumps [actions/checkout](https://github.com/actions/checkout) from 3.5.2 to 3.5.3. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/8e5e7e5ab8b370d6c329ec480221332ada57f0ab...c85c95e3d7251135ab7dc9ce3241c5835cc595a9) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 2 +- .github/workflows/tests.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 4523d071d..993b8d492 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,7 +9,7 @@ jobs: outputs: hash: ${{ steps.hash.outputs.hash }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 with: python-version: '3.x' diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index a5aac417f..f66e89c3c 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -34,7 +34,7 @@ jobs: - {name: 'PyPy', python: 'pypy-3.10', os: ubuntu-latest, tox: pypy310} - {name: Typing, python: '3.11', os: ubuntu-latest, tox: typing} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 with: python-version: ${{ matrix.python }} From 1954694c559b963eea8ea46a3916c9cb07b65013 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 1 Jul 2023 16:38:18 +0000 Subject: [PATCH 012/159] Bump pypa/gh-action-pypi-publish from 1.8.6 to 1.8.7 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.6 to 1.8.7. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/a56da0b891b3dc519c7ee3284aff1fad93cc8598...f5622bde02b04381239da3573277701ceca8f6a0) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 4523d071d..27cc1c443 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -63,10 +63,10 @@ jobs: steps: - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # Try uploading to Test PyPI first, in case something fails. 
- - uses: pypa/gh-action-pypi-publish@a56da0b891b3dc519c7ee3284aff1fad93cc8598 + - uses: pypa/gh-action-pypi-publish@f5622bde02b04381239da3573277701ceca8f6a0 with: repository-url: https://test.pypi.org/legacy/ packages-dir: artifact/ - - uses: pypa/gh-action-pypi-publish@a56da0b891b3dc519c7ee3284aff1fad93cc8598 + - uses: pypa/gh-action-pypi-publish@f5622bde02b04381239da3573277701ceca8f6a0 with: packages-dir: artifact/ From 3e2e3c74f94df174cc777a19c9c2fe4abfb6bd8a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Aug 2023 16:19:14 +0000 Subject: [PATCH 013/159] Bump pypa/gh-action-pypi-publish from 1.8.7 to 1.8.8 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.7 to 1.8.8. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/f5622bde02b04381239da3573277701ceca8f6a0...f8c70e705ffc13c3b4d1221169b84f12a75d6ca8) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index af2ab1b66..9379792b9 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -63,10 +63,10 @@ jobs: steps: - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # Try uploading to Test PyPI first, in case something fails. - - uses: pypa/gh-action-pypi-publish@f5622bde02b04381239da3573277701ceca8f6a0 + - uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 with: repository-url: https://test.pypi.org/legacy/ packages-dir: artifact/ - - uses: pypa/gh-action-pypi-publish@f5622bde02b04381239da3573277701ceca8f6a0 + - uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 with: packages-dir: artifact/ From 9148746e97522b4918e6f5d355b8417197aa140f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Aug 2023 16:19:17 +0000 Subject: [PATCH 014/159] Bump actions/setup-python from 4.6.1 to 4.7.0 Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4.6.1 to 4.7.0. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/bd6b4b6205c4dbad673328db7b31b7fab9e241c0...61a6322f88396a6271a6ee3565807d608ecaddd1) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 2 +- .github/workflows/tests.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index af2ab1b66..708cfa558 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -10,7 +10,7 @@ jobs: hash: ${{ steps.hash.outputs.hash }} steps: - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 - - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 with: python-version: '3.x' cache: 'pip' diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index f66e89c3c..8dbd601ad 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -35,7 +35,7 @@ jobs: - {name: Typing, python: '3.11', os: ubuntu-latest, tox: typing} steps: - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 - - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 with: python-version: ${{ matrix.python }} cache: 'pip' From 43d6cdca9d017bc9150dcce5fa16519db050a09a Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 14 Aug 2023 07:20:01 -0700 Subject: [PATCH 015/159] start version 3.0.0 --- CHANGES.rst | 6 ++++++ src/werkzeug/__init__.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 96e67009b..1c62e2c69 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,11 @@ .. currentmodule:: werkzeug +Version 3.0.0 +------------- + +Unreleased + + Version 2.3.8 ------------- diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py index f6b9f5786..0436769c9 100644 --- a/src/werkzeug/__init__.py +++ b/src/werkzeug/__init__.py @@ -3,4 +3,4 @@ from .wrappers import Request as Request from .wrappers import Response as Response -__version__ = "2.3.7.dev" +__version__ = "3.0.0.dev" From 0266f51cc341bc17e0b9ed61314db5a5344274b9 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 14 Aug 2023 08:55:12 -0700 Subject: [PATCH 016/159] remove urllib.parse copy --- CHANGES.rst | 6 +- src/werkzeug/_internal.py | 25 - src/werkzeug/test.py | 4 +- src/werkzeug/urls.py | 1241 ++----------------------------------- tests/test_urls.py | 300 --------- 5 files changed, 46 insertions(+), 1530 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 1c62e2c69..c2e67e1dc 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,8 @@ Version 3.0.0 Unreleased +- Remove previously deprecated code. :pr:`2768` + Version 2.3.8 ------------- @@ -1796,8 +1798,8 @@ Version 0.9.2 (bugfix release, released on July 18th 2013) -- Added `unsafe` parameter to :func:`~werkzeug.urls.url_quote`. -- Fixed an issue with :func:`~werkzeug.urls.url_quote_plus` not quoting +- Added ``unsafe`` parameter to ``urls.url_quote``. +- Fixed an issue with ``urls.url_quote_plus`` not quoting `'+'` correctly. - Ported remaining parts of :class:`~werkzeug.contrib.RedisCache` to Python 3.3. diff --git a/src/werkzeug/_internal.py b/src/werkzeug/_internal.py index 6ed4d3024..c158c92df 100644 --- a/src/werkzeug/_internal.py +++ b/src/werkzeug/_internal.py @@ -287,31 +287,6 @@ def __repr__(self) -> str: return f"<{type(self).__name__} {self.name}>" -def _decode_idna(domain: str) -> str: - try: - data = domain.encode("ascii") - except UnicodeEncodeError: - # If the domain is not ASCII, it's decoded already. 
- return domain - - try: - # Try decoding in one shot. - return data.decode("idna") - except UnicodeDecodeError: - pass - - # Decode each part separately, leaving invalid parts as punycode. - parts = [] - - for part in data.split(b"."): - try: - parts.append(part.decode("idna")) - except UnicodeDecodeError: - parts.append(part.decode("ascii")) - - return ".".join(parts) - - _plain_int_re = re.compile(r"-?\d+", re.ASCII) diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py index 968553f2b..3be9b20f3 100644 --- a/src/werkzeug/test.py +++ b/src/werkzeug/test.py @@ -354,9 +354,7 @@ def __init__( self.path = iri_to_uri(request_uri.path) self.request_uri = path if base_url is not None: - base_url = iri_to_uri( - base_url, charset=charset if charset != "utf-8" else None - ) + base_url = iri_to_uri(base_url) self.base_url = base_url # type: ignore if isinstance(query_string, str): self.query_string = query_string diff --git a/src/werkzeug/urls.py b/src/werkzeug/urls.py index f5760eb4c..cf29da0d9 100644 --- a/src/werkzeug/urls.py +++ b/src/werkzeug/urls.py @@ -1,796 +1,16 @@ -"""Functions for working with URLs. - -Contains implementations of functions from :mod:`urllib.parse` that -handle bytes and strings. -""" from __future__ import annotations import codecs -import os import re import typing as t -import warnings from urllib.parse import quote from urllib.parse import unquote from urllib.parse import urlencode from urllib.parse import urlsplit from urllib.parse import urlunsplit -from ._internal import _check_str_tuple -from ._internal import _decode_idna -from ._internal import _make_encode_wrapper -from ._internal import _to_str from .datastructures import iter_multi_items -if t.TYPE_CHECKING: - from . import datastructures as ds - -# A regular expression for what a valid schema looks like -_scheme_re = re.compile(r"^[a-zA-Z0-9+-.]+$") - -# Characters that are safe in any part of an URL. -_always_safe_chars = ( - "abcdefghijklmnopqrstuvwxyz" - "ABCDEFGHIJKLMNOPQRSTUVWXYZ" - "0123456789" - "-._~" - "$!'()*+,;" # RFC3986 sub-delims set, not including query string delimiters &= -) -_always_safe = frozenset(_always_safe_chars.encode("ascii")) - -_hexdigits = "0123456789ABCDEFabcdef" -_hextobyte = { - f"{a}{b}".encode("ascii"): int(f"{a}{b}", 16) - for a in _hexdigits - for b in _hexdigits -} -_bytetohex = [f"%{char:02X}".encode("ascii") for char in range(256)] - - -class _URLTuple(t.NamedTuple): - scheme: str - netloc: str - path: str - query: str - fragment: str - - -class BaseURL(_URLTuple): - """Superclass of :py:class:`URL` and :py:class:`BytesURL`. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Use the ``urllib.parse`` library instead. - """ - - __slots__ = () - _at: str - _colon: str - _lbracket: str - _rbracket: str - - def __new__(cls, *args: t.Any, **kwargs: t.Any) -> BaseURL: - warnings.warn( - f"'werkzeug.urls.{cls.__name__}' is deprecated and will be removed in" - " Werkzeug 3.0. Use the 'urllib.parse' library instead.", - DeprecationWarning, - stacklevel=2, - ) - return super().__new__(cls, *args, **kwargs) - - def __str__(self) -> str: - return self.to_url() - - def replace(self, **kwargs: t.Any) -> BaseURL: - """Return an URL with the same values, except for those parameters - given new values by whichever keyword arguments are specified.""" - return self._replace(**kwargs) - - @property - def host(self) -> str | None: - """The host part of the URL if available, otherwise `None`. The - host is either the hostname or the IP address mentioned in the - URL. 
It will not contain the port. - """ - return self._split_host()[0] - - @property - def ascii_host(self) -> str | None: - """Works exactly like :attr:`host` but will return a result that - is restricted to ASCII. If it finds a netloc that is not ASCII - it will attempt to idna decode it. This is useful for socket - operations when the URL might include internationalized characters. - """ - rv = self.host - if rv is not None and isinstance(rv, str): - try: - rv = rv.encode("idna").decode("ascii") - except UnicodeError: - pass - return rv - - @property - def port(self) -> int | None: - """The port in the URL as an integer if it was present, `None` - otherwise. This does not fill in default ports. - """ - try: - rv = int(_to_str(self._split_host()[1])) - if 0 <= rv <= 65535: - return rv - except (ValueError, TypeError): - pass - return None - - @property - def auth(self) -> str | None: - """The authentication part in the URL if available, `None` - otherwise. - """ - return self._split_netloc()[0] - - @property - def username(self) -> str | None: - """The username if it was part of the URL, `None` otherwise. - This undergoes URL decoding and will always be a string. - """ - rv = self._split_auth()[0] - if rv is not None: - return _url_unquote_legacy(rv) - return None - - @property - def raw_username(self) -> str | None: - """The username if it was part of the URL, `None` otherwise. - Unlike :attr:`username` this one is not being decoded. - """ - return self._split_auth()[0] - - @property - def password(self) -> str | None: - """The password if it was part of the URL, `None` otherwise. - This undergoes URL decoding and will always be a string. - """ - rv = self._split_auth()[1] - if rv is not None: - return _url_unquote_legacy(rv) - return None - - @property - def raw_password(self) -> str | None: - """The password if it was part of the URL, `None` otherwise. - Unlike :attr:`password` this one is not being decoded. - """ - return self._split_auth()[1] - - def decode_query(self, *args: t.Any, **kwargs: t.Any) -> ds.MultiDict[str, str]: - """Decodes the query part of the URL. Ths is a shortcut for - calling :func:`url_decode` on the query argument. The arguments and - keyword arguments are forwarded to :func:`url_decode` unchanged. - """ - return url_decode(self.query, *args, **kwargs) - - def join(self, *args: t.Any, **kwargs: t.Any) -> BaseURL: - """Joins this URL with another one. This is just a convenience - function for calling into :meth:`url_join` and then parsing the - return value again. - """ - return url_parse(url_join(self, *args, **kwargs)) - - def to_url(self) -> str: - """Returns a URL string or bytes depending on the type of the - information stored. This is just a convenience function - for calling :meth:`url_unparse` for this URL. 
- """ - return url_unparse(self) - - def encode_netloc(self) -> str: - """Encodes the netloc part to an ASCII safe URL as bytes.""" - rv = self.ascii_host or "" - if ":" in rv: - rv = f"[{rv}]" - port = self.port - if port is not None: - rv = f"{rv}:{port}" - auth = ":".join( - filter( - None, - [ - url_quote(self.raw_username or "", "utf-8", "strict", "/:%"), - url_quote(self.raw_password or "", "utf-8", "strict", "/:%"), - ], - ) - ) - if auth: - rv = f"{auth}@{rv}" - return rv - - def decode_netloc(self) -> str: - """Decodes the netloc part into a string.""" - host = self.host or "" - - if isinstance(host, bytes): - host = host.decode() - - rv = _decode_idna(host) - - if ":" in rv: - rv = f"[{rv}]" - port = self.port - if port is not None: - rv = f"{rv}:{port}" - auth = ":".join( - filter( - None, - [ - _url_unquote_legacy(self.raw_username or "", "/:%@"), - _url_unquote_legacy(self.raw_password or "", "/:%@"), - ], - ) - ) - if auth: - rv = f"{auth}@{rv}" - return rv - - def to_uri_tuple(self) -> BaseURL: - """Returns a :class:`BytesURL` tuple that holds a URI. This will - encode all the information in the URL properly to ASCII using the - rules a web browser would follow. - - It's usually more interesting to directly call :meth:`iri_to_uri` which - will return a string. - """ - return url_parse(iri_to_uri(self)) - - def to_iri_tuple(self) -> BaseURL: - """Returns a :class:`URL` tuple that holds a IRI. This will try - to decode as much information as possible in the URL without - losing information similar to how a web browser does it for the - URL bar. - - It's usually more interesting to directly call :meth:`uri_to_iri` which - will return a string. - """ - return url_parse(uri_to_iri(self)) - - def get_file_location( - self, pathformat: str | None = None - ) -> tuple[str | None, str | None]: - """Returns a tuple with the location of the file in the form - ``(server, location)``. If the netloc is empty in the URL or - points to localhost, it's represented as ``None``. - - The `pathformat` by default is autodetection but needs to be set - when working with URLs of a specific system. The supported values - are ``'windows'`` when working with Windows or DOS paths and - ``'posix'`` when working with posix paths. - - If the URL does not point to a local file, the server and location - are both represented as ``None``. - - :param pathformat: The expected format of the path component. - Currently ``'windows'`` and ``'posix'`` are - supported. Defaults to ``None`` which is - autodetect. - """ - if self.scheme != "file": - return None, None - - path = url_unquote(self.path) - host = self.netloc or None - - if pathformat is None: - if os.name == "nt": - pathformat = "windows" - else: - pathformat = "posix" - - if pathformat == "windows": - if path[:1] == "/" and path[1:2].isalpha() and path[2:3] in "|:": - path = f"{path[1:2]}:{path[3:]}" - windows_share = path[:3] in ("\\" * 3, "/" * 3) - import ntpath - - path = ntpath.normpath(path) - # Windows shared drives are represented as ``\\host\\directory``. - # That results in a URL like ``file://///host/directory``, and a - # path like ``///host/directory``. We need to special-case this - # because the path contains the hostname. 
- if windows_share and host is None: - parts = path.lstrip("\\").split("\\", 1) - if len(parts) == 2: - host, path = parts - else: - host = parts[0] - path = "" - elif pathformat == "posix": - import posixpath - - path = posixpath.normpath(path) - else: - raise TypeError(f"Invalid path format {pathformat!r}") - - if host in ("127.0.0.1", "::1", "localhost"): - host = None - - return host, path - - def _split_netloc(self) -> tuple[str | None, str]: - if self._at in self.netloc: - auth, _, netloc = self.netloc.partition(self._at) - return auth, netloc - return None, self.netloc - - def _split_auth(self) -> tuple[str | None, str | None]: - auth = self._split_netloc()[0] - if not auth: - return None, None - if self._colon not in auth: - return auth, None - - username, _, password = auth.partition(self._colon) - return username, password - - def _split_host(self) -> tuple[str | None, str | None]: - rv = self._split_netloc()[1] - if not rv: - return None, None - - if not rv.startswith(self._lbracket): - if self._colon in rv: - host, _, port = rv.partition(self._colon) - return host, port - return rv, None - - idx = rv.find(self._rbracket) - if idx < 0: - return rv, None - - host = rv[1:idx] - rest = rv[idx + 1 :] - if rest.startswith(self._colon): - return host, rest[1:] - return host, None - - -class URL(BaseURL): - """Represents a parsed URL. This behaves like a regular tuple but - also has some extra attributes that give further insight into the - URL. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Use the ``urllib.parse`` library instead. - """ - - __slots__ = () - _at = "@" - _colon = ":" - _lbracket = "[" - _rbracket = "]" - - def encode(self, charset: str = "utf-8", errors: str = "replace") -> BytesURL: - """Encodes the URL to a tuple made out of bytes. The charset is - only being used for the path, query and fragment. - """ - return BytesURL( - self.scheme.encode("ascii"), - self.encode_netloc(), - self.path.encode(charset, errors), - self.query.encode(charset, errors), - self.fragment.encode(charset, errors), - ) - - -class BytesURL(BaseURL): - """Represents a parsed URL in bytes. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Use the ``urllib.parse`` library instead. - """ - - __slots__ = () - _at = b"@" # type: ignore - _colon = b":" # type: ignore - _lbracket = b"[" # type: ignore - _rbracket = b"]" # type: ignore - - def __str__(self) -> str: - return self.to_url().decode("utf-8", "replace") # type: ignore - - def encode_netloc(self) -> bytes: # type: ignore - """Returns the netloc unchanged as bytes.""" - return self.netloc # type: ignore - - def decode(self, charset: str = "utf-8", errors: str = "replace") -> URL: - """Decodes the URL to a tuple made out of strings. The charset is - only being used for the path, query and fragment. 
- """ - return URL( - self.scheme.decode("ascii"), # type: ignore - self.decode_netloc(), - self.path.decode(charset, errors), # type: ignore - self.query.decode(charset, errors), # type: ignore - self.fragment.decode(charset, errors), # type: ignore - ) - - -_unquote_maps: dict[frozenset[int], dict[bytes, int]] = {frozenset(): _hextobyte} - - -def _unquote_to_bytes(string: str | bytes, unsafe: str | bytes = "") -> bytes: - if isinstance(string, str): - string = string.encode("utf-8") - - if isinstance(unsafe, str): - unsafe = unsafe.encode("utf-8") - - unsafe = frozenset(bytearray(unsafe)) - groups = iter(string.split(b"%")) - result = bytearray(next(groups, b"")) - - try: - hex_to_byte = _unquote_maps[unsafe] - except KeyError: - hex_to_byte = _unquote_maps[unsafe] = { - h: b for h, b in _hextobyte.items() if b not in unsafe - } - - for group in groups: - code = group[:2] - - if code in hex_to_byte: - result.append(hex_to_byte[code]) - result.extend(group[2:]) - else: - result.append(37) # % - result.extend(group) - - return bytes(result) - - -def _url_encode_impl( - obj: t.Mapping[str, str] | t.Iterable[tuple[str, str]], - charset: str, - sort: bool, - key: t.Callable[[tuple[str, str]], t.Any] | None, -) -> t.Iterator[str]: - from .datastructures import iter_multi_items - - iterable: t.Iterable[tuple[str, str]] = iter_multi_items(obj) - - if sort: - iterable = sorted(iterable, key=key) - - for key_str, value_str in iterable: - if value_str is None: - continue - - if not isinstance(key_str, bytes): - key_bytes = str(key_str).encode(charset) - else: - key_bytes = key_str - - if not isinstance(value_str, bytes): - value_bytes = str(value_str).encode(charset) - else: - value_bytes = value_str - - yield f"{_fast_url_quote_plus(key_bytes)}={_fast_url_quote_plus(value_bytes)}" - - -def _url_unquote_legacy(value: str, unsafe: str = "") -> str: - try: - return url_unquote(value, charset="utf-8", errors="strict", unsafe=unsafe) - except UnicodeError: - return url_unquote(value, charset="latin1", unsafe=unsafe) - - -def url_parse( - url: str, scheme: str | None = None, allow_fragments: bool = True -) -> BaseURL: - """Parses a URL from a string into a :class:`URL` tuple. If the URL - is lacking a scheme it can be provided as second argument. Otherwise, - it is ignored. Optionally fragments can be stripped from the URL - by setting `allow_fragments` to `False`. - - The inverse of this function is :func:`url_unparse`. - - :param url: the URL to parse. - :param scheme: the default schema to use if the URL is schemaless. - :param allow_fragments: if set to `False` a fragment will be removed - from the URL. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Use ``urllib.parse.urlsplit`` instead. - """ - warnings.warn( - "'werkzeug.urls.url_parse' is deprecated and will be removed in Werkzeug 3.0." 
- " Use 'urllib.parse.urlsplit' instead.", - DeprecationWarning, - stacklevel=2, - ) - s = _make_encode_wrapper(url) - is_text_based = isinstance(url, str) - - if scheme is None: - scheme = s("") - netloc = query = fragment = s("") - i = url.find(s(":")) - if i > 0 and _scheme_re.match(_to_str(url[:i], errors="replace")): - # make sure "iri" is not actually a port number (in which case - # "scheme" is really part of the path) - rest = url[i + 1 :] - if not rest or any(c not in s("0123456789") for c in rest): - # not a port number - scheme, url = url[:i].lower(), rest - - if url[:2] == s("//"): - delim = len(url) - for c in s("/?#"): - wdelim = url.find(c, 2) - if wdelim >= 0: - delim = min(delim, wdelim) - netloc, url = url[2:delim], url[delim:] - if (s("[") in netloc and s("]") not in netloc) or ( - s("]") in netloc and s("[") not in netloc - ): - raise ValueError("Invalid IPv6 URL") - - if allow_fragments and s("#") in url: - url, fragment = url.split(s("#"), 1) - if s("?") in url: - url, query = url.split(s("?"), 1) - - result_type = URL if is_text_based else BytesURL - - return result_type(scheme, netloc, url, query, fragment) - - -def _make_fast_url_quote( - charset: str = "utf-8", - errors: str = "strict", - safe: str | bytes = "/:", - unsafe: str | bytes = "", -) -> t.Callable[[bytes], str]: - """Precompile the translation table for a URL encoding function. - - Unlike :func:`url_quote`, the generated function only takes the - string to quote. - - :param charset: The charset to encode the result with. - :param errors: How to handle encoding errors. - :param safe: An optional sequence of safe characters to never encode. - :param unsafe: An optional sequence of unsafe characters to always encode. - """ - if isinstance(safe, str): - safe = safe.encode(charset, errors) - - if isinstance(unsafe, str): - unsafe = unsafe.encode(charset, errors) - - safe = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe)) - table = [chr(c) if c in safe else f"%{c:02X}" for c in range(256)] - - def quote(string: bytes) -> str: - return "".join([table[c] for c in string]) - - return quote - - -_fast_url_quote = _make_fast_url_quote() -_fast_quote_plus = _make_fast_url_quote(safe=" ", unsafe="+") - - -def _fast_url_quote_plus(string: bytes) -> str: - return _fast_quote_plus(string).replace(" ", "+") - - -def url_quote( - string: str | bytes, - charset: str = "utf-8", - errors: str = "strict", - safe: str | bytes = "/:", - unsafe: str | bytes = "", -) -> str: - """URL encode a single string with a given encoding. - - :param s: the string to quote. - :param charset: the charset to be used. - :param safe: an optional sequence of safe characters. - :param unsafe: an optional sequence of unsafe characters. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Use ``urllib.parse.quote`` instead. - - .. versionadded:: 0.9.2 - The `unsafe` parameter was added. - """ - warnings.warn( - "'werkzeug.urls.url_quote' is deprecated and will be removed in Werkzeug 3.0." 
- " Use 'urllib.parse.quote' instead.", - DeprecationWarning, - stacklevel=2, - ) - - if not isinstance(string, (str, bytes, bytearray)): - string = str(string) - if isinstance(string, str): - string = string.encode(charset, errors) - if isinstance(safe, str): - safe = safe.encode(charset, errors) - if isinstance(unsafe, str): - unsafe = unsafe.encode(charset, errors) - safe = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe)) - rv = bytearray() - for char in bytearray(string): - if char in safe: - rv.append(char) - else: - rv.extend(_bytetohex[char]) - return bytes(rv).decode(charset) - - -def url_quote_plus( - string: str, charset: str = "utf-8", errors: str = "strict", safe: str = "" -) -> str: - """URL encode a single string with the given encoding and convert - whitespace to "+". - - :param s: The string to quote. - :param charset: The charset to be used. - :param safe: An optional sequence of safe characters. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Use ``urllib.parse.quote_plus`` instead. - """ - warnings.warn( - "'werkzeug.urls.url_quote_plus' is deprecated and will be removed in Werkzeug" - " 2.4. Use 'urllib.parse.quote_plus' instead.", - DeprecationWarning, - stacklevel=2, - ) - - return url_quote(string, charset, errors, safe + " ", "+").replace(" ", "+") - - -def url_unparse(components: tuple[str, str, str, str, str]) -> str: - """The reverse operation to :meth:`url_parse`. This accepts arbitrary - as well as :class:`URL` tuples and returns a URL as a string. - - :param components: the parsed URL as tuple which should be converted - into a URL string. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Use ``urllib.parse.urlunsplit`` instead. - """ - warnings.warn( - "'werkzeug.urls.url_unparse' is deprecated and will be removed in Werkzeug 3.0." - " Use 'urllib.parse.urlunsplit' instead.", - DeprecationWarning, - stacklevel=2, - ) - _check_str_tuple(components) - scheme, netloc, path, query, fragment = components - s = _make_encode_wrapper(scheme) - url = s("") - - # We generally treat file:///x and file:/x the same which is also - # what browsers seem to do. This also allows us to ignore a schema - # register for netloc utilization or having to differentiate between - # empty and missing netloc. - if netloc or (scheme and path.startswith(s("/"))): - if path and path[:1] != s("/"): - path = s("/") + path - url = s("//") + (netloc or s("")) + path - elif path: - url += path - if scheme: - url = scheme + s(":") + url - if query: - url = url + s("?") + query - if fragment: - url = url + s("#") + fragment - return url - - -def url_unquote( - s: str | bytes, - charset: str = "utf-8", - errors: str = "replace", - unsafe: str = "", -) -> str: - """URL decode a single string with a given encoding. If the charset - is set to `None` no decoding is performed and raw bytes are - returned. - - :param s: the string to unquote. - :param charset: the charset of the query string. If set to `None` - no decoding will take place. - :param errors: the error handling for the charset decoding. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Use ``urllib.parse.unquote`` instead. - """ - warnings.warn( - "'werkzeug.urls.url_unquote' is deprecated and will be removed in Werkzeug 3.0." 
- " Use 'urllib.parse.unquote' instead.", - DeprecationWarning, - stacklevel=2, - ) - rv = _unquote_to_bytes(s, unsafe) - if charset is None: - return rv - return rv.decode(charset, errors) - - -def url_unquote_plus( - s: str | bytes, charset: str = "utf-8", errors: str = "replace" -) -> str: - """URL decode a single string with the given `charset` and decode "+" to - whitespace. - - Per default encoding errors are ignored. If you want a different behavior - you can set `errors` to ``'replace'`` or ``'strict'``. - - :param s: The string to unquote. - :param charset: the charset of the query string. If set to `None` - no decoding will take place. - :param errors: The error handling for the `charset` decoding. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Use ``urllib.parse.unquote_plus`` instead. - """ - warnings.warn( - "'werkzeug.urls.url_unquote_plus' is deprecated and will be removed in Werkzeug" - " 2.4. Use 'urllib.parse.unquote_plus' instead.", - DeprecationWarning, - stacklevel=2, - ) - - if isinstance(s, str): - s = s.replace("+", " ") - else: - s = s.replace(b"+", b" ") - - return url_unquote(s, charset, errors) - - -def url_fix(s: str, charset: str = "utf-8") -> str: - r"""Sometimes you get an URL by a user that just isn't a real URL because - it contains unsafe characters like ' ' and so on. This function can fix - some of the problems in a similar way browsers handle data entered by the - user: - - >>> url_fix('http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)') - 'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)' - - :param s: the string with the URL to fix. - :param charset: The target charset for the URL if the url was given - as a string. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. - """ - warnings.warn( - "'werkzeug.urls.url_fix' is deprecated and will be removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - # First step is to switch to text processing and to convert - # backslashes (which are invalid in URLs anyways) to slashes. This is - # consistent with what Chrome does. - s = _to_str(s, charset, "replace").replace("\\", "/") - - # For the specific case that we look like a malformed windows URL - # we want to fix this up manually: - if s.startswith("file://") and s[7:8].isalpha() and s[8:10] in (":/", "|/"): - s = f"file:///{s[7:]}" - - url = url_parse(s) - path = url_quote(url.path, charset, safe="/%+$!*'(),") - qs = url_quote_plus(url.query, charset, safe=":&%=+$!*'(),") - anchor = url_quote_plus(url.fragment, charset, safe=":&%=+$!*'(),") - return url_unparse((url.scheme, url.encode_netloc(), path, qs, anchor)) - def _codec_error_url_quote(e: UnicodeError) -> tuple[str, int]: """Used in :func:`uri_to_iri` after unquoting to re-quote any @@ -805,7 +25,7 @@ def _codec_error_url_quote(e: UnicodeError) -> tuple[str, int]: codecs.register_error("werkzeug.url_quote", _codec_error_url_quote) -def _make_unquote_part(name: str, chars: str) -> t.Callable[[str, str, str], str]: +def _make_unquote_part(name: str, chars: str) -> t.Callable[[str], str]: """Create a function that unquotes all percent encoded characters except those given. This allows working with unquoted characters if possible while not changing the meaning of a given part of a URL. 
@@ -813,12 +33,12 @@ def _make_unquote_part(name: str, chars: str) -> t.Callable[[str, str, str], str choices = "|".join(f"{ord(c):02X}" for c in sorted(chars)) pattern = re.compile(f"((?:%(?:{choices}))+)", re.I) - def _unquote_partial(value: str, encoding: str, errors: str) -> str: + def _unquote_partial(value: str) -> str: parts = iter(pattern.split(value)) out = [] for part in parts: - out.append(unquote(part, encoding, errors)) + out.append(unquote(part, "utf-8", "werkzeug.url_quote")) out.append(next(parts, "")) return "".join(out) @@ -837,11 +57,7 @@ def _unquote_partial(value: str, encoding: str, errors: str) -> str: _unquote_user = _make_unquote_part("user", _always_unsafe + ":@/?#") -def uri_to_iri( - uri: str | tuple[str, str, str, str, str], - charset: str | None = None, - errors: str | None = None, -) -> str: +def uri_to_iri(uri: str) -> str: """Convert a URI to an IRI. All valid UTF-8 characters are unquoted, leaving all reserved and invalid characters quoted. If the URL has a domain, it is decoded from Punycode. @@ -850,13 +66,10 @@ def uri_to_iri( 'http://\\u2603.net/p\\xe5th?q=\\xe8ry%DF' :param uri: The URI to convert. - :param charset: The encoding to encode unquoted bytes with. - :param errors: Error handler to use during ``bytes.encode``. By - default, invalid bytes are left quoted. - .. versionchanged:: 2.3 - Passing a tuple or bytes, and the ``charset`` and ``errors`` parameters, are - deprecated and will be removed in Werkzeug 3.0. + .. versionchanged:: 3.0 + Passing a tuple or bytes, and the ``charset`` and ``errors`` parameters, + are removed. .. versionchanged:: 2.3 Which characters remain quoted is specific to each part of the URL. @@ -868,45 +81,10 @@ def uri_to_iri( .. versionadded:: 0.6 """ - if isinstance(uri, tuple): - warnings.warn( - "Passing a tuple is deprecated and will not be supported in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - uri = urlunsplit(uri) - - if isinstance(uri, bytes): - warnings.warn( - "Passing bytes is deprecated and will not be supported in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - uri = uri.decode() - - if charset is not None: - warnings.warn( - "The 'charset' parameter is deprecated and will be removed" - " in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - else: - charset = "utf-8" - - if errors is not None: - warnings.warn( - "The 'errors' parameter is deprecated and will be removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - else: - errors = "werkzeug.url_quote" - parts = urlsplit(uri) - path = _unquote_path(parts.path, charset, errors) - query = _unquote_query(parts.query, charset, errors) - fragment = _unquote_fragment(parts.fragment, charset, errors) + path = _unquote_path(parts.path) + query = _unquote_query(parts.query) + fragment = _unquote_fragment(parts.fragment) if parts.hostname: netloc = _decode_idna(parts.hostname) @@ -920,22 +98,18 @@ def uri_to_iri( netloc = f"{netloc}:{parts.port}" if parts.username: - auth = _unquote_user(parts.username, charset, errors) + auth = _unquote_user(parts.username) if parts.password: - auth = f"{auth}:{_unquote_user(parts.password, charset, errors)}" + password = _unquote_user(parts.password) + auth = f"{auth}:{password}" netloc = f"{auth}@{netloc}" return urlunsplit((parts.scheme, netloc, path, query, fragment)) -def iri_to_uri( - iri: str | tuple[str, str, str, str, str], - charset: str | None = None, - errors: str | None = None, - safe_conversion: bool | None = None, -) -> str: +def iri_to_uri(iri: str) -> str: 
"""Convert an IRI to a URI. All non-ASCII and unsafe characters are quoted. If the URL has a domain, it is encoded to Punycode. @@ -943,20 +117,14 @@ def iri_to_uri( 'http://xn--n3h.net/p%C3%A5th?q=%C3%A8ry%DF' :param iri: The IRI to convert. - :param charset: The encoding of the IRI. - :param errors: Error handler to use during ``bytes.encode``. - .. versionchanged:: 2.3 - Passing a tuple or bytes, and the ``charset`` and ``errors`` parameters, are - deprecated and will be removed in Werkzeug 3.0. + .. versionchanged:: 3.0 + Passing a tuple or bytes, the ``charset`` and ``errors`` parameters, + and the ``safe_conversion`` parameter, are removed. .. versionchanged:: 2.3 Which characters remain unquoted is specific to each part of the URL. - .. versionchanged:: 2.3 - The ``safe_conversion`` parameter is deprecated and will be removed in Werkzeug - 2.4. - .. versionchanged:: 0.15 All reserved characters remain unquoted. Previously, only some reserved characters were left unquoted. @@ -966,69 +134,12 @@ def iri_to_uri( .. versionadded:: 0.6 """ - if charset is not None: - warnings.warn( - "The 'charset' parameter is deprecated and will be removed" - " in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - else: - charset = "utf-8" - - if isinstance(iri, tuple): - warnings.warn( - "Passing a tuple is deprecated and will not be supported in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - iri = urlunsplit(iri) - - if isinstance(iri, bytes): - warnings.warn( - "Passing bytes is deprecated and will not be supported in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - iri = iri.decode(charset) - - if errors is not None: - warnings.warn( - "The 'errors' parameter is deprecated and will be removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - else: - errors = "strict" - - if safe_conversion is not None: - warnings.warn( - "The 'safe_conversion' parameter is deprecated and will be removed in" - " Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - - if safe_conversion: - # If we're not sure if it's safe to normalize the URL, and it only contains - # ASCII characters, return it as-is. - try: - ascii_iri = iri.encode("ascii") - - # Only return if it doesn't have whitespace. (Why?) 
- if len(ascii_iri.split()) == 1: - return iri - except UnicodeError: - pass - parts = urlsplit(iri) # safe = https://url.spec.whatwg.org/#url-path-segment-string # as well as percent for things that are already quoted - path = quote(parts.path, safe="%!$&'()*+,/:;=@", encoding=charset, errors=errors) - query = quote(parts.query, safe="%!$&'()*+,/:;=?@", encoding=charset, errors=errors) - fragment = quote( - parts.fragment, safe="%!#$&'()*+,/:;=?@", encoding=charset, errors=errors - ) + path = quote(parts.path, safe="%!$&'()*+,/:;=@") + query = quote(parts.query, safe="%!$&'()*+,/:;=?@") + fragment = quote(parts.fragment, safe="%!#$&'()*+,/:;=?@") if parts.hostname: netloc = parts.hostname.encode("idna").decode("ascii") @@ -1045,8 +156,8 @@ def iri_to_uri( auth = quote(parts.username, safe="%!$&'()*+,;=") if parts.password: - pass_quoted = quote(parts.password, safe="%!$&'()*+,;=") - auth = f"{auth}:{pass_quoted}" + password = quote(parts.password, safe="%!$&'()*+,;=") + auth = f"{auth}:{password}" netloc = f"{auth}@{netloc}" @@ -1074,299 +185,29 @@ def _invalid_iri_to_uri(iri: str) -> str: return iri_to_uri(iri) -def url_decode( - s: t.AnyStr, - charset: str = "utf-8", - include_empty: bool = True, - errors: str = "replace", - separator: str = "&", - cls: type[ds.MultiDict] | None = None, -) -> ds.MultiDict[str, str]: - """Parse a query string and return it as a :class:`MultiDict`. - - :param s: The query string to parse. - :param charset: Decode bytes to string with this charset. If not - given, bytes are returned as-is. - :param include_empty: Include keys with empty values in the dict. - :param errors: Error handling behavior when decoding bytes. - :param separator: Separator character between pairs. - :param cls: Container to hold result instead of :class:`MultiDict`. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Use ``urllib.parse.parse_qs`` instead. - - .. versionchanged:: 2.1 - The ``decode_keys`` parameter was removed. - - .. versionchanged:: 0.5 - In previous versions ";" and "&" could be used for url decoding. - Now only "&" is supported. If you want to use ";", a different - ``separator`` can be provided. - - .. versionchanged:: 0.5 - The ``cls`` parameter was added. - """ - warnings.warn( - "'werkzeug.urls.url_decode' is deprecated and will be removed in Werkzeug 2.4." - " Use 'urllib.parse.parse_qs' instead.", - DeprecationWarning, - stacklevel=2, - ) - - if cls is None: - from .datastructures import MultiDict # noqa: F811 - - cls = MultiDict - if isinstance(s, str) and not isinstance(separator, str): - separator = separator.decode(charset or "ascii") - elif isinstance(s, bytes) and not isinstance(separator, bytes): - separator = separator.encode(charset or "ascii") # type: ignore - return cls( - _url_decode_impl( - s.split(separator), charset, include_empty, errors # type: ignore - ) - ) - - -def url_decode_stream( - stream: t.IO[bytes], - charset: str = "utf-8", - include_empty: bool = True, - errors: str = "replace", - separator: bytes = b"&", - cls: type[ds.MultiDict] | None = None, - limit: int | None = None, -) -> ds.MultiDict[str, str]: - """Works like :func:`url_decode` but decodes a stream. The behavior - of stream and limit follows functions like - :func:`~werkzeug.wsgi.make_line_iter`. The generator of pairs is - directly fed to the `cls` so you can consume the data while it's - parsed. - - :param stream: a stream with the encoded querystring - :param charset: the charset of the query string. If set to `None` - no decoding will take place. 
- :param include_empty: Set to `False` if you don't want empty values to - appear in the dict. - :param errors: the decoding error behavior. - :param separator: the pair separator to be used, defaults to ``&`` - :param cls: an optional dict class to use. If this is not specified - or `None` the default :class:`MultiDict` is used. - :param limit: the content length of the URL data. Not necessary if - a limited stream is provided. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 2.4. Use ``urllib.parse.parse_qs`` instead. - - .. versionchanged:: 2.1 - The ``decode_keys`` and ``return_iterator`` parameters were removed. - - .. versionadded:: 0.8 - """ - warnings.warn( - "'werkzeug.urls.url_decode_stream' is deprecated and will be removed in" - " Werkzeug 2.4. Use 'urllib.parse.parse_qs' instead.", - DeprecationWarning, - stacklevel=2, - ) - - from .wsgi import make_chunk_iter - - pair_iter = make_chunk_iter(stream, separator, limit) - decoder = _url_decode_impl(pair_iter, charset, include_empty, errors) - - if cls is None: - from .datastructures import MultiDict # noqa: F811 - - cls = MultiDict - - return cls(decoder) - - -def _url_decode_impl( - pair_iter: t.Iterable[t.AnyStr], charset: str, include_empty: bool, errors: str -) -> t.Iterator[tuple[str, str]]: - for pair in pair_iter: - if not pair: - continue - s = _make_encode_wrapper(pair) - equal = s("=") - if equal in pair: - key, value = pair.split(equal, 1) - else: - if not include_empty: - continue - key = pair - value = s("") - yield ( - url_unquote_plus(key, charset, errors), - url_unquote_plus(value, charset, errors), - ) - - -def url_encode( - obj: t.Mapping[str, str] | t.Iterable[tuple[str, str]], - charset: str = "utf-8", - sort: bool = False, - key: t.Callable[[tuple[str, str]], t.Any] | None = None, - separator: str = "&", -) -> str: - """URL encode a dict/`MultiDict`. If a value is `None` it will not appear - in the result string. Per default only values are encoded into the target - charset strings. - - :param obj: the object to encode into a query string. - :param charset: the charset of the query string. - :param sort: set to `True` if you want parameters to be sorted by `key`. - :param separator: the separator to be used for the pairs. - :param key: an optional function to be used for sorting. For more details - check out the :func:`sorted` documentation. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 2.4. Use ``urllib.parse.urlencode`` instead. - - .. versionchanged:: 2.1 - The ``encode_keys`` parameter was removed. - - .. versionchanged:: 0.5 - Added the ``sort``, ``key``, and ``separator`` parameters. - """ - warnings.warn( - "'werkzeug.urls.url_encode' is deprecated and will be removed in Werkzeug 2.4." - " Use 'urllib.parse.urlencode' instead.", - DeprecationWarning, - stacklevel=2, - ) - separator = _to_str(separator, "ascii") - return separator.join(_url_encode_impl(obj, charset, sort, key)) - - -def url_encode_stream( - obj: t.Mapping[str, str] | t.Iterable[tuple[str, str]], - stream: t.IO[str] | None = None, - charset: str = "utf-8", - sort: bool = False, - key: t.Callable[[tuple[str, str]], t.Any] | None = None, - separator: str = "&", -) -> None: - """Like :meth:`url_encode` but writes the results to a stream - object. If the stream is `None` a generator over all encoded - pairs is returned. - - :param obj: the object to encode into a query string. - :param stream: a stream to write the encoded object into or `None` if - an iterator over the encoded pairs should be returned. 
In - that case the separator argument is ignored. - :param charset: the charset of the query string. - :param sort: set to `True` if you want parameters to be sorted by `key`. - :param separator: the separator to be used for the pairs. - :param key: an optional function to be used for sorting. For more details - check out the :func:`sorted` documentation. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 2.4. Use ``urllib.parse.urlencode`` instead. - - .. versionchanged:: 2.1 - The ``encode_keys`` parameter was removed. - - .. versionadded:: 0.8 - """ - warnings.warn( - "'werkzeug.urls.url_encode_stream' is deprecated and will be removed in" - " Werkzeug 2.4. Use 'urllib.parse.urlencode' instead.", - DeprecationWarning, - stacklevel=2, - ) - separator = _to_str(separator, "ascii") - gen = _url_encode_impl(obj, charset, sort, key) - if stream is None: - return gen # type: ignore - for idx, chunk in enumerate(gen): - if idx: - stream.write(separator) - stream.write(chunk) - return None - - -def url_join( - base: str | tuple[str, str, str, str, str], - url: str | tuple[str, str, str, str, str], - allow_fragments: bool = True, -) -> str: - """Join a base URL and a possibly relative URL to form an absolute - interpretation of the latter. - - :param base: the base URL for the join operation. - :param url: the URL to join. - :param allow_fragments: indicates whether fragments should be allowed. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 2.4. Use ``urllib.parse.urljoin`` instead. - """ - warnings.warn( - "'werkzeug.urls.url_join' is deprecated and will be removed in Werkzeug 2.4." - " Use 'urllib.parse.urljoin' instead.", - DeprecationWarning, - stacklevel=2, - ) - - if isinstance(base, tuple): - base = url_unparse(base) - if isinstance(url, tuple): - url = url_unparse(url) - - _check_str_tuple((base, url)) - s = _make_encode_wrapper(base) - - if not base: - return url - if not url: - return base - - bscheme, bnetloc, bpath, bquery, bfragment = url_parse( - base, allow_fragments=allow_fragments - ) - scheme, netloc, path, query, fragment = url_parse(url, bscheme, allow_fragments) - if scheme != bscheme: - return url - if netloc: - return url_unparse((scheme, netloc, path, query, fragment)) - netloc = bnetloc - - if path[:1] == s("/"): - segments = path.split(s("/")) - elif not path: - segments = bpath.split(s("/")) - if not query: - query = bquery - else: - segments = bpath.split(s("/"))[:-1] + path.split(s("/")) +def _decode_idna(domain: str) -> str: + try: + data = domain.encode("ascii") + except UnicodeEncodeError: + # If the domain is not ASCII, it's decoded already. + return domain - # If the rightmost part is "./" we want to keep the slash but - # remove the dot. - if segments[-1] == s("."): - segments[-1] = s("") + try: + # Try decoding in one shot. + return data.decode("idna") + except UnicodeDecodeError: + pass - # Resolve ".." and "." - segments = [segment for segment in segments if segment != s(".")] - while True: - i = 1 - n = len(segments) - 1 - while i < n: - if segments[i] == s("..") and segments[i - 1] not in (s(""), s("..")): - del segments[i - 1 : i + 1] - break - i += 1 - else: - break + # Decode each part separately, leaving invalid parts as punycode. + parts = [] - # Remove trailing ".." 
if the URL is absolute - unwanted_marker = [s(""), s("..")] - while segments[:2] == unwanted_marker: - del segments[1] + for part in data.split(b"."): + try: + parts.append(part.decode("idna")) + except UnicodeDecodeError: + parts.append(part.decode("ascii")) - path = s("/").join(segments) - return url_unparse((scheme, netloc, path, query, fragment)) + return ".".join(parts) def _urlencode( diff --git a/tests/test_urls.py b/tests/test_urls.py index 0b0f2aeed..fdaa913a6 100644 --- a/tests/test_urls.py +++ b/tests/test_urls.py @@ -1,239 +1,20 @@ -import io -import warnings - import pytest from werkzeug import urls -from werkzeug.datastructures import OrderedMultiDict - -pytestmark = [ - pytest.mark.filterwarnings("ignore:'werkzeug:DeprecationWarning"), - pytest.mark.filterwarnings("ignore:'_?make_chunk_iter':DeprecationWarning"), -] - - -def test_parsing(): - url = urls.url_parse("http://anon:hunter2@[2001:db8:0:1]:80/a/b/c") - assert url.netloc == "anon:hunter2@[2001:db8:0:1]:80" - assert url.username == "anon" - assert url.password == "hunter2" - assert url.port == 80 - assert url.ascii_host == "2001:db8:0:1" - - assert url.get_file_location() == (None, None) # no file scheme - - -@pytest.mark.parametrize("implicit_format", (True, False)) -@pytest.mark.parametrize("localhost", ("127.0.0.1", "::1", "localhost")) -def test_fileurl_parsing_windows(implicit_format, localhost, monkeypatch): - if implicit_format: - pathformat = None - monkeypatch.setattr("os.name", "nt") - else: - pathformat = "windows" - monkeypatch.delattr("os.name") # just to make sure it won't get used - - url = urls.url_parse("file:///C:/Documents and Settings/Foobar/stuff.txt") - assert url.netloc == "" - assert url.scheme == "file" - assert url.get_file_location(pathformat) == ( - None, - r"C:\Documents and Settings\Foobar\stuff.txt", - ) - - url = urls.url_parse("file://///server.tld/file.txt") - assert url.get_file_location(pathformat) == ("server.tld", r"file.txt") - - url = urls.url_parse("file://///server.tld") - assert url.get_file_location(pathformat) == ("server.tld", "") - - url = urls.url_parse(f"file://///{localhost}") - assert url.get_file_location(pathformat) == (None, "") - - url = urls.url_parse(f"file://///{localhost}/file.txt") - assert url.get_file_location(pathformat) == (None, r"file.txt") - - -def test_replace(): - url = urls.url_parse("http://de.wikipedia.org/wiki/Troll") - assert url.replace(query="foo=bar") == urls.url_parse( - "http://de.wikipedia.org/wiki/Troll?foo=bar" - ) - assert url.replace(scheme="https") == urls.url_parse( - "https://de.wikipedia.org/wiki/Troll" - ) - - -def test_quoting(): - assert urls.url_quote("\xf6\xe4\xfc") == "%C3%B6%C3%A4%C3%BC" - assert urls.url_unquote(urls.url_quote('#%="\xf6')) == '#%="\xf6' - assert urls.url_quote_plus("foo bar") == "foo+bar" - assert urls.url_unquote_plus("foo+bar") == "foo bar" - assert urls.url_quote_plus("foo+bar") == "foo%2Bbar" - assert urls.url_unquote_plus("foo%2Bbar") == "foo+bar" - assert urls.url_encode({b"a": None, b"b": b"foo bar"}) == "b=foo+bar" - assert urls.url_encode({"a": None, "b": "foo bar"}) == "b=foo+bar" - assert ( - urls.url_fix("http://de.wikipedia.org/wiki/Elf (Begriffsklärung)") - == "http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)" - ) - assert urls.url_quote_plus(42) == "42" - assert urls.url_quote(b"\xff") == "%FF" - - -def test_bytes_unquoting(): - assert ( - urls.url_unquote(urls.url_quote('#%="\xf6', charset="latin1"), charset=None) - == b'#%="\xf6' - ) - - -def test_url_decoding(): - x = 
urls.url_decode(b"foo=42&bar=23&uni=H%C3%A4nsel") - assert x["foo"] == "42" - assert x["bar"] == "23" - assert x["uni"] == "Hänsel" - - x = urls.url_decode(b"foo=42;bar=23;uni=H%C3%A4nsel", separator=b";") - assert x["foo"] == "42" - assert x["bar"] == "23" - assert x["uni"] == "Hänsel" - - x = urls.url_decode(b"%C3%9Ch=H%C3%A4nsel") - assert x["Üh"] == "Hänsel" - - -def test_url_bytes_decoding(): - x = urls.url_decode(b"foo=42&bar=23&uni=H%C3%A4nsel", charset=None) - assert x[b"foo"] == b"42" - assert x[b"bar"] == b"23" - assert x[b"uni"] == "Hänsel".encode() - - -def test_stream_decoding_string_fails(): - pytest.raises(TypeError, urls.url_decode_stream, "testing") - - -def test_url_encoding(): - assert urls.url_encode({"foo": "bar 45"}) == "foo=bar+45" - d = {"foo": 1, "bar": 23, "blah": "Hänsel"} - assert urls.url_encode(d, sort=True) == "bar=23&blah=H%C3%A4nsel&foo=1" - assert ( - urls.url_encode(d, sort=True, separator=";") == "bar=23;blah=H%C3%A4nsel;foo=1" - ) - - -def test_sorted_url_encode(): - assert ( - urls.url_encode( - {"a": 42, "b": 23, 1: 1, 2: 2}, sort=True, key=lambda i: str(i[0]) - ) - == "1=1&2=2&a=42&b=23" - ) - assert ( - urls.url_encode( - {"A": 1, "a": 2, "B": 3, "b": 4}, - sort=True, - key=lambda x: x[0].lower() + x[0], - ) - == "A=1&a=2&B=3&b=4" - ) - - -def test_streamed_url_encoding(): - out = io.StringIO() - urls.url_encode_stream({"foo": "bar 45"}, out) - assert out.getvalue() == "foo=bar+45" - - d = {"foo": 1, "bar": 23, "blah": "Hänsel"} - out = io.StringIO() - urls.url_encode_stream(d, out, sort=True) - assert out.getvalue() == "bar=23&blah=H%C3%A4nsel&foo=1" - out = io.StringIO() - urls.url_encode_stream(d, out, sort=True, separator=";") - assert out.getvalue() == "bar=23;blah=H%C3%A4nsel;foo=1" - - gen = urls.url_encode_stream(d, sort=True) - assert next(gen) == "bar=23" - assert next(gen) == "blah=H%C3%A4nsel" - assert next(gen) == "foo=1" - pytest.raises(StopIteration, lambda: next(gen)) - - -def test_url_fixing(): - x = urls.url_fix("http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)") - assert x == "http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)" - - x = urls.url_fix("http://just.a.test/$-_.+!*'(),") - assert x == "http://just.a.test/$-_.+!*'()," - - x = urls.url_fix("http://höhöhö.at/höhöhö/hähähä") - assert x == r"http://xn--hhh-snabb.at/h%C3%B6h%C3%B6h%C3%B6/h%C3%A4h%C3%A4h%C3%A4" - - -def test_url_fixing_filepaths(): - x = urls.url_fix(r"file://C:\Users\Administrator\My Documents\ÑÈáÇíí") - assert x == ( - r"file:///C%3A/Users/Administrator/My%20Documents/" - r"%C3%91%C3%88%C3%A1%C3%87%C3%AD%C3%AD" - ) - - a = urls.url_fix(r"file:/C:/") - b = urls.url_fix(r"file://C:/") - c = urls.url_fix(r"file:///C:/") - assert a == b == c == r"file:///C%3A/" - - x = urls.url_fix(r"file://host/sub/path") - assert x == r"file://host/sub/path" - - x = urls.url_fix(r"file:///") - assert x == r"file:///" - - -def test_url_fixing_qs(): - x = urls.url_fix(b"http://example.com/?foo=%2f%2f") - assert x == "http://example.com/?foo=%2f%2f" - - x = urls.url_fix( - "http://acronyms.thefreedictionary.com/" - "Algebraic+Methods+of+Solving+the+Schr%C3%B6dinger+Equation" - ) - assert x == ( - "http://acronyms.thefreedictionary.com/" - "Algebraic+Methods+of+Solving+the+Schr%C3%B6dinger+Equation" - ) def test_iri_support(): assert urls.uri_to_iri("http://xn--n3h.net/") == "http://\u2603.net/" - - with pytest.deprecated_call(): - assert ( - urls.uri_to_iri(b"http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th") - == "http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th" - 
) - assert urls.iri_to_uri("http://☃.net/") == "http://xn--n3h.net/" assert ( urls.iri_to_uri("http://üser:pässword@☃.net/påth") == "http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th" ) - assert ( urls.uri_to_iri("http://test.com/%3Fmeh?foo=%26%2F") == "http://test.com/%3Fmeh?foo=%26/" ) - - # this should work as well, might break on 2.4 because of a broken - # idna codec - with pytest.deprecated_call(): - assert urls.uri_to_iri(b"/foo") == "/foo" - - with pytest.deprecated_call(): - assert urls.iri_to_uri(b"/foo") == "/foo" - assert urls.iri_to_uri("/foo") == "/foo" - assert ( urls.iri_to_uri("http://föö.com:8080/bam/baz") == "http://xn--f-1gaa.com:8080/bam/baz" @@ -247,83 +28,11 @@ def test_iri_safe_quoting(): assert urls.iri_to_uri(urls.uri_to_iri(uri)) == uri -def test_ordered_multidict_encoding(): - d = OrderedMultiDict() - d.add("foo", 1) - d.add("foo", 2) - d.add("foo", 3) - d.add("bar", 0) - d.add("foo", 4) - assert urls.url_encode(d) == "foo=1&foo=2&foo=3&bar=0&foo=4" - - -def test_multidict_encoding(): - d = OrderedMultiDict() - d.add("2013-10-10T23:26:05.657975+0000", "2013-10-10T23:26:05.657975+0000") - assert ( - urls.url_encode(d) - == "2013-10-10T23%3A26%3A05.657975%2B0000=2013-10-10T23%3A26%3A05.657975%2B0000" - ) - - -def test_url_unquote_plus_unicode(): - # was broken in 0.6 - assert urls.url_unquote_plus("\x6d") == "\x6d" - - def test_quoting_of_local_urls(): rv = urls.iri_to_uri("/foo\x8f") assert rv == "/foo%C2%8F" -def test_url_attributes(): - rv = urls.url_parse("http://foo%3a:bar%3a@[::1]:80/123?x=y#frag") - assert rv.scheme == "http" - assert rv.auth == "foo%3a:bar%3a" - assert rv.username == "foo:" - assert rv.password == "bar:" - assert rv.raw_username == "foo%3a" - assert rv.raw_password == "bar%3a" - assert rv.host == "::1" - assert rv.port == 80 - assert rv.path == "/123" - assert rv.query == "x=y" - assert rv.fragment == "frag" - - rv = urls.url_parse("http://\N{SNOWMAN}.com/") - assert rv.host == "\N{SNOWMAN}.com" - assert rv.ascii_host == "xn--n3h.com" - - -def test_url_attributes_bytes(): - rv = urls.url_parse(b"http://foo%3a:bar%3a@[::1]:80/123?x=y#frag") - assert rv.scheme == b"http" - assert rv.auth == b"foo%3a:bar%3a" - assert rv.username == "foo:" - assert rv.password == "bar:" - assert rv.raw_username == b"foo%3a" - assert rv.raw_password == b"bar%3a" - assert rv.host == b"::1" - assert rv.port == 80 - assert rv.path == b"/123" - assert rv.query == b"x=y" - assert rv.fragment == b"frag" - - -def test_url_joining(): - assert urls.url_join("/foo", "/bar") == "/bar" - assert urls.url_join("http://example.com/foo", "/bar") == "http://example.com/bar" - assert urls.url_join("file:///tmp/", "test.html") == "file:///tmp/test.html" - assert urls.url_join("file:///tmp/x", "test.html") == "file:///tmp/test.html" - assert urls.url_join("file:///tmp/x", "../../../x.html") == "file:///x.html" - - -def test_partial_unencoded_decode(): - ref = "foo=정상처리".encode("euc-kr") - x = urls.url_decode(ref, charset="euc-kr") - assert x["foo"] == "정상처리" - - def test_iri_to_uri_idempotence_ascii_only(): uri = "http://www.idempoten.ce" uri = urls.iri_to_uri(uri) @@ -389,12 +98,3 @@ def test_iri_to_uri_dont_quote_valid_code_points(): # [] are not valid URL code points according to WhatWG URL Standard # https://url.spec.whatwg.org/#url-code-points assert urls.iri_to_uri("/path[bracket]?(paren)") == "/path%5Bbracket%5D?(paren)" - - -def test_url_parse_does_not_clear_warnings_registry(recwarn): - warnings.simplefilter("default") - warnings.simplefilter("ignore", 
DeprecationWarning) - for _ in range(2): - urls.url_parse("http://example.org/") - warnings.warn("test warning") - assert len(recwarn) == 1 From bafffa9fb597156567c1ca80042bfbc842a42284 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 14 Aug 2023 09:10:03 -0700 Subject: [PATCH 017/159] remove old auth behavior --- CHANGES.rst | 2 +- docs/http.rst | 4 - src/werkzeug/datastructures/auth.py | 194 +--------------------------- src/werkzeug/http.py | 59 --------- 4 files changed, 2 insertions(+), 257 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index c2e67e1dc..1666274a1 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1033,7 +1033,7 @@ Released 2019-03-19 (:pr:`1358`) - :func:`http.parse_cookie` ignores empty segments rather than producing a cookie with no key or value. (:issue:`1245`, :pr:`1301`) -- :func:`~http.parse_authorization_header` (and +- ``http.parse_authorization_header`` (and :class:`~datastructures.Authorization`, :attr:`~wrappers.Request.authorization`) treats the authorization header as UTF-8. On Python 2, basic auth username and password are diff --git a/docs/http.rst b/docs/http.rst index cbf4e04ed..790de3172 100644 --- a/docs/http.rst +++ b/docs/http.rst @@ -53,10 +53,6 @@ by :rfc:`2616`, Werkzeug implements some custom data structures that are .. autofunction:: parse_cache_control_header -.. autofunction:: parse_authorization_header - -.. autofunction:: parse_www_authenticate_header - .. autofunction:: parse_if_range_header .. autofunction:: parse_range_header diff --git a/src/werkzeug/datastructures/auth.py b/src/werkzeug/datastructures/auth.py index 2f2515020..81e588318 100644 --- a/src/werkzeug/datastructures/auth.py +++ b/src/werkzeug/datastructures/auth.py @@ -3,15 +3,11 @@ import base64 import binascii import typing as t -import warnings -from functools import wraps from ..http import dump_header from ..http import parse_dict_header -from ..http import parse_set_header from ..http import quote_header_value from .structures import CallbackDict -from .structures import HeaderSet if t.TYPE_CHECKING: import typing_extensions as te @@ -143,31 +139,6 @@ def __repr__(self) -> str: return f"<{type(self).__name__} {self.to_header()}>" -def auth_property(name: str, doc: str | None = None) -> property: - """A static helper function for Authentication subclasses to add - extra authentication system properties onto a class:: - - class FooAuthenticate(WWWAuthenticate): - special_realm = auth_property('special_realm') - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. - """ - warnings.warn( - "'auth_property' is deprecated and will be removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - - def _set_value(self, value): # type: ignore[no-untyped-def] - if value is None: - self.pop(name, None) - else: - self[name] = str(value) - - return property(lambda x: x.get(name), _set_value, doc=doc) - - class WWWAuthenticate: """Represents the parts of a ``WWW-Authenticate`` response header. @@ -196,19 +167,10 @@ class WWWAuthenticate: def __init__( self, - auth_type: str | None = None, + auth_type: str, values: dict[str, str] | None = None, token: str | None = None, ): - if auth_type is None: - warnings.warn( - "An auth type must be given as the first parameter. 
Assuming 'basic' is" - " deprecated and will be removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - auth_type = "basic" - self._type = auth_type.lower() self._parameters: dict[str, str] = CallbackDict( # type: ignore[misc] values, lambda _: self._trigger_on_update() @@ -261,62 +223,6 @@ def token(self, value: str | None) -> None: self._token = value self._trigger_on_update() - def set_basic(self, realm: str = "authentication required") -> None: - """Clear any existing data and set a ``Basic`` challenge. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Create and assign an instance instead. - """ - warnings.warn( - "The 'set_basic' method is deprecated and will be removed in Werkzeug 3.0." - " Create and assign an instance instead." - ) - self._type = "basic" - dict.clear(self.parameters) # type: ignore[arg-type] - dict.update( - self.parameters, # type: ignore[arg-type] - {"realm": realm}, # type: ignore[dict-item] - ) - self._token = None - self._trigger_on_update() - - def set_digest( - self, - realm: str, - nonce: str, - qop: t.Sequence[str] = ("auth",), - opaque: str | None = None, - algorithm: str | None = None, - stale: bool = False, - ) -> None: - """Clear any existing data and set a ``Digest`` challenge. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Create and assign an instance instead. - """ - warnings.warn( - "The 'set_digest' method is deprecated and will be removed in Werkzeug 3.0." - " Create and assign an instance instead." - ) - self._type = "digest" - dict.clear(self.parameters) # type: ignore[arg-type] - parameters = { - "realm": realm, - "nonce": nonce, - "qop": ", ".join(qop), - "stale": "TRUE" if stale else "FALSE", - } - - if opaque is not None: - parameters["opaque"] = opaque - - if algorithm is not None: - parameters["algorithm"] = algorithm - - dict.update(self.parameters, parameters) # type: ignore[arg-type] - self._token = None - self._trigger_on_update() - def __getitem__(self, key: str) -> str | None: return self.parameters.get(key) @@ -410,101 +316,3 @@ def __str__(self) -> str: def __repr__(self) -> str: return f"<{type(self).__name__} {self.to_header()}>" - - @property - def qop(self) -> set[str]: - """The ``qop`` parameter as a set. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. It will become the same as other - parameters, returning a string. - """ - warnings.warn( - "The 'qop' property is deprecated and will be removed in Werkzeug 3.0." - " It will become the same as other parameters, returning a string.", - DeprecationWarning, - stacklevel=2, - ) - - def on_update(value: HeaderSet) -> None: - if not value: - if "qop" in self: - del self["qop"] - - return - - self.parameters["qop"] = value.to_header() - - return parse_set_header(self.parameters.get("qop"), on_update) - - @property - def stale(self) -> bool | None: - """The ``stale`` parameter as a boolean. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. It will become the same as other - parameters, returning a string. - """ - warnings.warn( - "The 'stale' property is deprecated and will be removed in Werkzeug 3.0." 
- " It will become the same as other parameters, returning a string.", - DeprecationWarning, - stacklevel=2, - ) - - if "stale" in self.parameters: - return self.parameters["stale"].lower() == "true" - - return None - - @stale.setter - def stale(self, value: bool | str | None) -> None: - if value is None: - if "stale" in self.parameters: - del self.parameters["stale"] - - return - - if isinstance(value, bool): - warnings.warn( - "Setting the 'stale' property to a boolean is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - self.parameters["stale"] = "TRUE" if value else "FALSE" - else: - self.parameters["stale"] = value - - auth_property = staticmethod(auth_property) - - -def _deprecated_dict_method(f): # type: ignore[no-untyped-def] - @wraps(f) - def wrapper(*args, **kwargs): # type: ignore[no-untyped-def] - warnings.warn( - "Treating 'Authorization' and 'WWWAuthenticate' as a dict is deprecated and" - " will be removed in Werkzeug 3.0. Use the 'parameters' attribute instead.", - DeprecationWarning, - stacklevel=2, - ) - return f(*args, **kwargs) - - return wrapper - - -for name in ( - "__iter__", - "clear", - "copy", - "items", - "keys", - "pop", - "popitem", - "setdefault", - "update", - "values", -): - f = _deprecated_dict_method(getattr(dict, name)) - setattr(Authorization, name, f) - setattr(WWWAuthenticate, name, f) diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py index 07d1fd496..14ae65ba6 100644 --- a/src/werkzeug/http.py +++ b/src/werkzeug/http.py @@ -815,65 +815,6 @@ def parse_set_header( return ds.HeaderSet(parse_list_header(value), on_update) -def parse_authorization_header( - value: str | None, -) -> ds.Authorization | None: - """Parse an HTTP basic/digest authorization header transmitted by the web - browser. The return value is either `None` if the header was invalid or - not given, otherwise an :class:`~werkzeug.datastructures.Authorization` - object. - - :param value: the authorization header to parse. - :return: a :class:`~werkzeug.datastructures.Authorization` object or `None`. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Use :meth:`.Authorization.from_header` instead. - """ - from .datastructures import Authorization - - warnings.warn( - "'parse_authorization_header' is deprecated and will be removed in Werkzeug" - " 2.4. Use 'Authorization.from_header' instead.", - DeprecationWarning, - stacklevel=2, - ) - return Authorization.from_header(value) - - -def parse_www_authenticate_header( - value: str | None, - on_update: t.Callable[[ds.WWWAuthenticate], None] | None = None, -) -> ds.WWWAuthenticate: - """Parse an HTTP WWW-Authenticate header into a - :class:`~werkzeug.datastructures.WWWAuthenticate` object. - - :param value: a WWW-Authenticate header to parse. - :param on_update: an optional callable that is called every time a value - on the :class:`~werkzeug.datastructures.WWWAuthenticate` - object is changed. - :return: a :class:`~werkzeug.datastructures.WWWAuthenticate` object. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Use :meth:`.WWWAuthenticate.from_header` - instead. - """ - from .datastructures.auth import WWWAuthenticate - - warnings.warn( - "'parse_www_authenticate_header' is deprecated and will be removed in Werkzeug" - " 2.4. 
Use 'WWWAuthenticate.from_header' instead.", - DeprecationWarning, - stacklevel=2, - ) - rv = WWWAuthenticate.from_header(value) - - if rv is None: - rv = WWWAuthenticate("basic") - - rv._on_update = on_update - return rv - - def parse_if_range_header(value: str | None) -> ds.IfRange: """Parses an if-range header which can be an etag or a date. Returns a :class:`~werkzeug.datastructures.IfRange` object. From 4c769c91e388a1d78b4d75931ba25e5771406da9 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 14 Aug 2023 09:19:41 -0700 Subject: [PATCH 018/159] remove old http parsing parameters --- src/werkzeug/http.py | 85 ++++++++++---------------------------------- 1 file changed, 18 insertions(+), 67 deletions(-) diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py index 14ae65ba6..1b89e191b 100644 --- a/src/werkzeug/http.py +++ b/src/werkzeug/http.py @@ -136,11 +136,7 @@ class COOP(Enum): SAME_ORIGIN = "same-origin" -def quote_header_value( - value: t.Any, - extra_chars: str | None = None, - allow_token: bool = True, -) -> str: +def quote_header_value(value: t.Any, allow_token: bool = True) -> str: """Add double quotes around a header value. If the header contains only ASCII token characters, it will be returned unchanged. If the header contains ``"`` or ``\\`` characters, they will be escaped with an additional ``\\`` character. @@ -150,15 +146,15 @@ def quote_header_value( :param value: The value to quote. Will be converted to a string. :param allow_token: Disable to quote the value even if it only has token characters. + .. versionchanged:: 3.0 + The ``extra_chars`` parameter is removed. + .. versionchanged:: 2.3 The value is quoted if it is the empty string. .. versionchanged:: 2.3 Passing bytes is deprecated and will not be supported in Werkzeug 3.0. - .. versionchanged:: 2.3 - The ``extra_chars`` parameter is deprecated and will be removed in Werkzeug 3.0. - .. versionadded:: 0.5 """ if isinstance(value, bytes): @@ -169,14 +165,6 @@ def quote_header_value( ) value = value.decode("latin1") - if extra_chars is not None: - warnings.warn( - "The 'extra_chars' parameter is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - value = str(value) if not value: @@ -185,9 +173,6 @@ def quote_header_value( if allow_token: token_chars = _token_chars - if extra_chars: - token_chars |= set(extra_chars) - if token_chars.issuperset(value): return value @@ -195,7 +180,7 @@ def quote_header_value( return f'"{value}"' -def unquote_header_value(value: str, is_filename: bool | None = None) -> str: +def unquote_header_value(value: str) -> str: """Remove double quotes and decode slash-escaped ``"`` and ``\\`` characters in a header value. @@ -203,22 +188,12 @@ def unquote_header_value(value: str, is_filename: bool | None = None) -> str: :param value: The header value to unquote. - .. versionchanged:: 2.3 - The ``is_filename`` parameter is deprecated and will be removed in Werkzeug 3.0. + .. versionchanged:: 3.0 + The ``is_filename`` parameter is removed. 
""" - if is_filename is not None: - warnings.warn( - "The 'is_filename' parameter is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - if len(value) >= 2 and value[0] == value[-1] == '"': value = value[1:-1] - - if not is_filename: - return value.replace("\\\\", "\\").replace('\\"', '"') + return value.replace("\\\\", "\\").replace('\\"', '"') return value @@ -269,10 +244,7 @@ def dump_options_header(header: str | None, options: t.Mapping[str, t.Any]) -> s return "; ".join(segments) -def dump_header( - iterable: dict[str, t.Any] | t.Iterable[t.Any], - allow_token: bool | None = None, -) -> str: +def dump_header(iterable: dict[str, t.Any] | t.Iterable[t.Any]) -> str: """Produce a header value from a list of items or ``key=value`` pairs, separated by commas ``,``. @@ -298,22 +270,12 @@ def dump_header( :param iterable: The items to create a header from. - .. versionchanged:: 2.3 - The ``allow_token`` parameter is deprecated and will be removed in Werkzeug 3.0. + .. versionchanged:: 3.0 + The ``allow_token`` parameter is removed. .. versionchanged:: 2.2.3 If a key ends with ``*``, its value will not be quoted. """ - if allow_token is not None: - warnings.warn( - "'The 'allow_token' parameter is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - else: - allow_token = True - if isinstance(iterable, dict): items = [] @@ -323,11 +285,9 @@ def dump_header( elif key[-1] == "*": items.append(f"{key}={value}") else: - items.append( - f"{key}={quote_header_value(value, allow_token=allow_token)}" - ) + items.append(f"{key}={quote_header_value(value)}") else: - items = [quote_header_value(x, allow_token=allow_token) for x in iterable] + items = [quote_header_value(x) for x in iterable] return ", ".join(items) @@ -372,7 +332,7 @@ def parse_list_header(value: str) -> list[str]: return result -def parse_dict_header(value: str, cls: type[dict] | None = None) -> dict[str, str]: +def parse_dict_header(value: str) -> dict[str, str]: """Parse a list header using :func:`parse_list_header`, then parse each item as a ``key=value`` pair. @@ -391,28 +351,19 @@ def parse_dict_header(value: str, cls: type[dict] | None = None) -> dict[str, st :param value: The header value to parse. + .. versionchanged:: 3.0 + The ``cls`` argument is removed. + .. versionchanged:: 2.3 Added support for ``key*=charset''value`` encoded items. .. versionchanged:: 2.3 Passing bytes is deprecated, support will be removed in Werkzeug 3.0. - .. versionchanged:: 2.3 - The ``cls`` argument is deprecated and will be removed in Werkzeug 3.0. - .. versionchanged:: 0.9 The ``cls`` argument was added. """ - if cls is None: - cls = dict - else: - warnings.warn( - "The 'cls' parameter is deprecated and will be removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - - result = cls() + result = {} if isinstance(value, bytes): warnings.warn( From 0495bed470e6cbcfe6013e7389d89fb975e17bbb Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 14 Aug 2023 09:24:27 -0700 Subject: [PATCH 019/159] remove old formparser behavior --- src/werkzeug/formparser.py | 73 +++----------------------------------- 1 file changed, 5 insertions(+), 68 deletions(-) diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py index 25ef0d61b..8b88b2dc7 100644 --- a/src/werkzeug/formparser.py +++ b/src/werkzeug/formparser.py @@ -163,9 +163,8 @@ class FormDataParser: The ``charset`` and ``errors`` parameters are deprecated and will be removed in Werkzeug 3.0. 
- .. versionchanged:: 2.3 - The ``parse_functions`` attribute and ``get_parse_func`` methods are deprecated - and will be removed in Werkzeug 3.0. + .. versionchanged:: 3.0 + The ``parse_functions`` attribute and ``get_parse_func`` methods were removed. .. versionchanged:: 2.2.3 Added the ``max_form_parts`` parameter. @@ -223,44 +222,6 @@ def __init__( self.cls = cls self.silent = silent - def get_parse_func( - self, mimetype: str, options: dict[str, str] - ) -> None | ( - t.Callable[ - [FormDataParser, t.IO[bytes], str, int | None, dict[str, str]], - t_parse_result, - ] - ): - warnings.warn( - "The 'get_parse_func' method is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - - if mimetype == "multipart/form-data": - return type(self)._parse_multipart - elif mimetype == "application/x-www-form-urlencoded": - return type(self)._parse_urlencoded - elif mimetype == "application/x-url-encoded": - warnings.warn( - "The 'application/x-url-encoded' mimetype is invalid, and will not be" - " treated as 'application/x-www-form-urlencoded' in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - return type(self)._parse_urlencoded - elif mimetype in self.parse_functions: - warnings.warn( - "The 'parse_functions' attribute is deprecated and will be removed in" - " Werkzeug 3.0. Override 'parse' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.parse_functions[mimetype] - - return None - def parse_from_environ(self, environ: WSGIEnvironment) -> t_parse_result: """Parses the information from the environment as form data. @@ -294,30 +255,14 @@ def parse( the multipart boundary for instance) :return: A tuple in the form ``(stream, form, files)``. - .. versionchanged:: 2.3 - The ``application/x-url-encoded`` content type is deprecated and will not be - treated as ``application/x-www-form-urlencoded`` in Werkzeug 3.0. + .. versionchanged:: 3.0 + The invalid ``application/x-url-encoded`` content type is not + treated as ``application/x-www-form-urlencoded``. """ if mimetype == "multipart/form-data": parse_func = self._parse_multipart elif mimetype == "application/x-www-form-urlencoded": parse_func = self._parse_urlencoded - elif mimetype == "application/x-url-encoded": - warnings.warn( - "The 'application/x-url-encoded' mimetype is invalid, and will not be" - " treated as 'application/x-www-form-urlencoded' in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - parse_func = self._parse_urlencoded - elif mimetype in self.parse_functions: - warnings.warn( - "The 'parse_functions' attribute is deprecated and will be removed in" - " Werkzeug 3.0. 
Override 'parse' instead.", - DeprecationWarning, - stacklevel=2, - ) - parse_func = self.parse_functions[mimetype].__get__(self, type(self)) else: return stream, self.cls(), self.cls() @@ -383,14 +328,6 @@ def _parse_urlencoded( return stream, self.cls(items), self.cls() - parse_functions: dict[ - str, - t.Callable[ - [FormDataParser, t.IO[bytes], str, int | None, dict[str, str]], - t_parse_result, - ], - ] = {} - class MultiPartParser: def __init__( From 4756a636c69dc63329ee03b6b42156f391b8bbf2 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 14 Aug 2023 09:42:23 -0700 Subject: [PATCH 020/159] remove old password hash methods --- src/werkzeug/security.py | 16 +--------------- tests/test_security.py | 5 +++++ 2 files changed, 6 insertions(+), 15 deletions(-) diff --git a/src/werkzeug/security.py b/src/werkzeug/security.py index 282c4fd8c..3620d7c9c 100644 --- a/src/werkzeug/security.py +++ b/src/werkzeug/security.py @@ -5,7 +5,6 @@ import os import posixpath import secrets -import warnings SALT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" DEFAULT_PBKDF2_ITERATIONS = 600000 @@ -24,14 +23,6 @@ def gen_salt(length: int) -> str: def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]: - if method == "plain": - warnings.warn( - "The 'plain' password method is deprecated and will be removed in" - " Werkzeug 3.0. Migrate to the 'scrypt' method.", - stacklevel=3, - ) - return password, method - method, *args = method.split(":") salt = salt.encode("utf-8") password = password.encode("utf-8") @@ -72,12 +63,7 @@ def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]: f"pbkdf2:{hash_name}:{iterations}", ) else: - warnings.warn( - f"The '{method}' password method is deprecated and will be removed in" - " Werkzeug 3.0. Migrate to the 'scrypt' method.", - stacklevel=3, - ) - return hmac.new(salt, password, method).hexdigest(), method + raise ValueError(f"Invalid hash method '{method}'.") def generate_password_hash( diff --git a/tests/test_security.py b/tests/test_security.py index 0ef1eb052..ae63046d5 100644 --- a/tests/test_security.py +++ b/tests/test_security.py @@ -42,6 +42,11 @@ def test_require_salt(): generate_password_hash("secret", salt_length=0) +def test_invalid_method(): + with pytest.raises(ValueError, match="Invalid hash method"): + generate_password_hash("secret", "sha256") + + def test_safe_join(): assert safe_join("foo", "bar/baz") == posixpath.join("foo", "bar/baz") assert safe_join("foo", "../bar/baz") is None From 56f820790a7ca28df972bc50c6790f99cdf5c793 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 14 Aug 2023 09:56:28 -0700 Subject: [PATCH 021/159] remove old cookie behavior in test client --- src/werkzeug/test.py | 75 +++++++------------------------------------- 1 file changed, 12 insertions(+), 63 deletions(-) diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py index 3be9b20f3..26e9c3b13 100644 --- a/src/werkzeug/test.py +++ b/src/werkzeug/test.py @@ -855,20 +855,6 @@ def __init__( self.allow_subdomain_redirects = allow_subdomain_redirects - @property - def cookie_jar(self) -> t.Iterable[Cookie] | None: - warnings.warn( - "The 'cookie_jar' attribute is a private API and will be removed in" - " Werkzeug 3.0. 
Use the 'get_cookie' method instead.", - DeprecationWarning, - stacklevel=2, - ) - - if self._cookies is None: - return None - - return self._cookies.values() - def get_cookie( self, key: str, domain: str = "localhost", path: str = "/" ) -> Cookie | None: @@ -892,7 +878,7 @@ def set_cookie( self, key: str, value: str = "", - *args: t.Any, + *, domain: str = "localhost", origin_only: bool = True, path: str = "/", @@ -918,34 +904,21 @@ def set_cookie( or as a prefix. :param kwargs: Passed to :func:`.dump_cookie`. + .. versionchanged:: 3.0 + The parameter ``server_name`` is removed. The first parameter is + ``key``. Use the ``domain`` and ``origin_only`` parameters instead. + .. versionchanged:: 2.3 The ``origin_only`` parameter was added. .. versionchanged:: 2.3 The ``domain`` parameter defaults to ``localhost``. - - .. versionchanged:: 2.3 - The first parameter ``server_name`` is deprecated and will be removed in - Werkzeug 3.0. The first parameter is ``key``. Use the ``domain`` and - ``origin_only`` parameters instead. """ if self._cookies is None: raise TypeError( "Cookies are disabled. Create a client with 'use_cookies=True'." ) - if args: - warnings.warn( - "The first parameter 'server_name' is no longer used, and will be" - " removed in Werkzeug 3.0. The positional parameters are 'key' and" - " 'value'. Use the 'domain' and 'origin_only' parameters instead.", - DeprecationWarning, - stacklevel=2, - ) - domain = key - key = value - value = args[0] - cookie = Cookie._from_response_header( domain, "/", dump_cookie(key, value, domain=domain, path=path, **kwargs) ) @@ -959,10 +932,9 @@ def set_cookie( def delete_cookie( self, key: str, - *args: t.Any, + *, domain: str = "localhost", path: str = "/", - **kwargs: t.Any, ) -> None: """Delete a cookie if it exists. Cookies are uniquely identified by ``(domain, path, key)``. @@ -971,44 +943,21 @@ def delete_cookie( :param domain: The domain the cookie was set for. :param path: The path the cookie was set for. - .. versionchanged:: 2.3 - The ``domain`` parameter defaults to ``localhost``. + .. versionchanged:: 3.0 + The ``server_name`` parameter is removed. The first parameter is + ``key``. Use the ``domain`` parameter instead. - .. versionchanged:: 2.3 - The first parameter ``server_name`` is deprecated and will be removed in - Werkzeug 3.0. The first parameter is ``key``. Use the ``domain`` parameter - instead. + .. versionchanged:: 3.0 + The ``secure``, ``httponly`` and ``samesite`` parameters are removed. .. versionchanged:: 2.3 - The ``secure``, ``httponly`` and ``samesite`` parameters are deprecated and - will be removed in Werkzeug 2.4. + The ``domain`` parameter defaults to ``localhost``. """ if self._cookies is None: raise TypeError( "Cookies are disabled. Create a client with 'use_cookies=True'." ) - if args: - warnings.warn( - "The first parameter 'server_name' is no longer used, and will be" - " removed in Werkzeug 2.4. The first parameter is 'key'. 
Use the" - " 'domain' parameter instead.", - DeprecationWarning, - stacklevel=2, - ) - domain = key - key = args[0] - - if kwargs: - kwargs_keys = ", ".join(f"'{k}'" for k in kwargs) - plural = "parameters are" if len(kwargs) > 1 else "parameter is" - warnings.warn( - f"The {kwargs_keys} {plural} deprecated and will be" - f" removed in Werkzeug 2.4.", - DeprecationWarning, - stacklevel=2, - ) - self._cookies.pop((domain, path, key), None) def _add_cookies_to_wsgi(self, environ: WSGIEnvironment) -> None: From 1058daa00a7142fda26096854835b37da8bfca50 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 14 Aug 2023 09:58:54 -0700 Subject: [PATCH 022/159] remove make_chunk_iter --- CHANGES.rst | 13 ++- docs/wsgi.rst | 4 - src/werkzeug/wsgi.py | 224 ------------------------------------------- tests/test_wsgi.py | 136 -------------------------- 4 files changed, 6 insertions(+), 371 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 1666274a1..e4cb06d50 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1848,9 +1848,8 @@ Released on June 13nd 2013, codename Planierraupe. certificates easily and load them from files. - Refactored test client to invoke the open method on the class for redirects. This makes subclassing more powerful. -- :func:`werkzeug.wsgi.make_chunk_iter` and - :func:`werkzeug.wsgi.make_line_iter` now support processing of - iterators and streams. +- ``wsgi.make_chunk_iter`` and ``make_line_iter`` now support processing + of iterators and streams. - URL generation by the routing system now no longer quotes ``+``. - URL fixing now no longer quotes certain reserved characters. @@ -1948,7 +1947,7 @@ Version 0.8.3 (bugfix release, released on February 5th 2012) -- Fixed another issue with :func:`werkzeug.wsgi.make_line_iter` +- Fixed another issue with ``wsgi.make_line_iter`` where lines longer than the buffer size were not handled properly. - Restore stdout after debug console finished executing so @@ -2016,7 +2015,7 @@ Released on September 29th 2011, codename Lötkolben - Werkzeug now uses a new method to check that the length of incoming data is complete and will raise IO errors by itself if the server fails to do so. -- :func:`~werkzeug.wsgi.make_line_iter` now requires a limit that is +- ``wsgi.make_line_iter`` now requires a limit that is not higher than the length the stream can provide. - Refactored form parsing into a form parser class that makes it possible to hook into individual parts of the parsing process for debugging and @@ -2216,7 +2215,7 @@ Released on Feb 19th 2010, codename Hammer. - the form data parser will now look at the filename instead the content type to figure out if it should treat the upload as regular form data or file upload. This fixes a bug with Google Chrome. -- improved performance of `make_line_iter` and the multipart parser +- improved performance of ``make_line_iter`` and the multipart parser for binary uploads. - fixed :attr:`~werkzeug.BaseResponse.is_streamed` - fixed a path quoting bug in `EnvironBuilder` that caused PATH_INFO and @@ -2345,7 +2344,7 @@ Released on April 24th, codename Schlagbohrer. - added :mod:`werkzeug.contrib.lint` - added `passthrough_errors` to `run_simple`. - added `secure_filename` -- added :func:`make_line_iter` +- added ``make_line_iter`` - :class:`MultiDict` copies now instead of revealing internal lists to the caller for `getlist` and iteration functions that return lists. 
diff --git a/docs/wsgi.rst b/docs/wsgi.rst index 1992bece6..67b3bb6b8 100644 --- a/docs/wsgi.rst +++ b/docs/wsgi.rst @@ -22,10 +22,6 @@ iterator and the input stream. .. autoclass:: LimitedStream :members: -.. autofunction:: make_line_iter - -.. autofunction:: make_chunk_iter - .. autofunction:: wrap_file diff --git a/src/werkzeug/wsgi.py b/src/werkzeug/wsgi.py index 6061e1141..27c580223 100644 --- a/src/werkzeug/wsgi.py +++ b/src/werkzeug/wsgi.py @@ -1,16 +1,11 @@ from __future__ import annotations import io -import re import typing as t import warnings from functools import partial from functools import update_wrapper -from itertools import chain -from ._internal import _make_encode_wrapper -from ._internal import _to_bytes -from ._internal import _to_str from .exceptions import ClientDisconnected from .exceptions import RequestEntityTooLarge from .sansio import utils as _sansio_utils @@ -455,225 +450,6 @@ def close(self) -> None: self.iterable.close() -def _make_chunk_iter( - stream: t.Iterable[bytes] | t.IO[bytes], - limit: int | None, - buffer_size: int, -) -> t.Iterator[bytes]: - """Helper for the line and chunk iter functions.""" - warnings.warn( - "'_make_chunk_iter' is deprecated and will be removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - - if isinstance(stream, (bytes, bytearray, str)): - raise TypeError( - "Passed a string or byte object instead of true iterator or stream." - ) - if not hasattr(stream, "read"): - for item in stream: - if item: - yield item - return - stream = t.cast(t.IO[bytes], stream) - if not isinstance(stream, LimitedStream) and limit is not None: - stream = t.cast(t.IO[bytes], LimitedStream(stream, limit)) - _read = stream.read - while True: - item = _read(buffer_size) - if not item: - break - yield item - - -def make_line_iter( - stream: t.Iterable[bytes] | t.IO[bytes], - limit: int | None = None, - buffer_size: int = 10 * 1024, - cap_at_buffer: bool = False, -) -> t.Iterator[bytes]: - """Safely iterates line-based over an input stream. If the input stream - is not a :class:`LimitedStream` the `limit` parameter is mandatory. - - This uses the stream's :meth:`~file.read` method internally as opposite - to the :meth:`~file.readline` method that is unsafe and can only be used - in violation of the WSGI specification. The same problem applies to the - `__iter__` function of the input stream which calls :meth:`~file.readline` - without arguments. - - If you need line-by-line processing it's strongly recommended to iterate - over the input stream using this helper function. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. - - .. versionadded:: 0.11 - added support for the `cap_at_buffer` parameter. - - .. versionadded:: 0.9 - added support for iterators as input stream. - - .. versionchanged:: 0.8 - This function now ensures that the limit was reached. - - :param stream: the stream or iterate to iterate over. - :param limit: the limit in bytes for the stream. (Usually - content length. Not necessary if the `stream` - is a :class:`LimitedStream`. - :param buffer_size: The optional buffer size. - :param cap_at_buffer: if this is set chunks are split if they are longer - than the buffer size. Internally this is implemented - that the buffer size might be exhausted by a factor - of two however. 
- """ - warnings.warn( - "'make_line_iter' is deprecated and will be removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - _iter = _make_chunk_iter(stream, limit, buffer_size) - - first_item = next(_iter, "") - - if not first_item: - return - - s = _make_encode_wrapper(first_item) - empty = t.cast(bytes, s("")) - cr = t.cast(bytes, s("\r")) - lf = t.cast(bytes, s("\n")) - crlf = t.cast(bytes, s("\r\n")) - - _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter)) - - def _iter_basic_lines() -> t.Iterator[bytes]: - _join = empty.join - buffer: list[bytes] = [] - while True: - new_data = next(_iter, "") - if not new_data: - break - new_buf: list[bytes] = [] - buf_size = 0 - for item in t.cast( - t.Iterator[bytes], chain(buffer, new_data.splitlines(True)) - ): - new_buf.append(item) - buf_size += len(item) - if item and item[-1:] in crlf: - yield _join(new_buf) - new_buf = [] - elif cap_at_buffer and buf_size >= buffer_size: - rv = _join(new_buf) - while len(rv) >= buffer_size: - yield rv[:buffer_size] - rv = rv[buffer_size:] - new_buf = [rv] - buffer = new_buf - if buffer: - yield _join(buffer) - - # This hackery is necessary to merge 'foo\r' and '\n' into one item - # of 'foo\r\n' if we were unlucky and we hit a chunk boundary. - previous = empty - for item in _iter_basic_lines(): - if item == lf and previous[-1:] == cr: - previous += item - item = empty - if previous: - yield previous - previous = item - if previous: - yield previous - - -def make_chunk_iter( - stream: t.Iterable[bytes] | t.IO[bytes], - separator: bytes, - limit: int | None = None, - buffer_size: int = 10 * 1024, - cap_at_buffer: bool = False, -) -> t.Iterator[bytes]: - """Works like :func:`make_line_iter` but accepts a separator - which divides chunks. If you want newline based processing - you should use :func:`make_line_iter` instead as it - supports arbitrary newline markers. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. - - .. versionchanged:: 0.11 - added support for the `cap_at_buffer` parameter. - - .. versionchanged:: 0.9 - added support for iterators as input stream. - - .. versionadded:: 0.8 - - :param stream: the stream or iterate to iterate over. - :param separator: the separator that divides chunks. - :param limit: the limit in bytes for the stream. (Usually - content length. Not necessary if the `stream` - is otherwise already limited). - :param buffer_size: The optional buffer size. - :param cap_at_buffer: if this is set chunks are split if they are longer - than the buffer size. Internally this is implemented - that the buffer size might be exhausted by a factor - of two however. 
- """ - warnings.warn( - "'make_chunk_iter' is deprecated and will be removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - _iter = _make_chunk_iter(stream, limit, buffer_size) - - first_item = next(_iter, b"") - - if not first_item: - return - - _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter)) - if isinstance(first_item, str): - separator = _to_str(separator) - _split = re.compile(f"({re.escape(separator)})").split - _join = "".join - else: - separator = _to_bytes(separator) - _split = re.compile(b"(" + re.escape(separator) + b")").split - _join = b"".join - - buffer: list[bytes] = [] - while True: - new_data = next(_iter, b"") - if not new_data: - break - chunks = _split(new_data) - new_buf: list[bytes] = [] - buf_size = 0 - for item in chain(buffer, chunks): - if item == separator: - yield _join(new_buf) - new_buf = [] - buf_size = 0 - else: - buf_size += len(item) - new_buf.append(item) - - if cap_at_buffer and buf_size >= buffer_size: - rv = _join(new_buf) - while len(rv) >= buffer_size: - yield rv[:buffer_size] - rv = rv[buffer_size:] - new_buf = [rv] - buf_size = len(rv) - - buffer = new_buf - if buffer: - yield _join(buffer) - - class LimitedStream(io.RawIOBase): """Wrap a stream so that it doesn't read more than a given limit. This is used to limit ``wsgi.input`` to the ``Content-Length`` header value or diff --git a/tests/test_wsgi.py b/tests/test_wsgi.py index 5f37aca97..7f4d2e9cf 100644 --- a/tests/test_wsgi.py +++ b/tests/test_wsgi.py @@ -256,142 +256,6 @@ def test_get_current_url_invalid_utf8(): assert rv == "http://localhost/?foo=bar&baz=blah&meh=%CF" -@pytest.mark.filterwarnings("ignore:'make_line_iter:DeprecationWarning") -@pytest.mark.filterwarnings("ignore:'_make_chunk_iter:DeprecationWarning") -def test_multi_part_line_breaks(): - data = b"abcdef\r\nghijkl\r\nmnopqrstuvwxyz\r\nABCDEFGHIJK" - test_stream = io.BytesIO(data) - lines = list(wsgi.make_line_iter(test_stream, limit=len(data), buffer_size=16)) - assert lines == [ - b"abcdef\r\n", - b"ghijkl\r\n", - b"mnopqrstuvwxyz\r\n", - b"ABCDEFGHIJK", - ] - - data = b"abc\r\nThis line is broken by the buffer length.\r\nFoo bar baz" - test_stream = io.BytesIO(data) - lines = list(wsgi.make_line_iter(test_stream, limit=len(data), buffer_size=24)) - assert lines == [ - b"abc\r\n", - b"This line is broken by the buffer length.\r\n", - b"Foo bar baz", - ] - - -@pytest.mark.filterwarnings("ignore:'make_line_iter:DeprecationWarning") -@pytest.mark.filterwarnings("ignore:'_make_chunk_iter:DeprecationWarning") -def test_multi_part_line_breaks_bytes(): - data = b"abcdef\r\nghijkl\r\nmnopqrstuvwxyz\r\nABCDEFGHIJK" - test_stream = io.BytesIO(data) - lines = list(wsgi.make_line_iter(test_stream, limit=len(data), buffer_size=16)) - assert lines == [ - b"abcdef\r\n", - b"ghijkl\r\n", - b"mnopqrstuvwxyz\r\n", - b"ABCDEFGHIJK", - ] - - data = b"abc\r\nThis line is broken by the buffer length.\r\nFoo bar baz" - test_stream = io.BytesIO(data) - lines = list(wsgi.make_line_iter(test_stream, limit=len(data), buffer_size=24)) - assert lines == [ - b"abc\r\n", - b"This line is broken by the buffer length.\r\n", - b"Foo bar baz", - ] - - -@pytest.mark.filterwarnings("ignore:'make_line_iter:DeprecationWarning") -@pytest.mark.filterwarnings("ignore:'_make_chunk_iter:DeprecationWarning") -def test_multi_part_line_breaks_problematic(): - data = b"abc\rdef\r\nghi" - for _ in range(1, 10): - test_stream = io.BytesIO(data) - lines = list(wsgi.make_line_iter(test_stream, limit=len(data), buffer_size=4)) - assert lines == 
[b"abc\r", b"def\r\n", b"ghi"] - - -@pytest.mark.filterwarnings("ignore:'make_line_iter:DeprecationWarning") -@pytest.mark.filterwarnings("ignore:'_make_chunk_iter:DeprecationWarning") -def test_iter_functions_support_iterators(): - data = ["abcdef\r\nghi", "jkl\r\nmnopqrstuvwxyz\r", "\nABCDEFGHIJK"] - lines = list(wsgi.make_line_iter(data)) - assert lines == ["abcdef\r\n", "ghijkl\r\n", "mnopqrstuvwxyz\r\n", "ABCDEFGHIJK"] - - -@pytest.mark.filterwarnings("ignore:'_?make_chunk_iter:DeprecationWarning") -def test_make_chunk_iter(): - data = [b"abcdefXghi", b"jklXmnopqrstuvwxyzX", b"ABCDEFGHIJK"] - rv = list(wsgi.make_chunk_iter(data, b"X")) - assert rv == [b"abcdef", b"ghijkl", b"mnopqrstuvwxyz", b"ABCDEFGHIJK"] - - data = b"abcdefXghijklXmnopqrstuvwxyzXABCDEFGHIJK" - test_stream = io.BytesIO(data) - rv = list(wsgi.make_chunk_iter(test_stream, b"X", limit=len(data), buffer_size=4)) - assert rv == [b"abcdef", b"ghijkl", b"mnopqrstuvwxyz", b"ABCDEFGHIJK"] - - -@pytest.mark.filterwarnings("ignore:'_?make_chunk_iter:DeprecationWarning") -def test_make_chunk_iter_bytes(): - data = [b"abcdefXghi", b"jklXmnopqrstuvwxyzX", b"ABCDEFGHIJK"] - rv = list(wsgi.make_chunk_iter(data, "X")) - assert rv == [b"abcdef", b"ghijkl", b"mnopqrstuvwxyz", b"ABCDEFGHIJK"] - - data = b"abcdefXghijklXmnopqrstuvwxyzXABCDEFGHIJK" - test_stream = io.BytesIO(data) - rv = list(wsgi.make_chunk_iter(test_stream, "X", limit=len(data), buffer_size=4)) - assert rv == [b"abcdef", b"ghijkl", b"mnopqrstuvwxyz", b"ABCDEFGHIJK"] - - data = b"abcdefXghijklXmnopqrstuvwxyzXABCDEFGHIJK" - test_stream = io.BytesIO(data) - rv = list( - wsgi.make_chunk_iter( - test_stream, "X", limit=len(data), buffer_size=4, cap_at_buffer=True - ) - ) - assert rv == [ - b"abcd", - b"ef", - b"ghij", - b"kl", - b"mnop", - b"qrst", - b"uvwx", - b"yz", - b"ABCD", - b"EFGH", - b"IJK", - ] - - -@pytest.mark.filterwarnings("ignore:'make_line_iter:DeprecationWarning") -@pytest.mark.filterwarnings("ignore:'_make_chunk_iter:DeprecationWarning") -def test_lines_longer_buffer_size(): - data = b"1234567890\n1234567890\n" - for bufsize in range(1, 15): - lines = list( - wsgi.make_line_iter(io.BytesIO(data), limit=len(data), buffer_size=bufsize) - ) - assert lines == [b"1234567890\n", b"1234567890\n"] - - -@pytest.mark.filterwarnings("ignore:'make_line_iter:DeprecationWarning") -@pytest.mark.filterwarnings("ignore:'_make_chunk_iter:DeprecationWarning") -def test_lines_longer_buffer_size_cap(): - data = b"1234567890\n1234567890\n" - for bufsize in range(1, 15): - lines = list( - wsgi.make_line_iter( - io.BytesIO(data), - limit=len(data), - buffer_size=bufsize, - cap_at_buffer=True, - ) - ) - assert len(lines[0]) == bufsize or lines[0].endswith(b"\n") - - def test_range_wrapper(): response = Response(b"Hello World") range_wrapper = _RangeWrapper(response.response, 6, 4) From 5ff0a573f4b78d9724f1f063fb058fd6bc76b24d Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 14 Aug 2023 10:48:51 -0700 Subject: [PATCH 023/159] remove support for bytes where str is expected --- src/werkzeug/_internal.py | 99 +----------------- src/werkzeug/datastructures/auth.py | 8 +- src/werkzeug/datastructures/headers.py | 65 ++---------- src/werkzeug/formparser.py | 77 ++------------ src/werkzeug/http.py | 90 +++-------------- src/werkzeug/routing/converters.py | 12 +-- src/werkzeug/routing/map.py | 41 +------- src/werkzeug/routing/rules.py | 9 +- src/werkzeug/sansio/http.py | 37 +------ src/werkzeug/sansio/request.py | 133 +------------------------ src/werkzeug/sansio/response.py | 48 
+-------- src/werkzeug/test.py | 68 ++++--------- src/werkzeug/urls.py | 6 +- src/werkzeug/wrappers/request.py | 13 +-- src/werkzeug/wrappers/response.py | 12 +-- src/werkzeug/wsgi.py | 38 +------ 16 files changed, 96 insertions(+), 660 deletions(-) diff --git a/src/werkzeug/_internal.py b/src/werkzeug/_internal.py index c158c92df..70ab68761 100644 --- a/src/werkzeug/_internal.py +++ b/src/werkzeug/_internal.py @@ -1,7 +1,6 @@ from __future__ import annotations import logging -import operator import re import sys import typing as t @@ -26,102 +25,12 @@ def __reduce__(self) -> str: _missing = _Missing() -@t.overload -def _make_encode_wrapper(reference: str) -> t.Callable[[str], str]: - ... - - -@t.overload -def _make_encode_wrapper(reference: bytes) -> t.Callable[[str], bytes]: - ... - - -def _make_encode_wrapper(reference: t.AnyStr) -> t.Callable[[str], t.AnyStr]: - """Create a function that will be called with a string argument. If - the reference is bytes, values will be encoded to bytes. - """ - if isinstance(reference, str): - return lambda x: x - - return operator.methodcaller("encode", "latin1") - - -def _check_str_tuple(value: tuple[t.AnyStr, ...]) -> None: - """Ensure tuple items are all strings or all bytes.""" - if not value: - return - - item_type = str if isinstance(value[0], str) else bytes - - if any(not isinstance(item, item_type) for item in value): - raise TypeError(f"Cannot mix str and bytes arguments (got {value!r})") - - -_default_encoding = sys.getdefaultencoding() - - -def _to_bytes( - x: str | bytes, charset: str = _default_encoding, errors: str = "strict" -) -> bytes: - if x is None or isinstance(x, bytes): - return x - - if isinstance(x, (bytearray, memoryview)): - return bytes(x) - - if isinstance(x, str): - return x.encode(charset, errors) - - raise TypeError("Expected bytes") - - -@t.overload -def _to_str( # type: ignore - x: None, - charset: str | None = ..., - errors: str = ..., - allow_none_charset: bool = ..., -) -> None: - ... - - -@t.overload -def _to_str( - x: t.Any, - charset: str | None = ..., - errors: str = ..., - allow_none_charset: bool = ..., -) -> str: - ... 
- - -def _to_str( - x: t.Any | None, - charset: str | None = _default_encoding, - errors: str = "strict", - allow_none_charset: bool = False, -) -> str | bytes | None: - if x is None or isinstance(x, str): - return x - - if not isinstance(x, (bytes, bytearray)): - return str(x) - - if charset is None: - if allow_none_charset: - return x - - return x.decode(charset, errors) # type: ignore - - -def _wsgi_decoding_dance( - s: str, charset: str = "utf-8", errors: str = "replace" -) -> str: - return s.encode("latin1").decode(charset, errors) +def _wsgi_decoding_dance(s: str) -> str: + return s.encode("latin1").decode(errors="replace") -def _wsgi_encoding_dance(s: str, charset: str = "utf-8", errors: str = "strict") -> str: - return s.encode(charset).decode("latin1", errors) +def _wsgi_encoding_dance(s: str) -> str: + return s.encode().decode("latin1") def _get_environ(obj: WSGIEnvironment | Request) -> WSGIEnvironment: diff --git a/src/werkzeug/datastructures/auth.py b/src/werkzeug/datastructures/auth.py index 81e588318..494576ded 100644 --- a/src/werkzeug/datastructures/auth.py +++ b/src/werkzeug/datastructures/auth.py @@ -42,7 +42,7 @@ class Authorization: def __init__( self, auth_type: str, - data: dict[str, str] | None = None, + data: dict[str, str | None] | None = None, token: str | None = None, ) -> None: self.type = auth_type @@ -168,11 +168,11 @@ class WWWAuthenticate: def __init__( self, auth_type: str, - values: dict[str, str] | None = None, + values: dict[str, str | None] | None = None, token: str | None = None, ): self._type = auth_type.lower() - self._parameters: dict[str, str] = CallbackDict( # type: ignore[misc] + self._parameters: dict[str, str | None] = CallbackDict( # type: ignore[misc] values, lambda _: self._trigger_on_update() ) self._token = token @@ -193,7 +193,7 @@ def type(self, value: str) -> None: self._trigger_on_update() @property - def parameters(self) -> dict[str, str]: + def parameters(self) -> dict[str, str | None]: """A dict of parameters for the header. Only one of this or :attr:`token` should have a value for a given scheme. """ diff --git a/src/werkzeug/datastructures/headers.py b/src/werkzeug/datastructures/headers.py index dc060c41e..d9dd655c2 100644 --- a/src/werkzeug/datastructures/headers.py +++ b/src/werkzeug/datastructures/headers.py @@ -2,7 +2,6 @@ import re import typing as t -import warnings from .._internal import _missing from ..exceptions import BadRequestKeyError @@ -82,7 +81,7 @@ def lowered(item): __hash__ = None - def get(self, key, default=None, type=None, as_bytes=None): + def get(self, key, default=None, type=None): """Return the default value if the requested data doesn't exist. If `type` is provided and is a callable it should convert the value, return it or raise a :exc:`ValueError` if that is not possible. In @@ -101,27 +100,16 @@ def get(self, key, default=None, type=None, as_bytes=None): :class:`Headers`. If a :exc:`ValueError` is raised by this callable the default value is returned. - .. versionchanged:: 2.3 - The ``as_bytes`` parameter is deprecated and will be removed - in Werkzeug 3.0. + .. versionchanged:: 3.0 + The ``as_bytes`` parameter was removed. .. versionchanged:: 0.9 The ``as_bytes`` parameter was added. 
""" - if as_bytes is not None: - warnings.warn( - "The 'as_bytes' parameter is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - try: rv = self.__getitem__(key, _get_mode=True) except KeyError: return default - if as_bytes: - rv = rv.encode("latin1") if type is None: return rv try: @@ -129,7 +117,7 @@ def get(self, key, default=None, type=None, as_bytes=None): except ValueError: return default - def getlist(self, key, type=None, as_bytes=None): + def getlist(self, key, type=None): """Return the list of items for a given key. If that key is not in the :class:`Headers`, the return value will be an empty list. Just like :meth:`get`, :meth:`getlist` accepts a `type` parameter. All items will @@ -141,27 +129,16 @@ def getlist(self, key, type=None, as_bytes=None): by this callable the value will be removed from the list. :return: a :class:`list` of all the values for the key. - .. versionchanged:: 2.3 - The ``as_bytes`` parameter is deprecated and will be removed - in Werkzeug 3.0. + .. versionchanged:: 3.0 + The ``as_bytes`` parameter was removed. .. versionchanged:: 0.9 The ``as_bytes`` parameter was added. """ - if as_bytes is not None: - warnings.warn( - "The 'as_bytes' parameter is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - ikey = key.lower() result = [] for k, v in self: if k.lower() == ikey: - if as_bytes: - v = v.encode("latin1") if type is not None: try: v = type(v) @@ -293,7 +270,6 @@ def add(self, _key, _value, **kw): """ if kw: _value = _options_header_vkw(_value, kw) - _key = _str_header_key(_key) _value = _str_header_value(_value) self._list.append((_key, _value)) @@ -326,7 +302,6 @@ def set(self, _key, _value, **kw): """ if kw: _value = _options_header_vkw(_value, kw) - _key = _str_header_key(_key) _value = _str_header_value(_value) if not self._list: self._list.append((_key, _value)) @@ -399,7 +374,7 @@ def __setitem__(self, key, value): if isinstance(key, (slice, int)): if isinstance(key, int): value = [value] - value = [(_str_header_key(k), _str_header_value(v)) for (k, v) in value] + value = [(k, _str_header_value(v)) for (k, v) in value] if isinstance(key, int): self._list[key] = value[0] else: @@ -476,36 +451,10 @@ def _options_header_vkw(value: str, kw: dict[str, t.Any]): ) -def _str_header_key(key: t.Any) -> str: - if not isinstance(key, str): - warnings.warn( - "Header keys must be strings. 
Passing other types is deprecated and will" - " not be supported in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - - if isinstance(key, bytes): - key = key.decode("latin-1") - else: - key = str(key) - - return key - - _newline_re = re.compile(r"[\r\n]") def _str_header_value(value: t.Any) -> str: - if isinstance(value, bytes): - warnings.warn( - "Passing bytes as a header value is deprecated and will not be supported in" - " Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - value = value.decode("latin-1") - if not isinstance(value, str): value = str(value) diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py index 8b88b2dc7..ee30666dd 100644 --- a/src/werkzeug/formparser.py +++ b/src/werkzeug/formparser.py @@ -1,7 +1,6 @@ from __future__ import annotations import typing as t -import warnings from io import BytesIO from urllib.parse import parse_qsl @@ -68,8 +67,6 @@ def default_stream_factory( def parse_form_data( environ: WSGIEnvironment, stream_factory: TStreamFactory | None = None, - charset: str | None = None, - errors: str | None = None, max_form_memory_size: int | None = None, max_content_length: int | None = None, cls: type[MultiDict] | None = None, @@ -108,12 +105,11 @@ def parse_form_data( is exceeded, a :exc:`~exceptions.RequestEntityTooLarge` exception is raised. :return: A tuple in the form ``(stream, form, files)``. - .. versionchanged:: 2.3 - Added the ``max_form_parts`` parameter. + .. versionchanged:: 3.0 + The ``charset`` and ``errors`` parameters were removed. .. versionchanged:: 2.3 - The ``charset`` and ``errors`` parameters are deprecated and will be removed in - Werkzeug 3.0. + Added the ``max_form_parts`` parameter. .. versionadded:: 0.5.1 Added the ``silent`` parameter. @@ -124,8 +120,6 @@ def parse_form_data( """ return FormDataParser( stream_factory=stream_factory, - charset=charset, - errors=errors, max_form_memory_size=max_form_memory_size, max_content_length=max_content_length, max_form_parts=max_form_parts, @@ -159,9 +153,8 @@ class FormDataParser: :param max_form_parts: The maximum number of multipart parts to be parsed. If this is exceeded, a :exc:`~exceptions.RequestEntityTooLarge` exception is raised. - .. versionchanged:: 2.3 - The ``charset`` and ``errors`` parameters are deprecated and will be removed in - Werkzeug 3.0. + .. versionchanged:: 3.0 + The ``charset`` and ``errors`` parameters were removed. .. versionchanged:: 3.0 The ``parse_functions`` attribute and ``get_parse_func`` methods were removed. 
@@ -175,8 +168,6 @@ class FormDataParser: def __init__( self, stream_factory: TStreamFactory | None = None, - charset: str | None = None, - errors: str | None = None, max_form_memory_size: int | None = None, max_content_length: int | None = None, cls: type[MultiDict] | None = None, @@ -188,30 +179,6 @@ def __init__( stream_factory = default_stream_factory self.stream_factory = stream_factory - - if charset is not None: - warnings.warn( - "The 'charset' parameter is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - else: - charset = "utf-8" - - self.charset = charset - - if errors is not None: - warnings.warn( - "The 'errors' parameter is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - else: - errors = "replace" - - self.errors = errors self.max_form_memory_size = max_form_memory_size self.max_content_length = max_content_length self.max_form_parts = max_form_parts @@ -284,12 +251,8 @@ def _parse_multipart( content_length: int | None, options: dict[str, str], ) -> t_parse_result: - charset = self.charset if self.charset != "utf-8" else None - errors = self.errors if self.errors != "replace" else None parser = MultiPartParser( stream_factory=self.stream_factory, - charset=charset, - errors=errors, max_form_memory_size=self.max_form_memory_size, max_form_parts=self.max_form_parts, cls=self.cls, @@ -320,7 +283,6 @@ def _parse_urlencoded( items = parse_qsl( stream.read().decode(), keep_blank_values=True, - encoding=self.charset, errors="werkzeug.url_quote", ) except ValueError as e: @@ -333,36 +295,11 @@ class MultiPartParser: def __init__( self, stream_factory: TStreamFactory | None = None, - charset: str | None = None, - errors: str | None = None, max_form_memory_size: int | None = None, cls: type[MultiDict] | None = None, buffer_size: int = 64 * 1024, max_form_parts: int | None = None, ) -> None: - if charset is not None: - warnings.warn( - "The 'charset' parameter is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - else: - charset = "utf-8" - - self.charset = charset - - if errors is not None: - warnings.warn( - "The 'errors' parameter is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - else: - errors = "replace" - - self.errors = errors self.max_form_memory_size = max_form_memory_size self.max_form_parts = max_form_parts @@ -393,7 +330,7 @@ def get_part_charset(self, headers: Headers) -> str: if ct_charset in {"ascii", "us-ascii", "utf-8", "iso-8859-1"}: return ct_charset - return self.charset + return "utf-8" def start_file_streaming( self, event: File, total_content_length: int | None @@ -446,7 +383,7 @@ def parse( if not event.more_data: if isinstance(current_part, Field): value = b"".join(container).decode( - self.get_part_charset(current_part.headers), self.errors + self.get_part_charset(current_part.headers), "replace" ) fields.append((current_part.name, value)) else: diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py index 1b89e191b..8280f51fa 100644 --- a/src/werkzeug/http.py +++ b/src/werkzeug/http.py @@ -146,25 +146,17 @@ def quote_header_value(value: t.Any, allow_token: bool = True) -> str: :param value: The value to quote. Will be converted to a string. :param allow_token: Disable to quote the value even if it only has token characters. + .. versionchanged:: 3.0 + Passing bytes is not supported. + .. versionchanged:: 3.0 The ``extra_chars`` parameter is removed. .. 
versionchanged:: 2.3 The value is quoted if it is the empty string. - .. versionchanged:: 2.3 - Passing bytes is deprecated and will not be supported in Werkzeug 3.0. - .. versionadded:: 0.5 """ - if isinstance(value, bytes): - warnings.warn( - "Passing bytes is deprecated and will not be supported in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - value = value.decode("latin1") - value = str(value) if not value: @@ -332,7 +324,7 @@ def parse_list_header(value: str) -> list[str]: return result -def parse_dict_header(value: str) -> dict[str, str]: +def parse_dict_header(value: str) -> dict[str, str | None]: """Parse a list header using :func:`parse_list_header`, then parse each item as a ``key=value`` pair. @@ -351,27 +343,19 @@ def parse_dict_header(value: str) -> dict[str, str]: :param value: The header value to parse. + .. versionchanged:: 3.0 + Passing bytes is not supported. + .. versionchanged:: 3.0 The ``cls`` argument is removed. .. versionchanged:: 2.3 Added support for ``key*=charset''value`` encoded items. - .. versionchanged:: 2.3 - Passing bytes is deprecated, support will be removed in Werkzeug 3.0. - .. versionchanged:: 0.9 The ``cls`` argument was added. """ - result = {} - - if isinstance(value, bytes): - warnings.warn( - "Passing bytes is deprecated and will be removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - value = value.decode("latin1") + result: dict[str, str | None] = {} for item in parse_list_header(value): key, has_value, value = item.partition("=") @@ -1176,8 +1160,6 @@ def is_hop_by_hop_header(header: str) -> bool: def parse_cookie( header: WSGIEnvironment | str | None, - charset: str | None = None, - errors: str | None = None, cls: type[ds.MultiDict] | None = None, ) -> ds.MultiDict[str, str]: """Parse a cookie from a string or WSGI environ. @@ -1192,9 +1174,8 @@ def parse_cookie( :param cls: A dict-like class to store the parsed cookies in. Defaults to :class:`MultiDict`. - .. versionchanged:: 2.3 - Passing bytes, and the ``charset`` and ``errors`` parameters, are deprecated and - will be removed in Werkzeug 3.0. + .. versionchanged:: 3.0 + Passing bytes, and the ``charset`` and ``errors`` parameters, were removed. .. versionchanged:: 1.0 Returns a :class:`MultiDict` instead of a ``TypeConversionDict``. @@ -1205,22 +1186,13 @@ def parse_cookie( """ if isinstance(header, dict): cookie = header.get("HTTP_COOKIE") - elif isinstance(header, bytes): - warnings.warn( - "Passing bytes is deprecated and will not be supported in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - cookie = header.decode() else: cookie = header if cookie: cookie = cookie.encode("latin1").decode() - return _sansio_http.parse_cookie( - cookie=cookie, charset=charset, errors=errors, cls=cls - ) + return _sansio_http.parse_cookie(cookie=cookie, cls=cls) _cookie_no_quote_re = re.compile(r"[\w!#$%&'()*+\-./:<=>?@\[\]^`{|}~]*", re.A) @@ -1241,7 +1213,6 @@ def dump_cookie( domain: str | None = None, secure: bool = False, httponly: bool = False, - charset: str | None = None, sync_expires: bool = True, max_size: int = 4093, samesite: str | None = None, @@ -1284,6 +1255,9 @@ def dump_cookie( .. _`cookie`: http://browsercookielimits.squawky.net/ + .. versionchanged:: 3.0 + Passing bytes, and the ``charset`` parameter, were removed. + .. versionchanged:: 2.3.3 The ``path`` parameter is ``/`` by default. @@ -1297,46 +1271,14 @@ def dump_cookie( .. versionchanged:: 2.3 The ``path`` parameter is ``None`` by default. - .. 
versionchanged:: 2.3 - Passing bytes, and the ``charset`` parameter, are deprecated and will be removed - in Werkzeug 3.0. - .. versionchanged:: 1.0.0 The string ``'None'`` is accepted for ``samesite``. """ - if charset is not None: - warnings.warn( - "The 'charset' parameter is deprecated and will be removed" - " in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - else: - charset = "utf-8" - - if isinstance(key, bytes): - warnings.warn( - "The 'key' parameter must be a string. Bytes are deprecated" - " and will not be supported in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - key = key.decode() - - if isinstance(value, bytes): - warnings.warn( - "The 'value' parameter must be a string. Bytes are" - " deprecated and will not be supported in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - value = value.decode() - if path is not None: # safe = https://url.spec.whatwg.org/#url-path-segment-string # as well as percent for things that are already quoted # excluding semicolon since it's part of the header syntax - path = quote(path, safe="%!$&'()*+,/:=@", encoding=charset) + path = quote(path, safe="%!$&'()*+,/:=@") if domain: domain = domain.partition(":")[0].lstrip(".").encode("idna").decode("ascii") @@ -1361,7 +1303,7 @@ def dump_cookie( if not _cookie_no_quote_re.fullmatch(value): # Work with bytes here, since a UTF-8 character could be multiple bytes. value = _cookie_slash_re.sub( - lambda m: _cookie_slash_map[m.group()], value.encode(charset) + lambda m: _cookie_slash_map[m.group()], value.encode() ).decode("ascii") value = f'"{value}"' diff --git a/src/werkzeug/routing/converters.py b/src/werkzeug/routing/converters.py index c59e2abcb..eeb5bbd93 100644 --- a/src/werkzeug/routing/converters.py +++ b/src/werkzeug/routing/converters.py @@ -3,7 +3,6 @@ import re import typing as t import uuid -import warnings from urllib.parse import quote if t.TYPE_CHECKING: @@ -42,17 +41,8 @@ def to_python(self, value: str) -> t.Any: return value def to_url(self, value: t.Any) -> str: - if isinstance(value, (bytes, bytearray)): - warnings.warn( - "Passing bytes as a URL value is deprecated and will not be supported" - " in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=7, - ) - return quote(value, safe="!$&'()*+,/:;=@") - # safe = https://url.spec.whatwg.org/#url-path-segment-string - return quote(str(value), encoding=self.map.charset, safe="!$&'()*+,/:;=@") + return quote(str(value), safe="!$&'()*+,/:;=@") class UnicodeConverter(BaseConverter): diff --git a/src/werkzeug/routing/map.py b/src/werkzeug/routing/map.py index 0d02bb8b7..76bbe2f3b 100644 --- a/src/werkzeug/routing/map.py +++ b/src/werkzeug/routing/map.py @@ -47,7 +47,6 @@ class Map: :param rules: sequence of url rules for this map. :param default_subdomain: The default subdomain for rules without a subdomain defined. - :param charset: charset of the url. defaults to ``"utf-8"`` :param strict_slashes: If a rule ends with a slash but the matched URL does not, redirect to the URL with a trailing slash. :param merge_slashes: Merge consecutive slashes when matching or @@ -62,15 +61,13 @@ class Map: :param sort_parameters: If set to `True` the url parameters are sorted. See `url_encode` for more details. :param sort_key: The sort key function for `url_encode`. - :param encoding_errors: the error method to use for decoding :param host_matching: if set to `True` it enables the host matching feature and disables the subdomain one. If enabled the `host` parameter to rules is used instead of the `subdomain` one. - .. 
versionchanged:: 2.3 - The ``charset`` and ``encoding_errors`` parameters are deprecated and will be - removed in Werkzeug 3.0. + .. versionchanged:: 3.0 + The ``charset`` and ``encoding_errors`` parameters were removed. .. versionchanged:: 1.0 If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules will match. @@ -97,14 +94,12 @@ def __init__( self, rules: t.Iterable[RuleFactory] | None = None, default_subdomain: str = "", - charset: str | None = None, strict_slashes: bool = True, merge_slashes: bool = True, redirect_defaults: bool = True, converters: t.Mapping[str, type[BaseConverter]] | None = None, sort_parameters: bool = False, sort_key: t.Callable[[t.Any], t.Any] | None = None, - encoding_errors: str | None = None, host_matching: bool = False, ) -> None: self._matcher = StateMachineMatcher(merge_slashes) @@ -113,30 +108,6 @@ def __init__( self._remap_lock = self.lock_class() self.default_subdomain = default_subdomain - - if charset is not None: - warnings.warn( - "The 'charset' parameter is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - else: - charset = "utf-8" - - self.charset = charset - - if encoding_errors is not None: - warnings.warn( - "The 'encoding_errors' parameter is deprecated and will be" - " removed in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - else: - encoding_errors = "replace" - - self.encoding_errors = encoding_errors self.strict_slashes = strict_slashes self.merge_slashes = merge_slashes self.redirect_defaults = redirect_defaults @@ -362,7 +333,7 @@ def bind_to_environ( def _get_wsgi_string(name: str) -> str | None: val = env.get(name) if val is not None: - return _wsgi_decoding_dance(val, self.charset) + return _wsgi_decoding_dance(val) return None script_name = _get_wsgi_string("SCRIPT_NAME") @@ -629,9 +600,7 @@ def match( result = self.map._matcher.match(domain_part, path_part, method, websocket) except RequestPath as e: # safe = https://url.spec.whatwg.org/#url-path-segment-string - new_path = quote( - e.path_info, safe="!$&'()*+,/:;=@", encoding=self.map.charset - ) + new_path = quote(e.path_info, safe="!$&'()*+,/:;=@") raise RequestRedirect( self.make_redirect_url(new_path, query_args) ) from None @@ -767,7 +736,7 @@ def get_default_redirect( def encode_query_args(self, query_args: t.Mapping[str, t.Any] | str) -> str: if not isinstance(query_args, str): - return _urlencode(query_args, encoding=self.map.charset) + return _urlencode(query_args) return query_args def make_redirect_url( diff --git a/src/werkzeug/routing/rules.py b/src/werkzeug/routing/rules.py index 904a02258..5c8184ccb 100644 --- a/src/werkzeug/routing/rules.py +++ b/src/werkzeug/routing/rules.py @@ -583,7 +583,7 @@ def _encode_query_vars(self, query_vars: t.Mapping[str, t.Any]) -> str: if self.map.sort_parameters: items = sorted(items, key=self.map.sort_key) - return _urlencode(items, encoding=self.map.charset) + return _urlencode(items) def _parse_rule(self, rule: str) -> t.Iterable[RulePart]: content = "" @@ -739,12 +739,7 @@ def _compile_builder( opl.append((False, data)) elif not is_dynamic: # safe = https://url.spec.whatwg.org/#url-path-segment-string - opl.append( - ( - False, - quote(data, safe="!$&'()*+,/:;=@", encoding=self.map.charset), - ) - ) + opl.append((False, quote(data, safe="!$&'()*+,/:;=@"))) else: opl.append((True, data)) diff --git a/src/werkzeug/sansio/http.py b/src/werkzeug/sansio/http.py index 21a619720..e3cd3330c 100644 --- a/src/werkzeug/sansio/http.py +++ b/src/werkzeug/sansio/http.py @@ -2,7 
+2,6 @@ import re import typing as t -import warnings from datetime import datetime from .._internal import _dt_as_utc @@ -123,8 +122,6 @@ def _cookie_unslash_replace(m: t.Match[bytes]) -> bytes: def parse_cookie( cookie: str | None = None, - charset: str | None = None, - errors: str | None = None, cls: type[ds.MultiDict] | None = None, ) -> ds.MultiDict[str, str]: """Parse a cookie from a string. @@ -138,42 +135,14 @@ def parse_cookie( :param cls: A dict-like class to store the parsed cookies in. Defaults to :class:`MultiDict`. - .. versionchanged:: 2.3 - Passing bytes, and the ``charset`` and ``errors`` parameters, are deprecated and - will be removed in Werkzeug 3.0. + .. versionchanged:: 3.0 + Passing bytes, and the ``charset`` and ``errors`` parameters, were removed. .. versionadded:: 2.2 """ if cls is None: cls = ds.MultiDict - if isinstance(cookie, bytes): - warnings.warn( - "The 'cookie' parameter must be a string. Passing bytes is deprecated and" - " will not be supported in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - cookie = cookie.decode() - - if charset is not None: - warnings.warn( - "The 'charset' parameter is deprecated and will be removed in Werkzeug 3.0", - DeprecationWarning, - stacklevel=2, - ) - else: - charset = "utf-8" - - if errors is not None: - warnings.warn( - "The 'errors' parameter is deprecated and will be removed in Werkzeug 3.0", - DeprecationWarning, - stacklevel=2, - ) - else: - errors = "replace" - if not cookie: return cls() @@ -191,7 +160,7 @@ def parse_cookie( # Work with bytes here, since a UTF-8 character could be multiple bytes. cv = _cookie_unslash_re.sub( _cookie_unslash_replace, cv[1:-1].encode() - ).decode(charset, errors) + ).decode(errors="replace") out.append((ck, cv)) diff --git a/src/werkzeug/sansio/request.py b/src/werkzeug/sansio/request.py index 0bcda90b2..b59bd5bd6 100644 --- a/src/werkzeug/sansio/request.py +++ b/src/werkzeug/sansio/request.py @@ -1,7 +1,6 @@ from __future__ import annotations import typing as t -import warnings from datetime import datetime from urllib.parse import parse_qsl @@ -59,95 +58,13 @@ class Request: :param headers: The headers received with the request. :param remote_addr: The address of the client sending the request. + .. versionchanged:: 3.0 + The ``charset``, ``url_charset``, and ``encoding_errors`` attributes + were removed. + .. versionadded:: 2.0 """ - _charset: str - - @property - def charset(self) -> str: - """The charset used to decode body, form, and cookie data. Defaults to UTF-8. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Request data must always be UTF-8. - """ - warnings.warn( - "The 'charset' attribute is deprecated and will not be used in Werkzeug" - " 2.4. Interpreting bytes as text in body, form, and cookie data will" - " always use UTF-8.", - DeprecationWarning, - stacklevel=2, - ) - return self._charset - - @charset.setter - def charset(self, value: str) -> None: - warnings.warn( - "The 'charset' attribute is deprecated and will not be used in Werkzeug" - " 2.4. Interpreting bytes as text in body, form, and cookie data will" - " always use UTF-8.", - DeprecationWarning, - stacklevel=2, - ) - self._charset = value - - _encoding_errors: str - - @property - def encoding_errors(self) -> str: - """How errors when decoding bytes are handled. Defaults to "replace". - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. 
- """ - warnings.warn( - "The 'encoding_errors' attribute is deprecated and will not be used in" - " Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - return self._encoding_errors - - @encoding_errors.setter - def encoding_errors(self, value: str) -> None: - warnings.warn( - "The 'encoding_errors' attribute is deprecated and will not be used in" - " Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - self._encoding_errors = value - - _url_charset: str - - @property - def url_charset(self) -> str: - """The charset to use when decoding percent-encoded bytes in :attr:`args`. - Defaults to the value of :attr:`charset`, which defaults to UTF-8. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Percent-encoded bytes must always be UTF-8. - - .. versionadded:: 0.6 - """ - warnings.warn( - "The 'url_charset' attribute is deprecated and will not be used in" - " Werkzeug 3.0. Percent-encoded bytes must always be UTF-8.", - DeprecationWarning, - stacklevel=2, - ) - return self._url_charset - - @url_charset.setter - def url_charset(self, value: str) -> None: - warnings.warn( - "The 'url_charset' attribute is deprecated and will not be used in" - " Werkzeug 3.0. Percent-encoded bytes must always be UTF-8.", - DeprecationWarning, - stacklevel=2, - ) - self._url_charset = value - #: the class to use for `args` and `form`. The default is an #: :class:`~werkzeug.datastructures.ImmutableMultiDict` which supports #: multiple values per key. alternatively it makes sense to use an @@ -209,40 +126,6 @@ def __init__( headers: Headers, remote_addr: str | None, ) -> None: - if not isinstance(type(self).charset, property): - warnings.warn( - "The 'charset' attribute is deprecated and will not be used in Werkzeug" - " 2.4. Interpreting bytes as text in body, form, and cookie data will" - " always use UTF-8.", - DeprecationWarning, - stacklevel=2, - ) - self._charset = self.charset - else: - self._charset = "utf-8" - - if not isinstance(type(self).encoding_errors, property): - warnings.warn( - "The 'encoding_errors' attribute is deprecated and will not be used in" - " Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - self._encoding_errors = self.encoding_errors - else: - self._encoding_errors = "replace" - - if not isinstance(type(self).url_charset, property): - warnings.warn( - "The 'url_charset' attribute is deprecated and will not be used in" - " Werkzeug 3.0. Percent-encoded bytes must always be UTF-8.", - DeprecationWarning, - stacklevel=2, - ) - self._url_charset = self.url_charset - else: - self._url_charset = self._charset - #: The method the request was made with, such as ``GET``. 
self.method = method.upper() #: The URL scheme of the protocol the request used, such as @@ -291,7 +174,6 @@ def args(self) -> MultiDict[str, str]: parse_qsl( self.query_string.decode(), keep_blank_values=True, - encoding=self._url_charset, errors="werkzeug.url_quote", ) ) @@ -360,13 +242,8 @@ def cookies(self) -> ImmutableMultiDict[str, str]: """A :class:`dict` with the contents of all cookies transmitted with the request.""" wsgi_combined_cookie = ";".join(self.headers.getlist("Cookie")) - charset = self._charset if self._charset != "utf-8" else None - errors = self._encoding_errors if self._encoding_errors != "replace" else None return parse_cookie( # type: ignore - wsgi_combined_cookie, - charset=charset, - errors=errors, - cls=self.dict_storage_class, + wsgi_combined_cookie, cls=self.dict_storage_class ) # Common Descriptors diff --git a/src/werkzeug/sansio/response.py b/src/werkzeug/sansio/response.py index e5c1df743..271974ecf 100644 --- a/src/werkzeug/sansio/response.py +++ b/src/werkzeug/sansio/response.py @@ -1,7 +1,6 @@ from __future__ import annotations import typing as t -import warnings from datetime import datetime from datetime import timedelta from datetime import timezone @@ -81,36 +80,12 @@ class Response: :param content_type: The full content type of the response. Overrides building the value from ``mimetype``. + .. versionchanged:: 3.0 + The ``charset`` attribute was removed. + .. versionadded:: 2.0 """ - _charset: str - - @property - def charset(self) -> str: - """The charset used to encode body and cookie data. Defaults to UTF-8. - - .. deprecated:: 2.3 - Will be removed in Werkzeug 3.0. Response data must always be UTF-8. - """ - warnings.warn( - "The 'charset' attribute is deprecated and will not be used in Werkzeug" - " 2.4. Text in body and cookie data will always use UTF-8.", - DeprecationWarning, - stacklevel=2, - ) - return self._charset - - @charset.setter - def charset(self, value: str) -> None: - warnings.warn( - "The 'charset' attribute is deprecated and will not be used in Werkzeug" - " 2.4. Text in body and cookie data will always use UTF-8.", - DeprecationWarning, - stacklevel=2, - ) - self._charset = value - #: the default status if none is provided. default_status = 200 @@ -139,17 +114,6 @@ def __init__( mimetype: str | None = None, content_type: str | None = None, ) -> None: - if not isinstance(type(self).charset, property): - warnings.warn( - "The 'charset' attribute is deprecated and will not be used in Werkzeug" - " 2.4. Text in body and cookie data will always use UTF-8.", - DeprecationWarning, - stacklevel=2, - ) - self._charset = self.charset - else: - self._charset = "utf-8" - if isinstance(headers, Headers): self.headers = headers elif not headers: @@ -161,7 +125,7 @@ def __init__( if mimetype is None and "content-type" not in self.headers: mimetype = self.default_mimetype if mimetype is not None: - mimetype = get_content_type(mimetype, self._charset) + mimetype = get_content_type(mimetype, "utf-8") content_type = mimetype if content_type is not None: self.headers["Content-Type"] = content_type @@ -255,7 +219,6 @@ def set_cookie( :param samesite: Limit the scope of the cookie to only be attached to requests that are "same-site". 
""" - charset = self._charset if self._charset != "utf-8" else None self.headers.add( "Set-Cookie", dump_cookie( @@ -267,7 +230,6 @@ def set_cookie( domain=domain, secure=secure, httponly=httponly, - charset=charset, max_size=self.max_cookie_size, samesite=samesite, ), @@ -332,7 +294,7 @@ def mimetype(self) -> str | None: @mimetype.setter def mimetype(self, value: str) -> None: - self.headers["Content-Type"] = get_content_type(value, self._charset) + self.headers["Content-Type"] = get_content_type(value, "utf-8") @property def mimetype_params(self) -> dict[str, str]: diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py index 26e9c3b13..7b5899a97 100644 --- a/src/werkzeug/test.py +++ b/src/werkzeug/test.py @@ -4,7 +4,6 @@ import mimetypes import sys import typing as t -import warnings from collections import defaultdict from datetime import datetime from io import BytesIO @@ -17,7 +16,6 @@ from urllib.parse import urlunsplit from ._internal import _get_environ -from ._internal import _make_encode_wrapper from ._internal import _wsgi_decoding_dance from ._internal import _wsgi_encoding_dance from .datastructures import Authorization @@ -58,24 +56,14 @@ def stream_encode_multipart( use_tempfile: bool = True, threshold: int = 1024 * 500, boundary: str | None = None, - charset: str | None = None, ) -> tuple[t.IO[bytes], int, str]: """Encode a dict of values (either strings or file descriptors or :class:`FileStorage` objects.) into a multipart encoded string stored in a file descriptor. - .. versionchanged:: 2.3 - The ``charset`` parameter is deprecated and will be removed in Werkzeug 3.0 + .. versionchanged:: 3.0 + The ``charset`` parameter was removed. """ - if charset is not None: - warnings.warn( - "The 'charset' parameter is deprecated and will be removed in Werkzeug 3.0", - DeprecationWarning, - stacklevel=2, - ) - else: - charset = "utf-8" - if boundary is None: boundary = f"---------------WerkzeugFormPart_{time()}{random()}" @@ -144,9 +132,7 @@ def write_binary(s: bytes) -> int: if not isinstance(value, str): value = str(value) write_binary(encoder.send_event(Field(name=key, headers=Headers()))) - write_binary( - encoder.send_event(Data(data=value.encode(charset), more_data=False)) - ) + write_binary(encoder.send_event(Data(data=value.encode(), more_data=False))) write_binary(encoder.send_event(Epilogue(data=b""))) @@ -156,18 +142,16 @@ def write_binary(s: bytes) -> int: def encode_multipart( - values: t.Mapping[str, t.Any], - boundary: str | None = None, - charset: str | None = None, + values: t.Mapping[str, t.Any], boundary: str | None = None ) -> tuple[str, bytes]: """Like `stream_encode_multipart` but returns a tuple in the form (``boundary``, ``data``) where data is bytes. - .. versionchanged:: 2.3 - The ``charset`` parameter is deprecated and will be removed in Werkzeug 3.0 + .. versionchanged:: 3.0 + The ``charset`` parameter was removed. """ stream, length, boundary = stream_encode_multipart( - values, use_tempfile=False, boundary=boundary, charset=charset + values, use_tempfile=False, boundary=boundary ) return boundary, stream.read() @@ -259,8 +243,8 @@ class EnvironBuilder: ``Authorization`` header value. A ``(username, password)`` tuple is a shortcut for ``Basic`` authorization. - .. versionchanged:: 2.3 - The ``charset`` parameter is deprecated and will be removed in Werkzeug 3.0 + .. versionchanged:: 3.0 + The ``charset`` parameter was removed. .. 
versionchanged:: 2.1 ``CONTENT_TYPE`` and ``CONTENT_LENGTH`` are not duplicated as @@ -328,29 +312,16 @@ def __init__( data: None | (t.IO[bytes] | str | bytes | t.Mapping[str, t.Any]) = None, environ_base: t.Mapping[str, t.Any] | None = None, environ_overrides: t.Mapping[str, t.Any] | None = None, - charset: str | None = None, mimetype: str | None = None, json: t.Mapping[str, t.Any] | None = None, auth: Authorization | tuple[str, str] | None = None, ) -> None: - path_s = _make_encode_wrapper(path) - if query_string is not None and path_s("?") in path: + if query_string is not None and "?" in path: raise ValueError("Query string is defined in the path and as an argument") request_uri = urlsplit(path) - if query_string is None and path_s("?") in path: + if query_string is None and "?" in path: query_string = request_uri.query - if charset is not None: - warnings.warn( - "The 'charset' parameter is deprecated and will be" - " removed in Werkzeug 3.0", - DeprecationWarning, - stacklevel=2, - ) - else: - charset = "utf-8" - - self.charset = charset self.path = iri_to_uri(request_uri.path) self.request_uri = path if base_url is not None: @@ -407,7 +378,7 @@ def __init__( if hasattr(data, "read"): data = data.read() if isinstance(data, str): - data = data.encode(self.charset) + data = data.encode() if isinstance(data, bytes): self.input_stream = BytesIO(data) if self.content_length is None: @@ -524,7 +495,7 @@ def mimetype(self) -> str | None: @mimetype.setter def mimetype(self, value: str) -> None: - self.content_type = get_content_type(value, self.charset) + self.content_type = get_content_type(value, "utf-8") @property def mimetype_params(self) -> t.Mapping[str, str]: @@ -626,7 +597,7 @@ def query_string(self) -> str: """ if self._query_string is None: if self._args is not None: - return _urlencode(self._args, encoding=self.charset) + return _urlencode(self._args) return "" return self._query_string @@ -714,13 +685,12 @@ def get_environ(self) -> WSGIEnvironment: input_stream.seek(start_pos) content_length = end_pos - start_pos elif mimetype == "multipart/form-data": - charset = self.charset if self.charset != "utf-8" else None input_stream, content_length, boundary = stream_encode_multipart( - CombinedMultiDict([self.form, self.files]), charset=charset + CombinedMultiDict([self.form, self.files]) ) content_type = f'{mimetype}; boundary="{boundary}"' elif mimetype == "application/x-www-form-urlencoded": - form_encoded = _urlencode(self.form, encoding=self.charset).encode("ascii") + form_encoded = _urlencode(self.form).encode("ascii") content_length = len(form_encoded) input_stream = BytesIO(form_encoded) else: @@ -731,15 +701,15 @@ def get_environ(self) -> WSGIEnvironment: result.update(self.environ_base) def _path_encode(x: str) -> str: - return _wsgi_encoding_dance(unquote(x, encoding=self.charset), self.charset) + return _wsgi_encoding_dance(unquote(x)) - raw_uri = _wsgi_encoding_dance(self.request_uri, self.charset) + raw_uri = _wsgi_encoding_dance(self.request_uri) result.update( { "REQUEST_METHOD": self.method, "SCRIPT_NAME": _path_encode(self.script_root), "PATH_INFO": _path_encode(self.path), - "QUERY_STRING": _wsgi_encoding_dance(self.query_string, self.charset), + "QUERY_STRING": _wsgi_encoding_dance(self.query_string), # Non-standard, added by mod_wsgi, uWSGI "REQUEST_URI": raw_uri, # Non-standard, added by gunicorn diff --git a/src/werkzeug/urls.py b/src/werkzeug/urls.py index cf29da0d9..4d61e600b 100644 --- a/src/werkzeug/urls.py +++ b/src/werkzeug/urls.py @@ -210,9 +210,7 @@ def 
_decode_idna(domain: str) -> str: return ".".join(parts) -def _urlencode( - query: t.Mapping[str, str] | t.Iterable[tuple[str, str]], encoding: str = "utf-8" -) -> str: +def _urlencode(query: t.Mapping[str, str] | t.Iterable[tuple[str, str]]) -> str: items = [x for x in iter_multi_items(query) if x[1] is not None] # safe = https://url.spec.whatwg.org/#percent-encoded-bytes - return urlencode(items, safe="!$'()*,/:;?@", encoding=encoding) + return urlencode(items, safe="!$'()*,/:;?@") diff --git a/src/werkzeug/wrappers/request.py b/src/werkzeug/wrappers/request.py index f4f51b1dc..25b091691 100644 --- a/src/werkzeug/wrappers/request.py +++ b/src/werkzeug/wrappers/request.py @@ -50,6 +50,10 @@ class Request(_SansIORequest): prevent consuming the form data in middleware, which would make it unavailable to the final application. + .. versionchanged:: 3.0 + The ``charset``, ``url_charset``, and ``encoding_errors`` parameters + were removed. + .. versionchanged:: 2.1 Old ``BaseRequest`` and mixin classes were removed. @@ -145,9 +149,6 @@ def from_values(cls, *args: t.Any, **kwargs: t.Any) -> Request: """ from ..test import EnvironBuilder - kwargs.setdefault( - "charset", cls.charset if not isinstance(cls.charset, property) else None - ) builder = EnvironBuilder(*args, **kwargs) try: return builder.get_request(cls) @@ -240,12 +241,8 @@ def make_form_data_parser(self) -> FormDataParser: .. versionadded:: 0.8 """ - charset = self._charset if self._charset != "utf-8" else None - errors = self._encoding_errors if self._encoding_errors != "replace" else None return self.form_data_parser_class( stream_factory=self._get_file_stream, - charset=charset, - errors=errors, max_form_memory_size=self.max_form_memory_size, max_content_length=self.max_content_length, max_form_parts=self.max_form_parts, @@ -424,7 +421,7 @@ def get_data( if cache: self._cached_data = rv if as_text: - rv = rv.decode(self._charset, self._encoding_errors) + rv = rv.decode(errors="replace") return rv @cached_property diff --git a/src/werkzeug/wrappers/response.py b/src/werkzeug/wrappers/response.py index c8488094e..ee5c69478 100644 --- a/src/werkzeug/wrappers/response.py +++ b/src/werkzeug/wrappers/response.py @@ -28,10 +28,10 @@ from .request import Request -def _iter_encoded(iterable: t.Iterable[str | bytes], charset: str) -> t.Iterator[bytes]: +def _iter_encoded(iterable: t.Iterable[str | bytes]) -> t.Iterator[bytes]: for item in iterable: if isinstance(item, str): - yield item.encode(charset) + yield item.encode() else: yield item @@ -284,7 +284,7 @@ def get_data(self, as_text: bool = False) -> bytes | str: rv = b"".join(self.iter_encoded()) if as_text: - return rv.decode(self._charset) + return rv.decode() return rv @@ -296,7 +296,7 @@ def set_data(self, value: bytes | str) -> None: .. versionadded:: 0.9 """ if isinstance(value, str): - value = value.encode(self._charset) + value = value.encode() self.response = [value] if self.automatically_set_content_length: self.headers["Content-Length"] = str(len(value)) @@ -366,7 +366,7 @@ def iter_encoded(self) -> t.Iterator[bytes]: # Encode in a separate function so that self.response is fetched # early. This allows us to wrap the response with the return # value from get_app_iter or iter_encoded. 
- return _iter_encoded(self.response, self._charset) + return _iter_encoded(self.response) @property def is_streamed(self) -> bool: @@ -832,4 +832,4 @@ def tell(self) -> int: @property def encoding(self) -> str: - return self.response._charset + return "utf-8" diff --git a/src/werkzeug/wsgi.py b/src/werkzeug/wsgi.py index 27c580223..01d40af2f 100644 --- a/src/werkzeug/wsgi.py +++ b/src/werkzeug/wsgi.py @@ -2,7 +2,6 @@ import io import typing as t -import warnings from functools import partial from functools import update_wrapper @@ -195,45 +194,18 @@ def get_input_stream( return t.cast(t.IO[bytes], LimitedStream(stream, content_length)) -def get_path_info( - environ: WSGIEnvironment, - charset: t.Any = ..., - errors: str | None = None, -) -> str: +def get_path_info(environ: WSGIEnvironment) -> str: """Return ``PATH_INFO`` from the WSGI environment. :param environ: WSGI environment to get the path from. - .. versionchanged:: 2.3 - The ``charset`` and ``errors`` parameters are deprecated and will be removed in - Werkzeug 3.0. + .. versionchanged:: 3.0 + The ``charset`` and ``errors`` parameters were removed. .. versionadded:: 0.9 """ - if charset is not ...: - warnings.warn( - "The 'charset' parameter is deprecated and will be removed" - " in Werkzeug 3.0.", - DeprecationWarning, - stacklevel=2, - ) - - if charset is None: - charset = "utf-8" - else: - charset = "utf-8" - - if errors is not None: - warnings.warn( - "The 'errors' parameter is deprecated and will be removed in Werkzeug 3.0", - DeprecationWarning, - stacklevel=2, - ) - else: - errors = "replace" - - path = environ.get("PATH_INFO", "").encode("latin1") - return path.decode(charset, errors) # type: ignore[no-any-return] + path: bytes = environ.get("PATH_INFO", "").encode("latin1") + return path.decode(errors="replace") class ClosingIterator: From 9a21003d45d336d5173396c4223872061015a524 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 14 Aug 2023 12:54:14 -0700 Subject: [PATCH 024/159] use scrypt as default --- CHANGES.rst | 1 + src/werkzeug/security.py | 9 ++++----- tests/test_security.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index e4cb06d50..b074cdc56 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -6,6 +6,7 @@ Version 3.0.0 Unreleased - Remove previously deprecated code. :pr:`2768` +- ``generate_password_hash`` uses scrypt by default. :issue:`2769` Version 2.3.8 diff --git a/src/werkzeug/security.py b/src/werkzeug/security.py index 3620d7c9c..578caf7f3 100644 --- a/src/werkzeug/security.py +++ b/src/werkzeug/security.py @@ -67,17 +67,16 @@ def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]: def generate_password_hash( - password: str, method: str = "pbkdf2", salt_length: int = 16 + password: str, method: str = "scrypt", salt_length: int = 16 ) -> str: """Securely hash a password for storage. A password can be compared to a stored hash using :func:`check_password_hash`. The following methods are supported: - - ``scrypt``, more secure but not available on PyPy. The parameters are ``n``, - ``r``, and ``p``, the default is ``scrypt:32768:8:1``. See - :func:`hashlib.scrypt`. - - ``pbkdf2``, the default. The parameters are ``hash_method`` and ``iterations``, + - ``scrypt``, the default. The parameters are ``n``, ``r``, and ``p``, the default + is ``scrypt:32768:8:1``. See :func:`hashlib.scrypt`. + - ``pbkdf2``, less secure. The parameters are ``hash_method`` and ``iterations``, the default is ``pbkdf2:sha256:600000``. 
See :func:`hashlib.pbkdf2_hmac`. Default parameters may be updated to reflect current guidelines, and methods may be diff --git a/tests/test_security.py b/tests/test_security.py index ae63046d5..6fad089a7 100644 --- a/tests/test_security.py +++ b/tests/test_security.py @@ -11,7 +11,7 @@ def test_default_password_method(): value = generate_password_hash("secret") - assert value.startswith("pbkdf2:") + assert value.startswith("scrypt:") @pytest.mark.xfail( From 40ba284a21598e67eba3faa431dc0e26a5a9c6d5 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 14 Aug 2023 13:12:59 -0700 Subject: [PATCH 025/159] deprecate __version__ attribute --- CHANGES.rst | 2 ++ pyproject.toml | 2 +- src/werkzeug/__init__.py | 21 ++++++++++++++++++++- src/werkzeug/serving.py | 8 +++++--- 4 files changed, 28 insertions(+), 5 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index b074cdc56..ec2ad5d51 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -6,6 +6,8 @@ Version 3.0.0 Unreleased - Remove previously deprecated code. :pr:`2768` +- Deprecate the ``__version__`` attribute. Use feature detection, or + ``importlib.metadata.version("werkzeug")``, instead. :issue:`2770` - ``generate_password_hash`` uses scrypt by default. :issue:`2769` diff --git a/pyproject.toml b/pyproject.toml index 3a1965554..e97c04830 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,6 @@ [project] name = "Werkzeug" +version = "3.0.0.dev" description = "The comprehensive WSGI web application library." readme = "README.rst" license = {file = "LICENSE.rst"} @@ -19,7 +20,6 @@ classifiers = [ ] requires-python = ">=3.8" dependencies = ["MarkupSafe>=2.1.1"] -dynamic = ["version"] [project.urls] Donate = "https://palletsprojects.com/donate" diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py index 0436769c9..57cb7539a 100644 --- a/src/werkzeug/__init__.py +++ b/src/werkzeug/__init__.py @@ -1,6 +1,25 @@ +from __future__ import annotations + +import typing as t + from .serving import run_simple as run_simple from .test import Client as Client from .wrappers import Request as Request from .wrappers import Response as Response -__version__ = "3.0.0.dev" + +def __getattr__(name: str) -> t.Any: + if name == "__version__": + import importlib.metadata + import warnings + + warnings.warn( + "The '__version__' attribute is deprecated and will be removed in" + " Werkzeug 3.1. Use feature detection or" + " 'importlib.metadata.version(\"werkzeug\")' instead.", + DeprecationWarning, + stacklevel=2, + ) + return importlib.metadata.version("werkzeug") + + raise AttributeError(name) diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py index c031dc45e..549a3bd80 100644 --- a/src/werkzeug/serving.py +++ b/src/werkzeug/serving.py @@ -154,9 +154,7 @@ class WSGIRequestHandler(BaseHTTPRequestHandler): @property def server_version(self) -> str: # type: ignore - from . 
import __version__ - - return f"Werkzeug/{__version__}" + return self.server._server_version def make_environ(self) -> WSGIEnvironment: request_url = urlsplit(self.path) @@ -796,6 +794,10 @@ def __init__( else: self.ssl_context = None + import importlib.metadata + + self._server_version = f"Werkzeug/{importlib.metadata.version('werkzeug')}" + def log(self, type: str, message: str, *args: t.Any) -> None: _log(type, message, *args) From 81b879a523e1c05485729a8cdfae812f69500a20 Mon Sep 17 00:00:00 2001 From: Matt Giles Date: Tue, 22 Aug 2023 20:01:33 -0400 Subject: [PATCH 026/159] Update helpful error message for AirPlay collision --- src/werkzeug/serving.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py index 549a3bd80..ff5eb8c66 100644 --- a/src/werkzeug/serving.py +++ b/src/werkzeug/serving.py @@ -762,7 +762,7 @@ def __init__( if sys.platform == "darwin" and port == 5000: print( "On macOS, try disabling the 'AirPlay Receiver' service" - " from System Preferences -> Sharing.", + " from System Preferences -> General -> AirDrop & Handoff.", file=sys.stderr, ) From 2b172cb0a4f32a61c7e64033245e6cb7a287df51 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Sep 2023 16:10:14 +0000 Subject: [PATCH 027/159] Bump actions/checkout from 3.5.3 to 3.6.0 Bumps [actions/checkout](https://github.com/actions/checkout) from 3.5.3 to 3.6.0. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/c85c95e3d7251135ab7dc9ce3241c5835cc595a9...f43a0e5ff2bd294095638e18286ca9a3d1956744) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 2 +- .github/workflows/tests.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 3f368ebe0..9d62d52b3 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,7 +9,7 @@ jobs: outputs: hash: ${{ steps.hash.outputs.hash }} steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 with: python-version: '3.x' diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 8dbd601ad..c1e6ea314 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -34,7 +34,7 @@ jobs: - {name: 'PyPy', python: 'pypy-3.10', os: ubuntu-latest, tox: pypy310} - {name: Typing, python: '3.11', os: ubuntu-latest, tox: typing} steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 with: python-version: ${{ matrix.python }} From ce8cfe7dbb73b56c982a9c74162084cdb284c2f5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Sep 2023 16:10:18 +0000 Subject: [PATCH 028/159] Bump slsa-framework/slsa-github-generator from 1.7.0 to 1.9.0 Bumps [slsa-framework/slsa-github-generator](https://github.com/slsa-framework/slsa-github-generator) from 1.7.0 to 1.9.0. 
- [Release notes](https://github.com/slsa-framework/slsa-github-generator/releases) - [Changelog](https://github.com/slsa-framework/slsa-github-generator/blob/main/CHANGELOG.md) - [Commits](https://github.com/slsa-framework/slsa-github-generator/compare/v1.7.0...v1.9.0) --- updated-dependencies: - dependency-name: slsa-framework/slsa-github-generator dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 3f368ebe0..dcc7a42ca 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -33,7 +33,7 @@ jobs: id-token: write contents: write # Can't pin with hash due to how this workflow works. - uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.7.0 + uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.9.0 with: base64-subjects: ${{ needs.build.outputs.hash }} create-release: From 76a5419d2ee8b7785c0304d58a94d6c0387c976c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Sep 2023 16:10:21 +0000 Subject: [PATCH 029/159] Bump pypa/gh-action-pypi-publish from 1.8.8 to 1.8.10 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.8 to 1.8.10. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/f8c70e705ffc13c3b4d1221169b84f12a75d6ca8...b7f401de30cb6434a1e19f805ff006643653240e) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 3f368ebe0..9502eedea 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -63,10 +63,10 @@ jobs: steps: - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # Try uploading to Test PyPI first, in case something fails. - - uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 + - uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e with: repository-url: https://test.pypi.org/legacy/ packages-dir: artifact/ - - uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 + - uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e with: packages-dir: artifact/ From 4820d8c487e5db9f43645c31c4123fce5ac5ad32 Mon Sep 17 00:00:00 2001 From: Josh Wilson Date: Wed, 30 Aug 2023 16:23:51 -0700 Subject: [PATCH 030/159] Provide elapsed and timestamp info to filename_format This provides the `elapsed` and `time` values to the `ProfileMiddleware(filename_format=...)` function. Prior to this change, one could not replicate the format string format, much less modify how it would render the timestamp or elapsed time values. These new values can be found under the `werkzeug.profiler` key in the WSGI environ dict passed into the `filename_format()` function. 
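For illustration, a callable of this shape could now use those values (a
minimal sketch, not part of the patch; the function name, timestamp format,
and path handling are arbitrary choices):

    import datetime

    def filename_format(environ):
        # "werkzeug.profiler" is populated by ProfilerMiddleware before this
        # callable runs; "time" is a Unix timestamp and "elapsed" is the
        # request duration in milliseconds.
        prof = environ["werkzeug.profiler"]
        stamp = datetime.datetime.fromtimestamp(prof["time"]).strftime("%Y%m%d%H%M%S")
        path = environ.get("PATH_INFO", "").strip("/").replace("/", ".") or "root"
        return f"{stamp}.{environ['REQUEST_METHOD']}.{path}.{prof['elapsed']:.0f}ms.prof"
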
Addresses #2775 --- CHANGES.rst | 3 ++ src/werkzeug/middleware/profiler.py | 19 +++++++++-- tests/middleware/test_profiler.py | 50 +++++++++++++++++++++++++++++ 3 files changed, 69 insertions(+), 3 deletions(-) create mode 100644 tests/middleware/test_profiler.py diff --git a/CHANGES.rst b/CHANGES.rst index ec2ad5d51..f9c997dd8 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -9,6 +9,9 @@ Unreleased - Deprecate the ``__version__`` attribute. Use feature detection, or ``importlib.metadata.version("werkzeug")``, instead. :issue:`2770` - ``generate_password_hash`` uses scrypt by default. :issue:`2769` +- Add the ``"werkzeug.profiler"`` item to the WSGI ``environ`` dictionary + passed to `ProfilerMiddleware`'s `filename_format` function. It contains + the ``elapsed`` and ``time`` values for the profiled request. :issue:`2775` Version 2.3.8 diff --git a/src/werkzeug/middleware/profiler.py b/src/werkzeug/middleware/profiler.py index 2d806154c..1120c83ef 100644 --- a/src/werkzeug/middleware/profiler.py +++ b/src/werkzeug/middleware/profiler.py @@ -44,11 +44,16 @@ class ProfilerMiddleware: - ``{method}`` - The request method; GET, POST, etc. - ``{path}`` - The request path or 'root' should one not exist. - - ``{elapsed}`` - The elapsed time of the request. + - ``{elapsed}`` - The elapsed time of the request in milliseconds. - ``{time}`` - The time of the request. - If it is a callable, it will be called with the WSGI ``environ`` - dict and should return a filename. + If it is a callable, it will be called with the WSGI ``environ`` and + be expected to return a filename string. The ``environ`` dictionary + will also have the ``"werkzeug.profiler"`` key populated with a + dictionary containing the following fields (more may be added in the + future): + - ``{elapsed}`` - The elapsed time of the request in milliseconds. + - ``{time}`` - The time of the request. :param app: The WSGI application to wrap. :param stream: Write stats to this stream. Disable with ``None``. @@ -65,6 +70,10 @@ class ProfilerMiddleware: from werkzeug.middleware.profiler import ProfilerMiddleware app = ProfilerMiddleware(app) + .. versionchanged:: 3.0 + Added the ``"werkzeug.profiler"`` key to the ``filename_format(environ)`` + parameter with the ``elapsed`` and ``time`` fields. + .. versionchanged:: 0.15 Stats are written even if ``profile_dir`` is given, and can be disable by passing ``stream=None``. 
@@ -118,6 +127,10 @@ def runapp() -> None: if self._profile_dir is not None: if callable(self._filename_format): + environ["werkzeug.profiler"] = { + "elapsed": elapsed * 1000.0, + "time": time.time(), + } filename = self._filename_format(environ) else: filename = self._filename_format.format( diff --git a/tests/middleware/test_profiler.py b/tests/middleware/test_profiler.py new file mode 100644 index 000000000..585aeb54b --- /dev/null +++ b/tests/middleware/test_profiler.py @@ -0,0 +1,50 @@ +import datetime +import os +from unittest.mock import ANY +from unittest.mock import MagicMock +from unittest.mock import patch + +from werkzeug.middleware.profiler import Profile +from werkzeug.middleware.profiler import ProfilerMiddleware +from werkzeug.test import Client + + +def dummy_application(environ, start_response): + start_response("200 OK", [("Content-Type", "text/plain")]) + return [b"Foo"] + + +def test_filename_format_function(): + # This should be called once with the generated file name + mock_capture_name = MagicMock() + + def filename_format(env): + now = datetime.datetime.fromtimestamp(env["werkzeug.profiler"]["time"]) + timestamp = now.strftime("%Y-%m-%d:%H:%M:%S") + path = ( + "_".join(token for token in env["PATH_INFO"].split("/") if token) or "ROOT" + ) + elapsed = env["werkzeug.profiler"]["elapsed"] + name = f"{timestamp}.{env['REQUEST_METHOD']}.{path}.{elapsed:.0f}ms.prof" + mock_capture_name(name=name) + return name + + client = Client( + ProfilerMiddleware( + dummy_application, + stream=None, + profile_dir="profiles", + filename_format=filename_format, + ) + ) + + # Replace the Profile class with a function that simulates an __init__() + # call and returns our mock instance. + mock_profile = MagicMock(wraps=Profile()) + mock_profile.dump_stats = MagicMock() + with patch("werkzeug.middleware.profiler.Profile", lambda: mock_profile): + client.get("/foo/bar") + + mock_capture_name.assert_called_once_with(name=ANY) + name = mock_capture_name.mock_calls[0].kwargs["name"] + mock_profile.dump_stats.assert_called_once_with(os.path.join("profiles", name)) From 64275425888b6ca4f5ebdfa1a9df814317718290 Mon Sep 17 00:00:00 2001 From: pgjones Date: Sat, 30 Sep 2023 10:17:13 +0100 Subject: [PATCH 031/159] Default the PathConverter (and descendants) to be non part isolating This is likely to be the expected for anything that is or extends the PathConverter and by adding it explicitly I hope it will cause less confusion, as seen in a recent issue 2783. Note the PathConverter was already non part isolating due to the BaseConverter check. --- CHANGES.rst | 1 + src/werkzeug/routing/converters.py | 1 + 2 files changed, 2 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index f9c997dd8..d4eb01e99 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -12,6 +12,7 @@ Unreleased - Add the ``"werkzeug.profiler"`` item to the WSGI ``environ`` dictionary passed to `ProfilerMiddleware`'s `filename_format` function. It contains the ``elapsed`` and ``time`` values for the profiled request. :issue:`2775` +- Explicitly marked the PathConverter as non path isolating. :pr:`2784` Version 2.3.8 diff --git a/src/werkzeug/routing/converters.py b/src/werkzeug/routing/converters.py index eeb5bbd93..ce01dd1ea 100644 --- a/src/werkzeug/routing/converters.py +++ b/src/werkzeug/routing/converters.py @@ -119,6 +119,7 @@ class PathConverter(BaseConverter): :param map: the :class:`Map`. """ + part_isolating = False regex = "[^/].*?" 
weight = 200 From 726eaa28593d859548da3477859c914732f012ef Mon Sep 17 00:00:00 2001 From: pgjones Date: Sat, 30 Sep 2023 14:53:04 +0100 Subject: [PATCH 032/159] Release version 3.0.0 --- CHANGES.rst | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index d4eb01e99..ef83f2905 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,7 @@ Version 3.0.0 ------------- -Unreleased +Released 2023-09-30 - Remove previously deprecated code. :pr:`2768` - Deprecate the ``__version__`` attribute. Use feature detection, or diff --git a/pyproject.toml b/pyproject.toml index e97c04830..ec69c2015 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.0.0.dev" +version = "3.0.0" description = "The comprehensive WSGI web application library." readme = "README.rst" license = {file = "LICENSE.rst"} From b1916c0c083e0be1c9d887ee2f3d696922bfc5c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Srokosz?= Date: Thu, 12 Oct 2023 18:50:04 +0200 Subject: [PATCH 033/159] Fix: slow multipart parsing for huge files with few CR/LF characters --- src/werkzeug/sansio/multipart.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/werkzeug/sansio/multipart.py b/src/werkzeug/sansio/multipart.py index 380993af7..fc8735378 100644 --- a/src/werkzeug/sansio/multipart.py +++ b/src/werkzeug/sansio/multipart.py @@ -251,12 +251,20 @@ def _parse_data(self, data: bytes, *, start: bool) -> tuple[bytes, int, bool]: else: data_start = 0 - if self.buffer.find(b"--" + self.boundary) == -1: + boundary = b"--" + self.boundary + + if self.buffer.find(boundary) == -1: # No complete boundary in the buffer, but there may be # a partial boundary at the end. As the boundary # starts with either a nl or cr find the earliest and # return up to that as data. data_end = del_index = self.last_newline(data[data_start:]) + data_start + # If amount of data after last newline is far from + # possible length of partial boundary, we should + # assume that there is no partial boundary in the buffer + # and return all pending data. + if (len(data) - data_end) > len(b"\n" + boundary): + data_end = del_index = len(data) more_data = True else: match = self.boundary_re.search(data) From 48a6560a639d1ed56df9bfb8c0e0daae352f8706 Mon Sep 17 00:00:00 2001 From: pgjones Date: Tue, 24 Oct 2023 21:47:00 +0100 Subject: [PATCH 034/159] Release version 3.0.1 --- CHANGES.rst | 8 ++++++++ pyproject.toml | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index ef83f2905..6f801b97c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,13 @@ .. currentmodule:: werkzeug +Version 3.0.1 +------------- + +Released 2023-10-24 + +- Fix slow multipart parsing for large parts potentially enabling DoS + attacks. :cwe:`CWE-407` + Version 3.0.0 ------------- diff --git a/pyproject.toml b/pyproject.toml index ec69c2015..70721a9a4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.0.0" +version = "3.0.1" description = "The comprehensive WSGI web application library." 
readme = "README.rst" license = {file = "LICENSE.rst"} From 7a6eb7d3c9da3b899bb34227bcac10ec80e5f515 Mon Sep 17 00:00:00 2001 From: Adrian Moennich Date: Wed, 8 Nov 2023 19:24:46 +0100 Subject: [PATCH 035/159] Start version 2.3.9 --- CHANGES.rst | 6 ++++++ src/werkzeug/__init__.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index b348506d0..73052b513 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,11 @@ .. currentmodule:: werkzeug +Version 2.3.9 +------------- + +Unreleased + + Version 2.3.8 ------------- diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py index 0a472ae7d..7103d3d42 100644 --- a/src/werkzeug/__init__.py +++ b/src/werkzeug/__init__.py @@ -3,4 +3,4 @@ from .wrappers import Request as Request from .wrappers import Response as Response -__version__ = "2.3.8" +__version__ = "2.3.9.dev" From cebdef9800abe6497d10ebbf087a830e3340af2e Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 9 Nov 2023 17:31:49 -0800 Subject: [PATCH 036/159] update read the docs env --- .readthedocs.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 346900b20..5ffe32b36 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -1,8 +1,8 @@ version: 2 build: - os: ubuntu-20.04 + os: ubuntu-22.04 tools: - python: "3.10" + python: "3.12" python: install: - requirements: requirements/docs.txt From a629c442d0ff0ccb8629285f86d2d8ca4d4522c2 Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 9 Nov 2023 17:36:04 -0800 Subject: [PATCH 037/159] update description --- .github/workflows/lock.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/lock.yaml b/.github/workflows/lock.yaml index e962fd041..9825178c3 100644 --- a/.github/workflows/lock.yaml +++ b/.github/workflows/lock.yaml @@ -1,8 +1,8 @@ -name: 'Lock threads' -# Lock closed issues that have not received any further activity for -# two weeks. This does not close open issues, only humans may do that. -# We find that it is easier to respond to new issues with fresh examples -# rather than continuing discussions on old issues. +name: 'Lock inactive closed issues' +# Lock closed issues that have not received any further activity for two weeks. +# This does not close open issues, only humans may do that. We find that it is +# easier to respond to new issues with fresh examples rather than continuing +# discussions on old issues. 
on: schedule: From b0f3fa1b4791d222ec0a8ff96ecc12dbab8054b5 Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 9 Nov 2023 17:39:03 -0800 Subject: [PATCH 038/159] update python version matrix --- .github/workflows/tests.yaml | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index c1e6ea314..450bcaf28 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -9,9 +9,6 @@ on: - '*.md' - '*.rst' pull_request: - branches: - - main - - '*.x' paths-ignore: - 'docs/**' - '*.md' @@ -24,15 +21,15 @@ jobs: fail-fast: false matrix: include: - - {name: Linux, python: '3.11', os: ubuntu-latest, tox: py311} - - {name: Windows, python: '3.11', os: windows-latest, tox: py311} - - {name: Mac, python: '3.11', os: macos-latest, tox: py311} - - {name: '3.12-dev', python: '3.12-dev', os: ubuntu-latest, tox: py312} + - {name: Linux, python: '3.12', os: ubuntu-latest, tox: py312} + - {name: Windows, python: '3.12', os: windows-latest, tox: py312} + - {name: Mac, python: '3.12', os: macos-latest, tox: py312} + - {name: '3.11', python: '3.11', os: ubuntu-latest, tox: py311} - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310} - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39} - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38} - {name: 'PyPy', python: 'pypy-3.10', os: ubuntu-latest, tox: pypy310} - - {name: Typing, python: '3.11', os: ubuntu-latest, tox: typing} + - {name: Typing, python: '3.12', os: ubuntu-latest, tox: typing} steps: - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 From 431299c3cb7c19f0b401ca5fc8a4f9b93ba95d30 Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 9 Nov 2023 17:48:05 -0800 Subject: [PATCH 039/159] use pip-compile instead of pip-compile-multi --- .github/workflows/publish.yaml | 19 ++-- .github/workflows/tests.yaml | 8 +- .pre-commit-config.yaml | 4 - requirements/build.txt | 13 ++- requirements/dev.in | 2 +- requirements/dev.txt | 162 ++++++++++++++++++++++++++------- requirements/docs.in | 4 +- requirements/docs.txt | 44 +++++---- requirements/tests.in | 2 +- requirements/tests.txt | 39 ++++---- requirements/typing.txt | 25 +++-- 11 files changed, 207 insertions(+), 115 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 05681f54c..59dbfe8f6 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,12 +9,12 @@ jobs: outputs: hash: ${{ steps.hash.outputs.hash }} steps: - - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 with: python-version: '3.x' - cache: 'pip' - cache-dependency-path: 'requirements/*.txt' + cache: pip + cache-dependency-path: requirements*/*.txt - run: pip install -r requirements/build.txt # Use the commit date instead of the current date during the build. 
- run: echo "SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct)" >> $GITHUB_ENV @@ -23,11 +23,11 @@ jobs: - name: generate hash id: hash run: cd dist && echo "hash=$(sha256sum * | base64 -w0)" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce + - uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 with: path: ./dist provenance: - needs: ['build'] + needs: [build] permissions: actions: read id-token: write @@ -39,7 +39,7 @@ jobs: create-release: # Upload the sdist, wheels, and provenance to a GitHub release. They remain # available as build artifacts for a while as well. - needs: ['provenance'] + needs: [provenance] runs-on: ubuntu-latest permissions: contents: write @@ -53,16 +53,15 @@ jobs: env: GH_TOKEN: ${{ github.token }} publish-pypi: - needs: ['provenance'] + needs: [provenance] # Wait for approval before attempting to upload to PyPI. This allows reviewing the # files in the draft release. - environment: 'publish' + environment: publish runs-on: ubuntu-latest permissions: id-token: write steps: - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a - # Try uploading to Test PyPI first, in case something fails. - uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e with: repository-url: https://test.pypi.org/legacy/ diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 450bcaf28..9e0ef4583 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -31,14 +31,14 @@ jobs: - {name: 'PyPy', python: 'pypy-3.10', os: ubuntu-latest, tox: pypy310} - {name: Typing, python: '3.12', os: ubuntu-latest, tox: typing} steps: - - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 with: python-version: ${{ matrix.python }} cache: 'pip' - cache-dependency-path: 'requirements/*.txt' + cache-dependency-path: requirements*/*.txt - name: cache mypy - uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 + uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 with: path: ./.mypy_cache key: mypy|${{ matrix.python }}|${{ hashFiles('pyproject.toml') }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6425015cf..a8757cc36 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,10 +29,6 @@ repos: additional_dependencies: - flake8-bugbear - flake8-implicit-str-concat - - repo: https://github.com/peterdemin/pip-compile-multi - rev: v2.6.3 - hooks: - - id: pip-compile-multi-verify - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: diff --git a/requirements/build.txt b/requirements/build.txt index 196545d0e..6bfd666c5 100644 --- a/requirements/build.txt +++ b/requirements/build.txt @@ -1,13 +1,12 @@ -# SHA1:80754af91bfb6d1073585b046fe0a474ce868509 # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: # -# pip-compile-multi +# pip-compile build.in # -build==0.10.0 - # via -r requirements/build.in -packaging==23.1 +build==1.0.3 + # via -r build.in +packaging==23.2 # via build pyproject-hooks==1.0.0 # via build diff --git a/requirements/dev.in b/requirements/dev.in index 99f5942f8..2588467c1 100644 --- a/requirements/dev.in +++ b/requirements/dev.in @@ -1,6 +1,6 @@ -r docs.in 
-r tests.in -r typing.in -pip-compile-multi +pip-tools pre-commit tox diff --git a/requirements/dev.txt b/requirements/dev.txt index ed462080a..e7428e41c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,62 +1,158 @@ -# SHA1:54b5b77ec8c7a0064ffa93b2fd16cb0130ba177c # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: # -# pip-compile-multi +# pip-compile dev.in # --r docs.txt --r tests.txt --r typing.txt -build==0.10.0 +alabaster==0.7.13 + # via sphinx +babel==2.13.1 + # via sphinx +build==1.0.3 # via pip-tools -cachetools==5.3.1 +cachetools==5.3.2 # via tox -cfgv==3.3.1 +certifi==2023.7.22 + # via requests +cffi==1.16.0 + # via cryptography +cfgv==3.4.0 # via pre-commit -chardet==5.1.0 +chardet==5.2.0 # via tox -click==8.1.3 - # via - # pip-compile-multi - # pip-tools +charset-normalizer==3.3.2 + # via requests +click==8.1.7 + # via pip-tools colorama==0.4.6 # via tox -distlib==0.3.6 +cryptography==41.0.5 + # via -r tests.in +distlib==0.3.7 # via virtualenv -filelock==3.12.2 +docutils==0.20.1 + # via sphinx +ephemeral-port-reserve==1.1.4 + # via -r tests.in +filelock==3.13.1 # via # tox # virtualenv -identify==2.5.24 +greenlet==3.0.1 + # via -r tests.in +identify==2.5.31 # via pre-commit +idna==3.4 + # via requests +imagesize==1.4.1 + # via sphinx +iniconfig==2.0.0 + # via pytest +jinja2==3.1.2 + # via sphinx +markupsafe==2.1.3 + # via jinja2 +mypy==1.6.1 + # via -r typing.in +mypy-extensions==1.0.0 + # via mypy nodeenv==1.8.0 # via pre-commit -pip-compile-multi==2.6.3 - # via -r requirements/dev.in -pip-tools==6.13.0 - # via pip-compile-multi -platformdirs==3.8.0 +packaging==23.2 + # via + # build + # pallets-sphinx-themes + # pyproject-api + # pytest + # sphinx + # tox +pallets-sphinx-themes==2.1.1 + # via -r docs.in +pip-tools==7.3.0 + # via -r dev.in +platformdirs==3.11.0 # via # tox # virtualenv -pre-commit==3.3.3 - # via -r requirements/dev.in -pyproject-api==1.5.2 +pluggy==1.3.0 + # via + # pytest + # tox +pre-commit==3.5.0 + # via -r dev.in +psutil==5.9.6 + # via pytest-xprocess +pycparser==2.21 + # via cffi +pygments==2.16.1 + # via sphinx +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build -pyyaml==6.0 +pytest==7.4.3 + # via + # -r tests.in + # pytest-timeout + # pytest-xprocess +pytest-timeout==2.2.0 + # via -r tests.in +pytest-xprocess==0.23.0 + # via -r tests.in +pyyaml==6.0.1 # via pre-commit -toposort==1.10 - # via pip-compile-multi -tox==4.6.3 - # via -r requirements/dev.in -virtualenv==20.23.1 +requests==2.31.0 + # via sphinx +snowballstemmer==2.2.0 + # via sphinx +sphinx==7.2.6 + # via + # -r docs.in + # pallets-sphinx-themes + # sphinx-issues + # sphinxcontrib-applehelp + # sphinxcontrib-devhelp + # sphinxcontrib-htmlhelp + # sphinxcontrib-log-cabinet + # sphinxcontrib-qthelp + # sphinxcontrib-serializinghtml +sphinx-issues==3.0.1 + # via -r docs.in +sphinxcontrib-applehelp==1.0.7 + # via sphinx +sphinxcontrib-devhelp==1.0.5 + # via sphinx +sphinxcontrib-htmlhelp==2.0.4 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-log-cabinet==1.0.1 + # via -r docs.in +sphinxcontrib-qthelp==1.0.6 + # via sphinx +sphinxcontrib-serializinghtml==1.1.9 + # via sphinx +tox==4.11.3 + # via -r dev.in +types-contextvars==2.4.7.3 + # via -r typing.in +types-dataclasses==0.6.6 + # via -r typing.in +types-setuptools==68.2.0.1 + # via -r typing.in +typing-extensions==4.8.0 + # via mypy +urllib3==2.0.7 + # via requests 
+virtualenv==20.24.6 # via # pre-commit # tox -wheel==0.40.0 +watchdog==3.0.0 + # via + # -r tests.in + # -r typing.in +wheel==0.41.3 # via pip-tools # The following packages are considered to be unsafe in a requirements file: diff --git a/requirements/docs.in b/requirements/docs.in index 7ec501b6d..88fd7721d 100644 --- a/requirements/docs.in +++ b/requirements/docs.in @@ -1,4 +1,4 @@ -Pallets-Sphinx-Themes -Sphinx +pallets-sphinx-themes +sphinx sphinx-issues sphinxcontrib-log-cabinet diff --git a/requirements/docs.txt b/requirements/docs.txt index e125c59a4..731ad1538 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,17 +1,16 @@ -# SHA1:45c590f97fe95b8bdc755eef796e91adf5fbe4ea # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: # -# pip-compile-multi +# pip-compile docs.in # alabaster==0.7.13 # via sphinx -babel==2.12.1 +babel==2.13.1 # via sphinx -certifi==2023.5.7 +certifi==2023.7.22 # via requests -charset-normalizer==3.1.0 +charset-normalizer==3.3.2 # via requests docutils==0.20.1 # via sphinx @@ -23,39 +22,44 @@ jinja2==3.1.2 # via sphinx markupsafe==2.1.3 # via jinja2 -packaging==23.1 +packaging==23.2 # via # pallets-sphinx-themes # sphinx pallets-sphinx-themes==2.1.1 - # via -r requirements/docs.in -pygments==2.15.1 + # via -r docs.in +pygments==2.16.1 # via sphinx requests==2.31.0 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.0.1 +sphinx==7.2.6 # via - # -r requirements/docs.in + # -r docs.in # pallets-sphinx-themes # sphinx-issues + # sphinxcontrib-applehelp + # sphinxcontrib-devhelp + # sphinxcontrib-htmlhelp # sphinxcontrib-log-cabinet + # sphinxcontrib-qthelp + # sphinxcontrib-serializinghtml sphinx-issues==3.0.1 - # via -r requirements/docs.in -sphinxcontrib-applehelp==1.0.4 + # via -r docs.in +sphinxcontrib-applehelp==1.0.7 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==1.0.5 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-htmlhelp==2.0.4 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-log-cabinet==1.0.1 - # via -r requirements/docs.in -sphinxcontrib-qthelp==1.0.3 + # via -r docs.in +sphinxcontrib-qthelp==1.0.6 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==1.1.9 # via sphinx -urllib3==2.0.3 +urllib3==2.0.7 # via requests diff --git a/requirements/tests.in b/requirements/tests.in index 3ced491be..279f90f75 100644 --- a/requirements/tests.in +++ b/requirements/tests.in @@ -2,6 +2,6 @@ pytest pytest-timeout pytest-xprocess cryptography -greenlet ; python_version < "3.11" +greenlet watchdog ephemeral-port-reserve diff --git a/requirements/tests.txt b/requirements/tests.txt index 057d62859..0b2198adc 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -1,36 +1,35 @@ -# SHA1:42b4e3e66395275e048d9a92c294b2c650393866 # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: # -# pip-compile-multi +# pip-compile tests.in # -cffi==1.15.1 +cffi==1.16.0 # via cryptography -cryptography==41.0.1 - # via -r requirements/tests.in +cryptography==41.0.5 + # via -r tests.in ephemeral-port-reserve==1.1.4 - # via -r requirements/tests.in + # via -r tests.in +greenlet==3.0.1 + # via -r tests.in iniconfig==2.0.0 # via pytest -packaging==23.1 +packaging==23.2 # via pytest -pluggy==1.2.0 +pluggy==1.3.0 # via pytest -psutil==5.9.5 - # via pytest-xprocess 
-py==1.11.0 +psutil==5.9.6 # via pytest-xprocess pycparser==2.21 # via cffi -pytest==7.4.0 +pytest==7.4.3 # via - # -r requirements/tests.in + # -r tests.in # pytest-timeout # pytest-xprocess -pytest-timeout==2.1.0 - # via -r requirements/tests.in -pytest-xprocess==0.22.2 - # via -r requirements/tests.in +pytest-timeout==2.2.0 + # via -r tests.in +pytest-xprocess==0.23.0 + # via -r tests.in watchdog==3.0.0 - # via -r requirements/tests.in + # via -r tests.in diff --git a/requirements/typing.txt b/requirements/typing.txt index 99c46d2e0..cb8f567d2 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,21 +1,20 @@ -# SHA1:162796b1b3ac7a29da65fe0e32278f14b68ed8c8 # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: # -# pip-compile-multi +# pip-compile typing.in # -mypy==1.4.1 - # via -r requirements/typing.in +mypy==1.6.1 + # via -r typing.in mypy-extensions==1.0.0 # via mypy -types-contextvars==2.4.7.2 - # via -r requirements/typing.in +types-contextvars==2.4.7.3 + # via -r typing.in types-dataclasses==0.6.6 - # via -r requirements/typing.in -types-setuptools==68.0.0.0 - # via -r requirements/typing.in -typing-extensions==4.6.3 + # via -r typing.in +types-setuptools==68.2.0.1 + # via -r typing.in +typing-extensions==4.8.0 # via mypy watchdog==3.0.0 - # via -r requirements/typing.in + # via -r typing.in From 71d220e3a91f40eb5b176485f05ffadd3061279a Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 9 Nov 2023 17:49:01 -0800 Subject: [PATCH 040/159] enable grouped updates for actions and python --- .github/dependabot.yml | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 90f94bc32..1f47f125e 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,9 +1,18 @@ version: 2 updates: -- package-ecosystem: "github-actions" - directory: "/" - schedule: - interval: "monthly" - day: "monday" - time: "16:00" - timezone: "UTC" + - package-ecosystem: github-actions + directory: / + schedule: + interval: monthly + groups: + github-actions: + patterns: + - '*' + - package-ecosystem: pip + directory: /requirements/ + schedule: + interval: monthly + groups: + python-requirements: + patterns: + - '*' From b145c68eca56cae27b1c128bd8e386cf7f4710d2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 10 Nov 2023 15:36:19 +0000 Subject: [PATCH 041/159] Bump the python-requirements group in /requirements with 1 update Bumps the python-requirements group in /requirements with 1 update: [mypy](https://github.com/python/mypy). - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.6.1...v1.7.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-requirements ... 
Signed-off-by: dependabot[bot] --- requirements/dev.txt | 2 +- requirements/typing.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index e7428e41c..ebd4aba35 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -52,7 +52,7 @@ jinja2==3.1.2 # via sphinx markupsafe==2.1.3 # via jinja2 -mypy==1.6.1 +mypy==1.7.0 # via -r typing.in mypy-extensions==1.0.0 # via mypy diff --git a/requirements/typing.txt b/requirements/typing.txt index cb8f567d2..446122950 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -4,7 +4,7 @@ # # pip-compile typing.in # -mypy==1.6.1 +mypy==1.7.0 # via -r typing.in mypy-extensions==1.0.0 # via mypy From af893ea8adccf3a0bffd1793a9448b6389ffdd2e Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 10 Nov 2023 07:42:07 -0800 Subject: [PATCH 042/159] fix mypy findings --- src/werkzeug/datastructures/auth.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/werkzeug/datastructures/auth.py b/src/werkzeug/datastructures/auth.py index 494576ded..c742ae560 100644 --- a/src/werkzeug/datastructures/auth.py +++ b/src/werkzeug/datastructures/auth.py @@ -172,7 +172,7 @@ def __init__( token: str | None = None, ): self._type = auth_type.lower() - self._parameters: dict[str, str | None] = CallbackDict( # type: ignore[misc] + self._parameters: dict[str, str | None] = CallbackDict( values, lambda _: self._trigger_on_update() ) self._token = token @@ -201,9 +201,7 @@ def parameters(self) -> dict[str, str | None]: @parameters.setter def parameters(self, value: dict[str, str]) -> None: - self._parameters = CallbackDict( # type: ignore[misc] - value, lambda _: self._trigger_on_update() - ) + self._parameters = CallbackDict(value, lambda _: self._trigger_on_update()) self._trigger_on_update() @property From 0b4c686eec54ccfc7be59015624b3cae0195b6ae Mon Sep 17 00:00:00 2001 From: James Addison Date: Mon, 2 Oct 2023 14:26:28 +0100 Subject: [PATCH 043/159] Fixup: unpick two changelog entries for v2.3.7 that had become entwined --- CHANGES.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 73052b513..2437fbaf3 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -21,8 +21,8 @@ Version 2.3.7 Released 2023-08-14 - Use ``flit_core`` instead of ``setuptools`` as build backend. -- Fix parsing of multipart bodies. :issue:`2734` Adjust index of last newline - in data start. :issue:`2761` +- Fix parsing of multipart bodies. :issue:`2734` +- Adjust index of last newline in data start. :issue:`2761` - Parsing ints from header values strips spacing first. :issue:`2734` - Fix empty file streaming when testing. :issue:`2740` - Clearer error message when URL rule does not start with slash. 
:pr:`2750` From 0a5777c9515e116fe22c1bb79596e79cd63876e7 Mon Sep 17 00:00:00 2001 From: David Lord Date: Sat, 11 Nov 2023 08:23:39 -0800 Subject: [PATCH 044/159] use ruff lint and format --- .flake8 | 29 ---------------- .gitignore | 31 +++++------------- .pre-commit-config.yaml | 40 ++++------------------- pyproject.toml | 25 ++++++++++++++ src/werkzeug/datastructures/auth.py | 10 ++++-- src/werkzeug/datastructures/mixins.pyi | 2 +- src/werkzeug/datastructures/structures.py | 2 +- src/werkzeug/debug/__init__.py | 4 +-- src/werkzeug/debug/repr.py | 6 ++-- src/werkzeug/debug/tbtools.py | 4 ++- src/werkzeug/formparser.py | 13 +++++--- src/werkzeug/local.py | 4 +-- src/werkzeug/middleware/lint.py | 38 +++++++++++++++------ src/werkzeug/routing/rules.py | 4 +-- src/werkzeug/sansio/request.py | 3 +- src/werkzeug/security.py | 4 +-- src/werkzeug/urls.py | 5 ++- src/werkzeug/utils.py | 2 +- src/werkzeug/wrappers/__init__.py | 2 +- tests/live_apps/data_app.py | 2 +- tests/sansio/test_multipart.py | 12 ++----- tests/sansio/test_utils.py | 6 ++-- tests/test_formparser.py | 2 +- tests/test_local.py | 4 +-- tests/test_routing.py | 2 +- tests/test_urls.py | 2 +- tests/test_wrappers.py | 14 ++++---- 27 files changed, 121 insertions(+), 151 deletions(-) delete mode 100644 .flake8 diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 6ac59c8e2..000000000 --- a/.flake8 +++ /dev/null @@ -1,29 +0,0 @@ -[flake8] -extend-select = - # bugbear - B - # bugbear opinions - B9 - # implicit str concat - ISC -extend-ignore = - # slice notation whitespace, invalid - E203 - # import at top, too many circular import fixes - E402 - # line length, handled by bugbear B950 - E501 - # bare except, handled by bugbear B001 - E722 - # zip with strict=, requires python >= 3.10 - B905 - # string formatting opinion, B028 renamed to B907 - B028 - B907 -# up to 88 allowed by bugbear B950 -max-line-length = 80 -per-file-ignores = - # __init__ exports names - **/__init__.py: F401 - # LocalProxy assigns lambdas - src/werkzeug/local.py: E731 diff --git a/.gitignore b/.gitignore index aecea1a7b..cd9550b9e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,26 +1,11 @@ -MANIFEST -build -dist -/src/Werkzeug.egg-info -*.pyc -*.pyo -.venv -.DS_Store -docs/_build -bench/a -bench/b -.tox +.idea/ +.vscode/ +__pycache__/ +.pytest_cache/ +.tox/ .coverage .coverage.* -coverage_out -htmlcov -.cache -.xprocess -.hypothesis -test_uwsgi_failed -.idea -.pytest_cache/ +htmlcov/ +docs/_build/ +dist/ venv/ -.vscode -.mypy_cache/ -.dmypy.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6425015cf..447fd5869 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,42 +1,16 @@ ci: - autoupdate_branch: "2.3.x" autoupdate_schedule: monthly repos: - - repo: https://github.com/asottile/pyupgrade - rev: v3.10.1 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.5 hooks: - - id: pyupgrade - args: ["--py38-plus"] - - repo: https://github.com/asottile/reorder-python-imports - rev: v3.10.0 - hooks: - - id: reorder-python-imports - name: Reorder Python imports (src, tests) - files: "^(?!examples/)" - args: ["--application-directories", ".:src"] - - id: reorder-python-imports - name: Reorder Python imports (examples) - files: "^examples/" - args: ["--application-directories", "examples"] - - repo: https://github.com/psf/black - rev: 23.7.0 - hooks: - - id: black - - repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 - hooks: - - id: flake8 - additional_dependencies: - - flake8-bugbear - - 
flake8-implicit-str-concat - - repo: https://github.com/peterdemin/pip-compile-multi - rev: v2.6.3 - hooks: - - id: pip-compile-multi-verify + - id: ruff + - id: ruff-format - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: + - id: check-merge-conflict + - id: debug-statements - id: fix-byte-order-marker - id: trailing-whitespace - id: end-of-file-fixer - exclude: "^tests/.*.http$" diff --git a/pyproject.toml b/pyproject.toml index 3a1965554..3c3e766e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -102,3 +102,28 @@ module = [ "xprocess.*", ] ignore_missing_imports = true + +[tool.ruff] +extend-exclude = ["examples/"] +src = ["src"] +fix = false +show-fixes = true +show-source = true + +[tool.ruff.lint] +select = [ + "B", # flake8-bugbear + "E", # pycodestyle error + "F", # pyflakes + #"I", # isort + "UP", # pyupgrade + "W", # pycodestyle warning +] +ignore = [ + "E402" # allow circular imports at end of file +] +ignore-init-module-imports = true + +[tool.ruff.lint.isort] +force-single-line = true +order-by-type = false diff --git a/src/werkzeug/datastructures/auth.py b/src/werkzeug/datastructures/auth.py index 2f2515020..04ebfb2e7 100644 --- a/src/werkzeug/datastructures/auth.py +++ b/src/werkzeug/datastructures/auth.py @@ -128,7 +128,7 @@ def to_header(self) -> str: if self.type == "basic": value = base64.b64encode( f"{self.username}:{self.password}".encode() - ).decode("utf8") + ).decode("ascii") return f"Basic {value}" if self.token is not None: @@ -269,7 +269,9 @@ def set_basic(self, realm: str = "authentication required") -> None: """ warnings.warn( "The 'set_basic' method is deprecated and will be removed in Werkzeug 3.0." - " Create and assign an instance instead." + " Create and assign an instance instead.", + DeprecationWarning, + stacklevel=2, ) self._type = "basic" dict.clear(self.parameters) # type: ignore[arg-type] @@ -296,7 +298,9 @@ def set_digest( """ warnings.warn( "The 'set_digest' method is deprecated and will be removed in Werkzeug 3.0." - " Create and assign an instance instead." + " Create and assign an instance instead.", + DeprecationWarning, + stacklevel=2, ) self._type = "digest" dict.clear(self.parameters) # type: ignore[arg-type] diff --git a/src/werkzeug/datastructures/mixins.pyi b/src/werkzeug/datastructures/mixins.pyi index 74ed4b81e..40453f703 100644 --- a/src/werkzeug/datastructures/mixins.pyi +++ b/src/werkzeug/datastructures/mixins.pyi @@ -21,7 +21,7 @@ class ImmutableListMixin(list[V]): _hash_cache: int | None def __hash__(self) -> int: ... # type: ignore def __delitem__(self, key: SupportsIndex | slice) -> NoReturn: ... - def __iadd__(self, other: t.Any) -> NoReturn: ... # type: ignore + def __iadd__(self, other: Any) -> NoReturn: ... # type: ignore def __imul__(self, other: SupportsIndex) -> NoReturn: ... def __setitem__(self, key: int | slice, value: V) -> NoReturn: ... # type: ignore def append(self, value: V) -> NoReturn: ... 
diff --git a/src/werkzeug/datastructures/structures.py b/src/werkzeug/datastructures/structures.py index 7ea7bee28..e863cd8a4 100644 --- a/src/werkzeug/datastructures/structures.py +++ b/src/werkzeug/datastructures/structures.py @@ -146,7 +146,7 @@ class MultiDict(TypeConversionDict): def __init__(self, mapping=None): if isinstance(mapping, MultiDict): - dict.__init__(self, ((k, l[:]) for k, l in mapping.lists())) + dict.__init__(self, ((k, vs[:]) for k, vs in mapping.lists())) elif isinstance(mapping, dict): tmp = {} for key, value in mapping.items(): diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py index 3b04b534e..f8756d890 100644 --- a/src/werkzeug/debug/__init__.py +++ b/src/werkzeug/debug/__init__.py @@ -110,7 +110,7 @@ def _generate() -> str | bytes | None: guid, guid_type = winreg.QueryValueEx(rk, "MachineGuid") if guid_type == winreg.REG_SZ: - return guid.encode("utf-8") + return guid.encode() return guid except OSError: @@ -193,7 +193,7 @@ def get_pin_and_cookie_name( if not bit: continue if isinstance(bit, str): - bit = bit.encode("utf-8") + bit = bit.encode() h.update(bit) h.update(b"cookiesalt") diff --git a/src/werkzeug/debug/repr.py b/src/werkzeug/debug/repr.py index 3bf15a77a..1dcdd67be 100644 --- a/src/werkzeug/debug/repr.py +++ b/src/werkzeug/debug/repr.py @@ -80,9 +80,7 @@ def __call__(self, topic: t.Any | None = None) -> None: helper = _Helper() -def _add_subclass_info( - inner: str, obj: object, base: t.Type | tuple[t.Type, ...] -) -> str: +def _add_subclass_info(inner: str, obj: object, base: type | tuple[type, ...]) -> str: if isinstance(base, tuple): for cls in base: if type(obj) is cls: @@ -96,7 +94,7 @@ def _add_subclass_info( def _sequence_repr_maker( - left: str, right: str, base: t.Type, limit: int = 8 + left: str, right: str, base: type, limit: int = 8 ) -> t.Callable[[DebugReprGenerator, t.Iterable, bool], str]: def proxy(self: DebugReprGenerator, obj: t.Iterable, recursive: bool) -> str: if recursive: diff --git a/src/werkzeug/debug/tbtools.py b/src/werkzeug/debug/tbtools.py index c45f56ef0..f9be17c42 100644 --- a/src/werkzeug/debug/tbtools.py +++ b/src/werkzeug/debug/tbtools.py @@ -265,7 +265,9 @@ def all_tracebacks( @cached_property def all_frames(self) -> list[DebugFrameSummary]: return [ - f for _, te in self.all_tracebacks for f in te.stack # type: ignore[misc] + f # type: ignore[misc] + for _, te in self.all_tracebacks + for f in te.stack ] def render_traceback_text(self) -> str: diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py index 25ef0d61b..4dcaea2f6 100644 --- a/src/werkzeug/formparser.py +++ b/src/werkzeug/formparser.py @@ -225,11 +225,14 @@ def __init__( def get_parse_func( self, mimetype: str, options: dict[str, str] - ) -> None | ( - t.Callable[ - [FormDataParser, t.IO[bytes], str, int | None, dict[str, str]], - t_parse_result, - ] + ) -> ( + None + | ( + t.Callable[ + [FormDataParser, t.IO[bytes], str, int | None, dict[str, str]], + t_parse_result, + ] + ) ): warnings.warn( "The 'get_parse_func' method is deprecated and will be" diff --git a/src/werkzeug/local.py b/src/werkzeug/local.py index fba80e974..525ac0c80 100644 --- a/src/werkzeug/local.py +++ b/src/werkzeug/local.py @@ -556,9 +556,7 @@ def _get_current_object() -> T: # __weakref__ (__getattr__) # __init_subclass__ (proxying metaclass not supported) # __prepare__ (metaclass) - __class__ = _ProxyLookup( - fallback=lambda self: type(self), is_attr=True - ) # type: ignore + __class__ = _ProxyLookup(fallback=lambda self: type(self), 
is_attr=True) # type: ignore[assignment] __instancecheck__ = _ProxyLookup(lambda self, other: isinstance(other, self)) __subclasscheck__ = _ProxyLookup(lambda self, other: issubclass(other, self)) # __class_getitem__ triggered through __getitem__ diff --git a/src/werkzeug/middleware/lint.py b/src/werkzeug/middleware/lint.py index 462959943..8c858673b 100644 --- a/src/werkzeug/middleware/lint.py +++ b/src/werkzeug/middleware/lint.py @@ -37,7 +37,7 @@ class HTTPWarning(Warning): """Warning class for HTTP warnings.""" -def check_type(context: str, obj: object, need: t.Type = str) -> None: +def check_type(context: str, obj: object, need: type = str) -> None: if type(obj) is not need: warn( f"{context!r} requires {need.__name__!r}, got {type(obj).__name__!r}.", @@ -180,30 +180,44 @@ def close(self) -> None: key ): warn( - f"Entity header {key!r} found in 304 response.", HTTPWarning + f"Entity header {key!r} found in 304 response.", + HTTPWarning, + stacklevel=2, ) if bytes_sent: - warn("304 responses must not have a body.", HTTPWarning) + warn( + "304 responses must not have a body.", + HTTPWarning, + stacklevel=2, + ) elif 100 <= status_code < 200 or status_code == 204: if content_length != 0: warn( f"{status_code} responses must have an empty content length.", HTTPWarning, + stacklevel=2, ) if bytes_sent: - warn(f"{status_code} responses must not have a body.", HTTPWarning) + warn( + f"{status_code} responses must not have a body.", + HTTPWarning, + stacklevel=2, + ) elif content_length is not None and content_length != bytes_sent: warn( "Content-Length and the number of bytes sent to the" " client do not match.", WSGIWarning, + stacklevel=2, ) def __del__(self) -> None: if not self.closed: try: warn( - "Iterator was garbage collected before it was closed.", WSGIWarning + "Iterator was garbage collected before it was closed.", + WSGIWarning, + stacklevel=2, ) except Exception: pass @@ -236,7 +250,7 @@ def __init__(self, app: WSGIApplication) -> None: self.app = app def check_environ(self, environ: WSGIEnvironment) -> None: - if type(environ) is not dict: + if type(environ) is not dict: # noqa: E721 warn( "WSGI environment is not a standard Python dict.", WSGIWarning, @@ -304,14 +318,14 @@ def check_start_response( if status_code < 100: warn("Status code < 100 detected.", WSGIWarning, stacklevel=3) - if type(headers) is not list: + if type(headers) is not list: # noqa: E721 warn("Header list is not a list.", WSGIWarning, stacklevel=3) for item in headers: if type(item) is not tuple or len(item) != 2: warn("Header items must be 2-item tuples.", WSGIWarning, stacklevel=3) name, value = item - if type(name) is not str or type(value) is not str: + if type(name) is not str or type(value) is not str: # noqa: E721 warn( "Header keys and values must be strings.", WSGIWarning, stacklevel=3 ) @@ -402,13 +416,17 @@ def checking_start_response( ) if kwargs: - warn("'start_response' does not take keyword arguments.", WSGIWarning) + warn( + "'start_response' does not take keyword arguments.", + WSGIWarning, + stacklevel=2, + ) status: str = args[0] headers: list[tuple[str, str]] = args[1] exc_info: None | ( tuple[type[BaseException], BaseException, TracebackType] - ) = (args[2] if len(args) == 3 else None) + ) = args[2] if len(args) == 3 else None headers_set[:] = self.check_start_response(status, headers, exc_info) return GuardedWrite(start_response(status, headers, exc_info), chunks) diff --git a/src/werkzeug/routing/rules.py b/src/werkzeug/routing/rules.py index 904a02258..a10fa7365 100644 --- 
a/src/werkzeug/routing/rules.py +++ b/src/werkzeug/routing/rules.py @@ -108,7 +108,7 @@ def _pythonize(value: str) -> None | bool | int | float | str: return str(value) -def parse_converter_args(argstr: str) -> tuple[t.Tuple, dict[str, t.Any]]: +def parse_converter_args(argstr: str) -> tuple[tuple[t.Any, ...], dict[str, t.Any]]: argstr += "," args = [] kwargs = {} @@ -566,7 +566,7 @@ def get_converter( self, variable_name: str, converter_name: str, - args: t.Tuple, + args: tuple[t.Any, ...], kwargs: t.Mapping[str, t.Any], ) -> BaseConverter: """Looks up the converter for the given parameter. diff --git a/src/werkzeug/sansio/request.py b/src/werkzeug/sansio/request.py index 0bcda90b2..def060553 100644 --- a/src/werkzeug/sansio/request.py +++ b/src/werkzeug/sansio/request.py @@ -1,6 +1,5 @@ from __future__ import annotations -import typing as t import warnings from datetime import datetime from urllib.parse import parse_qsl @@ -174,7 +173,7 @@ def url_charset(self, value: str) -> None: #: (for example for :attr:`access_list`). #: #: .. versionadded:: 0.6 - list_storage_class: type[t.List] = ImmutableList + list_storage_class: type[list] = ImmutableList user_agent_class: type[UserAgent] = UserAgent """The class used and returned by the :attr:`user_agent` property to diff --git a/src/werkzeug/security.py b/src/werkzeug/security.py index 282c4fd8c..359870d7f 100644 --- a/src/werkzeug/security.py +++ b/src/werkzeug/security.py @@ -33,8 +33,8 @@ def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]: return password, method method, *args = method.split(":") - salt = salt.encode("utf-8") - password = password.encode("utf-8") + salt = salt.encode() + password = password.encode() if method == "scrypt": if not args: diff --git a/src/werkzeug/urls.py b/src/werkzeug/urls.py index f5760eb4c..b3d05ed75 100644 --- a/src/werkzeug/urls.py +++ b/src/werkzeug/urls.py @@ -1123,7 +1123,10 @@ def url_decode( separator = separator.encode(charset or "ascii") # type: ignore return cls( _url_decode_impl( - s.split(separator), charset, include_empty, errors # type: ignore + s.split(separator), # type: ignore[arg-type] + charset, + include_empty, + errors, ) ) diff --git a/src/werkzeug/utils.py b/src/werkzeug/utils.py index 785ac28b9..32ca9dad6 100644 --- a/src/werkzeug/utils.py +++ b/src/werkzeug/utils.py @@ -514,7 +514,7 @@ def send_file( if isinstance(etag, str): rv.set_etag(etag) elif etag and path is not None: - check = adler32(path.encode("utf-8")) & 0xFFFFFFFF + check = adler32(path.encode()) & 0xFFFFFFFF rv.set_etag(f"{mtime}-{size}-{check}") if conditional: diff --git a/src/werkzeug/wrappers/__init__.py b/src/werkzeug/wrappers/__init__.py index b8c45d71c..b36f228f2 100644 --- a/src/werkzeug/wrappers/__init__.py +++ b/src/werkzeug/wrappers/__init__.py @@ -1,3 +1,3 @@ from .request import Request as Request from .response import Response as Response -from .response import ResponseStream +from .response import ResponseStream as ResponseStream diff --git a/tests/live_apps/data_app.py b/tests/live_apps/data_app.py index 561390a1c..9b2e78b91 100644 --- a/tests/live_apps/data_app.py +++ b/tests/live_apps/data_app.py @@ -11,7 +11,7 @@ def app(request: Request) -> Response: { "environ": request.environ, "form": request.form.to_dict(), - "files": {k: v.read().decode("utf8") for k, v in request.files.items()}, + "files": {k: v.read().decode() for k, v in request.files.items()}, }, default=lambda x: str(x), ), diff --git a/tests/sansio/test_multipart.py b/tests/sansio/test_multipart.py index 
35109d4bd..cf36fefd6 100644 --- a/tests/sansio/test_multipart.py +++ b/tests/sansio/test_multipart.py @@ -24,11 +24,7 @@ def test_decoder_simple() -> None: asdasd -----------------------------9704338192090380615194531385$-- - """.replace( - "\n", "\r\n" - ).encode( - "utf-8" - ) + """.replace("\n", "\r\n").encode() decoder.receive_data(data) decoder.receive_data(None) events = [decoder.next_event()] @@ -147,11 +143,7 @@ def test_empty_field() -> None: Content-Type: text/plain; charset="UTF-8" --foo-- - """.replace( - "\n", "\r\n" - ).encode( - "utf-8" - ) + """.replace("\n", "\r\n").encode() decoder.receive_data(data) decoder.receive_data(None) events = [decoder.next_event()] diff --git a/tests/sansio/test_utils.py b/tests/sansio/test_utils.py index 04d02e44c..d43de66c2 100644 --- a/tests/sansio/test_utils.py +++ b/tests/sansio/test_utils.py @@ -1,7 +1,5 @@ from __future__ import annotations -import typing as t - import pytest from werkzeug.sansio.utils import get_content_length @@ -28,8 +26,8 @@ ) def test_get_host( scheme: str, - host_header: t.Optional[str], - server: t.Optional[t.Tuple[str, t.Optional[int]]], + host_header: str | None, + server: tuple[str, int | None] | None, expected: str, ) -> None: assert get_host(scheme, host_header, server) == expected diff --git a/tests/test_formparser.py b/tests/test_formparser.py index 1dcb167ef..1ecb01208 100644 --- a/tests/test_formparser.py +++ b/tests/test_formparser.py @@ -273,7 +273,7 @@ def test_basic(self): content_type=f'multipart/form-data; boundary="{boundary}"', content_length=len(data), ) as response: - assert response.get_data() == repr(text).encode("utf-8") + assert response.get_data() == repr(text).encode() @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") def test_ie7_unc_path(self): diff --git a/tests/test_local.py b/tests/test_local.py index 2af69d2d6..2250a5bee 100644 --- a/tests/test_local.py +++ b/tests/test_local.py @@ -170,7 +170,7 @@ class SomeClassWithWrapped: _cv_val.set(42) with pytest.raises(AttributeError): - proxy.__wrapped__ + proxy.__wrapped__ # noqa: B018 ns = local.Local(_cv_ns) ns.foo = SomeClassWithWrapped() @@ -179,7 +179,7 @@ class SomeClassWithWrapped: assert ns("foo").__wrapped__ == "wrapped" with pytest.raises(AttributeError): - ns("bar").__wrapped__ + ns("bar").__wrapped__ # noqa: B018 def test_proxy_doc(): diff --git a/tests/test_routing.py b/tests/test_routing.py index 65d2a5f90..416fb4fc5 100644 --- a/tests/test_routing.py +++ b/tests/test_routing.py @@ -791,7 +791,7 @@ def __init__(self, url_map, *items): self.regex = items[0] # This is a regex pattern with nested groups - DATE_PATTERN = r"((\d{8}T\d{6}([.,]\d{1,3})?)|(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}([.,]\d{1,3})?))Z" # noqa: B950 + DATE_PATTERN = r"((\d{8}T\d{6}([.,]\d{1,3})?)|(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}([.,]\d{1,3})?))Z" # noqa: E501 map = r.Map( [ diff --git a/tests/test_urls.py b/tests/test_urls.py index 0b0f2aeed..bafccd185 100644 --- a/tests/test_urls.py +++ b/tests/test_urls.py @@ -396,5 +396,5 @@ def test_url_parse_does_not_clear_warnings_registry(recwarn): warnings.simplefilter("ignore", DeprecationWarning) for _ in range(2): urls.url_parse("http://example.org/") - warnings.warn("test warning") + warnings.warn("test warning", stacklevel=1) assert len(recwarn) == 1 diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py index 8a91aefc1..d7bc12b95 100644 --- a/tests/test_wrappers.py +++ b/tests/test_wrappers.py @@ -1037,25 +1037,25 @@ class MyRequest(wrappers.Request): parameter_storage_class = 
dict req = MyRequest.from_values("/?foo=baz", headers={"Cookie": "foo=bar"}) - assert type(req.cookies) is dict + assert type(req.cookies) is dict # noqa: E721 assert req.cookies == {"foo": "bar"} - assert type(req.access_route) is list + assert type(req.access_route) is list # noqa: E721 - assert type(req.args) is dict - assert type(req.values) is CombinedMultiDict + assert type(req.args) is dict # noqa: E721 + assert type(req.values) is CombinedMultiDict # noqa: E721 assert req.values["foo"] == "baz" req = wrappers.Request.from_values(headers={"Cookie": "foo=bar;foo=baz"}) - assert type(req.cookies) is ImmutableMultiDict + assert type(req.cookies) is ImmutableMultiDict # noqa: E721 assert req.cookies.to_dict() == {"foo": "bar"} # it is possible to have multiple cookies with the same name assert req.cookies.getlist("foo") == ["bar", "baz"] - assert type(req.access_route) is ImmutableList + assert type(req.access_route) is ImmutableList # noqa: E721 MyRequest.list_storage_class = tuple req = MyRequest.from_values() - assert type(req.access_route) is tuple + assert type(req.access_route) is tuple # noqa: E721 def test_response_headers_passthrough(): From 8e4d0a8e257d3c9684751c22f25ca7f2e886d22d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 11:35:12 +0000 Subject: [PATCH 045/159] Bump the python-requirements group in /requirements with 4 updates Bumps the python-requirements group in /requirements with 4 updates: [cryptography](https://github.com/pyca/cryptography), [mypy](https://github.com/python/mypy), [types-setuptools](https://github.com/python/typeshed) and [tox](https://github.com/tox-dev/tox). Updates `cryptography` from 41.0.5 to 41.0.7 - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/41.0.5...41.0.7) Updates `mypy` from 1.7.0 to 1.7.1 - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.7.0...v1.7.1) Updates `types-setuptools` from 68.2.0.1 to 69.0.0.0 - [Commits](https://github.com/python/typeshed/commits) Updates `tox` from 4.11.3 to 4.11.4 - [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/4.11.3...4.11.4) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: types-setuptools dependency-type: direct:production update-type: version-update:semver-major dependency-group: python-requirements - dependency-name: tox dependency-type: direct:development update-type: version-update:semver-patch dependency-group: python-requirements ... 
Signed-off-by: dependabot[bot] --- requirements/dev.txt | 8 ++++---- requirements/tests.txt | 2 +- requirements/typing.txt | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index ebd4aba35..104a2de9d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -26,7 +26,7 @@ click==8.1.7 # via pip-tools colorama==0.4.6 # via tox -cryptography==41.0.5 +cryptography==41.0.7 # via -r tests.in distlib==0.3.7 # via virtualenv @@ -52,7 +52,7 @@ jinja2==3.1.2 # via sphinx markupsafe==2.1.3 # via jinja2 -mypy==1.7.0 +mypy==1.7.1 # via -r typing.in mypy-extensions==1.0.0 # via mypy @@ -132,13 +132,13 @@ sphinxcontrib-qthelp==1.0.6 # via sphinx sphinxcontrib-serializinghtml==1.1.9 # via sphinx -tox==4.11.3 +tox==4.11.4 # via -r dev.in types-contextvars==2.4.7.3 # via -r typing.in types-dataclasses==0.6.6 # via -r typing.in -types-setuptools==68.2.0.1 +types-setuptools==69.0.0.0 # via -r typing.in typing-extensions==4.8.0 # via mypy diff --git a/requirements/tests.txt b/requirements/tests.txt index 0b2198adc..599b8431f 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -6,7 +6,7 @@ # cffi==1.16.0 # via cryptography -cryptography==41.0.5 +cryptography==41.0.7 # via -r tests.in ephemeral-port-reserve==1.1.4 # via -r tests.in diff --git a/requirements/typing.txt b/requirements/typing.txt index 446122950..41a2f9f3f 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -4,7 +4,7 @@ # # pip-compile typing.in # -mypy==1.7.0 +mypy==1.7.1 # via -r typing.in mypy-extensions==1.0.0 # via mypy @@ -12,7 +12,7 @@ types-contextvars==2.4.7.3 # via -r typing.in types-dataclasses==0.6.6 # via -r typing.in -types-setuptools==68.2.0.1 +types-setuptools==69.0.0.0 # via -r typing.in typing-extensions==4.8.0 # via mypy From 87516db246d1f83b1973b762f17c530e36971429 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 11:35:54 +0000 Subject: [PATCH 046/159] Bump the github-actions group with 2 updates Bumps the github-actions group with 2 updates: [dessant/lock-threads](https://github.com/dessant/lock-threads) and [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish). Updates `dessant/lock-threads` from 4.0.1 to 5.0.1 - [Release notes](https://github.com/dessant/lock-threads/releases) - [Changelog](https://github.com/dessant/lock-threads/blob/main/CHANGELOG.md) - [Commits](https://github.com/dessant/lock-threads/compare/be8aa5be94131386884a6da4189effda9b14aa21...1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771) Updates `pypa/gh-action-pypi-publish` from 1.8.10 to 1.8.11 - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/b7f401de30cb6434a1e19f805ff006643653240e...2f6f737ca5f74c637829c0f5c3acd0e29ea5e8bf) --- updated-dependencies: - dependency-name: dessant/lock-threads dependency-type: direct:production update-type: version-update:semver-major dependency-group: github-actions - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch dependency-group: github-actions ... 
Signed-off-by: dependabot[bot] --- .github/workflows/lock.yaml | 2 +- .github/workflows/publish.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/lock.yaml b/.github/workflows/lock.yaml index 9825178c3..ea0dca01a 100644 --- a/.github/workflows/lock.yaml +++ b/.github/workflows/lock.yaml @@ -19,7 +19,7 @@ jobs: lock: runs-on: ubuntu-latest steps: - - uses: dessant/lock-threads@be8aa5be94131386884a6da4189effda9b14aa21 + - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 with: issue-inactive-days: 14 pr-inactive-days: 14 diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 59dbfe8f6..08953509a 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -62,10 +62,10 @@ jobs: id-token: write steps: - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a - - uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e + - uses: pypa/gh-action-pypi-publish@2f6f737ca5f74c637829c0f5c3acd0e29ea5e8bf with: repository-url: https://test.pypi.org/legacy/ packages-dir: artifact/ - - uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e + - uses: pypa/gh-action-pypi-publish@2f6f737ca5f74c637829c0f5c3acd0e29ea5e8bf with: packages-dir: artifact/ From d0a22b2c1624c5f698303ee056d40fc809a998e4 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 4 Dec 2023 20:29:40 +0000 Subject: [PATCH 047/159] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.5 → v0.1.6](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.5...v0.1.6) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 447fd5869..970b9e637 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ ci: autoupdate_schedule: monthly repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.5 + rev: v0.1.6 hooks: - id: ruff - id: ruff-format From b5b423c8eacf3ac3d5fcb2806310df9c3de6658b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jan 2024 11:37:27 +0000 Subject: [PATCH 048/159] Bump the python-requirements group in /requirements with 4 updates Bumps the python-requirements group in /requirements with 4 updates: [greenlet](https://github.com/python-greenlet/greenlet), [pytest](https://github.com/pytest-dev/pytest), [mypy](https://github.com/python/mypy) and [pre-commit](https://github.com/pre-commit/pre-commit). 
Updates `greenlet` from 3.0.1 to 3.0.3 - [Changelog](https://github.com/python-greenlet/greenlet/blob/master/CHANGES.rst) - [Commits](https://github.com/python-greenlet/greenlet/compare/3.0.1...3.0.3) Updates `pytest` from 7.4.3 to 7.4.4 - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.4.3...7.4.4) Updates `mypy` from 1.7.1 to 1.8.0 - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.7.1...v1.8.0) Updates `pre-commit` from 3.5.0 to 3.6.0 - [Release notes](https://github.com/pre-commit/pre-commit/releases) - [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) - [Commits](https://github.com/pre-commit/pre-commit/compare/v3.5.0...v3.6.0) --- updated-dependencies: - dependency-name: greenlet dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-requirements - dependency-name: pre-commit dependency-type: direct:development update-type: version-update:semver-minor dependency-group: python-requirements ... Signed-off-by: dependabot[bot] --- requirements/dev.txt | 8 ++++---- requirements/tests.txt | 4 ++-- requirements/typing.txt | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 104a2de9d..892693f06 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -38,7 +38,7 @@ filelock==3.13.1 # via # tox # virtualenv -greenlet==3.0.1 +greenlet==3.0.3 # via -r tests.in identify==2.5.31 # via pre-commit @@ -52,7 +52,7 @@ jinja2==3.1.2 # via sphinx markupsafe==2.1.3 # via jinja2 -mypy==1.7.1 +mypy==1.8.0 # via -r typing.in mypy-extensions==1.0.0 # via mypy @@ -78,7 +78,7 @@ pluggy==1.3.0 # via # pytest # tox -pre-commit==3.5.0 +pre-commit==3.6.0 # via -r dev.in psutil==5.9.6 # via pytest-xprocess @@ -90,7 +90,7 @@ pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build -pytest==7.4.3 +pytest==7.4.4 # via # -r tests.in # pytest-timeout diff --git a/requirements/tests.txt b/requirements/tests.txt index 599b8431f..784b1f243 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -10,7 +10,7 @@ cryptography==41.0.7 # via -r tests.in ephemeral-port-reserve==1.1.4 # via -r tests.in -greenlet==3.0.1 +greenlet==3.0.3 # via -r tests.in iniconfig==2.0.0 # via pytest @@ -22,7 +22,7 @@ psutil==5.9.6 # via pytest-xprocess pycparser==2.21 # via cffi -pytest==7.4.3 +pytest==7.4.4 # via # -r tests.in # pytest-timeout diff --git a/requirements/typing.txt b/requirements/typing.txt index 41a2f9f3f..c4467e775 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -4,7 +4,7 @@ # # pip-compile typing.in # -mypy==1.7.1 +mypy==1.8.0 # via -r typing.in mypy-extensions==1.0.0 # via mypy From 817aac4d2c71b83752e451d23f96ac26cfd7bbcd Mon Sep 17 00:00:00 2001 From: pabepadu <45884742+pabepadu@users.noreply.github.com> Date: Thu, 4 Jan 2024 04:01:48 +0100 Subject: [PATCH 049/159] fix: update urlparse doc --- docs/tutorial.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 943787a7c..9cb5aef47 
100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -123,7 +123,7 @@ if they are not used right away, to keep it from being confusing:: import os import redis - from werkzeug.urls import url_parse + from urllib.parse import urlparse from werkzeug.wrappers import Request, Response from werkzeug.routing import Map, Rule from werkzeug.exceptions import HTTPException, NotFound @@ -308,7 +308,7 @@ we need to write a function and a helper method. For URL validation this is good enough:: def is_valid_url(url): - parts = url_parse(url) + parts = urlparse(url) return parts.scheme in ('http', 'https') For inserting the URL, all we need is this little method on our class:: From 70ad4d6d9b330e8dc780e53470e261cb096fd5fc Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Wed, 24 Jan 2024 12:48:59 +0000 Subject: [PATCH 050/159] Use more precise type for formparser.MultiPartParser.parse return Without this change, `pyright` with `typeCheckingMode` set to `strict` reports: ``` error: Type of "fields" is partially unknown Type of "fields" is "MultiDict[Unknown, Unknown]" (reportUnknownVariableType) ``` when I use this method. --- CHANGES.rst | 8 ++++++++ src/werkzeug/formparser.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 186e8f580..b23bffcd4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,13 @@ .. currentmodule:: werkzeug +Version 3.0.2 +------------- + +Unreleased + +- Make the return type of ``MultiPartParser.parse`` more + precise. :issue:`2840` + Version 3.0.1 ------------- diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py index ee30666dd..5117a2673 100644 --- a/src/werkzeug/formparser.py +++ b/src/werkzeug/formparser.py @@ -352,7 +352,7 @@ def start_file_streaming( def parse( self, stream: t.IO[bytes], boundary: bytes, content_length: int | None - ) -> tuple[MultiDict, MultiDict]: + ) -> tuple[MultiDict[str, str], MultiDict[str, FileStorage]]: current_part: Field | File container: t.IO[bytes] | list[bytes] _write: t.Callable[[bytes], t.Any] From 4e5bdca7f8227d10cae828f8064fb98190ace4aa Mon Sep 17 00:00:00 2001 From: pgjones Date: Thu, 29 Feb 2024 21:03:16 +0000 Subject: [PATCH 051/159] Make the exception tests more robust This should ensure that the tests work with Pytest 8 onwards. The issue appears to be that __subclasses__ "returns a list of all those references still alive." which could include the RequestRedirect. If it does include RequestRedirect the tests will fail as it requires an argument to be constructed. Note this test is not meant for RequestRedirect. 
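A minimal sketch of the difference (illustrative only, not taken from this patch), assuming
`werkzeug.routing` has been imported so that `RequestRedirect` is alive:

```
from werkzeug.exceptions import HTTPException, NotFound, default_exceptions
from werkzeug.routing import RequestRedirect  # its __init__ requires a new_url argument

# __subclasses__() reports whichever direct subclasses are currently alive, so it
# can include RequestRedirect, and instantiating every entry with no arguments
# would then raise a TypeError.
alive = [cls for cls in HTTPException.__subclasses__() if cls.code]
print(RequestRedirect in alive)  # True here, because of the import above

# default_exceptions only maps status codes to the standard error classes, all of
# which can be constructed without arguments.
print(RequestRedirect in default_exceptions.values())  # False
print(default_exceptions[404] is NotFound)  # True
```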
--- tests/test_exceptions.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index e4ee58633..91ad1a7ce 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -7,7 +7,7 @@ from werkzeug import exceptions from werkzeug.datastructures import Headers from werkzeug.datastructures import WWWAuthenticate -from werkzeug.exceptions import HTTPException +from werkzeug.exceptions import default_exceptions, HTTPException from werkzeug.wrappers import Response @@ -138,7 +138,7 @@ def test_retry_after_mixin(cls, value, expect): @pytest.mark.parametrize( "cls", sorted( - (e for e in HTTPException.__subclasses__() if e.code and e.code >= 400), + (e for e in default_exceptions.values() if e.code and e.code >= 400), key=lambda e: e.code, # type: ignore ), ) @@ -158,7 +158,7 @@ def test_description_none(): @pytest.mark.parametrize( "cls", sorted( - (e for e in HTTPException.__subclasses__() if e.code), + (e for e in default_exceptions.values() if e.code), key=lambda e: e.code, # type: ignore ), ) From 7ab3823fb620d29bcfeeff7499dd9a64ad90f360 Mon Sep 17 00:00:00 2001 From: Kiran Jonnalagadda Date: Mon, 8 Jan 2024 15:32:22 +0530 Subject: [PATCH 052/159] Fix: Use `issubclass` instead of `isinstance` Bug described at https://github.com/pallets/werkzeug/issues/2831 --- src/werkzeug/test.py | 6 ++++-- tests/test_test.py | 22 ++++++++++++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py index 7b5899a97..694e5d8e5 100644 --- a/src/werkzeug/test.py +++ b/src/werkzeug/test.py @@ -809,10 +809,12 @@ def __init__( if response_wrapper in {None, Response}: response_wrapper = TestResponse - elif not isinstance(response_wrapper, TestResponse): + elif response_wrapper is not None and not issubclass( + response_wrapper, TestResponse + ): response_wrapper = type( "WrapperTestResponse", - (TestResponse, response_wrapper), # type: ignore + (TestResponse, response_wrapper), {}, ) diff --git a/tests/test_test.py b/tests/test_test.py index c7f21fa11..d317d69c9 100644 --- a/tests/test_test.py +++ b/tests/test_test.py @@ -16,6 +16,7 @@ from werkzeug.test import EnvironBuilder from werkzeug.test import run_wsgi_app from werkzeug.test import stream_encode_multipart +from werkzeug.test import TestResponse from werkzeug.utils import redirect from werkzeug.wrappers import Request from werkzeug.wrappers import Response @@ -903,3 +904,24 @@ def test_no_content_type_header_addition(): c = Client(no_response_headers_app) response = c.open() assert response.headers == Headers([("Content-Length", "8")]) + + +def test_client_response_wrapper(): + class CustomResponse(Response): + pass + + class CustomTestResponse(TestResponse, Response): + pass + + c1 = Client(Response(), CustomResponse) + r1 = c1.open() + + assert isinstance(r1, CustomResponse) + assert type(r1) is not CustomResponse # Got subclassed + assert issubclass(type(r1), CustomResponse) + + c2 = Client(Response(), CustomTestResponse) + r2 = c2.open() + + assert isinstance(r2, CustomTestResponse) + assert type(r2) is CustomTestResponse # Did not get subclassed From 4c09d1b3b08deb939803a4beb53483cbc54dfb8d Mon Sep 17 00:00:00 2001 From: pgjones Date: Sun, 3 Mar 2024 16:01:03 +0000 Subject: [PATCH 053/159] Add missing CHANGES entry for #2832 --- CHANGES.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES.rst b/CHANGES.rst index b23bffcd4..1dd640415 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,7 @@ Version 3.0.2 
Unreleased +- Fix response_wrapper type check in test client. :issue:`2831` - Make the return type of ``MultiPartParser.parse`` more precise. :issue:`2840` From 356dea16b3af390cbfddee85f68366ee82f974c8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 11:15:00 +0000 Subject: [PATCH 054/159] Bump the github-actions group with 5 updates Bumps the github-actions group with 5 updates: | Package | From | To | | --- | --- | --- | | [actions/setup-python](https://github.com/actions/setup-python) | `4.7.1` | `5.0.0` | | [actions/upload-artifact](https://github.com/actions/upload-artifact) | `3.1.3` | `4.3.1` | | [actions/download-artifact](https://github.com/actions/download-artifact) | `3.0.2` | `4.1.3` | | [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) | `1.8.11` | `1.8.12` | | [actions/cache](https://github.com/actions/cache) | `3.3.2` | `4.0.1` | Updates `actions/setup-python` from 4.7.1 to 5.0.0 - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236...0a5c61591373683505ea898e09a3ea4f39ef2b9c) Updates `actions/upload-artifact` from 3.1.3 to 4.3.1 - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/a8a3f3ad30e3422c9c7b888a15615d19a852ae32...5d5d22a31266ced268874388b861e4b58bb5c2f3) Updates `actions/download-artifact` from 3.0.2 to 4.1.3 - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/9bc31d5ccc31df68ecc42ccf4149144866c47d8a...87c55149d96e628cc2ef7e6fc2aab372015aec85) Updates `pypa/gh-action-pypi-publish` from 1.8.11 to 1.8.12 - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/2f6f737ca5f74c637829c0f5c3acd0e29ea5e8bf...e53eb8b103ffcb59469888563dc324e3c8ba6f06) Updates `actions/cache` from 3.3.2 to 4.0.1 - [Release notes](https://github.com/actions/cache/releases) - [Changelog](https://github.com/actions/cache/blob/main/RELEASES.md) - [Commits](https://github.com/actions/cache/compare/704facf57e6136b1bc63b828d79edcd491f0ee84...ab5e6d0c87105b4c9c2047343972218f562e4319) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major dependency-group: github-actions - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-major dependency-group: github-actions - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-major dependency-group: github-actions - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch dependency-group: github-actions - dependency-name: actions/cache dependency-type: direct:production update-type: version-update:semver-major dependency-group: github-actions ... 
Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 12 ++++++------ .github/workflows/tests.yaml | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 08953509a..d7aee0c80 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -10,7 +10,7 @@ jobs: hash: ${{ steps.hash.outputs.hash }} steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 + - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c with: python-version: '3.x' cache: pip @@ -23,7 +23,7 @@ jobs: - name: generate hash id: hash run: cd dist && echo "hash=$(sha256sum * | base64 -w0)" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 + - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 with: path: ./dist provenance: @@ -44,7 +44,7 @@ jobs: permissions: contents: write steps: - - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a + - uses: actions/download-artifact@87c55149d96e628cc2ef7e6fc2aab372015aec85 - name: create release run: > gh release create --draft --repo ${{ github.repository }} @@ -61,11 +61,11 @@ jobs: permissions: id-token: write steps: - - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a - - uses: pypa/gh-action-pypi-publish@2f6f737ca5f74c637829c0f5c3acd0e29ea5e8bf + - uses: actions/download-artifact@87c55149d96e628cc2ef7e6fc2aab372015aec85 + - uses: pypa/gh-action-pypi-publish@e53eb8b103ffcb59469888563dc324e3c8ba6f06 with: repository-url: https://test.pypi.org/legacy/ packages-dir: artifact/ - - uses: pypa/gh-action-pypi-publish@2f6f737ca5f74c637829c0f5c3acd0e29ea5e8bf + - uses: pypa/gh-action-pypi-publish@e53eb8b103ffcb59469888563dc324e3c8ba6f06 with: packages-dir: artifact/ diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 9e0ef4583..142732b15 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -32,13 +32,13 @@ jobs: - {name: Typing, python: '3.12', os: ubuntu-latest, tox: typing} steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 + - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c with: python-version: ${{ matrix.python }} cache: 'pip' cache-dependency-path: requirements*/*.txt - name: cache mypy - uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 + uses: actions/cache@ab5e6d0c87105b4c9c2047343972218f562e4319 with: path: ./.mypy_cache key: mypy|${{ matrix.python }}|${{ hashFiles('pyproject.toml') }} From f516c4005c7c4510b61ae07969450771b929809d Mon Sep 17 00:00:00 2001 From: Sympatron GmbH <35803463+Sympatron@users.noreply.github.com> Date: Fri, 26 Jan 2024 00:27:29 +0100 Subject: [PATCH 055/159] Handle TypeError in TypeConversionDict TypeErrors are now handled in the same manner as ValueErrors. fixes #2842 --- CHANGES.rst | 2 ++ src/werkzeug/datastructures/structures.py | 10 +++++++--- tests/test_datastructures.py | 3 ++- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 1dd640415..3e9993488 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,8 @@ Version 3.0.2 Unreleased +- Fix handling of TypeError in TypeConversionDict.get() to match + ValueErrors. :issue:`2843` - Fix response_wrapper type check in test client. 
:issue:`2831` - Make the return type of ``MultiPartParser.parse`` more precise. :issue:`2840` diff --git a/src/werkzeug/datastructures/structures.py b/src/werkzeug/datastructures/structures.py index e863cd8a4..4279ceb98 100644 --- a/src/werkzeug/datastructures/structures.py +++ b/src/werkzeug/datastructures/structures.py @@ -70,8 +70,12 @@ def get(self, key, default=None, type=None): be looked up. If not further specified `None` is returned. :param type: A callable that is used to cast the value in the - :class:`MultiDict`. If a :exc:`ValueError` is raised - by this callable the default value is returned. + :class:`MultiDict`. If a :exc:`ValueError` or a + :exc:`TypeError` is raised by this callable the default + value is returned. + + .. versionchanged:: 3.0.2 + Returns the default value on :exc:`TypeError`, too. """ try: rv = self[key] @@ -80,7 +84,7 @@ def get(self, key, default=None, type=None): if type is not None: try: rv = type(rv) - except ValueError: + except (ValueError, TypeError): rv = default return rv diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 5206aa6a2..64330e1e6 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -550,8 +550,9 @@ def test_value_conversion(self): assert d.get("foo", type=int) == 1 def test_return_default_when_conversion_is_not_possible(self): - d = self.storage_class(foo="bar") + d = self.storage_class(foo="bar", baz=None) assert d.get("foo", default=-1, type=int) == -1 + assert d.get("baz", default=-1, type=int) == -1 def test_propagate_exceptions_in_conversion(self): d = self.storage_class(foo="bar") From 0b472374af1ab91000ea244a0da44d49c04c7cce Mon Sep 17 00:00:00 2001 From: pgjones Date: Sun, 3 Mar 2024 16:38:25 +0000 Subject: [PATCH 056/159] Fix issue with repeated-slash requests redirecting Previously if a request had repeated slashes it could match a single slash route and hence return a redirect response even if merge_slashes was False. Additionally setting the merge_slashes attribute of the map after initialisation had no affect, compounding this problem. --- CHANGES.rst | 2 ++ src/werkzeug/routing/map.py | 9 ++++++++- src/werkzeug/routing/matcher.py | 2 +- tests/test_routing.py | 4 ++++ 4 files changed, 15 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 3e9993488..c7f5aeba5 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,8 @@ Version 3.0.2 Unreleased +- Ensure setting merge_slashes to False results in NotFound for + repeated-slash requests against single slash routes. :issue:`2834` - Fix handling of TypeError in TypeConversionDict.get() to match ValueErrors. :issue:`2843` - Fix response_wrapper type check in test client. 
:issue:`2831` diff --git a/src/werkzeug/routing/map.py b/src/werkzeug/routing/map.py index 76bbe2f3b..87b83a54c 100644 --- a/src/werkzeug/routing/map.py +++ b/src/werkzeug/routing/map.py @@ -109,7 +109,6 @@ def __init__( self.default_subdomain = default_subdomain self.strict_slashes = strict_slashes - self.merge_slashes = merge_slashes self.redirect_defaults = redirect_defaults self.host_matching = host_matching @@ -123,6 +122,14 @@ def __init__( for rulefactory in rules or (): self.add(rulefactory) + @property + def merge_slashes(self) -> bool: + return self._matcher.merge_slashes + + @merge_slashes.setter + def merge_slashes(self, value: bool) -> None: + self._matcher.merge_slashes = value + def is_endpoint_expecting(self, endpoint: str, *arguments: str) -> bool: """Iterate over all rules and check if the endpoint expects the arguments provided. This is for example useful if you have diff --git a/src/werkzeug/routing/matcher.py b/src/werkzeug/routing/matcher.py index 0d1210a67..1fd00efca 100644 --- a/src/werkzeug/routing/matcher.py +++ b/src/werkzeug/routing/matcher.py @@ -177,7 +177,7 @@ def _match( rv = _match(self._root, [domain, *path.split("/")], []) except SlashRequired: raise RequestPath(f"{path}/") from None - if rv is None: + if rv is None or rv[0].merge_slashes is False: raise NoMatch(have_match_for, websocket_mismatch) else: raise RequestPath(f"{path}") diff --git a/tests/test_routing.py b/tests/test_routing.py index 416fb4fc5..5291348c0 100644 --- a/tests/test_routing.py +++ b/tests/test_routing.py @@ -95,6 +95,7 @@ def test_merge_slashes_match(): r.Rule("/yes/tail/", endpoint="yes_tail"), r.Rule("/with/", endpoint="with_path"), r.Rule("/no//merge", endpoint="no_merge", merge_slashes=False), + r.Rule("/no/merging", endpoint="no_merging", merge_slashes=False), ] ) adapter = url_map.bind("localhost", "/") @@ -124,6 +125,9 @@ def test_merge_slashes_match(): assert adapter.match("/no//merge")[0] == "no_merge" + assert adapter.match("/no/merging")[0] == "no_merging" + pytest.raises(NotFound, lambda: adapter.match("/no//merging")) + @pytest.mark.parametrize( ("path", "expected"), From 667066592174ac90e4372407638a81d3f6444a0a Mon Sep 17 00:00:00 2001 From: Alistair Lynn <159782921+alynn-coefficient@users.noreply.github.com> Date: Mon, 4 Mar 2024 10:13:23 +0000 Subject: [PATCH 057/159] Use Python 3 print() function in Werkzeug quickstart --- docs/quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/quickstart.rst b/docs/quickstart.rst index 0f3714e6e..7424f0d6b 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -141,7 +141,7 @@ the quality, the best item being the first: 'text/html' >>> 'application/xhtml+xml' in request.accept_mimetypes True ->>> print request.accept_mimetypes["application/json"] +>>> print(request.accept_mimetypes["application/json"]) 0.8 The same works for languages: From 564397967adee74f4212d7bbab83da47ce0bb190 Mon Sep 17 00:00:00 2001 From: pgjones Date: Tue, 27 Feb 2024 21:01:01 +0000 Subject: [PATCH 058/159] Support Cookie CHIPS CHIPS, Cookies Having Independent Partitioned State, allows for cookies to be opted into partitioned storage which is especially useful for thrid party cookies. --- CHANGES.rst | 8 ++++++++ src/werkzeug/http.py | 10 ++++++++++ tests/test_http.py | 8 ++++++++ 3 files changed, 26 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 186e8f580..ca8571713 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,13 @@ .. 
currentmodule:: werkzeug +Version 3.1.0 +------------- + +Unreleased + +- Support Cookie CHIPS (Partitioned Cookies). :issue:`2797` + + Version 3.0.1 ------------- diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py index 8280f51fa..4ead0165a 100644 --- a/src/werkzeug/http.py +++ b/src/werkzeug/http.py @@ -1216,6 +1216,7 @@ def dump_cookie( sync_expires: bool = True, max_size: int = 4093, samesite: str | None = None, + partitioned: bool = False, ) -> str: """Create a Set-Cookie header without the ``Set-Cookie`` prefix. @@ -1252,9 +1253,14 @@ def dump_cookie( `_. Set to 0 to disable this check. :param samesite: Limits the scope of the cookie such that it will only be attached to requests if those requests are same-site. + :param partitioned: Opts the cookie into partitioned storage. This + will also set secure to True .. _`cookie`: http://browsercookielimits.squawky.net/ + .. versionchanged:: 3.1 + The ``partitioned`` parameter was added. + .. versionchanged:: 3.0 Passing bytes, and the ``charset`` parameter, were removed. @@ -1298,6 +1304,9 @@ def dump_cookie( if samesite not in {"Strict", "Lax", "None"}: raise ValueError("SameSite must be 'Strict', 'Lax', or 'None'.") + if partitioned: + secure = True + # Quote value if it contains characters not allowed by RFC 6265. Slash-escape with # three octal digits, which matches http.cookies, although the RFC suggests base64. if not _cookie_no_quote_re.fullmatch(value): @@ -1319,6 +1328,7 @@ def dump_cookie( ("HttpOnly", httponly), ("Path", path), ("SameSite", samesite), + ("Partitioned", partitioned), ): if v is None or v is False: continue diff --git a/tests/test_http.py b/tests/test_http.py index bbd51ba33..1cf1613da 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -576,6 +576,14 @@ def test_cookie_samesite_invalid(self): with pytest.raises(ValueError): http.dump_cookie("foo", "bar", samesite="invalid") + def test_cookie_partitioned(self): + value = http.dump_cookie("foo", "bar", partitioned=True, secure=True) + assert value == "foo=bar; Secure; Path=/; Partitioned" + + def test_cookie_partitioned_sets_secure(self): + value = http.dump_cookie("foo", "bar", partitioned=True, secure=False) + assert value == "foo=bar; Secure; Path=/; Partitioned" + class TestRange: def test_if_range_parsing(self): From 32e69512134c2f8183c6438b2b2e13fd24e9d19f Mon Sep 17 00:00:00 2001 From: Daniel Gawne <30755402+dgwn@users.noreply.github.com> Date: Tue, 5 Mar 2024 11:02:46 -0500 Subject: [PATCH 059/159] add params for partitioned in set_cookie and delete_cookie --- src/werkzeug/sansio/response.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/werkzeug/sansio/response.py b/src/werkzeug/sansio/response.py index 271974ecf..cfad0994b 100644 --- a/src/werkzeug/sansio/response.py +++ b/src/werkzeug/sansio/response.py @@ -194,6 +194,7 @@ def set_cookie( secure: bool = False, httponly: bool = False, samesite: str | None = None, + partitioned: bool = False, ) -> None: """Sets a cookie. @@ -218,6 +219,7 @@ def set_cookie( :param httponly: Disallow JavaScript access to the cookie. :param samesite: Limit the scope of the cookie to only be attached to requests that are "same-site". + :param partitioned: If ``True``, the cookie will be partitioned. 
""" self.headers.add( "Set-Cookie", @@ -232,6 +234,7 @@ def set_cookie( httponly=httponly, max_size=self.max_cookie_size, samesite=samesite, + partitioned=partitioned, ), ) @@ -243,6 +246,7 @@ def delete_cookie( secure: bool = False, httponly: bool = False, samesite: str | None = None, + partitioned: bool = False, ) -> None: """Delete a cookie. Fails silently if key doesn't exist. @@ -256,6 +260,7 @@ def delete_cookie( :param httponly: Disallow JavaScript access to the cookie. :param samesite: Limit the scope of the cookie to only be attached to requests that are "same-site". + :param partitioned: If ``True``, the cookie will be partitioned. """ self.set_cookie( key, @@ -266,6 +271,7 @@ def delete_cookie( secure=secure, httponly=httponly, samesite=samesite, + partitioned=partitioned, ) @property From 57413980f4efa74d997d328782c0efd946c8f81b Mon Sep 17 00:00:00 2001 From: pgjones Date: Sat, 9 Mar 2024 22:14:44 +0000 Subject: [PATCH 060/159] Raise an error if the converter arguments cannot be parsed This could happen for example with `min=0;max=500` as the `;` is not a word character everything before it is ignored in the regex during the finditer call. This then lefts the user confused as the `min=0;` was silently ignored. --- CHANGES.rst | 2 ++ src/werkzeug/routing/rules.py | 8 ++++++++ tests/test_routing.py | 3 +++ 3 files changed, 13 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index c7f5aeba5..b2f2a08f6 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -12,6 +12,8 @@ Unreleased - Fix response_wrapper type check in test client. :issue:`2831` - Make the return type of ``MultiPartParser.parse`` more precise. :issue:`2840` +- Raise an error if converter arguments cannot be + parsed. :issue:`2822` Version 3.0.1 ------------- diff --git a/src/werkzeug/routing/rules.py b/src/werkzeug/routing/rules.py index ea32d1dca..7029d8bc0 100644 --- a/src/werkzeug/routing/rules.py +++ b/src/werkzeug/routing/rules.py @@ -67,6 +67,7 @@ class RulePart: _simple_rule_re = re.compile(r"<([^>]+)>") _converter_args_re = re.compile( r""" + \s* ((?P\w+)\s*=\s*)? (?P True|False| @@ -112,8 +113,14 @@ def parse_converter_args(argstr: str) -> tuple[tuple[t.Any, ...], dict[str, t.An argstr += "," args = [] kwargs = {} + position = 0 for item in _converter_args_re.finditer(argstr): + if item.start() != position: + raise ValueError( + f"Cannot parse converter argument '{argstr[position:item.start()]}'" + ) + value = item.group("stringval") if value is None: value = item.group("value") @@ -123,6 +130,7 @@ def parse_converter_args(argstr: str) -> tuple[tuple[t.Any, ...], dict[str, t.An else: name = item.group("name") kwargs[name] = value + position = item.end() return tuple(args), kwargs diff --git a/tests/test_routing.py b/tests/test_routing.py index 5291348c0..02db898d6 100644 --- a/tests/test_routing.py +++ b/tests/test_routing.py @@ -1076,6 +1076,9 @@ def test_converter_parser(): args, kwargs = r.parse_converter_args('"foo", "bar"') assert args == ("foo", "bar") + with pytest.raises(ValueError): + r.parse_converter_args("min=0;max=500") + def test_alias_redirects(): m = r.Map( From ad703fd35494e6d5b2c072b7d1cd13afa132f6d2 Mon Sep 17 00:00:00 2001 From: pgjones Date: Mon, 1 Apr 2024 19:51:09 +0100 Subject: [PATCH 061/159] Bump the slsa-github-generator action version As this solves a publishing error. 
--- .github/workflows/publish.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 05681f54c..d328c29af 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -33,7 +33,7 @@ jobs: id-token: write contents: write # Can't pin with hash due to how this workflow works. - uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.9.0 + uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.10.0 with: base64-subjects: ${{ needs.build.outputs.hash }} create-release: From d70dcea305bf842c5a3b55265b5ffac2db012e23 Mon Sep 17 00:00:00 2001 From: pgjones Date: Tue, 19 Mar 2024 21:35:36 +0000 Subject: [PATCH 062/159] Release version 3.0.2 --- CHANGES.rst | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index b2f2a08f6..fda41a89c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,7 @@ Version 3.0.2 ------------- -Unreleased +Released 2024-04-01 - Ensure setting merge_slashes to False results in NotFound for repeated-slash requests against single slash routes. :issue:`2834` diff --git a/pyproject.toml b/pyproject.toml index 161b4bc60..6b3509a8b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.0.1" +version = "3.0.2" description = "The comprehensive WSGI web application library." readme = "README.rst" license = {file = "LICENSE.rst"} From bc6cf7f7848b847e709eb0081fb05c230132eac5 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 8 Apr 2024 15:07:10 -0700 Subject: [PATCH 063/159] update project files * update pre-commit hook * upgrade pip with venv * update description and version * show url in publish environment * update versions * update versions, separate typing job * use dependabot grouped updates ignore upload/download-artifact until slsa updates * use sphinx.ext.extlinks instead of sphinx-issues * update dev dependencies * update editorconfig * update gitignore * update .readthedocs.yaml * license is txt, readme is md * update pyproject.toml add typed classifier add pyright config simplify urls * tox builds docs in place * update min test py version * add tox env to update all dev dependencies * update issue and pr templates * rename security docs page to not conflict with org policy file * simplify matrix * readme is markdown, remove install and links sections --- .devcontainer/on-create-command.sh | 4 +- .editorconfig | 2 +- .github/ISSUE_TEMPLATE/bug-report.md | 4 +- .github/ISSUE_TEMPLATE/config.yml | 14 +- .github/ISSUE_TEMPLATE/feature-request.md | 2 +- .github/dependabot.yml | 29 ++- .github/pull_request_template.md | 25 +-- .github/workflows/lock.yaml | 16 +- .github/workflows/publish.yaml | 29 +-- .github/workflows/tests.yaml | 51 +++--- .gitignore | 10 +- .pre-commit-config.yaml | 4 +- .readthedocs.yaml | 4 +- CHANGES.rst | 4 +- LICENSE.rst => LICENSE.txt | 0 README.rst => README.md | 66 +++---- docs/conf.py | 23 +-- docs/license.rst | 3 +- pyproject.toml | 42 ++--- requirements/build.txt | 13 +- requirements/dev.in | 7 +- requirements/dev.txt | 214 ++++++++++++++++++---- requirements/docs.in | 5 +- requirements/docs.txt | 48 +++-- requirements/tests.in | 5 +- requirements/tests.txt | 43 +++-- requirements/typing.in | 2 + requirements/typing.txt | 42 +++-- tox.ini | 18 +- 29 files changed, 438 insertions(+), 291 deletions(-) rename LICENSE.rst => LICENSE.txt (100%) rename 
README.rst => README.md (56%) diff --git a/.devcontainer/on-create-command.sh b/.devcontainer/on-create-command.sh index fdf77952f..eaebea618 100755 --- a/.devcontainer/on-create-command.sh +++ b/.devcontainer/on-create-command.sh @@ -1,9 +1,7 @@ #!/bin/bash set -e - -python3 -m venv .venv +python3 -m venv --upgrade-deps .venv . .venv/bin/activate -pip install -U pip pip install -r requirements/dev.txt pip install -e . pre-commit install --install-hooks diff --git a/.editorconfig b/.editorconfig index e32c8029d..2ff985a67 100644 --- a/.editorconfig +++ b/.editorconfig @@ -9,5 +9,5 @@ end_of_line = lf charset = utf-8 max_line_length = 88 -[*.{yml,yaml,json,js,css,html}] +[*.{css,html,js,json,jsx,scss,ts,tsx,yaml,yml}] indent_size = 2 diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md index eb5e22b21..cdbeececf 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.md +++ b/.github/ISSUE_TEMPLATE/bug-report.md @@ -4,8 +4,8 @@ about: Report a bug in Werkzeug (not other projects which depend on Werkzeug) --- diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 9df4cec0e..88a049ead 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,11 +1,11 @@ blank_issues_enabled: false contact_links: - name: Security issue - url: security@palletsprojects.com - about: Do not report security issues publicly. Email our security contact. - - name: Questions - url: https://stackoverflow.com/questions/tagged/werkzeug?tab=Frequent - about: Search for and ask questions about your code on Stack Overflow. - - name: Questions and discussions + url: https://github.com/pallets/werkzeug/security/advisories/new + about: Do not report security issues publicly. Create a private advisory. + - name: Questions on Discussions + url: https://github.com/pallets/werkzeug/discussions/ + about: Ask questions about your own code on the Discussions tab. + - name: Questions on Chat url: https://discord.gg/pallets - about: Discuss questions about your code on our Discord chat. + about: Ask questions about your own code on our Discord chat. diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md index 48698798f..18eaef7b5 100644 --- a/.github/ISSUE_TEMPLATE/feature-request.md +++ b/.github/ISSUE_TEMPLATE/feature-request.md @@ -5,7 +5,7 @@ about: Suggest a new feature for Werkzeug -- fixes # +fixes # +--> - -Checklist: - -- [ ] Add tests that demonstrate the correct behavior of the change. Tests should fail without the change. -- [ ] Add or update relevant docs, in the docs folder and in code. -- [ ] Add an entry in `CHANGES.rst` summarizing the change and linking to the issue. -- [ ] Add `.. versionchanged::` entries in any relevant code docs. -- [ ] Run `pre-commit` hooks and fix any issues. -- [ ] Run `pytest` and `tox`, no tests failed. diff --git a/.github/workflows/lock.yaml b/.github/workflows/lock.yaml index e962fd041..22228a1cd 100644 --- a/.github/workflows/lock.yaml +++ b/.github/workflows/lock.yaml @@ -1,25 +1,23 @@ -name: 'Lock threads' -# Lock closed issues that have not received any further activity for -# two weeks. This does not close open issues, only humans may do that. -# We find that it is easier to respond to new issues with fresh examples -# rather than continuing discussions on old issues. +name: Lock inactive closed issues +# Lock closed issues that have not received any further activity for two weeks. +# This does not close open issues, only humans may do that. 
It is easier to +# respond to new issues with fresh examples rather than continuing discussions +# on old issues. on: schedule: - cron: '0 0 * * *' - permissions: issues: write pull-requests: write - concurrency: group: lock - jobs: lock: runs-on: ubuntu-latest steps: - - uses: dessant/lock-threads@be8aa5be94131386884a6da4189effda9b14aa21 + - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1 with: issue-inactive-days: 14 pr-inactive-days: 14 + discussion-inactive-days: 14 diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index d328c29af..4e8139b79 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,12 +9,12 @@ jobs: outputs: hash: ${{ steps.hash.outputs.hash }} steps: - - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: '3.x' - cache: 'pip' - cache-dependency-path: 'requirements/*.txt' + cache: pip + cache-dependency-path: requirements*/*.txt - run: pip install -r requirements/build.txt # Use the commit date instead of the current date during the build. - run: echo "SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct)" >> $GITHUB_ENV @@ -23,11 +23,11 @@ jobs: - name: generate hash id: hash run: cd dist && echo "hash=$(sha256sum * | base64 -w0)" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce + - uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3 with: path: ./dist provenance: - needs: ['build'] + needs: [build] permissions: actions: read id-token: write @@ -39,12 +39,12 @@ jobs: create-release: # Upload the sdist, wheels, and provenance to a GitHub release. They remain # available as build artifacts for a while as well. - needs: ['provenance'] + needs: [provenance] runs-on: ubuntu-latest permissions: contents: write steps: - - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a + - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - name: create release run: > gh release create --draft --repo ${{ github.repository }} @@ -53,20 +53,21 @@ jobs: env: GH_TOKEN: ${{ github.token }} publish-pypi: - needs: ['provenance'] + needs: [provenance] # Wait for approval before attempting to upload to PyPI. This allows reviewing the # files in the draft release. - environment: 'publish' + environment: + name: publish + url: https://pypi.org/project/Werkzeug/${{ github.ref_name }} runs-on: ubuntu-latest permissions: id-token: write steps: - - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a - # Try uploading to Test PyPI first, in case something fails. 
- - uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e + - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 + - uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # v1.8.14 with: repository-url: https://test.pypi.org/legacy/ packages-dir: artifact/ - - uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e + - uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # v1.8.14 with: packages-dir: artifact/ diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index c1e6ea314..91a02d0ca 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -9,42 +9,49 @@ on: - '*.md' - '*.rst' pull_request: - branches: - - main - - '*.x' paths-ignore: - 'docs/**' - '*.md' - '*.rst' jobs: tests: - name: ${{ matrix.name }} - runs-on: ${{ matrix.os }} + name: ${{ matrix.name || matrix.python }} + runs-on: ${{ matrix.os || 'ubuntu-latest' }} strategy: fail-fast: false matrix: include: - - {name: Linux, python: '3.11', os: ubuntu-latest, tox: py311} - - {name: Windows, python: '3.11', os: windows-latest, tox: py311} - - {name: Mac, python: '3.11', os: macos-latest, tox: py311} - - {name: '3.12-dev', python: '3.12-dev', os: ubuntu-latest, tox: py312} - - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310} - - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39} - - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38} - - {name: 'PyPy', python: 'pypy-3.10', os: ubuntu-latest, tox: pypy310} - - {name: Typing, python: '3.11', os: ubuntu-latest, tox: typing} + - {python: '3.12'} + - {name: Windows, python: '3.12', os: windows-latest} + - {name: Mac, python: '3.12', os: macos-latest} + - {python: '3.11'} + - {python: '3.10'} + - {python: '3.9'} + - {python: '3.8'} + - {name: PyPy, python: 'pypy-3.10', tox: pypy310} steps: - - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: ${{ matrix.python }} - cache: 'pip' - cache-dependency-path: 'requirements/*.txt' + allow-prereleases: true + cache: pip + cache-dependency-path: requirements*/*.txt + - run: pip install tox + - run: tox run -e ${{ matrix.tox || format('py{0}', matrix.python) }} + typing: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 + with: + python-version: '3.x' + cache: pip + cache-dependency-path: requirements*/*.txt - name: cache mypy - uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 with: path: ./.mypy_cache - key: mypy|${{ matrix.python }}|${{ hashFiles('pyproject.toml') }} - if: matrix.tox == 'typing' + key: mypy|${{ hashFiles('pyproject.toml') }} - run: pip install tox - - run: tox run -e ${{ matrix.tox }} + - run: tox run -e typing diff --git a/.gitignore b/.gitignore index cd9550b9e..bbeb14f16 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,11 @@ .idea/ .vscode/ +.venv*/ +venv*/ __pycache__/ +dist/ +.coverage* +htmlcov/ .pytest_cache/ .tox/ -.coverage -.coverage.* -htmlcov/ docs/_build/ -dist/ -venv/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 
447fd5869..828916171 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,12 +2,12 @@ ci: autoupdate_schedule: monthly repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.5 + rev: v0.3.5 hooks: - id: ruff - id: ruff-format - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: check-merge-conflict - id: debug-statements diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 346900b20..865c68597 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -1,8 +1,8 @@ version: 2 build: - os: ubuntu-20.04 + os: ubuntu-22.04 tools: - python: "3.10" + python: '3.12' python: install: - requirements: requirements/docs.txt diff --git a/CHANGES.rst b/CHANGES.rst index fda41a89c..401886d36 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -21,7 +21,7 @@ Version 3.0.1 Released 2023-10-24 - Fix slow multipart parsing for large parts potentially enabling DoS - attacks. :cwe:`CWE-407` + attacks. Version 3.0.0 ------------- @@ -44,7 +44,7 @@ Version 2.3.8 Released 2023-11-08 - Fix slow multipart parsing for large parts potentially enabling DoS - attacks. :cwe:`CWE-407` + attacks. Version 2.3.7 diff --git a/LICENSE.rst b/LICENSE.txt similarity index 100% rename from LICENSE.rst rename to LICENSE.txt diff --git a/README.rst b/README.md similarity index 56% rename from README.rst rename to README.md index 220c9979a..011c0c45f 100644 --- a/README.rst +++ b/README.md @@ -1,9 +1,8 @@ -Werkzeug -======== +# Werkzeug *werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") -Werkzeug is a comprehensive `WSGI`_ web application library. It began as +Werkzeug is a comprehensive [WSGI][] web application library. It began as a simple collection of various utilities for WSGI applications and has become one of the most advanced WSGI utility libraries. @@ -31,59 +30,40 @@ choose a template engine, database adapter, and even how to handle requests. It can be used to build all sorts of end user applications such as blogs, wikis, or bulletin boards. -`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while +[Flask][] wraps Werkzeug, using it to handle the details of WSGI while providing more structure and patterns for defining powerful applications. -.. _WSGI: https://wsgi.readthedocs.io/en/latest/ -.. _Flask: https://www.palletsprojects.com/p/flask/ +[WSGI]: https://wsgi.readthedocs.io/en/latest/ +[Flask]: https://www.palletsprojects.com/p/flask/ -Installing ----------- +## A Simple Example -Install and update using `pip`_: +```python +# save this as app.py +from werkzeug.wrappers import Request, Response -.. code-block:: text +@Request.application +def application(request: Request) -> Response: + return Response("Hello, World!") - pip install -U Werkzeug +if __name__ == "__main__": + from werkzeug.serving import run_simple + run_simple("127.0.0.1", 5000, application) +``` -.. _pip: https://pip.pypa.io/en/stable/getting-started/ +``` +$ python -m app + * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) +``` -A Simple Example ----------------- - -.. code-block:: python - - from werkzeug.wrappers import Request, Response - - @Request.application - def application(request): - return Response('Hello, World!') - - if __name__ == '__main__': - from werkzeug.serving import run_simple - run_simple('localhost', 4000, application) - - -Donate ------- +## Donate The Pallets organization develops and supports Werkzeug and other popular packages. 
In order to grow the community of contributors and users, and allow the maintainers to devote more time to the projects, -`please donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ +[please donate today][]. -- Documentation: https://werkzeug.palletsprojects.com/ -- Changes: https://werkzeug.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/Werkzeug/ -- Source Code: https://github.com/pallets/werkzeug/ -- Issue Tracker: https://github.com/pallets/werkzeug/issues/ -- Chat: https://discord.gg/pallets +[please donate today]: https://palletsprojects.com/donate diff --git a/docs/conf.py b/docs/conf.py index e09ef8f7b..5e04cb817 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -10,18 +10,25 @@ # General -------------------------------------------------------------- -master_doc = "index" +default_role = "code" extensions = [ "sphinx.ext.autodoc", + "sphinx.ext.extlinks", "sphinx.ext.intersphinx", - "pallets_sphinx_themes", - "sphinx_issues", "sphinxcontrib.log_cabinet", + "pallets_sphinx_themes", ] autoclass_content = "both" +autodoc_member_order = "bysource" autodoc_typehints = "description" -intersphinx_mapping = {"python": ("https://docs.python.org/3/", None)} -issues_github_path = "pallets/werkzeug" +autodoc_preserve_defaults = True +extlinks = { + "issue": ("https://github.com/pallets/werkzeug/issues/%s", "#%s"), + "pr": ("https://github.com/pallets/werkzeug/pull/%s", "#%s"), +} +intersphinx_mapping = { + "python": ("https://docs.python.org/3/", None), +} # HTML ----------------------------------------------------------------- @@ -46,9 +53,3 @@ html_logo = "_static/werkzeug-vertical.png" html_title = f"Werkzeug Documentation ({version})" html_show_sourcelink = False - -# LaTeX ---------------------------------------------------------------- - -latex_documents = [ - (master_doc, f"Werkzeug-{version}.tex", html_title, author, "manual") -] diff --git a/docs/license.rst b/docs/license.rst index a53a98cf3..2a445f9c6 100644 --- a/docs/license.rst +++ b/docs/license.rst @@ -1,4 +1,5 @@ BSD-3-Clause License ==================== -.. include:: ../LICENSE.rst +.. literalinclude:: ../LICENSE.txt + :language: text diff --git a/pyproject.toml b/pyproject.toml index 6b3509a8b..f54060f6e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,8 +2,8 @@ name = "Werkzeug" version = "3.0.2" description = "The comprehensive WSGI web application library." 
-readme = "README.rst" -license = {file = "LICENSE.rst"} +readme = "README.md" +license = {file = "LICENSE.txt"} maintainers = [{name = "Pallets", email = "contact@palletsprojects.com"}] classifiers = [ "Development Status :: 5 - Production/Stable", @@ -17,9 +17,12 @@ classifiers = [ "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", "Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware", "Topic :: Software Development :: Libraries :: Application Frameworks", + "Typing :: Typed", ] requires-python = ">=3.8" -dependencies = ["MarkupSafe>=2.1.1"] +dependencies = [ + "MarkupSafe>=2.1.1", +] [project.urls] Donate = "https://palletsprojects.com/donate" @@ -67,29 +70,10 @@ source = ["werkzeug", "tests"] source = ["src", "*/site-packages"] [tool.mypy] -python_version = "3.8" files = ["src/werkzeug"] show_error_codes = true pretty = true -#strict = true -allow_redefinition = true -disallow_subclassing_any = true -#disallow_untyped_calls = true -disallow_untyped_defs = true -disallow_incomplete_defs = true -no_implicit_optional = true -local_partial_types = true -no_implicit_reexport = true -strict_equality = true -warn_redundant_casts = true -warn_unused_configs = true -warn_unused_ignores = true -warn_return_any = true -#warn_unreachable = True - -[[tool.mypy.overrides]] -module = ["werkzeug.wrappers"] -no_implicit_reexport = false +strict = true [[tool.mypy.overrides]] module = [ @@ -103,24 +87,28 @@ module = [ ] ignore_missing_imports = true +[tool.pyright] +pythonVersion = "3.8" +include = ["src/werkzeug"] + [tool.ruff] extend-exclude = ["examples/"] src = ["src"] -fix = false +fix = true show-fixes = true -show-source = true +output-format = "full" [tool.ruff.lint] select = [ "B", # flake8-bugbear "E", # pycodestyle error "F", # pyflakes - #"I", # isort + "I", # isort "UP", # pyupgrade "W", # pycodestyle warning ] ignore = [ - "E402" # allow circular imports at end of file + "E402", # allow circular imports at end of file ] ignore-init-module-imports = true diff --git a/requirements/build.txt b/requirements/build.txt index 196545d0e..9ecc48952 100644 --- a/requirements/build.txt +++ b/requirements/build.txt @@ -1,13 +1,12 @@ -# SHA1:80754af91bfb6d1073585b046fe0a474ce868509 # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile build.in # -build==0.10.0 - # via -r requirements/build.in -packaging==23.1 +build==1.2.1 + # via -r build.in +packaging==24.0 # via build pyproject-hooks==1.0.0 # via build diff --git a/requirements/dev.in b/requirements/dev.in index 99f5942f8..1efde82b1 100644 --- a/requirements/dev.in +++ b/requirements/dev.in @@ -1,6 +1,5 @@ --r docs.in --r tests.in --r typing.in -pip-compile-multi +-r docs.txt +-r tests.txt +-r typing.txt pre-commit tox diff --git a/requirements/dev.txt b/requirements/dev.txt index ed462080a..186ceda46 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,64 +1,202 @@ -# SHA1:54b5b77ec8c7a0064ffa93b2fd16cb0130ba177c # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile dev.in # --r docs.txt --r tests.txt --r typing.txt -build==0.10.0 - # via pip-tools -cachetools==5.3.1 +alabaster==0.7.16 + # via + # -r docs.txt + # sphinx +babel==2.14.0 + # via + # -r docs.txt + # sphinx +cachetools==5.3.3 # via tox -cfgv==3.3.1 +certifi==2024.2.2 + # via + # 
-r docs.txt + # requests +cffi==1.16.0 + # via + # -r tests.txt + # cryptography +cfgv==3.4.0 # via pre-commit -chardet==5.1.0 +chardet==5.2.0 # via tox -click==8.1.3 +charset-normalizer==3.3.2 # via - # pip-compile-multi - # pip-tools + # -r docs.txt + # requests colorama==0.4.6 # via tox -distlib==0.3.6 +cryptography==42.0.5 + # via -r tests.txt +distlib==0.3.8 # via virtualenv -filelock==3.12.2 +docutils==0.20.1 + # via + # -r docs.txt + # sphinx +ephemeral-port-reserve==1.1.4 + # via -r tests.txt +filelock==3.13.3 # via # tox # virtualenv -identify==2.5.24 +greenlet==3.0.3 + # via -r tests.txt +identify==2.5.35 # via pre-commit +idna==3.6 + # via + # -r docs.txt + # requests +imagesize==1.4.1 + # via + # -r docs.txt + # sphinx +iniconfig==2.0.0 + # via + # -r tests.txt + # -r typing.txt + # pytest +jinja2==3.1.3 + # via + # -r docs.txt + # sphinx +markupsafe==2.1.5 + # via + # -r docs.txt + # jinja2 +mypy==1.9.0 + # via -r typing.txt +mypy-extensions==1.0.0 + # via + # -r typing.txt + # mypy nodeenv==1.8.0 - # via pre-commit -pip-compile-multi==2.6.3 - # via -r requirements/dev.in -pip-tools==6.13.0 - # via pip-compile-multi -platformdirs==3.8.0 + # via + # -r typing.txt + # pre-commit + # pyright +packaging==24.0 + # via + # -r docs.txt + # -r tests.txt + # -r typing.txt + # pallets-sphinx-themes + # pyproject-api + # pytest + # sphinx + # tox +pallets-sphinx-themes==2.1.1 + # via -r docs.txt +platformdirs==4.2.0 # via # tox # virtualenv -pre-commit==3.3.3 - # via -r requirements/dev.in -pyproject-api==1.5.2 +pluggy==1.4.0 + # via + # -r tests.txt + # -r typing.txt + # pytest + # tox +pre-commit==3.7.0 + # via -r dev.in +psutil==5.9.8 + # via + # -r tests.txt + # pytest-xprocess +pycparser==2.22 + # via + # -r tests.txt + # cffi +pygments==2.17.2 + # via + # -r docs.txt + # sphinx +pyproject-api==1.6.1 # via tox -pyproject-hooks==1.0.0 - # via build -pyyaml==6.0 +pyright==1.1.357 + # via -r typing.txt +pytest==8.1.1 + # via + # -r tests.txt + # -r typing.txt + # pytest-timeout + # pytest-xprocess +pytest-timeout==2.3.1 + # via -r tests.txt +pytest-xprocess==0.23.0 + # via -r tests.txt +pyyaml==6.0.1 # via pre-commit -toposort==1.10 - # via pip-compile-multi -tox==4.6.3 - # via -r requirements/dev.in -virtualenv==20.23.1 +requests==2.31.0 + # via + # -r docs.txt + # sphinx +snowballstemmer==2.2.0 + # via + # -r docs.txt + # sphinx +sphinx==7.2.6 + # via + # -r docs.txt + # pallets-sphinx-themes + # sphinxcontrib-log-cabinet +sphinxcontrib-applehelp==1.0.8 + # via + # -r docs.txt + # sphinx +sphinxcontrib-devhelp==1.0.6 + # via + # -r docs.txt + # sphinx +sphinxcontrib-htmlhelp==2.0.5 + # via + # -r docs.txt + # sphinx +sphinxcontrib-jsmath==1.0.1 + # via + # -r docs.txt + # sphinx +sphinxcontrib-log-cabinet==1.0.1 + # via -r docs.txt +sphinxcontrib-qthelp==1.0.7 + # via + # -r docs.txt + # sphinx +sphinxcontrib-serializinghtml==1.1.10 + # via + # -r docs.txt + # sphinx +tox==4.14.2 + # via -r dev.in +types-contextvars==2.4.7.3 + # via -r typing.txt +types-dataclasses==0.6.6 + # via -r typing.txt +types-setuptools==69.2.0.20240317 + # via -r typing.txt +typing-extensions==4.11.0 + # via + # -r typing.txt + # mypy +urllib3==2.2.1 + # via + # -r docs.txt + # requests +virtualenv==20.25.1 # via # pre-commit # tox -wheel==0.40.0 - # via pip-tools +watchdog==4.0.0 + # via + # -r tests.txt + # -r typing.txt # The following packages are considered to be unsafe in a requirements file: -# pip # setuptools diff --git a/requirements/docs.in b/requirements/docs.in index 7ec501b6d..ba3fd7774 100644 
--- a/requirements/docs.in +++ b/requirements/docs.in @@ -1,4 +1,3 @@ -Pallets-Sphinx-Themes -Sphinx -sphinx-issues +pallets-sphinx-themes +sphinx sphinxcontrib-log-cabinet diff --git a/requirements/docs.txt b/requirements/docs.txt index e125c59a4..ed605ea92 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,61 +1,57 @@ -# SHA1:45c590f97fe95b8bdc755eef796e91adf5fbe4ea # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile docs.in # -alabaster==0.7.13 +alabaster==0.7.16 # via sphinx -babel==2.12.1 +babel==2.14.0 # via sphinx -certifi==2023.5.7 +certifi==2024.2.2 # via requests -charset-normalizer==3.1.0 +charset-normalizer==3.3.2 # via requests docutils==0.20.1 # via sphinx -idna==3.4 +idna==3.6 # via requests imagesize==1.4.1 # via sphinx -jinja2==3.1.2 +jinja2==3.1.3 # via sphinx -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 -packaging==23.1 +packaging==24.0 # via # pallets-sphinx-themes # sphinx pallets-sphinx-themes==2.1.1 - # via -r requirements/docs.in -pygments==2.15.1 + # via -r docs.in +pygments==2.17.2 # via sphinx requests==2.31.0 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.0.1 +sphinx==7.2.6 # via - # -r requirements/docs.in + # -r docs.in # pallets-sphinx-themes - # sphinx-issues # sphinxcontrib-log-cabinet -sphinx-issues==3.0.1 - # via -r requirements/docs.in -sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-applehelp==1.0.8 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==1.0.6 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-htmlhelp==2.0.5 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-log-cabinet==1.0.1 - # via -r requirements/docs.in -sphinxcontrib-qthelp==1.0.3 + # via -r docs.in +sphinxcontrib-qthelp==1.0.7 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==1.1.10 # via sphinx -urllib3==2.0.3 +urllib3==2.2.1 # via requests diff --git a/requirements/tests.in b/requirements/tests.in index 3ced491be..8228f8ee6 100644 --- a/requirements/tests.in +++ b/requirements/tests.in @@ -1,7 +1,8 @@ pytest pytest-timeout -pytest-xprocess +# pinned for python 3.8 support +pytest-xprocess<1 cryptography -greenlet ; python_version < "3.11" +greenlet watchdog ephemeral-port-reserve diff --git a/requirements/tests.txt b/requirements/tests.txt index 057d62859..14b67436f 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -1,36 +1,35 @@ -# SHA1:42b4e3e66395275e048d9a92c294b2c650393866 # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile tests.in # -cffi==1.15.1 +cffi==1.16.0 # via cryptography -cryptography==41.0.1 - # via -r requirements/tests.in +cryptography==42.0.5 + # via -r tests.in ephemeral-port-reserve==1.1.4 - # via -r requirements/tests.in + # via -r tests.in +greenlet==3.0.3 + # via -r tests.in iniconfig==2.0.0 # via pytest -packaging==23.1 +packaging==24.0 # via pytest -pluggy==1.2.0 +pluggy==1.4.0 # via pytest -psutil==5.9.5 +psutil==5.9.8 # via pytest-xprocess -py==1.11.0 - # via pytest-xprocess -pycparser==2.21 +pycparser==2.22 # via cffi -pytest==7.4.0 +pytest==8.1.1 # via - # -r requirements/tests.in + # -r tests.in # pytest-timeout # pytest-xprocess -pytest-timeout==2.1.0 - # via -r requirements/tests.in -pytest-xprocess==0.22.2 - # via -r 
requirements/tests.in -watchdog==3.0.0 - # via -r requirements/tests.in +pytest-timeout==2.3.1 + # via -r tests.in +pytest-xprocess==0.23.0 + # via -r tests.in +watchdog==4.0.0 + # via -r tests.in diff --git a/requirements/typing.in b/requirements/typing.in index 23ab1587b..096413b22 100644 --- a/requirements/typing.in +++ b/requirements/typing.in @@ -1,4 +1,6 @@ mypy +pyright +pytest types-contextvars types-dataclasses types-setuptools diff --git a/requirements/typing.txt b/requirements/typing.txt index 99c46d2e0..09c78d711 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,21 +1,35 @@ -# SHA1:162796b1b3ac7a29da65fe0e32278f14b68ed8c8 # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile typing.in # -mypy==1.4.1 - # via -r requirements/typing.in +iniconfig==2.0.0 + # via pytest +mypy==1.9.0 + # via -r typing.in mypy-extensions==1.0.0 # via mypy -types-contextvars==2.4.7.2 - # via -r requirements/typing.in +nodeenv==1.8.0 + # via pyright +packaging==24.0 + # via pytest +pluggy==1.4.0 + # via pytest +pyright==1.1.357 + # via -r typing.in +pytest==8.1.1 + # via -r typing.in +types-contextvars==2.4.7.3 + # via -r typing.in types-dataclasses==0.6.6 - # via -r requirements/typing.in -types-setuptools==68.0.0.0 - # via -r requirements/typing.in -typing-extensions==4.6.3 + # via -r typing.in +types-setuptools==69.2.0.20240317 + # via -r typing.in +typing-extensions==4.11.0 # via mypy -watchdog==3.0.0 - # via -r requirements/typing.in +watchdog==4.0.0 + # via -r typing.in + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/tox.ini b/tox.ini index eca667f84..f7bc0b3b5 100644 --- a/tox.ini +++ b/tox.ini @@ -10,6 +10,8 @@ skip_missing_interpreters = true [testenv] package = wheel wheel_build_env = .pkg +constrain_package_deps = true +use_frozen_constraints = true deps = -r requirements/tests.txt commands = pytest -v --tb=short --basetemp={envtmpdir} {posargs} @@ -24,4 +26,18 @@ commands = mypy [testenv:docs] deps = -r requirements/docs.txt -commands = sphinx-build -W -b html -d {envtmpdir}/doctrees docs {envtmpdir}/html +commands = sphinx-build -E -W -b dirhtml docs docs/_build/dirhtml + +[testenv:update-requirements] +deps = + pip-tools + pre-commit +skip_install = true +change_dir = requirements +commands = + pre-commit autoupdate -j4 + pip-compile -U build.in + pip-compile -U docs.in + pip-compile -U tests.in + pip-compile -U typing.in + pip-compile -U dev.in From f5b71d82b87212bb5468656422f5a7423980432d Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 8 Apr 2024 15:07:43 -0700 Subject: [PATCH 064/159] address mypy strict mode findings --- src/werkzeug/_internal.py | 15 ++--- src/werkzeug/_reloader.py | 18 +++--- src/werkzeug/datastructures/auth.py | 6 +- src/werkzeug/datastructures/cache_control.pyi | 18 ++++-- src/werkzeug/datastructures/file_storage.pyi | 6 +- src/werkzeug/datastructures/structures.pyi | 28 ++++---- src/werkzeug/debug/__init__.py | 3 +- src/werkzeug/debug/console.py | 2 +- src/werkzeug/debug/repr.py | 9 +-- src/werkzeug/debug/tbtools.py | 2 +- src/werkzeug/exceptions.py | 18 +++--- src/werkzeug/formparser.py | 18 +++--- src/werkzeug/http.py | 56 ++++++++-------- src/werkzeug/local.py | 64 +++++++++++-------- src/werkzeug/middleware/dispatcher.py | 1 + src/werkzeug/middleware/http_proxy.py | 1 + src/werkzeug/middleware/lint.py | 11 ++-- 
src/werkzeug/middleware/profiler.py | 1 + src/werkzeug/middleware/proxy_fix.py | 1 + src/werkzeug/middleware/shared_data.py | 6 +- src/werkzeug/routing/__init__.py | 1 + src/werkzeug/routing/converters.py | 16 ++--- src/werkzeug/routing/exceptions.py | 7 +- src/werkzeug/routing/map.py | 14 ++-- src/werkzeug/routing/rules.py | 4 +- src/werkzeug/sansio/http.py | 4 +- src/werkzeug/sansio/request.py | 7 +- src/werkzeug/sansio/response.py | 49 +++++++------- src/werkzeug/security.py | 12 ++-- src/werkzeug/serving.py | 5 +- src/werkzeug/test.py | 24 +++---- src/werkzeug/testapp.py | 27 ++++++-- src/werkzeug/utils.py | 21 +++--- src/werkzeug/wrappers/request.py | 17 ++--- src/werkzeug/wrappers/response.py | 27 ++++---- tests/test_exceptions.py | 3 +- tests/test_utils.py | 1 + 37 files changed, 282 insertions(+), 241 deletions(-) diff --git a/src/werkzeug/_internal.py b/src/werkzeug/_internal.py index 70ab68761..7dd2fbccd 100644 --- a/src/werkzeug/_internal.py +++ b/src/werkzeug/_internal.py @@ -9,6 +9,7 @@ if t.TYPE_CHECKING: from _typeshed.wsgi import WSGIEnvironment + from .wrappers.request import Request _logger: logging.Logger | None = None @@ -60,7 +61,7 @@ def _has_level_handler(logger: logging.Logger) -> bool: return False -class _ColorStreamHandler(logging.StreamHandler): +class _ColorStreamHandler(logging.StreamHandler): # type: ignore[type-arg] """On Windows, wrap stream with Colorama for ANSI style support.""" def __init__(self) -> None: @@ -97,13 +98,11 @@ def _log(type: str, message: str, *args: t.Any, **kwargs: t.Any) -> None: @t.overload -def _dt_as_utc(dt: None) -> None: - ... +def _dt_as_utc(dt: None) -> None: ... @t.overload -def _dt_as_utc(dt: datetime) -> datetime: - ... +def _dt_as_utc(dt: datetime) -> datetime: ... def _dt_as_utc(dt: datetime | None) -> datetime | None: @@ -149,12 +148,10 @@ def lookup(self, instance: t.Any) -> t.MutableMapping[str, t.Any]: @t.overload def __get__( self, instance: None, owner: type - ) -> _DictAccessorProperty[_TAccessorValue]: - ... + ) -> _DictAccessorProperty[_TAccessorValue]: ... @t.overload - def __get__(self, instance: t.Any, owner: type) -> _TAccessorValue: - ... + def __get__(self, instance: t.Any, owner: type) -> _TAccessorValue: ... 
def __get__( self, instance: t.Any | None, owner: type diff --git a/src/werkzeug/_reloader.py b/src/werkzeug/_reloader.py index c8683593f..24c2dab79 100644 --- a/src/werkzeug/_reloader.py +++ b/src/werkzeug/_reloader.py @@ -141,7 +141,7 @@ def _find_watchdog_paths( def _find_common_roots(paths: t.Iterable[str]) -> t.Iterable[str]: - root: dict[str, dict] = {} + root: dict[str, dict[str, t.Any]] = {} for chunks in sorted((PurePath(x).parts for x in paths), key=len, reverse=True): node = root @@ -153,7 +153,7 @@ def _find_common_roots(paths: t.Iterable[str]) -> t.Iterable[str]: rv = set() - def _walk(node: t.Mapping[str, dict], path: tuple[str, ...]) -> None: + def _walk(node: t.Mapping[str, dict[str, t.Any]], path: tuple[str, ...]) -> None: for prefix, child in node.items(): _walk(child, path + (prefix,)) @@ -310,10 +310,10 @@ def run_step(self) -> None: class WatchdogReloaderLoop(ReloaderLoop): def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - from watchdog.observers import Observer - from watchdog.events import PatternMatchingEventHandler from watchdog.events import EVENT_TYPE_OPENED from watchdog.events import FileModifiedEvent + from watchdog.events import PatternMatchingEventHandler + from watchdog.observers import Observer super().__init__(*args, **kwargs) trigger_reload = self.trigger_reload @@ -338,7 +338,7 @@ def on_any_event(self, event: FileModifiedEvent): # type: ignore # the source file (or initial pyc file) as well. Ignore Git and # Mercurial internal changes. extra_patterns = [p for p in self.extra_files if not os.path.isdir(p)] - self.event_handler = EventHandler( + self.event_handler = EventHandler( # type: ignore[no-untyped-call] patterns=["*.py", "*.pyc", "*.zip", *extra_patterns], ignore_patterns=[ *[f"*/{d}/*" for d in _ignore_common_dirs], @@ -356,11 +356,11 @@ def trigger_reload(self, filename: str) -> None: def __enter__(self) -> ReloaderLoop: self.watches: dict[str, t.Any] = {} - self.observer.start() + self.observer.start() # type: ignore[no-untyped-call] return super().__enter__() def __exit__(self, exc_type, exc_val, exc_tb): # type: ignore - self.observer.stop() + self.observer.stop() # type: ignore[no-untyped-call] self.observer.join() def run(self) -> None: @@ -376,7 +376,7 @@ def run_step(self) -> None: for path in _find_watchdog_paths(self.extra_files, self.exclude_patterns): if path not in self.watches: try: - self.watches[path] = self.observer.schedule( + self.watches[path] = self.observer.schedule( # type: ignore[no-untyped-call] self.event_handler, path, recursive=True ) except OSError: @@ -391,7 +391,7 @@ def run_step(self) -> None: watch = self.watches.pop(path, None) if watch is not None: - self.observer.unschedule(watch) + self.observer.unschedule(watch) # type: ignore[no-untyped-call] reloader_loops: dict[str, type[ReloaderLoop]] = { diff --git a/src/werkzeug/datastructures/auth.py b/src/werkzeug/datastructures/auth.py index 830529da7..a3ca0de46 100644 --- a/src/werkzeug/datastructures/auth.py +++ b/src/werkzeug/datastructures/auth.py @@ -172,7 +172,7 @@ def __init__( token: str | None = None, ): self._type = auth_type.lower() - self._parameters: dict[str, str | None] = CallbackDict( # type: ignore[misc] + self._parameters: dict[str, str | None] = CallbackDict( values, lambda _: self._trigger_on_update() ) self._token = token @@ -201,9 +201,7 @@ def parameters(self) -> dict[str, str | None]: @parameters.setter def parameters(self, value: dict[str, str]) -> None: - self._parameters = CallbackDict( # type: ignore[misc] - value, lambda _: 
self._trigger_on_update() - ) + self._parameters = CallbackDict(value, lambda _: self._trigger_on_update()) self._trigger_on_update() @property diff --git a/src/werkzeug/datastructures/cache_control.pyi b/src/werkzeug/datastructures/cache_control.pyi index 06fe667a2..54ec02082 100644 --- a/src/werkzeug/datastructures/cache_control.pyi +++ b/src/werkzeug/datastructures/cache_control.pyi @@ -8,15 +8,19 @@ from .mixins import UpdateDictMixin T = TypeVar("T") _CPT = TypeVar("_CPT", str, int, bool) -_OptCPT = _CPT | None -def cache_control_property(key: str, empty: _OptCPT, type: type[_CPT]) -> property: ... +def cache_control_property( + key: str, empty: _CPT | None, type: type[_CPT] +) -> property: ... -class _CacheControl(UpdateDictMixin[str, _OptCPT], dict[str, _OptCPT]): +class _CacheControl( + UpdateDictMixin[str, str | int | bool | None], dict[str, str | int | bool | None] +): provided: bool def __init__( self, - values: Mapping[str, _OptCPT] | Iterable[tuple[str, _OptCPT]] = (), + values: Mapping[str, str | int | bool | None] + | Iterable[tuple[str, str | int | bool | None]] = (), on_update: Callable[[_CacheControl], None] | None = None, ) -> None: ... @property @@ -48,9 +52,11 @@ class _CacheControl(UpdateDictMixin[str, _OptCPT], dict[str, _OptCPT]): def _del_cache_value(self, key: str) -> None: ... def to_header(self) -> str: ... @staticmethod - def cache_property(key: str, empty: _OptCPT, type: type[_CPT]) -> property: ... + def cache_property(key: str, empty: _CPT | None, type: type[_CPT]) -> property: ... -class RequestCacheControl(ImmutableDictMixin[str, _OptCPT], _CacheControl): +class RequestCacheControl( # type: ignore[misc] + ImmutableDictMixin[str, str | int | bool | None], _CacheControl +): @property def max_stale(self) -> int | None: ... @max_stale.setter diff --git a/src/werkzeug/datastructures/file_storage.pyi b/src/werkzeug/datastructures/file_storage.pyi index 730789e35..36a7ed9f2 100644 --- a/src/werkzeug/datastructures/file_storage.pyi +++ b/src/werkzeug/datastructures/file_storage.pyi @@ -15,7 +15,7 @@ class FileStorage: def __init__( self, stream: IO[bytes] | None = None, - filename: str | PathLike | None = None, + filename: str | PathLike[str] | None = None, name: str | None = None, content_type: str | None = None, content_length: int | None = None, @@ -30,7 +30,9 @@ class FileStorage: def mimetype(self) -> str: ... @property def mimetype_params(self) -> dict[str, str]: ... - def save(self, dst: str | PathLike | IO[bytes], buffer_size: int = ...) -> None: ... + def save( + self, dst: str | PathLike[str] | IO[bytes], buffer_size: int = ... + ) -> None: ... def close(self) -> None: ... def __bool__(self) -> bool: ... def __getattr__(self, name: str) -> Any: ... 
diff --git a/src/werkzeug/datastructures/structures.pyi b/src/werkzeug/datastructures/structures.pyi index 2e7af35be..7086ddae1 100644 --- a/src/werkzeug/datastructures/structures.pyi +++ b/src/werkzeug/datastructures/structures.pyi @@ -9,22 +9,20 @@ from typing import NoReturn from typing import overload from typing import TypeVar -from .mixins import ( - ImmutableDictMixin, - ImmutableListMixin, - ImmutableMultiDictMixin, - UpdateDictMixin, -) +from .mixins import ImmutableDictMixin +from .mixins import ImmutableListMixin +from .mixins import ImmutableMultiDictMixin +from .mixins import UpdateDictMixin D = TypeVar("D") K = TypeVar("K") T = TypeVar("T") V = TypeVar("V") -_CD = TypeVar("_CD", bound="CallbackDict") +_CD = TypeVar("_CD", bound="CallbackDict[Any, Any]") def is_immutable(self: object) -> NoReturn: ... def iter_multi_items( - mapping: Mapping[K, V | Iterable[V]] | Iterable[tuple[K, V]] + mapping: Mapping[K, V | Iterable[V]] | Iterable[tuple[K, V]], ) -> Iterator[tuple[K, V]]: ... class ImmutableList(ImmutableListMixin[V]): ... @@ -41,7 +39,7 @@ class TypeConversionDict(dict[K, V]): class ImmutableTypeConversionDict(ImmutableDictMixin[K, V], TypeConversionDict[K, V]): def copy(self) -> TypeConversionDict[K, V]: ... - def __copy__(self) -> ImmutableTypeConversionDict: ... + def __copy__(self) -> ImmutableTypeConversionDict[K, V]: ... class MultiDict(TypeConversionDict[K, V]): def __init__( @@ -84,16 +82,16 @@ class MultiDict(TypeConversionDict[K, V]): def __deepcopy__(self, memo: Any) -> MultiDict[K, V]: ... class _omd_bucket(Generic[K, V]): - prev: _omd_bucket | None - next: _omd_bucket | None + prev: _omd_bucket[K, V] | None + next: _omd_bucket[K, V] | None key: K value: V - def __init__(self, omd: OrderedMultiDict, key: K, value: V) -> None: ... - def unlink(self, omd: OrderedMultiDict) -> None: ... + def __init__(self, omd: OrderedMultiDict[K, V], key: K, value: V) -> None: ... + def unlink(self, omd: OrderedMultiDict[K, V]) -> None: ... class OrderedMultiDict(MultiDict[K, V]): - _first_bucket: _omd_bucket | None - _last_bucket: _omd_bucket | None + _first_bucket: _omd_bucket[K, V] | None + _last_bucket: _omd_bucket[K, V] | None def __init__(self, mapping: Mapping[K, V] | None = None) -> None: ... def __eq__(self, other: object) -> bool: ... def __getitem__(self, key: K) -> V: ... diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py index f8756d890..a55480aa3 100644 --- a/src/werkzeug/debug/__init__.py +++ b/src/werkzeug/debug/__init__.py @@ -82,7 +82,8 @@ def _generate() -> str | bytes | None: try: # subprocess may not be available, e.g. 
Google App Engine # https://github.com/pallets/werkzeug/issues/925 - from subprocess import Popen, PIPE + from subprocess import PIPE + from subprocess import Popen dump = Popen( ["ioreg", "-c", "IOPlatformExpertDevice", "-d", "2"], stdout=PIPE diff --git a/src/werkzeug/debug/console.py b/src/werkzeug/debug/console.py index 03ddc07f2..4e40475a5 100644 --- a/src/werkzeug/debug/console.py +++ b/src/werkzeug/debug/console.py @@ -13,7 +13,7 @@ from .repr import helper _stream: ContextVar[HTMLStringO] = ContextVar("werkzeug.debug.console.stream") -_ipy: ContextVar = ContextVar("werkzeug.debug.console.ipy") +_ipy: ContextVar[_InteractiveConsole] = ContextVar("werkzeug.debug.console.ipy") class HTMLStringO: diff --git a/src/werkzeug/debug/repr.py b/src/werkzeug/debug/repr.py index 1dcdd67be..2bbd9d546 100644 --- a/src/werkzeug/debug/repr.py +++ b/src/werkzeug/debug/repr.py @@ -4,6 +4,7 @@ Together with the CSS and JavaScript of the debugger this gives a colorful and more compact output. """ + from __future__ import annotations import codecs @@ -95,8 +96,8 @@ def _add_subclass_info(inner: str, obj: object, base: type | tuple[type, ...]) - def _sequence_repr_maker( left: str, right: str, base: type, limit: int = 8 -) -> t.Callable[[DebugReprGenerator, t.Iterable, bool], str]: - def proxy(self: DebugReprGenerator, obj: t.Iterable, recursive: bool) -> str: +) -> t.Callable[[DebugReprGenerator, t.Iterable[t.Any], bool], str]: + def proxy(self: DebugReprGenerator, obj: t.Iterable[t.Any], recursive: bool) -> str: if recursive: return _add_subclass_info(f"{left}...{right}", obj, base) buf = [left] @@ -128,7 +129,7 @@ def __init__(self) -> None: 'collections.deque([', "])", deque ) - def regex_repr(self, obj: t.Pattern) -> str: + def regex_repr(self, obj: t.Pattern[t.AnyStr]) -> str: pattern = repr(obj.pattern) pattern = codecs.decode(pattern, "unicode-escape", "ignore") pattern = f"r{pattern}" @@ -186,7 +187,7 @@ def dict_repr( buf.append("}") return _add_subclass_info("".join(buf), d, dict) - def object_repr(self, obj: type[dict] | t.Callable | type[list] | None) -> str: + def object_repr(self, obj: t.Any) -> str: r = repr(obj) return f'{escape(r)}' diff --git a/src/werkzeug/debug/tbtools.py b/src/werkzeug/debug/tbtools.py index f9be17c42..0574c966b 100644 --- a/src/werkzeug/debug/tbtools.py +++ b/src/werkzeug/debug/tbtools.py @@ -187,7 +187,7 @@ def _process_traceback( if hasattr(fs, "colno"): frame_args["colno"] = fs.colno - frame_args["end_colno"] = fs.end_colno # type: ignore[attr-defined] + frame_args["end_colno"] = fs.end_colno new_stack.append(DebugFrameSummary(**frame_args)) diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py index 253612918..6ce7ef955 100644 --- a/src/werkzeug/exceptions.py +++ b/src/werkzeug/exceptions.py @@ -43,6 +43,7 @@ def application(request): return e """ + from __future__ import annotations import typing as t @@ -56,6 +57,7 @@ def application(request): if t.TYPE_CHECKING: from _typeshed.wsgi import StartResponse from _typeshed.wsgi import WSGIEnvironment + from .datastructures import WWWAuthenticate from .sansio.response import Response from .wrappers.request import Request as WSGIRequest @@ -94,7 +96,7 @@ def name(self) -> str: def get_description( self, environ: WSGIEnvironment | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> str: """Get the description.""" if self.description is None: @@ -108,7 +110,7 @@ def get_description( def get_body( self, environ: WSGIEnvironment | None = None, - scope: dict | None = 
None, + scope: dict[str, t.Any] | None = None, ) -> str: """Get the HTML body.""" return ( @@ -122,7 +124,7 @@ def get_body( def get_headers( self, environ: WSGIEnvironment | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> list[tuple[str, str]]: """Get a list of headers.""" return [("Content-Type", "text/html; charset=utf-8")] @@ -130,7 +132,7 @@ def get_headers( def get_response( self, environ: WSGIEnvironment | WSGIRequest | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> Response: """Get a response object. If one was passed to the exception it's returned directly. @@ -312,7 +314,7 @@ def __init__( def get_headers( self, environ: WSGIEnvironment | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> list[tuple[str, str]]: headers = super().get_headers(environ, scope) if self.www_authenticate: @@ -376,7 +378,7 @@ def __init__( def get_headers( self, environ: WSGIEnvironment | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> list[tuple[str, str]]: headers = super().get_headers(environ, scope) if self.valid_methods: @@ -536,7 +538,7 @@ def __init__( def get_headers( self, environ: WSGIEnvironment | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> list[tuple[str, str]]: headers = super().get_headers(environ, scope) if self.length is not None: @@ -645,7 +647,7 @@ def __init__( def get_headers( self, environ: WSGIEnvironment | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> list[tuple[str, str]]: headers = super().get_headers(environ, scope) diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py index 5117a2673..ba84721e3 100644 --- a/src/werkzeug/formparser.py +++ b/src/werkzeug/formparser.py @@ -30,9 +30,12 @@ if t.TYPE_CHECKING: import typing as te + from _typeshed.wsgi import WSGIEnvironment - t_parse_result = t.Tuple[t.IO[bytes], MultiDict, MultiDict] + t_parse_result = t.Tuple[ + t.IO[bytes], MultiDict[str, str], MultiDict[str, FileStorage] + ] class TStreamFactory(te.Protocol): def __call__( @@ -41,8 +44,7 @@ def __call__( content_type: str | None, filename: str | None, content_length: int | None = None, - ) -> t.IO[bytes]: - ... + ) -> t.IO[bytes]: ... 
F = t.TypeVar("F", bound=t.Callable[..., t.Any]) @@ -69,7 +71,7 @@ def parse_form_data( stream_factory: TStreamFactory | None = None, max_form_memory_size: int | None = None, max_content_length: int | None = None, - cls: type[MultiDict] | None = None, + cls: type[MultiDict[str, t.Any]] | None = None, silent: bool = True, *, max_form_parts: int | None = None, @@ -170,7 +172,7 @@ def __init__( stream_factory: TStreamFactory | None = None, max_form_memory_size: int | None = None, max_content_length: int | None = None, - cls: type[MultiDict] | None = None, + cls: type[MultiDict[str, t.Any]] | None = None, silent: bool = True, *, max_form_parts: int | None = None, @@ -184,7 +186,7 @@ def __init__( self.max_form_parts = max_form_parts if cls is None: - cls = MultiDict + cls = t.cast("type[MultiDict[str, t.Any]]", MultiDict) self.cls = cls self.silent = silent @@ -296,7 +298,7 @@ def __init__( self, stream_factory: TStreamFactory | None = None, max_form_memory_size: int | None = None, - cls: type[MultiDict] | None = None, + cls: type[MultiDict[str, t.Any]] | None = None, buffer_size: int = 64 * 1024, max_form_parts: int | None = None, ) -> None: @@ -309,7 +311,7 @@ def __init__( self.stream_factory = stream_factory if cls is None: - cls = MultiDict + cls = t.cast("type[MultiDict[str, t.Any]]", MultiDict) self.cls = cls self.buffer_size = buffer_size diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py index 8280f51fa..27fa9af90 100644 --- a/src/werkzeug/http.py +++ b/src/werkzeug/http.py @@ -157,19 +157,19 @@ def quote_header_value(value: t.Any, allow_token: bool = True) -> str: .. versionadded:: 0.5 """ - value = str(value) + value_str = str(value) - if not value: + if not value_str: return '""' if allow_token: token_chars = _token_chars - if token_chars.issuperset(value): - return value + if token_chars.issuperset(value_str): + return value_str - value = value.replace("\\", "\\\\").replace('"', '\\"') - return f'"{value}"' + value_str = value_str.replace("\\", "\\\\").replace('"', '\\"') + return f'"{value_str}"' def unquote_header_value(value: str) -> str: @@ -553,13 +553,11 @@ def parse_options_header(value: str | None) -> tuple[str, dict[str, str]]: @t.overload -def parse_accept_header(value: str | None) -> ds.Accept: - ... +def parse_accept_header(value: str | None) -> ds.Accept: ... @t.overload -def parse_accept_header(value: str | None, cls: type[_TAnyAccept]) -> _TAnyAccept: - ... +def parse_accept_header(value: str | None, cls: type[_TAnyAccept]) -> _TAnyAccept: ... def parse_accept_header( @@ -616,26 +614,26 @@ def parse_accept_header( _TAnyCC = t.TypeVar("_TAnyCC", bound="ds.cache_control._CacheControl") -_t_cc_update = t.Optional[t.Callable[[_TAnyCC], None]] @t.overload def parse_cache_control_header( - value: str | None, on_update: _t_cc_update, cls: None = None -) -> ds.RequestCacheControl: - ... + value: str | None, + on_update: t.Callable[[ds.cache_control._CacheControl], None] | None = None, +) -> ds.RequestCacheControl: ... @t.overload def parse_cache_control_header( - value: str | None, on_update: _t_cc_update, cls: type[_TAnyCC] -) -> _TAnyCC: - ... + value: str | None, + on_update: t.Callable[[ds.cache_control._CacheControl], None] | None = None, + cls: type[_TAnyCC] = ..., +) -> _TAnyCC: ... def parse_cache_control_header( value: str | None, - on_update: _t_cc_update = None, + on_update: t.Callable[[ds.cache_control._CacheControl], None] | None = None, cls: type[_TAnyCC] | None = None, ) -> _TAnyCC: """Parse a cache control header. 
The RFC differs between response and @@ -655,7 +653,7 @@ def parse_cache_control_header( :return: a `cls` object. """ if cls is None: - cls = t.cast(t.Type[_TAnyCC], ds.RequestCacheControl) + cls = t.cast("type[_TAnyCC]", ds.RequestCacheControl) if not value: return cls((), on_update) @@ -664,26 +662,26 @@ def parse_cache_control_header( _TAnyCSP = t.TypeVar("_TAnyCSP", bound="ds.ContentSecurityPolicy") -_t_csp_update = t.Optional[t.Callable[[_TAnyCSP], None]] @t.overload def parse_csp_header( - value: str | None, on_update: _t_csp_update, cls: None = None -) -> ds.ContentSecurityPolicy: - ... + value: str | None, + on_update: t.Callable[[ds.ContentSecurityPolicy], None] | None = None, +) -> ds.ContentSecurityPolicy: ... @t.overload def parse_csp_header( - value: str | None, on_update: _t_csp_update, cls: type[_TAnyCSP] -) -> _TAnyCSP: - ... + value: str | None, + on_update: t.Callable[[ds.ContentSecurityPolicy], None] | None = None, + cls: type[_TAnyCSP] = ..., +) -> _TAnyCSP: ... def parse_csp_header( value: str | None, - on_update: _t_csp_update = None, + on_update: t.Callable[[ds.ContentSecurityPolicy], None] | None = None, cls: type[_TAnyCSP] | None = None, ) -> _TAnyCSP: """Parse a Content Security Policy header. @@ -699,7 +697,7 @@ def parse_csp_header( :return: a `cls` object. """ if cls is None: - cls = t.cast(t.Type[_TAnyCSP], ds.ContentSecurityPolicy) + cls = t.cast("type[_TAnyCSP]", ds.ContentSecurityPolicy) if value is None: return cls((), on_update) @@ -1160,7 +1158,7 @@ def is_hop_by_hop_header(header: str) -> bool: def parse_cookie( header: WSGIEnvironment | str | None, - cls: type[ds.MultiDict] | None = None, + cls: type[ds.MultiDict[str, str]] | None = None, ) -> ds.MultiDict[str, str]: """Parse a cookie from a string or WSGI environ. diff --git a/src/werkzeug/local.py b/src/werkzeug/local.py index 525ac0c80..302589bba 100644 --- a/src/werkzeug/local.py +++ b/src/werkzeug/local.py @@ -20,7 +20,7 @@ F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -def release_local(local: Local | LocalStack) -> None: +def release_local(local: Local | LocalStack[t.Any]) -> None: """Release the data for the current context in a :class:`Local` or :class:`LocalStack` without using a :class:`LocalManager`. @@ -64,7 +64,9 @@ def __init__(self, context_var: ContextVar[dict[str, t.Any]] | None = None) -> N def __iter__(self) -> t.Iterator[tuple[str, t.Any]]: return iter(self.__storage.get({}).items()) - def __call__(self, name: str, *, unbound_message: str | None = None) -> LocalProxy: + def __call__( + self, name: str, *, unbound_message: str | None = None + ) -> LocalProxy[t.Any]: """Create a :class:`LocalProxy` that access an attribute on this local namespace. @@ -169,7 +171,7 @@ def top(self) -> T | None: def __call__( self, name: str | None = None, *, unbound_message: str | None = None - ) -> LocalProxy: + ) -> LocalProxy[t.Any]: """Create a :class:`LocalProxy` that accesses the top of this local stack. 
@@ -205,7 +207,8 @@ class LocalManager: def __init__( self, - locals: None | (Local | LocalStack | t.Iterable[Local | LocalStack]) = None, + locals: None + | (Local | LocalStack[t.Any] | t.Iterable[Local | LocalStack[t.Any]]) = None, ) -> None: if locals is None: self.locals = [] @@ -269,23 +272,27 @@ class _ProxyLookup: def __init__( self, - f: t.Callable | None = None, - fallback: t.Callable | None = None, + f: t.Callable[..., t.Any] | None = None, + fallback: t.Callable[[LocalProxy[t.Any]], t.Any] | None = None, class_value: t.Any | None = None, is_attr: bool = False, ) -> None: - bind_f: t.Callable[[LocalProxy, t.Any], t.Callable] | None + bind_f: t.Callable[[LocalProxy[t.Any], t.Any], t.Callable[..., t.Any]] | None if hasattr(f, "__get__"): # A Python function, can be turned into a bound method. - def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable: + def bind_f( + instance: LocalProxy[t.Any], obj: t.Any + ) -> t.Callable[..., t.Any]: return f.__get__(obj, type(obj)) # type: ignore elif f is not None: # A C function, use partial to bind the first argument. - def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable: + def bind_f( + instance: LocalProxy[t.Any], obj: t.Any + ) -> t.Callable[..., t.Any]: return partial(f, obj) else: @@ -297,10 +304,10 @@ def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable: self.class_value = class_value self.is_attr = is_attr - def __set_name__(self, owner: LocalProxy, name: str) -> None: + def __set_name__(self, owner: LocalProxy[t.Any], name: str) -> None: self.name = name - def __get__(self, instance: LocalProxy, owner: type | None = None) -> t.Any: + def __get__(self, instance: LocalProxy[t.Any], owner: type | None = None) -> t.Any: if instance is None: if self.class_value is not None: return self.class_value @@ -330,7 +337,9 @@ def __get__(self, instance: LocalProxy, owner: type | None = None) -> t.Any: def __repr__(self) -> str: return f"proxy {self.name}" - def __call__(self, instance: LocalProxy, *args: t.Any, **kwargs: t.Any) -> t.Any: + def __call__( + self, instance: LocalProxy[t.Any], *args: t.Any, **kwargs: t.Any + ) -> t.Any: """Support calling unbound methods from the class. For example, this happens with ``copy.copy``, which does ``type(x).__copy__(x)``. 
``type(x)`` can't be proxied, so it @@ -347,12 +356,14 @@ class _ProxyIOp(_ProxyLookup): __slots__ = () def __init__( - self, f: t.Callable | None = None, fallback: t.Callable | None = None + self, + f: t.Callable[..., t.Any] | None = None, + fallback: t.Callable[[LocalProxy[t.Any]], t.Any] | None = None, ) -> None: super().__init__(f, fallback) - def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable: - def i_op(self: t.Any, other: t.Any) -> LocalProxy: + def bind_f(instance: LocalProxy[t.Any], obj: t.Any) -> t.Callable[..., t.Any]: + def i_op(self: t.Any, other: t.Any) -> LocalProxy[t.Any]: f(self, other) # type: ignore return instance @@ -520,32 +531,33 @@ def _get_current_object() -> T: object.__setattr__(self, "_LocalProxy__wrapped", local) object.__setattr__(self, "_get_current_object", _get_current_object) - __doc__ = _ProxyLookup( # type: ignore + __doc__ = _ProxyLookup( # type: ignore[assignment] class_value=__doc__, fallback=lambda self: type(self).__doc__, is_attr=True ) __wrapped__ = _ProxyLookup( - fallback=lambda self: self._LocalProxy__wrapped, is_attr=True + fallback=lambda self: self._LocalProxy__wrapped, # type: ignore[attr-defined] + is_attr=True, ) # __del__ should only delete the proxy - __repr__ = _ProxyLookup( # type: ignore + __repr__ = _ProxyLookup( # type: ignore[assignment] repr, fallback=lambda self: f"<{type(self).__name__} unbound>" ) - __str__ = _ProxyLookup(str) # type: ignore + __str__ = _ProxyLookup(str) # type: ignore[assignment] __bytes__ = _ProxyLookup(bytes) - __format__ = _ProxyLookup() # type: ignore + __format__ = _ProxyLookup() # type: ignore[assignment] __lt__ = _ProxyLookup(operator.lt) __le__ = _ProxyLookup(operator.le) - __eq__ = _ProxyLookup(operator.eq) # type: ignore - __ne__ = _ProxyLookup(operator.ne) # type: ignore + __eq__ = _ProxyLookup(operator.eq) # type: ignore[assignment] + __ne__ = _ProxyLookup(operator.ne) # type: ignore[assignment] __gt__ = _ProxyLookup(operator.gt) __ge__ = _ProxyLookup(operator.ge) - __hash__ = _ProxyLookup(hash) # type: ignore + __hash__ = _ProxyLookup(hash) # type: ignore[assignment] __bool__ = _ProxyLookup(bool, fallback=lambda self: False) __getattr__ = _ProxyLookup(getattr) # __getattribute__ triggered through __getattr__ - __setattr__ = _ProxyLookup(setattr) # type: ignore - __delattr__ = _ProxyLookup(delattr) # type: ignore - __dir__ = _ProxyLookup(dir, fallback=lambda self: []) # type: ignore + __setattr__ = _ProxyLookup(setattr) # type: ignore[assignment] + __delattr__ = _ProxyLookup(delattr) # type: ignore[assignment] + __dir__ = _ProxyLookup(dir, fallback=lambda self: []) # type: ignore[assignment] # __get__ (proxying descriptor not supported) # __set__ (descriptor) # __delete__ (descriptor) diff --git a/src/werkzeug/middleware/dispatcher.py b/src/werkzeug/middleware/dispatcher.py index 559fea585..e11bacc52 100644 --- a/src/werkzeug/middleware/dispatcher.py +++ b/src/werkzeug/middleware/dispatcher.py @@ -30,6 +30,7 @@ :copyright: 2007 Pallets :license: BSD-3-Clause """ + from __future__ import annotations import typing as t diff --git a/src/werkzeug/middleware/http_proxy.py b/src/werkzeug/middleware/http_proxy.py index 59ba9b324..5e239156a 100644 --- a/src/werkzeug/middleware/http_proxy.py +++ b/src/werkzeug/middleware/http_proxy.py @@ -7,6 +7,7 @@ :copyright: 2007 Pallets :license: BSD-3-Clause """ + from __future__ import annotations import typing as t diff --git a/src/werkzeug/middleware/lint.py b/src/werkzeug/middleware/lint.py index 8c858673b..de93b526a 100644 --- 
a/src/werkzeug/middleware/lint.py +++ b/src/werkzeug/middleware/lint.py @@ -12,6 +12,7 @@ :copyright: 2007 Pallets :license: BSD-3-Clause """ + from __future__ import annotations import typing as t @@ -340,10 +341,10 @@ def check_start_response( if exc_info is not None and not isinstance(exc_info, tuple): warn("Invalid value for exc_info.", WSGIWarning, stacklevel=3) - headers = Headers(headers) - self.check_headers(headers) + headers_obj = Headers(headers) + self.check_headers(headers_obj) - return status_code, headers + return status_code, headers_obj def check_headers(self, headers: Headers) -> None: etag = headers.get("etag") @@ -424,8 +425,8 @@ def checking_start_response( status: str = args[0] headers: list[tuple[str, str]] = args[1] - exc_info: None | ( - tuple[type[BaseException], BaseException, TracebackType] + exc_info: ( + None | (tuple[type[BaseException], BaseException, TracebackType]) ) = args[2] if len(args) == 3 else None headers_set[:] = self.check_start_response(status, headers, exc_info) diff --git a/src/werkzeug/middleware/profiler.py b/src/werkzeug/middleware/profiler.py index 1120c83ef..112b87776 100644 --- a/src/werkzeug/middleware/profiler.py +++ b/src/werkzeug/middleware/profiler.py @@ -11,6 +11,7 @@ :copyright: 2007 Pallets :license: BSD-3-Clause """ + from __future__ import annotations import os.path diff --git a/src/werkzeug/middleware/proxy_fix.py b/src/werkzeug/middleware/proxy_fix.py index 8dfbb36c0..cbf4e0bae 100644 --- a/src/werkzeug/middleware/proxy_fix.py +++ b/src/werkzeug/middleware/proxy_fix.py @@ -21,6 +21,7 @@ :copyright: 2007 Pallets :license: BSD-3-Clause """ + from __future__ import annotations import typing as t diff --git a/src/werkzeug/middleware/shared_data.py b/src/werkzeug/middleware/shared_data.py index e3ec7cab8..0a0c95675 100644 --- a/src/werkzeug/middleware/shared_data.py +++ b/src/werkzeug/middleware/shared_data.py @@ -8,6 +8,7 @@ :copyright: 2007 Pallets :license: BSD-3-Clause """ + from __future__ import annotations import importlib.util @@ -38,7 +39,6 @@ class SharedDataMiddleware: - """A WSGI middleware which provides static content for development environments or simple server setups. Its usage is quite simple:: @@ -218,9 +218,9 @@ def loader( return loader def generate_etag(self, mtime: datetime, file_size: int, real_filename: str) -> str: - real_filename = os.fsencode(real_filename) + fn_str = os.fsencode(real_filename) timestamp = mtime.timestamp() - checksum = adler32(real_filename) & 0xFFFFFFFF + checksum = adler32(fn_str) & 0xFFFFFFFF return f"wzsdm-{timestamp}-{file_size}-{checksum}" def __call__( diff --git a/src/werkzeug/routing/__init__.py b/src/werkzeug/routing/__init__.py index 84b043fdf..62adc48fb 100644 --- a/src/werkzeug/routing/__init__.py +++ b/src/werkzeug/routing/__init__.py @@ -105,6 +105,7 @@ routing tried to match a ``POST`` request) a ``MethodNotAllowed`` exception is raised. 
""" + from .converters import AnyConverter as AnyConverter from .converters import BaseConverter as BaseConverter from .converters import FloatConverter as FloatConverter diff --git a/src/werkzeug/routing/converters.py b/src/werkzeug/routing/converters.py index ce01dd1ea..6016a975e 100644 --- a/src/werkzeug/routing/converters.py +++ b/src/werkzeug/routing/converters.py @@ -131,7 +131,7 @@ class NumberConverter(BaseConverter): """ weight = 50 - num_convert: t.Callable = int + num_convert: t.Callable[[t.Any], t.Any] = int def __init__( self, @@ -152,18 +152,18 @@ def __init__( def to_python(self, value: str) -> t.Any: if self.fixed_digits and len(value) != self.fixed_digits: raise ValidationError() - value = self.num_convert(value) - if (self.min is not None and value < self.min) or ( - self.max is not None and value > self.max + value_num = self.num_convert(value) + if (self.min is not None and value_num < self.min) or ( + self.max is not None and value_num > self.max ): raise ValidationError() - return value + return value_num def to_url(self, value: t.Any) -> str: - value = str(self.num_convert(value)) + value_str = str(self.num_convert(value)) if self.fixed_digits: - value = value.zfill(self.fixed_digits) - return value + value_str = value_str.zfill(self.fixed_digits) + return value_str @property def signed_regex(self) -> str: diff --git a/src/werkzeug/routing/exceptions.py b/src/werkzeug/routing/exceptions.py index 9d0a5281b..b63fe5b9c 100644 --- a/src/werkzeug/routing/exceptions.py +++ b/src/werkzeug/routing/exceptions.py @@ -10,10 +10,11 @@ if t.TYPE_CHECKING: from _typeshed.wsgi import WSGIEnvironment - from .map import MapAdapter - from .rules import Rule + from ..wrappers.request import Request from ..wrappers.response import Response + from .map import MapAdapter + from .rules import Rule class RoutingException(Exception): @@ -40,7 +41,7 @@ def __init__(self, new_url: str) -> None: def get_response( self, environ: WSGIEnvironment | Request | None = None, - scope: dict | None = None, + scope: dict[str, t.Any] | None = None, ) -> Response: return redirect(self.new_url, self.code) diff --git a/src/werkzeug/routing/map.py b/src/werkzeug/routing/map.py index 87b83a54c..73671bf94 100644 --- a/src/werkzeug/routing/map.py +++ b/src/werkzeug/routing/map.py @@ -32,9 +32,10 @@ if t.TYPE_CHECKING: from _typeshed.wsgi import WSGIApplication from _typeshed.wsgi import WSGIEnvironment + + from ..wrappers.request import Request from .converters import BaseConverter from .rules import RuleFactory - from ..wrappers.request import Request class Map: @@ -144,9 +145,9 @@ def is_endpoint_expecting(self, endpoint: str, *arguments: str) -> bool: checked. """ self.update() - arguments = set(arguments) + arguments_set = set(arguments) for rule in self._rules_by_endpoint[endpoint]: - if arguments.issubset(rule.arguments): + if arguments_set.issubset(rule.arguments): return True return False @@ -379,7 +380,6 @@ def __repr__(self) -> str: class MapAdapter: - """Returned by :meth:`Map.bind` or :meth:`Map.bind_to_environ` and does the URL matching and building based on runtime information. """ @@ -477,8 +477,7 @@ def match( # type: ignore return_rule: t.Literal[False] = False, query_args: t.Mapping[str, t.Any] | str | None = None, websocket: bool | None = None, - ) -> tuple[str, t.Mapping[str, t.Any]]: - ... + ) -> tuple[str, t.Mapping[str, t.Any]]: ... 
@t.overload def match( @@ -488,8 +487,7 @@ def match( return_rule: t.Literal[True] = True, query_args: t.Mapping[str, t.Any] | str | None = None, websocket: bool | None = None, - ) -> tuple[Rule, t.Mapping[str, t.Any]]: - ... + ) -> tuple[Rule, t.Mapping[str, t.Any]]: ... def match( self, diff --git a/src/werkzeug/routing/rules.py b/src/werkzeug/routing/rules.py index 7029d8bc0..75323357c 100644 --- a/src/werkzeug/routing/rules.py +++ b/src/werkzeug/routing/rules.py @@ -912,6 +912,6 @@ def __repr__(self) -> str: parts.append(f"<{data}>") else: parts.append(data) - parts = "".join(parts).lstrip("|") + parts_str = "".join(parts).lstrip("|") methods = f" ({', '.join(self.methods)})" if self.methods is not None else "" - return f"<{type(self).__name__} {parts!r}{methods} -> {self.endpoint}>" + return f"<{type(self).__name__} {parts_str!r}{methods} -> {self.endpoint}>" diff --git a/src/werkzeug/sansio/http.py b/src/werkzeug/sansio/http.py index e3cd3330c..b2b887799 100644 --- a/src/werkzeug/sansio/http.py +++ b/src/werkzeug/sansio/http.py @@ -122,7 +122,7 @@ def _cookie_unslash_replace(m: t.Match[bytes]) -> bytes: def parse_cookie( cookie: str | None = None, - cls: type[ds.MultiDict] | None = None, + cls: type[ds.MultiDict[str, str]] | None = None, ) -> ds.MultiDict[str, str]: """Parse a cookie from a string. @@ -141,7 +141,7 @@ def parse_cookie( .. versionadded:: 2.2 """ if cls is None: - cls = ds.MultiDict + cls = t.cast("type[ds.MultiDict[str, str]]", ds.MultiDict) if not cookie: return cls() diff --git a/src/werkzeug/sansio/request.py b/src/werkzeug/sansio/request.py index 41c9b18a1..dd0805d71 100644 --- a/src/werkzeug/sansio/request.py +++ b/src/werkzeug/sansio/request.py @@ -1,5 +1,6 @@ from __future__ import annotations +import typing as t from datetime import datetime from urllib.parse import parse_qsl @@ -73,7 +74,7 @@ class Request: #: possible to use mutable structures, but this is not recommended. #: #: .. versionadded:: 0.6 - parameter_storage_class: type[MultiDict] = ImmutableMultiDict + parameter_storage_class: type[MultiDict[str, t.Any]] = ImmutableMultiDict #: The type to be used for dict values from the incoming WSGI #: environment. (For example for :attr:`cookies`.) By default an @@ -83,14 +84,14 @@ class Request: #: Changed to ``ImmutableMultiDict`` to support multiple values. #: #: .. versionadded:: 0.6 - dict_storage_class: type[MultiDict] = ImmutableMultiDict + dict_storage_class: type[MultiDict[str, t.Any]] = ImmutableMultiDict #: the type to be used for list values from the incoming WSGI environment. #: By default an :class:`~werkzeug.datastructures.ImmutableList` is used #: (for example for :attr:`access_list`). #: #: .. 
versionadded:: 0.6 - list_storage_class: type[list] = ImmutableList + list_storage_class: type[list[t.Any]] = ImmutableList user_agent_class: type[UserAgent] = UserAgent """The class used and returned by the :attr:`user_agent` property to diff --git a/src/werkzeug/sansio/response.py b/src/werkzeug/sansio/response.py index 271974ecf..9093b0a8c 100644 --- a/src/werkzeug/sansio/response.py +++ b/src/werkzeug/sansio/response.py @@ -6,32 +6,35 @@ from datetime import timezone from http import HTTPStatus +from ..datastructures import CallbackDict +from ..datastructures import ContentRange +from ..datastructures import ContentSecurityPolicy from ..datastructures import Headers from ..datastructures import HeaderSet +from ..datastructures import ResponseCacheControl +from ..datastructures import WWWAuthenticate +from ..http import COEP +from ..http import COOP +from ..http import dump_age from ..http import dump_cookie +from ..http import dump_header +from ..http import dump_options_header +from ..http import http_date from ..http import HTTP_STATUS_CODES +from ..http import parse_age +from ..http import parse_cache_control_header +from ..http import parse_content_range_header +from ..http import parse_csp_header +from ..http import parse_date +from ..http import parse_options_header +from ..http import parse_set_header +from ..http import quote_etag +from ..http import unquote_etag from ..utils import get_content_type -from werkzeug.datastructures import CallbackDict -from werkzeug.datastructures import ContentRange -from werkzeug.datastructures import ContentSecurityPolicy -from werkzeug.datastructures import ResponseCacheControl -from werkzeug.datastructures import WWWAuthenticate -from werkzeug.http import COEP -from werkzeug.http import COOP -from werkzeug.http import dump_age -from werkzeug.http import dump_header -from werkzeug.http import dump_options_header -from werkzeug.http import http_date -from werkzeug.http import parse_age -from werkzeug.http import parse_cache_control_header -from werkzeug.http import parse_content_range_header -from werkzeug.http import parse_csp_header -from werkzeug.http import parse_date -from werkzeug.http import parse_options_header -from werkzeug.http import parse_set_header -from werkzeug.http import quote_etag -from werkzeug.http import unquote_etag -from werkzeug.utils import header_property +from ..utils import header_property + +if t.TYPE_CHECKING: + from ..datastructures.cache_control import _CacheControl def _set_property(name: str, doc: str | None = None) -> property: @@ -305,7 +308,7 @@ def mimetype_params(self) -> dict[str, str]: .. versionadded:: 0.5 """ - def on_update(d: CallbackDict) -> None: + def on_update(d: CallbackDict[str, str]) -> None: self.headers["Content-Type"] = dump_options_header(self.mimetype, d) d = parse_options_header(self.headers.get("content-type", ""))[1] @@ -480,7 +483,7 @@ def cache_control(self) -> ResponseCacheControl: request/response chain. 
""" - def on_update(cache_control: ResponseCacheControl) -> None: + def on_update(cache_control: _CacheControl) -> None: if not cache_control and "cache-control" in self.headers: del self.headers["cache-control"] elif cache_control: diff --git a/src/werkzeug/security.py b/src/werkzeug/security.py index 587a3cc74..9999509d1 100644 --- a/src/werkzeug/security.py +++ b/src/werkzeug/security.py @@ -24,8 +24,8 @@ def gen_salt(length: int) -> str: def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]: method, *args = method.split(":") - salt = salt.encode() - password = password.encode() + salt_bytes = salt.encode() + password_bytes = password.encode() if method == "scrypt": if not args: @@ -40,7 +40,9 @@ def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]: maxmem = 132 * n * r * p # ideally 128, but some extra seems needed return ( - hashlib.scrypt(password, salt=salt, n=n, r=r, p=p, maxmem=maxmem).hex(), + hashlib.scrypt( + password_bytes, salt=salt_bytes, n=n, r=r, p=p, maxmem=maxmem + ).hex(), f"scrypt:{n}:{r}:{p}", ) elif method == "pbkdf2": @@ -59,7 +61,9 @@ def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]: raise ValueError("'pbkdf2' takes 2 arguments.") return ( - hashlib.pbkdf2_hmac(hash_name, password, salt, iterations).hex(), + hashlib.pbkdf2_hmac( + hash_name, password_bytes, salt_bytes, iterations + ).hex(), f"pbkdf2:{hash_name}:{iterations}", ) else: diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py index ff5eb8c66..ad6bf911b 100644 --- a/src/werkzeug/serving.py +++ b/src/werkzeug/serving.py @@ -11,6 +11,7 @@ from myapp import create_app from werkzeug import run_simple """ + from __future__ import annotations import errno @@ -496,10 +497,10 @@ def generate_adhoc_ssl_pair( ) -> tuple[Certificate, RSAPrivateKeyWithSerialization]: try: from cryptography import x509 - from cryptography.x509.oid import NameOID from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import rsa + from cryptography.x509.oid import NameOID except ImportError: raise TypeError( "Using ad-hoc certificates requires the cryptography library." 
@@ -583,8 +584,8 @@ def make_ssl_devcert( def generate_adhoc_ssl_context() -> ssl.SSLContext: """Generates an adhoc SSL context for the development server.""" - import tempfile import atexit + import tempfile cert, pkey = generate_adhoc_ssl_pair() diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py index 694e5d8e5..38f69bfb9 100644 --- a/src/werkzeug/test.py +++ b/src/werkzeug/test.py @@ -46,9 +46,9 @@ from .wsgi import get_current_url if t.TYPE_CHECKING: + import typing_extensions as te from _typeshed.wsgi import WSGIApplication from _typeshed.wsgi import WSGIEnvironment - import typing_extensions as te def stream_encode_multipart( @@ -172,7 +172,7 @@ def _iter_data(data: t.Mapping[str, t.Any]) -> t.Iterator[tuple[str, t.Any]]: yield key, value -_TAnyMultiDict = t.TypeVar("_TAnyMultiDict", bound=MultiDict) +_TAnyMultiDict = t.TypeVar("_TAnyMultiDict", bound="MultiDict[t.Any, t.Any]") class EnvironBuilder: @@ -289,10 +289,10 @@ class EnvironBuilder: json_dumps = staticmethod(json.dumps) del json - _args: MultiDict | None + _args: MultiDict[str, str] | None _query_string: str | None _input_stream: t.IO[bytes] | None - _form: MultiDict | None + _form: MultiDict[str, str] | None _files: FileMultiDict | None def __init__( @@ -506,7 +506,7 @@ def mimetype_params(self) -> t.Mapping[str, str]: .. versionadded:: 0.14 """ - def on_update(d: CallbackDict) -> None: + def on_update(d: CallbackDict[str, str]) -> None: self.headers["Content-Type"] = dump_options_header(self.mimetype, d) d = parse_options_header(self.headers.get("content-type", ""))[1] @@ -545,7 +545,7 @@ def _get_form(self, name: str, storage: type[_TAnyMultiDict]) -> _TAnyMultiDict: return rv # type: ignore - def _set_form(self, name: str, value: MultiDict) -> None: + def _set_form(self, name: str, value: MultiDict[str, t.Any]) -> None: """Common behavior for setting the :attr:`form` and :attr:`files` properties. @@ -556,12 +556,12 @@ def _set_form(self, name: str, value: MultiDict) -> None: setattr(self, name, value) @property - def form(self) -> MultiDict: + def form(self) -> MultiDict[str, str]: """A :class:`MultiDict` of form values.""" return self._get_form("_form", MultiDict) @form.setter - def form(self, value: MultiDict) -> None: + def form(self, value: MultiDict[str, str]) -> None: self._set_form("_form", value) @property @@ -607,7 +607,7 @@ def query_string(self, value: str | None) -> None: self._args = None @property - def args(self) -> MultiDict: + def args(self) -> MultiDict[str, str]: """The URL arguments as :class:`MultiDict`.""" if self._query_string is not None: raise AttributeError("a query string is defined") @@ -616,7 +616,7 @@ def args(self) -> MultiDict: return self._args @args.setter - def args(self, value: MultiDict | None) -> None: + def args(self, value: MultiDict[str, str] | None) -> None: self._query_string = None self._args = value @@ -1113,8 +1113,8 @@ def open( finally: builder.close() - response = self.run_wsgi_app(request.environ, buffered=buffered) - response = self.response_wrapper(*response, request=request) + response_parts = self.run_wsgi_app(request.environ, buffered=buffered) + response = self.response_wrapper(*response_parts, request=request) redirects = set() history: list[TestResponse] = [] diff --git a/src/werkzeug/testapp.py b/src/werkzeug/testapp.py index 57f1f6fdf..cdf7fac1a 100644 --- a/src/werkzeug/testapp.py +++ b/src/werkzeug/testapp.py @@ -1,8 +1,10 @@ """A small application that can be used to test a WSGI server and check it for WSGI compliance. 
""" + from __future__ import annotations +import importlib.metadata import os import sys import typing as t @@ -10,7 +12,6 @@ from markupsafe import escape -from . import __version__ as _werkzeug_version from .wrappers.request import Request from .wrappers.response import Response @@ -153,13 +154,13 @@ def test_app(req: Request) -> Response: sys_path = [] for item, virtual, expanded in iter_sys_path(): - class_ = [] + css = [] if virtual: - class_.append("virtual") + css.append("virtual") if expanded: - class_.append("exp") - class_ = f' class="{" ".join(class_)}"' if class_ else "" - sys_path.append(f"{escape(item)}") + css.append("exp") + class_str = f' class="{" ".join(css)}"' if css else "" + sys_path.append(f"{escape(item)}") context = { "python_version": "
".join(escape(sys.version).splitlines()), @@ -167,7 +168,7 @@ def test_app(req: Request) -> Response: "os": escape(os.name), "api_version": sys.api_version, "byteorder": sys.byteorder, - "werkzeug_version": _werkzeug_version, + "werkzeug_version": _get_werkzeug_version(), "python_eggs": "\n".join(python_eggs), "wsgi_env": "\n".join(wsgi_env), "sys_path": "\n".join(sys_path), @@ -175,6 +176,18 @@ def test_app(req: Request) -> Response: return Response(TEMPLATE % context, mimetype="text/html") +_werkzeug_version = "" + + +def _get_werkzeug_version() -> str: + global _werkzeug_version + + if not _werkzeug_version: + _werkzeug_version = importlib.metadata.version("werkzeug") + + return _werkzeug_version + + if __name__ == "__main__": from .serving import run_simple diff --git a/src/werkzeug/utils.py b/src/werkzeug/utils.py index 32ca9dad6..59b97b732 100644 --- a/src/werkzeug/utils.py +++ b/src/werkzeug/utils.py @@ -26,6 +26,7 @@ if t.TYPE_CHECKING: from _typeshed.wsgi import WSGIEnvironment + from .wrappers.request import Request from .wrappers.response import Response @@ -316,7 +317,7 @@ def append_slash_redirect(environ: WSGIEnvironment, code: int = 308) -> Response def send_file( - path_or_file: os.PathLike | str | t.IO[bytes], + path_or_file: os.PathLike[str] | str | t.IO[bytes], environ: WSGIEnvironment, mimetype: str | None = None, as_attachment: bool = False, @@ -327,7 +328,7 @@ def send_file( max_age: None | (int | t.Callable[[str | None], int | None]) = None, use_x_sendfile: bool = False, response_class: type[Response] | None = None, - _root_path: os.PathLike | str | None = None, + _root_path: os.PathLike[str] | str | None = None, ) -> Response: """Send the contents of a file to the client. @@ -415,7 +416,7 @@ def send_file( if isinstance(path_or_file, (os.PathLike, str)) or hasattr( path_or_file, "__fspath__" ): - path_or_file = t.cast(t.Union[os.PathLike, str], path_or_file) + path_or_file = t.cast("t.Union[os.PathLike[str], str]", path_or_file) # Flask will pass app.root_path, allowing its send_file wrapper # to not have to deal with paths. @@ -535,8 +536,8 @@ def send_file( def send_from_directory( - directory: os.PathLike | str, - path: os.PathLike | str, + directory: os.PathLike[str] | str, + path: os.PathLike[str] | str, environ: WSGIEnvironment, **kwargs: t.Any, ) -> Response: @@ -560,20 +561,20 @@ def send_from_directory( .. versionadded:: 2.0 Adapted from Flask's implementation. """ - path = safe_join(os.fspath(directory), os.fspath(path)) + path_str = safe_join(os.fspath(directory), os.fspath(path)) - if path is None: + if path_str is None: raise NotFound() # Flask will pass app.root_path, allowing its send_from_directory # wrapper to not have to deal with paths. 
if "_root_path" in kwargs: - path = os.path.join(kwargs["_root_path"], path) + path_str = os.path.join(kwargs["_root_path"], path_str) - if not os.path.isfile(path): + if not os.path.isfile(path_str): raise NotFound() - return send_file(path, environ, **kwargs) + return send_file(path_str, environ, **kwargs) def import_string(import_name: str, silent: bool = False) -> t.Any: diff --git a/src/werkzeug/wrappers/request.py b/src/werkzeug/wrappers/request.py index 25b091691..38053c210 100644 --- a/src/werkzeug/wrappers/request.py +++ b/src/werkzeug/wrappers/request.py @@ -1,5 +1,6 @@ from __future__ import annotations +import collections.abc as cabc import functools import json import typing as t @@ -182,13 +183,13 @@ def my_wsgi_app(request): from ..exceptions import HTTPException @functools.wraps(f) - def application(*args): # type: ignore + def application(*args: t.Any) -> cabc.Iterable[bytes]: request = cls(args[-2]) with request: try: resp = f(*args[:-2] + (request,)) except HTTPException as e: - resp = e.get_response(args[-2]) + resp = t.cast("WSGIApplication", e.get_response(args[-2])) return resp(*args[-2:]) return t.cast("WSGIApplication", application) @@ -374,8 +375,7 @@ def get_data( # type: ignore cache: bool = True, as_text: t.Literal[False] = False, parse_form_data: bool = False, - ) -> bytes: - ... + ) -> bytes: ... @t.overload def get_data( @@ -383,8 +383,7 @@ def get_data( cache: bool = True, as_text: t.Literal[True] = ..., parse_form_data: bool = False, - ) -> str: - ... + ) -> str: ... def get_data( self, cache: bool = True, as_text: bool = False, parse_form_data: bool = False @@ -564,14 +563,12 @@ def json(self) -> t.Any | None: @t.overload def get_json( self, force: bool = ..., silent: t.Literal[False] = ..., cache: bool = ... - ) -> t.Any: - ... + ) -> t.Any: ... @t.overload def get_json( self, force: bool = ..., silent: bool = ..., cache: bool = ... - ) -> t.Any | None: - ... + ) -> t.Any | None: ... def get_json( self, force: bool = False, silent: bool = False, cache: bool = True diff --git a/src/werkzeug/wrappers/response.py b/src/werkzeug/wrappers/response.py index ee5c69478..7b666e3e8 100644 --- a/src/werkzeug/wrappers/response.py +++ b/src/werkzeug/wrappers/response.py @@ -5,26 +5,27 @@ from http import HTTPStatus from urllib.parse import urljoin +from .._internal import _get_environ from ..datastructures import Headers +from ..http import generate_etag +from ..http import http_date +from ..http import is_resource_modified +from ..http import parse_etags +from ..http import parse_range_header from ..http import remove_entity_headers from ..sansio.response import Response as _SansIOResponse from ..urls import _invalid_iri_to_uri from ..urls import iri_to_uri from ..utils import cached_property +from ..wsgi import _RangeWrapper from ..wsgi import ClosingIterator from ..wsgi import get_current_url -from werkzeug._internal import _get_environ -from werkzeug.http import generate_etag -from werkzeug.http import http_date -from werkzeug.http import is_resource_modified -from werkzeug.http import parse_etags -from werkzeug.http import parse_range_header -from werkzeug.wsgi import _RangeWrapper if t.TYPE_CHECKING: from _typeshed.wsgi import StartResponse from _typeshed.wsgi import WSGIApplication from _typeshed.wsgi import WSGIEnvironment + from .request import Request @@ -260,12 +261,10 @@ def from_app( return cls(*run_wsgi_app(app, environ, buffered)) @t.overload - def get_data(self, as_text: t.Literal[False] = False) -> bytes: - ... 
+ def get_data(self, as_text: t.Literal[False] = False) -> bytes: ... @t.overload - def get_data(self, as_text: t.Literal[True]) -> str: - ... + def get_data(self, as_text: t.Literal[True]) -> str: ... def get_data(self, as_text: bool = False) -> bytes | str: """The string representation of the response body. Whenever you call @@ -595,12 +594,10 @@ def json(self) -> t.Any | None: return self.get_json() @t.overload - def get_json(self, force: bool = ..., silent: t.Literal[False] = ...) -> t.Any: - ... + def get_json(self, force: bool = ..., silent: t.Literal[False] = ...) -> t.Any: ... @t.overload - def get_json(self, force: bool = ..., silent: bool = ...) -> t.Any | None: - ... + def get_json(self, force: bool = ..., silent: bool = ...) -> t.Any | None: ... def get_json(self, force: bool = False, silent: bool = False) -> t.Any | None: """Parse :attr:`data` as JSON. Useful during testing. diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 91ad1a7ce..ad20b3f8b 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -7,7 +7,8 @@ from werkzeug import exceptions from werkzeug.datastructures import Headers from werkzeug.datastructures import WWWAuthenticate -from werkzeug.exceptions import default_exceptions, HTTPException +from werkzeug.exceptions import default_exceptions +from werkzeug.exceptions import HTTPException from werkzeug.wrappers import Response diff --git a/tests/test_utils.py b/tests/test_utils.py index b7f1bcb1a..c48eba556 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -176,6 +176,7 @@ def test_assign(): def test_import_string(): from datetime import date + from werkzeug.debug import DebuggedApplication assert utils.import_string("datetime.date") is date From 5b5b1f10b366f67834e3e9b17b12e5787fcc1793 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 8 Apr 2024 15:15:17 -0700 Subject: [PATCH 065/159] start version 3.1.0 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f54060f6e..d5d76e909 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.0.2" +version = "3.1.0.dev" description = "The comprehensive WSGI web application library." 
readme = "README.md" license = {file = "LICENSE.txt"} From d6c2fe14682c95ba08921d3474f4f6527d471fe2 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 8 Apr 2024 15:21:59 -0700 Subject: [PATCH 066/159] ignore pytest-xprocess until python3.9 --- .github/dependabot.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index fa94b770a..6c110ae05 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -18,6 +18,9 @@ updates: directory: /requirements/ schedule: interval: monthly + ignore: + - dependency-name: pytest-xprocess + versions: '>= 1' groups: python-requirements: patterns: From 821ea6bf921ba74f3198fc827bf2dcefd94c6f4a Mon Sep 17 00:00:00 2001 From: David Lord Date: Tue, 23 Apr 2024 16:31:59 -0700 Subject: [PATCH 067/159] unignore upload/download-artifact --- .github/dependabot.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 6c110ae05..9ed4462b6 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,12 +4,6 @@ updates: directory: / schedule: interval: monthly - ignore: - # slsa depends on upload/download v3 - - dependency-name: actions/upload-artifact - versions: '>= 4' - - dependency-name: actions/download-artifact - versions: '>= 4' groups: github-actions: patterns: From 5bd2c5e8a5aaeb94f5b40c5b182e32719d2bb620 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Apr 2024 23:32:40 +0000 Subject: [PATCH 068/159] Bump the github-actions group with 4 updates Bumps the github-actions group with 4 updates: [actions/checkout](https://github.com/actions/checkout), [actions/upload-artifact](https://github.com/actions/upload-artifact), [slsa-framework/slsa-github-generator](https://github.com/slsa-framework/slsa-github-generator) and [actions/download-artifact](https://github.com/actions/download-artifact). 
Updates `actions/checkout` from 4.1.2 to 4.1.3 - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/9bb56186c3b09b4f86b1c65136769dd318469633...1d96c772d19495a3b5c517cd2bc0cb401ea0529f) Updates `actions/upload-artifact` from 3.1.3 to 4.3.3 - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/a8a3f3ad30e3422c9c7b888a15615d19a852ae32...65462800fd760344b1a7b4382951275a0abb4808) Updates `slsa-framework/slsa-github-generator` from 1.10.0 to 2.0.0 - [Release notes](https://github.com/slsa-framework/slsa-github-generator/releases) - [Changelog](https://github.com/slsa-framework/slsa-github-generator/blob/main/CHANGELOG.md) - [Commits](https://github.com/slsa-framework/slsa-github-generator/compare/v1.10.0...v2.0.0) Updates `actions/download-artifact` from 3.0.2 to 4.1.6 - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/9bc31d5ccc31df68ecc42ccf4149144866c47d8a...9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch dependency-group: github-actions - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-major dependency-group: github-actions - dependency-name: slsa-framework/slsa-github-generator dependency-type: direct:production update-type: version-update:semver-major dependency-group: github-actions - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-major dependency-group: github-actions ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 10 +++++----- .github/workflows/tests.yaml | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 4e8139b79..1329f8513 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,7 +9,7 @@ jobs: outputs: hash: ${{ steps.hash.outputs.hash }} steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: '3.x' @@ -23,7 +23,7 @@ jobs: - name: generate hash id: hash run: cd dist && echo "hash=$(sha256sum * | base64 -w0)" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3 + - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: path: ./dist provenance: @@ -33,7 +33,7 @@ jobs: id-token: write contents: write # Can't pin with hash due to how this workflow works. 
- uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.10.0 + uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.0.0 with: base64-subjects: ${{ needs.build.outputs.hash }} create-release: @@ -44,7 +44,7 @@ jobs: permissions: contents: write steps: - - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 + - uses: actions/download-artifact@9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395 # v4.1.6 - name: create release run: > gh release create --draft --repo ${{ github.repository }} @@ -63,7 +63,7 @@ jobs: permissions: id-token: write steps: - - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 + - uses: actions/download-artifact@9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395 # v4.1.6 - uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # v1.8.14 with: repository-url: https://test.pypi.org/legacy/ diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 91a02d0ca..d93f23c28 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -30,7 +30,7 @@ jobs: - {python: '3.8'} - {name: PyPy, python: 'pypy-3.10', tox: pypy310} steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: ${{ matrix.python }} @@ -42,7 +42,7 @@ jobs: typing: runs-on: ubuntu-latest steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: '3.x' From bfd23e54752875995680abb1fe2c50048ec08060 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Apr 2024 23:35:25 +0000 Subject: [PATCH 069/159] Bump the python-requirements group in /requirements with 4 updates Bumps the python-requirements group in /requirements with 4 updates: [pallets-sphinx-themes](https://github.com/pallets/pallets-sphinx-themes), [sphinx](https://github.com/sphinx-doc/sphinx), [pyright](https://github.com/RobertCraigie/pyright-python) and [types-setuptools](https://github.com/python/typeshed). 
Updates `pallets-sphinx-themes` from 2.1.1 to 2.1.2 - [Release notes](https://github.com/pallets/pallets-sphinx-themes/releases) - [Changelog](https://github.com/pallets/pallets-sphinx-themes/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/pallets-sphinx-themes/compare/2.1.1...2.1.2) Updates `sphinx` from 7.2.6 to 7.3.7 - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES.rst) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v7.2.6...v7.3.7) Updates `pyright` from 1.1.357 to 1.1.359 - [Release notes](https://github.com/RobertCraigie/pyright-python/releases) - [Commits](https://github.com/RobertCraigie/pyright-python/compare/v1.1.357...v1.1.359) Updates `types-setuptools` from 69.2.0.20240317 to 69.5.0.20240423 - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: pallets-sphinx-themes dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: sphinx dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-requirements - dependency-name: pyright dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: types-setuptools dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-requirements ... Signed-off-by: dependabot[bot] --- requirements/dev.txt | 8 ++++---- requirements/docs.txt | 4 ++-- requirements/typing.txt | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 186ceda46..dbe7bd279 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -92,7 +92,7 @@ packaging==24.0 # pytest # sphinx # tox -pallets-sphinx-themes==2.1.1 +pallets-sphinx-themes==2.1.2 # via -r docs.txt platformdirs==4.2.0 # via @@ -120,7 +120,7 @@ pygments==2.17.2 # sphinx pyproject-api==1.6.1 # via tox -pyright==1.1.357 +pyright==1.1.359 # via -r typing.txt pytest==8.1.1 # via @@ -142,7 +142,7 @@ snowballstemmer==2.2.0 # via # -r docs.txt # sphinx -sphinx==7.2.6 +sphinx==7.3.7 # via # -r docs.txt # pallets-sphinx-themes @@ -179,7 +179,7 @@ types-contextvars==2.4.7.3 # via -r typing.txt types-dataclasses==0.6.6 # via -r typing.txt -types-setuptools==69.2.0.20240317 +types-setuptools==69.5.0.20240423 # via -r typing.txt typing-extensions==4.11.0 # via diff --git a/requirements/docs.txt b/requirements/docs.txt index ed605ea92..38ccab9c9 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -26,7 +26,7 @@ packaging==24.0 # via # pallets-sphinx-themes # sphinx -pallets-sphinx-themes==2.1.1 +pallets-sphinx-themes==2.1.2 # via -r docs.in pygments==2.17.2 # via sphinx @@ -34,7 +34,7 @@ requests==2.31.0 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.2.6 +sphinx==7.3.7 # via # -r docs.in # pallets-sphinx-themes diff --git a/requirements/typing.txt b/requirements/typing.txt index 09c78d711..497dfe70b 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -16,7 +16,7 @@ packaging==24.0 # via pytest pluggy==1.4.0 # via pytest -pyright==1.1.357 +pyright==1.1.359 # via -r typing.in pytest==8.1.1 # via -r typing.in @@ -24,7 +24,7 @@ types-contextvars==2.4.7.3 # via -r typing.in types-dataclasses==0.6.6 # via -r typing.in -types-setuptools==69.2.0.20240317 +types-setuptools==69.5.0.20240423 # via -r typing.in typing-extensions==4.11.0 # via 
mypy From 0ab4cc1c13f791a6ea997936826e89f56643ffce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 May 2024 11:24:59 +0000 Subject: [PATCH 070/159] Bump the github-actions group with 2 updates Bumps the github-actions group with 2 updates: [actions/checkout](https://github.com/actions/checkout) and [actions/download-artifact](https://github.com/actions/download-artifact). Updates `actions/checkout` from 4.1.3 to 4.1.4 - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/1d96c772d19495a3b5c517cd2bc0cb401ea0529f...0ad4b8fadaa221de15dcec353f45205ec38ea70b) Updates `actions/download-artifact` from 4.1.6 to 4.1.7 - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395...65a9edc5881444af0b9093a5e628f2fe47ea3b2e) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch dependency-group: github-actions - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-patch dependency-group: github-actions ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 6 +++--- .github/workflows/tests.yaml | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 1329f8513..17f2af2a7 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,7 +9,7 @@ jobs: outputs: hash: ${{ steps.hash.outputs.hash }} steps: - - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: '3.x' @@ -44,7 +44,7 @@ jobs: permissions: contents: write steps: - - uses: actions/download-artifact@9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395 # v4.1.6 + - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 - name: create release run: > gh release create --draft --repo ${{ github.repository }} @@ -63,7 +63,7 @@ jobs: permissions: id-token: write steps: - - uses: actions/download-artifact@9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395 # v4.1.6 + - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 - uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # v1.8.14 with: repository-url: https://test.pypi.org/legacy/ diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index d93f23c28..c852936b6 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -30,7 +30,7 @@ jobs: - {python: '3.8'} - {name: PyPy, python: 'pypy-3.10', tox: pypy310} steps: - - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: ${{ matrix.python }} @@ -42,7 +42,7 @@ jobs: typing: runs-on: ubuntu-latest steps: - - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - uses: 
actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: '3.x' From d56794e454f0e39969889dcf890a73ae8697506b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 May 2024 11:32:36 +0000 Subject: [PATCH 071/159] Bump the python-requirements group in /requirements with 5 updates Bumps the python-requirements group in /requirements with 5 updates: | Package | From | To | | --- | --- | --- | | [pytest](https://github.com/pytest-dev/pytest) | `8.1.1` | `8.2.0` | | [pallets-sphinx-themes](https://github.com/pallets/pallets-sphinx-themes) | `2.1.2` | `2.1.3` | | [mypy](https://github.com/python/mypy) | `1.9.0` | `1.10.0` | | [pyright](https://github.com/RobertCraigie/pyright-python) | `1.1.359` | `1.1.360` | | [tox](https://github.com/tox-dev/tox) | `4.14.2` | `4.15.0` | Updates `pytest` from 8.1.1 to 8.2.0 - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.1.1...8.2.0) Updates `pallets-sphinx-themes` from 2.1.2 to 2.1.3 - [Release notes](https://github.com/pallets/pallets-sphinx-themes/releases) - [Changelog](https://github.com/pallets/pallets-sphinx-themes/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/pallets-sphinx-themes/compare/2.1.2...2.1.3) Updates `mypy` from 1.9.0 to 1.10.0 - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/1.9.0...v1.10.0) Updates `pyright` from 1.1.359 to 1.1.360 - [Release notes](https://github.com/RobertCraigie/pyright-python/releases) - [Commits](https://github.com/RobertCraigie/pyright-python/compare/v1.1.359...v1.1.360) Updates `tox` from 4.14.2 to 4.15.0 - [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/4.14.2...4.15.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-requirements - dependency-name: pallets-sphinx-themes dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-requirements - dependency-name: pyright dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: tox dependency-type: direct:development update-type: version-update:semver-minor dependency-group: python-requirements ... 
Signed-off-by: dependabot[bot] --- requirements/dev.txt | 12 ++++++------ requirements/docs.txt | 2 +- requirements/tests.txt | 4 ++-- requirements/typing.txt | 8 ++++---- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index dbe7bd279..80074f24b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -71,7 +71,7 @@ markupsafe==2.1.5 # via # -r docs.txt # jinja2 -mypy==1.9.0 +mypy==1.10.0 # via -r typing.txt mypy-extensions==1.0.0 # via @@ -92,13 +92,13 @@ packaging==24.0 # pytest # sphinx # tox -pallets-sphinx-themes==2.1.2 +pallets-sphinx-themes==2.1.3 # via -r docs.txt platformdirs==4.2.0 # via # tox # virtualenv -pluggy==1.4.0 +pluggy==1.5.0 # via # -r tests.txt # -r typing.txt @@ -120,9 +120,9 @@ pygments==2.17.2 # sphinx pyproject-api==1.6.1 # via tox -pyright==1.1.359 +pyright==1.1.360 # via -r typing.txt -pytest==8.1.1 +pytest==8.2.0 # via # -r tests.txt # -r typing.txt @@ -173,7 +173,7 @@ sphinxcontrib-serializinghtml==1.1.10 # via # -r docs.txt # sphinx -tox==4.14.2 +tox==4.15.0 # via -r dev.in types-contextvars==2.4.7.3 # via -r typing.txt diff --git a/requirements/docs.txt b/requirements/docs.txt index 38ccab9c9..a5b36b246 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -26,7 +26,7 @@ packaging==24.0 # via # pallets-sphinx-themes # sphinx -pallets-sphinx-themes==2.1.2 +pallets-sphinx-themes==2.1.3 # via -r docs.in pygments==2.17.2 # via sphinx diff --git a/requirements/tests.txt b/requirements/tests.txt index 14b67436f..90f186cd2 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -16,13 +16,13 @@ iniconfig==2.0.0 # via pytest packaging==24.0 # via pytest -pluggy==1.4.0 +pluggy==1.5.0 # via pytest psutil==5.9.8 # via pytest-xprocess pycparser==2.22 # via cffi -pytest==8.1.1 +pytest==8.2.0 # via # -r tests.in # pytest-timeout diff --git a/requirements/typing.txt b/requirements/typing.txt index 497dfe70b..1b3db64e3 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -6,7 +6,7 @@ # iniconfig==2.0.0 # via pytest -mypy==1.9.0 +mypy==1.10.0 # via -r typing.in mypy-extensions==1.0.0 # via mypy @@ -14,11 +14,11 @@ nodeenv==1.8.0 # via pyright packaging==24.0 # via pytest -pluggy==1.4.0 +pluggy==1.5.0 # via pytest -pyright==1.1.359 +pyright==1.1.360 # via -r typing.in -pytest==8.1.1 +pytest==8.2.0 # via -r typing.in types-contextvars==2.4.7.3 # via -r typing.in From e25b69e9e12bf083f41c11b6c15e3fd95223124f Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 2 May 2024 11:33:52 -0700 Subject: [PATCH 072/159] start version 3.0.3 --- CHANGES.rst | 26 ++++++++++++++++---------- pyproject.toml | 2 +- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 401886d36..b3fc39b27 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,27 +1,33 @@ .. currentmodule:: werkzeug +Version 3.0.3 +------------- + +Unreleased + + Version 3.0.2 ------------- Released 2024-04-01 -- Ensure setting merge_slashes to False results in NotFound for +- Ensure setting ``merge_slashes`` to ``False`` results in ``NotFound`` for repeated-slash requests against single slash routes. :issue:`2834` -- Fix handling of TypeError in TypeConversionDict.get() to match - ValueErrors. :issue:`2843` -- Fix response_wrapper type check in test client. :issue:`2831` -- Make the return type of ``MultiPartParser.parse`` more - precise. :issue:`2840` -- Raise an error if converter arguments cannot be - parsed. 
:issue:`2822` +- Fix handling of ``TypeError`` in ``TypeConversionDict.get()`` to match + ``ValueError``. :issue:`2843` +- Fix ``response_wrapper`` type check in test client. :issue:`2831` +- Make the return type of ``MultiPartParser.parse`` more precise. + :issue:`2840` +- Raise an error if converter arguments cannot be parsed. :issue:`2822` + Version 3.0.1 ------------- Released 2023-10-24 -- Fix slow multipart parsing for large parts potentially enabling DoS - attacks. +- Fix slow multipart parsing for large parts potentially enabling DoS attacks. + Version 3.0.0 ------------- diff --git a/pyproject.toml b/pyproject.toml index f54060f6e..bde199657 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.0.2" +version = "3.0.3.dev" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"} From 3c45bc15c31496bf62ee524167d1dabb7e6bc68d Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 2 May 2024 11:35:24 -0700 Subject: [PATCH 073/159] add ghsa links --- docs/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/conf.py b/docs/conf.py index 5e04cb817..d58c17e1d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -25,6 +25,7 @@ extlinks = { "issue": ("https://github.com/pallets/werkzeug/issues/%s", "#%s"), "pr": ("https://github.com/pallets/werkzeug/pull/%s", "#%s"), + "ghsa": ("https://github.com/advisories/%s", "GHSA-%s"), } intersphinx_mapping = { "python": ("https://docs.python.org/3/", None), From 8c302d86ff49c99bfff96e03f359bbd8d39e603b Mon Sep 17 00:00:00 2001 From: momotarogrp <75789359+momotarogrp@users.noreply.github.com> Date: Thu, 28 Dec 2023 02:16:50 +0900 Subject: [PATCH 074/159] Update _reloader.py root={} path=() os.path.join(*path) Traceback (most recent call last): File "", line 1, in TypeError: join() missing 1 required positional argument: 'path' --- CHANGES.rst | 2 ++ src/werkzeug/_reloader.py | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index b3fc39b27..05e5acf29 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,8 @@ Version 3.0.3 Unreleased +- Make reloader more robust when ``""`` is in ``sys.path``. :pr:`2823` + Version 3.0.2 ------------- diff --git a/src/werkzeug/_reloader.py b/src/werkzeug/_reloader.py index 24c2dab79..d7e91a61c 100644 --- a/src/werkzeug/_reloader.py +++ b/src/werkzeug/_reloader.py @@ -157,7 +157,9 @@ def _walk(node: t.Mapping[str, dict[str, t.Any]], path: tuple[str, ...]) -> None for prefix, child in node.items(): _walk(child, path + (prefix,)) - if not node: + # If there are no more nodes, and a path has been accumulated, add it. + # Path may be empty if the "" entry is in sys.path. + if not node and path: rv.add(os.path.join(*path)) _walk(root, ()) From e633b30cd1d9bcab5692f422e71084cc4f27ebde Mon Sep 17 00:00:00 2001 From: momotarogrp <75789359+momotarogrp@users.noreply.github.com> Date: Thu, 28 Dec 2023 02:16:50 +0900 Subject: [PATCH 075/159] Update _reloader.py root={} path=() os.path.join(*path) Traceback (most recent call last): File "", line 1, in TypeError: join() missing 1 required positional argument: 'path' --- CHANGES.rst | 2 ++ src/werkzeug/_reloader.py | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index b3fc39b27..05e5acf29 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,8 @@ Version 3.0.3 Unreleased +- Make reloader more robust when ``""`` is in ``sys.path``. 
:pr:`2823` + Version 3.0.2 ------------- diff --git a/src/werkzeug/_reloader.py b/src/werkzeug/_reloader.py index 24c2dab79..d7e91a61c 100644 --- a/src/werkzeug/_reloader.py +++ b/src/werkzeug/_reloader.py @@ -157,7 +157,9 @@ def _walk(node: t.Mapping[str, dict[str, t.Any]], path: tuple[str, ...]) -> None for prefix, child in node.items(): _walk(child, path + (prefix,)) - if not node: + # If there are no more nodes, and a path has been accumulated, add it. + # Path may be empty if the "" entry is in sys.path. + if not node and path: rv.add(os.path.join(*path)) _walk(root, ()) From 793be472c9d145eb9be7d4200672d1806289d84a Mon Sep 17 00:00:00 2001 From: afdy Date: Thu, 2 May 2024 08:43:15 +0100 Subject: [PATCH 076/159] update adhoc tls dev cert format single host in cn field san extension for wildcard name --- CHANGES.rst | 1 + src/werkzeug/serving.py | 7 +++++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 05e5acf29..279fd3f7d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -6,6 +6,7 @@ Version 3.0.3 Unreleased - Make reloader more robust when ``""`` is in ``sys.path``. :pr:`2823` +- Better TLS cert format with ``adhoc`` dev certs. :pr:`2891` Version 3.0.2 diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py index ad6bf911b..7f6ea922e 100644 --- a/src/werkzeug/serving.py +++ b/src/werkzeug/serving.py @@ -532,7 +532,10 @@ def generate_adhoc_ssl_pair( .not_valid_before(dt.now(timezone.utc)) .not_valid_after(dt.now(timezone.utc) + timedelta(days=365)) .add_extension(x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False) - .add_extension(x509.SubjectAlternativeName([x509.DNSName(cn)]), critical=False) + .add_extension( + x509.SubjectAlternativeName([x509.DNSName(cn), x509.DNSName(f"*.{cn}")]), + critical=False, + ) .sign(pkey, hashes.SHA256(), backend) ) return cert, pkey @@ -560,7 +563,7 @@ def make_ssl_devcert( """ if host is not None: - cn = f"*.{host}/CN={host}" + cn = host cert, pkey = generate_adhoc_ssl_pair(cn=cn) from cryptography.hazmat.primitives import serialization From 97fb2f722297ae4e12e36dab024e0acf8477b3c8 Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 5 May 2024 08:55:42 -0700 Subject: [PATCH 077/159] remove _invalid_iri_to_uri workaround tell Python to handle itms-services scheme correctly --- CHANGES.rst | 3 +++ src/werkzeug/urls.py | 25 ++++++------------------- src/werkzeug/wrappers/response.py | 3 +-- tests/test_urls.py | 6 ++++++ tests/test_wrappers.py | 1 + 5 files changed, 17 insertions(+), 21 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 279fd3f7d..367cfb668 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -7,6 +7,9 @@ Unreleased - Make reloader more robust when ``""`` is in ``sys.path``. :pr:`2823` - Better TLS cert format with ``adhoc`` dev certs. :pr:`2891` +- Inform Python < 3.12 how to handle ``itms-services`` URIs correctly, rather + than using an overly-broad workaround in Werkzeug that caused some redirect + URIs to be passed on without encoding. 
:issue:`2828` Version 3.0.2 diff --git a/src/werkzeug/urls.py b/src/werkzeug/urls.py index 4d61e600b..5bffe3928 100644 --- a/src/werkzeug/urls.py +++ b/src/werkzeug/urls.py @@ -3,6 +3,7 @@ import codecs import re import typing as t +import urllib.parse from urllib.parse import quote from urllib.parse import unquote from urllib.parse import urlencode @@ -164,25 +165,11 @@ def iri_to_uri(iri: str) -> str: return urlunsplit((parts.scheme, netloc, path, query, fragment)) -def _invalid_iri_to_uri(iri: str) -> str: - """The URL scheme ``itms-services://`` must contain the ``//`` even though it does - not have a host component. There may be other invalid schemes as well. Currently, - responses will always call ``iri_to_uri`` on the redirect ``Location`` header, which - removes the ``//``. For now, if the IRI only contains ASCII and does not contain - spaces, pass it on as-is. In Werkzeug 3.0, this should become a - ``response.process_location`` flag. - - :meta private: - """ - try: - iri.encode("ascii") - except UnicodeError: - pass - else: - if len(iri.split(None, 1)) == 1: - return iri - - return iri_to_uri(iri) +# Python < 3.12 +# itms-services was worked around in previous iri_to_uri implementations, but +# we can tell Python directly that it needs to preserve the //. +if "itms-services" not in urllib.parse.uses_netloc: + urllib.parse.uses_netloc.append("itms-services") def _decode_idna(domain: str) -> str: diff --git a/src/werkzeug/wrappers/response.py b/src/werkzeug/wrappers/response.py index 7b666e3e8..7f01287c7 100644 --- a/src/werkzeug/wrappers/response.py +++ b/src/werkzeug/wrappers/response.py @@ -14,7 +14,6 @@ from ..http import parse_range_header from ..http import remove_entity_headers from ..sansio.response import Response as _SansIOResponse -from ..urls import _invalid_iri_to_uri from ..urls import iri_to_uri from ..utils import cached_property from ..wsgi import _RangeWrapper @@ -479,7 +478,7 @@ def get_wsgi_headers(self, environ: WSGIEnvironment) -> Headers: content_length = value if location is not None: - location = _invalid_iri_to_uri(location) + location = iri_to_uri(location) if self.autocorrect_location_header: # Make the location header an absolute URL. 
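For reference, the effect of this change can be sketched as follows (this is an illustration, not part of the patch; it assumes Werkzeug 3.0.3 or later and mirrors the new ``test_itms_services`` test): registering ``itms-services`` in ``urllib.parse.uses_netloc`` tells the standard library to keep the empty ``//`` authority, so such redirect ``Location`` values pass through unchanged instead of relying on a scheme-specific workaround inside ``iri_to_uri``.

.. code-block:: python

    # Illustrative sketch, not part of the diff. Assumes Werkzeug >= 3.0.3;
    # the URL mirrors tests/test_urls.py::test_itms_services added below.
    from werkzeug.urls import iri_to_uri

    url = "itms-services://?action=download-manifest&url=https://test.example/path"

    # With the scheme registered in urllib.parse.uses_netloc (Python < 3.12),
    # the empty "//" authority survives the round trip through iri_to_uri.
    assert iri_to_uri(url) == url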
diff --git a/tests/test_urls.py b/tests/test_urls.py index fdaa913a6..101b886ec 100644 --- a/tests/test_urls.py +++ b/tests/test_urls.py @@ -98,3 +98,9 @@ def test_iri_to_uri_dont_quote_valid_code_points(): # [] are not valid URL code points according to WhatWG URL Standard # https://url.spec.whatwg.org/#url-code-points assert urls.iri_to_uri("/path[bracket]?(paren)") == "/path%5Bbracket%5D?(paren)" + + +# Python < 3.12 +def test_itms_services() -> None: + url = "itms-services://?action=download-manifest&url=https://test.example/path" + assert urls.iri_to_uri(url) == url diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py index d7bc12b95..f75694459 100644 --- a/tests/test_wrappers.py +++ b/tests/test_wrappers.py @@ -1154,6 +1154,7 @@ class MyResponse(wrappers.Response): ("auto", "location", "expect"), ( (False, "/test", "/test"), + (False, "/\\\\test.example?q", "/%5C%5Ctest.example?q"), (True, "/test", "http://localhost/test"), (True, "test", "http://localhost/a/b/test"), (True, "./test", "http://localhost/a/b/test"), From 7080b55acd48b68afdda65ee6c7f99e9afafb0ba Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 5 May 2024 09:23:06 -0700 Subject: [PATCH 078/159] endpoint type is Any --- CHANGES.rst | 2 ++ src/werkzeug/routing/exceptions.py | 9 ++++++--- src/werkzeug/routing/map.py | 18 +++++++++--------- src/werkzeug/routing/rules.py | 4 ++-- 4 files changed, 19 insertions(+), 14 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 367cfb668..5658080c3 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,8 @@ Unreleased - Inform Python < 3.12 how to handle ``itms-services`` URIs correctly, rather than using an overly-broad workaround in Werkzeug that caused some redirect URIs to be passed on without encoding. :issue:`2828` +- Type annotation for ``Rule.endpoint`` and other uses of ``endpoint`` is + ``Any``. 
:issue:`2836` Version 3.0.2 diff --git a/src/werkzeug/routing/exceptions.py b/src/werkzeug/routing/exceptions.py index b63fe5b9c..eeabd4ed1 100644 --- a/src/werkzeug/routing/exceptions.py +++ b/src/werkzeug/routing/exceptions.py @@ -59,7 +59,7 @@ def __init__(self, path_info: str) -> None: class RequestAliasRedirect(RoutingException): # noqa: B903 """This rule is an alias and wants to redirect to the canonical URL.""" - def __init__(self, matched_values: t.Mapping[str, t.Any], endpoint: str) -> None: + def __init__(self, matched_values: t.Mapping[str, t.Any], endpoint: t.Any) -> None: super().__init__() self.matched_values = matched_values self.endpoint = endpoint @@ -72,7 +72,7 @@ class BuildError(RoutingException, LookupError): def __init__( self, - endpoint: str, + endpoint: t.Any, values: t.Mapping[str, t.Any], method: str | None, adapter: MapAdapter | None = None, @@ -93,7 +93,10 @@ def _score_rule(rule: Rule) -> float: [ 0.98 * difflib.SequenceMatcher( - None, rule.endpoint, self.endpoint + # endpoints can be any type, compare as strings + None, + str(rule.endpoint), + str(self.endpoint), ).ratio(), 0.01 * bool(set(self.values or ()).issubset(rule.arguments)), 0.01 * bool(rule.methods and self.method in rule.methods), diff --git a/src/werkzeug/routing/map.py b/src/werkzeug/routing/map.py index 73671bf94..4d15e8824 100644 --- a/src/werkzeug/routing/map.py +++ b/src/werkzeug/routing/map.py @@ -104,7 +104,7 @@ def __init__( host_matching: bool = False, ) -> None: self._matcher = StateMachineMatcher(merge_slashes) - self._rules_by_endpoint: dict[str, list[Rule]] = {} + self._rules_by_endpoint: dict[t.Any, list[Rule]] = {} self._remap = True self._remap_lock = self.lock_class() @@ -131,7 +131,7 @@ def merge_slashes(self) -> bool: def merge_slashes(self, value: bool) -> None: self._matcher.merge_slashes = value - def is_endpoint_expecting(self, endpoint: str, *arguments: str) -> bool: + def is_endpoint_expecting(self, endpoint: t.Any, *arguments: str) -> bool: """Iterate over all rules and check if the endpoint expects the arguments provided. This is for example useful if you have some URLs that expect a language code and others that do not and @@ -155,7 +155,7 @@ def is_endpoint_expecting(self, endpoint: str, *arguments: str) -> bool: def _rules(self) -> list[Rule]: return [rule for rules in self._rules_by_endpoint.values() for rule in rules] - def iter_rules(self, endpoint: str | None = None) -> t.Iterator[Rule]: + def iter_rules(self, endpoint: t.Any | None = None) -> t.Iterator[Rule]: """Iterate over all rules or the rules of an endpoint. :param endpoint: if provided only the rules for that endpoint @@ -470,14 +470,14 @@ def application(environ, start_response): raise @t.overload - def match( # type: ignore + def match( self, path_info: str | None = None, method: str | None = None, return_rule: t.Literal[False] = False, query_args: t.Mapping[str, t.Any] | str | None = None, websocket: bool | None = None, - ) -> tuple[str, t.Mapping[str, t.Any]]: ... + ) -> tuple[t.Any, t.Mapping[str, t.Any]]: ... @t.overload def match( @@ -496,7 +496,7 @@ def match( return_rule: bool = False, query_args: t.Mapping[str, t.Any] | str | None = None, websocket: bool | None = None, - ) -> tuple[str | Rule, t.Mapping[str, t.Any]]: + ) -> tuple[t.Any | Rule, t.Mapping[str, t.Any]]: """The usage is simple: you just pass the match method the current path info as well as the method (which defaults to `GET`). 
The following things can then happen: @@ -770,7 +770,7 @@ def make_redirect_url( def make_alias_redirect_url( self, path: str, - endpoint: str, + endpoint: t.Any, values: t.Mapping[str, t.Any], method: str, query_args: t.Mapping[str, t.Any] | str, @@ -786,7 +786,7 @@ def make_alias_redirect_url( def _partial_build( self, - endpoint: str, + endpoint: t.Any, values: t.Mapping[str, t.Any], method: str | None, append_unknown: bool, @@ -827,7 +827,7 @@ def _partial_build( def build( self, - endpoint: str, + endpoint: t.Any, values: t.Mapping[str, t.Any] | None = None, method: str | None = None, force_external: bool = False, diff --git a/src/werkzeug/routing/rules.py b/src/werkzeug/routing/rules.py index 75323357c..6a02f8d3e 100644 --- a/src/werkzeug/routing/rules.py +++ b/src/werkzeug/routing/rules.py @@ -453,7 +453,7 @@ def __init__( subdomain: str | None = None, methods: t.Iterable[str] | None = None, build_only: bool = False, - endpoint: str | None = None, + endpoint: t.Any | None = None, strict_slashes: bool | None = None, merge_slashes: bool | None = None, redirect_to: str | t.Callable[..., str] | None = None, @@ -493,7 +493,7 @@ def __init__( ) self.methods = methods - self.endpoint: str = endpoint # type: ignore + self.endpoint: t.Any = endpoint self.redirect_to = redirect_to if defaults: From 71b69dfb7df3d912e66bab87fbb1f21f83504967 Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 2 May 2024 11:55:52 -0700 Subject: [PATCH 079/159] restrict debugger trusted hosts Add a list of `trusted_hosts` to the `DebuggedApplication` middleware. It defaults to only allowing `localhost`, `.localhost` subdomains, and `127.0.0.1`. `run_simple(use_debugger=True)` adds its `hostname` argument to the trusted list as well. The middleware can be used directly to further modify the trusted list in less common development scenarios. The debugger UI uses the full `document.location` instead of only `document.location.pathname`. Either of these fixes on their own mitigates the reported vulnerability. --- CHANGES.rst | 5 ++++ docs/debug.rst | 35 +++++++++++++++++++++++---- src/werkzeug/debug/__init__.py | 10 ++++++++ src/werkzeug/debug/shared/debugger.js | 4 +-- src/werkzeug/serving.py | 3 +++ 5 files changed, 50 insertions(+), 7 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 5658080c3..4d0081e93 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,11 @@ Version 3.0.3 Unreleased +- Only allow ``localhost``, ``.localhost``, ``127.0.0.1``, or the specified + hostname when running the dev server, to make debugger requests. Additional + hosts can be added by using the debugger middleware directly. The debugger + UI makes requests using the full URL rather than only the path. + :ghsa:`2g68-c3qc-8985` - Make reloader more robust when ``""`` is in ``sys.path``. :pr:`2823` - Better TLS cert format with ``adhoc`` dev certs. :pr:`2891` - Inform Python < 3.12 how to handle ``itms-services`` URIs correctly, rather diff --git a/docs/debug.rst b/docs/debug.rst index 25a9f0b2d..d842135a7 100644 --- a/docs/debug.rst +++ b/docs/debug.rst @@ -16,7 +16,8 @@ interactive debug console to execute code in any frame. The debugger allows the execution of arbitrary code which makes it a major security risk. **The debugger must never be used on production machines. We cannot stress this enough. Do not enable the debugger - in production.** + in production.** Production means anything that is not development, + and anything that is publicly accessible. .. 
note:: @@ -72,10 +73,9 @@ argument to get a detailed list of all the attributes it has. Debugger PIN ------------ -Starting with Werkzeug 0.11 the debug console is protected by a PIN. -This is a security helper to make it less likely for the debugger to be -exploited if you forget to disable it when deploying to production. The -PIN based authentication is enabled by default. +The debug console is protected by a PIN. This is a security helper to make it +less likely for the debugger to be exploited if you forget to disable it when +deploying to production. The PIN based authentication is enabled by default. The first time a console is opened, a dialog will prompt for a PIN that is printed to the command line. The PIN is generated in a stable way @@ -92,6 +92,31 @@ intended to make it harder for an attacker to exploit the debugger. Never enable the debugger in production.** +Allowed Hosts +------------- + +The debug console will only be served if the request comes from a trusted host. +If a request comes from a browser page that is not served on a trusted URL, a +400 error will be returned. + +By default, ``localhost``, any ``.localhost`` subdomain, and ``127.0.0.1`` are +trusted. ``run_simple`` will trust its ``hostname`` argument as well. To change +this further, use the debug middleware directly rather than through +``use_debugger=True``. + +.. code-block:: python + + if os.environ.get("USE_DEBUGGER") in {"1", "true"}: + app = DebuggedApplication(app, evalex=True) + app.trusted_hosts = [...] + + run_simple("localhost", 8080, app) + +**This feature is not meant to entirely secure the debugger. It is +intended to make it harder for an attacker to exploit the debugger. +Never enable the debugger in production.** + + Pasting Errors -------------- diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py index a55480aa3..cda1fa2e7 100644 --- a/src/werkzeug/debug/__init__.py +++ b/src/werkzeug/debug/__init__.py @@ -298,6 +298,14 @@ def __init__( else: self.pin = None + self.trusted_hosts: list[str] = [".localhost", "127.0.0.1"] + """List of domains to allow requests to the debugger from. A leading dot + allows all subdomains. This only allows ``".localhost"`` domains by + default. + + .. versionadded:: 3.0.3 + """ + @property def pin(self) -> str | None: if not hasattr(self, "_pin"): @@ -506,6 +514,8 @@ def __call__( # form data! Otherwise the application won't have access to that data # any more! 
request = Request(environ) + request.trusted_hosts = self.trusted_hosts + assert request.host # will raise 400 error if not trusted response = self.debug_application if request.args.get("__debugger__") == "yes": cmd = request.args.get("cmd") diff --git a/src/werkzeug/debug/shared/debugger.js b/src/werkzeug/debug/shared/debugger.js index f463e9c77..18c658344 100644 --- a/src/werkzeug/debug/shared/debugger.js +++ b/src/werkzeug/debug/shared/debugger.js @@ -48,7 +48,7 @@ function initPinBox() { btn.disabled = true; fetch( - `${document.location.pathname}?__debugger__=yes&cmd=pinauth&pin=${pin}&s=${encodedSecret}` + `${document.location}?__debugger__=yes&cmd=pinauth&pin=${pin}&s=${encodedSecret}` ) .then((res) => res.json()) .then(({auth, exhausted}) => { @@ -79,7 +79,7 @@ function promptForPin() { if (!EVALEX_TRUSTED) { const encodedSecret = encodeURIComponent(SECRET); fetch( - `${document.location.pathname}?__debugger__=yes&cmd=printpin&s=${encodedSecret}` + `${document.location}?__debugger__=yes&cmd=printpin&s=${encodedSecret}` ); const pinPrompt = document.getElementsByClassName("pin-prompt")[0]; fadeIn(pinPrompt); diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py index 7f6ea922e..859f9aacb 100644 --- a/src/werkzeug/serving.py +++ b/src/werkzeug/serving.py @@ -1072,6 +1072,9 @@ def run_simple( from .debug import DebuggedApplication application = DebuggedApplication(application, evalex=use_evalex) + # Allow the specified hostname to use the debugger, in addition to + # localhost domains. + application.trusted_hosts.append(hostname) if not is_running_from_reloader(): fd = None From 890b6b62634fa61224222aee31081c61b054ff01 Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 3 May 2024 14:49:43 -0700 Subject: [PATCH 080/159] only require trusted host for evalex --- src/werkzeug/debug/__init__.py | 25 ++++++++++++++++++++----- src/werkzeug/sansio/utils.py | 2 +- 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py index cda1fa2e7..6bef30fbc 100644 --- a/src/werkzeug/debug/__init__.py +++ b/src/werkzeug/debug/__init__.py @@ -19,7 +19,9 @@ from .._internal import _log from ..exceptions import NotFound +from ..exceptions import SecurityError from ..http import parse_cookie +from ..sansio.utils import host_is_trusted from ..security import gen_salt from ..utils import send_file from ..wrappers.request import Request @@ -352,7 +354,7 @@ def debug_application( is_trusted = bool(self.check_pin_trust(environ)) html = tb.render_debugger_html( - evalex=self.evalex, + evalex=self.evalex and self.check_host_trust(environ), secret=self.secret, evalex_trusted=is_trusted, ) @@ -380,6 +382,9 @@ def execute_command( # type: ignore[return] frame: DebugFrameSummary | _ConsoleFrame, ) -> Response: """Execute a command in a console.""" + if not self.check_host_trust(request.environ): + return SecurityError() # type: ignore[return-value] + contexts = self.frame_contexts.get(id(frame), []) with ExitStack() as exit_stack: @@ -390,6 +395,9 @@ def execute_command( # type: ignore[return] def display_console(self, request: Request) -> Response: """Display a standalone shell.""" + if not self.check_host_trust(request.environ): + return SecurityError() # type: ignore[return-value] + if 0 not in self.frames: if self.console_init_func is None: ns = {} @@ -442,12 +450,18 @@ def check_pin_trust(self, environ: WSGIEnvironment) -> bool | None: return None return (time.time() - PIN_TIME) < ts + def check_host_trust(self, environ: 
WSGIEnvironment) -> bool: + return host_is_trusted(environ.get("HTTP_HOST"), self.trusted_hosts) + def _fail_pin_auth(self) -> None: time.sleep(5.0 if self._failed_pin_auth > 5 else 0.5) self._failed_pin_auth += 1 def pin_auth(self, request: Request) -> Response: """Authenticates with the pin.""" + if not self.check_host_trust(request.environ): + return SecurityError() # type: ignore[return-value] + exhausted = False auth = False trust = self.check_pin_trust(request.environ) @@ -497,8 +511,11 @@ def pin_auth(self, request: Request) -> Response: rv.delete_cookie(self.pin_cookie_name) return rv - def log_pin_request(self) -> Response: + def log_pin_request(self, request: Request) -> Response: """Log the pin if needed.""" + if not self.check_host_trust(request.environ): + return SecurityError() # type: ignore[return-value] + if self.pin_logging and self.pin is not None: _log( "info", " * To enable the debugger you need to enter the security pin:" @@ -514,8 +531,6 @@ def __call__( # form data! Otherwise the application won't have access to that data # any more! request = Request(environ) - request.trusted_hosts = self.trusted_hosts - assert request.host # will raise 400 error if not trusted response = self.debug_application if request.args.get("__debugger__") == "yes": cmd = request.args.get("cmd") @@ -527,7 +542,7 @@ def __call__( elif cmd == "pinauth" and secret == self.secret: response = self.pin_auth(request) # type: ignore elif cmd == "printpin" and secret == self.secret: - response = self.log_pin_request() # type: ignore + response = self.log_pin_request(request) # type: ignore elif ( self.evalex and cmd is not None diff --git a/src/werkzeug/sansio/utils.py b/src/werkzeug/sansio/utils.py index 48ec1bfa0..14fa0ac88 100644 --- a/src/werkzeug/sansio/utils.py +++ b/src/werkzeug/sansio/utils.py @@ -8,7 +8,7 @@ from ..urls import uri_to_iri -def host_is_trusted(hostname: str, trusted_list: t.Iterable[str]) -> bool: +def host_is_trusted(hostname: str | None, trusted_list: t.Iterable[str]) -> bool: """Check if a host matches a list of trusted names. :param hostname: The name to check. From f9995e967979eb694d6b31536cc65314fd7e9c8c Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 5 May 2024 16:02:12 -0700 Subject: [PATCH 081/159] release version 3.0.3 --- CHANGES.rst | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 4d0081e93..f6158e79b 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,7 @@ Version 3.0.3 ------------- -Unreleased +Released 2024-05-05 - Only allow ``localhost``, ``.localhost``, ``127.0.0.1``, or the specified hostname when running the dev server, to make debugger requests. Additional diff --git a/pyproject.toml b/pyproject.toml index bde199657..eb06882df 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.0.3.dev" +version = "3.0.3" description = "The comprehensive WSGI web application library." 
readme = "README.md" license = {file = "LICENSE.txt"} From 497794c700106096363407fc3d5fd3ff128ffe84 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 6 May 2024 22:05:43 +0000 Subject: [PATCH 082/159] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.3.5 → v0.4.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.3.5...v0.4.3) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 828916171..ed8d79070 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ ci: autoupdate_schedule: monthly repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.5 + rev: v0.4.3 hooks: - id: ruff - id: ruff-format From e12362681a958711e5430d2de8da8b82ee26a5bf Mon Sep 17 00:00:00 2001 From: David Lord Date: Sat, 11 May 2024 10:02:40 -0700 Subject: [PATCH 083/159] test with python 3.13 --- .github/workflows/tests.yaml | 1 + pyproject.toml | 3 --- requirements/dev.txt | 4 +--- requirements/tests.in | 3 ++- requirements/tests.txt | 8 ++++---- src/werkzeug/debug/tbtools.py | 15 +++++++++++++-- tox.ini | 2 +- 7 files changed, 22 insertions(+), 14 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 91a02d0ca..fb2629019 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -21,6 +21,7 @@ jobs: fail-fast: false matrix: include: + - {python: '3.13'} - {python: '3.12'} - {name: Windows, python: '3.12', os: windows-latest} - {name: Mac, python: '3.12', os: macos-latest} diff --git a/pyproject.toml b/pyproject.toml index eb06882df..600feb31e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,9 +79,6 @@ strict = true module = [ "colorama.*", "cryptography.*", - "eventlet.*", - "gevent.*", - "greenlet.*", "watchdog.*", "xprocess.*", ] diff --git a/requirements/dev.txt b/requirements/dev.txt index 186ceda46..91aac67eb 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -18,7 +18,7 @@ certifi==2024.2.2 # via # -r docs.txt # requests -cffi==1.16.0 +cffi @ https://github.com/python-cffi/cffi/archive/d7f750b1b1c5ea4da5aa537b9baba0e01b0ce843.zip # via # -r tests.txt # cryptography @@ -46,8 +46,6 @@ filelock==3.13.3 # via # tox # virtualenv -greenlet==3.0.3 - # via -r tests.txt identify==2.5.35 # via pre-commit idna==3.6 diff --git a/requirements/tests.in b/requirements/tests.in index 8228f8ee6..26263e3c0 100644 --- a/requirements/tests.in +++ b/requirements/tests.in @@ -3,6 +3,7 @@ pytest-timeout # pinned for python 3.8 support pytest-xprocess<1 cryptography -greenlet watchdog ephemeral-port-reserve +# pin current commit on main for python 3.13 support +https://github.com/python-cffi/cffi/archive/d7f750b1b1c5ea4da5aa537b9baba0e01b0ce843.zip diff --git a/requirements/tests.txt b/requirements/tests.txt index 14b67436f..c1d236b94 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -4,14 +4,14 @@ # # pip-compile tests.in # -cffi==1.16.0 - # via cryptography +cffi @ https://github.com/python-cffi/cffi/archive/d7f750b1b1c5ea4da5aa537b9baba0e01b0ce843.zip + # via + # -r tests.in + # cryptography cryptography==42.0.5 # via -r tests.in ephemeral-port-reserve==1.1.4 # via -r tests.in -greenlet==3.0.3 - # via -r tests.in iniconfig==2.0.0 # via pytest packaging==24.0 diff --git 
a/src/werkzeug/debug/tbtools.py b/src/werkzeug/debug/tbtools.py index 0574c966b..e81ed6e18 100644 --- a/src/werkzeug/debug/tbtools.py +++ b/src/werkzeug/debug/tbtools.py @@ -296,7 +296,12 @@ def render_traceback_html(self, include_title: bool = True) -> str: rows.append("\n".join(row_parts)) - is_syntax_error = issubclass(self._te.exc_type, SyntaxError) + if sys.version_info < (3, 13): + exc_type_str = self._te.exc_type.__name__ + else: + exc_type_str = self._te.exc_type_str + + is_syntax_error = exc_type_str == "SyntaxError" if include_title: if is_syntax_error: @@ -325,13 +330,19 @@ def render_debugger_html( ) -> str: exc_lines = list(self._te.format_exception_only()) plaintext = "".join(self._te.format()) + + if sys.version_info < (3, 13): + exc_type_str = self._te.exc_type.__name__ + else: + exc_type_str = self._te.exc_type_str + return PAGE_HTML % { "evalex": "true" if evalex else "false", "evalex_trusted": "true" if evalex_trusted else "false", "console": "false", "title": escape(exc_lines[0]), "exception": escape("".join(exc_lines)), - "exception_type": escape(self._te.exc_type.__name__), + "exception_type": escape(exc_type_str), "summary": self.render_traceback_html(include_title=False), "plaintext": escape(plaintext), "plaintext_cs": re.sub("-{2,}", "-", plaintext), diff --git a/tox.ini b/tox.ini index f7bc0b3b5..708c2295a 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] envlist = - py3{12,11,10,9,8} + py3{13,12,11,10,9,8} pypy310 style typing From 484514fe3036080f0f157ee960ab74f3e7e67c5b Mon Sep 17 00:00:00 2001 From: David Lord Date: Sat, 18 May 2024 11:28:43 -0400 Subject: [PATCH 084/159] split tox update envs --- tox.ini | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/tox.ini b/tox.ini index 708c2295a..a5abb3143 100644 --- a/tox.ini +++ b/tox.ini @@ -28,14 +28,18 @@ commands = mypy deps = -r requirements/docs.txt commands = sphinx-build -E -W -b dirhtml docs docs/_build/dirhtml +[testenv:update-pre_commit] +labels = update +deps = pre-commit +skip_install = true +commands = pre-commit autoupdate -j4 + [testenv:update-requirements] -deps = - pip-tools - pre-commit +labels = update +deps = pip-tools skip_install = true change_dir = requirements commands = - pre-commit autoupdate -j4 pip-compile -U build.in pip-compile -U docs.in pip-compile -U tests.in From c7cde2427892004f43e1b672ea85fba6062f8bfd Mon Sep 17 00:00:00 2001 From: David Lord Date: Sat, 18 May 2024 11:43:13 -0400 Subject: [PATCH 085/159] update dev dependencies --- .pre-commit-config.yaml | 2 +- requirements/build.txt | 2 +- requirements/dev.txt | 38 +++++++++++++++++++------------------- requirements/docs.txt | 14 +++++++------- requirements/tests.txt | 6 +++--- requirements/typing.txt | 10 +++++----- 6 files changed, 36 insertions(+), 36 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 828916171..5b7ebb878 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ ci: autoupdate_schedule: monthly repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.5 + rev: v0.4.4 hooks: - id: ruff - id: ruff-format diff --git a/requirements/build.txt b/requirements/build.txt index 9ecc48952..52fd1f69a 100644 --- a/requirements/build.txt +++ b/requirements/build.txt @@ -8,5 +8,5 @@ build==1.2.1 # via -r build.in packaging==24.0 # via build -pyproject-hooks==1.0.0 +pyproject-hooks==1.1.0 # via build diff --git a/requirements/dev.txt b/requirements/dev.txt index 91aac67eb..dad2e92d7 100644 --- a/requirements/dev.txt +++ 
b/requirements/dev.txt @@ -8,7 +8,7 @@ alabaster==0.7.16 # via # -r docs.txt # sphinx -babel==2.14.0 +babel==2.15.0 # via # -r docs.txt # sphinx @@ -32,23 +32,23 @@ charset-normalizer==3.3.2 # requests colorama==0.4.6 # via tox -cryptography==42.0.5 +cryptography==42.0.7 # via -r tests.txt distlib==0.3.8 # via virtualenv -docutils==0.20.1 +docutils==0.21.2 # via # -r docs.txt # sphinx ephemeral-port-reserve==1.1.4 # via -r tests.txt -filelock==3.13.3 +filelock==3.14.0 # via # tox # virtualenv -identify==2.5.35 +identify==2.5.36 # via pre-commit -idna==3.6 +idna==3.7 # via # -r docs.txt # requests @@ -61,7 +61,7 @@ iniconfig==2.0.0 # -r tests.txt # -r typing.txt # pytest -jinja2==3.1.3 +jinja2==3.1.4 # via # -r docs.txt # sphinx @@ -69,7 +69,7 @@ markupsafe==2.1.5 # via # -r docs.txt # jinja2 -mypy==1.9.0 +mypy==1.10.0 # via -r typing.txt mypy-extensions==1.0.0 # via @@ -90,19 +90,19 @@ packaging==24.0 # pytest # sphinx # tox -pallets-sphinx-themes==2.1.1 +pallets-sphinx-themes==2.1.3 # via -r docs.txt -platformdirs==4.2.0 +platformdirs==4.2.2 # via # tox # virtualenv -pluggy==1.4.0 +pluggy==1.5.0 # via # -r tests.txt # -r typing.txt # pytest # tox -pre-commit==3.7.0 +pre-commit==3.7.1 # via -r dev.in psutil==5.9.8 # via @@ -112,15 +112,15 @@ pycparser==2.22 # via # -r tests.txt # cffi -pygments==2.17.2 +pygments==2.18.0 # via # -r docs.txt # sphinx pyproject-api==1.6.1 # via tox -pyright==1.1.357 +pyright==1.1.363 # via -r typing.txt -pytest==8.1.1 +pytest==8.2.0 # via # -r tests.txt # -r typing.txt @@ -140,7 +140,7 @@ snowballstemmer==2.2.0 # via # -r docs.txt # sphinx -sphinx==7.2.6 +sphinx==7.3.7 # via # -r docs.txt # pallets-sphinx-themes @@ -171,13 +171,13 @@ sphinxcontrib-serializinghtml==1.1.10 # via # -r docs.txt # sphinx -tox==4.14.2 +tox==4.15.0 # via -r dev.in types-contextvars==2.4.7.3 # via -r typing.txt types-dataclasses==0.6.6 # via -r typing.txt -types-setuptools==69.2.0.20240317 +types-setuptools==69.5.0.20240518 # via -r typing.txt typing-extensions==4.11.0 # via @@ -187,7 +187,7 @@ urllib3==2.2.1 # via # -r docs.txt # requests -virtualenv==20.25.1 +virtualenv==20.26.2 # via # pre-commit # tox diff --git a/requirements/docs.txt b/requirements/docs.txt index ed605ea92..288993f4d 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -6,19 +6,19 @@ # alabaster==0.7.16 # via sphinx -babel==2.14.0 +babel==2.15.0 # via sphinx certifi==2024.2.2 # via requests charset-normalizer==3.3.2 # via requests -docutils==0.20.1 +docutils==0.21.2 # via sphinx -idna==3.6 +idna==3.7 # via requests imagesize==1.4.1 # via sphinx -jinja2==3.1.3 +jinja2==3.1.4 # via sphinx markupsafe==2.1.5 # via jinja2 @@ -26,15 +26,15 @@ packaging==24.0 # via # pallets-sphinx-themes # sphinx -pallets-sphinx-themes==2.1.1 +pallets-sphinx-themes==2.1.3 # via -r docs.in -pygments==2.17.2 +pygments==2.18.0 # via sphinx requests==2.31.0 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.2.6 +sphinx==7.3.7 # via # -r docs.in # pallets-sphinx-themes diff --git a/requirements/tests.txt b/requirements/tests.txt index c1d236b94..2c35dcf3a 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -8,7 +8,7 @@ cffi @ https://github.com/python-cffi/cffi/archive/d7f750b1b1c5ea4da5aa537b9baba # via # -r tests.in # cryptography -cryptography==42.0.5 +cryptography==42.0.7 # via -r tests.in ephemeral-port-reserve==1.1.4 # via -r tests.in @@ -16,13 +16,13 @@ iniconfig==2.0.0 # via pytest packaging==24.0 # via pytest -pluggy==1.4.0 +pluggy==1.5.0 # via pytest psutil==5.9.8 # via pytest-xprocess 
pycparser==2.22 # via cffi -pytest==8.1.1 +pytest==8.2.0 # via # -r tests.in # pytest-timeout diff --git a/requirements/typing.txt b/requirements/typing.txt index 09c78d711..2638271ba 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -6,7 +6,7 @@ # iniconfig==2.0.0 # via pytest -mypy==1.9.0 +mypy==1.10.0 # via -r typing.in mypy-extensions==1.0.0 # via mypy @@ -14,17 +14,17 @@ nodeenv==1.8.0 # via pyright packaging==24.0 # via pytest -pluggy==1.4.0 +pluggy==1.5.0 # via pytest -pyright==1.1.357 +pyright==1.1.363 # via -r typing.in -pytest==8.1.1 +pytest==8.2.0 # via -r typing.in types-contextvars==2.4.7.3 # via -r typing.in types-dataclasses==0.6.6 # via -r typing.in -types-setuptools==69.2.0.20240317 +types-setuptools==69.5.0.20240518 # via -r typing.in typing-extensions==4.11.0 # via mypy From c9f7a6b67ffd6c0556440984a931624e2eab0ba1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edgar=20Ram=C3=ADrez=20Mondrag=C3=B3n?= <16805946+edgarrmondragon@users.noreply.github.com> Date: Fri, 24 May 2024 15:41:43 -0600 Subject: [PATCH 086/159] Test with cffi 1.17.0rc1 (#2910) --- requirements/tests.in | 4 ++-- requirements/tests.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/tests.in b/requirements/tests.in index 26263e3c0..7c87c4d8d 100644 --- a/requirements/tests.in +++ b/requirements/tests.in @@ -5,5 +5,5 @@ pytest-xprocess<1 cryptography watchdog ephemeral-port-reserve -# pin current commit on main for python 3.13 support -https://github.com/python-cffi/cffi/archive/d7f750b1b1c5ea4da5aa537b9baba0e01b0ce843.zip +# pin cffi 1.17.0 pre-release for python 3.13 support +cffi==1.17.0rc1 diff --git a/requirements/tests.txt b/requirements/tests.txt index 2c35dcf3a..18bae7666 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -4,7 +4,7 @@ # # pip-compile tests.in # -cffi @ https://github.com/python-cffi/cffi/archive/d7f750b1b1c5ea4da5aa537b9baba0e01b0ce843.zip +cffi==1.17.0rc1 # via # -r tests.in # cryptography From cba8652a181184b504d89305a32deccc322d6a28 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 1 Jun 2024 11:14:45 +0000 Subject: [PATCH 087/159] Bump actions/checkout from 4.1.4 to 4.1.6 in the github-actions group Bumps the github-actions group with 1 update: [actions/checkout](https://github.com/actions/checkout). Updates `actions/checkout` from 4.1.4 to 4.1.6 - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/0ad4b8fadaa221de15dcec353f45205ec38ea70b...a5ac7e51b41094c92402da3b24376905380afc29) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch dependency-group: github-actions ... 
Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 2 +- .github/workflows/tests.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 17f2af2a7..ba0b348b1 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,7 +9,7 @@ jobs: outputs: hash: ${{ steps.hash.outputs.hash }} steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: '3.x' diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index a7ad601a4..c5a88216f 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -31,7 +31,7 @@ jobs: - {python: '3.8'} - {name: PyPy, python: 'pypy-3.10', tox: pypy310} steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: ${{ matrix.python }} @@ -43,7 +43,7 @@ jobs: typing: runs-on: ubuntu-latest steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: '3.x' From 16017a9d931608903871bbd8aca60d8489155324 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 1 Jun 2024 11:43:58 +0000 Subject: [PATCH 088/159] Bump the python-requirements group in /requirements with 4 updates Bumps the python-requirements group in /requirements with 4 updates: [pyright](https://github.com/RobertCraigie/pyright-python), [pytest](https://github.com/pytest-dev/pytest), [types-setuptools](https://github.com/python/typeshed) and [watchdog](https://github.com/gorakhargosh/watchdog). 
Updates `pyright` from 1.1.363 to 1.1.365 - [Release notes](https://github.com/RobertCraigie/pyright-python/releases) - [Commits](https://github.com/RobertCraigie/pyright-python/compare/v1.1.363...v1.1.365) Updates `pytest` from 8.2.0 to 8.2.1 - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.2.0...8.2.1) Updates `types-setuptools` from 69.5.0.20240518 to 70.0.0.20240524 - [Commits](https://github.com/python/typeshed/commits) Updates `watchdog` from 4.0.0 to 4.0.1 - [Release notes](https://github.com/gorakhargosh/watchdog/releases) - [Changelog](https://github.com/gorakhargosh/watchdog/blob/master/changelog.rst) - [Commits](https://github.com/gorakhargosh/watchdog/compare/v4.0.0...v4.0.1) --- updated-dependencies: - dependency-name: pyright dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: types-setuptools dependency-type: direct:production update-type: version-update:semver-major dependency-group: python-requirements - dependency-name: watchdog dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements ... Signed-off-by: dependabot[bot] --- requirements/tests.txt | 4 ++-- requirements/typing.txt | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/tests.txt b/requirements/tests.txt index 2c35dcf3a..6de3a630e 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -22,7 +22,7 @@ psutil==5.9.8 # via pytest-xprocess pycparser==2.22 # via cffi -pytest==8.2.0 +pytest==8.2.1 # via # -r tests.in # pytest-timeout @@ -31,5 +31,5 @@ pytest-timeout==2.3.1 # via -r tests.in pytest-xprocess==0.23.0 # via -r tests.in -watchdog==4.0.0 +watchdog==4.0.1 # via -r tests.in diff --git a/requirements/typing.txt b/requirements/typing.txt index 2638271ba..ab3ba7fc1 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -16,7 +16,7 @@ packaging==24.0 # via pytest pluggy==1.5.0 # via pytest -pyright==1.1.363 +pyright==1.1.365 # via -r typing.in pytest==8.2.0 # via -r typing.in @@ -24,7 +24,7 @@ types-contextvars==2.4.7.3 # via -r typing.in types-dataclasses==0.6.6 # via -r typing.in -types-setuptools==69.5.0.20240518 +types-setuptools==70.0.0.20240524 # via -r typing.in typing-extensions==4.11.0 # via mypy From 30500fb0910b25a365226ef644c0fd85e228dec5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 22:10:34 +0000 Subject: [PATCH 089/159] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.4.4 → v0.4.7](https://github.com/astral-sh/ruff-pre-commit/compare/v0.4.4...v0.4.7) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5b7ebb878..0b0bdf927 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ ci: autoupdate_schedule: monthly repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.4 + rev: v0.4.7 hooks: - id: ruff - id: ruff-format From e77b40cd7bd47b939583260a01998eea528288f2 Mon Sep 17 
00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 11:17:21 +0000 Subject: [PATCH 090/159] Bump the python-requirements group in /requirements with 7 updates Bumps the python-requirements group in /requirements with 7 updates: | Package | From | To | | --- | --- | --- | | [cryptography](https://github.com/pyca/cryptography) | `42.0.7` | `42.0.8` | | [packaging](https://github.com/pypa/packaging) | `24.0` | `24.1` | | [psutil](https://github.com/giampaolo/psutil) | `5.9.8` | `6.0.0` | | [pytest](https://github.com/pytest-dev/pytest) | `8.2.1` | `8.2.2` | | [mypy](https://github.com/python/mypy) | `1.10.0` | `1.10.1` | | [pyright](https://github.com/RobertCraigie/pyright-python) | `1.1.365` | `1.1.369` | | [types-setuptools](https://github.com/python/typeshed) | `70.0.0.20240524` | `70.1.0.20240627` | Updates `cryptography` from 42.0.7 to 42.0.8 - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/42.0.7...42.0.8) Updates `packaging` from 24.0 to 24.1 - [Release notes](https://github.com/pypa/packaging/releases) - [Changelog](https://github.com/pypa/packaging/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/packaging/compare/24.0...24.1) Updates `psutil` from 5.9.8 to 6.0.0 - [Changelog](https://github.com/giampaolo/psutil/blob/master/HISTORY.rst) - [Commits](https://github.com/giampaolo/psutil/compare/release-5.9.8...release-6.0.0) Updates `pytest` from 8.2.1 to 8.2.2 - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.2.1...8.2.2) Updates `mypy` from 1.10.0 to 1.10.1 - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.10.0...v1.10.1) Updates `pyright` from 1.1.365 to 1.1.369 - [Release notes](https://github.com/RobertCraigie/pyright-python/releases) - [Commits](https://github.com/RobertCraigie/pyright-python/compare/v1.1.365...v1.1.369) Updates `types-setuptools` from 70.0.0.20240524 to 70.1.0.20240627 - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: packaging dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-requirements - dependency-name: psutil dependency-type: direct:production update-type: version-update:semver-major dependency-group: python-requirements - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: pyright dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: types-setuptools dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-requirements ... 
Signed-off-by: dependabot[bot] --- requirements/tests.txt | 8 ++++---- requirements/typing.txt | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/tests.txt b/requirements/tests.txt index 6de3a630e..f7df4984c 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -8,21 +8,21 @@ cffi @ https://github.com/python-cffi/cffi/archive/d7f750b1b1c5ea4da5aa537b9baba # via # -r tests.in # cryptography -cryptography==42.0.7 +cryptography==42.0.8 # via -r tests.in ephemeral-port-reserve==1.1.4 # via -r tests.in iniconfig==2.0.0 # via pytest -packaging==24.0 +packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -psutil==5.9.8 +psutil==6.0.0 # via pytest-xprocess pycparser==2.22 # via cffi -pytest==8.2.1 +pytest==8.2.2 # via # -r tests.in # pytest-timeout diff --git a/requirements/typing.txt b/requirements/typing.txt index ab3ba7fc1..4be03f395 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -6,7 +6,7 @@ # iniconfig==2.0.0 # via pytest -mypy==1.10.0 +mypy==1.10.1 # via -r typing.in mypy-extensions==1.0.0 # via mypy @@ -16,7 +16,7 @@ packaging==24.0 # via pytest pluggy==1.5.0 # via pytest -pyright==1.1.365 +pyright==1.1.369 # via -r typing.in pytest==8.2.0 # via -r typing.in @@ -24,7 +24,7 @@ types-contextvars==2.4.7.3 # via -r typing.in types-dataclasses==0.6.6 # via -r typing.in -types-setuptools==70.0.0.20240524 +types-setuptools==70.1.0.20240627 # via -r typing.in typing-extensions==4.11.0 # via mypy From 2e97246502c192776b2baa809dca0451b2255cf4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 11:53:54 +0000 Subject: [PATCH 091/159] Bump the github-actions group with 2 updates Bumps the github-actions group with 2 updates: [actions/checkout](https://github.com/actions/checkout) and [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish). Updates `actions/checkout` from 4.1.6 to 4.1.7 - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/a5ac7e51b41094c92402da3b24376905380afc29...692973e3d937129bcbf40652eb9f2f61becf3332) Updates `pypa/gh-action-pypi-publish` from 1.8.14 to 1.9.0 - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/81e9d935c883d0b210363ab89cf05f3894778450...ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch dependency-group: github-actions - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-minor dependency-group: github-actions ... 
Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 6 +++--- .github/workflows/tests.yaml | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index ba0b348b1..5b27d31f5 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,7 +9,7 @@ jobs: outputs: hash: ${{ steps.hash.outputs.hash }} steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: '3.x' @@ -64,10 +64,10 @@ jobs: id-token: write steps: - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 - - uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # v1.8.14 + - uses: pypa/gh-action-pypi-publish@ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0 # v1.9.0 with: repository-url: https://test.pypi.org/legacy/ packages-dir: artifact/ - - uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # v1.8.14 + - uses: pypa/gh-action-pypi-publish@ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0 # v1.9.0 with: packages-dir: artifact/ diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index c5a88216f..9fa3bd2bc 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -31,7 +31,7 @@ jobs: - {python: '3.8'} - {name: PyPy, python: 'pypy-3.10', tox: pypy310} steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: ${{ matrix.python }} @@ -43,7 +43,7 @@ jobs: typing: runs-on: ubuntu-latest steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: '3.x' From 7b7b41cdb7225cbd2fad278c472de251d5bdb154 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jul 2024 00:15:47 +0000 Subject: [PATCH 092/159] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.4.7 → v0.5.0](https://github.com/astral-sh/ruff-pre-commit/compare/v0.4.7...v0.5.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0b0bdf927..a8f20ef7c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ ci: autoupdate_schedule: monthly repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.7 + rev: v0.5.0 hooks: - id: ruff - id: ruff-format From b28001e06f71126d32ec095721282c7da647c3c6 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Fri, 19 Apr 2024 11:35:10 +0200 Subject: [PATCH 093/159] fix cache control issues Cache-Control no-transform directive is a boolean no-transform has no arguments as a request or response directive (RFC 9111). Prior to this fix, cc.no_transform would return None whether the directive is present or not. 
Cache-Control min-fresh directive requires argument The type for this property is `int | None`, so getting `"*"` for a malformed directive is surprising. I think dropping the empty value here is better than fixing the type. Fix CacheControl getter type stubs - cache_control_property with type=bool never return None - some non-bool types were marked as returning bool instead of str - max_stale can return "*" in addition to int or None Reflect immutability of RequestCacheControl in type stubs Fix CacheControl setter type stubs mypy doesn't use the type of setters as of 1.9.0 (see python/mypy#3004), but I think it's still good to have these be accurate (maybe the other type checkers work better here). mypy's recommendation is to use `# type: ignore` comments if setter types don't match getters, which you see when setting no_cache to True. Support must-understand response directive --- CHANGES.rst | 4 ++ src/werkzeug/datastructures/cache_control.py | 21 +++++++- src/werkzeug/datastructures/cache_control.pyi | 49 ++++++++++--------- src/werkzeug/utils.py | 2 +- tests/test_datastructures.py | 20 ++++++++ 5 files changed, 70 insertions(+), 26 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index fdc52692a..0b7889237 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -6,6 +6,10 @@ Version 3.1.0 Unreleased - Support Cookie CHIPS (Partitioned Cookies). :issue:`2797` +- ``CacheControl.no_transform`` is a boolean when present. ``min_fresh`` is + ``None`` when not present. Added the ``must_understand`` attribute. Fixed + some typing issues on cache control. :issue:`2881` + Version 3.0.3 ------------- diff --git a/src/werkzeug/datastructures/cache_control.py b/src/werkzeug/datastructures/cache_control.py index bff4c18bb..6ff4eceeb 100644 --- a/src/werkzeug/datastructures/cache_control.py +++ b/src/werkzeug/datastructures/cache_control.py @@ -32,6 +32,10 @@ class _CacheControl(UpdateDictMixin, dict): to subclass it and add your own items have a look at the sourcecode for that class. + .. versionchanged:: 3.1 + + ``no_transform`` is a boolean when present. + .. versionchanged:: 2.1.0 Setting int properties such as ``max_age`` will convert the value to an int. @@ -58,7 +62,7 @@ class _CacheControl(UpdateDictMixin, dict): no_cache = cache_control_property("no-cache", "*", None) no_store = cache_control_property("no-store", None, bool) max_age = cache_control_property("max-age", -1, int) - no_transform = cache_control_property("no-transform", None, None) + no_transform = cache_control_property("no-transform", None, bool) def __init__(self, values=(), on_update=None): dict.__init__(self, values or ()) @@ -127,6 +131,12 @@ class RequestCacheControl(ImmutableDictMixin, _CacheControl): you plan to subclass it and add your own items have a look at the sourcecode for that class. + .. versionchanged:: 3.1 + ``no_transform`` is a boolean when present. + + .. versionchanged:: 3.1 + ``min_fresh`` is ``None`` if a value is not provided for the attribute. + .. versionchanged:: 2.1.0 Setting int properties such as ``max_age`` will convert the value to an int. 
@@ -137,7 +147,7 @@ class RequestCacheControl(ImmutableDictMixin, _CacheControl): """ max_stale = cache_control_property("max-stale", "*", int) - min_fresh = cache_control_property("min-fresh", "*", int) + min_fresh = cache_control_property("min-fresh", None, int) only_if_cached = cache_control_property("only-if-cached", None, bool) @@ -151,6 +161,12 @@ class ResponseCacheControl(_CacheControl): you plan to subclass it and add your own items have a look at the sourcecode for that class. + .. versionchanged:: 3.1 + ``no_transform`` is a boolean when present. + + .. versionchanged:: 3.1 + Added the ``must_understand`` attribute. + .. versionchanged:: 2.1.1 ``s_maxage`` converts the value to an int. @@ -169,6 +185,7 @@ class ResponseCacheControl(_CacheControl): proxy_revalidate = cache_control_property("proxy-revalidate", None, bool) s_maxage = cache_control_property("s-maxage", None, int) immutable = cache_control_property("immutable", None, bool) + must_understand = cache_control_property("must-understand", None, bool) # circular dependencies diff --git a/src/werkzeug/datastructures/cache_control.pyi b/src/werkzeug/datastructures/cache_control.pyi index 54ec02082..4c9f4df37 100644 --- a/src/werkzeug/datastructures/cache_control.pyi +++ b/src/werkzeug/datastructures/cache_control.pyi @@ -1,6 +1,7 @@ from collections.abc import Callable from collections.abc import Iterable from collections.abc import Mapping +from typing import Literal from typing import TypeVar from .mixins import ImmutableDictMixin @@ -24,13 +25,13 @@ class _CacheControl( on_update: Callable[[_CacheControl], None] | None = None, ) -> None: ... @property - def no_cache(self) -> bool | None: ... + def no_cache(self) -> str | None: ... @no_cache.setter - def no_cache(self, value: bool | None) -> None: ... + def no_cache(self, value: Literal[True] | str | None) -> None: ... @no_cache.deleter def no_cache(self) -> None: ... @property - def no_store(self) -> bool | None: ... + def no_store(self) -> bool: ... @no_store.setter def no_store(self, value: bool | None) -> None: ... @no_store.deleter @@ -42,7 +43,7 @@ class _CacheControl( @max_age.deleter def max_age(self) -> None: ... @property - def no_transform(self) -> bool | None: ... + def no_transform(self) -> bool: ... @no_transform.setter def no_transform(self, value: bool | None) -> None: ... @no_transform.deleter @@ -57,46 +58,42 @@ class _CacheControl( class RequestCacheControl( # type: ignore[misc] ImmutableDictMixin[str, str | int | bool | None], _CacheControl ): + @property # type: ignore + def no_cache(self) -> str | None: ... + @property # type: ignore + def no_store(self) -> bool: ... + @property # type: ignore + def max_age(self) -> int | None: ... + @property # type: ignore + def no_transform(self) -> bool: ... @property - def max_stale(self) -> int | None: ... - @max_stale.setter - def max_stale(self, value: int | None) -> None: ... - @max_stale.deleter - def max_stale(self) -> None: ... + def max_stale(self) -> int | Literal["*"] | None: ... @property def min_fresh(self) -> int | None: ... - @min_fresh.setter - def min_fresh(self, value: int | None) -> None: ... - @min_fresh.deleter - def min_fresh(self) -> None: ... @property def only_if_cached(self) -> bool | None: ... - @only_if_cached.setter - def only_if_cached(self, value: bool | None) -> None: ... - @only_if_cached.deleter - def only_if_cached(self) -> None: ... class ResponseCacheControl(_CacheControl): @property - def public(self) -> bool | None: ... + def public(self) -> bool: ... 
@public.setter def public(self, value: bool | None) -> None: ... @public.deleter def public(self) -> None: ... @property - def private(self) -> bool | None: ... + def private(self) -> str | None: ... @private.setter - def private(self, value: bool | None) -> None: ... + def private(self, value: Literal[True] | str | None) -> None: ... @private.deleter def private(self) -> None: ... @property - def must_revalidate(self) -> bool | None: ... + def must_revalidate(self) -> bool: ... @must_revalidate.setter def must_revalidate(self, value: bool | None) -> None: ... @must_revalidate.deleter def must_revalidate(self) -> None: ... @property - def proxy_revalidate(self) -> bool | None: ... + def proxy_revalidate(self) -> bool: ... @proxy_revalidate.setter def proxy_revalidate(self, value: bool | None) -> None: ... @proxy_revalidate.deleter @@ -108,8 +105,14 @@ class ResponseCacheControl(_CacheControl): @s_maxage.deleter def s_maxage(self) -> None: ... @property - def immutable(self) -> bool | None: ... + def immutable(self) -> bool: ... @immutable.setter def immutable(self, value: bool | None) -> None: ... @immutable.deleter def immutable(self) -> None: ... + @property + def must_understand(self) -> bool: ... + @must_understand.setter + def must_understand(self, value: bool | None) -> None: ... + @must_understand.deleter + def must_understand(self) -> None: ... diff --git a/src/werkzeug/utils.py b/src/werkzeug/utils.py index 59b97b732..b68ac18ca 100644 --- a/src/werkzeug/utils.py +++ b/src/werkzeug/utils.py @@ -497,7 +497,7 @@ def send_file( elif mtime is not None: rv.last_modified = mtime # type: ignore - rv.cache_control.no_cache = True + rv.cache_control.no_cache = True # type: ignore[assignment] # Flask will pass app.get_send_file_max_age, allowing its send_file # wrapper to not have to deal with paths. diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 64330e1e6..830dfefd5 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -953,6 +953,26 @@ def test_set_none(self): cc.no_cache = False assert cc.no_cache is False + def test_no_transform(self): + cc = ds.RequestCacheControl([("no-transform", None)]) + assert cc.no_transform is True + cc = ds.RequestCacheControl() + assert cc.no_transform is False + + def test_min_fresh(self): + cc = ds.RequestCacheControl([("min-fresh", "0")]) + assert cc.min_fresh == 0 + cc = ds.RequestCacheControl([("min-fresh", None)]) + assert cc.min_fresh is None + cc = ds.RequestCacheControl() + assert cc.min_fresh is None + + def test_must_understand(self): + cc = ds.ResponseCacheControl([("must-understand", None)]) + assert cc.must_understand is True + cc = ds.ResponseCacheControl() + assert cc.must_understand is False + class TestContentSecurityPolicy: def test_construct(self): From 553b5f2449f490f95321d76327b66d2ff9f34db8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Aug 2024 11:56:04 +0000 Subject: [PATCH 094/159] Bump the github-actions group with 3 updates Bumps the github-actions group with 3 updates: [actions/setup-python](https://github.com/actions/setup-python), [actions/upload-artifact](https://github.com/actions/upload-artifact) and [actions/download-artifact](https://github.com/actions/download-artifact). 
Updates `actions/setup-python` from 5.1.0 to 5.1.1 - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/82c7e631bb3cdc910f68e0081d67478d79c6982d...39cd14951b08e74b54015e9e001cdefcf80e669f) Updates `actions/upload-artifact` from 4.3.3 to 4.3.4 - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/65462800fd760344b1a7b4382951275a0abb4808...0b2256b8c012f0828dc542b3febcab082c67f72b) Updates `actions/download-artifact` from 4.1.7 to 4.1.8 - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/65a9edc5881444af0b9093a5e628f2fe47ea3b2e...fa0a91b85d4f404e444e00e005971372dc801d16) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-patch dependency-group: github-actions - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-patch dependency-group: github-actions - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-patch dependency-group: github-actions ... Signed-off-by: dependabot[bot] --- .github/workflows/publish.yaml | 8 ++++---- .github/workflows/tests.yaml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 5b27d31f5..aff5b7e19 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -10,7 +10,7 @@ jobs: hash: ${{ steps.hash.outputs.hash }} steps: - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 + - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1 with: python-version: '3.x' cache: pip @@ -23,7 +23,7 @@ jobs: - name: generate hash id: hash run: cd dist && echo "hash=$(sha256sum * | base64 -w0)" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: path: ./dist provenance: @@ -44,7 +44,7 @@ jobs: permissions: contents: write steps: - - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - name: create release run: > gh release create --draft --repo ${{ github.repository }} @@ -63,7 +63,7 @@ jobs: permissions: id-token: write steps: - - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: pypa/gh-action-pypi-publish@ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0 # v1.9.0 with: repository-url: https://test.pypi.org/legacy/ diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 9fa3bd2bc..4a755422f 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -32,7 +32,7 @@ jobs: - {name: PyPy, python: 'pypy-3.10', tox: pypy310} steps: - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 + - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1 with: python-version: ${{ matrix.python }} 
allow-prereleases: true @@ -44,7 +44,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 + - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1 with: python-version: '3.x' cache: pip From 62dc7803d22fbeadc9455cbbce4432e31140e75f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 5 Aug 2024 23:09:30 +0000 Subject: [PATCH 095/159] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.5.0 → v0.5.6](https://github.com/astral-sh/ruff-pre-commit/compare/v0.5.0...v0.5.6) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a8f20ef7c..5115439cc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ ci: autoupdate_schedule: monthly repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.0 + rev: v0.5.6 hooks: - id: ruff - id: ruff-format From c3c82f4576153581dd11b2e84a3aea6e60069c1f Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Tue, 20 Aug 2024 09:14:13 -0700 Subject: [PATCH 096/159] Improve types for AST handling (#2905) --- src/werkzeug/routing/rules.py | 35 +++++++++++++++++++++++------------ 1 file changed, 23 insertions(+), 12 deletions(-) diff --git a/src/werkzeug/routing/rules.py b/src/werkzeug/routing/rules.py index 6a02f8d3e..de9bcdba3 100644 --- a/src/werkzeug/routing/rules.py +++ b/src/werkzeug/routing/rules.py @@ -294,11 +294,18 @@ def get_rules(self, map: Map) -> t.Iterator[Rule]: ) -def _prefix_names(src: str) -> ast.stmt: +_ASTT = t.TypeVar("_ASTT", bound=ast.AST) + + +def _prefix_names(src: str, expected_type: type[_ASTT]) -> _ASTT: """ast parse and prefix names with `.` to avoid collision with user vars""" - tree = ast.parse(src).body[0] + tree: ast.AST = ast.parse(src).body[0] if isinstance(tree, ast.Expr): - tree = tree.value # type: ignore + tree = tree.value + if not isinstance(tree, expected_type): + raise TypeError( + f"AST node is of type {type(tree).__name__}, not {expected_type.__name__}" + ) for node in ast.walk(tree): if isinstance(node, ast.Name): node.id = f".{node.id}" @@ -313,8 +320,11 @@ def _prefix_names(src: str) -> ast.stmt: else: q = params = "" """ -_IF_KWARGS_URL_ENCODE_AST = _prefix_names(_IF_KWARGS_URL_ENCODE_CODE) -_URL_ENCODE_AST_NAMES = (_prefix_names("q"), _prefix_names("params")) +_IF_KWARGS_URL_ENCODE_AST = _prefix_names(_IF_KWARGS_URL_ENCODE_CODE, ast.If) +_URL_ENCODE_AST_NAMES = ( + _prefix_names("q", ast.Name), + _prefix_names("params", ast.Name), +) class Rule(RuleFactory): @@ -751,13 +761,13 @@ def _compile_builder( else: opl.append((True, data)) - def _convert(elem: str) -> ast.stmt: - ret = _prefix_names(_CALL_CONVERTER_CODE_FMT.format(elem=elem)) - ret.args = [ast.Name(str(elem), ast.Load())] # type: ignore # str for py2 + def _convert(elem: str) -> ast.Call: + ret = _prefix_names(_CALL_CONVERTER_CODE_FMT.format(elem=elem), ast.Call) + ret.args = [ast.Name(elem, ast.Load())] return ret - def _parts(ops: list[tuple[bool, str]]) -> list[ast.AST]: - parts = [ + def _parts(ops: list[tuple[bool, str]]) -> list[ast.expr]: + parts: list[ast.expr] = [ _convert(elem) if is_dynamic else ast.Constant(elem) for is_dynamic, elem in ops ] @@ -773,13 
+783,14 @@ def _parts(ops: list[tuple[bool, str]]) -> list[ast.AST]: dom_parts = _parts(dom_ops) url_parts = _parts(url_ops) + body: list[ast.stmt] if not append_unknown: body = [] else: body = [_IF_KWARGS_URL_ENCODE_AST] url_parts.extend(_URL_ENCODE_AST_NAMES) - def _join(parts: list[ast.AST]) -> ast.AST: + def _join(parts: list[ast.expr]) -> ast.expr: if len(parts) == 1: # shortcut return parts[0] return ast.JoinedStr(parts) @@ -795,7 +806,7 @@ def _join(parts: list[ast.AST]) -> ast.AST: ] kargs = [str(k) for k in defaults] - func_ast: ast.FunctionDef = _prefix_names("def _(): pass") # type: ignore + func_ast = _prefix_names("def _(): pass", ast.FunctionDef) func_ast.name = f"" func_ast.args.args.append(ast.arg(".self", None)) for arg in pargs + kargs: From d53cd468ab918d9e27c00ced575c5cddcb7db052 Mon Sep 17 00:00:00 2001 From: David Lord Date: Tue, 20 Aug 2024 09:20:50 -0700 Subject: [PATCH 097/159] pass args through tox to pip-compile --- tox.ini | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tox.ini b/tox.ini index a5abb3143..ee331d109 100644 --- a/tox.ini +++ b/tox.ini @@ -40,8 +40,8 @@ deps = pip-tools skip_install = true change_dir = requirements commands = - pip-compile -U build.in - pip-compile -U docs.in - pip-compile -U tests.in - pip-compile -U typing.in - pip-compile -U dev.in + pip-compile build.in -q {posargs:-U} + pip-compile docs.in -q {posargs:-U} + pip-compile tests.in -q {posargs:-U} + pip-compile typing.in -q {posargs:-U} + pip-compile dev.in -q {posargs:-U} From f95be71cf01a6bb4b6fc05c36ab4274200cf8242 Mon Sep 17 00:00:00 2001 From: David Lord Date: Tue, 20 Aug 2024 09:21:14 -0700 Subject: [PATCH 098/159] update mypy --- requirements/dev.txt | 2 +- requirements/typing.txt | 2 +- src/werkzeug/debug/__init__.py | 2 +- src/werkzeug/routing/rules.py | 10 +++++----- src/werkzeug/wrappers/request.py | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index dad2e92d7..bbd58eda5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -69,7 +69,7 @@ markupsafe==2.1.5 # via # -r docs.txt # jinja2 -mypy==1.10.0 +mypy==1.11.1 # via -r typing.txt mypy-extensions==1.0.0 # via diff --git a/requirements/typing.txt b/requirements/typing.txt index 2638271ba..c14ed66d6 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -6,7 +6,7 @@ # iniconfig==2.0.0 # via pytest -mypy==1.10.0 +mypy==1.11.1 # via -r typing.in mypy-extensions==1.0.0 # via mypy diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py index 6bef30fbc..2eebd38a8 100644 --- a/src/werkzeug/debug/__init__.py +++ b/src/werkzeug/debug/__init__.py @@ -375,7 +375,7 @@ def debug_application( environ["wsgi.errors"].write("".join(tb.render_traceback_text())) - def execute_command( # type: ignore[return] + def execute_command( self, request: Request, command: str, diff --git a/src/werkzeug/routing/rules.py b/src/werkzeug/routing/rules.py index de9bcdba3..2dad31dd3 100644 --- a/src/werkzeug/routing/rules.py +++ b/src/werkzeug/routing/rules.py @@ -101,7 +101,7 @@ def _pythonize(value: str) -> None | bool | int | float | str: return _PYTHON_CONSTANTS[value] for convert in int, float: try: - return convert(value) # type: ignore + return convert(value) except ValueError: pass if value[:1] == value[-1:] and value[0] in "\"'": @@ -826,13 +826,13 @@ def _join(parts: list[ast.expr]) -> ast.expr: # bad line numbers cause an assert to fail in debug builds for node in ast.walk(module): if "lineno" in 
node._attributes: - node.lineno = 1 + node.lineno = 1 # type: ignore[attr-defined] if "end_lineno" in node._attributes: - node.end_lineno = node.lineno + node.end_lineno = node.lineno # type: ignore[attr-defined] if "col_offset" in node._attributes: - node.col_offset = 0 + node.col_offset = 0 # type: ignore[attr-defined] if "end_col_offset" in node._attributes: - node.end_col_offset = node.col_offset + node.end_col_offset = node.col_offset # type: ignore[attr-defined] code = compile(module, "", "exec") return self._get_func_code(code, func_ast.name) diff --git a/src/werkzeug/wrappers/request.py b/src/werkzeug/wrappers/request.py index 38053c210..344f28b60 100644 --- a/src/werkzeug/wrappers/request.py +++ b/src/werkzeug/wrappers/request.py @@ -370,7 +370,7 @@ def data(self) -> bytes: return self.get_data(parse_form_data=True) @t.overload - def get_data( # type: ignore + def get_data( self, cache: bool = True, as_text: t.Literal[False] = False, From 554a333476732459598eb71ef747115774ca99f2 Mon Sep 17 00:00:00 2001 From: David Lord Date: Tue, 20 Aug 2024 09:26:10 -0700 Subject: [PATCH 099/159] start version 3.0.4 --- CHANGES.rst | 6 ++++++ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index f6158e79b..41878b5da 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,11 @@ .. currentmodule:: werkzeug +Version 3.0.4 +------------- + +Unreleased + + Version 3.0.3 ------------- diff --git a/pyproject.toml b/pyproject.toml index 600feb31e..1f194748c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.0.3" +version = "3.0.4.dev" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"} From cbf351e04aa06fb66202b20750f7a3ef62ab341d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 16:32:54 +0000 Subject: [PATCH 100/159] Bump the python-requirements group across 1 directory with 7 updates Bumps the python-requirements group with 7 updates in the /requirements directory: | Package | From | To | | --- | --- | --- | | [pyright](https://github.com/RobertCraigie/pyright-python) | `1.1.369` | `1.1.376` | | [pytest](https://github.com/pytest-dev/pytest) | `8.2.0` | `8.3.2` | | [types-setuptools](https://github.com/python/typeshed) | `70.1.0.20240627` | `71.1.0.20240818` | | [watchdog](https://github.com/gorakhargosh/watchdog) | `4.0.0` | `4.0.2` | | [sphinx](https://github.com/sphinx-doc/sphinx) | `7.3.7` | `8.0.2` | | [cffi](https://github.com/python-cffi/cffi) | `1.17.0rc1` | `1.17.0` | | [cryptography](https://github.com/pyca/cryptography) | `42.0.8` | `43.0.0` | Updates `pyright` from 1.1.369 to 1.1.376 - [Release notes](https://github.com/RobertCraigie/pyright-python/releases) - [Commits](https://github.com/RobertCraigie/pyright-python/compare/v1.1.369...v1.1.376) Updates `pytest` from 8.2.0 to 8.3.2 - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.2.0...8.3.2) Updates `types-setuptools` from 70.1.0.20240627 to 71.1.0.20240818 - [Commits](https://github.com/python/typeshed/commits) Updates `watchdog` from 4.0.0 to 4.0.2 - [Release notes](https://github.com/gorakhargosh/watchdog/releases) - [Changelog](https://github.com/gorakhargosh/watchdog/blob/master/changelog.rst) - 
[Commits](https://github.com/gorakhargosh/watchdog/compare/v4.0.0...v4.0.2) Updates `sphinx` from 7.3.7 to 8.0.2 - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/v8.0.2/CHANGES.rst) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v7.3.7...v8.0.2) Updates `cffi` from 1.17.0rc1 to 1.17.0 - [Release notes](https://github.com/python-cffi/cffi/releases) - [Commits](https://github.com/python-cffi/cffi/compare/v1.17.0rc1...v1.17.0) Updates `cryptography` from 42.0.8 to 43.0.0 - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/42.0.8...43.0.0) --- updated-dependencies: - dependency-name: pyright dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-requirements - dependency-name: types-setuptools dependency-type: direct:production update-type: version-update:semver-major dependency-group: python-requirements - dependency-name: watchdog dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: sphinx dependency-type: direct:production update-type: version-update:semver-major dependency-group: python-requirements - dependency-name: cffi dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-requirements - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-major dependency-group: python-requirements ... Signed-off-by: dependabot[bot] --- requirements/docs.txt | 2 +- requirements/tests.in | 2 +- requirements/tests.txt | 8 ++++---- requirements/typing.txt | 8 ++++---- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index 288993f4d..f372c1a19 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -34,7 +34,7 @@ requests==2.31.0 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.3.7 +sphinx==8.0.2 # via # -r docs.in # pallets-sphinx-themes diff --git a/requirements/tests.in b/requirements/tests.in index 7c87c4d8d..d6ea15c66 100644 --- a/requirements/tests.in +++ b/requirements/tests.in @@ -6,4 +6,4 @@ cryptography watchdog ephemeral-port-reserve # pin cffi 1.17.0 pre-release for python 3.13 support -cffi==1.17.0rc1 +cffi==1.17.0 diff --git a/requirements/tests.txt b/requirements/tests.txt index 1c3184a1b..170da4400 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -4,11 +4,11 @@ # # pip-compile tests.in # -cffi==1.17.0rc1 +cffi==1.17.0 # via # -r tests.in # cryptography -cryptography==42.0.8 +cryptography==43.0.0 # via -r tests.in ephemeral-port-reserve==1.1.4 # via -r tests.in @@ -22,7 +22,7 @@ psutil==6.0.0 # via pytest-xprocess pycparser==2.22 # via cffi -pytest==8.2.2 +pytest==8.3.2 # via # -r tests.in # pytest-timeout @@ -31,5 +31,5 @@ pytest-timeout==2.3.1 # via -r tests.in pytest-xprocess==0.23.0 # via -r tests.in -watchdog==4.0.1 +watchdog==4.0.2 # via -r tests.in diff --git a/requirements/typing.txt b/requirements/typing.txt index 8470daf8c..9242fc4af 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -16,19 +16,19 @@ packaging==24.0 # via pytest pluggy==1.5.0 # via pytest -pyright==1.1.369 +pyright==1.1.376 # via -r typing.in -pytest==8.2.0 +pytest==8.3.2 # via -r 
typing.in types-contextvars==2.4.7.3 # via -r typing.in types-dataclasses==0.6.6 # via -r typing.in -types-setuptools==70.1.0.20240627 +types-setuptools==71.1.0.20240818 # via -r typing.in typing-extensions==4.11.0 # via mypy -watchdog==4.0.0 +watchdog==4.0.2 # via -r typing.in # The following packages are considered to be unsafe in a requirements file: From a2f29de1255de4c737dbb541476991550c1a1eee Mon Sep 17 00:00:00 2001 From: David Lord Date: Tue, 20 Aug 2024 10:58:30 -0700 Subject: [PATCH 101/159] restore invalid bytes behavior for form parser --- CHANGES.rst | 4 ++++ src/werkzeug/formparser.py | 14 +++++--------- tests/test_formparser.py | 10 +++++++++- 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 41878b5da..600ec0b20 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,10 @@ Version 3.0.4 Unreleased +- Restore behavior where parsing `multipart/x-www-form-urlencoded` data with + invalid UTF-8 bytes in the body results in no form data parsed rather than a + 413 error. :issue:`2930` + Version 3.0.3 ------------- diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py index ba84721e3..e4b1f27fd 100644 --- a/src/werkzeug/formparser.py +++ b/src/werkzeug/formparser.py @@ -281,15 +281,11 @@ def _parse_urlencoded( ): raise RequestEntityTooLarge() - try: - items = parse_qsl( - stream.read().decode(), - keep_blank_values=True, - errors="werkzeug.url_quote", - ) - except ValueError as e: - raise RequestEntityTooLarge() from e - + items = parse_qsl( + stream.read().decode(), + keep_blank_values=True, + errors="werkzeug.url_quote", + ) return stream, self.cls(items), self.cls() diff --git a/tests/test_formparser.py b/tests/test_formparser.py index 1ecb01208..ed63be686 100644 --- a/tests/test_formparser.py +++ b/tests/test_formparser.py @@ -122,13 +122,21 @@ def test_limiting(self): req.max_form_parts = 1 pytest.raises(RequestEntityTooLarge, lambda: req.form["foo"]) - def test_x_www_urlencoded_max_form_parts(self): + def test_urlencoded_no_max(self) -> None: r = Request.from_values(method="POST", data={"a": 1, "b": 2}) r.max_form_parts = 1 assert r.form["a"] == "1" assert r.form["b"] == "2" + def test_urlencoded_silent_decode(self) -> None: + r = Request.from_values( + data=b"\x80", + content_type="application/x-www-form-urlencoded", + method="POST", + ) + assert not r.form + def test_missing_multipart_boundary(self): data = ( b"--foo\r\nContent-Disposition: form-field; name=foo\r\n\r\n" From 3a893d27b1a1b7da7a8ec52c297ba163371ddf71 Mon Sep 17 00:00:00 2001 From: David Lord Date: Tue, 20 Aug 2024 21:30:45 -0700 Subject: [PATCH 102/159] improve parse_options_header performance --- CHANGES.rst | 2 ++ src/werkzeug/http.py | 65 ++++++++++++++++++++++++++++---------------- tests/test_http.py | 6 ++-- 3 files changed, 47 insertions(+), 26 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 600ec0b20..2150ad299 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -8,6 +8,8 @@ Unreleased - Restore behavior where parsing `multipart/x-www-form-urlencoded` data with invalid UTF-8 bytes in the body results in no form data parsed rather than a 413 error. :issue:`2930` +- Improve ``parse_options_header`` performance when parsing unterminated + quoted string values. 
:issue:`2907` Version 3.0.3 diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py index 27fa9af90..c0ec92389 100644 --- a/src/werkzeug/http.py +++ b/src/werkzeug/http.py @@ -395,22 +395,8 @@ def parse_dict_header(value: str) -> dict[str, str | None]: # https://httpwg.org/specs/rfc9110.html#parameter -_parameter_re = re.compile( - r""" - # don't match multiple empty parts, that causes backtracking - \s*;\s* # find the part delimiter - (?: - ([\w!#$%&'*+\-.^`|~]+) # key, one or more token chars - = # equals, with no space on either side - ( # value, token or quoted string - [\w!#$%&'*+\-.^`|~]+ # one or more token chars - | - "(?:\\\\|\\"|.)*?" # quoted string, consuming slash escapes - ) - )? # optionally match key=value, to account for empty parts - """, - re.ASCII | re.VERBOSE, -) +_parameter_key_re = re.compile(r"([\w!#$%&'*+\-.^`|~]+)=", flags=re.ASCII) +_parameter_token_value_re = re.compile(r"[\w!#$%&'*+\-.^`|~]+", flags=re.ASCII) # https://www.rfc-editor.org/rfc/rfc2231#section-4 _charset_value_re = re.compile( r""" @@ -492,18 +478,49 @@ def parse_options_header(value: str | None) -> tuple[str, dict[str, str]]: # empty (invalid) value, or value without options return value, {} - rest = f";{rest}" + # Collect all valid key=value parts without processing the value. + parts: list[tuple[str, str]] = [] + + while True: + if (m := _parameter_key_re.match(rest)) is not None: + pk = m.group(1).lower() + rest = rest[m.end() :] + + # Value may be a token. + if (m := _parameter_token_value_re.match(rest)) is not None: + parts.append((pk, m.group())) + + # Value may be a quoted string, find the closing quote. + elif rest[:1] == '"': + pos = 1 + length = len(rest) + + while pos < length: + if rest[pos : pos + 2] in {"\\\\", '\\"'}: + # Consume escaped slashes and quotes. + pos += 2 + elif rest[pos] == '"': + # Stop at an unescaped quote. + parts.append((pk, rest[: pos + 1])) + rest = rest[pos + 1 :] + break + else: + # Consume any other character. + pos += 1 + + # Find the next section delimited by `;`, if any. + if (end := rest.find(";")) == -1: + break + + rest = rest[end + 1 :].lstrip() + options: dict[str, str] = {} encoding: str | None = None continued_encoding: str | None = None - for pk, pv in _parameter_re.findall(rest): - if not pk: - # empty or invalid part - continue - - pk = pk.lower() - + # For each collected part, process optional charset and continuation, + # unquote quoted values. + for pk, pv in parts: if pk[-1] == "*": # key*=charset''value becomes key=value, where value is percent encoded pk = pk[:-1] diff --git a/tests/test_http.py b/tests/test_http.py index bbd51ba33..02e5eb512 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -361,8 +361,8 @@ def test_parse_options_header_empty(self, value, expect): ('v;a="b\\"c";d=e', {"a": 'b"c', "d": "e"}), # HTTP headers use \\ for internal \ ('v;a="c:\\\\"', {"a": "c:\\"}), - # Invalid trailing slash in quoted part is left as-is. - ('v;a="c:\\"', {"a": "c:\\"}), + # Part with invalid trailing slash is discarded. + ('v;a="c:\\"', {}), ('v;a="b\\\\\\"c"', {"a": 'b\\"c'}), # multipart form data uses %22 for internal " ('v;a="b%22c"', {"a": 'b"c'}), @@ -377,6 +377,8 @@ def test_parse_options_header_empty(self, value, expect): ("v;a*0=b;a*1=c;d=e", {"a": "bc", "d": "e"}), ("v;a*0*=b", {"a": "b"}), ("v;a*0*=UTF-8''b;a*1=c;a*2*=%C2%B5", {"a": "bcµ"}), + # Long invalid quoted string with trailing slashes does not freeze. 
+ ('v;a="' + "\\" * 400, {}), ], ) def test_parse_options_header(self, value, expect) -> None: From 650481972fbab9e9d8f0b1af23c4fb6a88b4e8c0 Mon Sep 17 00:00:00 2001 From: David Lord Date: Wed, 21 Aug 2024 07:16:19 -0700 Subject: [PATCH 103/159] synchronize failed pin entry --- CHANGES.rst | 2 ++ src/werkzeug/debug/__init__.py | 14 +++++++++----- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 2150ad299..b39cf5cf2 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,8 @@ Unreleased 413 error. :issue:`2930` - Improve ``parse_options_header`` performance when parsing unterminated quoted string values. :issue:`2907` +- Debugger pin auth is synchronized across threads/processes when tracking + failed entries. :issue:`2916` Version 3.0.3 diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py index 2eebd38a8..69ad3f4f4 100644 --- a/src/werkzeug/debug/__init__.py +++ b/src/werkzeug/debug/__init__.py @@ -13,6 +13,7 @@ from contextlib import ExitStack from io import BytesIO from itertools import chain +from multiprocessing import Value from os.path import basename from os.path import join from zlib import adler32 @@ -286,7 +287,7 @@ def __init__( self.console_init_func = console_init_func self.show_hidden_frames = show_hidden_frames self.secret = gen_salt(20) - self._failed_pin_auth = 0 + self._failed_pin_auth = Value("B") self.pin_logging = pin_logging if pin_security: @@ -454,8 +455,11 @@ def check_host_trust(self, environ: WSGIEnvironment) -> bool: return host_is_trusted(environ.get("HTTP_HOST"), self.trusted_hosts) def _fail_pin_auth(self) -> None: - time.sleep(5.0 if self._failed_pin_auth > 5 else 0.5) - self._failed_pin_auth += 1 + with self._failed_pin_auth.get_lock(): + count = self._failed_pin_auth.value + self._failed_pin_auth.value = count + 1 + + time.sleep(5.0 if count > 5 else 0.5) def pin_auth(self, request: Request) -> Response: """Authenticates with the pin.""" @@ -482,7 +486,7 @@ def pin_auth(self, request: Request) -> Response: auth = True # If we failed too many times, then we're locked out. - elif self._failed_pin_auth > 10: + elif self._failed_pin_auth.value > 10: exhausted = True # Otherwise go through pin based authentication @@ -490,7 +494,7 @@ def pin_auth(self, request: Request) -> Response: entered_pin = request.args["pin"] if entered_pin.strip().replace("-", "") == pin.replace("-", ""): - self._failed_pin_auth = 0 + self._failed_pin_auth.value = 0 auth = True else: self._fail_pin_auth() From cf18d037faeacfb458e645fa2dbaf25b5bbf1080 Mon Sep 17 00:00:00 2001 From: David Lord Date: Wed, 21 Aug 2024 09:02:30 -0700 Subject: [PATCH 104/159] treat SSLEOFError as dropped connection --- CHANGES.rst | 2 ++ src/werkzeug/serving.py | 9 ++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index b39cf5cf2..44b54eed4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -12,6 +12,8 @@ Unreleased quoted string values. :issue:`2907` - Debugger pin auth is synchronized across threads/processes when tracking failed entries. :issue:`2916` +- Dev server handles unexpected `SSLEOFError` due to issue in Python < 3.13. + :issue:`2926` Version 3.0.3 diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py index 859f9aacb..4faf9262c 100644 --- a/src/werkzeug/serving.py +++ b/src/werkzeug/serving.py @@ -37,6 +37,12 @@ try: import ssl + + connection_dropped_errors: tuple[type[Exception], ...] 
= ( + ConnectionError, + socket.timeout, + ssl.SSLEOFError, + ) except ImportError: class _SslDummy: @@ -47,6 +53,7 @@ def __getattr__(self, name: str) -> t.Any: ) ssl = _SslDummy() # type: ignore + connection_dropped_errors = (ConnectionError, socket.timeout) _log_add_style = True @@ -361,7 +368,7 @@ def execute(app: WSGIApplication) -> None: try: execute(self.server.app) - except (ConnectionError, socket.timeout) as e: + except connection_dropped_errors as e: self.connection_dropped(e, environ) except Exception as e: if self.server.passthrough_errors: From 1d1d987f6399031d33656d366b7bfa9a3960441b Mon Sep 17 00:00:00 2001 From: David Lord Date: Wed, 21 Aug 2024 12:23:52 -0700 Subject: [PATCH 105/159] debugger works on urls with query string --- CHANGES.rst | 2 ++ src/werkzeug/debug/shared/debugger.js | 36 ++++++++------------------- 2 files changed, 12 insertions(+), 26 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 44b54eed4..a7a312d47 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -14,6 +14,8 @@ Unreleased failed entries. :issue:`2916` - Dev server handles unexpected `SSLEOFError` due to issue in Python < 3.13. :issue:`2926` +- Debugger pin auth works when the URL already contains a query string. + :issue:`2918` Version 3.0.3 diff --git a/src/werkzeug/debug/shared/debugger.js b/src/werkzeug/debug/shared/debugger.js index 18c658344..809b14a6e 100644 --- a/src/werkzeug/debug/shared/debugger.js +++ b/src/werkzeug/debug/shared/debugger.js @@ -37,18 +37,22 @@ function wrapPlainTraceback() { plainTraceback.replaceWith(wrapper); } +function makeDebugURL(args) { + const params = new URLSearchParams(args) + params.set("s", SECRET) + return `?__debugger__=yes&${params}` +} + function initPinBox() { document.querySelector(".pin-prompt form").addEventListener( "submit", function (event) { event.preventDefault(); - const pin = encodeURIComponent(this.pin.value); - const encodedSecret = encodeURIComponent(SECRET); const btn = this.btn; btn.disabled = true; fetch( - `${document.location}?__debugger__=yes&cmd=pinauth&pin=${pin}&s=${encodedSecret}` + makeDebugURL({cmd: "pinauth", pin: this.pin.value}) ) .then((res) => res.json()) .then(({auth, exhausted}) => { @@ -77,10 +81,7 @@ function initPinBox() { function promptForPin() { if (!EVALEX_TRUSTED) { - const encodedSecret = encodeURIComponent(SECRET); - fetch( - `${document.location}?__debugger__=yes&cmd=printpin&s=${encodedSecret}` - ); + fetch(makeDebugURL({cmd: "printpin"})); const pinPrompt = document.getElementsByClassName("pin-prompt")[0]; fadeIn(pinPrompt); document.querySelector('.pin-prompt input[name="pin"]').focus(); @@ -237,7 +238,7 @@ function createConsoleInput() { function createIconForConsole() { const img = document.createElement("img"); - img.setAttribute("src", "?__debugger__=yes&cmd=resource&f=console.png"); + img.setAttribute("src", makeDebugURL({cmd: "resource", f: "console.png"})); img.setAttribute("title", "Open an interactive python shell in this frame"); return img; } @@ -263,24 +264,7 @@ function handleConsoleSubmit(e, command, frameID) { e.preventDefault(); return new Promise((resolve) => { - // Get input command. - const cmd = command.value; - - // Setup GET request. - const urlPath = ""; - const params = { - __debugger__: "yes", - cmd: cmd, - frm: frameID, - s: SECRET, - }; - const paramString = Object.keys(params) - .map((key) => { - return "&" + encodeURIComponent(key) + "=" + encodeURIComponent(params[key]); - }) - .join(""); - - fetch(urlPath + "?" 
+ paramString) + fetch(makeDebugURL({cmd: command.value, frm: frameID})) .then((res) => { return res.text(); }) From b933ccb1f5eaf378d15ae88488bf993600fdca43 Mon Sep 17 00:00:00 2001 From: David Lord Date: Wed, 21 Aug 2024 12:35:55 -0700 Subject: [PATCH 106/159] release version 3.0.4 --- CHANGES.rst | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index a7a312d47..7bd3435e5 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,7 @@ Version 3.0.4 ------------- -Unreleased +Released 2024-08-21 - Restore behavior where parsing `multipart/x-www-form-urlencoded` data with invalid UTF-8 bytes in the body results in no form data parsed rather than a diff --git a/pyproject.toml b/pyproject.toml index 1f194748c..9f7accafe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.0.4.dev" +version = "3.0.4" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"} From dca8cb620828c775c6b644311b2f08c12055a2c3 Mon Sep 17 00:00:00 2001 From: David Lord Date: Wed, 21 Aug 2024 12:58:17 -0700 Subject: [PATCH 107/159] fix issue number --- CHANGES.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 7bd3435e5..05efeb8bd 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -9,7 +9,7 @@ Released 2024-08-21 invalid UTF-8 bytes in the body results in no form data parsed rather than a 413 error. :issue:`2930` - Improve ``parse_options_header`` performance when parsing unterminated - quoted string values. :issue:`2907` + quoted string values. :issue:`2904` - Debugger pin auth is synchronized across threads/processes when tracking failed entries. :issue:`2916` - Dev server handles unexpected `SSLEOFError` due to issue in Python < 3.13. 
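The failed-pin tracking introduced in patch 103 above relies on a counter backed by ``multiprocessing.Value`` so the back-off state is shared across threads and forked worker processes instead of living on a single per-process attribute. A minimal standalone sketch of that pattern, assuming only the standard library (the names below are illustrative, not Werkzeug's internal attributes):

    import time
    from multiprocessing import Value

    # Shared unsigned counter; starts at 0 and is visible to forked workers.
    _failed_attempts = Value("I")

    def record_failed_attempt() -> None:
        # Read and bump the counter under its lock so concurrent workers agree.
        with _failed_attempts.get_lock():
            count = _failed_attempts.value
            _failed_attempts.value = count + 1
        # Back off outside the lock; sleep longer once failures accumulate.
        time.sleep(5.0 if count > 5 else 0.5)

    def reset_failed_attempts() -> None:
        _failed_attempts.value = 0

Sleeping outside the ``get_lock()`` block keeps a deliberately slow failure path from stalling other workers that only need to update the counter.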
From 36c4b0fa51714efc12805f44d6629fdf8e4d19ec Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 23 Aug 2024 17:24:40 -0700 Subject: [PATCH 108/159] add gha-update, remove dependabot --- .github/dependabot.yml | 24 ------------------------ pyproject.toml | 5 +++++ tox.ini | 5 +++++ 3 files changed, 10 insertions(+), 24 deletions(-) delete mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index fa94b770a..000000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,24 +0,0 @@ -version: 2 -updates: - - package-ecosystem: github-actions - directory: / - schedule: - interval: monthly - ignore: - # slsa depends on upload/download v3 - - dependency-name: actions/upload-artifact - versions: '>= 4' - - dependency-name: actions/download-artifact - versions: '>= 4' - groups: - github-actions: - patterns: - - '*' - - package-ecosystem: pip - directory: /requirements/ - schedule: - interval: monthly - groups: - python-requirements: - patterns: - - '*' diff --git a/pyproject.toml b/pyproject.toml index 9f7accafe..227e0b13d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -112,3 +112,8 @@ ignore-init-module-imports = true [tool.ruff.lint.isort] force-single-line = true order-by-type = false + +[tool.gha-update] +tag-only = [ + "slsa-framework/slsa-github-generator", +] diff --git a/tox.ini b/tox.ini index ee331d109..da81aa65c 100644 --- a/tox.ini +++ b/tox.ini @@ -28,6 +28,11 @@ commands = mypy deps = -r requirements/docs.txt commands = sphinx-build -E -W -b dirhtml docs docs/_build/dirhtml +[testenv:update-actions] +labels = update +deps = gha-update +commands = gha-update + [testenv:update-pre_commit] labels = update deps = pre-commit From 0c8136bd10ff2b685efbeb3abc340e4a9fef9be1 Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 23 Aug 2024 17:27:08 -0700 Subject: [PATCH 109/159] update dev dependencies --- .github/workflows/publish.yaml | 16 ++++----- .github/workflows/tests.yaml | 8 ++--- .pre-commit-config.yaml | 2 +- pyproject.toml | 1 - requirements/build.txt | 2 +- requirements/dev.txt | 63 ++++++++++++++++------------------ requirements/docs.txt | 26 +++++++------- requirements/tests.txt | 10 +++--- requirements/typing.txt | 17 ++++----- 9 files changed, 69 insertions(+), 76 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 4e8139b79..ae7cc5a72 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,8 +9,8 @@ jobs: outputs: hash: ${{ steps.hash.outputs.hash }} steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1 with: python-version: '3.x' cache: pip @@ -23,7 +23,7 @@ jobs: - name: generate hash id: hash run: cd dist && echo "hash=$(sha256sum * | base64 -w0)" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3 + - uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6 with: path: ./dist provenance: @@ -33,7 +33,7 @@ jobs: id-token: write contents: write # Can't pin with hash due to how this workflow works. 
- uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.10.0 + uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.0.0 with: base64-subjects: ${{ needs.build.outputs.hash }} create-release: @@ -44,7 +44,7 @@ jobs: permissions: contents: write steps: - - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 + - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - name: create release run: > gh release create --draft --repo ${{ github.repository }} @@ -63,11 +63,11 @@ jobs: permissions: id-token: write steps: - - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - - uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # v1.8.14 + - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + - uses: pypa/gh-action-pypi-publish@ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0 # v1.9.0 with: repository-url: https://test.pypi.org/legacy/ packages-dir: artifact/ - - uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # v1.8.14 + - uses: pypa/gh-action-pypi-publish@ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0 # v1.9.0 with: packages-dir: artifact/ diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index fb2629019..4a755422f 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -31,8 +31,8 @@ jobs: - {python: '3.8'} - {name: PyPy, python: 'pypy-3.10', tox: pypy310} steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1 with: python-version: ${{ matrix.python }} allow-prereleases: true @@ -43,8 +43,8 @@ jobs: typing: runs-on: ubuntu-latest steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1 with: python-version: '3.x' cache: pip diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5b7ebb878..5bee1ca4b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ ci: autoupdate_schedule: monthly repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.4 + rev: v0.6.2 hooks: - id: ruff - id: ruff-format diff --git a/pyproject.toml b/pyproject.toml index 227e0b13d..aed25bc17 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -107,7 +107,6 @@ select = [ ignore = [ "E402", # allow circular imports at end of file ] -ignore-init-module-imports = true [tool.ruff.lint.isort] force-single-line = true diff --git a/requirements/build.txt b/requirements/build.txt index 52fd1f69a..4b289ca7f 100644 --- a/requirements/build.txt +++ b/requirements/build.txt @@ -6,7 +6,7 @@ # build==1.2.1 # via -r build.in -packaging==24.0 +packaging==24.1 # via build pyproject-hooks==1.1.0 # via build diff --git a/requirements/dev.txt b/requirements/dev.txt index bbd58eda5..d75352c7b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -4,21 +4,21 @@ # # pip-compile dev.in # -alabaster==0.7.16 +alabaster==1.0.0 # via # -r docs.txt # sphinx -babel==2.15.0 +babel==2.16.0 # 
via # -r docs.txt # sphinx -cachetools==5.3.3 +cachetools==5.5.0 # via tox -certifi==2024.2.2 +certifi==2024.7.4 # via # -r docs.txt # requests -cffi @ https://github.com/python-cffi/cffi/archive/d7f750b1b1c5ea4da5aa537b9baba0e01b0ce843.zip +cffi==1.17.0rc1 # via # -r tests.txt # cryptography @@ -32,7 +32,7 @@ charset-normalizer==3.3.2 # requests colorama==0.4.6 # via tox -cryptography==42.0.7 +cryptography==43.0.0 # via -r tests.txt distlib==0.3.8 # via virtualenv @@ -42,13 +42,13 @@ docutils==0.21.2 # sphinx ephemeral-port-reserve==1.1.4 # via -r tests.txt -filelock==3.14.0 +filelock==3.15.4 # via # tox # virtualenv -identify==2.5.36 +identify==2.6.0 # via pre-commit -idna==3.7 +idna==3.8 # via # -r docs.txt # requests @@ -75,12 +75,12 @@ mypy-extensions==1.0.0 # via # -r typing.txt # mypy -nodeenv==1.8.0 +nodeenv==1.9.1 # via # -r typing.txt # pre-commit # pyright -packaging==24.0 +packaging==24.1 # via # -r docs.txt # -r tests.txt @@ -102,9 +102,9 @@ pluggy==1.5.0 # -r typing.txt # pytest # tox -pre-commit==3.7.1 +pre-commit==3.8.0 # via -r dev.in -psutil==5.9.8 +psutil==6.0.0 # via # -r tests.txt # pytest-xprocess @@ -116,11 +116,11 @@ pygments==2.18.0 # via # -r docs.txt # sphinx -pyproject-api==1.6.1 +pyproject-api==1.7.1 # via tox -pyright==1.1.363 +pyright==1.1.377 # via -r typing.txt -pytest==8.2.0 +pytest==8.3.2 # via # -r tests.txt # -r typing.txt @@ -130,9 +130,9 @@ pytest-timeout==2.3.1 # via -r tests.txt pytest-xprocess==0.23.0 # via -r tests.txt -pyyaml==6.0.1 +pyyaml==6.0.2 # via pre-commit -requests==2.31.0 +requests==2.32.3 # via # -r docs.txt # sphinx @@ -140,20 +140,20 @@ snowballstemmer==2.2.0 # via # -r docs.txt # sphinx -sphinx==7.3.7 +sphinx==8.0.2 # via # -r docs.txt # pallets-sphinx-themes # sphinxcontrib-log-cabinet -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 # via # -r docs.txt # sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==2.0.0 # via # -r docs.txt # sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.1.0 # via # -r docs.txt # sphinx @@ -163,38 +163,35 @@ sphinxcontrib-jsmath==1.0.1 # sphinx sphinxcontrib-log-cabinet==1.0.1 # via -r docs.txt -sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==2.0.0 # via # -r docs.txt # sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==2.0.0 # via # -r docs.txt # sphinx -tox==4.15.0 +tox==4.18.0 # via -r dev.in types-contextvars==2.4.7.3 # via -r typing.txt types-dataclasses==0.6.6 # via -r typing.txt -types-setuptools==69.5.0.20240518 +types-setuptools==73.0.0.20240822 # via -r typing.txt -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # -r typing.txt # mypy -urllib3==2.2.1 +urllib3==2.2.2 # via # -r docs.txt # requests -virtualenv==20.26.2 +virtualenv==20.26.3 # via # pre-commit # tox -watchdog==4.0.0 +watchdog==4.0.2 # via # -r tests.txt # -r typing.txt - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/requirements/docs.txt b/requirements/docs.txt index 288993f4d..454762892 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -4,17 +4,17 @@ # # pip-compile docs.in # -alabaster==0.7.16 +alabaster==1.0.0 # via sphinx -babel==2.15.0 +babel==2.16.0 # via sphinx -certifi==2024.2.2 +certifi==2024.7.4 # via requests charset-normalizer==3.3.2 # via requests docutils==0.21.2 # via sphinx -idna==3.7 +idna==3.8 # via requests imagesize==1.4.1 # via sphinx @@ -22,7 +22,7 @@ jinja2==3.1.4 # via sphinx markupsafe==2.1.5 # via jinja2 -packaging==24.0 +packaging==24.1 # via # 
pallets-sphinx-themes # sphinx @@ -30,28 +30,28 @@ pallets-sphinx-themes==2.1.3 # via -r docs.in pygments==2.18.0 # via sphinx -requests==2.31.0 +requests==2.32.3 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.3.7 +sphinx==8.0.2 # via # -r docs.in # pallets-sphinx-themes # sphinxcontrib-log-cabinet -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-log-cabinet==1.0.1 # via -r docs.in -sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx -urllib3==2.2.1 +urllib3==2.2.2 # via requests diff --git a/requirements/tests.txt b/requirements/tests.txt index 18bae7666..6cfb58eef 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -8,21 +8,21 @@ cffi==1.17.0rc1 # via # -r tests.in # cryptography -cryptography==42.0.7 +cryptography==43.0.0 # via -r tests.in ephemeral-port-reserve==1.1.4 # via -r tests.in iniconfig==2.0.0 # via pytest -packaging==24.0 +packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -psutil==5.9.8 +psutil==6.0.0 # via pytest-xprocess pycparser==2.22 # via cffi -pytest==8.2.0 +pytest==8.3.2 # via # -r tests.in # pytest-timeout @@ -31,5 +31,5 @@ pytest-timeout==2.3.1 # via -r tests.in pytest-xprocess==0.23.0 # via -r tests.in -watchdog==4.0.0 +watchdog==4.0.2 # via -r tests.in diff --git a/requirements/typing.txt b/requirements/typing.txt index c14ed66d6..37467d926 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -10,26 +10,23 @@ mypy==1.11.1 # via -r typing.in mypy-extensions==1.0.0 # via mypy -nodeenv==1.8.0 +nodeenv==1.9.1 # via pyright -packaging==24.0 +packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -pyright==1.1.363 +pyright==1.1.377 # via -r typing.in -pytest==8.2.0 +pytest==8.3.2 # via -r typing.in types-contextvars==2.4.7.3 # via -r typing.in types-dataclasses==0.6.6 # via -r typing.in -types-setuptools==69.5.0.20240518 +types-setuptools==73.0.0.20240822 # via -r typing.in -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via mypy -watchdog==4.0.0 +watchdog==4.0.2 # via -r typing.in - -# The following packages are considered to be unsafe in a requirements file: -# setuptools From e664aa7fe4b0e53f870bd2788516b25007f4399e Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 23 Aug 2024 18:05:42 -0700 Subject: [PATCH 110/159] remove pre-commit.ci update --- .pre-commit-config.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5bee1ca4b..acd7bba88 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,3 @@ -ci: - autoupdate_schedule: monthly repos: - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.6.2 From 3d4dd8e60aadd4fcd44370ffff7f996225173ec4 Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 1 Sep 2024 08:32:27 -0700 Subject: [PATCH 111/159] set up pre-commit lite workflow Committed via https://github.com/asottile/all-repos --- pre-commit.yaml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 pre-commit.yaml diff --git a/pre-commit.yaml b/pre-commit.yaml new file mode 100644 index 000000000..34e77e4f2 --- /dev/null +++ b/pre-commit.yaml @@ -0,0 +1,15 @@ +on: + pull_request: + push: + branches: [main, '*.x'] +jobs: + main: + runs-on: ubuntu-latest + steps: + - 
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0 + with: + python-version: 3.x + - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 + - uses: pre-commit-ci/lite-action@9d882e7a565f7008d4faf128f27d1cb6503d4ebf # v1.0.2 + if: ${{ !cancelled() }} From 29085a940225f039846e2498502033eb97b3b258 Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 1 Sep 2024 09:03:39 -0700 Subject: [PATCH 112/159] set up pre-commit lite workflow Committed via https://github.com/asottile/all-repos --- pre-commit.yaml => .github/workflows/pre-commit.yaml | 1 + 1 file changed, 1 insertion(+) rename pre-commit.yaml => .github/workflows/pre-commit.yaml (96%) diff --git a/pre-commit.yaml b/.github/workflows/pre-commit.yaml similarity index 96% rename from pre-commit.yaml rename to .github/workflows/pre-commit.yaml index 34e77e4f2..683c85bef 100644 --- a/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -1,3 +1,4 @@ +name: pre-commit on: pull_request: push: From 59abac80cacba2afa5172408b38053edd16b192f Mon Sep 17 00:00:00 2001 From: Kodi Arfer Date: Mon, 2 Sep 2024 15:10:28 -0400 Subject: [PATCH 113/159] Fix headlines in `quickstart.rst` This change should cause the sections on this page to be correctly listed as subordinate items of "Quickstart" in `index.rst`. --- docs/quickstart.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/quickstart.rst b/docs/quickstart.rst index 7424f0d6b..d97764e98 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -9,7 +9,7 @@ understanding of :pep:`3333` (WSGI) and :rfc:`2616` (HTTP). WSGI Environment -================ +---------------- The WSGI environment contains all the information the user request transmits to the application. It is passed to the WSGI application but you can also @@ -33,7 +33,7 @@ access the form data besides parsing that data by hand. Enter Request -============= +------------- For access to the request data the :class:`Request` object is much more fun. It wraps the `environ` and provides a read-only access to the data from @@ -112,7 +112,7 @@ The keys for the headers are of course case insensitive. Header Parsing -============== +-------------- There is more. Werkzeug provides convenient access to often used HTTP headers and other request data. @@ -183,7 +183,7 @@ True Responses -========= +--------- Response objects are the opposite of request objects. They are used to send data back to the client. 
In reality, response objects are nothing more than From 2b4ab69864d73b2e2e9055dee0db7862bffc6a71 Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 24 Oct 2024 12:45:09 -0700 Subject: [PATCH 114/159] update dev dependencies --- .github/workflows/publish.yaml | 10 +++---- .github/workflows/tests.yaml | 10 +++---- .pre-commit-config.yaml | 4 +-- requirements/build.txt | 6 ++-- requirements/dev.txt | 54 +++++++++++++++++++--------------- requirements/docs.txt | 19 +++++++----- requirements/tests.in | 4 +-- requirements/tests.txt | 12 ++++---- requirements/tests38.txt | 39 ++++++++++++++++++++++++ requirements/typing.txt | 16 +++++----- src/werkzeug/_reloader.py | 14 ++++----- tox.ini | 11 +++++++ 12 files changed, 129 insertions(+), 70 deletions(-) create mode 100644 requirements/tests38.txt diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index ae7cc5a72..61c622140 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,8 +9,8 @@ jobs: outputs: hash: ${{ steps.hash.outputs.hash }} steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 with: python-version: '3.x' cache: pip @@ -23,7 +23,7 @@ jobs: - name: generate hash id: hash run: cd dist && echo "hash=$(sha256sum * | base64 -w0)" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6 + - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 with: path: ./dist provenance: @@ -64,10 +64,10 @@ jobs: id-token: write steps: - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - - uses: pypa/gh-action-pypi-publish@ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0 # v1.9.0 + - uses: pypa/gh-action-pypi-publish@f7600683efdcb7656dec5b29656edb7bc586e597 # v1.10.3 with: repository-url: https://test.pypi.org/legacy/ packages-dir: artifact/ - - uses: pypa/gh-action-pypi-publish@ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0 # v1.9.0 + - uses: pypa/gh-action-pypi-publish@f7600683efdcb7656dec5b29656edb7bc586e597 # v1.10.3 with: packages-dir: artifact/ diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 4a755422f..d345b4301 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -31,8 +31,8 @@ jobs: - {python: '3.8'} - {name: PyPy, python: 'pypy-3.10', tox: pypy310} steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 with: python-version: ${{ matrix.python }} allow-prereleases: true @@ -43,14 +43,14 @@ jobs: typing: runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 with: python-version: '3.x' cache: pip cache-dependency-path: requirements*/*.txt - name: cache mypy - uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 + 
uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2 with: path: ./.mypy_cache key: mypy|${{ hashFiles('pyproject.toml') }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5bee1ca4b..74b54e8f1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,12 +2,12 @@ ci: autoupdate_schedule: monthly repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.2 + rev: v0.7.1 hooks: - id: ruff - id: ruff-format - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: check-merge-conflict - id: debug-statements diff --git a/requirements/build.txt b/requirements/build.txt index 4b289ca7f..1b13b0552 100644 --- a/requirements/build.txt +++ b/requirements/build.txt @@ -1,12 +1,12 @@ # -# This file is autogenerated by pip-compile with Python 3.12 +# This file is autogenerated by pip-compile with Python 3.13 # by the following command: # # pip-compile build.in # -build==1.2.1 +build==1.2.2.post1 # via -r build.in packaging==24.1 # via build -pyproject-hooks==1.1.0 +pyproject-hooks==1.2.0 # via build diff --git a/requirements/dev.txt b/requirements/dev.txt index d75352c7b..4f6a735d4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.12 +# This file is autogenerated by pip-compile with Python 3.13 # by the following command: # # pip-compile dev.in @@ -14,11 +14,11 @@ babel==2.16.0 # sphinx cachetools==5.5.0 # via tox -certifi==2024.7.4 +certifi==2024.8.30 # via # -r docs.txt # requests -cffi==1.17.0rc1 +cffi==1.17.1 # via # -r tests.txt # cryptography @@ -26,15 +26,15 @@ cfgv==3.4.0 # via pre-commit chardet==5.2.0 # via tox -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via # -r docs.txt # requests colorama==0.4.6 # via tox -cryptography==43.0.0 +cryptography==43.0.3 # via -r tests.txt -distlib==0.3.8 +distlib==0.3.9 # via virtualenv docutils==0.21.2 # via @@ -42,13 +42,13 @@ docutils==0.21.2 # sphinx ephemeral-port-reserve==1.1.4 # via -r tests.txt -filelock==3.15.4 +filelock==3.16.1 # via # tox # virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit -idna==3.8 +idna==3.10 # via # -r docs.txt # requests @@ -65,11 +65,11 @@ jinja2==3.1.4 # via # -r docs.txt # sphinx -markupsafe==2.1.5 +markupsafe==3.0.2 # via # -r docs.txt # jinja2 -mypy==1.11.1 +mypy==1.13.0 # via -r typing.txt mypy-extensions==1.0.0 # via @@ -90,9 +90,9 @@ packaging==24.1 # pytest # sphinx # tox -pallets-sphinx-themes==2.1.3 +pallets-sphinx-themes==2.3.0 # via -r docs.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # tox # virtualenv @@ -102,9 +102,9 @@ pluggy==1.5.0 # -r typing.txt # pytest # tox -pre-commit==3.8.0 +pre-commit==4.0.1 # via -r dev.in -psutil==6.0.0 +psutil==6.1.0 # via # -r tests.txt # pytest-xprocess @@ -116,11 +116,11 @@ pygments==2.18.0 # via # -r docs.txt # sphinx -pyproject-api==1.7.1 +pyproject-api==1.8.0 # via tox -pyright==1.1.377 +pyright==1.1.386 # via -r typing.txt -pytest==8.3.2 +pytest==8.3.3 # via # -r tests.txt # -r typing.txt @@ -140,11 +140,16 @@ snowballstemmer==2.2.0 # via # -r docs.txt # sphinx -sphinx==8.0.2 +sphinx==8.1.3 # via # -r docs.txt # pallets-sphinx-themes + # sphinx-notfound-page # sphinxcontrib-log-cabinet +sphinx-notfound-page==1.0.4 + # via + # -r docs.txt + # pallets-sphinx-themes sphinxcontrib-applehelp==2.0.0 # via # -r docs.txt @@ -171,27 +176,28 @@ sphinxcontrib-serializinghtml==2.0.0 # via # -r docs.txt # sphinx -tox==4.18.0 +tox==4.23.2 # via -r dev.in 
types-contextvars==2.4.7.3 # via -r typing.txt types-dataclasses==0.6.6 # via -r typing.txt -types-setuptools==73.0.0.20240822 +types-setuptools==75.2.0.20241019 # via -r typing.txt typing-extensions==4.12.2 # via # -r typing.txt # mypy -urllib3==2.2.2 + # pyright +urllib3==2.2.3 # via # -r docs.txt # requests -virtualenv==20.26.3 +virtualenv==20.27.0 # via # pre-commit # tox -watchdog==4.0.2 +watchdog==5.0.3 # via # -r tests.txt # -r typing.txt diff --git a/requirements/docs.txt b/requirements/docs.txt index 454762892..1e3a54ebb 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.12 +# This file is autogenerated by pip-compile with Python 3.13 # by the following command: # # pip-compile docs.in @@ -8,25 +8,25 @@ alabaster==1.0.0 # via sphinx babel==2.16.0 # via sphinx -certifi==2024.7.4 +certifi==2024.8.30 # via requests -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests docutils==0.21.2 # via sphinx -idna==3.8 +idna==3.10 # via requests imagesize==1.4.1 # via sphinx jinja2==3.1.4 # via sphinx -markupsafe==2.1.5 +markupsafe==3.0.2 # via jinja2 packaging==24.1 # via # pallets-sphinx-themes # sphinx -pallets-sphinx-themes==2.1.3 +pallets-sphinx-themes==2.3.0 # via -r docs.in pygments==2.18.0 # via sphinx @@ -34,11 +34,14 @@ requests==2.32.3 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==8.0.2 +sphinx==8.1.3 # via # -r docs.in # pallets-sphinx-themes + # sphinx-notfound-page # sphinxcontrib-log-cabinet +sphinx-notfound-page==1.0.4 + # via pallets-sphinx-themes sphinxcontrib-applehelp==2.0.0 # via sphinx sphinxcontrib-devhelp==2.0.0 @@ -53,5 +56,5 @@ sphinxcontrib-qthelp==2.0.0 # via sphinx sphinxcontrib-serializinghtml==2.0.0 # via sphinx -urllib3==2.2.2 +urllib3==2.2.3 # via requests diff --git a/requirements/tests.in b/requirements/tests.in index 7c87c4d8d..494b61022 100644 --- a/requirements/tests.in +++ b/requirements/tests.in @@ -1,9 +1,7 @@ pytest pytest-timeout -# pinned for python 3.8 support pytest-xprocess<1 cryptography watchdog ephemeral-port-reserve -# pin cffi 1.17.0 pre-release for python 3.13 support -cffi==1.17.0rc1 +cffi diff --git a/requirements/tests.txt b/requirements/tests.txt index 6cfb58eef..c4557c90f 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -1,14 +1,14 @@ # -# This file is autogenerated by pip-compile with Python 3.12 +# This file is autogenerated by pip-compile with Python 3.13 # by the following command: # # pip-compile tests.in # -cffi==1.17.0rc1 +cffi==1.17.1 # via # -r tests.in # cryptography -cryptography==43.0.0 +cryptography==43.0.3 # via -r tests.in ephemeral-port-reserve==1.1.4 # via -r tests.in @@ -18,11 +18,11 @@ packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -psutil==6.0.0 +psutil==6.1.0 # via pytest-xprocess pycparser==2.22 # via cffi -pytest==8.3.2 +pytest==8.3.3 # via # -r tests.in # pytest-timeout @@ -31,5 +31,5 @@ pytest-timeout==2.3.1 # via -r tests.in pytest-xprocess==0.23.0 # via -r tests.in -watchdog==4.0.2 +watchdog==5.0.3 # via -r tests.in diff --git a/requirements/tests38.txt b/requirements/tests38.txt new file mode 100644 index 000000000..10829d84c --- /dev/null +++ b/requirements/tests38.txt @@ -0,0 +1,39 @@ +# +# This file is autogenerated by pip-compile with Python 3.8 +# by the following command: +# +# pip-compile --output-file=tests38.txt tests.in +# +cffi==1.17.1 + # via + # -r tests.in + # cryptography +cryptography==43.0.3 + # via -r tests.in +ephemeral-port-reserve==1.1.4 + # via -r 
tests.in +exceptiongroup==1.2.2 + # via pytest +iniconfig==2.0.0 + # via pytest +packaging==24.1 + # via pytest +pluggy==1.5.0 + # via pytest +psutil==6.1.0 + # via pytest-xprocess +pycparser==2.22 + # via cffi +pytest==8.3.3 + # via + # -r tests.in + # pytest-timeout + # pytest-xprocess +pytest-timeout==2.3.1 + # via -r tests.in +pytest-xprocess==0.23.0 + # via -r tests.in +tomli==2.0.2 + # via pytest +watchdog==4.0.2 + # via -r tests.in diff --git a/requirements/typing.txt b/requirements/typing.txt index 37467d926..b90f838dd 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,12 +1,12 @@ # -# This file is autogenerated by pip-compile with Python 3.12 +# This file is autogenerated by pip-compile with Python 3.13 # by the following command: # # pip-compile typing.in # iniconfig==2.0.0 # via pytest -mypy==1.11.1 +mypy==1.13.0 # via -r typing.in mypy-extensions==1.0.0 # via mypy @@ -16,17 +16,19 @@ packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -pyright==1.1.377 +pyright==1.1.386 # via -r typing.in -pytest==8.3.2 +pytest==8.3.3 # via -r typing.in types-contextvars==2.4.7.3 # via -r typing.in types-dataclasses==0.6.6 # via -r typing.in -types-setuptools==73.0.0.20240822 +types-setuptools==75.2.0.20241019 # via -r typing.in typing-extensions==4.12.2 - # via mypy -watchdog==4.0.2 + # via + # mypy + # pyright +watchdog==5.0.3 # via -r typing.in diff --git a/src/werkzeug/_reloader.py b/src/werkzeug/_reloader.py index d7e91a61c..d3507f5a2 100644 --- a/src/werkzeug/_reloader.py +++ b/src/werkzeug/_reloader.py @@ -281,7 +281,7 @@ def trigger_reload(self, filename: str) -> None: self.log_reload(filename) sys.exit(3) - def log_reload(self, filename: str) -> None: + def log_reload(self, filename: str | bytes) -> None: filename = os.path.abspath(filename) _log("info", f" * Detected change in {filename!r}, reloading") @@ -340,7 +340,7 @@ def on_any_event(self, event: FileModifiedEvent): # type: ignore # the source file (or initial pyc file) as well. Ignore Git and # Mercurial internal changes. extra_patterns = [p for p in self.extra_files if not os.path.isdir(p)] - self.event_handler = EventHandler( # type: ignore[no-untyped-call] + self.event_handler = EventHandler( patterns=["*.py", "*.pyc", "*.zip", *extra_patterns], ignore_patterns=[ *[f"*/{d}/*" for d in _ignore_common_dirs], @@ -349,7 +349,7 @@ def on_any_event(self, event: FileModifiedEvent): # type: ignore ) self.should_reload = False - def trigger_reload(self, filename: str) -> None: + def trigger_reload(self, filename: str | bytes) -> None: # This is called inside an event handler, which means throwing # SystemExit has no effect. 
# https://github.com/gorakhargosh/watchdog/issues/294 @@ -358,11 +358,11 @@ def trigger_reload(self, filename: str) -> None: def __enter__(self) -> ReloaderLoop: self.watches: dict[str, t.Any] = {} - self.observer.start() # type: ignore[no-untyped-call] + self.observer.start() return super().__enter__() def __exit__(self, exc_type, exc_val, exc_tb): # type: ignore - self.observer.stop() # type: ignore[no-untyped-call] + self.observer.stop() self.observer.join() def run(self) -> None: @@ -378,7 +378,7 @@ def run_step(self) -> None: for path in _find_watchdog_paths(self.extra_files, self.exclude_patterns): if path not in self.watches: try: - self.watches[path] = self.observer.schedule( # type: ignore[no-untyped-call] + self.watches[path] = self.observer.schedule( self.event_handler, path, recursive=True ) except OSError: @@ -393,7 +393,7 @@ def run_step(self) -> None: watch = self.watches.pop(path, None) if watch is not None: - self.observer.unschedule(watch) # type: ignore[no-untyped-call] + self.observer.unschedule(watch) reloader_loops: dict[str, type[ReloaderLoop]] = { diff --git a/tox.ini b/tox.ini index da81aa65c..77f381c59 100644 --- a/tox.ini +++ b/tox.ini @@ -15,6 +15,9 @@ use_frozen_constraints = true deps = -r requirements/tests.txt commands = pytest -v --tb=short --basetemp={envtmpdir} {posargs} +[testenv:py38,py3.8] +deps = -r requirements/tests38.txt + [testenv:style] deps = pre-commit skip_install = true @@ -50,3 +53,11 @@ commands = pip-compile tests.in -q {posargs:-U} pip-compile typing.in -q {posargs:-U} pip-compile dev.in -q {posargs:-U} + +[testenv:update-requirements38] +base_python = 3.8 +labels = update +deps = pip-tools +skip_install = true +change_dir = requirements +commands = pip-compile tests.in -q -o tests38.txt {posargs:-U} From 889eafd9f9bfbbdabed67a428637d643e8962bad Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 24 Oct 2024 12:52:17 -0700 Subject: [PATCH 115/159] update test workflow trigger --- .github/workflows/tests.yaml | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index d345b4301..8a9741235 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -1,18 +1,10 @@ name: Tests on: push: - branches: - - main - - '*.x' - paths-ignore: - - 'docs/**' - - '*.md' - - '*.rst' + branches: [main, stable] + paths-ignore: ['docs/**', '*.md', '*.rst'] pull_request: - paths-ignore: - - 'docs/**' - - '*.md' - - '*.rst' + paths-ignore: ['docs/**', '*.md', '*.rst'] jobs: tests: name: ${{ matrix.name || matrix.python }} From ff507c521736bbfb900512f115c7dfd91e284637 Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 24 Oct 2024 12:53:08 -0700 Subject: [PATCH 116/159] update test workflow trigger --- .github/workflows/pre-commit.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index 683c85bef..adddea75d 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -2,7 +2,7 @@ name: pre-commit on: pull_request: push: - branches: [main, '*.x'] + branches: [main, stable] jobs: main: runs-on: ubuntu-latest From eb13b56e7826df97c8cacd1c86e4a5a560daa7f6 Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 24 Oct 2024 15:06:38 -0700 Subject: [PATCH 117/159] start version 3.0.5 --- CHANGES.rst | 6 ++++++ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 05efeb8bd..58b744f0e 
100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,11 @@ .. currentmodule:: werkzeug +Version 3.0.5 +------------- + +Unreleased + + Version 3.0.4 ------------- diff --git a/pyproject.toml b/pyproject.toml index aed25bc17..8d5aaebd2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.0.4" +version = "3.0.5.dev" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"} From cfe71ee9d53d612c39801e32a1258c6eb1e52f75 Mon Sep 17 00:00:00 2001 From: Jens Nistler Date: Thu, 5 Sep 2024 11:25:45 +0200 Subject: [PATCH 118/159] filter watchdog events --- CHANGES.rst | 2 ++ src/werkzeug/_reloader.py | 15 +++++++++++++-- tests/test_serving.py | 23 +++++++++++++++++++++++ 3 files changed, 38 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 58b744f0e..d603ed90e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,8 @@ Version 3.0.5 Unreleased +- The Watchdog reloader ignores file closed no write events. :issue:`2945` + Version 3.0.4 ------------- diff --git a/src/werkzeug/_reloader.py b/src/werkzeug/_reloader.py index d3507f5a2..8fd50b963 100644 --- a/src/werkzeug/_reloader.py +++ b/src/werkzeug/_reloader.py @@ -312,7 +312,11 @@ def run_step(self) -> None: class WatchdogReloaderLoop(ReloaderLoop): def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - from watchdog.events import EVENT_TYPE_OPENED + from watchdog.events import EVENT_TYPE_CLOSED + from watchdog.events import EVENT_TYPE_CREATED + from watchdog.events import EVENT_TYPE_DELETED + from watchdog.events import EVENT_TYPE_MODIFIED + from watchdog.events import EVENT_TYPE_MOVED from watchdog.events import FileModifiedEvent from watchdog.events import PatternMatchingEventHandler from watchdog.observers import Observer @@ -322,7 +326,14 @@ def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: class EventHandler(PatternMatchingEventHandler): def on_any_event(self, event: FileModifiedEvent): # type: ignore - if event.event_type == EVENT_TYPE_OPENED: + if event.event_type not in { + EVENT_TYPE_CLOSED, + EVENT_TYPE_CREATED, + EVENT_TYPE_DELETED, + EVENT_TYPE_MODIFIED, + EVENT_TYPE_MOVED, + }: + # skip events that don't involve changes to the file return trigger_reload(event.src_path) diff --git a/tests/test_serving.py b/tests/test_serving.py index 4abc755d9..501279b97 100644 --- a/tests/test_serving.py +++ b/tests/test_serving.py @@ -10,6 +10,7 @@ from unittest.mock import patch import pytest +from watchdog import version as watchdog_version from watchdog.events import EVENT_TYPE_MODIFIED from watchdog.events import EVENT_TYPE_OPENED from watchdog.events import FileModifiedEvent @@ -136,6 +137,28 @@ def test_watchdog_reloader_ignores_opened(mock_trigger_reload): reloader.trigger_reload.assert_not_called() +@pytest.mark.skipif( + watchdog_version.VERSION_MAJOR < 5, + reason="'closed no write' event introduced in watchdog 5.0", +) +@patch.object(WatchdogReloaderLoop, "trigger_reload") +def test_watchdog_reloader_ignores_closed_no_write(mock_trigger_reload): + from watchdog.events import EVENT_TYPE_CLOSED_NO_WRITE + + reloader = WatchdogReloaderLoop() + modified_event = FileModifiedEvent("") + modified_event.event_type = EVENT_TYPE_MODIFIED + reloader.event_handler.on_any_event(modified_event) + mock_trigger_reload.assert_called_once() + + reloader.trigger_reload.reset_mock() + + opened_event = FileModifiedEvent("") + opened_event.event_type = EVENT_TYPE_CLOSED_NO_WRITE + 
reloader.event_handler.on_any_event(opened_event) + reloader.trigger_reload.assert_not_called() + + @pytest.mark.skipif(sys.version_info >= (3, 10), reason="not needed on >= 3.10") def test_windows_get_args_for_reloading(monkeypatch, tmp_path): argv = [str(tmp_path / "test.exe"), "run"] From 7326de64ea7d1ba783bb372144d59e7433113b68 Mon Sep 17 00:00:00 2001 From: not-a-kiwi <803276+not-a-kiwi@users.noreply.github.com> Date: Thu, 26 Sep 2024 13:52:40 +0200 Subject: [PATCH 119/159] fix logging addresses with ipv6 scope --- CHANGES.rst | 1 + src/werkzeug/serving.py | 4 +++- tests/live_apps/run.py | 5 +++++ tests/test_serving.py | 11 +++++++++++ 4 files changed, 20 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index d603ed90e..c92fe2e63 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -6,6 +6,7 @@ Version 3.0.5 Unreleased - The Watchdog reloader ignores file closed no write events. :issue:`2945` +- Logging works with client addresses containing an IPv6 scope :issue:`2952` Version 3.0.4 diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py index 4faf9262c..ef32b8811 100644 --- a/src/werkzeug/serving.py +++ b/src/werkzeug/serving.py @@ -473,9 +473,11 @@ def log_message(self, format: str, *args: t.Any) -> None: self.log("info", format, *args) def log(self, type: str, message: str, *args: t.Any) -> None: + # an IPv6 scoped address contains "%" which breaks logging + address_string = self.address_string().replace("%", "%%") _log( type, - f"{self.address_string()} - - [{self.log_date_time_string()}] {message}\n", + f"{address_string} - - [{self.log_date_time_string()}] {message}\n", *args, ) diff --git a/tests/live_apps/run.py b/tests/live_apps/run.py index aacdcb664..1371e6723 100644 --- a/tests/live_apps/run.py +++ b/tests/live_apps/run.py @@ -4,6 +4,7 @@ from werkzeug.serving import generate_adhoc_ssl_context from werkzeug.serving import run_simple +from werkzeug.serving import WSGIRequestHandler from werkzeug.wrappers import Request from werkzeug.wrappers import Response @@ -23,10 +24,14 @@ def app(request): kwargs.update(hostname="127.0.0.1", port=5000, application=app) kwargs.update(json.loads(sys.argv[2])) ssl_context = kwargs.get("ssl_context") +override_client_addr = kwargs.pop("override_client_addr", None) if ssl_context == "custom": kwargs["ssl_context"] = generate_adhoc_ssl_context() elif isinstance(ssl_context, list): kwargs["ssl_context"] = tuple(ssl_context) +if override_client_addr: + WSGIRequestHandler.address_string = lambda _: override_client_addr + run_simple(**kwargs) diff --git a/tests/test_serving.py b/tests/test_serving.py index 501279b97..2de67dab0 100644 --- a/tests/test_serving.py +++ b/tests/test_serving.py @@ -337,3 +337,14 @@ def test_streaming_chunked_truncation(dev_server): """ with pytest.raises(http.client.IncompleteRead): dev_server("streaming", threaded=True).request("/crash") + + +@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") +@pytest.mark.dev_server +def test_host_with_ipv6_scope(dev_server): + client = dev_server(override_client_addr="fe80::1ff:fe23:4567:890a%eth2") + r = client.request("/crash") + + assert r.status == 500 + assert b"Internal Server Error" in r.data + assert "Logging error" not in client.log.read() From 1688968101c3341136bd8420ac7b77b6286c7409 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christian=20K=C3=B6hn?= Date: Thu, 10 Oct 2024 23:00:41 +0200 Subject: [PATCH 120/159] ignore invalid authorization parameters --- CHANGES.rst | 1 + src/werkzeug/http.py | 4 ++++ tests/test_http.py | 17 
++++++++++++++--- 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index c92fe2e63..c26a58b58 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -7,6 +7,7 @@ Unreleased - The Watchdog reloader ignores file closed no write events. :issue:`2945` - Logging works with client addresses containing an IPv6 scope :issue:`2952` +- Ignore invalid authorization parameters. :issue:`2955` Version 3.0.4 diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py index c0ec92389..cb8bc2581 100644 --- a/src/werkzeug/http.py +++ b/src/werkzeug/http.py @@ -361,6 +361,10 @@ def parse_dict_header(value: str) -> dict[str, str | None]: key, has_value, value = item.partition("=") key = key.strip() + if not key: + # =value is not valid + continue + if not has_value: result[key] = None continue diff --git a/tests/test_http.py b/tests/test_http.py index 02e5eb512..5002c0936 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -107,9 +107,16 @@ def test_set_header(self): def test_list_header(self, value, expect): assert http.parse_list_header(value) == expect - def test_dict_header(self): - d = http.parse_dict_header('foo="bar baz", blah=42') - assert d == {"foo": "bar baz", "blah": "42"} + @pytest.mark.parametrize( + ("value", "expect"), + [ + ('foo="bar baz", blah=42', {"foo": "bar baz", "blah": "42"}), + ("foo, bar=", {"foo": None, "bar": ""}), + ("=foo, =", {}), + ], + ) + def test_dict_header(self, value, expect): + assert http.parse_dict_header(value) == expect def test_cache_control_header(self): cc = http.parse_cache_control_header("max-age=0, no-cache") @@ -204,6 +211,10 @@ def test_authorization_header(self): assert Authorization.from_header(None) is None assert Authorization.from_header("foo").type == "foo" + def test_authorization_ignore_invalid_parameters(self): + a = Authorization.from_header("Digest foo, bar=, =qux, =") + assert a.to_header() == 'Digest foo, bar=""' + def test_authorization_token_padding(self): # padded with = token = base64.b64encode(b"This has base64 padding").decode() From e14f4e0693aea2b300f163f35809be110c1b6304 Mon Sep 17 00:00:00 2001 From: Alex Gaynor Date: Thu, 24 Oct 2024 12:09:10 -0400 Subject: [PATCH 121/159] improve type annotation for SharedDataMiddleware --- CHANGES.rst | 1 + src/werkzeug/middleware/shared_data.py | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index c26a58b58..42dae2470 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -8,6 +8,7 @@ Unreleased - The Watchdog reloader ignores file closed no write events. :issue:`2945` - Logging works with client addresses containing an IPv6 scope :issue:`2952` - Ignore invalid authorization parameters. :issue:`2955` +- Improve type annotation fore ``SharedDataMiddleware``. 
:issue:`2958` Version 3.0.4 diff --git a/src/werkzeug/middleware/shared_data.py b/src/werkzeug/middleware/shared_data.py index 0a0c95675..0f467f2e2 100644 --- a/src/werkzeug/middleware/shared_data.py +++ b/src/werkzeug/middleware/shared_data.py @@ -11,6 +11,7 @@ from __future__ import annotations +import collections.abc as cabc import importlib.util import mimetypes import os @@ -103,7 +104,7 @@ def __init__( self, app: WSGIApplication, exports: ( - dict[str, str | tuple[str, str]] + cabc.Mapping[str, str | tuple[str, str]] | t.Iterable[tuple[str, str | tuple[str, str]]] ), disallow: None = None, @@ -116,7 +117,7 @@ def __init__( self.cache = cache self.cache_timeout = cache_timeout - if isinstance(exports, dict): + if isinstance(exports, cabc.Mapping): exports = exports.items() for key, value in exports: From e6b4cce97eef17716004625bcf6754fa930f2618 Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 24 Oct 2024 15:44:53 -0700 Subject: [PATCH 122/159] catch OSError from getpass.getuser --- CHANGES.rst | 2 ++ src/werkzeug/debug/__init__.py | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 42dae2470..5350589b2 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -9,6 +9,8 @@ Unreleased - Logging works with client addresses containing an IPv6 scope :issue:`2952` - Ignore invalid authorization parameters. :issue:`2955` - Improve type annotation fore ``SharedDataMiddleware``. :issue:`2958` +- Compatibility with Python 3.13 when generating debugger pin and the current + UID does not have an associated name. :issue:`2957` Version 3.0.4 diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py index 69ad3f4f4..0c4cabd89 100644 --- a/src/werkzeug/debug/__init__.py +++ b/src/werkzeug/debug/__init__.py @@ -173,7 +173,8 @@ def get_pin_and_cookie_name( # App Engine. It may also raise a KeyError if the UID does not # have a username, such as in Docker. username = getpass.getuser() - except (ImportError, KeyError): + # Python >= 3.13 only raises OSError + except (ImportError, KeyError, OSError): username = None mod = sys.modules.get(modname) From 9caf72ac060181a3171d91fd12279e071df430ca Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 24 Oct 2024 17:43:39 -0700 Subject: [PATCH 123/159] release version 3.0.5 --- CHANGES.rst | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 5350589b2..20d400e52 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,7 @@ Version 3.0.5 ------------- -Unreleased +Released 2024-10-24 - The Watchdog reloader ignores file closed no write events. :issue:`2945` - Logging works with client addresses containing an IPv6 scope :issue:`2952` diff --git a/pyproject.toml b/pyproject.toml index 8d5aaebd2..4513ad18e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.0.5.dev" +version = "3.0.5" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"} From 8d6a12e2af542a553853c870d106884a3cd1f73b Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 25 Oct 2024 06:22:10 -0700 Subject: [PATCH 124/159] start version 3.0.6 --- CHANGES.rst | 6 ++++++ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 20d400e52..879bbf9ac 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,11 @@ .. 
currentmodule:: werkzeug +Version 3.0.6 +------------- + +Unreleased + + Version 3.0.5 ------------- diff --git a/pyproject.toml b/pyproject.toml index 4513ad18e..80187a473 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.0.5" +version = "3.0.6.dev" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"} From 8760275afb72bd10b57d92cb4d52abf759b2f3a7 Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 25 Oct 2024 06:46:50 -0700 Subject: [PATCH 125/159] apply max_form_memory_size another level up in the parser --- CHANGES.rst | 3 +++ src/werkzeug/formparser.py | 11 +++++++++++ src/werkzeug/sansio/multipart.py | 2 ++ tests/test_formparser.py | 12 ++++++++++++ 4 files changed, 28 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 879bbf9ac..f55d192c2 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,9 @@ Version 3.0.6 Unreleased +- Fix how ``max_form_memory_size`` is applied when parsing large non-file + fields. :ghsa:`q34m-jh98-gwm2` + Version 3.0.5 ------------- diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py index e4b1f27fd..3c6875e26 100644 --- a/src/werkzeug/formparser.py +++ b/src/werkzeug/formparser.py @@ -352,6 +352,7 @@ def parse( self, stream: t.IO[bytes], boundary: bytes, content_length: int | None ) -> tuple[MultiDict[str, str], MultiDict[str, FileStorage]]: current_part: Field | File + field_size: int | None = None container: t.IO[bytes] | list[bytes] _write: t.Callable[[bytes], t.Any] @@ -370,13 +371,23 @@ def parse( while not isinstance(event, (Epilogue, NeedData)): if isinstance(event, Field): current_part = event + field_size = 0 container = [] _write = container.append elif isinstance(event, File): current_part = event + field_size = None container = self.start_file_streaming(event, content_length) _write = container.write elif isinstance(event, Data): + if self.max_form_memory_size is not None and field_size is not None: + # Ensure that accumulated data events do not exceed limit. + # Also checked within single event in MultipartDecoder. + field_size += len(event.data) + + if field_size > self.max_form_memory_size: + raise RequestEntityTooLarge() + _write(event.data) if not event.more_data: if isinstance(current_part, Field): diff --git a/src/werkzeug/sansio/multipart.py b/src/werkzeug/sansio/multipart.py index fc8735378..731be0336 100644 --- a/src/werkzeug/sansio/multipart.py +++ b/src/werkzeug/sansio/multipart.py @@ -140,6 +140,8 @@ def receive_data(self, data: bytes | None) -> None: self.max_form_memory_size is not None and len(self.buffer) + len(data) > self.max_form_memory_size ): + # Ensure that data within single event does not exceed limit. + # Also checked across accumulated events in MultiPartParser. 
raise RequestEntityTooLarge() else: self.buffer.extend(data) diff --git a/tests/test_formparser.py b/tests/test_formparser.py index ed63be686..ebd7fddcf 100644 --- a/tests/test_formparser.py +++ b/tests/test_formparser.py @@ -456,3 +456,15 @@ def test_file_rfc2231_filename_continuations(self): ) as request: assert request.files["rfc2231"].filename == "a b c d e f.txt" assert request.files["rfc2231"].read() == b"file contents" + + +def test_multipart_max_form_memory_size() -> None: + """max_form_memory_size is tracked across multiple data events.""" + data = b"--bound\r\nContent-Disposition: form-field; name=a\r\n\r\n" + data += b"a" * 15 + b"\r\n--bound--" + # The buffer size is less than the max size, so multiple data events will be + # returned. The field size is greater than the max. + parser = formparser.MultiPartParser(max_form_memory_size=10, buffer_size=5) + + with pytest.raises(RequestEntityTooLarge): + parser.parse(io.BytesIO(data), b"bound", None) From 87cc78a25f782f8c59fbde786840a00cf0d09b3d Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 25 Oct 2024 09:56:15 -0700 Subject: [PATCH 126/159] catch special absolute path on Windows Python < 3.11 --- CHANGES.rst | 3 +++ src/werkzeug/security.py | 2 ++ tests/test_security.py | 17 +++++++++++------ 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index f55d192c2..f709fb5cd 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -8,6 +8,9 @@ Unreleased - Fix how ``max_form_memory_size`` is applied when parsing large non-file fields. :ghsa:`q34m-jh98-gwm2` +- ``safe_join`` catches certain paths on Windows that were not caught by + ``ntpath.isabs`` on Python < 3.11. :ghsa:`f9vj-2wh5-fj8j` + Version 3.0.5 ------------- diff --git a/src/werkzeug/security.py b/src/werkzeug/security.py index 9999509d1..997597990 100644 --- a/src/werkzeug/security.py +++ b/src/werkzeug/security.py @@ -151,6 +151,8 @@ def safe_join(directory: str, *pathnames: str) -> str | None: if ( any(sep in filename for sep in _os_alt_seps) or os.path.isabs(filename) + # ntpath.isabs doesn't catch this on Python < 3.11 + or filename.startswith("/") or filename == ".." or filename.startswith("../") ): diff --git a/tests/test_security.py b/tests/test_security.py index 6fad089a7..3ce741a99 100644 --- a/tests/test_security.py +++ b/tests/test_security.py @@ -1,5 +1,4 @@ import os -import posixpath import sys import pytest @@ -47,11 +46,17 @@ def test_invalid_method(): generate_password_hash("secret", "sha256") -def test_safe_join(): - assert safe_join("foo", "bar/baz") == posixpath.join("foo", "bar/baz") - assert safe_join("foo", "../bar/baz") is None - if os.name == "nt": - assert safe_join("foo", "foo\\bar") is None +@pytest.mark.parametrize( + ("path", "expect"), + [ + ("b/c", "a/b/c"), + ("../b/c", None), + ("b\\c", None if os.name == "nt" else "a/b\\c"), + ("//b/c", None), + ], +) +def test_safe_join(path, expect): + assert safe_join("a", path) == expect def test_safe_join_os_sep(): From 5eaefc3996aa5cc8c5237d8b82f1b89eed6ea624 Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 25 Oct 2024 11:42:37 -0700 Subject: [PATCH 127/159] release version 3.0.6 --- CHANGES.rst | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index f709fb5cd..501e5e61f 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,7 @@ Version 3.0.6 ------------- -Unreleased +Released 2024-10-25 - Fix how ``max_form_memory_size`` is applied when parsing large non-file fields. 
:ghsa:`q34m-jh98-gwm2` diff --git a/pyproject.toml b/pyproject.toml index 80187a473..e94ff598e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.0.6.dev" +version = "3.0.6" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"} From 437cf6fccb10a313c6c620e2da7b58729aa853b9 Mon Sep 17 00:00:00 2001 From: Charan Kumar N Date: Thu, 16 May 2024 23:27:58 +0530 Subject: [PATCH 128/159] add 421 Misdirected Request exception --- CHANGES.rst | 1 + docs/exceptions.rst | 2 ++ src/werkzeug/exceptions.py | 11 +++++++++++ tests/test_exceptions.py | 1 + 4 files changed, 15 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 320e4836c..e6f54115f 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -9,6 +9,7 @@ Unreleased - ``CacheControl.no_transform`` is a boolean when present. ``min_fresh`` is ``None`` when not present. Added the ``must_understand`` attribute. Fixed some typing issues on cache control. :issue:`2881` +- Add 421 ``MisdirectedRequest`` HTTP exception. :issue:`2850` Version 3.0.6 diff --git a/docs/exceptions.rst b/docs/exceptions.rst index 88a309d45..d5b6970b1 100644 --- a/docs/exceptions.rst +++ b/docs/exceptions.rst @@ -44,6 +44,8 @@ The following error classes exist in Werkzeug: .. autoexception:: ImATeapot +.. autoexception:: MisdirectedRequest + .. autoexception:: UnprocessableEntity .. autoexception:: Locked diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py index 6ce7ef955..02af2c15d 100644 --- a/src/werkzeug/exceptions.py +++ b/src/werkzeug/exceptions.py @@ -571,6 +571,17 @@ class ImATeapot(HTTPException): description = "This server is a teapot, not a coffee machine" +class MisdirectedRequest(HTTPException): + """421 Misdirected Request + + Indicates that the request was directed to a server that is not able to + produce a response. + """ + + code = 421 + description = "The server is not able to produce a response." + + class UnprocessableEntity(HTTPException): """*422* `Unprocessable Entity` diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index ad20b3f8b..67d76d2b4 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -37,6 +37,7 @@ def test_proxy_exception(): (exceptions.RequestEntityTooLarge, 413), (exceptions.RequestURITooLarge, 414), (exceptions.UnsupportedMediaType, 415), + (exceptions.MisdirectedRequest, 421), (exceptions.UnprocessableEntity, 422), (exceptions.Locked, 423), (exceptions.InternalServerError, 500), From 7ff62c96bb4922c6397bdf4287cf5f2cd38abb86 Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 25 Oct 2024 15:58:51 -0700 Subject: [PATCH 129/159] update CHANGES.rst --- CHANGES.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 501e5e61f..f27c93328 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -7,7 +7,6 @@ Released 2024-10-25 - Fix how ``max_form_memory_size`` is applied when parsing large non-file fields. :ghsa:`q34m-jh98-gwm2` - - ``safe_join`` catches certain paths on Windows that were not caught by ``ntpath.isabs`` on Python < 3.11. 
:ghsa:`f9vj-2wh5-fj8j` From 06e5094161c304017540b7efafe0d136b9879966 Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 25 Oct 2024 16:05:58 -0700 Subject: [PATCH 130/159] fix ghsa links --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index d58c17e1d..5cbbd4fe7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -25,7 +25,7 @@ extlinks = { "issue": ("https://github.com/pallets/werkzeug/issues/%s", "#%s"), "pr": ("https://github.com/pallets/werkzeug/pull/%s", "#%s"), - "ghsa": ("https://github.com/advisories/%s", "GHSA-%s"), + "ghsa": ("https://github.com/advisories/GHSA-%s", "GHSA-%s"), } intersphinx_mapping = { "python": ("https://docs.python.org/3/", None), From 14c5d990215dc90fa92132ee7dc56efa2c0f67c2 Mon Sep 17 00:00:00 2001 From: Cal Paterson Date: Mon, 9 Sep 2024 15:25:56 +0300 Subject: [PATCH 131/159] Add support for RFC5861 Cache-Control headers stale-while-revalidate and stale-if-error --- CHANGES.rst | 2 ++ docs/datastructures.rst | 22 ++++--------------- src/werkzeug/datastructures/cache_control.py | 5 ++++- src/werkzeug/datastructures/cache_control.pyi | 12 ++++++++++ tests/test_datastructures.py | 12 ++++++++++ 5 files changed, 34 insertions(+), 19 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 3fcd0ae4a..4c21d209e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -9,6 +9,8 @@ Unreleased - ``CacheControl.no_transform`` is a boolean when present. ``min_fresh`` is ``None`` when not present. Added the ``must_understand`` attribute. Fixed some typing issues on cache control. :issue:`2881` +- Add ``stale_while_revalidate`` and ``stale_if_error`` properties to + ``ResponseCacheControl``. :issue:`2948` - Add 421 ``MisdirectedRequest`` HTTP exception. :issue:`2850` diff --git a/docs/datastructures.rst b/docs/datastructures.rst index 01432f413..92c969932 100644 --- a/docs/datastructures.rst +++ b/docs/datastructures.rst @@ -69,26 +69,12 @@ HTTP Related .. autoclass:: LanguageAccept .. autoclass:: RequestCacheControl - :members: - - .. autoattribute:: no_cache - - .. autoattribute:: no_store - - .. autoattribute:: max_age - - .. autoattribute:: no_transform + :members: + :inherited-members: .. autoclass:: ResponseCacheControl - :members: - - .. autoattribute:: no_cache - - .. autoattribute:: no_store - - .. autoattribute:: max_age - - .. autoattribute:: no_transform + :members: + :inherited-members: .. autoclass:: ETags :members: diff --git a/src/werkzeug/datastructures/cache_control.py b/src/werkzeug/datastructures/cache_control.py index 6ff4eceeb..fa7ed0a71 100644 --- a/src/werkzeug/datastructures/cache_control.py +++ b/src/werkzeug/datastructures/cache_control.py @@ -165,7 +165,8 @@ class ResponseCacheControl(_CacheControl): ``no_transform`` is a boolean when present. .. versionchanged:: 3.1 - Added the ``must_understand`` attribute. + Added the ``must_understand``, ``stale_while_revalidate``, and + ``stale_if_error`` attributes. .. versionchanged:: 2.1.1 ``s_maxage`` converts the value to an int. 
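(For illustration only, not part of the patch: with the additions below, the new RFC 5861
directives can be set on a response roughly as sketched here, assuming Werkzeug's standard
``Response`` wrapper; the body string and numbers are arbitrary examples.)

    from werkzeug.wrappers import Response

    resp = Response("cached payload")
    resp.cache_control.max_age = 60
    # The two new properties store integer seconds, like max_age.
    resp.cache_control.stale_while_revalidate = 30
    resp.cache_control.stale_if_error = 300
    # The serialized header then contains, for example:
    #   Cache-Control: max-age=60, stale-while-revalidate=30, stale-if-error=300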
@@ -186,6 +187,8 @@ class ResponseCacheControl(_CacheControl): s_maxage = cache_control_property("s-maxage", None, int) immutable = cache_control_property("immutable", None, bool) must_understand = cache_control_property("must-understand", None, bool) + stale_while_revalidate = cache_control_property("stale-while-revalidate", None, int) + stale_if_error = cache_control_property("stale-if-error", None, int) # circular dependencies diff --git a/src/werkzeug/datastructures/cache_control.pyi b/src/werkzeug/datastructures/cache_control.pyi index 4c9f4df37..93c595db8 100644 --- a/src/werkzeug/datastructures/cache_control.pyi +++ b/src/werkzeug/datastructures/cache_control.pyi @@ -116,3 +116,15 @@ class ResponseCacheControl(_CacheControl): def must_understand(self, value: bool | None) -> None: ... @must_understand.deleter def must_understand(self) -> None: ... + @property + def stale_while_revalidate(self) -> int | None: ... + @stale_while_revalidate.setter + def stale_while_revalidate(self, value: int | None) -> None: ... + @stale_while_revalidate.deleter + def stale_while_revalidate(self) -> None: ... + @property + def stale_if_error(self) -> int | None: ... + @stale_if_error.setter + def stale_if_error(self, value: int | None) -> None: ... + @stale_if_error.deleter + def stale_if_error(self) -> None: ... diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 830dfefd5..a681c022b 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -973,6 +973,18 @@ def test_must_understand(self): cc = ds.ResponseCacheControl() assert cc.must_understand is False + def test_stale_while_revalidate(self): + cc = ds.ResponseCacheControl([("stale-while-revalidate", "1")]) + assert cc.stale_while_revalidate == 1 + cc = ds.ResponseCacheControl() + assert cc.stale_while_revalidate is None + + def test_stale_if_error(self): + cc = ds.ResponseCacheControl([("stale-if-error", "1")]) + assert cc.stale_if_error == 1 + cc = ds.ResponseCacheControl() + assert cc.stale_while_revalidate is None + class TestContentSecurityPolicy: def test_construct(self): From 3a4126da6dc094b1a20a2fe5d6b132e8b2382bb8 Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 27 Oct 2024 08:50:22 -0700 Subject: [PATCH 132/159] manage dev server subprocess directly co-authored-by: Sudhanshu Pandey --- requirements/dev.txt | 7 -- requirements/tests.in | 1 - requirements/tests.txt | 5 -- tests/conftest.py | 148 ++++++++++++++++++++++++----------------- tests/test_serving.py | 6 +- 5 files changed, 89 insertions(+), 78 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 4f6a735d4..24eb34a53 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -104,10 +104,6 @@ pluggy==1.5.0 # tox pre-commit==4.0.1 # via -r dev.in -psutil==6.1.0 - # via - # -r tests.txt - # pytest-xprocess pycparser==2.22 # via # -r tests.txt @@ -125,11 +121,8 @@ pytest==8.3.3 # -r tests.txt # -r typing.txt # pytest-timeout - # pytest-xprocess pytest-timeout==2.3.1 # via -r tests.txt -pytest-xprocess==0.23.0 - # via -r tests.txt pyyaml==6.0.2 # via pre-commit requests==2.32.3 diff --git a/requirements/tests.in b/requirements/tests.in index 494b61022..c1b5bc313 100644 --- a/requirements/tests.in +++ b/requirements/tests.in @@ -1,6 +1,5 @@ pytest pytest-timeout -pytest-xprocess<1 cryptography watchdog ephemeral-port-reserve diff --git a/requirements/tests.txt b/requirements/tests.txt index c4557c90f..d64cace9a 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -18,18 +18,13 @@ 
packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -psutil==6.1.0 - # via pytest-xprocess pycparser==2.22 # via cffi pytest==8.3.3 # via # -r tests.in # pytest-timeout - # pytest-xprocess pytest-timeout==2.3.1 # via -r tests.in -pytest-xprocess==0.23.0 - # via -r tests.in watchdog==5.0.3 # via -r tests.in diff --git a/tests/conftest.py b/tests/conftest.py index b73202cdb..1f238557c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,55 +1,85 @@ +from __future__ import annotations + import http.client import json import os import socket import ssl +import subprocess import sys +import time +from contextlib import closing +from contextlib import ExitStack from pathlib import Path import ephemeral_port_reserve import pytest -from xprocess import ProcessStarter - -from werkzeug.utils import cached_property - -run_path = str(Path(__file__).parent / "live_apps" / "run.py") class UnixSocketHTTPConnection(http.client.HTTPConnection): def connect(self): self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + # Raises FileNotFoundError if the server hasn't started yet. self.sock.connect(self.host) class DevServerClient: - def __init__(self, kwargs): - host = kwargs.get("hostname", "127.0.0.1") + def __init__(self, app_name="standard", *, tmp_path, **server_kwargs): + host = server_kwargs.get("hostname", "127.0.0.1") - if not host.startswith("unix"): - port = kwargs.get("port") + if not host.startswith("unix://"): + port = server_kwargs.get("port") if port is None: - kwargs["port"] = port = ephemeral_port_reserve.reserve(host) + server_kwargs["port"] = port = ephemeral_port_reserve.reserve(host) - scheme = "https" if "ssl_context" in kwargs else "http" + self.scheme = "https" if "ssl_context" in server_kwargs else "http" self.addr = f"{host}:{port}" - self.url = f"{scheme}://{self.addr}" + self.url = f"{self.scheme}://{self.addr}" else: + self.scheme = "unix" self.addr = host[7:] # strip "unix://" self.url = host - self.log = None - - def tail_log(self, path): - # surrogateescape allows for handling of file streams - # containing junk binary values as normal text streams - self.log = open(path, errors="surrogateescape") - self.log.read() + self._app_name = app_name + self._server_kwargs = server_kwargs + self._tmp_path = tmp_path + self._log_write = None + self._log_read = None + self._proc = None + + def __enter__(self): + log_path = self._tmp_path / "log.txt" + self._log_write = open(log_path, "wb") + self._log_read = open(log_path, encoding="utf8", errors="surrogateescape") + tmp_dir = os.fspath(self._tmp_path) + self._proc = subprocess.Popen( + [ + sys.executable, + os.fspath(Path(__file__).parent / "live_apps/run.py"), + self._app_name, + json.dumps(self._server_kwargs), + ], + env={**os.environ, "PYTHONUNBUFFERED": "1", "PYTHONPATH": tmp_dir}, + cwd=tmp_dir, + close_fds=True, + stdout=self._log_write, + stderr=subprocess.STDOUT, + ) + self.wait_ready() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self._proc.terminate() + self._proc.wait() + self._proc = None + self._log_read.close() + self._log_read = None + self._log_write.close() + self._log_write = None def connect(self, **kwargs): - protocol = self.url.partition(":")[0] - - if protocol == "https": + if self.scheme == "https": if "context" not in kwargs: context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) context.check_hostname = False @@ -58,21 +88,20 @@ def connect(self, **kwargs): return http.client.HTTPSConnection(self.addr, **kwargs) - if protocol == "unix": + if self.scheme == "unix": return 
UnixSocketHTTPConnection(self.addr, **kwargs) return http.client.HTTPConnection(self.addr, **kwargs) - def request(self, path="", **kwargs): + def request(self, url: str, **kwargs): kwargs.setdefault("method", "GET") - kwargs.setdefault("url", path) - conn = self.connect() - conn.request(**kwargs) + kwargs["url"] = url - with conn.getresponse() as response: - response.data = response.read() + with closing(self.connect()) as conn: + conn.request(**kwargs) - conn.close() + with conn.getresponse() as response: + response.data = response.read() if response.headers.get("Content-Type", "").startswith("application/json"): response.json = json.loads(response.data) @@ -81,50 +110,45 @@ def request(self, path="", **kwargs): return response - def wait_for_log(self, start): + def wait_ready(self): while True: - for line in self.log: - if line.startswith(start): + try: + self.request("/ensure") + return + # ConnectionRefusedError for http, FileNotFoundError for unix + except (ConnectionRefusedError, FileNotFoundError): + time.sleep(0.1) + + def read_log(self) -> str: + return self._log_read.read() + + def wait_for_log(self, value): + while True: + for line in self._log_read: + if value in line: return + time.sleep(0.1) + def wait_for_reload(self): - self.wait_for_log(" * Restarting with ") + self.wait_for_log("Restarting with") + self.wait_ready() @pytest.fixture() -def dev_server(xprocess, request, tmp_path): - """A function that will start a dev server in an external process - and return a client for interacting with the server. +def dev_server(tmp_path): + """A function that will start a dev server in a subprocess and return a + client for interacting with the server. """ + exit_stack = ExitStack() def start_dev_server(name="standard", **kwargs): - client = DevServerClient(kwargs) - - class Starter(ProcessStarter): - args = [sys.executable, run_path, name, json.dumps(kwargs)] - # Extend the existing env, otherwise Windows and CI fails. - # Modules will be imported from tmp_path for the reloader. - # Unbuffered output so the logs update immediately. - env = {**os.environ, "PYTHONPATH": str(tmp_path), "PYTHONUNBUFFERED": "1"} - - @cached_property - def pattern(self): - client.request("/ensure") - return "GET /ensure" - - # Each test that uses the fixture will have a different log. 
- xp_name = f"dev_server-{request.node.name}" - _, log_path = xprocess.ensure(xp_name, Starter, restart=True) - client.tail_log(log_path) - - @request.addfinalizer - def close(): - xprocess.getinfo(xp_name).terminate() - client.log.close() - + client = DevServerClient(name, tmp_path=tmp_path, **kwargs) + exit_stack.enter_context(client) return client - return start_dev_server + with exit_stack: + yield start_dev_server @pytest.fixture() diff --git a/tests/test_serving.py b/tests/test_serving.py index 2de67dab0..9832fab63 100644 --- a/tests/test_serving.py +++ b/tests/test_serving.py @@ -116,7 +116,7 @@ def test_reloader_sys_path(tmp_path, dev_server, reloader_type): assert client.request().status == 500 shutil.copyfile(Path(__file__).parent / "live_apps" / "standard_app.py", real_path) - client.wait_for_log(f" * Detected change in {str(real_path)!r}, reloading") + client.wait_for_log(f"Detected change in {str(real_path)!r}") client.wait_for_reload() assert client.request().status == 200 @@ -195,7 +195,7 @@ def test_wrong_protocol(standard_app): with pytest.raises(ssl.SSLError): conn.request("GET", f"https://{standard_app.addr}") - assert "Traceback" not in standard_app.log.read() + assert "Traceback" not in standard_app.read_log() @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @@ -347,4 +347,4 @@ def test_host_with_ipv6_scope(dev_server): assert r.status == 500 assert b"Internal Server Error" in r.data - assert "Logging error" not in client.log.read() + assert "Logging error" not in client.read_log() From e8292658623bf6607d996e254d1f51ee41b9575f Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 27 Oct 2024 08:53:10 -0700 Subject: [PATCH 133/159] remove resource warning ignores --- tests/middleware/test_http_proxy.py | 1 - tests/test_debug.py | 1 - tests/test_serving.py | 18 +----------------- 3 files changed, 1 insertion(+), 19 deletions(-) diff --git a/tests/middleware/test_http_proxy.py b/tests/middleware/test_http_proxy.py index a1497c5cc..5e1f005b2 100644 --- a/tests/middleware/test_http_proxy.py +++ b/tests/middleware/test_http_proxy.py @@ -5,7 +5,6 @@ from werkzeug.wrappers import Response -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_http_proxy(standard_app): app = ProxyMiddleware( diff --git a/tests/test_debug.py b/tests/test_debug.py index cf171d1a5..f51779cbc 100644 --- a/tests/test_debug.py +++ b/tests/test_debug.py @@ -245,7 +245,6 @@ def test_get_machine_id(): assert isinstance(rv, bytes) -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.parametrize("crash", (True, False)) @pytest.mark.dev_server def test_basic(dev_server, crash): diff --git a/tests/test_serving.py b/tests/test_serving.py index 9832fab63..c1819b348 100644 --- a/tests/test_serving.py +++ b/tests/test_serving.py @@ -25,7 +25,7 @@ from werkzeug.test import stream_encode_multipart -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") + @pytest.mark.parametrize( "kwargs", [ @@ -52,7 +52,6 @@ def test_server(tmp_path, dev_server, kwargs: dict): assert r.json["PATH_INFO"] == "/" -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_untrusted_host(standard_app): r = standard_app.request( @@ -66,7 +65,6 @@ def test_untrusted_host(standard_app): assert r.json["SERVER_PORT"] == port -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def 
test_double_slash_path(standard_app): r = standard_app.request("//double-slash") @@ -74,7 +72,6 @@ def test_double_slash_path(standard_app): assert r.json["PATH_INFO"] == "/double-slash" -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_500_error(standard_app): r = standard_app.request("/crash") @@ -82,7 +79,6 @@ def test_500_error(standard_app): assert b"Internal Server Error" in r.data -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_ssl_dev_cert(tmp_path, dev_server): client = dev_server(ssl_context=make_ssl_devcert(tmp_path)) @@ -90,7 +86,6 @@ def test_ssl_dev_cert(tmp_path, dev_server): assert r.json["wsgi.url_scheme"] == "https" -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_ssl_object(dev_server): client = dev_server(ssl_context="custom") @@ -98,7 +93,6 @@ def test_ssl_object(dev_server): assert r.json["wsgi.url_scheme"] == "https" -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.parametrize("reloader_type", ["stat", "watchdog"]) @pytest.mark.skipif( os.name == "nt" and "CI" in os.environ, reason="unreliable on Windows during CI" @@ -170,7 +164,6 @@ def test_windows_get_args_for_reloading(monkeypatch, tmp_path): assert rv == argv -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.parametrize("find", [_find_stat_paths, _find_watchdog_paths]) def test_exclude_patterns(find): # Select a path to exclude from the unfiltered list, assert that it is present and @@ -184,7 +177,6 @@ def test_exclude_patterns(find): assert not any(p.startswith(path_to_exclude) for p in paths) -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_wrong_protocol(standard_app): """An HTTPS request to an HTTP server doesn't show a traceback. @@ -198,7 +190,6 @@ def test_wrong_protocol(standard_app): assert "Traceback" not in standard_app.read_log() -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_content_type_and_length(standard_app): r = standard_app.request() @@ -215,7 +206,6 @@ def test_port_is_int(): run_simple("127.0.0.1", "5000", None) -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.parametrize("send_length", [False, True]) @pytest.mark.dev_server def test_chunked_request(monkeypatch, dev_server, send_length): @@ -258,7 +248,6 @@ def test_chunked_request(monkeypatch, dev_server, send_length): assert environ["wsgi.input_terminated"] -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_multiple_headers_concatenated(standard_app): """A header key can be sent multiple times. 
The server will join all @@ -283,7 +272,6 @@ def test_multiple_headers_concatenated(standard_app): assert data["HTTP_XYZ"] == "a ,b,c ,d" -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_multiline_header_folding(standard_app): """A header value can be split over multiple lines with a leading @@ -303,7 +291,6 @@ def test_multiline_header_folding(standard_app): @pytest.mark.parametrize("endpoint", ["", "crash"]) -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_streaming_close_response(dev_server, endpoint): """When using HTTP/1.0, chunked encoding is not supported. Fall @@ -315,7 +302,6 @@ def test_streaming_close_response(dev_server, endpoint): assert r.data == "".join(str(x) + "\n" for x in range(5)).encode() -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_streaming_chunked_response(dev_server): """When using HTTP/1.1, use Transfer-Encoding: chunked for streamed @@ -329,7 +315,6 @@ def test_streaming_chunked_response(dev_server): assert r.data == "".join(str(x) + "\n" for x in range(5)).encode() -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_streaming_chunked_truncation(dev_server): """When using HTTP/1.1, chunked encoding allows the client to detect @@ -339,7 +324,6 @@ def test_streaming_chunked_truncation(dev_server): dev_server("streaming", threaded=True).request("/crash") -@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.dev_server def test_host_with_ipv6_scope(dev_server): client = dev_server(override_client_addr="fe80::1ff:fe23:4567:890a%eth2") From 73b2b0536529505ea1536f457fb372836d8ce704 Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 27 Oct 2024 09:05:39 -0700 Subject: [PATCH 134/159] annotate dev server client --- pyproject.toml | 1 + tests/conftest.py | 63 +++++++++++++++++++++++++---------- tests/test_serving.py | 76 ++++++++++++++++++++++++++----------------- 3 files changed, 93 insertions(+), 47 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5804125ab..b387e7187 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,6 +79,7 @@ strict = true module = [ "colorama.*", "cryptography.*", + "ephemeral_port_reserve", "watchdog.*", "xprocess.*", ] diff --git a/tests/conftest.py b/tests/conftest.py index 1f238557c..aa0f541be 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,6 @@ from __future__ import annotations +import collections.abc as cabc import http.client import json import os @@ -8,23 +9,36 @@ import subprocess import sys import time +import typing as t from contextlib import closing from contextlib import ExitStack from pathlib import Path +from types import TracebackType import ephemeral_port_reserve import pytest +if t.TYPE_CHECKING: + import typing_extensions as te + class UnixSocketHTTPConnection(http.client.HTTPConnection): - def connect(self): + def connect(self) -> None: self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) # Raises FileNotFoundError if the server hasn't started yet. self.sock.connect(self.host) +# Used to annotate the ``DevServerClient.request`` return value. 
+class DataHTTPResponse(http.client.HTTPResponse): + data: bytes + json: t.Any + + class DevServerClient: - def __init__(self, app_name="standard", *, tmp_path, **server_kwargs): + def __init__( + self, app_name: str = "standard", *, tmp_path: Path, **server_kwargs: t.Any + ) -> None: host = server_kwargs.get("hostname", "127.0.0.1") if not host.startswith("unix://"): @@ -44,11 +58,11 @@ def __init__(self, app_name="standard", *, tmp_path, **server_kwargs): self._app_name = app_name self._server_kwargs = server_kwargs self._tmp_path = tmp_path - self._log_write = None - self._log_read = None - self._proc = None + self._log_write: t.IO[bytes] | None = None + self._log_read: t.IO[str] | None = None + self._proc: subprocess.Popen[bytes] | None = None - def __enter__(self): + def __enter__(self) -> te.Self: log_path = self._tmp_path / "log.txt" self._log_write = open(log_path, "wb") self._log_read = open(log_path, encoding="utf8", errors="surrogateescape") @@ -69,16 +83,24 @@ def __enter__(self): self.wait_ready() return self - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__( + self, + exc_type: type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType, + ) -> None: + assert self._proc is not None self._proc.terminate() self._proc.wait() self._proc = None + assert self._log_read is not None self._log_read.close() self._log_read = None + assert self._log_write is not None self._log_write.close() self._log_write = None - def connect(self, **kwargs): + def connect(self, **kwargs: t.Any) -> http.client.HTTPConnection: if self.scheme == "https": if "context" not in kwargs: context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -93,14 +115,15 @@ def connect(self, **kwargs): return http.client.HTTPConnection(self.addr, **kwargs) - def request(self, url: str, **kwargs): + def request(self, url: str = "", **kwargs: t.Any) -> DataHTTPResponse: kwargs.setdefault("method", "GET") kwargs["url"] = url + response: DataHTTPResponse with closing(self.connect()) as conn: conn.request(**kwargs) - with conn.getresponse() as response: + with conn.getresponse() as response: # type: ignore[assignment] response.data = response.read() if response.headers.get("Content-Type", "").startswith("application/json"): @@ -110,7 +133,7 @@ def request(self, url: str, **kwargs): return response - def wait_ready(self): + def wait_ready(self) -> None: while True: try: self.request("/ensure") @@ -120,9 +143,11 @@ def wait_ready(self): time.sleep(0.1) def read_log(self) -> str: + assert self._log_read is not None return self._log_read.read() - def wait_for_log(self, value): + def wait_for_log(self, value: str) -> None: + assert self._log_read is not None while True: for line in self._log_read: if value in line: @@ -130,21 +155,25 @@ def wait_for_log(self, value): time.sleep(0.1) - def wait_for_reload(self): + def wait_for_reload(self) -> None: self.wait_for_log("Restarting with") self.wait_ready() +class StartDevServer(t.Protocol): + def __call__(self, name: str = "standard", **kwargs: t.Any) -> DevServerClient: ... + + @pytest.fixture() -def dev_server(tmp_path): +def dev_server(tmp_path: Path) -> cabc.Iterator[StartDevServer]: """A function that will start a dev server in a subprocess and return a client for interacting with the server. 
""" exit_stack = ExitStack() - def start_dev_server(name="standard", **kwargs): + def start_dev_server(name: str = "standard", **kwargs: t.Any) -> DevServerClient: client = DevServerClient(name, tmp_path=tmp_path, **kwargs) - exit_stack.enter_context(client) + exit_stack.enter_context(client) # type: ignore[arg-type] return client with exit_stack: @@ -152,6 +181,6 @@ def start_dev_server(name="standard", **kwargs): @pytest.fixture() -def standard_app(dev_server): +def standard_app(dev_server: t.Callable[..., DevServerClient]) -> DevServerClient: """Equivalent to ``dev_server("standard")``.""" return dev_server() diff --git a/tests/test_serving.py b/tests/test_serving.py index c1819b348..6dd9d9dc3 100644 --- a/tests/test_serving.py +++ b/tests/test_serving.py @@ -1,3 +1,6 @@ +from __future__ import annotations + +import collections.abc as cabc import http.client import json import os @@ -5,8 +8,10 @@ import socket import ssl import sys +import typing as t from io import BytesIO from pathlib import Path +from unittest.mock import Mock from unittest.mock import patch import pytest @@ -24,6 +29,9 @@ from werkzeug.serving import make_ssl_devcert from werkzeug.test import stream_encode_multipart +if t.TYPE_CHECKING: + from conftest import DevServerClient + from conftest import StartDevServer @pytest.mark.parametrize( @@ -42,7 +50,9 @@ ], ) @pytest.mark.dev_server -def test_server(tmp_path, dev_server, kwargs: dict): +def test_server( + tmp_path: Path, dev_server: StartDevServer, kwargs: dict[str, t.Any] +) -> None: if kwargs.get("hostname") == "unix": kwargs["hostname"] = f"unix://{tmp_path / 'test.sock'}" @@ -53,7 +63,7 @@ def test_server(tmp_path, dev_server, kwargs: dict): @pytest.mark.dev_server -def test_untrusted_host(standard_app): +def test_untrusted_host(standard_app: DevServerClient) -> None: r = standard_app.request( "http://missing.test:1337/index.html#ignore", headers={"x-base-url": standard_app.url}, @@ -66,28 +76,28 @@ def test_untrusted_host(standard_app): @pytest.mark.dev_server -def test_double_slash_path(standard_app): +def test_double_slash_path(standard_app: DevServerClient) -> None: r = standard_app.request("//double-slash") assert "double-slash" not in r.json["HTTP_HOST"] assert r.json["PATH_INFO"] == "/double-slash" @pytest.mark.dev_server -def test_500_error(standard_app): +def test_500_error(standard_app: DevServerClient) -> None: r = standard_app.request("/crash") assert r.status == 500 assert b"Internal Server Error" in r.data @pytest.mark.dev_server -def test_ssl_dev_cert(tmp_path, dev_server): - client = dev_server(ssl_context=make_ssl_devcert(tmp_path)) +def test_ssl_dev_cert(tmp_path: Path, dev_server: StartDevServer) -> None: + client = dev_server(ssl_context=make_ssl_devcert(os.fspath(tmp_path))) r = client.request() assert r.json["wsgi.url_scheme"] == "https" @pytest.mark.dev_server -def test_ssl_object(dev_server): +def test_ssl_object(dev_server: StartDevServer) -> None: client = dev_server(ssl_context="custom") r = client.request() assert r.json["wsgi.url_scheme"] == "https" @@ -98,7 +108,9 @@ def test_ssl_object(dev_server): os.name == "nt" and "CI" in os.environ, reason="unreliable on Windows during CI" ) @pytest.mark.dev_server -def test_reloader_sys_path(tmp_path, dev_server, reloader_type): +def test_reloader_sys_path( + tmp_path: Path, dev_server: StartDevServer, reloader_type: str +) -> None: """This tests the general behavior of the reloader. It also tests that fixing an import error triggers a reload, not just Python retrying the failed import. 
@@ -116,19 +128,18 @@ def test_reloader_sys_path(tmp_path, dev_server, reloader_type): @patch.object(WatchdogReloaderLoop, "trigger_reload") -def test_watchdog_reloader_ignores_opened(mock_trigger_reload): +def test_watchdog_reloader_ignores_opened(mock_trigger_reload: Mock) -> None: reloader = WatchdogReloaderLoop() modified_event = FileModifiedEvent("") modified_event.event_type = EVENT_TYPE_MODIFIED reloader.event_handler.on_any_event(modified_event) mock_trigger_reload.assert_called_once() - reloader.trigger_reload.reset_mock() - + mock_trigger_reload.reset_mock() opened_event = FileModifiedEvent("") opened_event.event_type = EVENT_TYPE_OPENED reloader.event_handler.on_any_event(opened_event) - reloader.trigger_reload.assert_not_called() + mock_trigger_reload.assert_not_called() @pytest.mark.skipif( @@ -136,8 +147,8 @@ def test_watchdog_reloader_ignores_opened(mock_trigger_reload): reason="'closed no write' event introduced in watchdog 5.0", ) @patch.object(WatchdogReloaderLoop, "trigger_reload") -def test_watchdog_reloader_ignores_closed_no_write(mock_trigger_reload): - from watchdog.events import EVENT_TYPE_CLOSED_NO_WRITE +def test_watchdog_reloader_ignores_closed_no_write(mock_trigger_reload: Mock) -> None: + from watchdog.events import EVENT_TYPE_CLOSED_NO_WRITE # type: ignore[attr-defined] reloader = WatchdogReloaderLoop() modified_event = FileModifiedEvent("") @@ -145,16 +156,17 @@ def test_watchdog_reloader_ignores_closed_no_write(mock_trigger_reload): reloader.event_handler.on_any_event(modified_event) mock_trigger_reload.assert_called_once() - reloader.trigger_reload.reset_mock() - + mock_trigger_reload.reset_mock() opened_event = FileModifiedEvent("") opened_event.event_type = EVENT_TYPE_CLOSED_NO_WRITE reloader.event_handler.on_any_event(opened_event) - reloader.trigger_reload.assert_not_called() + mock_trigger_reload.assert_not_called() @pytest.mark.skipif(sys.version_info >= (3, 10), reason="not needed on >= 3.10") -def test_windows_get_args_for_reloading(monkeypatch, tmp_path): +def test_windows_get_args_for_reloading( + monkeypatch: pytest.MonkeyPatch, tmp_path: Path +) -> None: argv = [str(tmp_path / "test.exe"), "run"] monkeypatch.setattr("sys.executable", str(tmp_path / "python.exe")) monkeypatch.setattr("sys.argv", argv) @@ -165,7 +177,9 @@ def test_windows_get_args_for_reloading(monkeypatch, tmp_path): @pytest.mark.parametrize("find", [_find_stat_paths, _find_watchdog_paths]) -def test_exclude_patterns(find): +def test_exclude_patterns( + find: t.Callable[[set[str], set[str]], cabc.Iterable[str]], +) -> None: # Select a path to exclude from the unfiltered list, assert that it is present and # then gets excluded. paths = find(set(), set()) @@ -178,7 +192,7 @@ def test_exclude_patterns(find): @pytest.mark.dev_server -def test_wrong_protocol(standard_app): +def test_wrong_protocol(standard_app: DevServerClient) -> None: """An HTTPS request to an HTTP server doesn't show a traceback. 
https://github.com/pallets/werkzeug/pull/838 """ @@ -191,7 +205,7 @@ def test_wrong_protocol(standard_app): @pytest.mark.dev_server -def test_content_type_and_length(standard_app): +def test_content_type_and_length(standard_app: DevServerClient) -> None: r = standard_app.request() assert "CONTENT_TYPE" not in r.json assert "CONTENT_LENGTH" not in r.json @@ -201,14 +215,16 @@ def test_content_type_and_length(standard_app): assert r.json["CONTENT_LENGTH"] == "2" -def test_port_is_int(): +def test_port_is_int() -> None: with pytest.raises(TypeError, match="port must be an integer"): - run_simple("127.0.0.1", "5000", None) + run_simple("127.0.0.1", "5000", None) # type: ignore[arg-type] @pytest.mark.parametrize("send_length", [False, True]) @pytest.mark.dev_server -def test_chunked_request(monkeypatch, dev_server, send_length): +def test_chunked_request( + monkeypatch: pytest.MonkeyPatch, dev_server: StartDevServer, send_length: bool +) -> None: stream, length, boundary = stream_encode_multipart( { "value": "this is text", @@ -249,7 +265,7 @@ def test_chunked_request(monkeypatch, dev_server, send_length): @pytest.mark.dev_server -def test_multiple_headers_concatenated(standard_app): +def test_multiple_headers_concatenated(standard_app: DevServerClient) -> None: """A header key can be sent multiple times. The server will join all the values with commas. @@ -273,7 +289,7 @@ def test_multiple_headers_concatenated(standard_app): @pytest.mark.dev_server -def test_multiline_header_folding(standard_app): +def test_multiline_header_folding(standard_app: DevServerClient) -> None: """A header value can be split over multiple lines with a leading tab. The server will remove the newlines and preserve the tabs. @@ -292,7 +308,7 @@ def test_multiline_header_folding(standard_app): @pytest.mark.parametrize("endpoint", ["", "crash"]) @pytest.mark.dev_server -def test_streaming_close_response(dev_server, endpoint): +def test_streaming_close_response(dev_server: StartDevServer, endpoint: str) -> None: """When using HTTP/1.0, chunked encoding is not supported. Fall back to Connection: close, but this allows no reliable way to distinguish between complete and truncated responses. @@ -303,7 +319,7 @@ def test_streaming_close_response(dev_server, endpoint): @pytest.mark.dev_server -def test_streaming_chunked_response(dev_server): +def test_streaming_chunked_response(dev_server: StartDevServer) -> None: """When using HTTP/1.1, use Transfer-Encoding: chunked for streamed responses, since it can distinguish the end of the response without closing the connection. @@ -316,7 +332,7 @@ def test_streaming_chunked_response(dev_server): @pytest.mark.dev_server -def test_streaming_chunked_truncation(dev_server): +def test_streaming_chunked_truncation(dev_server: StartDevServer) -> None: """When using HTTP/1.1, chunked encoding allows the client to detect content truncated by a prematurely closed connection. 
""" @@ -325,7 +341,7 @@ def test_streaming_chunked_truncation(dev_server): @pytest.mark.dev_server -def test_host_with_ipv6_scope(dev_server): +def test_host_with_ipv6_scope(dev_server: StartDevServer) -> None: client = dev_server(override_client_addr="fe80::1ff:fe23:4567:890a%eth2") r = client.request("/crash") From f83df43099116823734feda8114c3b08d1031033 Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 27 Oct 2024 09:08:33 -0700 Subject: [PATCH 135/159] document DevServerClient --- tests/conftest.py | 64 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index aa0f541be..f05fd84ed 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -36,6 +36,37 @@ class DataHTTPResponse(http.client.HTTPResponse): class DevServerClient: + """Manage a live dev server process and make requests to it. Must be used + as a context manager. + + If ``hostname`` starts with ``unix://``, the server listens to a unix socket + file instead of a TCP socket. + + If ``port`` is not given, a random port is reserved for use by the server, + to allow multiple servers to run simultaneously. + + If ``ssl_context`` is given, the server listens with TLS enabled. It can be + the special value ``custom`` to generate and pass a context to + ``run_simple``, as opposed to ``adhoc`` which tells ``run_simple`` to + generate the context. + + :param app_name: The name of the app from the ``live_apps`` folder to load. + :param tmp_path: The current test's temporary directory. The server process + sets the working dir here, it is added to the Python path, the log file + is written here, and for unix connections the socket is opened here. + :param server_kwargs: Arguments to pass to ``live_apps/run.py`` to control + how ``run_simple`` is called in the subprocess. + """ + + scheme: str + """One of ``http``, ``https``, or ``unix``. Set based on ``ssl_context`` or + ``hostname``. + """ + addr: str + """The host and port.""" + url: str + """The scheme, host, and port.""" + def __init__( self, app_name: str = "standard", *, tmp_path: Path, **server_kwargs: t.Any ) -> None: @@ -63,6 +94,7 @@ def __init__( self._proc: subprocess.Popen[bytes] | None = None def __enter__(self) -> te.Self: + """Start the server process and wait for it to be ready.""" log_path = self._tmp_path / "log.txt" self._log_write = open(log_path, "wb") self._log_read = open(log_path, encoding="utf8", errors="surrogateescape") @@ -89,6 +121,7 @@ def __exit__( exc_val: BaseException, exc_tb: TracebackType, ) -> None: + """Clean up the server process.""" assert self._proc is not None self._proc.terminate() self._proc.wait() @@ -101,6 +134,15 @@ def __exit__( self._log_write = None def connect(self, **kwargs: t.Any) -> http.client.HTTPConnection: + """Create a connection to the server, without sending a request. + Useful if a test requires lower level methods to try something that + ``HTTPClient.request`` will not do. + + If the server's scheme is HTTPS and the TLS ``context`` argument is not + given, a default permissive context is used. + + :param kwargs: Arguments to :class:`http.client.HTTPConnection`. 
+ """ if self.scheme == "https": if "context" not in kwargs: context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -116,6 +158,16 @@ def connect(self, **kwargs: t.Any) -> http.client.HTTPConnection: return http.client.HTTPConnection(self.addr, **kwargs) def request(self, url: str = "", **kwargs: t.Any) -> DataHTTPResponse: + """Open a connection and make a request to the server, returning the + response. + + The response object ``data`` parameter has the result of + ``response.read()``. If the response has a ``application/json`` content + type, the ``json`` parameter is populated with ``json.loads(data)``. + + :param url: URL to put in the request line. + :param kwargs: Arguments to :meth:`http.client.HTTPConnection.request`. + """ kwargs.setdefault("method", "GET") kwargs["url"] = url response: DataHTTPResponse @@ -134,6 +186,9 @@ def request(self, url: str = "", **kwargs: t.Any) -> DataHTTPResponse: return response def wait_ready(self) -> None: + """Wait until a request to ``/ensure`` is successful, indicating the + server has started and is listening. + """ while True: try: self.request("/ensure") @@ -143,11 +198,17 @@ def wait_ready(self) -> None: time.sleep(0.1) def read_log(self) -> str: + """Read from the current position to the current end of the log.""" assert self._log_read is not None return self._log_read.read() def wait_for_log(self, value: str) -> None: + """Wait until a line in the log contains the given string. + + :param value: The string to search for. + """ assert self._log_read is not None + while True: for line in self._log_read: if value in line: @@ -156,6 +217,9 @@ def wait_for_log(self, value: str) -> None: time.sleep(0.1) def wait_for_reload(self) -> None: + """Wait until the server logs that it is restarting, then wait for it to + be ready. + """ self.wait_for_log("Restarting with") self.wait_ready() From d27debc32f29ebd1677940cd307788fd65667ff5 Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 27 Oct 2024 14:19:20 -0700 Subject: [PATCH 136/159] default max_form_memory_size to 500kB --- CHANGES.rst | 3 +++ docs/request_data.rst | 15 +++++++++++---- src/werkzeug/wrappers/request.py | 5 ++++- 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 4c21d209e..183c3a6a6 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,9 @@ Version 3.1.0 Unreleased +- ``Request.max_form_memory_size`` defaults to 500kB instead of unlimited. + Non-file form fields over this size will cause a ``RequestEntityTooLarge`` + error. :issue:`2964` - Support Cookie CHIPS (Partitioned Cookies). :issue:`2797` - ``CacheControl.no_transform`` is a boolean when present. ``min_fresh`` is ``None`` when not present. Added the ``must_understand`` attribute. Fixed diff --git a/docs/request_data.rst b/docs/request_data.rst index b1c97b2c7..75811a902 100644 --- a/docs/request_data.rst +++ b/docs/request_data.rst @@ -79,16 +79,23 @@ request in such a way that the server uses too many resources to handle it. Each these limits will raise a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` if they are exceeded. -- :attr:`~Request.max_content_length` Stop reading request data after this number +- :attr:`~Request.max_content_length` - Stop reading request data after this number of bytes. It's better to configure this in the WSGI server or HTTP server, rather than the WSGI application. -- :attr:`~Request.max_form_memory_size` Stop reading request data if any form part is - larger than this number of bytes. 
While file parts can be moved to disk, regular - form field data is stored in memory only. +- :attr:`~Request.max_form_memory_size` - Stop reading request data if any + non-file form field is larger than this number of bytes. While file parts + can be moved to disk, regular form field data is stored in memory only and + could fill up memory. The default is 500kB. - :attr:`~Request.max_form_parts` Stop reading request data if more than this number of parts are sent in multipart form data. This is useful to stop a very large number of very small parts, especially file parts. The default is 1000. +Each of these values can be set on the ``Request`` class to affect the default +for all requests, or on a ``request`` instance to change the behavior for a +specific request. For example, a small limit can be set by default, and a large +limit can be set on an endpoint that accepts video uploads. These values should +be tuned to the specific needs of your application and endpoints. + Using Werkzeug to set these limits is only one layer of protection. WSGI servers and HTTPS servers should set their own limits on size and timeouts. The operating system or container manager should set limits on memory and processing time for server diff --git a/src/werkzeug/wrappers/request.py b/src/werkzeug/wrappers/request.py index 344f28b60..719a3bc00 100644 --- a/src/werkzeug/wrappers/request.py +++ b/src/werkzeug/wrappers/request.py @@ -84,8 +84,11 @@ class Request(_SansIORequest): #: data in memory for post data is longer than the specified value a #: :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. #: + #: .. versionchanged:: 3.1 + #: Defaults to 500kB instead of unlimited. + #: #: .. versionadded:: 0.5 - max_form_memory_size: int | None = None + max_form_memory_size: int | None = 500_000 #: The maximum number of multipart parts to parse, passed to #: :attr:`form_data_parser_class`. Parsing form data with more than this From 8e62ad62b0cf3d1ab460e9a4603cf0c5ccde256b Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 27 Oct 2024 15:40:43 -0700 Subject: [PATCH 137/159] drop support for Python 3.8 --- .github/workflows/tests.yaml | 1 - CHANGES.rst | 1 + docs/installation.rst | 2 +- pyproject.toml | 5 ++-- requirements/tests38.txt | 39 -------------------------- src/werkzeug/debug/tbtools.py | 2 +- src/werkzeug/formparser.py | 2 +- src/werkzeug/http.py | 2 +- src/werkzeug/middleware/lint.py | 2 +- src/werkzeug/middleware/shared_data.py | 4 +-- src/werkzeug/serving.py | 2 +- src/werkzeug/test.py | 2 +- tests/test_datastructures.py | 5 ++-- tox.ini | 13 +-------- 14 files changed, 16 insertions(+), 66 deletions(-) delete mode 100644 requirements/tests38.txt diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 8a9741235..5d26be45b 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -20,7 +20,6 @@ jobs: - {python: '3.11'} - {python: '3.10'} - {python: '3.9'} - - {python: '3.8'} - {name: PyPy, python: 'pypy-3.10', tox: pypy310} steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 diff --git a/CHANGES.rst b/CHANGES.rst index 183c3a6a6..0233b1401 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,7 @@ Version 3.1.0 Unreleased +- Drop support for Python 3.8. :pr:`2966` - ``Request.max_form_memory_size`` defaults to 500kB instead of unlimited. Non-file form fields over this size will cause a ``RequestEntityTooLarge`` error. 
:issue:`2964` diff --git a/docs/installation.rst b/docs/installation.rst index 7138f08c1..00513e123 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -6,7 +6,7 @@ Python Version -------------- We recommend using the latest version of Python. Werkzeug supports -Python 3.8 and newer. +Python 3.9 and newer. Optional dependencies diff --git a/pyproject.toml b/pyproject.toml index b387e7187..7eab71c89 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,7 @@ classifiers = [ "Topic :: Software Development :: Libraries :: Application Frameworks", "Typing :: Typed", ] -requires-python = ">=3.8" +requires-python = ">=3.9" dependencies = [ "MarkupSafe>=2.1.1", ] @@ -70,6 +70,7 @@ source = ["werkzeug", "tests"] source = ["src", "*/site-packages"] [tool.mypy] +python_version = "3.9" files = ["src/werkzeug"] show_error_codes = true pretty = true @@ -86,7 +87,7 @@ module = [ ignore_missing_imports = true [tool.pyright] -pythonVersion = "3.8" +pythonVersion = "3.9" include = ["src/werkzeug"] [tool.ruff] diff --git a/requirements/tests38.txt b/requirements/tests38.txt deleted file mode 100644 index 10829d84c..000000000 --- a/requirements/tests38.txt +++ /dev/null @@ -1,39 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --output-file=tests38.txt tests.in -# -cffi==1.17.1 - # via - # -r tests.in - # cryptography -cryptography==43.0.3 - # via -r tests.in -ephemeral-port-reserve==1.1.4 - # via -r tests.in -exceptiongroup==1.2.2 - # via pytest -iniconfig==2.0.0 - # via pytest -packaging==24.1 - # via pytest -pluggy==1.5.0 - # via pytest -psutil==6.1.0 - # via pytest-xprocess -pycparser==2.22 - # via cffi -pytest==8.3.3 - # via - # -r tests.in - # pytest-timeout - # pytest-xprocess -pytest-timeout==2.3.1 - # via -r tests.in -pytest-xprocess==0.23.0 - # via -r tests.in -tomli==2.0.2 - # via pytest -watchdog==4.0.2 - # via -r tests.in diff --git a/src/werkzeug/debug/tbtools.py b/src/werkzeug/debug/tbtools.py index e81ed6e18..d922893ea 100644 --- a/src/werkzeug/debug/tbtools.py +++ b/src/werkzeug/debug/tbtools.py @@ -185,7 +185,7 @@ def _process_traceback( "globals": f.f_globals, } - if hasattr(fs, "colno"): + if sys.version_info >= (3, 11): frame_args["colno"] = fs.colno frame_args["end_colno"] = fs.end_colno diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py index 3c6875e26..010341497 100644 --- a/src/werkzeug/formparser.py +++ b/src/werkzeug/formparser.py @@ -33,7 +33,7 @@ from _typeshed.wsgi import WSGIEnvironment - t_parse_result = t.Tuple[ + t_parse_result = tuple[ t.IO[bytes], MultiDict[str, str], MultiDict[str, FileStorage] ] diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py index 1b80c5012..56fc839f9 100644 --- a/src/werkzeug/http.py +++ b/src/werkzeug/http.py @@ -599,7 +599,7 @@ def parse_accept_header( Parse according to RFC 9110. Items with invalid ``q`` values are skipped. 
""" if cls is None: - cls = t.cast(t.Type[_TAnyAccept], ds.Accept) + cls = t.cast(type[_TAnyAccept], ds.Accept) if not value: return cls(None) diff --git a/src/werkzeug/middleware/lint.py b/src/werkzeug/middleware/lint.py index de93b526a..3714271b1 100644 --- a/src/werkzeug/middleware/lint.py +++ b/src/werkzeug/middleware/lint.py @@ -435,5 +435,5 @@ def checking_start_response( app_iter = self.app(environ, t.cast("StartResponse", checking_start_response)) self.check_iterator(app_iter) return GuardedIterator( - app_iter, t.cast(t.Tuple[int, Headers], headers_set), chunks + app_iter, t.cast(tuple[int, Headers], headers_set), chunks ) diff --git a/src/werkzeug/middleware/shared_data.py b/src/werkzeug/middleware/shared_data.py index 0f467f2e2..c7c06df5a 100644 --- a/src/werkzeug/middleware/shared_data.py +++ b/src/werkzeug/middleware/shared_data.py @@ -30,8 +30,8 @@ from ..wsgi import get_path_info from ..wsgi import wrap_file -_TOpener = t.Callable[[], t.Tuple[t.IO[bytes], datetime, int]] -_TLoader = t.Callable[[t.Optional[str]], t.Tuple[t.Optional[str], t.Optional[_TOpener]]] +_TOpener = t.Callable[[], tuple[t.IO[bytes], datetime, int]] +_TLoader = t.Callable[[t.Optional[str]], tuple[t.Optional[str], t.Optional[_TOpener]]] if t.TYPE_CHECKING: from _typeshed.wsgi import StartResponse diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py index ef32b8811..ec166408e 100644 --- a/src/werkzeug/serving.py +++ b/src/werkzeug/serving.py @@ -81,7 +81,7 @@ class ForkingMixIn: # type: ignore LISTEN_QUEUE = 128 _TSSLContextArg = t.Optional[ - t.Union["ssl.SSLContext", t.Tuple[str, t.Optional[str]], t.Literal["adhoc"]] + t.Union["ssl.SSLContext", tuple[str, t.Optional[str]], t.Literal["adhoc"]] ] if t.TYPE_CHECKING: diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py index 38f69bfb9..1dfb9320e 100644 --- a/src/werkzeug/test.py +++ b/src/werkzeug/test.py @@ -818,7 +818,7 @@ def __init__( {}, ) - self.response_wrapper = t.cast(t.Type["TestResponse"], response_wrapper) + self.response_wrapper = t.cast(type["TestResponse"], response_wrapper) if use_cookies: self._cookies: dict[tuple[str, str, str], Cookie] | None = {} diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index a681c022b..76de1d849 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -1,7 +1,6 @@ import io import pickle import tempfile -import typing as t from contextlib import contextmanager from copy import copy from copy import deepcopy @@ -42,7 +41,7 @@ def items(self, multi=1): class _MutableMultiDictTests: - storage_class: t.Type["ds.MultiDict"] + storage_class: type["ds.MultiDict"] def test_pickle(self): cls = self.storage_class @@ -259,7 +258,7 @@ def test_basic_interface(self): class _ImmutableDictTests: - storage_class: t.Type[dict] + storage_class: type[dict] def test_follows_dict_interface(self): cls = self.storage_class diff --git a/tox.ini b/tox.ini index 77f381c59..cebd251fd 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] envlist = - py3{13,12,11,10,9,8} + py3{13,12,11,10,9} pypy310 style typing @@ -15,9 +15,6 @@ use_frozen_constraints = true deps = -r requirements/tests.txt commands = pytest -v --tb=short --basetemp={envtmpdir} {posargs} -[testenv:py38,py3.8] -deps = -r requirements/tests38.txt - [testenv:style] deps = pre-commit skip_install = true @@ -53,11 +50,3 @@ commands = pip-compile tests.in -q {posargs:-U} pip-compile typing.in -q {posargs:-U} pip-compile dev.in -q {posargs:-U} - -[testenv:update-requirements38] -base_python = 3.8 -labels = update 
-deps = pip-tools -skip_install = true -change_dir = requirements -commands = pip-compile tests.in -q -o tests38.txt {posargs:-U} From c1477605c5f237dd1d4230756d70634b616c520b Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 27 Oct 2024 15:54:58 -0700 Subject: [PATCH 138/159] remove previously deprecated code --- CHANGES.rst | 1 + src/werkzeug/__init__.py | 21 --------------------- 2 files changed, 1 insertion(+), 21 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 0233b1401..aace07ee4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -6,6 +6,7 @@ Version 3.1.0 Unreleased - Drop support for Python 3.8. :pr:`2966` +- Remove previously deprecated code. :pr:`2967` - ``Request.max_form_memory_size`` defaults to 500kB instead of unlimited. Non-file form fields over this size will cause a ``RequestEntityTooLarge`` error. :issue:`2964` diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py index 57cb7539a..0b248fd86 100644 --- a/src/werkzeug/__init__.py +++ b/src/werkzeug/__init__.py @@ -1,25 +1,4 @@ -from __future__ import annotations - -import typing as t - from .serving import run_simple as run_simple from .test import Client as Client from .wrappers import Request as Request from .wrappers import Response as Response - - -def __getattr__(name: str) -> t.Any: - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " Werkzeug 3.1. Use feature detection or" - " 'importlib.metadata.version(\"werkzeug\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("werkzeug") - - raise AttributeError(name) From 763609ae21a1d8d649ca19f30c61257fbee670c1 Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 27 Oct 2024 16:46:22 -0700 Subject: [PATCH 139/159] increase pbkdf2 iterations --- CHANGES.rst | 2 ++ src/werkzeug/security.py | 2 +- tests/test_security.py | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index aace07ee4..8280c91ba 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -17,6 +17,8 @@ Unreleased - Add ``stale_while_revalidate`` and ``stale_if_error`` properties to ``ResponseCacheControl``. :issue:`2948` - Add 421 ``MisdirectedRequest`` HTTP exception. :issue:`2850` +- Increase default work factor for PBKDF2 to 1,000,000 iterations. 
:issue:`2969` + Version 3.0.6 diff --git a/src/werkzeug/security.py b/src/werkzeug/security.py index 997597990..a18381779 100644 --- a/src/werkzeug/security.py +++ b/src/werkzeug/security.py @@ -7,7 +7,7 @@ import secrets SALT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" -DEFAULT_PBKDF2_ITERATIONS = 600000 +DEFAULT_PBKDF2_ITERATIONS = 1_000_000 _os_alt_seps: list[str] = list( sep for sep in [os.sep, os.path.altsep] if sep is not None and sep != "/" diff --git a/tests/test_security.py b/tests/test_security.py index 3ce741a99..455936879 100644 --- a/tests/test_security.py +++ b/tests/test_security.py @@ -25,7 +25,7 @@ def test_scrypt(): def test_pbkdf2(): value = generate_password_hash("secret", method="pbkdf2") assert check_password_hash(value, "secret") - assert value.startswith("pbkdf2:sha256:600000$") + assert value.startswith("pbkdf2:sha256:1000000$") def test_salted_hashes(): From 0294d88b9c10fe079ecfe9e02129d52b1e95de54 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 28 Oct 2024 09:29:57 -0700 Subject: [PATCH 140/159] inline annotations for datastructures --- CHANGES.rst | 2 + src/werkzeug/datastructures/accept.py | 98 +-- src/werkzeug/datastructures/accept.pyi | 54 -- src/werkzeug/datastructures/auth.py | 3 +- src/werkzeug/datastructures/cache_control.py | 72 ++- src/werkzeug/datastructures/cache_control.pyi | 130 ---- src/werkzeug/datastructures/csp.py | 82 +-- src/werkzeug/datastructures/csp.pyi | 169 ----- src/werkzeug/datastructures/etag.py | 49 +- src/werkzeug/datastructures/etag.pyi | 30 - src/werkzeug/datastructures/file_storage.py | 85 +-- src/werkzeug/datastructures/file_storage.pyi | 49 -- src/werkzeug/datastructures/headers.py | 392 +++++++----- src/werkzeug/datastructures/headers.pyi | 109 ---- src/werkzeug/datastructures/mixins.py | 274 +++++---- src/werkzeug/datastructures/mixins.pyi | 97 --- src/werkzeug/datastructures/range.py | 122 ++-- src/werkzeug/datastructures/range.pyi | 57 -- src/werkzeug/datastructures/structures.py | 579 +++++++++++------- src/werkzeug/datastructures/structures.pyi | 206 ------- src/werkzeug/exceptions.py | 2 +- src/werkzeug/http.py | 4 + src/werkzeug/sansio/http.py | 1 - src/werkzeug/test.py | 4 +- src/werkzeug/utils.py | 2 +- 25 files changed, 1095 insertions(+), 1577 deletions(-) delete mode 100644 src/werkzeug/datastructures/accept.pyi delete mode 100644 src/werkzeug/datastructures/cache_control.pyi delete mode 100644 src/werkzeug/datastructures/csp.pyi delete mode 100644 src/werkzeug/datastructures/etag.pyi delete mode 100644 src/werkzeug/datastructures/file_storage.pyi delete mode 100644 src/werkzeug/datastructures/headers.pyi delete mode 100644 src/werkzeug/datastructures/mixins.pyi delete mode 100644 src/werkzeug/datastructures/range.pyi delete mode 100644 src/werkzeug/datastructures/structures.pyi diff --git a/CHANGES.rst b/CHANGES.rst index 8280c91ba..2976f889a 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -18,6 +18,8 @@ Unreleased ``ResponseCacheControl``. :issue:`2948` - Add 421 ``MisdirectedRequest`` HTTP exception. :issue:`2850` - Increase default work factor for PBKDF2 to 1,000,000 iterations. :issue:`2969` +- Inline annotations for ``datastructures``, removing stub files. 
+ :issue:`2970` diff --git a/src/werkzeug/datastructures/accept.py b/src/werkzeug/datastructures/accept.py index d80f0bbb8..44179a93f 100644 --- a/src/werkzeug/datastructures/accept.py +++ b/src/werkzeug/datastructures/accept.py @@ -1,12 +1,14 @@ from __future__ import annotations import codecs +import collections.abc as cabc import re +import typing as t from .structures import ImmutableList -class Accept(ImmutableList): +class Accept(ImmutableList[tuple[str, float]]): """An :class:`Accept` object is just a list subclass for lists of ``(value, quality)`` tuples. It is automatically sorted by specificity and quality. @@ -42,29 +44,39 @@ class Accept(ImmutableList): """ - def __init__(self, values=()): + def __init__( + self, values: Accept | cabc.Iterable[tuple[str, float]] | None = () + ) -> None: if values is None: - list.__init__(self) + super().__init__() self.provided = False elif isinstance(values, Accept): self.provided = values.provided - list.__init__(self, values) + super().__init__(values) else: self.provided = True values = sorted( values, key=lambda x: (self._specificity(x[0]), x[1]), reverse=True ) - list.__init__(self, values) + super().__init__(values) - def _specificity(self, value): + def _specificity(self, value: str) -> tuple[bool, ...]: """Returns a tuple describing the value's specificity.""" return (value != "*",) - def _value_matches(self, value, item): + def _value_matches(self, value: str, item: str) -> bool: """Check if a value matches a given accept item.""" return item == "*" or item.lower() == value.lower() - def __getitem__(self, key): + @t.overload + def __getitem__(self, key: str) -> float: ... + @t.overload + def __getitem__(self, key: t.SupportsIndex) -> tuple[str, float]: ... + @t.overload + def __getitem__(self, key: slice) -> list[tuple[str, float]]: ... + def __getitem__( + self, key: str | t.SupportsIndex | slice + ) -> float | tuple[str, float] | list[tuple[str, float]]: """Besides index lookup (getting item n) you can also pass it a string to get the quality for the item. If the item is not in the list, the returned quality is ``0``. @@ -73,7 +85,7 @@ def __getitem__(self, key): return self.quality(key) return list.__getitem__(self, key) - def quality(self, key): + def quality(self, key: str) -> float: """Returns the quality of the key. .. versionadded:: 0.6 @@ -85,17 +97,17 @@ def quality(self, key): return quality return 0 - def __contains__(self, value): + def __contains__(self, value: str) -> bool: # type: ignore[override] for item, _quality in self: if self._value_matches(value, item): return True return False - def __repr__(self): + def __repr__(self) -> str: pairs_str = ", ".join(f"({x!r}, {y})" for x, y in self) return f"{type(self).__name__}([{pairs_str}])" - def index(self, key): + def index(self, key: str | tuple[str, float]) -> int: # type: ignore[override] """Get the position of an entry or raise :exc:`ValueError`. :param key: The key to be looked up. @@ -111,7 +123,7 @@ def index(self, key): raise ValueError(key) return list.index(self, key) - def find(self, key): + def find(self, key: str | tuple[str, float]) -> int: """Get the position of an entry or return -1. :param key: The key to be looked up. 
@@ -121,12 +133,12 @@ def find(self, key): except ValueError: return -1 - def values(self): + def values(self) -> cabc.Iterator[str]: """Iterate over all values.""" for item in self: yield item[0] - def to_header(self): + def to_header(self) -> str: """Convert the header set into an HTTP header string.""" result = [] for value, quality in self: @@ -135,17 +147,23 @@ def to_header(self): result.append(value) return ",".join(result) - def __str__(self): + def __str__(self) -> str: return self.to_header() - def _best_single_match(self, match): + def _best_single_match(self, match: str) -> tuple[str, float] | None: for client_item, quality in self: if self._value_matches(match, client_item): # self is sorted by specificity descending, we can exit return client_item, quality return None - def best_match(self, matches, default=None): + @t.overload + def best_match(self, matches: cabc.Iterable[str]) -> str | None: ... + @t.overload + def best_match(self, matches: cabc.Iterable[str], default: str = ...) -> str: ... + def best_match( + self, matches: cabc.Iterable[str], default: str | None = None + ) -> str | None: """Returns the best match from a list of possible matches based on the specificity and quality of the client. If two items have the same quality and specificity, the one is returned that comes first. @@ -154,8 +172,8 @@ def best_match(self, matches, default=None): :param default: the value that is returned if none match """ result = default - best_quality = -1 - best_specificity = (-1,) + best_quality: float = -1 + best_specificity: tuple[float, ...] = (-1,) for server_item in matches: match = self._best_single_match(server_item) if not match: @@ -172,16 +190,18 @@ def best_match(self, matches, default=None): return result @property - def best(self): + def best(self) -> str | None: """The best match as value.""" if self: return self[0][0] + return None + _mime_split_re = re.compile(r"/|(?:\s*;\s*)") -def _normalize_mime(value): +def _normalize_mime(value: str) -> list[str]: return _mime_split_re.split(value.lower()) @@ -190,10 +210,10 @@ class MIMEAccept(Accept): mimetypes. """ - def _specificity(self, value): + def _specificity(self, value: str) -> tuple[bool, ...]: return tuple(x != "*" for x in _mime_split_re.split(value)) - def _value_matches(self, value, item): + def _value_matches(self, value: str, item: str) -> bool: # item comes from the client, can't match if it's invalid. 
if "/" not in item: return False @@ -234,27 +254,25 @@ def _value_matches(self, value, item): ) @property - def accept_html(self): + def accept_html(self) -> bool: """True if this object accepts HTML.""" - return ( - "text/html" in self or "application/xhtml+xml" in self or self.accept_xhtml - ) + return "text/html" in self or self.accept_xhtml # type: ignore[comparison-overlap] @property - def accept_xhtml(self): + def accept_xhtml(self) -> bool: """True if this object accepts XHTML.""" - return "application/xhtml+xml" in self or "application/xml" in self + return "application/xhtml+xml" in self or "application/xml" in self # type: ignore[comparison-overlap] @property - def accept_json(self): + def accept_json(self) -> bool: """True if this object accepts JSON.""" - return "application/json" in self + return "application/json" in self # type: ignore[comparison-overlap] _locale_delim_re = re.compile(r"[_-]") -def _normalize_lang(value): +def _normalize_lang(value: str) -> list[str]: """Process a language tag for matching.""" return _locale_delim_re.split(value.lower()) @@ -262,10 +280,16 @@ def _normalize_lang(value): class LanguageAccept(Accept): """Like :class:`Accept` but with normalization for language tags.""" - def _value_matches(self, value, item): + def _value_matches(self, value: str, item: str) -> bool: return item == "*" or _normalize_lang(value) == _normalize_lang(item) - def best_match(self, matches, default=None): + @t.overload + def best_match(self, matches: cabc.Iterable[str]) -> str | None: ... + @t.overload + def best_match(self, matches: cabc.Iterable[str], default: str = ...) -> str: ... + def best_match( + self, matches: cabc.Iterable[str], default: str | None = None + ) -> str | None: """Given a list of supported values, finds the best match from the list of accepted values. @@ -316,8 +340,8 @@ def best_match(self, matches, default=None): class CharsetAccept(Accept): """Like :class:`Accept` but with normalization for charsets.""" - def _value_matches(self, value, item): - def _normalize(name): + def _value_matches(self, value: str, item: str) -> bool: + def _normalize(name: str) -> str: try: return codecs.lookup(name).name except LookupError: diff --git a/src/werkzeug/datastructures/accept.pyi b/src/werkzeug/datastructures/accept.pyi deleted file mode 100644 index 4b74dd950..000000000 --- a/src/werkzeug/datastructures/accept.pyi +++ /dev/null @@ -1,54 +0,0 @@ -from collections.abc import Iterable -from collections.abc import Iterator -from typing import overload - -from .structures import ImmutableList - -class Accept(ImmutableList[tuple[str, int]]): - provided: bool - def __init__( - self, values: Accept | Iterable[tuple[str, float]] | None = None - ) -> None: ... - def _specificity(self, value: str) -> tuple[bool, ...]: ... - def _value_matches(self, value: str, item: str) -> bool: ... - @overload # type: ignore - def __getitem__(self, key: str) -> int: ... - @overload - def __getitem__(self, key: int) -> tuple[str, int]: ... - @overload - def __getitem__(self, key: slice) -> Iterable[tuple[str, int]]: ... - def quality(self, key: str) -> int: ... - def __contains__(self, value: str) -> bool: ... # type: ignore - def index(self, key: str) -> int: ... # type: ignore - def find(self, key: str) -> int: ... - def values(self) -> Iterator[str]: ... - def to_header(self) -> str: ... - def _best_single_match(self, match: str) -> tuple[str, int] | None: ... - @overload - def best_match(self, matches: Iterable[str], default: str) -> str: ... 
- @overload - def best_match( - self, matches: Iterable[str], default: str | None = None - ) -> str | None: ... - @property - def best(self) -> str: ... - -def _normalize_mime(value: str) -> list[str]: ... - -class MIMEAccept(Accept): - def _specificity(self, value: str) -> tuple[bool, ...]: ... - def _value_matches(self, value: str, item: str) -> bool: ... - @property - def accept_html(self) -> bool: ... - @property - def accept_xhtml(self) -> bool: ... - @property - def accept_json(self) -> bool: ... - -def _normalize_lang(value: str) -> list[str]: ... - -class LanguageAccept(Accept): - def _value_matches(self, value: str, item: str) -> bool: ... - -class CharsetAccept(Accept): - def _value_matches(self, value: str, item: str) -> bool: ... diff --git a/src/werkzeug/datastructures/auth.py b/src/werkzeug/datastructures/auth.py index a3ca0de46..42f7aa468 100644 --- a/src/werkzeug/datastructures/auth.py +++ b/src/werkzeug/datastructures/auth.py @@ -2,6 +2,7 @@ import base64 import binascii +import collections.abc as cabc import typing as t from ..http import dump_header @@ -176,7 +177,7 @@ def __init__( values, lambda _: self._trigger_on_update() ) self._token = token - self._on_update: t.Callable[[WWWAuthenticate], None] | None = None + self._on_update: cabc.Callable[[WWWAuthenticate], None] | None = None def _trigger_on_update(self) -> None: if self._on_update is not None: diff --git a/src/werkzeug/datastructures/cache_control.py b/src/werkzeug/datastructures/cache_control.py index fa7ed0a71..e185944b3 100644 --- a/src/werkzeug/datastructures/cache_control.py +++ b/src/werkzeug/datastructures/cache_control.py @@ -1,10 +1,13 @@ from __future__ import annotations +import collections.abc as cabc +import typing as t + from .mixins import ImmutableDictMixin -from .mixins import UpdateDictMixin +from .structures import CallbackDict -def cache_control_property(key, empty, type): +def cache_control_property(key: str, empty: t.Any, type: type[t.Any] | None) -> t.Any: """Return a new property object for a cache header. Useful if you want to add support for a cache extension in a subclass. @@ -19,7 +22,7 @@ def cache_control_property(key, empty, type): ) -class _CacheControl(UpdateDictMixin, dict): +class _CacheControl(CallbackDict[str, t.Any]): """Subclass of a dict that stores values for a Cache-Control header. It has accessors for all the cache-control directives specified in RFC 2616. The class does not differentiate between request and response directives. @@ -59,17 +62,22 @@ class _CacheControl(UpdateDictMixin, dict): no longer existing `CacheControl` class. 
""" - no_cache = cache_control_property("no-cache", "*", None) - no_store = cache_control_property("no-store", None, bool) - max_age = cache_control_property("max-age", -1, int) - no_transform = cache_control_property("no-transform", None, bool) - - def __init__(self, values=(), on_update=None): - dict.__init__(self, values or ()) - self.on_update = on_update + no_cache: str | bool | None = cache_control_property("no-cache", "*", None) + no_store: bool = cache_control_property("no-store", None, bool) + max_age: int | None = cache_control_property("max-age", -1, int) + no_transform: bool = cache_control_property("no-transform", None, bool) + + def __init__( + self, + values: cabc.Mapping[str, t.Any] | cabc.Iterable[tuple[str, t.Any]] | None = (), + on_update: cabc.Callable[[_CacheControl], None] | None = None, + ): + super().__init__(values, on_update) self.provided = values is not None - def _get_cache_value(self, key, empty, type): + def _get_cache_value( + self, key: str, empty: t.Any, type: type[t.Any] | None + ) -> t.Any: """Used internally by the accessor properties.""" if type is bool: return key in self @@ -85,7 +93,9 @@ def _get_cache_value(self, key, empty, type): return value return None - def _set_cache_value(self, key, value, type): + def _set_cache_value( + self, key: str, value: t.Any, type: type[t.Any] | None + ) -> None: """Used internally by the accessor properties.""" if type is bool: if value: @@ -103,26 +113,26 @@ def _set_cache_value(self, key, value, type): else: self[key] = value - def _del_cache_value(self, key): + def _del_cache_value(self, key: str) -> None: """Used internally by the accessor properties.""" if key in self: del self[key] - def to_header(self): + def to_header(self) -> str: """Convert the stored values into a cache control header.""" return http.dump_header(self) - def __str__(self): + def __str__(self) -> str: return self.to_header() - def __repr__(self): + def __repr__(self) -> str: kv_str = " ".join(f"{k}={v!r}" for k, v in sorted(self.items())) return f"<{type(self).__name__} {kv_str}>" cache_property = staticmethod(cache_control_property) -class RequestCacheControl(ImmutableDictMixin, _CacheControl): +class RequestCacheControl(ImmutableDictMixin[str, t.Any], _CacheControl): # type: ignore[misc] """A cache control for requests. This is immutable and gives access to all the request-relevant cache control headers. @@ -146,9 +156,9 @@ class RequestCacheControl(ImmutableDictMixin, _CacheControl): both for request and response. """ - max_stale = cache_control_property("max-stale", "*", int) - min_fresh = cache_control_property("min-fresh", None, int) - only_if_cached = cache_control_property("only-if-cached", None, bool) + max_stale: str | int | None = cache_control_property("max-stale", "*", int) + min_fresh: int | None = cache_control_property("min-fresh", None, int) + only_if_cached: bool = cache_control_property("only-if-cached", None, bool) class ResponseCacheControl(_CacheControl): @@ -180,15 +190,17 @@ class ResponseCacheControl(_CacheControl): both for request and response. 
""" - public = cache_control_property("public", None, bool) - private = cache_control_property("private", "*", None) - must_revalidate = cache_control_property("must-revalidate", None, bool) - proxy_revalidate = cache_control_property("proxy-revalidate", None, bool) - s_maxage = cache_control_property("s-maxage", None, int) - immutable = cache_control_property("immutable", None, bool) - must_understand = cache_control_property("must-understand", None, bool) - stale_while_revalidate = cache_control_property("stale-while-revalidate", None, int) - stale_if_error = cache_control_property("stale-if-error", None, int) + public: bool = cache_control_property("public", None, bool) + private: str | None = cache_control_property("private", "*", None) + must_revalidate: bool = cache_control_property("must-revalidate", None, bool) + proxy_revalidate: bool = cache_control_property("proxy-revalidate", None, bool) + s_maxage: int | None = cache_control_property("s-maxage", None, int) + immutable: bool = cache_control_property("immutable", None, bool) + must_understand: bool = cache_control_property("must-understand", None, bool) + stale_while_revalidate: int | None = cache_control_property( + "stale-while-revalidate", None, int + ) + stale_if_error: int | None = cache_control_property("stale-if-error", None, int) # circular dependencies diff --git a/src/werkzeug/datastructures/cache_control.pyi b/src/werkzeug/datastructures/cache_control.pyi deleted file mode 100644 index 93c595db8..000000000 --- a/src/werkzeug/datastructures/cache_control.pyi +++ /dev/null @@ -1,130 +0,0 @@ -from collections.abc import Callable -from collections.abc import Iterable -from collections.abc import Mapping -from typing import Literal -from typing import TypeVar - -from .mixins import ImmutableDictMixin -from .mixins import UpdateDictMixin - -T = TypeVar("T") -_CPT = TypeVar("_CPT", str, int, bool) - -def cache_control_property( - key: str, empty: _CPT | None, type: type[_CPT] -) -> property: ... - -class _CacheControl( - UpdateDictMixin[str, str | int | bool | None], dict[str, str | int | bool | None] -): - provided: bool - def __init__( - self, - values: Mapping[str, str | int | bool | None] - | Iterable[tuple[str, str | int | bool | None]] = (), - on_update: Callable[[_CacheControl], None] | None = None, - ) -> None: ... - @property - def no_cache(self) -> str | None: ... - @no_cache.setter - def no_cache(self, value: Literal[True] | str | None) -> None: ... - @no_cache.deleter - def no_cache(self) -> None: ... - @property - def no_store(self) -> bool: ... - @no_store.setter - def no_store(self, value: bool | None) -> None: ... - @no_store.deleter - def no_store(self) -> None: ... - @property - def max_age(self) -> int | None: ... - @max_age.setter - def max_age(self, value: int | None) -> None: ... - @max_age.deleter - def max_age(self) -> None: ... - @property - def no_transform(self) -> bool: ... - @no_transform.setter - def no_transform(self, value: bool | None) -> None: ... - @no_transform.deleter - def no_transform(self) -> None: ... - def _get_cache_value(self, key: str, empty: T | None, type: type[T]) -> T: ... - def _set_cache_value(self, key: str, value: T | None, type: type[T]) -> None: ... - def _del_cache_value(self, key: str) -> None: ... - def to_header(self) -> str: ... - @staticmethod - def cache_property(key: str, empty: _CPT | None, type: type[_CPT]) -> property: ... 
- -class RequestCacheControl( # type: ignore[misc] - ImmutableDictMixin[str, str | int | bool | None], _CacheControl -): - @property # type: ignore - def no_cache(self) -> str | None: ... - @property # type: ignore - def no_store(self) -> bool: ... - @property # type: ignore - def max_age(self) -> int | None: ... - @property # type: ignore - def no_transform(self) -> bool: ... - @property - def max_stale(self) -> int | Literal["*"] | None: ... - @property - def min_fresh(self) -> int | None: ... - @property - def only_if_cached(self) -> bool | None: ... - -class ResponseCacheControl(_CacheControl): - @property - def public(self) -> bool: ... - @public.setter - def public(self, value: bool | None) -> None: ... - @public.deleter - def public(self) -> None: ... - @property - def private(self) -> str | None: ... - @private.setter - def private(self, value: Literal[True] | str | None) -> None: ... - @private.deleter - def private(self) -> None: ... - @property - def must_revalidate(self) -> bool: ... - @must_revalidate.setter - def must_revalidate(self, value: bool | None) -> None: ... - @must_revalidate.deleter - def must_revalidate(self) -> None: ... - @property - def proxy_revalidate(self) -> bool: ... - @proxy_revalidate.setter - def proxy_revalidate(self, value: bool | None) -> None: ... - @proxy_revalidate.deleter - def proxy_revalidate(self) -> None: ... - @property - def s_maxage(self) -> int | None: ... - @s_maxage.setter - def s_maxage(self, value: int | None) -> None: ... - @s_maxage.deleter - def s_maxage(self) -> None: ... - @property - def immutable(self) -> bool: ... - @immutable.setter - def immutable(self, value: bool | None) -> None: ... - @immutable.deleter - def immutable(self) -> None: ... - @property - def must_understand(self) -> bool: ... - @must_understand.setter - def must_understand(self, value: bool | None) -> None: ... - @must_understand.deleter - def must_understand(self) -> None: ... - @property - def stale_while_revalidate(self) -> int | None: ... - @stale_while_revalidate.setter - def stale_while_revalidate(self, value: int | None) -> None: ... - @stale_while_revalidate.deleter - def stale_while_revalidate(self) -> None: ... - @property - def stale_if_error(self) -> int | None: ... - @stale_if_error.setter - def stale_if_error(self, value: int | None) -> None: ... - @stale_if_error.deleter - def stale_if_error(self) -> None: ... diff --git a/src/werkzeug/datastructures/csp.py b/src/werkzeug/datastructures/csp.py index dde941495..0353eebea 100644 --- a/src/werkzeug/datastructures/csp.py +++ b/src/werkzeug/datastructures/csp.py @@ -1,9 +1,12 @@ from __future__ import annotations -from .mixins import UpdateDictMixin +import collections.abc as cabc +import typing as t +from .structures import CallbackDict -def csp_property(key): + +def csp_property(key: str) -> t.Any: """Return a new property object for a content security policy header. Useful if you want to add support for a csp extension in a subclass. @@ -16,7 +19,7 @@ def csp_property(key): ) -class ContentSecurityPolicy(UpdateDictMixin, dict): +class ContentSecurityPolicy(CallbackDict[str, str]): """Subclass of a dict that stores values for a Content Security Policy header. It has accessors for all the level 3 policies. 
@@ -33,62 +36,65 @@ class ContentSecurityPolicy(UpdateDictMixin, dict): """ - base_uri = csp_property("base-uri") - child_src = csp_property("child-src") - connect_src = csp_property("connect-src") - default_src = csp_property("default-src") - font_src = csp_property("font-src") - form_action = csp_property("form-action") - frame_ancestors = csp_property("frame-ancestors") - frame_src = csp_property("frame-src") - img_src = csp_property("img-src") - manifest_src = csp_property("manifest-src") - media_src = csp_property("media-src") - navigate_to = csp_property("navigate-to") - object_src = csp_property("object-src") - prefetch_src = csp_property("prefetch-src") - plugin_types = csp_property("plugin-types") - report_to = csp_property("report-to") - report_uri = csp_property("report-uri") - sandbox = csp_property("sandbox") - script_src = csp_property("script-src") - script_src_attr = csp_property("script-src-attr") - script_src_elem = csp_property("script-src-elem") - style_src = csp_property("style-src") - style_src_attr = csp_property("style-src-attr") - style_src_elem = csp_property("style-src-elem") - worker_src = csp_property("worker-src") - - def __init__(self, values=(), on_update=None): - dict.__init__(self, values or ()) - self.on_update = on_update + base_uri: str | None = csp_property("base-uri") + child_src: str | None = csp_property("child-src") + connect_src: str | None = csp_property("connect-src") + default_src: str | None = csp_property("default-src") + font_src: str | None = csp_property("font-src") + form_action: str | None = csp_property("form-action") + frame_ancestors: str | None = csp_property("frame-ancestors") + frame_src: str | None = csp_property("frame-src") + img_src: str | None = csp_property("img-src") + manifest_src: str | None = csp_property("manifest-src") + media_src: str | None = csp_property("media-src") + navigate_to: str | None = csp_property("navigate-to") + object_src: str | None = csp_property("object-src") + prefetch_src: str | None = csp_property("prefetch-src") + plugin_types: str | None = csp_property("plugin-types") + report_to: str | None = csp_property("report-to") + report_uri: str | None = csp_property("report-uri") + sandbox: str | None = csp_property("sandbox") + script_src: str | None = csp_property("script-src") + script_src_attr: str | None = csp_property("script-src-attr") + script_src_elem: str | None = csp_property("script-src-elem") + style_src: str | None = csp_property("style-src") + style_src_attr: str | None = csp_property("style-src-attr") + style_src_elem: str | None = csp_property("style-src-elem") + worker_src: str | None = csp_property("worker-src") + + def __init__( + self, + values: cabc.Mapping[str, str] | cabc.Iterable[tuple[str, str]] | None = (), + on_update: cabc.Callable[[ContentSecurityPolicy], None] | None = None, + ) -> None: + super().__init__(values, on_update) self.provided = values is not None - def _get_value(self, key): + def _get_value(self, key: str) -> str | None: """Used internally by the accessor properties.""" return self.get(key) - def _set_value(self, key, value): + def _set_value(self, key: str, value: str | None) -> None: """Used internally by the accessor properties.""" if value is None: self.pop(key, None) else: self[key] = value - def _del_value(self, key): + def _del_value(self, key: str) -> None: """Used internally by the accessor properties.""" if key in self: del self[key] - def to_header(self): + def to_header(self) -> str: """Convert the stored values into a cache control header.""" from 
..http import dump_csp_header return dump_csp_header(self) - def __str__(self): + def __str__(self) -> str: return self.to_header() - def __repr__(self): + def __repr__(self) -> str: kv_str = " ".join(f"{k}={v!r}" for k, v in sorted(self.items())) return f"<{type(self).__name__} {kv_str}>" diff --git a/src/werkzeug/datastructures/csp.pyi b/src/werkzeug/datastructures/csp.pyi deleted file mode 100644 index f9e2ac0f4..000000000 --- a/src/werkzeug/datastructures/csp.pyi +++ /dev/null @@ -1,169 +0,0 @@ -from collections.abc import Callable -from collections.abc import Iterable -from collections.abc import Mapping - -from .mixins import UpdateDictMixin - -def csp_property(key: str) -> property: ... - -class ContentSecurityPolicy(UpdateDictMixin[str, str], dict[str, str]): - @property - def base_uri(self) -> str | None: ... - @base_uri.setter - def base_uri(self, value: str | None) -> None: ... - @base_uri.deleter - def base_uri(self) -> None: ... - @property - def child_src(self) -> str | None: ... - @child_src.setter - def child_src(self, value: str | None) -> None: ... - @child_src.deleter - def child_src(self) -> None: ... - @property - def connect_src(self) -> str | None: ... - @connect_src.setter - def connect_src(self, value: str | None) -> None: ... - @connect_src.deleter - def connect_src(self) -> None: ... - @property - def default_src(self) -> str | None: ... - @default_src.setter - def default_src(self, value: str | None) -> None: ... - @default_src.deleter - def default_src(self) -> None: ... - @property - def font_src(self) -> str | None: ... - @font_src.setter - def font_src(self, value: str | None) -> None: ... - @font_src.deleter - def font_src(self) -> None: ... - @property - def form_action(self) -> str | None: ... - @form_action.setter - def form_action(self, value: str | None) -> None: ... - @form_action.deleter - def form_action(self) -> None: ... - @property - def frame_ancestors(self) -> str | None: ... - @frame_ancestors.setter - def frame_ancestors(self, value: str | None) -> None: ... - @frame_ancestors.deleter - def frame_ancestors(self) -> None: ... - @property - def frame_src(self) -> str | None: ... - @frame_src.setter - def frame_src(self, value: str | None) -> None: ... - @frame_src.deleter - def frame_src(self) -> None: ... - @property - def img_src(self) -> str | None: ... - @img_src.setter - def img_src(self, value: str | None) -> None: ... - @img_src.deleter - def img_src(self) -> None: ... - @property - def manifest_src(self) -> str | None: ... - @manifest_src.setter - def manifest_src(self, value: str | None) -> None: ... - @manifest_src.deleter - def manifest_src(self) -> None: ... - @property - def media_src(self) -> str | None: ... - @media_src.setter - def media_src(self, value: str | None) -> None: ... - @media_src.deleter - def media_src(self) -> None: ... - @property - def navigate_to(self) -> str | None: ... - @navigate_to.setter - def navigate_to(self, value: str | None) -> None: ... - @navigate_to.deleter - def navigate_to(self) -> None: ... - @property - def object_src(self) -> str | None: ... - @object_src.setter - def object_src(self, value: str | None) -> None: ... - @object_src.deleter - def object_src(self) -> None: ... - @property - def prefetch_src(self) -> str | None: ... - @prefetch_src.setter - def prefetch_src(self, value: str | None) -> None: ... - @prefetch_src.deleter - def prefetch_src(self) -> None: ... - @property - def plugin_types(self) -> str | None: ... 
- @plugin_types.setter - def plugin_types(self, value: str | None) -> None: ... - @plugin_types.deleter - def plugin_types(self) -> None: ... - @property - def report_to(self) -> str | None: ... - @report_to.setter - def report_to(self, value: str | None) -> None: ... - @report_to.deleter - def report_to(self) -> None: ... - @property - def report_uri(self) -> str | None: ... - @report_uri.setter - def report_uri(self, value: str | None) -> None: ... - @report_uri.deleter - def report_uri(self) -> None: ... - @property - def sandbox(self) -> str | None: ... - @sandbox.setter - def sandbox(self, value: str | None) -> None: ... - @sandbox.deleter - def sandbox(self) -> None: ... - @property - def script_src(self) -> str | None: ... - @script_src.setter - def script_src(self, value: str | None) -> None: ... - @script_src.deleter - def script_src(self) -> None: ... - @property - def script_src_attr(self) -> str | None: ... - @script_src_attr.setter - def script_src_attr(self, value: str | None) -> None: ... - @script_src_attr.deleter - def script_src_attr(self) -> None: ... - @property - def script_src_elem(self) -> str | None: ... - @script_src_elem.setter - def script_src_elem(self, value: str | None) -> None: ... - @script_src_elem.deleter - def script_src_elem(self) -> None: ... - @property - def style_src(self) -> str | None: ... - @style_src.setter - def style_src(self, value: str | None) -> None: ... - @style_src.deleter - def style_src(self) -> None: ... - @property - def style_src_attr(self) -> str | None: ... - @style_src_attr.setter - def style_src_attr(self, value: str | None) -> None: ... - @style_src_attr.deleter - def style_src_attr(self) -> None: ... - @property - def style_src_elem(self) -> str | None: ... - @style_src_elem.setter - def style_src_elem(self, value: str | None) -> None: ... - @style_src_elem.deleter - def style_src_elem(self) -> None: ... - @property - def worker_src(self) -> str | None: ... - @worker_src.setter - def worker_src(self, value: str | None) -> None: ... - @worker_src.deleter - def worker_src(self) -> None: ... - provided: bool - def __init__( - self, - values: Mapping[str, str] | Iterable[tuple[str, str]] = (), - on_update: Callable[[ContentSecurityPolicy], None] | None = None, - ) -> None: ... - def _get_value(self, key: str) -> str | None: ... - def _set_value(self, key: str, value: str) -> None: ... - def _del_value(self, key: str) -> None: ... - def to_header(self) -> str: ... diff --git a/src/werkzeug/datastructures/etag.py b/src/werkzeug/datastructures/etag.py index 747d9966d..a4ef34245 100644 --- a/src/werkzeug/datastructures/etag.py +++ b/src/werkzeug/datastructures/etag.py @@ -1,14 +1,19 @@ from __future__ import annotations -from collections.abc import Collection +import collections.abc as cabc -class ETags(Collection): +class ETags(cabc.Collection[str]): """A set that can be used to check if one etag is present in a collection of etags. 
""" - def __init__(self, strong_etags=None, weak_etags=None, star_tag=False): + def __init__( + self, + strong_etags: cabc.Iterable[str] | None = None, + weak_etags: cabc.Iterable[str] | None = None, + star_tag: bool = False, + ): if not star_tag and strong_etags: self._strong = frozenset(strong_etags) else: @@ -17,7 +22,7 @@ def __init__(self, strong_etags=None, weak_etags=None, star_tag=False): self._weak = frozenset(weak_etags or ()) self.star_tag = star_tag - def as_set(self, include_weak=False): + def as_set(self, include_weak: bool = False) -> set[str]: """Convert the `ETags` object into a python set. Per default all the weak etags are not part of this set.""" rv = set(self._strong) @@ -25,19 +30,19 @@ def as_set(self, include_weak=False): rv.update(self._weak) return rv - def is_weak(self, etag): + def is_weak(self, etag: str) -> bool: """Check if an etag is weak.""" return etag in self._weak - def is_strong(self, etag): + def is_strong(self, etag: str) -> bool: """Check if an etag is strong.""" return etag in self._strong - def contains_weak(self, etag): + def contains_weak(self, etag: str) -> bool: """Check if an etag is part of the set including weak and strong tags.""" return self.is_weak(etag) or self.contains(etag) - def contains(self, etag): + def contains(self, etag: str) -> bool: """Check if an etag is part of the set ignoring weak tags. It is also possible to use the ``in`` operator. """ @@ -45,7 +50,7 @@ def contains(self, etag): return True return self.is_strong(etag) - def contains_raw(self, etag): + def contains_raw(self, etag: str) -> bool: """When passed a quoted tag it will check if this tag is part of the set. If the tag is weak it is checked against weak and strong tags, otherwise strong only.""" @@ -56,7 +61,7 @@ def contains_raw(self, etag): return self.contains_weak(etag) return self.contains(etag) - def to_header(self): + def to_header(self) -> str: """Convert the etags set into a HTTP header string.""" if self.star_tag: return "*" @@ -64,10 +69,16 @@ def to_header(self): [f'"{x}"' for x in self._strong] + [f'W/"{x}"' for x in self._weak] ) - def __call__(self, etag=None, data=None, include_weak=False): - if [etag, data].count(None) != 1: - raise TypeError("either tag or data required, but at least one") + def __call__( + self, + etag: str | None = None, + data: bytes | None = None, + include_weak: bool = False, + ) -> bool: if etag is None: + if data is None: + raise TypeError("'data' is required when 'etag' is not given.") + from ..http import generate_etag etag = generate_etag(data) @@ -76,20 +87,20 @@ def __call__(self, etag=None, data=None, include_weak=False): return True return etag in self._strong - def __bool__(self): + def __bool__(self) -> bool: return bool(self.star_tag or self._strong or self._weak) - def __str__(self): + def __str__(self) -> str: return self.to_header() - def __len__(self): + def __len__(self) -> int: return len(self._strong) - def __iter__(self): + def __iter__(self) -> cabc.Iterator[str]: return iter(self._strong) - def __contains__(self, etag): + def __contains__(self, etag: str) -> bool: # type: ignore[override] return self.contains(etag) - def __repr__(self): + def __repr__(self) -> str: return f"<{type(self).__name__} {str(self)!r}>" diff --git a/src/werkzeug/datastructures/etag.pyi b/src/werkzeug/datastructures/etag.pyi deleted file mode 100644 index 88e54f154..000000000 --- a/src/werkzeug/datastructures/etag.pyi +++ /dev/null @@ -1,30 +0,0 @@ -from collections.abc import Collection -from collections.abc import Iterable 
-from collections.abc import Iterator - -class ETags(Collection[str]): - _strong: frozenset[str] - _weak: frozenset[str] - star_tag: bool - def __init__( - self, - strong_etags: Iterable[str] | None = None, - weak_etags: Iterable[str] | None = None, - star_tag: bool = False, - ) -> None: ... - def as_set(self, include_weak: bool = False) -> set[str]: ... - def is_weak(self, etag: str) -> bool: ... - def is_strong(self, etag: str) -> bool: ... - def contains_weak(self, etag: str) -> bool: ... - def contains(self, etag: str) -> bool: ... - def contains_raw(self, etag: str) -> bool: ... - def to_header(self) -> str: ... - def __call__( - self, - etag: str | None = None, - data: bytes | None = None, - include_weak: bool = False, - ) -> bool: ... - def __len__(self) -> int: ... - def __iter__(self) -> Iterator[str]: ... - def __contains__(self, item: str) -> bool: ... # type: ignore diff --git a/src/werkzeug/datastructures/file_storage.py b/src/werkzeug/datastructures/file_storage.py index e878a56d4..123424477 100644 --- a/src/werkzeug/datastructures/file_storage.py +++ b/src/werkzeug/datastructures/file_storage.py @@ -1,11 +1,15 @@ from __future__ import annotations +import collections.abc as cabc import mimetypes +import os +import typing as t from io import BytesIO from os import fsdecode from os import fspath from .._internal import _plain_int +from .headers import Headers from .structures import MultiDict @@ -19,12 +23,12 @@ class FileStorage: def __init__( self, - stream=None, - filename=None, - name=None, - content_type=None, - content_length=None, - headers=None, + stream: t.IO[bytes] | None = None, + filename: str | None = None, + name: str | None = None, + content_type: str | None = None, + content_length: int | None = None, + headers: Headers | None = None, ): self.name = name self.stream = stream or BytesIO() @@ -46,8 +50,6 @@ def __init__( self.filename = filename if headers is None: - from .headers import Headers - headers = Headers() self.headers = headers if content_type is not None: @@ -55,17 +57,17 @@ def __init__( if content_length is not None: headers["Content-Length"] = str(content_length) - def _parse_content_type(self): + def _parse_content_type(self) -> None: if not hasattr(self, "_parsed_content_type"): self._parsed_content_type = http.parse_options_header(self.content_type) @property - def content_type(self): + def content_type(self) -> str | None: """The content-type sent in the header. Usually not available""" return self.headers.get("content-type") @property - def content_length(self): + def content_length(self) -> int: """The content-length sent in the header. Usually not available""" if "content-length" in self.headers: try: @@ -76,7 +78,7 @@ def content_length(self): return 0 @property - def mimetype(self): + def mimetype(self) -> str: """Like :attr:`content_type`, but without parameters (eg, without charset, type etc.) and always lowercase. For example if the content type is ``text/HTML; charset=utf-8`` the mimetype would be @@ -88,7 +90,7 @@ def mimetype(self): return self._parsed_content_type[0].lower() @property - def mimetype_params(self): + def mimetype_params(self) -> dict[str, str]: """The mimetype parameters as dict. For example if the content type is ``text/html; charset=utf-8`` the params would be ``{'charset': 'utf-8'}``. 
@@ -98,7 +100,9 @@ def mimetype_params(self): self._parse_content_type() return self._parsed_content_type[1] - def save(self, dst, buffer_size=16384): + def save( + self, dst: str | os.PathLike[str] | t.IO[bytes], buffer_size: int = 16384 + ) -> None: """Save the file to a destination path or file object. If the destination is a file object you have to close it yourself after the call. The buffer size is the number of bytes held in memory during @@ -131,35 +135,34 @@ def save(self, dst, buffer_size=16384): if close_dst: dst.close() - def close(self): + def close(self) -> None: """Close the underlying file if possible.""" try: self.stream.close() except Exception: pass - def __bool__(self): + def __bool__(self) -> bool: return bool(self.filename) - def __getattr__(self, name): + def __getattr__(self, name: str) -> t.Any: try: return getattr(self.stream, name) except AttributeError: - # SpooledTemporaryFile doesn't implement IOBase, get the - # attribute from its backing file instead. - # https://github.com/python/cpython/pull/3249 + # SpooledTemporaryFile on Python < 3.11 doesn't implement IOBase, + # get the attribute from its backing file instead. if hasattr(self.stream, "_file"): return getattr(self.stream._file, name) raise - def __iter__(self): + def __iter__(self) -> cabc.Iterator[bytes]: return iter(self.stream) - def __repr__(self): + def __repr__(self) -> str: return f"<{type(self).__name__}: {self.filename!r} ({self.content_type!r})>" -class FileMultiDict(MultiDict): +class FileMultiDict(MultiDict[str, FileStorage]): """A special :class:`MultiDict` that has convenience methods to add files to it. This is used for :class:`EnvironBuilder` and generally useful for unittesting. @@ -167,7 +170,13 @@ class FileMultiDict(MultiDict): .. versionadded:: 0.5 """ - def add_file(self, name, file, filename=None, content_type=None): + def add_file( + self, + name: str, + file: str | os.PathLike[str] | t.IO[bytes] | FileStorage, + filename: str | None = None, + content_type: str | None = None, + ) -> None: """Adds a new file to the dict. `file` can be a file name or a :class:`file`-like or a :class:`FileStorage` object. 
@@ -177,19 +186,23 @@ def add_file(self, name, file, filename=None, content_type=None): :param content_type: an optional content type """ if isinstance(file, FileStorage): - value = file + self.add(name, file) + return + + if isinstance(file, (str, os.PathLike)): + if filename is None: + filename = os.fspath(file) + + file_obj: t.IO[bytes] = open(file, "rb") else: - if isinstance(file, str): - if filename is None: - filename = file - file = open(file, "rb") - if filename and content_type is None: - content_type = ( - mimetypes.guess_type(filename)[0] or "application/octet-stream" - ) - value = FileStorage(file, filename, name, content_type) - - self.add(name, value) + file_obj = file # type: ignore[assignment] + + if filename and content_type is None: + content_type = ( + mimetypes.guess_type(filename)[0] or "application/octet-stream" + ) + + self.add(name, FileStorage(file_obj, filename, name, content_type)) # circular dependencies diff --git a/src/werkzeug/datastructures/file_storage.pyi b/src/werkzeug/datastructures/file_storage.pyi deleted file mode 100644 index 36a7ed9f2..000000000 --- a/src/werkzeug/datastructures/file_storage.pyi +++ /dev/null @@ -1,49 +0,0 @@ -from collections.abc import Iterator -from os import PathLike -from typing import Any -from typing import IO - -from .headers import Headers -from .structures import MultiDict - -class FileStorage: - name: str | None - stream: IO[bytes] - filename: str | None - headers: Headers - _parsed_content_type: tuple[str, dict[str, str]] - def __init__( - self, - stream: IO[bytes] | None = None, - filename: str | PathLike[str] | None = None, - name: str | None = None, - content_type: str | None = None, - content_length: int | None = None, - headers: Headers | None = None, - ) -> None: ... - def _parse_content_type(self) -> None: ... - @property - def content_type(self) -> str: ... - @property - def content_length(self) -> int: ... - @property - def mimetype(self) -> str: ... - @property - def mimetype_params(self) -> dict[str, str]: ... - def save( - self, dst: str | PathLike[str] | IO[bytes], buffer_size: int = ... - ) -> None: ... - def close(self) -> None: ... - def __bool__(self) -> bool: ... - def __getattr__(self, name: str) -> Any: ... - def __iter__(self) -> Iterator[bytes]: ... - def __repr__(self) -> str: ... - -class FileMultiDict(MultiDict[str, FileStorage]): - def add_file( - self, - name: str, - file: FileStorage | str | IO[bytes], - filename: str | None = None, - content_type: str | None = None, - ) -> None: ... diff --git a/src/werkzeug/datastructures/headers.py b/src/werkzeug/datastructures/headers.py index d9dd655c2..a23a0e0b1 100644 --- a/src/werkzeug/datastructures/headers.py +++ b/src/werkzeug/datastructures/headers.py @@ -1,5 +1,6 @@ from __future__ import annotations +import collections.abc as cabc import re import typing as t @@ -9,8 +10,14 @@ from .structures import iter_multi_items from .structures import MultiDict +if t.TYPE_CHECKING: + import typing_extensions as te + from _typeshed.wsgi import WSGIEnvironment -class Headers: +T = t.TypeVar("T") + + +class Headers(cabc.MutableMapping[str, str]): """An object that stores some headers. It has a dict-like interface, but is ordered, can store the same key multiple times, and iterating yields ``(key, value)`` pairs instead of only keys. @@ -47,41 +54,69 @@ class Headers: was an API that does not support the changes to the encoding model. 
""" - def __init__(self, defaults=None): - self._list = [] + def __init__( + self, + defaults: ( + Headers + | MultiDict[str, t.Any] + | cabc.Mapping[str, t.Any | cabc.Collection[t.Any]] + | cabc.Iterable[tuple[str, t.Any]] + | None + ) = None, + ) -> None: + self._list: list[tuple[str, str]] = [] + if defaults is not None: self.extend(defaults) - def __getitem__(self, key, _get_mode=False): - if not _get_mode: - if isinstance(key, int): - return self._list[key] - elif isinstance(key, slice): - return self.__class__(self._list[key]) - if not isinstance(key, str): - raise BadRequestKeyError(key) + @t.overload + def __getitem__(self, key: str) -> str: ... + @t.overload + def __getitem__(self, key: int) -> tuple[str, str]: ... + @t.overload + def __getitem__(self, key: slice) -> te.Self: ... + def __getitem__(self, key: str | int | slice) -> str | tuple[str, str] | te.Self: + if isinstance(key, str): + return self._get_key(key) + + if isinstance(key, int): + return self._list[key] + + return self.__class__(self._list[key]) + + def _get_key(self, key: str) -> str: ikey = key.lower() + for k, v in self._list: if k.lower() == ikey: return v - # micro optimization: if we are in get mode we will catch that - # exception one stack level down so we can raise a standard - # key error instead of our special one. - if _get_mode: - raise KeyError() - raise BadRequestKeyError(key) - - def __eq__(self, other): - def lowered(item): - return (item[0].lower(),) + item[1:] - - return other.__class__ is self.__class__ and set( - map(lowered, other._list) - ) == set(map(lowered, self._list)) - __hash__ = None + raise BadRequestKeyError(key) - def get(self, key, default=None, type=None): + def __eq__(self, other: object) -> bool: + if other.__class__ is not self.__class__: + return NotImplemented + + def lowered(item: tuple[str, ...]) -> tuple[str, ...]: + return item[0].lower(), *item[1:] + + return set(map(lowered, other._list)) == set(map(lowered, self._list)) # type: ignore[attr-defined] + + __hash__ = None # type: ignore[assignment] + + @t.overload # type: ignore[override] + def get(self, key: str) -> str | None: ... + @t.overload + def get(self, key: str, default: str) -> str: ... + @t.overload + def get(self, key: str, default: T) -> str | T: ... + @t.overload + def get(self, key: str, type: type[T]) -> T | None: ... + @t.overload + def get(self, key: str, default: T, type: type[T]) -> T: ... + def get( # type: ignore[misc] + self, key: str, default: str | T | None = None, type: type[T] | None = None + ) -> str | T | None: """Return the default value if the requested data doesn't exist. If `type` is provided and is a callable it should convert the value, return it or raise a :exc:`ValueError` if that is not possible. In @@ -107,17 +142,23 @@ def get(self, key, default=None, type=None): The ``as_bytes`` parameter was added. """ try: - rv = self.__getitem__(key, _get_mode=True) + rv = self._get_key(key) except KeyError: return default + if type is None: return rv + try: - return type(rv) + return type(rv) # type: ignore[call-arg] except ValueError: return default - def getlist(self, key, type=None): + @t.overload + def getlist(self, key: str) -> list[str]: ... + @t.overload + def getlist(self, key: str, type: type[T]) -> list[T]: ... + def getlist(self, key: str, type: type[T] | None = None) -> list[str] | list[T]: """Return the list of items for a given key. If that key is not in the :class:`Headers`, the return value will be an empty list. Just like :meth:`get`, :meth:`getlist` accepts a `type` parameter. 
All items will @@ -136,18 +177,22 @@ def getlist(self, key, type=None): The ``as_bytes`` parameter was added. """ ikey = key.lower() - result = [] - for k, v in self: - if k.lower() == ikey: - if type is not None: + + if type is not None: + result = [] + + for k, v in self: + if k.lower() == ikey: try: - v = type(v) + result.append(type(v)) # type: ignore[call-arg] except ValueError: continue - result.append(v) - return result - def get_all(self, name): + return result + + return [v for k, v in self if k.lower() == ikey] + + def get_all(self, name: str) -> list[str]: """Return a list of all the values for the named field. This method is compatible with the :mod:`wsgiref` @@ -155,21 +200,32 @@ def get_all(self, name): """ return self.getlist(name) - def items(self, lower=False): + def items(self, lower: bool = False) -> t.Iterable[tuple[str, str]]: # type: ignore[override] for key, value in self: if lower: key = key.lower() yield key, value - def keys(self, lower=False): + def keys(self, lower: bool = False) -> t.Iterable[str]: # type: ignore[override] for key, _ in self.items(lower): yield key - def values(self): + def values(self) -> t.Iterable[str]: # type: ignore[override] for _, value in self.items(): yield value - def extend(self, *args, **kwargs): + def extend( + self, + arg: ( + Headers + | MultiDict[str, t.Any] + | cabc.Mapping[str, t.Any | cabc.Collection[t.Any]] + | cabc.Iterable[tuple[str, t.Any]] + | None + ) = None, + /, + **kwargs: str, + ) -> None: """Extend headers in this object with items from another object containing header items as well as keyword arguments. @@ -183,35 +239,52 @@ def extend(self, *args, **kwargs): .. versionchanged:: 1.0 Support :class:`MultiDict`. Allow passing ``kwargs``. """ - if len(args) > 1: - raise TypeError(f"update expected at most 1 arguments, got {len(args)}") - - if args: - for key, value in iter_multi_items(args[0]): + if arg is not None: + for key, value in iter_multi_items(arg): self.add(key, value) for key, value in iter_multi_items(kwargs): self.add(key, value) - def __delitem__(self, key, _index_operation=True): - if _index_operation and isinstance(key, (int, slice)): - del self._list[key] + def __delitem__(self, key: str | int | slice) -> None: + if isinstance(key, str): + self._del_key(key) return + + del self._list[key] + + def _del_key(self, key: str) -> None: key = key.lower() new = [] + for k, v in self._list: if k.lower() != key: new.append((k, v)) + self._list[:] = new - def remove(self, key): + def remove(self, key: str) -> None: """Remove a key. :param key: The key to be removed. """ - return self.__delitem__(key, _index_operation=False) - - def pop(self, key=None, default=_missing): + return self._del_key(key) + + @t.overload + def pop(self) -> tuple[str, str]: ... + @t.overload + def pop(self, key: str) -> str: ... + @t.overload + def pop(self, key: int | None = ...) -> tuple[str, str]: ... + @t.overload + def pop(self, key: str, default: str) -> str: ... + @t.overload + def pop(self, key: str, default: T) -> str | T: ... + def pop( + self, + key: str | int | None = None, + default: str | T = _missing, # type: ignore[assignment] + ) -> str | tuple[str, str] | T: """Removes and returns a key or index. :param key: The key to be popped. 
If this is an integer the item at @@ -222,37 +295,42 @@ def pop(self, key=None, default=_missing): """ if key is None: return self._list.pop() + if isinstance(key, int): return self._list.pop(key) + try: - rv = self[key] - self.remove(key) + rv = self._get_key(key) except KeyError: if default is not _missing: return default + raise + + self.remove(key) return rv - def popitem(self): + def popitem(self) -> tuple[str, str]: """Removes a key or index and returns a (key, value) item.""" - return self.pop() + return self._list.pop() - def __contains__(self, key): + def __contains__(self, key: str) -> bool: # type: ignore[override] """Check if a key is present.""" try: - self.__getitem__(key, _get_mode=True) + self._get_key(key) except KeyError: return False + return True - def __iter__(self): + def __iter__(self) -> t.Iterator[tuple[str, str]]: # type: ignore[override] """Yield ``(key, value)`` tuples.""" return iter(self._list) - def __len__(self): + def __len__(self) -> int: return len(self._list) - def add(self, _key, _value, **kw): + def add(self, key: str, value: t.Any, /, **kwargs: t.Any) -> None: """Add a new header tuple to the list. Keyword arguments can specify additional parameters for the header @@ -265,27 +343,28 @@ def add(self, _key, _value, **kw): The keyword argument dumping uses :func:`dump_options_header` behind the scenes. - .. versionadded:: 0.4.1 + .. versionchanged:: 0.4.1 keyword arguments were added for :mod:`wsgiref` compatibility. """ - if kw: - _value = _options_header_vkw(_value, kw) - _value = _str_header_value(_value) - self._list.append((_key, _value)) + if kwargs: + value = _options_header_vkw(value, kwargs) - def add_header(self, _key, _value, **_kw): + value_str = _str_header_value(value) + self._list.append((key, value_str)) + + def add_header(self, key: str, value: t.Any, /, **kwargs: t.Any) -> None: """Add a new header tuple to the list. An alias for :meth:`add` for compatibility with the :mod:`wsgiref` :meth:`~wsgiref.headers.Headers.add_header` method. """ - self.add(_key, _value, **_kw) + self.add(key, value, **kwargs) - def clear(self): + def clear(self) -> None: """Clears all headers.""" - del self._list[:] + self._list.clear() - def set(self, _key, _value, **kw): + def set(self, key: str, value: t.Any, /, **kwargs: t.Any) -> None: """Remove all header tuples for `key` and add a new one. The newly added key either appears at the end of the list if there was no entry or replaces the first one. @@ -300,25 +379,32 @@ def set(self, _key, _value, **kw): :param key: The key to be inserted. :param value: The value to be inserted. 
""" - if kw: - _value = _options_header_vkw(_value, kw) - _value = _str_header_value(_value) + if kwargs: + value = _options_header_vkw(value, kwargs) + + value_str = _str_header_value(value) + if not self._list: - self._list.append((_key, _value)) + self._list.append((key, value_str)) return - listiter = iter(self._list) - ikey = _key.lower() - for idx, (old_key, _old_value) in enumerate(listiter): + + iter_list = iter(self._list) + ikey = key.lower() + + for idx, (old_key, _) in enumerate(iter_list): if old_key.lower() == ikey: # replace first occurrence - self._list[idx] = (_key, _value) + self._list[idx] = (key, value_str) break else: - self._list.append((_key, _value)) + # no existing occurrences + self._list.append((key, value_str)) return - self._list[idx + 1 :] = [t for t in listiter if t[0].lower() != ikey] - def setlist(self, key, values): + # remove remaining occurrences + self._list[idx + 1 :] = [t for t in iter_list if t[0].lower() != ikey] + + def setlist(self, key: str, values: cabc.Iterable[t.Any]) -> None: """Remove any existing values for a header and add new ones. :param key: The header key to set. @@ -335,7 +421,7 @@ def setlist(self, key, values): else: self.remove(key) - def setdefault(self, key, default): + def setdefault(self, key: str, default: t.Any) -> str: """Return the first value for the key if it is in the headers, otherwise set the header to the value given by ``default`` and return that. @@ -344,13 +430,15 @@ def setdefault(self, key, default): :param default: The value to set for the key if it is not in the headers. """ - if key in self: - return self[key] + try: + return self._get_key(key) + except KeyError: + pass self.set(key, default) - return default + return self._get_key(key) - def setlistdefault(self, key, default): + def setlistdefault(self, key: str, default: cabc.Iterable[t.Any]) -> list[str]: """Return the list of values for the key if it is in the headers, otherwise set the header to the list of values given by ``default`` and return that. @@ -369,20 +457,39 @@ def setlistdefault(self, key, default): return self.getlist(key) - def __setitem__(self, key, value): + @t.overload + def __setitem__(self, key: str, value: t.Any) -> None: ... + @t.overload + def __setitem__(self, key: int, value: tuple[str, t.Any]) -> None: ... + @t.overload + def __setitem__( + self, key: slice, value: cabc.Iterable[tuple[str, t.Any]] + ) -> None: ... + def __setitem__( + self, + key: str | int | slice, + value: t.Any | tuple[str, t.Any] | cabc.Iterable[tuple[str, t.Any]], + ) -> None: """Like :meth:`set` but also supports index/slice based setting.""" - if isinstance(key, (slice, int)): - if isinstance(key, int): - value = [value] - value = [(k, _str_header_value(v)) for (k, v) in value] - if isinstance(key, int): - self._list[key] = value[0] - else: - self._list[key] = value - else: + if isinstance(key, str): self.set(key, value) - - def update(self, *args, **kwargs): + elif isinstance(key, int): + self._list[key] = value[0], _str_header_value(value[1]) # type: ignore[index] + else: + self._list[key] = [(k, _str_header_value(v)) for k, v in value] # type: ignore[misc] + + def update( # type: ignore[override] + self, + arg: ( + Headers + | MultiDict[str, t.Any] + | cabc.Mapping[str, t.Any | cabc.Collection[t.Any]] + | cabc.Iterable[tuple[str, t.Any]] + | None + ) = None, + /, + **kwargs: t.Any | cabc.Collection[t.Any], + ) -> None: """Replace headers in this object with items from another headers object and keyword arguments. 
@@ -395,57 +502,54 @@ def update(self, *args, **kwargs): .. versionadded:: 1.0 """ - if len(args) > 1: - raise TypeError(f"update expected at most 1 arguments, got {len(args)}") - - if args: - mapping = args[0] - - if isinstance(mapping, (Headers, MultiDict)): - for key in mapping.keys(): - self.setlist(key, mapping.getlist(key)) - elif isinstance(mapping, dict): - for key, value in mapping.items(): - if isinstance(value, (list, tuple)): + if arg is not None: + if isinstance(arg, (Headers, MultiDict)): + for key in arg.keys(): + self.setlist(key, arg.getlist(key)) + elif isinstance(arg, cabc.Mapping): + for key, value in arg.items(): + if isinstance(value, cabc.Collection) and not isinstance( + value, str + ): self.setlist(key, value) else: self.set(key, value) else: - for key, value in mapping: + for key, value in arg: self.set(key, value) for key, value in kwargs.items(): - if isinstance(value, (list, tuple)): + if isinstance(value, cabc.Collection) and not isinstance(value, str): self.setlist(key, value) else: self.set(key, value) - def to_wsgi_list(self): + def to_wsgi_list(self) -> list[tuple[str, str]]: """Convert the headers into a list suitable for WSGI. :return: list """ - return list(self) + return list(self) # type: ignore[arg-type] - def copy(self): + def copy(self) -> te.Self: return self.__class__(self._list) - def __copy__(self): + def __copy__(self) -> te.Self: return self.copy() - def __str__(self): + def __str__(self) -> str: """Returns formatted headers suitable for HTTP transmission.""" strs = [] - for key, value in self.to_wsgi_list(): + for key, value in self._list: strs.append(f"{key}: {value}") strs.append("\r\n") return "\r\n".join(strs) - def __repr__(self): - return f"{type(self).__name__}({list(self)!r})" + def __repr__(self) -> str: + return f"{type(self).__name__}({self._list!r})" -def _options_header_vkw(value: str, kw: dict[str, t.Any]): +def _options_header_vkw(value: str, kw: dict[str, t.Any]) -> str: return http.dump_options_header( value, {k.replace("_", "-"): v for k, v in kw.items()} ) @@ -461,10 +565,10 @@ def _str_header_value(value: t.Any) -> str: if _newline_re.search(value) is not None: raise ValueError("Header values must not contain newline characters.") - return value + return value # type: ignore[no-any-return] -class EnvironHeaders(ImmutableHeadersMixin, Headers): +class EnvironHeaders(ImmutableHeadersMixin, Headers): # type: ignore[misc] """Read only version of the headers from a WSGI environment. This provides the same interface as `Headers` and is constructed from a WSGI environment. @@ -474,30 +578,36 @@ class EnvironHeaders(ImmutableHeadersMixin, Headers): HTTP exceptions. """ - def __init__(self, environ): + def __init__(self, environ: WSGIEnvironment) -> None: + super().__init__() self.environ = environ - def __eq__(self, other): + def __eq__(self, other: object) -> bool: + if not isinstance(other, EnvironHeaders): + return NotImplemented + return self.environ is other.environ - __hash__ = None + __hash__ = None # type: ignore[assignment] + + def __getitem__(self, key: str) -> str: # type: ignore[override] + return self._get_key(key) - def __getitem__(self, key, _get_mode=False): - # _get_mode is a no-op for this class as there is no index but - # used because get() calls it. 
+ def _get_key(self, key: str) -> str: if not isinstance(key, str): - raise KeyError(key) + raise BadRequestKeyError(key) + key = key.upper().replace("-", "_") + if key in {"CONTENT_TYPE", "CONTENT_LENGTH"}: - return self.environ[key] - return self.environ[f"HTTP_{key}"] + return self.environ[key] # type: ignore[no-any-return] + + return self.environ[f"HTTP_{key}"] # type: ignore[no-any-return] - def __len__(self): - # the iter is necessary because otherwise list calls our - # len which would call list again and so forth. - return len(list(iter(self))) + def __len__(self) -> int: + return sum(1 for _ in self) - def __iter__(self): + def __iter__(self) -> cabc.Iterator[tuple[str, str]]: # type: ignore[override] for key, value in self.environ.items(): if key.startswith("HTTP_") and key not in { "HTTP_CONTENT_TYPE", @@ -507,7 +617,7 @@ def __iter__(self): elif key in {"CONTENT_TYPE", "CONTENT_LENGTH"} and value: yield key.replace("_", "-").title(), value - def copy(self): + def copy(self) -> t.NoReturn: raise TypeError(f"cannot create {type(self).__name__!r} copies") diff --git a/src/werkzeug/datastructures/headers.pyi b/src/werkzeug/datastructures/headers.pyi deleted file mode 100644 index 86502221a..000000000 --- a/src/werkzeug/datastructures/headers.pyi +++ /dev/null @@ -1,109 +0,0 @@ -from collections.abc import Callable -from collections.abc import Iterable -from collections.abc import Iterator -from collections.abc import Mapping -from typing import Literal -from typing import NoReturn -from typing import overload -from typing import TypeVar - -from _typeshed import SupportsKeysAndGetItem -from _typeshed.wsgi import WSGIEnvironment - -from .mixins import ImmutableHeadersMixin - -D = TypeVar("D") -T = TypeVar("T") - -class Headers(dict[str, str]): - _list: list[tuple[str, str]] - def __init__( - self, - defaults: Mapping[str, str | Iterable[str]] - | Iterable[tuple[str, str]] - | None = None, - ) -> None: ... - @overload - def __getitem__(self, key: str) -> str: ... - @overload - def __getitem__(self, key: int) -> tuple[str, str]: ... - @overload - def __getitem__(self, key: slice) -> Headers: ... - @overload - def __getitem__(self, key: str, _get_mode: Literal[True] = ...) -> str: ... - def __eq__(self, other: object) -> bool: ... - @overload # type: ignore - def get(self, key: str, default: str) -> str: ... - @overload - def get(self, key: str, default: str | None = None) -> str | None: ... - @overload - def get( - self, key: str, default: T | None = None, type: Callable[[str], T] = ... - ) -> T | None: ... - @overload - def getlist(self, key: str) -> list[str]: ... - @overload - def getlist(self, key: str, type: Callable[[str], T]) -> list[T]: ... - def get_all(self, name: str) -> list[str]: ... - def items( # type: ignore - self, lower: bool = False - ) -> Iterator[tuple[str, str]]: ... - def keys(self, lower: bool = False) -> Iterator[str]: ... # type: ignore - def values(self) -> Iterator[str]: ... # type: ignore - def extend( - self, - *args: Mapping[str, str | Iterable[str]] | Iterable[tuple[str, str]], - **kwargs: str | Iterable[str], - ) -> None: ... - @overload - def __delitem__(self, key: str | int | slice) -> None: ... - @overload - def __delitem__(self, key: str, _index_operation: Literal[False]) -> None: ... - def remove(self, key: str) -> None: ... - @overload # type: ignore - def pop(self, key: str, default: str | None = None) -> str: ... - @overload - def pop( - self, key: int | None = None, default: tuple[str, str] | None = None - ) -> tuple[str, str]: ... 
- def popitem(self) -> tuple[str, str]: ... - def __contains__(self, key: str) -> bool: ... # type: ignore - def has_key(self, key: str) -> bool: ... - def __iter__(self) -> Iterator[tuple[str, str]]: ... # type: ignore - def add(self, _key: str, _value: str, **kw: str) -> None: ... - def _validate_value(self, value: str) -> None: ... - def add_header(self, _key: str, _value: str, **_kw: str) -> None: ... - def clear(self) -> None: ... - def set(self, _key: str, _value: str, **kw: str) -> None: ... - def setlist(self, key: str, values: Iterable[str]) -> None: ... - def setdefault(self, key: str, default: str) -> str: ... - def setlistdefault(self, key: str, default: Iterable[str]) -> None: ... - @overload - def __setitem__(self, key: str, value: str) -> None: ... - @overload - def __setitem__(self, key: int, value: tuple[str, str]) -> None: ... - @overload - def __setitem__(self, key: slice, value: Iterable[tuple[str, str]]) -> None: ... - @overload - def update( - self, __m: SupportsKeysAndGetItem[str, str], **kwargs: str | Iterable[str] - ) -> None: ... - @overload - def update( - self, __m: Iterable[tuple[str, str]], **kwargs: str | Iterable[str] - ) -> None: ... - @overload - def update(self, **kwargs: str | Iterable[str]) -> None: ... - def to_wsgi_list(self) -> list[tuple[str, str]]: ... - def copy(self) -> Headers: ... - def __copy__(self) -> Headers: ... - -class EnvironHeaders(ImmutableHeadersMixin, Headers): - environ: WSGIEnvironment - def __init__(self, environ: WSGIEnvironment) -> None: ... - def __eq__(self, other: object) -> bool: ... - def __getitem__( # type: ignore - self, key: str, _get_mode: Literal[False] = False - ) -> str: ... - def __iter__(self) -> Iterator[tuple[str, str]]: ... # type: ignore - def copy(self) -> NoReturn: ... 
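The inlined annotations above keep the documented ``Headers`` semantics: lookups match keys case-insensitively, ``add()`` appends while ``set()`` replaces every existing occurrence, and keyword arguments are still expanded through ``dump_options_header``. A quick doctest-style sketch of that behaviour (the exact reprs are illustrative, not part of the patch):

>>> from werkzeug.datastructures import Headers
>>> h = Headers()
>>> h.add("Content-Type", "text/plain")
>>> h["content-type"]                       # case-insensitive lookup
'text/plain'
>>> h.add("Content-Disposition", "attachment", filename="report.txt")
>>> h["Content-Disposition"]                # kwargs go through dump_options_header
'attachment; filename=report.txt'
>>> h.add("X-Tag", "a")
>>> h.add("X-Tag", "b")
>>> h.set("X-Tag", "c")                     # replaces the first value, drops the rest
>>> h.getlist("X-Tag")
['c']
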
diff --git a/src/werkzeug/datastructures/mixins.py b/src/werkzeug/datastructures/mixins.py index 2c84ca8f2..76324d5a2 100644 --- a/src/werkzeug/datastructures/mixins.py +++ b/src/werkzeug/datastructures/mixins.py @@ -1,11 +1,22 @@ from __future__ import annotations +import collections.abc as cabc +import typing as t +from functools import update_wrapper from itertools import repeat from .._internal import _missing +if t.TYPE_CHECKING: + import typing_extensions as te -def is_immutable(self): +K = t.TypeVar("K") +V = t.TypeVar("V") +T = t.TypeVar("T") +F = t.TypeVar("F", bound=cabc.Callable[..., t.Any]) + + +def _immutable_error(self: t.Any) -> t.NoReturn: raise TypeError(f"{type(self).__name__!r} objects are immutable") @@ -17,52 +28,52 @@ class ImmutableListMixin: :private: """ - _hash_cache = None + _hash_cache: int | None = None - def __hash__(self): + def __hash__(self) -> int: if self._hash_cache is not None: return self._hash_cache - rv = self._hash_cache = hash(tuple(self)) + rv = self._hash_cache = hash(tuple(self)) # type: ignore[arg-type] return rv - def __reduce_ex__(self, protocol): - return type(self), (list(self),) + def __reduce_ex__(self, protocol: t.SupportsIndex) -> t.Any: + return type(self), (list(self),) # type: ignore[call-overload] - def __delitem__(self, key): - is_immutable(self) + def __delitem__(self, key: t.Any) -> t.NoReturn: + _immutable_error(self) - def __iadd__(self, other): - is_immutable(self) + def __iadd__(self, other: t.Any) -> t.NoReturn: + _immutable_error(self) - def __imul__(self, other): - is_immutable(self) + def __imul__(self, other: t.Any) -> t.NoReturn: + _immutable_error(self) - def __setitem__(self, key, value): - is_immutable(self) + def __setitem__(self, key: t.Any, value: t.Any) -> t.NoReturn: + _immutable_error(self) - def append(self, item): - is_immutable(self) + def append(self, item: t.Any) -> t.NoReturn: + _immutable_error(self) - def remove(self, item): - is_immutable(self) + def remove(self, item: t.Any) -> t.NoReturn: + _immutable_error(self) - def extend(self, iterable): - is_immutable(self) + def extend(self, iterable: t.Any) -> t.NoReturn: + _immutable_error(self) - def insert(self, pos, value): - is_immutable(self) + def insert(self, pos: t.Any, value: t.Any) -> t.NoReturn: + _immutable_error(self) - def pop(self, index=-1): - is_immutable(self) + def pop(self, index: t.Any = -1) -> t.NoReturn: + _immutable_error(self) - def reverse(self): - is_immutable(self) + def reverse(self: t.Any) -> t.NoReturn: + _immutable_error(self) - def sort(self, key=None, reverse=False): - is_immutable(self) + def sort(self, key: t.Any = None, reverse: t.Any = False) -> t.NoReturn: + _immutable_error(self) -class ImmutableDictMixin: +class ImmutableDictMixin(t.Generic[K, V]): """Makes a :class:`dict` immutable. .. versionadded:: 0.5 @@ -70,49 +81,59 @@ class ImmutableDictMixin: :private: """ - _hash_cache = None + _hash_cache: int | None = None @classmethod - def fromkeys(cls, keys, value=None): + @t.overload + def fromkeys( + cls, keys: cabc.Iterable[K], value: None + ) -> ImmutableDictMixin[K, t.Any | None]: ... + @classmethod + @t.overload + def fromkeys(cls, keys: cabc.Iterable[K], value: V) -> ImmutableDictMixin[K, V]: ... 
+ @classmethod + def fromkeys( + cls, keys: cabc.Iterable[K], value: V | None = None + ) -> ImmutableDictMixin[K, t.Any | None] | ImmutableDictMixin[K, V]: instance = super().__new__(cls) - instance.__init__(zip(keys, repeat(value))) + instance.__init__(zip(keys, repeat(value))) # type: ignore[misc] return instance - def __reduce_ex__(self, protocol): - return type(self), (dict(self),) + def __reduce_ex__(self, protocol: t.SupportsIndex) -> t.Any: + return type(self), (dict(self),) # type: ignore[call-overload] - def _iter_hashitems(self): - return self.items() + def _iter_hashitems(self) -> t.Iterable[t.Any]: + return self.items() # type: ignore[attr-defined,no-any-return] - def __hash__(self): + def __hash__(self) -> int: if self._hash_cache is not None: return self._hash_cache rv = self._hash_cache = hash(frozenset(self._iter_hashitems())) return rv - def setdefault(self, key, default=None): - is_immutable(self) + def setdefault(self, key: t.Any, default: t.Any = None) -> t.NoReturn: + _immutable_error(self) - def update(self, *args, **kwargs): - is_immutable(self) + def update(self, arg: t.Any, /, **kwargs: t.Any) -> t.NoReturn: + _immutable_error(self) - def pop(self, key, default=None): - is_immutable(self) + def pop(self, key: t.Any, default: t.Any = None) -> t.NoReturn: + _immutable_error(self) - def popitem(self): - is_immutable(self) + def popitem(self) -> t.NoReturn: + _immutable_error(self) - def __setitem__(self, key, value): - is_immutable(self) + def __setitem__(self, key: t.Any, value: t.Any) -> t.NoReturn: + _immutable_error(self) - def __delitem__(self, key): - is_immutable(self) + def __delitem__(self, key: t.Any) -> t.NoReturn: + _immutable_error(self) - def clear(self): - is_immutable(self) + def clear(self) -> t.NoReturn: + _immutable_error(self) -class ImmutableMultiDictMixin(ImmutableDictMixin): +class ImmutableMultiDictMixin(ImmutableDictMixin[K, V]): """Makes a :class:`MultiDict` immutable. .. 
versionadded:: 0.5 @@ -120,26 +141,26 @@ class ImmutableMultiDictMixin(ImmutableDictMixin): :private: """ - def __reduce_ex__(self, protocol): - return type(self), (list(self.items(multi=True)),) + def __reduce_ex__(self, protocol: t.SupportsIndex) -> t.Any: + return type(self), (list(self.items(multi=True)),) # type: ignore[attr-defined] - def _iter_hashitems(self): - return self.items(multi=True) + def _iter_hashitems(self) -> t.Iterable[t.Any]: + return self.items(multi=True) # type: ignore[attr-defined,no-any-return] - def add(self, key, value): - is_immutable(self) + def add(self, key: t.Any, value: t.Any) -> t.NoReturn: + _immutable_error(self) - def popitemlist(self): - is_immutable(self) + def popitemlist(self) -> t.NoReturn: + _immutable_error(self) - def poplist(self, key): - is_immutable(self) + def poplist(self, key: t.Any) -> t.NoReturn: + _immutable_error(self) - def setlist(self, key, new_list): - is_immutable(self) + def setlist(self, key: t.Any, new_list: t.Any) -> t.NoReturn: + _immutable_error(self) - def setlistdefault(self, key, default_list=None): - is_immutable(self) + def setlistdefault(self, key: t.Any, default_list: t.Any = None) -> t.NoReturn: + _immutable_error(self) class ImmutableHeadersMixin: @@ -152,63 +173,64 @@ class ImmutableHeadersMixin: :private: """ - def __delitem__(self, key, **kwargs): - is_immutable(self) + def __delitem__(self, key: t.Any, **kwargs: t.Any) -> t.NoReturn: + _immutable_error(self) - def __setitem__(self, key, value): - is_immutable(self) + def __setitem__(self, key: t.Any, value: t.Any) -> t.NoReturn: + _immutable_error(self) - def set(self, _key, _value, **kwargs): - is_immutable(self) + def set(self, key: t.Any, value: t.Any, /, **kwargs: t.Any) -> t.NoReturn: + _immutable_error(self) - def setlist(self, key, values): - is_immutable(self) + def setlist(self, key: t.Any, values: t.Any) -> t.NoReturn: + _immutable_error(self) - def add(self, _key, _value, **kwargs): - is_immutable(self) + def add(self, key: t.Any, value: t.Any, /, **kwargs: t.Any) -> t.NoReturn: + _immutable_error(self) - def add_header(self, _key, _value, **_kwargs): - is_immutable(self) + def add_header(self, key: t.Any, value: t.Any, /, **kwargs: t.Any) -> t.NoReturn: + _immutable_error(self) - def remove(self, key): - is_immutable(self) + def remove(self, key: t.Any) -> t.NoReturn: + _immutable_error(self) - def extend(self, *args, **kwargs): - is_immutable(self) + def extend(self, arg: t.Any, /, **kwargs: t.Any) -> t.NoReturn: + _immutable_error(self) - def update(self, *args, **kwargs): - is_immutable(self) + def update(self, arg: t.Any, /, **kwargs: t.Any) -> t.NoReturn: + _immutable_error(self) - def insert(self, pos, value): - is_immutable(self) + def insert(self, pos: t.Any, value: t.Any) -> t.NoReturn: + _immutable_error(self) - def pop(self, key=None, default=_missing): - is_immutable(self) + def pop(self, key: t.Any = None, default: t.Any = _missing) -> t.NoReturn: + _immutable_error(self) - def popitem(self): - is_immutable(self) + def popitem(self) -> t.NoReturn: + _immutable_error(self) - def setdefault(self, key, default): - is_immutable(self) + def setdefault(self, key: t.Any, default: t.Any) -> t.NoReturn: + _immutable_error(self) - def setlistdefault(self, key, default): - is_immutable(self) + def setlistdefault(self, key: t.Any, default: t.Any) -> t.NoReturn: + _immutable_error(self) -def _calls_update(name): - def oncall(self, *args, **kw): - rv = getattr(super(UpdateDictMixin, self), name)(*args, **kw) +def _always_update(f: F) -> F: + def 
wrapper( + self: UpdateDictMixin[t.Any, t.Any], /, *args: t.Any, **kwargs: t.Any + ) -> t.Any: + rv = f(self, *args, **kwargs) if self.on_update is not None: self.on_update(self) return rv - oncall.__name__ = name - return oncall + return update_wrapper(wrapper, f) # type: ignore[return-value] -class UpdateDictMixin(dict): +class UpdateDictMixin(dict[K, V]): """Makes dicts call `self.on_update` on modifications. .. versionadded:: 0.5 @@ -216,27 +238,59 @@ class UpdateDictMixin(dict): :private: """ - on_update = None + on_update: cabc.Callable[[te.Self], None] | None = None - def setdefault(self, key, default=None): + def setdefault(self: te.Self, key: K, default: V | None = None) -> V: modified = key not in self - rv = super().setdefault(key, default) + rv = super().setdefault(key, default) # type: ignore[arg-type] if modified and self.on_update is not None: self.on_update(self) return rv - def pop(self, key, default=_missing): + @t.overload + def pop(self: te.Self, key: K) -> V: ... + @t.overload + def pop(self: te.Self, key: K, default: V) -> V: ... + @t.overload + def pop(self: te.Self, key: K, default: T) -> T: ... + def pop( + self: te.Self, + key: K, + default: V | T = _missing, # type: ignore[assignment] + ) -> V | T: modified = key in self if default is _missing: rv = super().pop(key) else: - rv = super().pop(key, default) + rv = super().pop(key, default) # type: ignore[arg-type] if modified and self.on_update is not None: self.on_update(self) return rv - __setitem__ = _calls_update("__setitem__") - __delitem__ = _calls_update("__delitem__") - clear = _calls_update("clear") - popitem = _calls_update("popitem") - update = _calls_update("update") + @_always_update + def __setitem__(self, key: K, value: V) -> None: + super().__setitem__(key, value) + + @_always_update + def __delitem__(self, key: K) -> None: + super().__delitem__(key) + + @_always_update + def clear(self) -> None: + super().clear() + + @_always_update + def popitem(self) -> tuple[K, V]: + return super().popitem() + + @_always_update + def update( # type: ignore[override] + self, + arg: cabc.Mapping[K, V] | cabc.Iterable[tuple[K, V]] | None = None, + /, + **kwargs: V, + ) -> None: + if arg is None: + super().update(**kwargs) + else: + super().update(arg, **kwargs) diff --git a/src/werkzeug/datastructures/mixins.pyi b/src/werkzeug/datastructures/mixins.pyi deleted file mode 100644 index 40453f703..000000000 --- a/src/werkzeug/datastructures/mixins.pyi +++ /dev/null @@ -1,97 +0,0 @@ -from collections.abc import Callable -from collections.abc import Hashable -from collections.abc import Iterable -from typing import Any -from typing import NoReturn -from typing import overload -from typing import SupportsIndex -from typing import TypeVar - -from _typeshed import SupportsKeysAndGetItem - -from .headers import Headers - -K = TypeVar("K") -T = TypeVar("T") -V = TypeVar("V") - -def is_immutable(self: object) -> NoReturn: ... - -class ImmutableListMixin(list[V]): - _hash_cache: int | None - def __hash__(self) -> int: ... # type: ignore - def __delitem__(self, key: SupportsIndex | slice) -> NoReturn: ... - def __iadd__(self, other: Any) -> NoReturn: ... # type: ignore - def __imul__(self, other: SupportsIndex) -> NoReturn: ... - def __setitem__(self, key: int | slice, value: V) -> NoReturn: ... # type: ignore - def append(self, value: V) -> NoReturn: ... - def remove(self, value: V) -> NoReturn: ... - def extend(self, values: Iterable[V]) -> NoReturn: ... - def insert(self, pos: SupportsIndex, value: V) -> NoReturn: ... 
- def pop(self, index: SupportsIndex = -1) -> NoReturn: ... - def reverse(self) -> NoReturn: ... - def sort( - self, key: Callable[[V], Any] | None = None, reverse: bool = False - ) -> NoReturn: ... - -class ImmutableDictMixin(dict[K, V]): - _hash_cache: int | None - @classmethod - def fromkeys( # type: ignore - cls, keys: Iterable[K], value: V | None = None - ) -> ImmutableDictMixin[K, V]: ... - def _iter_hashitems(self) -> Iterable[Hashable]: ... - def __hash__(self) -> int: ... # type: ignore - def setdefault(self, key: K, default: V | None = None) -> NoReturn: ... - def update(self, *args: Any, **kwargs: V) -> NoReturn: ... - def pop(self, key: K, default: V | None = None) -> NoReturn: ... # type: ignore - def popitem(self) -> NoReturn: ... - def __setitem__(self, key: K, value: V) -> NoReturn: ... - def __delitem__(self, key: K) -> NoReturn: ... - def clear(self) -> NoReturn: ... - -class ImmutableMultiDictMixin(ImmutableDictMixin[K, V]): - def _iter_hashitems(self) -> Iterable[Hashable]: ... - def add(self, key: K, value: V) -> NoReturn: ... - def popitemlist(self) -> NoReturn: ... - def poplist(self, key: K) -> NoReturn: ... - def setlist(self, key: K, new_list: Iterable[V]) -> NoReturn: ... - def setlistdefault( - self, key: K, default_list: Iterable[V] | None = None - ) -> NoReturn: ... - -class ImmutableHeadersMixin(Headers): - def __delitem__(self, key: Any, _index_operation: bool = True) -> NoReturn: ... - def __setitem__(self, key: Any, value: Any) -> NoReturn: ... - def set(self, _key: Any, _value: Any, **kw: Any) -> NoReturn: ... - def setlist(self, key: Any, values: Any) -> NoReturn: ... - def add(self, _key: Any, _value: Any, **kw: Any) -> NoReturn: ... - def add_header(self, _key: Any, _value: Any, **_kw: Any) -> NoReturn: ... - def remove(self, key: Any) -> NoReturn: ... - def extend(self, *args: Any, **kwargs: Any) -> NoReturn: ... - def update(self, *args: Any, **kwargs: Any) -> NoReturn: ... - def insert(self, pos: Any, value: Any) -> NoReturn: ... - def pop(self, key: Any = None, default: Any = ...) -> NoReturn: ... - def popitem(self) -> NoReturn: ... - def setdefault(self, key: Any, default: Any) -> NoReturn: ... - def setlistdefault(self, key: Any, default: Any) -> NoReturn: ... - -def _calls_update(name: str) -> Callable[[UpdateDictMixin[K, V]], Any]: ... - -class UpdateDictMixin(dict[K, V]): - on_update: Callable[[UpdateDictMixin[K, V] | None, None], None] - def setdefault(self, key: K, default: V | None = None) -> V: ... - @overload - def pop(self, key: K) -> V: ... - @overload - def pop(self, key: K, default: V | T = ...) -> V | T: ... - def __setitem__(self, key: K, value: V) -> None: ... - def __delitem__(self, key: K) -> None: ... - def clear(self) -> None: ... - def popitem(self) -> tuple[K, V]: ... - @overload - def update(self, __m: SupportsKeysAndGetItem[K, V], **kwargs: V) -> None: ... - @overload - def update(self, __m: Iterable[tuple[K, V]], **kwargs: V) -> None: ... - @overload - def update(self, **kwargs: V) -> None: ... 
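The ``_always_update`` wrapper and the explicit method overrides give ``UpdateDictMixin`` subclasses the same notification behaviour as the old ``_calls_update`` factory: ``on_update`` fires after every mutating call, while ``setdefault`` and ``pop`` only notify when they actually change the dict. A small sketch using ``CallbackDict`` (defined further down in ``structures.py``); the printed output is what the code above implies:

>>> from werkzeug.datastructures import CallbackDict
>>> d = CallbackDict({"a": 1}, on_update=lambda _: print("changed"))
>>> d["b"] = 2
changed
>>> d.setdefault("a", 0)    # key already present: no notification
1
>>> d.pop("missing", None)  # key absent: no notification either
>>> del d["b"]
changed
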
diff --git a/src/werkzeug/datastructures/range.py b/src/werkzeug/datastructures/range.py index 7011ea4ae..4c9f67d44 100644 --- a/src/werkzeug/datastructures/range.py +++ b/src/werkzeug/datastructures/range.py @@ -1,5 +1,14 @@ from __future__ import annotations +import collections.abc as cabc +import typing as t +from datetime import datetime + +if t.TYPE_CHECKING: + import typing_extensions as te + +T = t.TypeVar("T") + class IfRange: """Very simple object that represents the `If-Range` header in parsed @@ -9,14 +18,14 @@ class IfRange: .. versionadded:: 0.7 """ - def __init__(self, etag=None, date=None): + def __init__(self, etag: str | None = None, date: datetime | None = None): #: The etag parsed and unquoted. Ranges always operate on strong #: etags so the weakness information is not necessary. self.etag = etag #: The date in parsed format or `None`. self.date = date - def to_header(self): + def to_header(self) -> str: """Converts the object back into an HTTP header.""" if self.date is not None: return http.http_date(self.date) @@ -24,10 +33,10 @@ def to_header(self): return http.quote_etag(self.etag) return "" - def __str__(self): + def __str__(self) -> str: return self.to_header() - def __repr__(self): + def __repr__(self) -> str: return f"<{type(self).__name__} {str(self)!r}>" @@ -44,7 +53,9 @@ class Range: .. versionadded:: 0.7 """ - def __init__(self, units, ranges): + def __init__( + self, units: str, ranges: cabc.Sequence[tuple[int, int | None]] + ) -> None: #: The units of this range. Usually "bytes". self.units = units #: A list of ``(begin, end)`` tuples for the range header provided. @@ -55,7 +66,7 @@ def __init__(self, units, ranges): if start is None or (end is not None and (start < 0 or start >= end)): raise ValueError(f"{(start, end)} is not a valid range.") - def range_for_length(self, length): + def range_for_length(self, length: int | None) -> tuple[int, int] | None: """If the range is for bytes, the length is not None and there is exactly one range and it is satisfiable it returns a ``(start, stop)`` tuple, otherwise `None`. @@ -71,7 +82,7 @@ def range_for_length(self, length): return start, min(end, length) return None - def make_content_range(self, length): + def make_content_range(self, length: int | None) -> ContentRange | None: """Creates a :class:`~werkzeug.datastructures.ContentRange` object from the current range and given content length. 
""" @@ -80,7 +91,7 @@ def make_content_range(self, length): return ContentRange(self.units, rng[0], rng[1], length) return None - def to_header(self): + def to_header(self) -> str: """Converts the object back into an HTTP header.""" ranges = [] for begin, end in self.ranges: @@ -90,7 +101,7 @@ def to_header(self): ranges.append(f"{begin}-{end - 1}") return f"{self.units}={','.join(ranges)}" - def to_content_range_header(self, length): + def to_content_range_header(self, length: int | None) -> str | None: """Converts the object into `Content-Range` HTTP header, based on given length """ @@ -99,23 +110,34 @@ def to_content_range_header(self, length): return f"{self.units} {range[0]}-{range[1] - 1}/{length}" return None - def __str__(self): + def __str__(self) -> str: return self.to_header() - def __repr__(self): + def __repr__(self) -> str: return f"<{type(self).__name__} {str(self)!r}>" -def _callback_property(name): - def fget(self): - return getattr(self, name) +class _CallbackProperty(t.Generic[T]): + def __set_name__(self, owner: type[ContentRange], name: str) -> None: + self.attr = f"_{name}" - def fset(self, value): - setattr(self, name, value) - if self.on_update is not None: - self.on_update(self) + @t.overload + def __get__(self, instance: None, owner: None) -> te.Self: ... + @t.overload + def __get__(self, instance: ContentRange, owner: type[ContentRange]) -> T: ... + def __get__( + self, instance: ContentRange | None, owner: type[ContentRange] | None + ) -> te.Self | T: + if instance is None: + return self - return property(fget, fset) + return instance.__dict__[self.attr] # type: ignore[no-any-return] + + def __set__(self, instance: ContentRange, value: T) -> None: + instance.__dict__[self.attr] = value + + if instance.on_update is not None: + instance.on_update(instance) class ContentRange: @@ -124,55 +146,67 @@ class ContentRange: .. versionadded:: 0.7 """ - def __init__(self, units, start, stop, length=None, on_update=None): - assert http.is_byte_range_valid(start, stop, length), "Bad range provided" + def __init__( + self, + units: str | None, + start: int | None, + stop: int | None, + length: int | None = None, + on_update: cabc.Callable[[ContentRange], None] | None = None, + ) -> None: self.on_update = on_update self.set(start, stop, length, units) #: The units to use, usually "bytes" - units = _callback_property("_units") + units: str | None = _CallbackProperty() # type: ignore[assignment] #: The start point of the range or `None`. - start = _callback_property("_start") + start: int | None = _CallbackProperty() # type: ignore[assignment] #: The stop point of the range (non-inclusive) or `None`. Can only be #: `None` if also start is `None`. - stop = _callback_property("_stop") + stop: int | None = _CallbackProperty() # type: ignore[assignment] #: The length of the range or `None`. 
- length = _callback_property("_length") - - def set(self, start, stop, length=None, units="bytes"): + length: int | None = _CallbackProperty() # type: ignore[assignment] + + def set( + self, + start: int | None, + stop: int | None, + length: int | None = None, + units: str | None = "bytes", + ) -> None: """Simple method to update the ranges.""" assert http.is_byte_range_valid(start, stop, length), "Bad range provided" - self._units = units - self._start = start - self._stop = stop - self._length = length + self._units: str | None = units + self._start: int | None = start + self._stop: int | None = stop + self._length: int | None = length if self.on_update is not None: self.on_update(self) - def unset(self): + def unset(self) -> None: """Sets the units to `None` which indicates that the header should no longer be used. """ self.set(None, None, units=None) - def to_header(self): - if self.units is None: + def to_header(self) -> str: + if self._units is None: return "" - if self.length is None: - length = "*" + if self._length is None: + length: str | int = "*" else: - length = self.length - if self.start is None: - return f"{self.units} */{length}" - return f"{self.units} {self.start}-{self.stop - 1}/{length}" + length = self._length + if self._start is None: + return f"{self._units} */{length}" + return f"{self._units} {self._start}-{self._stop - 1}/{length}" # type: ignore[operator] - def __bool__(self): - return self.units is not None + def __bool__(self) -> bool: + return self._units is not None - def __str__(self): + def __str__(self) -> str: return self.to_header() - def __repr__(self): + def __repr__(self) -> str: return f"<{type(self).__name__} {str(self)!r}>" diff --git a/src/werkzeug/datastructures/range.pyi b/src/werkzeug/datastructures/range.pyi deleted file mode 100644 index f38ad69ef..000000000 --- a/src/werkzeug/datastructures/range.pyi +++ /dev/null @@ -1,57 +0,0 @@ -from collections.abc import Callable -from datetime import datetime - -class IfRange: - etag: str | None - date: datetime | None - def __init__( - self, etag: str | None = None, date: datetime | None = None - ) -> None: ... - def to_header(self) -> str: ... - -class Range: - units: str - ranges: list[tuple[int, int | None]] - def __init__(self, units: str, ranges: list[tuple[int, int | None]]) -> None: ... - def range_for_length(self, length: int | None) -> tuple[int, int] | None: ... - def make_content_range(self, length: int | None) -> ContentRange | None: ... - def to_header(self) -> str: ... - def to_content_range_header(self, length: int | None) -> str | None: ... - -def _callback_property(name: str) -> property: ... - -class ContentRange: - on_update: Callable[[ContentRange], None] | None - def __init__( - self, - units: str | None, - start: int | None, - stop: int | None, - length: int | None = None, - on_update: Callable[[ContentRange], None] | None = None, - ) -> None: ... - @property - def units(self) -> str | None: ... - @units.setter - def units(self, value: str | None) -> None: ... - @property - def start(self) -> int | None: ... - @start.setter - def start(self, value: int | None) -> None: ... - @property - def stop(self) -> int | None: ... - @stop.setter - def stop(self, value: int | None) -> None: ... - @property - def length(self) -> int | None: ... - @length.setter - def length(self, value: int | None) -> None: ... - def set( - self, - start: int | None, - stop: int | None, - length: int | None = None, - units: str | None = "bytes", - ) -> None: ... - def unset(self) -> None: ... 
- def to_header(self) -> str: ... diff --git a/src/werkzeug/datastructures/structures.py b/src/werkzeug/datastructures/structures.py index 4279ceb98..eb54599d5 100644 --- a/src/werkzeug/datastructures/structures.py +++ b/src/werkzeug/datastructures/structures.py @@ -1,6 +1,7 @@ from __future__ import annotations -from collections.abc import MutableSet +import collections.abc as cabc +import typing as t from copy import deepcopy from .. import exceptions @@ -10,29 +11,38 @@ from .mixins import ImmutableMultiDictMixin from .mixins import UpdateDictMixin +if t.TYPE_CHECKING: + import typing_extensions as te -def is_immutable(self): - raise TypeError(f"{type(self).__name__!r} objects are immutable") +K = t.TypeVar("K") +V = t.TypeVar("V") +T = t.TypeVar("T") -def iter_multi_items(mapping): +def iter_multi_items( + mapping: ( + MultiDict[K, V] + | cabc.Mapping[K, V | cabc.Collection[V]] + | cabc.Iterable[tuple[K, V]] + ), +) -> cabc.Iterator[tuple[K, V]]: """Iterates over the items of a mapping yielding keys and values without dropping any from more complex structures. """ if isinstance(mapping, MultiDict): yield from mapping.items(multi=True) - elif isinstance(mapping, dict): + elif isinstance(mapping, cabc.Mapping): for key, value in mapping.items(): - if isinstance(value, (tuple, list)): + if isinstance(value, cabc.Collection) and not isinstance(value, str): for v in value: yield key, v else: - yield key, value + yield key, value # type: ignore[misc] else: yield from mapping -class ImmutableList(ImmutableListMixin, list): +class ImmutableList(ImmutableListMixin, list[V]): # type: ignore[misc] """An immutable :class:`list`. .. versionadded:: 0.5 @@ -40,11 +50,11 @@ class ImmutableList(ImmutableListMixin, list): :private: """ - def __repr__(self): + def __repr__(self) -> str: return f"{type(self).__name__}({list.__repr__(self)})" -class TypeConversionDict(dict): +class TypeConversionDict(dict[K, V]): """Works like a regular dict but the :meth:`get` method can perform type conversions. :class:`MultiDict` and :class:`CombinedMultiDict` are subclasses of this class and provide the same feature. @@ -52,7 +62,19 @@ class TypeConversionDict(dict): .. versionadded:: 0.5 """ - def get(self, key, default=None, type=None): + @t.overload # type: ignore[override] + def get(self, key: K) -> V | None: ... + @t.overload + def get(self, key: K, default: V) -> V: ... + @t.overload + def get(self, key: K, default: T) -> V | T: ... + @t.overload + def get(self, key: str, type: type[T]) -> T | None: ... + @t.overload + def get(self, key: str, default: T, type: type[T]) -> T: ... + def get( # type: ignore[misc] + self, key: K, default: V | T | None = None, type: type[T] | None = None + ) -> V | T | None: """Return the default value if the requested data doesn't exist. If `type` is provided and is a callable it should convert the value, return it or raise a :exc:`ValueError` if that is not possible. In @@ -81,33 +103,35 @@ def get(self, key, default=None, type=None): rv = self[key] except KeyError: return default - if type is not None: - try: - rv = type(rv) - except (ValueError, TypeError): - rv = default - return rv + + if type is None: + return rv + + try: + return type(rv) # type: ignore[call-arg] + except (ValueError, TypeError): + return default -class ImmutableTypeConversionDict(ImmutableDictMixin, TypeConversionDict): +class ImmutableTypeConversionDict(ImmutableDictMixin[K, V], TypeConversionDict[K, V]): # type: ignore[misc] """Works like a :class:`TypeConversionDict` but does not support modifications. .. 
versionadded:: 0.5 """ - def copy(self): + def copy(self) -> TypeConversionDict[K, V]: """Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ return TypeConversionDict(self) - def __copy__(self): + def __copy__(self) -> te.Self: return self -class MultiDict(TypeConversionDict): +class MultiDict(TypeConversionDict[K, V]): """A :class:`MultiDict` is a dictionary subclass customized to deal with multiple values for the same key which is for example used by the parsing functions in the wrappers. This is necessary because some HTML form @@ -148,40 +172,52 @@ class MultiDict(TypeConversionDict): or `None`. """ - def __init__(self, mapping=None): - if isinstance(mapping, MultiDict): - dict.__init__(self, ((k, vs[:]) for k, vs in mapping.lists())) - elif isinstance(mapping, dict): + def __init__( + self, + mapping: ( + MultiDict[K, V] + | cabc.Mapping[K, V | cabc.Collection[V]] + | cabc.Iterable[tuple[K, V]] + | None + ) = None, + ) -> None: + if mapping is None: + super().__init__() + elif isinstance(mapping, MultiDict): + super().__init__((k, vs[:]) for k, vs in mapping.lists()) + elif isinstance(mapping, cabc.Mapping): tmp = {} for key, value in mapping.items(): - if isinstance(value, (tuple, list)): - if len(value) == 0: - continue + if isinstance(value, cabc.Collection) and not isinstance(value, str): value = list(value) + + if not value: + continue else: value = [value] tmp[key] = value - dict.__init__(self, tmp) + super().__init__(tmp) # type: ignore[arg-type] else: tmp = {} - for key, value in mapping or (): + for key, value in mapping: tmp.setdefault(key, []).append(value) - dict.__init__(self, tmp) + super().__init__(tmp) # type: ignore[arg-type] - def __getstate__(self): + def __getstate__(self) -> t.Any: return dict(self.lists()) - def __setstate__(self, value): - dict.clear(self) - dict.update(self, value) + def __setstate__(self, value: t.Any) -> None: + super().clear() + super().update(value) - def __iter__(self): - # Work around https://bugs.python.org/issue43246. - # (`return super().__iter__()` also works here, which makes this look - # even more like it should be a no-op, yet it isn't.) - return dict.__iter__(self) + def __iter__(self) -> cabc.Iterator[K]: + # https://github.com/python/cpython/issues/87412 + # If __iter__ is not overridden, Python uses a fast path for dict(md), + # taking the data directly and getting lists of values, rather than + # calling __getitem__ and getting only the first value. + return super().__iter__() - def __getitem__(self, key): + def __getitem__(self, key: K) -> V: """Return the first data value for this key; raises KeyError if not found. @@ -190,20 +226,20 @@ def __getitem__(self, key): """ if key in self: - lst = dict.__getitem__(self, key) - if len(lst) > 0: - return lst[0] + lst = super().__getitem__(key) + if len(lst) > 0: # type: ignore[arg-type] + return lst[0] # type: ignore[index,no-any-return] raise exceptions.BadRequestKeyError(key) - def __setitem__(self, key, value): + def __setitem__(self, key: K, value: V) -> None: """Like :meth:`add` but removes an existing key first. :param key: the key for the value. :param value: the value to set. """ - dict.__setitem__(self, key, [value]) + super().__setitem__(key, [value]) # type: ignore[assignment] - def add(self, key, value): + def add(self, key: K, value: V) -> None: """Adds a new value for the key. .. 
versionadded:: 0.6 @@ -211,9 +247,13 @@ def add(self, key, value): :param key: the key for the value. :param value: the value to add. """ - dict.setdefault(self, key, []).append(value) + super().setdefault(key, []).append(value) # type: ignore[arg-type,attr-defined] - def getlist(self, key, type=None): + @t.overload + def getlist(self, key: K) -> list[V]: ... + @t.overload + def getlist(self, key: K, type: type[T]) -> list[T]: ... + def getlist(self, key: K, type: type[T] | None = None) -> list[V] | list[T]: """Return the list of items for a given key. If that key is not in the `MultiDict`, the return value will be an empty list. Just like `get`, `getlist` accepts a `type` parameter. All items will be converted @@ -226,7 +266,7 @@ def getlist(self, key, type=None): :return: a :class:`list` of all the values for the key. """ try: - rv = dict.__getitem__(self, key) + rv: list[V] = super().__getitem__(key) # type: ignore[assignment] except KeyError: return [] if type is None: @@ -234,12 +274,12 @@ def getlist(self, key, type=None): result = [] for item in rv: try: - result.append(type(item)) + result.append(type(item)) # type: ignore[call-arg] except ValueError: pass return result - def setlist(self, key, new_list): + def setlist(self, key: K, new_list: cabc.Iterable[V]) -> None: """Remove the old values for a key and add new ones. Note that the list you pass the values in will be shallow-copied before it is inserted in the dictionary. @@ -255,9 +295,13 @@ def setlist(self, key, new_list): :param new_list: An iterable with the new values for the key. Old values are removed first. """ - dict.__setitem__(self, key, list(new_list)) + super().__setitem__(key, list(new_list)) # type: ignore[assignment] - def setdefault(self, key, default=None): + @t.overload + def setdefault(self, key: K) -> None: ... + @t.overload + def setdefault(self, key: K, default: V) -> V: ... + def setdefault(self, key: K, default: V | None = None) -> V | None: """Returns the value for the key if it is in the dict, otherwise it returns `default` and sets that value for `key`. @@ -266,12 +310,13 @@ def setdefault(self, key, default=None): in the dict. If not further specified it's `None`. """ if key not in self: - self[key] = default - else: - default = self[key] - return default + self[key] = default # type: ignore[assignment] - def setlistdefault(self, key, default_list=None): + return self[key] + + def setlistdefault( + self, key: K, default_list: cabc.Iterable[V] | None = None + ) -> list[V]: """Like `setdefault` but sets multiple values. The list returned is not a copy, but the list that is actually used internally. This means that you can put new values into the dict by appending items @@ -289,38 +334,42 @@ def setlistdefault(self, key, default_list=None): :return: a :class:`list` """ if key not in self: - default_list = list(default_list or ()) - dict.__setitem__(self, key, default_list) - else: - default_list = dict.__getitem__(self, key) - return default_list + super().__setitem__(key, list(default_list or ())) # type: ignore[assignment] + + return super().__getitem__(key) # type: ignore[return-value] - def items(self, multi=False): + def items(self, multi: bool = False) -> cabc.Iterable[tuple[K, V]]: # type: ignore[override] """Return an iterator of ``(key, value)`` pairs. :param multi: If set to `True` the iterator returned will have a pair for each value of each key. Otherwise it will only contain pairs for the first value of each key. 
""" - for key, values in dict.items(self): + values: list[V] + + for key, values in super().items(): # type: ignore[assignment] if multi: for value in values: yield key, value else: yield key, values[0] - def lists(self): + def lists(self) -> cabc.Iterable[tuple[K, list[V]]]: """Return a iterator of ``(key, values)`` pairs, where values is the list of all values associated with the key.""" - for key, values in dict.items(self): + values: list[V] + + for key, values in super().items(): # type: ignore[assignment] yield key, list(values) - def values(self): + def values(self) -> cabc.Iterable[V]: # type: ignore[override] """Returns an iterator of the first value on every key's value list.""" - for values in dict.values(self): + values: list[V] + + for values in super().values(): # type: ignore[assignment] yield values[0] - def listvalues(self): + def listvalues(self) -> cabc.Iterable[list[V]]: """Return an iterator of all values associated with a key. Zipping :meth:`keys` and this is the same as calling :meth:`lists`: @@ -328,17 +377,21 @@ def listvalues(self): >>> zip(d.keys(), d.listvalues()) == d.lists() True """ - return dict.values(self) + return super().values() # type: ignore[return-value] - def copy(self): + def copy(self) -> te.Self: """Return a shallow copy of this object.""" return self.__class__(self) - def deepcopy(self, memo=None): + def deepcopy(self, memo: t.Any = None) -> te.Self: """Return a deep copy of this object.""" return self.__class__(deepcopy(self.to_dict(flat=False), memo)) - def to_dict(self, flat=True): + @t.overload + def to_dict(self) -> dict[K, V]: ... + @t.overload + def to_dict(self, flat: t.Literal[False]) -> dict[K, list[V]]: ... + def to_dict(self, flat: bool = True) -> dict[K, V] | dict[K, list[V]]: """Return the contents as regular dict. If `flat` is `True` the returned dict will only have the first item present, if `flat` is `False` all values will be returned as lists. @@ -352,7 +405,14 @@ def to_dict(self, flat=True): return dict(self.items()) return dict(self.lists()) - def update(self, mapping): + def update( # type: ignore[override] + self, + mapping: ( + MultiDict[K, V] + | cabc.Mapping[K, V | cabc.Collection[V]] + | cabc.Iterable[tuple[K, V]] + ), + ) -> None: """update() extends rather than replaces existing key lists: >>> a = MultiDict({'x': 1}) @@ -371,9 +431,19 @@ def update(self, mapping): MultiDict([]) """ for key, value in iter_multi_items(mapping): - MultiDict.add(self, key, value) + self.add(key, value) - def pop(self, key, default=_missing): + @t.overload + def pop(self, key: K) -> V: ... + @t.overload + def pop(self, key: K, default: V) -> V: ... + @t.overload + def pop(self, key: K, default: T) -> V | T: ... + def pop( + self, + key: K, + default: V | T = _missing, # type: ignore[assignment] + ) -> V | T: """Pop the first item for a list on the dict. Afterwards the key is removed from the dict, so additional values are discarded: @@ -387,8 +457,10 @@ def pop(self, key, default=_missing): :param default: if provided the value to return if the key was not in the dictionary. 
""" + lst: list[V] + try: - lst = dict.pop(self, key) + lst = super().pop(key) # type: ignore[assignment] if len(lst) == 0: raise exceptions.BadRequestKeyError(key) @@ -400,19 +472,21 @@ def pop(self, key, default=_missing): raise exceptions.BadRequestKeyError(key) from None - def popitem(self): + def popitem(self) -> tuple[K, V]: """Pop an item from the dict.""" + item: tuple[K, list[V]] + try: - item = dict.popitem(self) + item = super().popitem() # type: ignore[assignment] if len(item[1]) == 0: raise exceptions.BadRequestKeyError(item[0]) - return (item[0], item[1][0]) + return item[0], item[1][0] except KeyError as e: raise exceptions.BadRequestKeyError(e.args[0]) from None - def poplist(self, key): + def poplist(self, key: K) -> list[V]: """Pop the list for a key from the dict. If the key is not in the dict an empty list is returned. @@ -420,26 +494,26 @@ def poplist(self, key): If the key does no longer exist a list is returned instead of raising an error. """ - return dict.pop(self, key, []) + return super().pop(key, []) # type: ignore[return-value] - def popitemlist(self): + def popitemlist(self) -> tuple[K, list[V]]: """Pop a ``(key, list)`` tuple from the dict.""" try: - return dict.popitem(self) + return super().popitem() # type: ignore[return-value] except KeyError as e: raise exceptions.BadRequestKeyError(e.args[0]) from None - def __copy__(self): + def __copy__(self) -> te.Self: return self.copy() - def __deepcopy__(self, memo): + def __deepcopy__(self, memo: t.Any) -> te.Self: return self.deepcopy(memo=memo) - def __repr__(self): + def __repr__(self) -> str: return f"{type(self).__name__}({list(self.items(multi=True))!r})" -class _omd_bucket: +class _omd_bucket(t.Generic[K, V]): """Wraps values in the :class:`OrderedMultiDict`. This makes it possible to keep an order over multiple different keys. It requires a lot of extra memory and slows down access a lot, but makes it @@ -448,11 +522,11 @@ class _omd_bucket: __slots__ = ("prev", "key", "value", "next") - def __init__(self, omd, key, value): - self.prev = omd._last_bucket - self.key = key - self.value = value - self.next = None + def __init__(self, omd: OrderedMultiDict[K, V], key: K, value: V) -> None: + self.prev: _omd_bucket[K, V] | None = omd._last_bucket + self.key: K = key + self.value: V = value + self.next: _omd_bucket[K, V] | None = None if omd._first_bucket is None: omd._first_bucket = self @@ -460,7 +534,7 @@ def __init__(self, omd, key, value): omd._last_bucket.next = self omd._last_bucket = self - def unlink(self, omd): + def unlink(self, omd: OrderedMultiDict[K, V]) -> None: if self.prev: self.prev.next = self.next if self.next: @@ -471,7 +545,7 @@ def unlink(self, omd): omd._last_bucket = self.prev -class OrderedMultiDict(MultiDict): +class OrderedMultiDict(MultiDict[K, V]): """Works like a regular :class:`MultiDict` but preserves the order of the fields. To convert the ordered multi dict into a list you can use the :meth:`items` method and pass it ``multi=True``. @@ -487,13 +561,22 @@ class OrderedMultiDict(MultiDict): the internal bucket objects are exposed. 
""" - def __init__(self, mapping=None): - dict.__init__(self) - self._first_bucket = self._last_bucket = None + def __init__( + self, + mapping: ( + MultiDict[K, V] + | cabc.Mapping[K, V | cabc.Collection[V]] + | cabc.Iterable[tuple[K, V]] + | None + ) = None, + ) -> None: + super().__init__() + self._first_bucket: _omd_bucket[K, V] | None = None + self._last_bucket: _omd_bucket[K, V] | None = None if mapping is not None: - OrderedMultiDict.update(self, mapping) + self.update(mapping) - def __eq__(self, other): + def __eq__(self, other: object) -> bool: if not isinstance(other, MultiDict): return NotImplemented if isinstance(other, OrderedMultiDict): @@ -518,41 +601,42 @@ def __eq__(self, other): return False return True - __hash__ = None + __hash__ = None # type: ignore[assignment] - def __reduce_ex__(self, protocol): + def __reduce_ex__(self, protocol: t.SupportsIndex) -> t.Any: return type(self), (list(self.items(multi=True)),) - def __getstate__(self): + def __getstate__(self) -> t.Any: return list(self.items(multi=True)) - def __setstate__(self, values): - dict.clear(self) + def __setstate__(self, values: t.Any) -> None: + self.clear() + for key, value in values: self.add(key, value) - def __getitem__(self, key): + def __getitem__(self, key: K) -> V: if key in self: - return dict.__getitem__(self, key)[0].value + return dict.__getitem__(self, key)[0].value # type: ignore[index,no-any-return] raise exceptions.BadRequestKeyError(key) - def __setitem__(self, key, value): + def __setitem__(self, key: K, value: V) -> None: self.poplist(key) self.add(key, value) - def __delitem__(self, key): + def __delitem__(self, key: K) -> None: self.pop(key) - def keys(self): - return (key for key, value in self.items()) + def keys(self) -> cabc.Iterable[K]: # type: ignore[override] + return (key for key, _ in self.items()) - def __iter__(self): + def __iter__(self) -> cabc.Iterator[K]: return iter(self.keys()) - def values(self): + def values(self) -> cabc.Iterable[V]: # type: ignore[override] return (value for key, value in self.items()) - def items(self, multi=False): + def items(self, multi: bool = False) -> cabc.Iterable[tuple[K, V]]: # type: ignore[override] ptr = self._first_bucket if multi: while ptr is not None: @@ -566,7 +650,7 @@ def items(self, multi=False): yield ptr.key, ptr.value ptr = ptr.next - def lists(self): + def lists(self) -> cabc.Iterable[tuple[K, list[V]]]: returned_keys = set() ptr = self._first_bucket while ptr is not None: @@ -575,16 +659,22 @@ def lists(self): returned_keys.add(ptr.key) ptr = ptr.next - def listvalues(self): + def listvalues(self) -> cabc.Iterable[list[V]]: for _key, values in self.lists(): yield values - def add(self, key, value): - dict.setdefault(self, key, []).append(_omd_bucket(self, key, value)) + def add(self, key: K, value: V) -> None: + dict.setdefault(self, key, []).append(_omd_bucket(self, key, value)) # type: ignore[arg-type,attr-defined] + + @t.overload + def getlist(self, key: K) -> list[V]: ... + @t.overload + def getlist(self, key: K, type: type[T]) -> list[T]: ... 
+ def getlist(self, key: K, type: type[T] | None = None) -> list[V] | list[T]: + rv: list[_omd_bucket[K, V]] - def getlist(self, key, type=None): try: - rv = dict.__getitem__(self, key) + rv = dict.__getitem__(self, key) # type: ignore[index] except KeyError: return [] if type is None: @@ -592,32 +682,51 @@ def getlist(self, key, type=None): result = [] for item in rv: try: - result.append(type(item.value)) + result.append(type(item.value)) # type: ignore[call-arg] except ValueError: pass return result - def setlist(self, key, new_list): + def setlist(self, key: K, new_list: cabc.Iterable[V]) -> None: self.poplist(key) for value in new_list: self.add(key, value) - def setlistdefault(self, key, default_list=None): + def setlistdefault(self, key: t.Any, default_list: t.Any = None) -> t.NoReturn: raise TypeError("setlistdefault is unsupported for ordered multi dicts") - def update(self, mapping): + def update( # type: ignore[override] + self, + mapping: ( + MultiDict[K, V] + | cabc.Mapping[K, V | cabc.Collection[V]] + | cabc.Iterable[tuple[K, V]] + ), + ) -> None: for key, value in iter_multi_items(mapping): - OrderedMultiDict.add(self, key, value) + self.add(key, value) - def poplist(self, key): - buckets = dict.pop(self, key, ()) + def poplist(self, key: K) -> list[V]: + buckets: cabc.Iterable[_omd_bucket[K, V]] = dict.pop(self, key, ()) # type: ignore[arg-type] for bucket in buckets: bucket.unlink(self) return [x.value for x in buckets] - def pop(self, key, default=_missing): + @t.overload + def pop(self, key: K) -> V: ... + @t.overload + def pop(self, key: K, default: V) -> V: ... + @t.overload + def pop(self, key: K, default: T) -> V | T: ... + def pop( + self, + key: K, + default: V | T = _missing, # type: ignore[assignment] + ) -> V | T: + buckets: list[_omd_bucket[K, V]] + try: - buckets = dict.pop(self, key) + buckets = dict.pop(self, key) # type: ignore[arg-type] except KeyError: if default is not _missing: return default @@ -629,9 +738,12 @@ def pop(self, key, default=_missing): return buckets[0].value - def popitem(self): + def popitem(self) -> tuple[K, V]: + key: K + buckets: list[_omd_bucket[K, V]] + try: - key, buckets = dict.popitem(self) + key, buckets = dict.popitem(self) # type: ignore[arg-type,assignment] except KeyError as e: raise exceptions.BadRequestKeyError(e.args[0]) from None @@ -640,9 +752,12 @@ def popitem(self): return key, buckets[0].value - def popitemlist(self): + def popitemlist(self) -> tuple[K, list[V]]: + key: K + buckets: list[_omd_bucket[K, V]] + try: - key, buckets = dict.popitem(self) + key, buckets = dict.popitem(self) # type: ignore[arg-type,assignment] except KeyError as e: raise exceptions.BadRequestKeyError(e.args[0]) from None @@ -652,7 +767,7 @@ def popitemlist(self): return key, [x.value for x in buckets] -class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict): +class CombinedMultiDict(ImmutableMultiDictMixin[K, V], MultiDict[K, V]): # type: ignore[misc] """A read only :class:`MultiDict` that you can pass multiple :class:`MultiDict` instances as sequence and it will combine the return values of all wrapped dicts: @@ -675,54 +790,75 @@ class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict): exceptions. 
""" - def __reduce_ex__(self, protocol): + def __reduce_ex__(self, protocol: t.SupportsIndex) -> t.Any: return type(self), (self.dicts,) - def __init__(self, dicts=None): - self.dicts = list(dicts) or [] + def __init__(self, dicts: cabc.Iterable[MultiDict[K, V]] | None = None) -> None: + super().__init__() + self.dicts: list[MultiDict[K, V]] = list(dicts or ()) @classmethod - def fromkeys(cls, keys, value=None): + def fromkeys(cls, keys: t.Any, value: t.Any = None) -> t.NoReturn: raise TypeError(f"cannot create {cls.__name__!r} instances by fromkeys") - def __getitem__(self, key): + def __getitem__(self, key: K) -> V: for d in self.dicts: if key in d: return d[key] raise exceptions.BadRequestKeyError(key) - def get(self, key, default=None, type=None): + @t.overload # type: ignore[override] + def get(self, key: K) -> V | None: ... + @t.overload + def get(self, key: K, default: V) -> V: ... + @t.overload + def get(self, key: K, default: T) -> V | T: ... + @t.overload + def get(self, key: str, type: type[T]) -> T | None: ... + @t.overload + def get(self, key: str, default: T, type: type[T]) -> T: ... + def get( # type: ignore[misc] + self, key: K, default: V | T | None = None, type: type[T] | None = None + ) -> V | T | None: for d in self.dicts: if key in d: if type is not None: try: - return type(d[key]) + return type(d[key]) # type: ignore[call-arg] except ValueError: continue return d[key] return default - def getlist(self, key, type=None): + @t.overload + def getlist(self, key: K) -> list[V]: ... + @t.overload + def getlist(self, key: K, type: type[T]) -> list[T]: ... + def getlist(self, key: K, type: type[T] | None = None) -> list[V] | list[T]: rv = [] for d in self.dicts: - rv.extend(d.getlist(key, type)) + rv.extend(d.getlist(key, type)) # type: ignore[arg-type] return rv - def _keys_impl(self): + def _keys_impl(self) -> set[K]: """This function exists so __len__ can be implemented more efficiently, saving one list creation from an iterator. """ - rv = set() - rv.update(*self.dicts) - return rv + return set(k for d in self.dicts for k in d) - def keys(self): + def keys(self) -> cabc.Iterable[K]: # type: ignore[override] return self._keys_impl() - def __iter__(self): - return iter(self.keys()) + def __iter__(self) -> cabc.Iterator[K]: + return iter(self._keys_impl()) - def items(self, multi=False): + @t.overload # type: ignore[override] + def items(self) -> cabc.Iterable[tuple[K, V]]: ... + @t.overload + def items(self, multi: t.Literal[True]) -> cabc.Iterable[tuple[K, list[V]]]: ... + def items( + self, multi: bool = False + ) -> cabc.Iterable[tuple[K, V]] | cabc.Iterable[tuple[K, list[V]]]: found = set() for d in self.dicts: for key, value in d.items(multi): @@ -732,21 +868,21 @@ def items(self, multi=False): found.add(key) yield key, value - def values(self): - for _key, value in self.items(): + def values(self) -> cabc.Iterable[V]: # type: ignore[override] + for _, value in self.items(): yield value - def lists(self): - rv = {} + def lists(self) -> cabc.Iterable[tuple[K, list[V]]]: + rv: dict[K, list[V]] = {} for d in self.dicts: for key, values in d.lists(): rv.setdefault(key, []).extend(values) - return list(rv.items()) + return rv.items() - def listvalues(self): + def listvalues(self) -> cabc.Iterable[list[V]]: return (x[1] for x in self.lists()) - def copy(self): + def copy(self) -> MultiDict[K, V]: # type: ignore[override] """Return a shallow mutable copy of this object. 
This returns a :class:`MultiDict` representing the data at the @@ -758,105 +894,113 @@ def copy(self): """ return MultiDict(self) - def to_dict(self, flat=True): - """Return the contents as regular dict. If `flat` is `True` the - returned dict will only have the first item present, if `flat` is - `False` all values will be returned as lists. - - :param flat: If set to `False` the dict returned will have lists - with all the values in it. Otherwise it will only - contain the first item for each key. - :return: a :class:`dict` - """ - if flat: - return dict(self.items()) - - return dict(self.lists()) - - def __len__(self): + def __len__(self) -> int: return len(self._keys_impl()) - def __contains__(self, key): + def __contains__(self, key: K) -> bool: # type: ignore[override] for d in self.dicts: if key in d: return True return False - def __repr__(self): + def __repr__(self) -> str: return f"{type(self).__name__}({self.dicts!r})" -class ImmutableDict(ImmutableDictMixin, dict): +class ImmutableDict(ImmutableDictMixin[K, V], dict[K, V]): # type: ignore[misc] """An immutable :class:`dict`. .. versionadded:: 0.5 """ - def __repr__(self): + def __repr__(self) -> str: return f"{type(self).__name__}({dict.__repr__(self)})" - def copy(self): + def copy(self) -> dict[K, V]: """Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ return dict(self) - def __copy__(self): + def __copy__(self) -> te.Self: return self -class ImmutableMultiDict(ImmutableMultiDictMixin, MultiDict): +class ImmutableMultiDict(ImmutableMultiDictMixin[K, V], MultiDict[K, V]): # type: ignore[misc] """An immutable :class:`MultiDict`. .. versionadded:: 0.5 """ - def copy(self): + def copy(self) -> MultiDict[K, V]: # type: ignore[override] """Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ return MultiDict(self) - def __copy__(self): + def __copy__(self) -> te.Self: return self -class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict): +class ImmutableOrderedMultiDict(ImmutableMultiDictMixin[K, V], OrderedMultiDict[K, V]): # type: ignore[misc] """An immutable :class:`OrderedMultiDict`. .. versionadded:: 0.6 """ - def _iter_hashitems(self): + def __init__( + self, + mapping: ( + MultiDict[K, V] + | cabc.Mapping[K, V | cabc.Collection[V]] + | cabc.Iterable[tuple[K, V]] + | None + ) = None, + ) -> None: + super().__init__() + + if mapping is not None: + for k, v in iter_multi_items(mapping): + OrderedMultiDict.add(self, k, v) + + def _iter_hashitems(self) -> cabc.Iterable[t.Any]: return enumerate(self.items(multi=True)) - def copy(self): + def copy(self) -> OrderedMultiDict[K, V]: # type: ignore[override] """Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ return OrderedMultiDict(self) - def __copy__(self): + def __copy__(self) -> te.Self: return self -class CallbackDict(UpdateDictMixin, dict): +class CallbackDict(UpdateDictMixin[K, V], dict[K, V]): """A dict that calls a function passed every time something is changed. The function is passed the dict instance. 
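The immutable variants above share one copy convention, shown in this short sketch (not part of the patch): ``copy()`` hands back a mutable sibling, while ``copy.copy()`` returns the object unchanged::

    from copy import copy

    from werkzeug.datastructures import ImmutableMultiDict

    frozen = ImmutableMultiDict([("tag", "a"), ("tag", "b")])

    mutable = frozen.copy()          # a plain, mutable MultiDict
    mutable.add("tag", "c")
    assert mutable.getlist("tag") == ["a", "b", "c"]

    assert copy(frozen) is frozen    # __copy__ is a no-op for immutable types

    try:
        frozen["tag"] = "x"          # any mutation raises TypeError
    except TypeError:
        pass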
""" - def __init__(self, initial=None, on_update=None): - dict.__init__(self, initial or ()) + def __init__( + self, + initial: cabc.Mapping[K, V] | cabc.Iterable[tuple[K, V]] | None = None, + on_update: cabc.Callable[[te.Self], None] | None = None, + ) -> None: + if initial is None: + super().__init__() + else: + super().__init__(initial) + self.on_update = on_update - def __repr__(self): - return f"<{type(self).__name__} {dict.__repr__(self)}>" + def __repr__(self) -> str: + return f"<{type(self).__name__} {super().__repr__()}>" -class HeaderSet(MutableSet): +class HeaderSet(cabc.MutableSet[str]): """Similar to the :class:`ETags` class this implements a set-like structure. Unlike :class:`ETags` this is case insensitive and used for vary, allow, and content-language headers. @@ -869,16 +1013,20 @@ class HeaderSet(MutableSet): HeaderSet(['foo', 'bar', 'baz']) """ - def __init__(self, headers=None, on_update=None): + def __init__( + self, + headers: cabc.Iterable[str] | None = None, + on_update: cabc.Callable[[te.Self], None] | None = None, + ) -> None: self._headers = list(headers or ()) self._set = {x.lower() for x in self._headers} self.on_update = on_update - def add(self, header): + def add(self, header: str) -> None: """Add a new header to the set.""" self.update((header,)) - def remove(self, header): + def remove(self: te.Self, header: str) -> None: """Remove a header from the set. This raises an :exc:`KeyError` if the header is not in the set. @@ -899,7 +1047,7 @@ def remove(self, header): if self.on_update is not None: self.on_update(self) - def update(self, iterable): + def update(self: te.Self, iterable: cabc.Iterable[str]) -> None: """Add all the headers from the iterable to the set. :param iterable: updates the set with the items from the iterable. @@ -914,7 +1062,7 @@ def update(self, iterable): if inserted_any and self.on_update is not None: self.on_update(self) - def discard(self, header): + def discard(self, header: str) -> None: """Like :meth:`remove` but ignores errors. :param header: the header to be discarded. @@ -924,7 +1072,7 @@ def discard(self, header): except KeyError: pass - def find(self, header): + def find(self, header: str) -> int: """Return the index of the header in the set or return -1 if not found. :param header: the header to be looked up. @@ -935,7 +1083,7 @@ def find(self, header): return idx return -1 - def index(self, header): + def index(self, header: str) -> int: """Return the index of the header in the set or raise an :exc:`IndexError`. @@ -946,14 +1094,15 @@ def index(self, header): raise IndexError(header) return rv - def clear(self): + def clear(self: te.Self) -> None: """Clear the set.""" self._set.clear() - del self._headers[:] + self._headers.clear() + if self.on_update is not None: self.on_update(self) - def as_set(self, preserve_casing=False): + def as_set(self, preserve_casing: bool = False) -> set[str]: """Return the set as real python set type. When calling this, all the items are converted to lowercase and the ordering is lost. 
@@ -966,20 +1115,20 @@ def as_set(self, preserve_casing=False): return set(self._headers) return set(self._set) - def to_header(self): + def to_header(self) -> str: """Convert the header set into an HTTP header string.""" return ", ".join(map(http.quote_header_value, self._headers)) - def __getitem__(self, idx): + def __getitem__(self, idx: t.SupportsIndex) -> str: return self._headers[idx] - def __delitem__(self, idx): + def __delitem__(self: te.Self, idx: t.SupportsIndex) -> None: rv = self._headers.pop(idx) self._set.remove(rv.lower()) if self.on_update is not None: self.on_update(self) - def __setitem__(self, idx, value): + def __setitem__(self: te.Self, idx: t.SupportsIndex, value: str) -> None: old = self._headers[idx] self._set.remove(old.lower()) self._headers[idx] = value @@ -987,22 +1136,22 @@ def __setitem__(self, idx, value): if self.on_update is not None: self.on_update(self) - def __contains__(self, header): + def __contains__(self, header: str) -> bool: # type: ignore[override] return header.lower() in self._set - def __len__(self): + def __len__(self) -> int: return len(self._set) - def __iter__(self): + def __iter__(self) -> cabc.Iterator[str]: return iter(self._headers) - def __bool__(self): + def __bool__(self) -> bool: return bool(self._set) - def __str__(self): + def __str__(self) -> str: return self.to_header() - def __repr__(self): + def __repr__(self) -> str: return f"{type(self).__name__}({self._headers!r})" diff --git a/src/werkzeug/datastructures/structures.pyi b/src/werkzeug/datastructures/structures.pyi deleted file mode 100644 index 7086ddae1..000000000 --- a/src/werkzeug/datastructures/structures.pyi +++ /dev/null @@ -1,206 +0,0 @@ -from collections.abc import Callable -from collections.abc import Iterable -from collections.abc import Iterator -from collections.abc import Mapping -from typing import Any -from typing import Generic -from typing import Literal -from typing import NoReturn -from typing import overload -from typing import TypeVar - -from .mixins import ImmutableDictMixin -from .mixins import ImmutableListMixin -from .mixins import ImmutableMultiDictMixin -from .mixins import UpdateDictMixin - -D = TypeVar("D") -K = TypeVar("K") -T = TypeVar("T") -V = TypeVar("V") -_CD = TypeVar("_CD", bound="CallbackDict[Any, Any]") - -def is_immutable(self: object) -> NoReturn: ... -def iter_multi_items( - mapping: Mapping[K, V | Iterable[V]] | Iterable[tuple[K, V]], -) -> Iterator[tuple[K, V]]: ... - -class ImmutableList(ImmutableListMixin[V]): ... - -class TypeConversionDict(dict[K, V]): - @overload - def get(self, key: K, default: None = ..., type: None = ...) -> V | None: ... - @overload - def get(self, key: K, default: D, type: None = ...) -> D | V: ... - @overload - def get(self, key: K, default: D, type: Callable[[V], T]) -> D | T: ... - @overload - def get(self, key: K, type: Callable[[V], T]) -> T | None: ... - -class ImmutableTypeConversionDict(ImmutableDictMixin[K, V], TypeConversionDict[K, V]): - def copy(self) -> TypeConversionDict[K, V]: ... - def __copy__(self) -> ImmutableTypeConversionDict[K, V]: ... - -class MultiDict(TypeConversionDict[K, V]): - def __init__( - self, - mapping: Mapping[K, Iterable[V] | V] | Iterable[tuple[K, V]] | None = None, - ) -> None: ... - def __getitem__(self, item: K) -> V: ... - def __setitem__(self, key: K, value: V) -> None: ... - def add(self, key: K, value: V) -> None: ... - @overload - def getlist(self, key: K) -> list[V]: ... - @overload - def getlist(self, key: K, type: Callable[[V], T] = ...) -> list[T]: ... 
- def setlist(self, key: K, new_list: Iterable[V]) -> None: ... - def setdefault(self, key: K, default: V | None = None) -> V: ... - def setlistdefault( - self, key: K, default_list: Iterable[V] | None = None - ) -> list[V]: ... - def items(self, multi: bool = False) -> Iterator[tuple[K, V]]: ... # type: ignore - def lists(self) -> Iterator[tuple[K, list[V]]]: ... - def values(self) -> Iterator[V]: ... # type: ignore - def listvalues(self) -> Iterator[list[V]]: ... - def copy(self) -> MultiDict[K, V]: ... - def deepcopy(self, memo: Any = None) -> MultiDict[K, V]: ... - @overload - def to_dict(self) -> dict[K, V]: ... - @overload - def to_dict(self, flat: Literal[False]) -> dict[K, list[V]]: ... - def update( # type: ignore - self, mapping: Mapping[K, Iterable[V] | V] | Iterable[tuple[K, V]] - ) -> None: ... - @overload - def pop(self, key: K) -> V: ... - @overload - def pop(self, key: K, default: V | T = ...) -> V | T: ... - def popitem(self) -> tuple[K, V]: ... - def poplist(self, key: K) -> list[V]: ... - def popitemlist(self) -> tuple[K, list[V]]: ... - def __copy__(self) -> MultiDict[K, V]: ... - def __deepcopy__(self, memo: Any) -> MultiDict[K, V]: ... - -class _omd_bucket(Generic[K, V]): - prev: _omd_bucket[K, V] | None - next: _omd_bucket[K, V] | None - key: K - value: V - def __init__(self, omd: OrderedMultiDict[K, V], key: K, value: V) -> None: ... - def unlink(self, omd: OrderedMultiDict[K, V]) -> None: ... - -class OrderedMultiDict(MultiDict[K, V]): - _first_bucket: _omd_bucket[K, V] | None - _last_bucket: _omd_bucket[K, V] | None - def __init__(self, mapping: Mapping[K, V] | None = None) -> None: ... - def __eq__(self, other: object) -> bool: ... - def __getitem__(self, key: K) -> V: ... - def __setitem__(self, key: K, value: V) -> None: ... - def __delitem__(self, key: K) -> None: ... - def keys(self) -> Iterator[K]: ... # type: ignore - def __iter__(self) -> Iterator[K]: ... - def values(self) -> Iterator[V]: ... # type: ignore - def items(self, multi: bool = False) -> Iterator[tuple[K, V]]: ... # type: ignore - def lists(self) -> Iterator[tuple[K, list[V]]]: ... - def listvalues(self) -> Iterator[list[V]]: ... - def add(self, key: K, value: V) -> None: ... - @overload - def getlist(self, key: K) -> list[V]: ... - @overload - def getlist(self, key: K, type: Callable[[V], T] = ...) -> list[T]: ... - def setlist(self, key: K, new_list: Iterable[V]) -> None: ... - def setlistdefault( - self, key: K, default_list: Iterable[V] | None = None - ) -> list[V]: ... - def update( # type: ignore - self, mapping: Mapping[K, V] | Iterable[tuple[K, V]] - ) -> None: ... - def poplist(self, key: K) -> list[V]: ... - @overload - def pop(self, key: K) -> V: ... - @overload - def pop(self, key: K, default: V | T = ...) -> V | T: ... - def popitem(self) -> tuple[K, V]: ... - def popitemlist(self) -> tuple[K, list[V]]: ... - -class CombinedMultiDict(ImmutableMultiDictMixin[K, V], MultiDict[K, V]): # type: ignore - dicts: list[MultiDict[K, V]] - def __init__(self, dicts: Iterable[MultiDict[K, V]] | None) -> None: ... - @classmethod - def fromkeys(cls, keys: Any, value: Any = None) -> NoReturn: ... - def __getitem__(self, key: K) -> V: ... - @overload # type: ignore - def get(self, key: K) -> V | None: ... - @overload - def get(self, key: K, default: V | T = ...) -> V | T: ... - @overload - def get( - self, key: K, default: T | None = None, type: Callable[[V], T] = ... - ) -> T | None: ... - @overload - def getlist(self, key: K) -> list[V]: ... 
- @overload - def getlist(self, key: K, type: Callable[[V], T] = ...) -> list[T]: ... - def _keys_impl(self) -> set[K]: ... - def keys(self) -> set[K]: ... # type: ignore - def __iter__(self) -> set[K]: ... # type: ignore - def items(self, multi: bool = False) -> Iterator[tuple[K, V]]: ... # type: ignore - def values(self) -> Iterator[V]: ... # type: ignore - def lists(self) -> Iterator[tuple[K, list[V]]]: ... - def listvalues(self) -> Iterator[list[V]]: ... - def copy(self) -> MultiDict[K, V]: ... - @overload - def to_dict(self) -> dict[K, V]: ... - @overload - def to_dict(self, flat: Literal[False]) -> dict[K, list[V]]: ... - def __contains__(self, key: K) -> bool: ... # type: ignore - def has_key(self, key: K) -> bool: ... - -class ImmutableDict(ImmutableDictMixin[K, V], dict[K, V]): - def copy(self) -> dict[K, V]: ... - def __copy__(self) -> ImmutableDict[K, V]: ... - -class ImmutableMultiDict( # type: ignore - ImmutableMultiDictMixin[K, V], MultiDict[K, V] -): - def copy(self) -> MultiDict[K, V]: ... - def __copy__(self) -> ImmutableMultiDict[K, V]: ... - -class ImmutableOrderedMultiDict( # type: ignore - ImmutableMultiDictMixin[K, V], OrderedMultiDict[K, V] -): - def _iter_hashitems(self) -> Iterator[tuple[int, tuple[K, V]]]: ... - def copy(self) -> OrderedMultiDict[K, V]: ... - def __copy__(self) -> ImmutableOrderedMultiDict[K, V]: ... - -class CallbackDict(UpdateDictMixin[K, V], dict[K, V]): - def __init__( - self, - initial: Mapping[K, V] | Iterable[tuple[K, V]] | None = None, - on_update: Callable[[_CD], None] | None = None, - ) -> None: ... - -class HeaderSet(set[str]): - _headers: list[str] - _set: set[str] - on_update: Callable[[HeaderSet], None] | None - def __init__( - self, - headers: Iterable[str] | None = None, - on_update: Callable[[HeaderSet], None] | None = None, - ) -> None: ... - def add(self, header: str) -> None: ... - def remove(self, header: str) -> None: ... - def update(self, iterable: Iterable[str]) -> None: ... # type: ignore - def discard(self, header: str) -> None: ... - def find(self, header: str) -> int: ... - def index(self, header: str) -> int: ... - def clear(self) -> None: ... - def as_set(self, preserve_casing: bool = False) -> set[str]: ... - def to_header(self) -> str: ... - def __getitem__(self, idx: int) -> str: ... - def __delitem__(self, idx: int) -> None: ... - def __setitem__(self, idx: int, value: str) -> None: ... - def __contains__(self, header: str) -> bool: ... # type: ignore - def __len__(self) -> int: ... - def __iter__(self) -> Iterator[str]: ... diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py index 02af2c15d..ddb621032 100644 --- a/src/werkzeug/exceptions.py +++ b/src/werkzeug/exceptions.py @@ -197,7 +197,7 @@ class BadRequestKeyError(BadRequest, KeyError): #: useful in a debug mode. show_exception = False - def __init__(self, arg: str | None = None, *args: t.Any, **kwargs: t.Any): + def __init__(self, arg: object | None = None, *args: t.Any, **kwargs: t.Any): super().__init__(*args, **kwargs) if arg is None: diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py index 56fc839f9..f1dbb850a 100644 --- a/src/werkzeug/http.py +++ b/src/werkzeug/http.py @@ -914,6 +914,10 @@ def quote_etag(etag: str, weak: bool = False) -> str: return etag +@t.overload +def unquote_etag(etag: str) -> tuple[str, bool]: ... +@t.overload +def unquote_etag(etag: None) -> tuple[None, None]: ... 
def unquote_etag( etag: str | None, ) -> tuple[str, bool] | tuple[None, None]: diff --git a/src/werkzeug/sansio/http.py b/src/werkzeug/sansio/http.py index b2b887799..f02d7fd54 100644 --- a/src/werkzeug/sansio/http.py +++ b/src/werkzeug/sansio/http.py @@ -72,7 +72,6 @@ def is_resource_modified( if etag: etag, _ = unquote_etag(etag) - etag = t.cast(str, etag) if if_range is not None and if_range.etag is not None: unmodified = parse_etags(if_range.etag).contains(etag) diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py index 1dfb9320e..5c3c60883 100644 --- a/src/werkzeug/test.py +++ b/src/werkzeug/test.py @@ -656,7 +656,7 @@ def close(self) -> None: try: files = self.files.values() except AttributeError: - files = () # type: ignore + files = () for f in files: try: f.close() @@ -1431,7 +1431,7 @@ def _to_request_header(self) -> str: def _from_response_header(cls, server_name: str, path: str, header: str) -> te.Self: header, _, parameters_str = header.partition(";") key, _, value = header.partition("=") - decoded_key, decoded_value = next(parse_cookie(header).items()) + decoded_key, decoded_value = next(parse_cookie(header).items()) # type: ignore[call-overload] params = {} for item in parameters_str.split(";"): diff --git a/src/werkzeug/utils.py b/src/werkzeug/utils.py index b68ac18ca..59b97b732 100644 --- a/src/werkzeug/utils.py +++ b/src/werkzeug/utils.py @@ -497,7 +497,7 @@ def send_file( elif mtime is not None: rv.last_modified = mtime # type: ignore - rv.cache_control.no_cache = True # type: ignore[assignment] + rv.cache_control.no_cache = True # Flask will pass app.get_send_file_max_age, allowing its send_file # wrapper to not have to deal with paths. From 3720e3e84d7de619356e0b5fddb8b342d81d975d Mon Sep 17 00:00:00 2001 From: David Lord Date: Sun, 27 Oct 2024 18:39:40 -0700 Subject: [PATCH 141/159] deprecate OrderedMultiDict --- CHANGES.rst | 2 + docs/datastructures.rst | 29 ++++++++++-- src/werkzeug/datastructures/__init__.py | 34 +++++++++++++- src/werkzeug/datastructures/structures.py | 56 +++++++++++++++++++---- src/werkzeug/sansio/request.py | 6 +-- tests/test_datastructures.py | 8 +++- tests/test_wrappers.py | 7 +-- 7 files changed, 120 insertions(+), 22 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 2976f889a..c73bcaa89 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,8 @@ Unreleased - ``Request.max_form_memory_size`` defaults to 500kB instead of unlimited. Non-file form fields over this size will cause a ``RequestEntityTooLarge`` error. :issue:`2964` +- ``OrderedMultiDict`` and ``ImmutableOrderedMultiDict`` are deprecated. + Use ``MultiDict`` and ``ImmutableMultiDict`` instead. :issue:`2968` - Support Cookie CHIPS (Partitioned Cookies). :issue:`2797` - ``CacheControl.no_transform`` is a boolean when present. ``min_fresh`` is ``None`` when not present. Added the ``must_understand`` attribute. Fixed diff --git a/docs/datastructures.rst b/docs/datastructures.rst index 92c969932..bd2c0d223 100644 --- a/docs/datastructures.rst +++ b/docs/datastructures.rst @@ -27,10 +27,33 @@ General Purpose :members: :inherited-members: -.. autoclass:: OrderedMultiDict +.. class:: OrderedMultiDict -.. autoclass:: ImmutableMultiDict - :members: copy + Works like a regular :class:`MultiDict` but preserves the + order of the fields. To convert the ordered multi dict into a + list you can use the :meth:`items` method and pass it ``multi=True``. + + In general an :class:`OrderedMultiDict` is an order of magnitude + slower than a :class:`MultiDict`. + + .. 
admonition:: note + + Due to a limitation in Python you cannot convert an ordered + multi dict into a regular dict by using ``dict(multidict)``. + Instead you have to use the :meth:`to_dict` method, otherwise + the internal bucket objects are exposed. + + .. deprecated:: 3.1 + Will be removed in Werkzeug 3.2. Use ``MultiDict`` instead. + +.. class:: ImmutableMultiDict + + An immutable :class:`OrderedMultiDict`. + + .. deprecated:: 3.1 + Will be removed in Werkzeug 3.2. Use ``ImmutableMultiDict`` instead. + + .. versionadded:: 0.6 .. autoclass:: ImmutableOrderedMultiDict :members: copy diff --git a/src/werkzeug/datastructures/__init__.py b/src/werkzeug/datastructures/__init__.py index 846ffce67..6582de02c 100644 --- a/src/werkzeug/datastructures/__init__.py +++ b/src/werkzeug/datastructures/__init__.py @@ -1,3 +1,7 @@ +from __future__ import annotations + +import typing as t + from .accept import Accept as Accept from .accept import CharsetAccept as CharsetAccept from .accept import LanguageAccept as LanguageAccept @@ -26,9 +30,35 @@ from .structures import ImmutableDict as ImmutableDict from .structures import ImmutableList as ImmutableList from .structures import ImmutableMultiDict as ImmutableMultiDict -from .structures import ImmutableOrderedMultiDict as ImmutableOrderedMultiDict from .structures import ImmutableTypeConversionDict as ImmutableTypeConversionDict from .structures import iter_multi_items as iter_multi_items from .structures import MultiDict as MultiDict -from .structures import OrderedMultiDict as OrderedMultiDict from .structures import TypeConversionDict as TypeConversionDict + + +def __getattr__(name: str) -> t.Any: + import warnings + + if name == "OrderedMultiDict": + from .structures import _OrderedMultiDict + + warnings.warn( + "'OrderedMultiDict' is deprecated and will be removed in Werkzeug" + " 3.2. Use 'MultiDict' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _OrderedMultiDict + + if name == "ImmutableOrderedMultiDict": + from .structures import _ImmutableOrderedMultiDict + + warnings.warn( + "'OrderedMultiDict' is deprecated and will be removed in Werkzeug" + " 3.2. Use 'ImmutableMultiDict' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _ImmutableOrderedMultiDict + + raise AttributeError(name) diff --git a/src/werkzeug/datastructures/structures.py b/src/werkzeug/datastructures/structures.py index eb54599d5..34863ccc3 100644 --- a/src/werkzeug/datastructures/structures.py +++ b/src/werkzeug/datastructures/structures.py @@ -522,7 +522,7 @@ class _omd_bucket(t.Generic[K, V]): __slots__ = ("prev", "key", "value", "next") - def __init__(self, omd: OrderedMultiDict[K, V], key: K, value: V) -> None: + def __init__(self, omd: _OrderedMultiDict[K, V], key: K, value: V) -> None: self.prev: _omd_bucket[K, V] | None = omd._last_bucket self.key: K = key self.value: V = value @@ -534,7 +534,7 @@ def __init__(self, omd: OrderedMultiDict[K, V], key: K, value: V) -> None: omd._last_bucket.next = self omd._last_bucket = self - def unlink(self, omd: OrderedMultiDict[K, V]) -> None: + def unlink(self, omd: _OrderedMultiDict[K, V]) -> None: if self.prev: self.prev.next = self.next if self.next: @@ -545,7 +545,7 @@ def unlink(self, omd: OrderedMultiDict[K, V]) -> None: omd._last_bucket = self.prev -class OrderedMultiDict(MultiDict[K, V]): +class _OrderedMultiDict(MultiDict[K, V]): """Works like a regular :class:`MultiDict` but preserves the order of the fields. 
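A rough sketch of how the module-level ``__getattr__`` shim above behaves for callers, assuming a build with this deprecation in place::

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # The name is no longer re-exported directly, so attribute access
        # falls through to __getattr__ and emits the DeprecationWarning.
        from werkzeug.datastructures import OrderedMultiDict  # noqa: F401

    assert any(issubclass(w.category, DeprecationWarning) for w in caught)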
To convert the ordered multi dict into a list you can use the :meth:`items` method and pass it ``multi=True``. @@ -559,6 +559,9 @@ class OrderedMultiDict(MultiDict[K, V]): multi dict into a regular dict by using ``dict(multidict)``. Instead you have to use the :meth:`to_dict` method, otherwise the internal bucket objects are exposed. + + .. deprecated:: 3.1 + Will be removed in Werkzeug 3.2. Use ``MultiDict`` instead. """ def __init__( @@ -570,6 +573,14 @@ def __init__( | None ) = None, ) -> None: + import warnings + + warnings.warn( + "'OrderedMultiDict' is deprecated and will be removed in Werkzeug" + " 3.2. Use 'MultiDict' instead.", + DeprecationWarning, + stacklevel=2, + ) super().__init__() self._first_bucket: _omd_bucket[K, V] | None = None self._last_bucket: _omd_bucket[K, V] | None = None @@ -579,7 +590,7 @@ def __init__( def __eq__(self, other: object) -> bool: if not isinstance(other, MultiDict): return NotImplemented - if isinstance(other, OrderedMultiDict): + if isinstance(other, _OrderedMultiDict): iter1 = iter(self.items(multi=True)) iter2 = iter(other.items(multi=True)) try: @@ -944,9 +955,14 @@ def __copy__(self) -> te.Self: return self -class ImmutableOrderedMultiDict(ImmutableMultiDictMixin[K, V], OrderedMultiDict[K, V]): # type: ignore[misc] +class _ImmutableOrderedMultiDict( # type: ignore[misc] + ImmutableMultiDictMixin[K, V], _OrderedMultiDict[K, V] +): """An immutable :class:`OrderedMultiDict`. + .. deprecated:: 3.1 + Will be removed in Werkzeug 3.2. Use ``ImmutableMultiDict`` instead. + .. versionadded:: 0.6 """ @@ -963,17 +979,17 @@ def __init__( if mapping is not None: for k, v in iter_multi_items(mapping): - OrderedMultiDict.add(self, k, v) + _OrderedMultiDict.add(self, k, v) def _iter_hashitems(self) -> cabc.Iterable[t.Any]: return enumerate(self.items(multi=True)) - def copy(self) -> OrderedMultiDict[K, V]: # type: ignore[override] + def copy(self) -> _OrderedMultiDict[K, V]: # type: ignore[override] """Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ - return OrderedMultiDict(self) + return _OrderedMultiDict(self) def __copy__(self) -> te.Self: return self @@ -1157,3 +1173,27 @@ def __repr__(self) -> str: # circular dependencies from .. import http + + +def __getattr__(name: str) -> t.Any: + import warnings + + if name == "OrderedMultiDict": + warnings.warn( + "'OrderedMultiDict' is deprecated and will be removed in Werkzeug" + " 3.2. Use 'MultiDict' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _OrderedMultiDict + + if name == "ImmutableOrderedMultiDict": + warnings.warn( + "'ImmutableOrderedMultiDict' is deprecated and will be removed in" + " Werkzeug 3.2. Use 'ImmutableMultiDict' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _ImmutableOrderedMultiDict + + raise AttributeError(name) diff --git a/src/werkzeug/sansio/request.py b/src/werkzeug/sansio/request.py index dd0805d71..8d5fbd8f8 100644 --- a/src/werkzeug/sansio/request.py +++ b/src/werkzeug/sansio/request.py @@ -67,10 +67,8 @@ class Request: #: the class to use for `args` and `form`. The default is an #: :class:`~werkzeug.datastructures.ImmutableMultiDict` which supports - #: multiple values per key. 
alternatively it makes sense to use an - #: :class:`~werkzeug.datastructures.ImmutableOrderedMultiDict` which - #: preserves order or a :class:`~werkzeug.datastructures.ImmutableDict` - #: which is the fastest but only remembers the last key. It is also + #: multiple values per key. A :class:`~werkzeug.datastructures.ImmutableDict` + #: is faster but only remembers the last key. It is also #: possible to use mutable structures, but this is not recommended. #: #: .. versionadded:: 0.6 diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 76de1d849..9d11d2aab 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -9,6 +9,8 @@ from werkzeug import datastructures as ds from werkzeug import http +from werkzeug.datastructures.structures import _ImmutableOrderedMultiDict +from werkzeug.datastructures.structures import _OrderedMultiDict from werkzeug.exceptions import BadRequestKeyError @@ -333,8 +335,9 @@ class TestImmutableDict(_ImmutableDictTests): storage_class = ds.ImmutableDict +@pytest.mark.filterwarnings("ignore:'OrderedMultiDict':DeprecationWarning") class TestImmutableOrderedMultiDict(_ImmutableDictTests): - storage_class = ds.ImmutableOrderedMultiDict + storage_class = _ImmutableOrderedMultiDict def test_ordered_multidict_is_hashable(self): a = self.storage_class([("a", 1), ("b", 1), ("a", 2)]) @@ -412,8 +415,9 @@ def test_getitem_raise_badrequestkeyerror_for_empty_list_value(self): md["empty"] +@pytest.mark.filterwarnings("ignore:'OrderedMultiDict':DeprecationWarning") class TestOrderedMultiDict(_MutableMultiDictTests): - storage_class = ds.OrderedMultiDict + storage_class = _OrderedMultiDict def test_ordered_interface(self): cls = self.storage_class diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py index f75694459..8bc063c74 100644 --- a/tests/test_wrappers.py +++ b/tests/test_wrappers.py @@ -16,11 +16,11 @@ from werkzeug.datastructures import Headers from werkzeug.datastructures import ImmutableList from werkzeug.datastructures import ImmutableMultiDict -from werkzeug.datastructures import ImmutableOrderedMultiDict from werkzeug.datastructures import LanguageAccept from werkzeug.datastructures import MIMEAccept from werkzeug.datastructures import MultiDict from werkzeug.datastructures import WWWAuthenticate +from werkzeug.datastructures.structures import _ImmutableOrderedMultiDict from werkzeug.exceptions import BadRequest from werkzeug.exceptions import RequestedRangeNotSatisfiable from werkzeug.exceptions import SecurityError @@ -998,9 +998,10 @@ def generate_items(): assert resp.response == ["foo", "bar", "baz"] +@pytest.mark.filterwarnings("ignore:'OrderedMultiDict':DeprecationWarning") def test_form_data_ordering(): class MyRequest(wrappers.Request): - parameter_storage_class = ImmutableOrderedMultiDict + parameter_storage_class = _ImmutableOrderedMultiDict req = MyRequest.from_values("/?foo=1&bar=0&foo=3") assert list(req.args) == ["foo", "bar"] @@ -1009,7 +1010,7 @@ class MyRequest(wrappers.Request): ("bar", "0"), ("foo", "3"), ] - assert isinstance(req.args, ImmutableOrderedMultiDict) + assert isinstance(req.args, _ImmutableOrderedMultiDict) assert isinstance(req.values, CombinedMultiDict) assert req.values["foo"] == "1" assert req.values.getlist("foo") == ["1", "3"] From e9f87fdcdacb84c0647742b3c156b896982ee5b2 Mon Sep 17 00:00:00 2001 From: David Lord Date: Tue, 29 Oct 2024 17:04:30 -0700 Subject: [PATCH 142/159] fix formatting --- CHANGES.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git 
a/CHANGES.rst b/CHANGES.rst index c73bcaa89..7dc7f0bd9 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -12,14 +12,15 @@ Unreleased error. :issue:`2964` - ``OrderedMultiDict`` and ``ImmutableOrderedMultiDict`` are deprecated. Use ``MultiDict`` and ``ImmutableMultiDict`` instead. :issue:`2968` -- Support Cookie CHIPS (Partitioned Cookies). :issue:`2797` +- Support Cookie CHIPS (Partitioned Cookies). :issue:`2797` - ``CacheControl.no_transform`` is a boolean when present. ``min_fresh`` is ``None`` when not present. Added the ``must_understand`` attribute. Fixed some typing issues on cache control. :issue:`2881` - Add ``stale_while_revalidate`` and ``stale_if_error`` properties to ``ResponseCacheControl``. :issue:`2948` - Add 421 ``MisdirectedRequest`` HTTP exception. :issue:`2850` -- Increase default work factor for PBKDF2 to 1,000,000 iterations. :issue:`2969` +- Increase default work factor for PBKDF2 to 1,000,000 iterations. + :issue:`2969` - Inline annotations for ``datastructures``, removing stub files. :issue:`2970` From e550ef8b313324d2efc0b9385be4c8c5739df36b Mon Sep 17 00:00:00 2001 From: David Lord Date: Tue, 29 Oct 2024 17:42:31 -0700 Subject: [PATCH 143/159] getlist catches TypeError --- CHANGES.rst | 3 ++- src/werkzeug/datastructures/structures.py | 14 ++++++++------ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 7dc7f0bd9..c76cce4c5 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -23,7 +23,8 @@ Unreleased :issue:`2969` - Inline annotations for ``datastructures``, removing stub files. :issue:`2970` - +- ``MultiDict.getlist`` catches ``TypeError`` in addition to ``ValueError`` + when doing type conversion. :issue:`2976` Version 3.0.6 diff --git a/src/werkzeug/datastructures/structures.py b/src/werkzeug/datastructures/structures.py index 34863ccc3..a48d504e4 100644 --- a/src/werkzeug/datastructures/structures.py +++ b/src/werkzeug/datastructures/structures.py @@ -260,10 +260,12 @@ def getlist(self, key: K, type: type[T] | None = None) -> list[V] | list[T]: with the callable defined there. :param key: The key to be looked up. - :param type: A callable that is used to cast the value in the - :class:`MultiDict`. If a :exc:`ValueError` is raised - by this callable the value will be removed from the list. + :param type: Callable to convert each value. If a ``ValueError`` or + ``TypeError`` is raised, the value is omitted. :return: a :class:`list` of all the values for the key. + + .. versionchanged:: 3.1 + Catches ``TypeError`` in addition to ``ValueError``. 
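A short sketch of the behavior this hunk documents, assuming a build that includes the change (older releases let the ``TypeError`` propagate)::

    from werkzeug.datastructures import MultiDict

    md = MultiDict([("n", "10"), ("n", None), ("n", "3")])

    # int(None) raises TypeError rather than ValueError; with this change
    # the offending value is skipped instead of aborting the whole call.
    assert md.getlist("n", type=int) == [10, 3]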
""" try: rv: list[V] = super().__getitem__(key) # type: ignore[assignment] @@ -275,7 +277,7 @@ def getlist(self, key: K, type: type[T] | None = None) -> list[V] | list[T]: for item in rv: try: result.append(type(item)) # type: ignore[call-arg] - except ValueError: + except (ValueError, TypeError): pass return result @@ -694,7 +696,7 @@ def getlist(self, key: K, type: type[T] | None = None) -> list[V] | list[T]: for item in rv: try: result.append(type(item.value)) # type: ignore[call-arg] - except ValueError: + except (ValueError, TypeError): pass return result @@ -836,7 +838,7 @@ def get( # type: ignore[misc] if type is not None: try: return type(d[key]) # type: ignore[call-arg] - except ValueError: + except (ValueError, TypeError): continue return d[key] return default From b65b587aa12ec40df1eecb4c6aadaf8de8931187 Mon Sep 17 00:00:00 2001 From: David Lord Date: Tue, 29 Oct 2024 19:20:33 -0700 Subject: [PATCH 144/159] implement or and ior operators --- CHANGES.rst | 2 + src/werkzeug/datastructures/headers.py | 31 +++++++++++++++ src/werkzeug/datastructures/mixins.py | 21 ++++++++++ src/werkzeug/datastructures/structures.py | 25 ++++++++++++ tests/test_datastructures.py | 48 ++++++++++++++++++++++- 5 files changed, 126 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index c76cce4c5..0a0616d57 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -25,6 +25,8 @@ Unreleased :issue:`2970` - ``MultiDict.getlist`` catches ``TypeError`` in addition to ``ValueError`` when doing type conversion. :issue:`2976` +- Implement ``|`` and ``|=`` operators for ``MultiDict``, ``Headers``, and + ``CallbackDict``, and disallow ``|=`` on immutable types. :issue:`2977` Version 3.0.6 diff --git a/src/werkzeug/datastructures/headers.py b/src/werkzeug/datastructures/headers.py index a23a0e0b1..db53cda7b 100644 --- a/src/werkzeug/datastructures/headers.py +++ b/src/werkzeug/datastructures/headers.py @@ -41,6 +41,9 @@ class Headers(cabc.MutableMapping[str, str]): :param defaults: The list of default values for the :class:`Headers`. + .. versionchanged:: 3.1 + Implement ``|`` and ``|=`` operators. + .. versionchanged:: 2.1.0 Default values are validated the same as values added later. @@ -524,6 +527,31 @@ def update( # type: ignore[override] else: self.set(key, value) + def __or__( + self, other: cabc.Mapping[str, t.Any | cabc.Collection[t.Any]] + ) -> te.Self: + if not isinstance(other, cabc.Mapping): + return NotImplemented + + rv = self.copy() + rv.update(other) + return rv + + def __ior__( + self, + other: ( + cabc.Mapping[str, t.Any | cabc.Collection[t.Any]] + | cabc.Iterable[tuple[str, t.Any]] + ), + ) -> te.Self: + if not isinstance(other, (cabc.Mapping, cabc.Iterable)) or isinstance( + other, str + ): + return NotImplemented + + self.update(other) + return self + def to_wsgi_list(self) -> list[tuple[str, str]]: """Convert the headers into a list suitable for WSGI. @@ -620,6 +648,9 @@ def __iter__(self) -> cabc.Iterator[tuple[str, str]]: # type: ignore[override] def copy(self) -> t.NoReturn: raise TypeError(f"cannot create {type(self).__name__!r} copies") + def __or__(self, other: t.Any) -> t.NoReturn: + raise TypeError(f"cannot create {type(self).__name__!r} copies") + # circular dependencies from .. 
import http diff --git a/src/werkzeug/datastructures/mixins.py b/src/werkzeug/datastructures/mixins.py index 76324d5a2..03d461ad8 100644 --- a/src/werkzeug/datastructures/mixins.py +++ b/src/werkzeug/datastructures/mixins.py @@ -76,6 +76,9 @@ def sort(self, key: t.Any = None, reverse: t.Any = False) -> t.NoReturn: class ImmutableDictMixin(t.Generic[K, V]): """Makes a :class:`dict` immutable. + .. versionchanged:: 3.1 + Disallow ``|=`` operator. + .. versionadded:: 0.5 :private: @@ -117,6 +120,9 @@ def setdefault(self, key: t.Any, default: t.Any = None) -> t.NoReturn: def update(self, arg: t.Any, /, **kwargs: t.Any) -> t.NoReturn: _immutable_error(self) + def __ior__(self, other: t.Any) -> t.NoReturn: + _immutable_error(self) + def pop(self, key: t.Any, default: t.Any = None) -> t.NoReturn: _immutable_error(self) @@ -168,6 +174,9 @@ class ImmutableHeadersMixin: hashable though since the only usecase for this datastructure in Werkzeug is a view on a mutable structure. + .. versionchanged:: 3.1 + Disallow ``|=`` operator. + .. versionadded:: 0.5 :private: @@ -200,6 +209,9 @@ def extend(self, arg: t.Any, /, **kwargs: t.Any) -> t.NoReturn: def update(self, arg: t.Any, /, **kwargs: t.Any) -> t.NoReturn: _immutable_error(self) + def __ior__(self, other: t.Any) -> t.NoReturn: + _immutable_error(self) + def insert(self, pos: t.Any, value: t.Any) -> t.NoReturn: _immutable_error(self) @@ -233,6 +245,9 @@ def wrapper( class UpdateDictMixin(dict[K, V]): """Makes dicts call `self.on_update` on modifications. + .. versionchanged:: 3.1 + Implement ``|=`` operator. + .. versionadded:: 0.5 :private: @@ -294,3 +309,9 @@ def update( # type: ignore[override] super().update(**kwargs) else: super().update(arg, **kwargs) + + @_always_update + def __ior__( # type: ignore[override] + self, other: cabc.Mapping[K, V] | cabc.Iterable[tuple[K, V]] + ) -> te.Self: + return super().__ior__(other) diff --git a/src/werkzeug/datastructures/structures.py b/src/werkzeug/datastructures/structures.py index a48d504e4..db2f99800 100644 --- a/src/werkzeug/datastructures/structures.py +++ b/src/werkzeug/datastructures/structures.py @@ -170,6 +170,9 @@ class MultiDict(TypeConversionDict[K, V]): :param mapping: the initial value for the :class:`MultiDict`. Either a regular dict, an iterable of ``(key, value)`` tuples or `None`. + + .. versionchanged:: 3.1 + Implement ``|`` and ``|=`` operators. """ def __init__( @@ -435,6 +438,28 @@ def update( # type: ignore[override] for key, value in iter_multi_items(mapping): self.add(key, value) + def __or__( # type: ignore[override] + self, other: cabc.Mapping[K, V | cabc.Collection[V]] + ) -> MultiDict[K, V]: + if not isinstance(other, cabc.Mapping): + return NotImplemented + + rv = self.copy() + rv.update(other) + return rv + + def __ior__( # type: ignore[override] + self, + other: cabc.Mapping[K, V | cabc.Collection[V]] | cabc.Iterable[tuple[K, V]], + ) -> te.Self: + if not isinstance(other, (cabc.Mapping, cabc.Iterable)) or isinstance( + other, str + ): + return NotImplemented + + self.update(other) + return self + @t.overload def pop(self, key: K) -> V: ... 
@t.overload diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 9d11d2aab..76a5530fc 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -258,6 +258,17 @@ def test_basic_interface(self): md.setlist("foo", [1, 2]) assert md.getlist("foo") == [1, 2] + def test_or(self) -> None: + a = self.storage_class({"x": 1}) + b = a | {"y": 2} + assert isinstance(b, self.storage_class) + assert "x" in b and "y" in b + + def test_ior(self) -> None: + a = self.storage_class({"x": 1}) + a |= {"y": 2} + assert "x" in a and "y" in a + class _ImmutableDictTests: storage_class: type[dict] @@ -305,6 +316,17 @@ def test_dict_is_hashable(self): assert immutable in x assert immutable2 in x + def test_or(self) -> None: + a = self.storage_class({"x": 1}) + b = a | {"y": 2} + assert "x" in b and "y" in b + + def test_ior(self) -> None: + a = self.storage_class({"x": 1}) + + with pytest.raises(TypeError): + a |= {"y": 2} + class TestImmutableTypeConversionDict(_ImmutableDictTests): storage_class = ds.ImmutableTypeConversionDict @@ -799,6 +821,17 @@ def test_equality(self): assert h1 == h2 + def test_or(self) -> None: + a = ds.Headers({"x": 1}) + b = a | {"y": 2} + assert isinstance(b, ds.Headers) + assert "x" in b and "y" in b + + def test_ior(self) -> None: + a = ds.Headers({"x": 1}) + a |= {"y": 2} + assert "x" in a and "y" in a + class TestEnvironHeaders: storage_class = ds.EnvironHeaders @@ -840,6 +873,18 @@ def test_return_type_is_str(self): assert headers["Foo"] == "\xe2\x9c\x93" assert next(iter(headers)) == ("Foo", "\xe2\x9c\x93") + def test_or(self) -> None: + headers = ds.EnvironHeaders({"x": "1"}) + + with pytest.raises(TypeError): + headers | {"y": "2"} + + def test_ior(self) -> None: + headers = ds.EnvironHeaders({}) + + with pytest.raises(TypeError): + headers |= {"y": "2"} + class TestHeaderSet: storage_class = ds.HeaderSet @@ -927,7 +972,7 @@ def test_callback_dict_writes(self): assert_calls, func = make_call_asserter() initial = {"a": "foo", "b": "bar"} dct = self.storage_class(initial=initial, on_update=func) - with assert_calls(8, "callback not triggered by write method"): + with assert_calls(9, "callback not triggered by write method"): # always-write methods dct["z"] = 123 dct["z"] = 123 # must trigger again @@ -937,6 +982,7 @@ def test_callback_dict_writes(self): dct.popitem() dct.update([]) dct.clear() + dct |= {} with assert_calls(0, "callback triggered by failed del"): pytest.raises(KeyError, lambda: dct.__delitem__("x")) with assert_calls(0, "callback triggered by failed pop"): From e3a50c9ac130658e15bc8e8a4555a58379006cba Mon Sep 17 00:00:00 2001 From: David Lord Date: Wed, 30 Oct 2024 16:53:22 -0700 Subject: [PATCH 145/159] more cache-control cleanup --- CHANGES.rst | 31 ++- docs/datastructures.rst | 6 +- src/werkzeug/datastructures/cache_control.py | 189 +++++++++++++------ tests/test_datastructures.py | 2 +- tests/test_http.py | 10 +- 5 files changed, 167 insertions(+), 71 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 0a0616d57..4097365d4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -12,12 +12,33 @@ Unreleased error. :issue:`2964` - ``OrderedMultiDict`` and ``ImmutableOrderedMultiDict`` are deprecated. Use ``MultiDict`` and ``ImmutableMultiDict`` instead. :issue:`2968` +- Behavior of properties on ``request.cache_control`` and + ``response.cache_control`` has been significantly adjusted. + + - Dict values are always ``str | None``. Setting properties will convert + the value to a string. 
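Stepping back to the ``|`` and ``|=`` support added a few hunks above, a brief sketch of the intended usage (assuming a build with that patch; earlier releases raise ``TypeError`` for ``MultiDict | dict``)::

    from werkzeug.datastructures import Headers, ImmutableMultiDict, MultiDict

    md = MultiDict([("a", "1")])
    md |= {"b": "2"}                     # in-place merge keeps the MultiDict type
    merged = md | {"c": "3"}
    assert isinstance(merged, MultiDict)
    assert merged["b"] == "2" and merged["c"] == "3"

    h = Headers({"X-One": "1"}) | {"X-Two": "2"}
    assert h["X-Two"] == "2"

    imd = ImmutableMultiDict({"a": "1"})
    try:
        imd |= {"b": "2"}                # immutable types refuse in-place merging
    except TypeError:
        pass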
Setting a property to ``False`` is equivalent to + setting it to ``None``. Getting typed properties will return ``None`` if + conversion raises ``ValueError``, rather than the string. :issue:`2980` + - ``max_age`` is ``None`` if not present, rather than ``-1``. + :issue:`2980` + - ``no_cache`` is a boolean for requests, it is ``False`` instead of + ``"*"`` when not present. It remains a string for responses. + issue:`2980` + - ``max_stale`` is an int, it is ``None`` instead of ``"*"`` if it is + present with no value. ``max_stale_any`` is a boolean indicating if + the property is present regardless of if it has a value. :issue:`2980` + - ``no_transform`` is a boolean. Previously it was mistakenly always + ``None``. :issue:`2881` + - ``min_fresh`` is ``None`` if not present instead of ``"*"``. + :issue:`2881` + - ``private`` is a boolean, it is ``False`` instead of ``"*"`` when not + present. :issue:`2980` + - Added the ``must_understand`` property. :issue:`2881` + - Added the ``stale_while_revalidate``, and ``stale_if_error`` + properties. :issue:`2948` + - Type annotations more accurately reflect the values. :issue:`2881` + - Support Cookie CHIPS (Partitioned Cookies). :issue:`2797` -- ``CacheControl.no_transform`` is a boolean when present. ``min_fresh`` is - ``None`` when not present. Added the ``must_understand`` attribute. Fixed - some typing issues on cache control. :issue:`2881` -- Add ``stale_while_revalidate`` and ``stale_if_error`` properties to - ``ResponseCacheControl``. :issue:`2948` - Add 421 ``MisdirectedRequest`` HTTP exception. :issue:`2850` - Increase default work factor for PBKDF2 to 1,000,000 iterations. :issue:`2969` diff --git a/docs/datastructures.rst b/docs/datastructures.rst index bd2c0d223..e70252534 100644 --- a/docs/datastructures.rst +++ b/docs/datastructures.rst @@ -93,11 +93,13 @@ HTTP Related .. autoclass:: RequestCacheControl :members: - :inherited-members: + :inherited-members: ImmutableDictMixin, CallbackDict + :member-order: groupwise .. autoclass:: ResponseCacheControl :members: - :inherited-members: + :inherited-members: CallbackDict + :member-order: groupwise .. autoclass:: ETags :members: diff --git a/src/werkzeug/datastructures/cache_control.py b/src/werkzeug/datastructures/cache_control.py index e185944b3..2171c4d7a 100644 --- a/src/werkzeug/datastructures/cache_control.py +++ b/src/werkzeug/datastructures/cache_control.py @@ -2,27 +2,58 @@ import collections.abc as cabc import typing as t +from inspect import cleandoc from .mixins import ImmutableDictMixin from .structures import CallbackDict -def cache_control_property(key: str, empty: t.Any, type: type[t.Any] | None) -> t.Any: +def cache_control_property( + key: str, empty: t.Any, type: type[t.Any] | None, *, doc: str | None = None +) -> t.Any: """Return a new property object for a cache header. Useful if you want to add support for a cache extension in a subclass. + :param key: The attribute name present in the parsed cache-control header dict. + :param empty: The value to use if the key is present without a value. + :param type: The type to convert the string value to instead of a string. If + conversion raises a ``ValueError``, the returned value is ``None``. + :param doc: The docstring for the property. If not given, it is generated + based on the other params. + + .. versionchanged:: 3.1 + Added the ``doc`` param. + .. versionchanged:: 2.0 Renamed from ``cache_property``. 
""" + if doc is None: + parts = [f"The ``{key}`` attribute."] + + if type is bool: + parts.append("A ``bool``, either present or not.") + else: + if type is None: + parts.append("A ``str``,") + else: + parts.append(f"A ``{type.__name__}``,") + + if empty is not None: + parts.append(f"``{empty!r}`` if present with no value,") + + parts.append("or ``None`` if not present.") + + doc = " ".join(parts) + return property( lambda x: x._get_cache_value(key, empty, type), lambda x, v: x._set_cache_value(key, v, type), lambda x: x._del_cache_value(key), - f"accessor for {key!r}", + doc=cleandoc(doc), ) -class _CacheControl(CallbackDict[str, t.Any]): +class _CacheControl(CallbackDict[str, t.Optional[str]]): """Subclass of a dict that stores values for a Cache-Control header. It has accessors for all the cache-control directives specified in RFC 2616. The class does not differentiate between request and response directives. @@ -36,36 +67,25 @@ class _CacheControl(CallbackDict[str, t.Any]): that class. .. versionchanged:: 3.1 + Dict values are always ``str | None``. Setting properties will + convert the value to a string. Setting a non-bool property to + ``False`` is equivalent to setting it to ``None``. Getting typed + properties will return ``None`` if conversion raises + ``ValueError``, rather than the string. - ``no_transform`` is a boolean when present. - - .. versionchanged:: 2.1.0 + .. versionchanged:: 2.1 Setting int properties such as ``max_age`` will convert the value to an int. .. versionchanged:: 0.4 - - Setting `no_cache` or `private` to boolean `True` will set the implicit - none-value which is ``*``: - - >>> cc = ResponseCacheControl() - >>> cc.no_cache = True - >>> cc - - >>> cc.no_cache - '*' - >>> cc.no_cache = None - >>> cc - - - In versions before 0.5 the behavior documented here affected the now - no longer existing `CacheControl` class. + Setting ``no_cache`` or ``private`` to ``True`` will set the + implicit value ``"*"``. 
""" - no_cache: str | bool | None = cache_control_property("no-cache", "*", None) no_store: bool = cache_control_property("no-store", None, bool) - max_age: int | None = cache_control_property("max-age", -1, int) + max_age: int | None = cache_control_property("max-age", None, int) no_transform: bool = cache_control_property("no-transform", None, bool) + stale_if_error: int | None = cache_control_property("stale-if-error", None, int) def __init__( self, @@ -81,17 +101,20 @@ def _get_cache_value( """Used internally by the accessor properties.""" if type is bool: return key in self - if key in self: - value = self[key] - if value is None: - return empty - elif type is not None: - try: - value = type(value) - except ValueError: - pass - return value - return None + + if key not in self: + return None + + if (value := self[key]) is None: + return empty + + if type is not None: + try: + value = type(value) + except ValueError: + return None + + return value def _set_cache_value( self, key: str, value: t.Any, type: type[t.Any] | None @@ -102,16 +125,15 @@ def _set_cache_value( self[key] = None else: self.pop(key, None) + elif value is None or value is False: + self.pop(key, None) + elif value is True: + self[key] = None else: - if value is None: - self.pop(key, None) - elif value is True: - self[key] = None - else: - if type is not None: - self[key] = type(value) - else: - self[key] = value + if type is not None: + value = type(value) + + self[key] = str(value) def _del_cache_value(self, key: str) -> None: """Used internally by the accessor properties.""" @@ -132,7 +154,7 @@ def __repr__(self) -> str: cache_property = staticmethod(cache_control_property) -class RequestCacheControl(ImmutableDictMixin[str, t.Any], _CacheControl): # type: ignore[misc] +class RequestCacheControl(ImmutableDictMixin[str, t.Optional[str]], _CacheControl): # type: ignore[misc] """A cache control for requests. This is immutable and gives access to all the request-relevant cache control headers. @@ -142,21 +164,61 @@ class RequestCacheControl(ImmutableDictMixin[str, t.Any], _CacheControl): # typ for that class. .. versionchanged:: 3.1 - ``no_transform`` is a boolean when present. + Dict values are always ``str | None``. Setting properties will + convert the value to a string. Setting a non-bool property to + ``False`` is equivalent to setting it to ``None``. Getting typed + properties will return ``None`` if conversion raises + ``ValueError``, rather than the string. + + .. versionchanged:: 3.1 + ``max_age`` is ``None`` if not present, rather than ``-1``. + + .. versionchanged:: 3.1 + ``no_cache`` is a boolean, it is ``False`` instead of ``"*"`` + when not present. + + .. versionchanged:: 3.1 + ``max_stale`` is an int, it is ``None`` instead of ``"*"`` if it is + present with no value. ``max_stale_any`` is a boolean indicating if + the property is present regardless of if it has a value. .. versionchanged:: 3.1 - ``min_fresh`` is ``None`` if a value is not provided for the attribute. + ``no_transform`` is a boolean. Previously it was mistakenly + always ``None``. - .. versionchanged:: 2.1.0 + .. versionchanged:: 3.1 + ``min_fresh`` is ``None`` if not present instead of ``"*"``. + + .. versionchanged:: 2.1 Setting int properties such as ``max_age`` will convert the value to an int. .. versionadded:: 0.5 - In previous versions a `CacheControl` class existed that was used - both for request and response. + Response-only properties are not present on this request class. 
""" - max_stale: str | int | None = cache_control_property("max-stale", "*", int) + no_cache: bool = cache_control_property("no-cache", None, bool) + max_stale: int | None = cache_control_property( + "max-stale", + None, + int, + doc="""The ``max-stale`` attribute if it has a value. A ``int``, or + ``None`` if not present or no value. + + This attribute can also be present without a value. To check that, use + :attr:`max_stale_any`. + """, + ) + max_stale_any: bool = cache_control_property( + "max-stale", + None, + bool, + doc="""The ``max-stale`` attribute presence regardless of value. A + ``bool``, either present or not. + + To check the value of the attribute if present, use :attr:`max_stale`. + """, + ) min_fresh: int | None = cache_control_property("min-fresh", None, int) only_if_cached: bool = cache_control_property("only-if-cached", None, bool) @@ -172,26 +234,38 @@ class ResponseCacheControl(_CacheControl): for that class. .. versionchanged:: 3.1 - ``no_transform`` is a boolean when present. + Dict values are always ``str | None``. Setting properties will + convert the value to a string. Setting a non-bool property to + ``False`` is equivalent to setting it to ``None``. Getting typed + properties will return ``None`` if conversion raises + ``ValueError``, rather than the string. + + .. versionchanged:: 3.1 + ``private`` is a boolean, it is ``False`` instead of ``"*"`` + when not present. + + .. versionchanged:: 3.1 + ``no_transform`` is a boolean. Previously it was mistakenly always + ``None``. .. versionchanged:: 3.1 Added the ``must_understand``, ``stale_while_revalidate``, and - ``stale_if_error`` attributes. + ``stale_if_error`` properties. .. versionchanged:: 2.1.1 ``s_maxage`` converts the value to an int. - .. versionchanged:: 2.1.0 + .. versionchanged:: 2.1 Setting int properties such as ``max_age`` will convert the value to an int. .. versionadded:: 0.5 - In previous versions a `CacheControl` class existed that was used - both for request and response. + Request-only properties are not present on this response class. 
""" + no_cache: str | bool | None = cache_control_property("no-cache", "*", None) public: bool = cache_control_property("public", None, bool) - private: str | None = cache_control_property("private", "*", None) + private: bool = cache_control_property("private", None, bool) must_revalidate: bool = cache_control_property("must-revalidate", None, bool) proxy_revalidate: bool = cache_control_property("proxy-revalidate", None, bool) s_maxage: int | None = cache_control_property("s-maxage", None, int) @@ -200,7 +274,6 @@ class ResponseCacheControl(_CacheControl): stale_while_revalidate: int | None = cache_control_property( "stale-while-revalidate", None, int ) - stale_if_error: int | None = cache_control_property("stale-if-error", None, int) # circular dependencies diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 76a5530fc..51799722b 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -1000,7 +1000,7 @@ def test_set_none(self): cc.no_cache = None assert cc.no_cache is None cc.no_cache = False - assert cc.no_cache is False + assert cc.no_cache is None def test_no_transform(self): cc = ds.RequestCacheControl([("no-transform", None)]) diff --git a/tests/test_http.py b/tests/test_http.py index 9febd0f0c..5c45b3dde 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -121,22 +121,22 @@ def test_dict_header(self, value, expect): def test_cache_control_header(self): cc = http.parse_cache_control_header("max-age=0, no-cache") assert cc.max_age == 0 - assert cc.no_cache + assert cc.no_cache is True cc = http.parse_cache_control_header( 'private, community="UCI"', None, datastructures.ResponseCacheControl ) - assert cc.private + assert cc.private is True assert cc["community"] == "UCI" c = datastructures.ResponseCacheControl() assert c.no_cache is None - assert c.private is None + assert c.private is False c.no_cache = True assert c.no_cache == "*" c.private = True - assert c.private == "*" + assert c.private is True del c.private - assert c.private is None + assert c.private is False # max_age is an int, other types are converted c.max_age = 3.1 assert c.max_age == 3 From b0f361ce69038a55f7ca8cfb6262ddc5ffe1eeea Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 31 Oct 2024 09:38:01 -0700 Subject: [PATCH 146/159] more cache-control cleanup --- CHANGES.rst | 22 ++++---- src/werkzeug/datastructures/cache_control.py | 55 +++++++++----------- tests/test_http.py | 14 ++--- 3 files changed, 41 insertions(+), 50 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 4097365d4..26cd3d44c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -18,21 +18,19 @@ Unreleased - Dict values are always ``str | None``. Setting properties will convert the value to a string. Setting a property to ``False`` is equivalent to setting it to ``None``. Getting typed properties will return ``None`` if - conversion raises ``ValueError``, rather than the string. :issue:`2980` - - ``max_age`` is ``None`` if not present, rather than ``-1``. + conversion raises ``ValueError``, rather than the string. :issue:`2980` + - ``max_age`` is ``None`` if present without a value, rather than ``-1``. :issue:`2980` - - ``no_cache`` is a boolean for requests, it is ``False`` instead of - ``"*"`` when not present. It remains a string for responses. - issue:`2980` - - ``max_stale`` is an int, it is ``None`` instead of ``"*"`` if it is - present with no value. ``max_stale_any`` is a boolean indicating if - the property is present regardless of if it has a value. 
:issue:`2980` + - ``no_cache`` is a boolean for requests, it is ``True`` instead of + ``"*"`` when present. It remains a string for responses. :issue:`2980` + - ``max_stale`` is ``True`` if present without a value, rather + than ``"*"``. :issue:`2980` - ``no_transform`` is a boolean. Previously it was mistakenly always ``None``. :issue:`2881` - - ``min_fresh`` is ``None`` if not present instead of ``"*"``. - :issue:`2881` - - ``private`` is a boolean, it is ``False`` instead of ``"*"`` when not - present. :issue:`2980` + - ``min_fresh`` is ``None`` if present without a value, rather than + ``"*"``. :issue:`2881` + - ``private`` is ``True`` if present without a value, rather than ``"*"``. + :issue:`2980` - Added the ``must_understand`` property. :issue:`2881` - Added the ``stale_while_revalidate``, and ``stale_if_error`` properties. :issue:`2948` diff --git a/src/werkzeug/datastructures/cache_control.py b/src/werkzeug/datastructures/cache_control.py index 2171c4d7a..8d700ab6a 100644 --- a/src/werkzeug/datastructures/cache_control.py +++ b/src/werkzeug/datastructures/cache_control.py @@ -171,23 +171,24 @@ class RequestCacheControl(ImmutableDictMixin[str, t.Optional[str]], _CacheContro ``ValueError``, rather than the string. .. versionchanged:: 3.1 - ``max_age`` is ``None`` if not present, rather than ``-1``. + ``max_age`` is ``None`` if present without a value, rather + than ``-1``. .. versionchanged:: 3.1 - ``no_cache`` is a boolean, it is ``False`` instead of ``"*"`` - when not present. + ``no_cache`` is a boolean, it is ``True`` instead of ``"*"`` + when present. .. versionchanged:: 3.1 - ``max_stale`` is an int, it is ``None`` instead of ``"*"`` if it is - present with no value. ``max_stale_any`` is a boolean indicating if - the property is present regardless of if it has a value. + ``max_stale`` is ``True`` if present without a value, rather + than ``"*"``. .. versionchanged:: 3.1 ``no_transform`` is a boolean. Previously it was mistakenly always ``None``. .. versionchanged:: 3.1 - ``min_fresh`` is ``None`` if not present instead of ``"*"``. + ``min_fresh`` is ``None`` if present without a value, rather + than ``"*"``. .. versionchanged:: 2.1 Setting int properties such as ``max_age`` will convert the @@ -198,26 +199,10 @@ class RequestCacheControl(ImmutableDictMixin[str, t.Optional[str]], _CacheContro """ no_cache: bool = cache_control_property("no-cache", None, bool) - max_stale: int | None = cache_control_property( + max_stale: int | t.Literal[True] | None = cache_control_property( "max-stale", - None, + True, int, - doc="""The ``max-stale`` attribute if it has a value. A ``int``, or - ``None`` if not present or no value. - - This attribute can also be present without a value. To check that, use - :attr:`max_stale_any`. - """, - ) - max_stale_any: bool = cache_control_property( - "max-stale", - None, - bool, - doc="""The ``max-stale`` attribute presence regardless of value. A - ``bool``, either present or not. - - To check the value of the attribute if present, use :attr:`max_stale`. - """, ) min_fresh: int | None = cache_control_property("min-fresh", None, int) only_if_cached: bool = cache_control_property("only-if-cached", None, bool) @@ -241,12 +226,16 @@ class ResponseCacheControl(_CacheControl): ``ValueError``, rather than the string. .. versionchanged:: 3.1 - ``private`` is a boolean, it is ``False`` instead of ``"*"`` - when not present. + ``no_cache`` is ``True`` if present without a value, rather than + ``"*"``. + + .. 
versionchanged:: 3.1 + ``private`` is ``True`` if present without a value, rather than + ``"*"``. .. versionchanged:: 3.1 - ``no_transform`` is a boolean. Previously it was mistakenly always - ``None``. + ``no_transform`` is a boolean. Previously it was mistakenly + always ``None``. .. versionchanged:: 3.1 Added the ``must_understand``, ``stale_while_revalidate``, and @@ -263,9 +252,13 @@ class ResponseCacheControl(_CacheControl): Request-only properties are not present on this response class. """ - no_cache: str | bool | None = cache_control_property("no-cache", "*", None) + no_cache: str | t.Literal[True] | None = cache_control_property( + "no-cache", True, None + ) public: bool = cache_control_property("public", None, bool) - private: bool = cache_control_property("private", None, bool) + private: str | t.Literal[True] | None = cache_control_property( + "private", True, None + ) must_revalidate: bool = cache_control_property("must-revalidate", None, bool) proxy_revalidate: bool = cache_control_property("proxy-revalidate", None, bool) s_maxage: int | None = cache_control_property("s-maxage", None, int) diff --git a/tests/test_http.py b/tests/test_http.py index 5c45b3dde..726b40bca 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -125,24 +125,24 @@ def test_cache_control_header(self): cc = http.parse_cache_control_header( 'private, community="UCI"', None, datastructures.ResponseCacheControl ) - assert cc.private is True + assert cc.private assert cc["community"] == "UCI" c = datastructures.ResponseCacheControl() assert c.no_cache is None - assert c.private is False + assert c.private is None c.no_cache = True - assert c.no_cache == "*" + assert c.no_cache and c.no_cache is True c.private = True - assert c.private is True + assert c.private and c.private is True del c.private - assert c.private is False + assert not c.private and c.private is None # max_age is an int, other types are converted c.max_age = 3.1 - assert c.max_age == 3 + assert c.max_age == 3 and c["max-age"] == "3" del c.max_age c.s_maxage = 3.1 - assert c.s_maxage == 3 + assert c.s_maxage == 3 and c["s-maxage"] == "3" del c.s_maxage assert c.to_header() == "no-cache" From 564835a6b5928d7240f636a13d80d69359cd86f5 Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 31 Oct 2024 09:53:52 -0700 Subject: [PATCH 147/159] add docstring changelogs --- src/werkzeug/exceptions.py | 2 ++ src/werkzeug/sansio/response.py | 3 +++ src/werkzeug/security.py | 3 +++ 3 files changed, 8 insertions(+) diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py index ddb621032..1cd999773 100644 --- a/src/werkzeug/exceptions.py +++ b/src/werkzeug/exceptions.py @@ -576,6 +576,8 @@ class MisdirectedRequest(HTTPException): Indicates that the request was directed to a server that is not able to produce a response. + + .. versionadded:: 3.1 """ code = 421 diff --git a/src/werkzeug/sansio/response.py b/src/werkzeug/sansio/response.py index 1c32b51de..9fed08625 100644 --- a/src/werkzeug/sansio/response.py +++ b/src/werkzeug/sansio/response.py @@ -223,6 +223,9 @@ def set_cookie( :param samesite: Limit the scope of the cookie to only be attached to requests that are "same-site". :param partitioned: If ``True``, the cookie will be partitioned. + + .. versionchanged:: 3.1 + The ``partitioned`` parameter was added. 
""" self.headers.add( "Set-Cookie", diff --git a/src/werkzeug/security.py b/src/werkzeug/security.py index a18381779..3f49ad1b4 100644 --- a/src/werkzeug/security.py +++ b/src/werkzeug/security.py @@ -92,6 +92,9 @@ def generate_password_hash( :param method: The key derivation function and parameters. :param salt_length: The number of characters to generate for the salt. + .. versionchanged:: 3.1 + The default iterations for pbkdf2 was increased to 1,000,000. + .. versionchanged:: 2.3 Scrypt support was added. From df655e6ce6bc77adc7833619a079e66eba566aee Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 31 Oct 2024 09:57:35 -0700 Subject: [PATCH 148/159] release version 3.1.0 --- CHANGES.rst | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 26cd3d44c..407ff33f6 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,7 @@ Version 3.1.0 ------------- -Unreleased +Released 2024-10-31 - Drop support for Python 3.8. :pr:`2966` - Remove previously deprecated code. :pr:`2967` diff --git a/pyproject.toml b/pyproject.toml index 7eab71c89..bdd66cbb8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.1.0.dev" +version = "3.1.0" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"} From d1f60d68ac9788aec05712aca29867abd00d5cc3 Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 31 Oct 2024 10:35:11 -0700 Subject: [PATCH 149/159] start version 3.1.1 --- CHANGES.rst | 6 ++++++ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 407ff33f6..d4c040e2d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,11 @@ .. currentmodule:: werkzeug +Version 3.1.1 +------------- + +Unreleased + + Version 3.1.0 ------------- diff --git a/pyproject.toml b/pyproject.toml index bdd66cbb8..ea3535d58 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.1.0" +version = "3.1.1.dev" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"} From afc4ea7b5e969d501194d8acdc88025f877a5510 Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 1 Nov 2024 08:21:24 -0700 Subject: [PATCH 150/159] fix str(request.headers) --- CHANGES.rst | 3 +++ src/werkzeug/datastructures/headers.py | 4 ++-- tests/test_datastructures.py | 4 ++++ 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index d4c040e2d..1f2938695 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,9 @@ Version 3.1.1 Unreleased +- Fix an issue that caused ``str(Request.headers)`` to always appear empty. 
+ :issue:`2985` + Version 3.1.0 ------------- diff --git a/src/werkzeug/datastructures/headers.py b/src/werkzeug/datastructures/headers.py index db53cda7b..8b274c75f 100644 --- a/src/werkzeug/datastructures/headers.py +++ b/src/werkzeug/datastructures/headers.py @@ -568,13 +568,13 @@ def __copy__(self) -> te.Self: def __str__(self) -> str: """Returns formatted headers suitable for HTTP transmission.""" strs = [] - for key, value in self._list: + for key, value in self.to_wsgi_list(): strs.append(f"{key}: {value}") strs.append("\r\n") return "\r\n".join(strs) def __repr__(self) -> str: - return f"{type(self).__name__}({self._list!r})" + return f"{type(self).__name__}({list(self)!r})" def _options_header_vkw(value: str, kw: dict[str, t.Any]) -> str: diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 51799722b..dcbc79697 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -885,6 +885,10 @@ def test_ior(self) -> None: with pytest.raises(TypeError): headers |= {"y": "2"} + def test_str(self) -> None: + headers = ds.EnvironHeaders({"CONTENT_LENGTH": "50", "HTTP_HOST": "test"}) + assert str(headers) == "Content-Length: 50\r\nHost: test\r\n\r\n" + class TestHeaderSet: storage_class = ds.HeaderSet From 82ad3061566b68ef3ea01aa8b728787c832d0686 Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 1 Nov 2024 09:18:41 -0700 Subject: [PATCH 151/159] release version 3.1.1 --- CHANGES.rst | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 1f2938695..3c1117d83 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,7 @@ Version 3.1.1 ------------- -Unreleased +Released 2024-11-01 - Fix an issue that caused ``str(Request.headers)`` to always appear empty. :issue:`2985` diff --git a/pyproject.toml b/pyproject.toml index ea3535d58..1ce88e3a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.1.1.dev" +version = "3.1.1" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"} From f27e476144f973d2bc3df42cfd562be4c33bbcfe Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 4 Nov 2024 07:45:39 -0800 Subject: [PATCH 152/159] start version 3.1.2 --- CHANGES.rst | 6 ++++++ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 3c1117d83..ec16da97f 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,11 @@ .. currentmodule:: werkzeug +Version 3.1.2 +------------- + +Unreleased + + Version 3.1.1 ------------- diff --git a/pyproject.toml b/pyproject.toml index 1ce88e3a5..b612b662a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.1.1" +version = "3.1.2.dev" description = "The comprehensive WSGI web application library." 
readme = "README.md" license = {file = "LICENSE.txt"} From acdf7faf210596243a2484fe06a3b46c8c6c492d Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 4 Nov 2024 08:01:14 -0800 Subject: [PATCH 153/159] TypeConversionDict.get type can be a callable --- CHANGES.rst | 4 +++ src/werkzeug/datastructures/headers.py | 19 ++++++---- src/werkzeug/datastructures/structures.py | 44 ++++++++++++++--------- 3 files changed, 44 insertions(+), 23 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index ec16da97f..b85611630 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,10 @@ Version 3.1.2 Unreleased +- Improve type annotation for ``TypeConversionDict.get`` to allow the ``type`` + parameter to be a callable. :issue:`2988` + + Version 3.1.1 ------------- diff --git a/src/werkzeug/datastructures/headers.py b/src/werkzeug/datastructures/headers.py index 8b274c75f..96c2a0cd6 100644 --- a/src/werkzeug/datastructures/headers.py +++ b/src/werkzeug/datastructures/headers.py @@ -114,11 +114,14 @@ def get(self, key: str, default: str) -> str: ... @t.overload def get(self, key: str, default: T) -> str | T: ... @t.overload - def get(self, key: str, type: type[T]) -> T | None: ... + def get(self, key: str, type: cabc.Callable[[str], T]) -> T | None: ... @t.overload - def get(self, key: str, default: T, type: type[T]) -> T: ... + def get(self, key: str, default: T, type: cabc.Callable[[str], T]) -> T: ... def get( # type: ignore[misc] - self, key: str, default: str | T | None = None, type: type[T] | None = None + self, + key: str, + default: str | T | None = None, + type: cabc.Callable[[str], T] | None = None, ) -> str | T | None: """Return the default value if the requested data doesn't exist. If `type` is provided and is a callable it should convert the value, @@ -153,15 +156,17 @@ def get( # type: ignore[misc] return rv try: - return type(rv) # type: ignore[call-arg] + return type(rv) except ValueError: return default @t.overload def getlist(self, key: str) -> list[str]: ... @t.overload - def getlist(self, key: str, type: type[T]) -> list[T]: ... - def getlist(self, key: str, type: type[T] | None = None) -> list[str] | list[T]: + def getlist(self, key: str, type: cabc.Callable[[str], T]) -> list[T]: ... + def getlist( + self, key: str, type: cabc.Callable[[str], T] | None = None + ) -> list[str] | list[T]: """Return the list of items for a given key. If that key is not in the :class:`Headers`, the return value will be an empty list. Just like :meth:`get`, :meth:`getlist` accepts a `type` parameter. All items will @@ -187,7 +192,7 @@ def getlist(self, key: str, type: type[T] | None = None) -> list[str] | list[T]: for k, v in self: if k.lower() == ikey: try: - result.append(type(v)) # type: ignore[call-arg] + result.append(type(v)) except ValueError: continue diff --git a/src/werkzeug/datastructures/structures.py b/src/werkzeug/datastructures/structures.py index db2f99800..fcfa160da 100644 --- a/src/werkzeug/datastructures/structures.py +++ b/src/werkzeug/datastructures/structures.py @@ -69,11 +69,14 @@ def get(self, key: K, default: V) -> V: ... @t.overload def get(self, key: K, default: T) -> V | T: ... @t.overload - def get(self, key: str, type: type[T]) -> T | None: ... + def get(self, key: str, type: cabc.Callable[[V], T]) -> T | None: ... @t.overload - def get(self, key: str, default: T, type: type[T]) -> T: ... + def get(self, key: str, default: T, type: cabc.Callable[[V], T]) -> T: ... 
def get( # type: ignore[misc] - self, key: K, default: V | T | None = None, type: type[T] | None = None + self, + key: K, + default: V | T | None = None, + type: cabc.Callable[[V], T] | None = None, ) -> V | T | None: """Return the default value if the requested data doesn't exist. If `type` is provided and is a callable it should convert the value, @@ -108,7 +111,7 @@ def get( # type: ignore[misc] return rv try: - return type(rv) # type: ignore[call-arg] + return type(rv) except (ValueError, TypeError): return default @@ -255,8 +258,10 @@ def add(self, key: K, value: V) -> None: @t.overload def getlist(self, key: K) -> list[V]: ... @t.overload - def getlist(self, key: K, type: type[T]) -> list[T]: ... - def getlist(self, key: K, type: type[T] | None = None) -> list[V] | list[T]: + def getlist(self, key: K, type: cabc.Callable[[V], T]) -> list[T]: ... + def getlist( + self, key: K, type: cabc.Callable[[V], T] | None = None + ) -> list[V] | list[T]: """Return the list of items for a given key. If that key is not in the `MultiDict`, the return value will be an empty list. Just like `get`, `getlist` accepts a `type` parameter. All items will be converted @@ -279,7 +284,7 @@ def getlist(self, key: K, type: type[T] | None = None) -> list[V] | list[T]: result = [] for item in rv: try: - result.append(type(item)) # type: ignore[call-arg] + result.append(type(item)) except (ValueError, TypeError): pass return result @@ -707,8 +712,10 @@ def add(self, key: K, value: V) -> None: @t.overload def getlist(self, key: K) -> list[V]: ... @t.overload - def getlist(self, key: K, type: type[T]) -> list[T]: ... - def getlist(self, key: K, type: type[T] | None = None) -> list[V] | list[T]: + def getlist(self, key: K, type: cabc.Callable[[V], T]) -> list[T]: ... + def getlist( + self, key: K, type: cabc.Callable[[V], T] | None = None + ) -> list[V] | list[T]: rv: list[_omd_bucket[K, V]] try: @@ -720,7 +727,7 @@ def getlist(self, key: K, type: type[T] | None = None) -> list[V] | list[T]: result = [] for item in rv: try: - result.append(type(item.value)) # type: ignore[call-arg] + result.append(type(item.value)) except (ValueError, TypeError): pass return result @@ -852,17 +859,20 @@ def get(self, key: K, default: V) -> V: ... @t.overload def get(self, key: K, default: T) -> V | T: ... @t.overload - def get(self, key: str, type: type[T]) -> T | None: ... + def get(self, key: str, type: cabc.Callable[[V], T]) -> T | None: ... @t.overload - def get(self, key: str, default: T, type: type[T]) -> T: ... + def get(self, key: str, default: T, type: cabc.Callable[[V], T]) -> T: ... def get( # type: ignore[misc] - self, key: K, default: V | T | None = None, type: type[T] | None = None + self, + key: K, + default: V | T | None = None, + type: cabc.Callable[[V], T] | None = None, ) -> V | T | None: for d in self.dicts: if key in d: if type is not None: try: - return type(d[key]) # type: ignore[call-arg] + return type(d[key]) except (ValueError, TypeError): continue return d[key] @@ -871,8 +881,10 @@ def get( # type: ignore[misc] @t.overload def getlist(self, key: K) -> list[V]: ... @t.overload - def getlist(self, key: K, type: type[T]) -> list[T]: ... - def getlist(self, key: K, type: type[T] | None = None) -> list[V] | list[T]: + def getlist(self, key: K, type: cabc.Callable[[V], T]) -> list[T]: ... 
+ def getlist( + self, key: K, type: cabc.Callable[[V], T] | None = None + ) -> list[V] | list[T]: rv = [] for d in self.dicts: rv.extend(d.getlist(key, type)) # type: ignore[arg-type] From ac87bf835f36e03e28a3546868dd730824e85fc3 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 4 Nov 2024 09:12:23 -0800 Subject: [PATCH 154/159] Headers is not MutableMapping --- CHANGES.rst | 3 ++- src/werkzeug/datastructures/headers.py | 20 ++++++++++---------- src/werkzeug/utils.py | 2 +- 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index b85611630..5555f01ef 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -7,7 +7,8 @@ Unreleased - Improve type annotation for ``TypeConversionDict.get`` to allow the ``type`` parameter to be a callable. :issue:`2988` - +- ``Headers`` does not inherit from ``MutableMapping``, as it is does not + exactly match that interface. :issue:`2989` Version 3.1.1 diff --git a/src/werkzeug/datastructures/headers.py b/src/werkzeug/datastructures/headers.py index 96c2a0cd6..e8cbdd8e8 100644 --- a/src/werkzeug/datastructures/headers.py +++ b/src/werkzeug/datastructures/headers.py @@ -17,7 +17,7 @@ T = t.TypeVar("T") -class Headers(cabc.MutableMapping[str, str]): +class Headers: """An object that stores some headers. It has a dict-like interface, but is ordered, can store the same key multiple times, and iterating yields ``(key, value)`` pairs instead of only keys. @@ -107,7 +107,7 @@ def lowered(item: tuple[str, ...]) -> tuple[str, ...]: __hash__ = None # type: ignore[assignment] - @t.overload # type: ignore[override] + @t.overload def get(self, key: str) -> str | None: ... @t.overload def get(self, key: str, default: str) -> str: ... @@ -208,17 +208,17 @@ def get_all(self, name: str) -> list[str]: """ return self.getlist(name) - def items(self, lower: bool = False) -> t.Iterable[tuple[str, str]]: # type: ignore[override] + def items(self, lower: bool = False) -> t.Iterable[tuple[str, str]]: for key, value in self: if lower: key = key.lower() yield key, value - def keys(self, lower: bool = False) -> t.Iterable[str]: # type: ignore[override] + def keys(self, lower: bool = False) -> t.Iterable[str]: for key, _ in self.items(lower): yield key - def values(self) -> t.Iterable[str]: # type: ignore[override] + def values(self) -> t.Iterable[str]: for _, value in self.items(): yield value @@ -322,7 +322,7 @@ def popitem(self) -> tuple[str, str]: """Removes a key or index and returns a (key, value) item.""" return self._list.pop() - def __contains__(self, key: str) -> bool: # type: ignore[override] + def __contains__(self, key: str) -> bool: """Check if a key is present.""" try: self._get_key(key) @@ -331,7 +331,7 @@ def __contains__(self, key: str) -> bool: # type: ignore[override] return True - def __iter__(self) -> t.Iterator[tuple[str, str]]: # type: ignore[override] + def __iter__(self) -> t.Iterator[tuple[str, str]]: """Yield ``(key, value)`` tuples.""" return iter(self._list) @@ -486,7 +486,7 @@ def __setitem__( else: self._list[key] = [(k, _str_header_value(v)) for k, v in value] # type: ignore[misc] - def update( # type: ignore[override] + def update( self, arg: ( Headers @@ -562,7 +562,7 @@ def to_wsgi_list(self) -> list[tuple[str, str]]: :return: list """ - return list(self) # type: ignore[arg-type] + return list(self) def copy(self) -> te.Self: return self.__class__(self._list) @@ -640,7 +640,7 @@ def _get_key(self, key: str) -> str: def __len__(self) -> int: return sum(1 for _ in self) - def __iter__(self) -> cabc.Iterator[tuple[str, 
str]]: # type: ignore[override] + def __iter__(self) -> cabc.Iterator[tuple[str, str]]: for key, value in self.environ.items(): if key.startswith("HTTP_") and key not in { "HTTP_CONTENT_TYPE", diff --git a/src/werkzeug/utils.py b/src/werkzeug/utils.py index 59b97b732..3d3bbf066 100644 --- a/src/werkzeug/utils.py +++ b/src/werkzeug/utils.py @@ -150,7 +150,7 @@ def lookup(self, obj: Request) -> WSGIEnvironment: class header_property(_DictAccessorProperty[_TAccessorValue]): """Like `environ_property` but for headers.""" - def lookup(self, obj: Request | Response) -> Headers: + def lookup(self, obj: Request | Response) -> Headers: # type: ignore[override] return obj.headers From 4764684faffa7232ac68b7e089bc2d0fdfff3303 Mon Sep 17 00:00:00 2001 From: David Lord Date: Mon, 4 Nov 2024 09:30:33 -0800 Subject: [PATCH 155/159] release version 3.1.2 --- CHANGES.rst | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 5555f01ef..8388dc4c4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,7 @@ Version 3.1.2 ------------- -Unreleased +Released 2024-11-04 - Improve type annotation for ``TypeConversionDict.get`` to allow the ``type`` parameter to be a callable. :issue:`2988` diff --git a/pyproject.toml b/pyproject.toml index b612b662a..fabf98579 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.1.2.dev" +version = "3.1.2" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"} From 1a1728ed88939ca68928dade168e1989be062c6f Mon Sep 17 00:00:00 2001 From: David Lord Date: Wed, 6 Nov 2024 08:15:46 -0800 Subject: [PATCH 156/159] start version 3.1.3 --- CHANGES.rst | 6 ++++++ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 8388dc4c4..75f01962b 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,11 @@ .. currentmodule:: werkzeug +Version 3.1.3 +------------- + +Unreleased + + Version 3.1.2 ------------- diff --git a/pyproject.toml b/pyproject.toml index fabf98579..8d4e687eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.1.2" +version = "3.1.3.dev" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"} From 598bb1de78678107404493e56d68003910e1dbea Mon Sep 17 00:00:00 2001 From: David Lord Date: Wed, 6 Nov 2024 11:49:36 -0800 Subject: [PATCH 157/159] restrict containers accepted by multi --- CHANGES.rst | 5 ++++ src/werkzeug/datastructures/headers.py | 27 +++++++++++---------- src/werkzeug/datastructures/structures.py | 29 ++++++++++++----------- tests/test_datastructures.py | 17 ++++++++++++- 4 files changed, 50 insertions(+), 28 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 75f01962b..6a2fd2ed4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,11 @@ Version 3.1.3 Unreleased +- Initial data passed to ``MultiDict`` and similar interfaces only accepts + ``list``, ``tuple``, or ``set`` when passing multiple values. It had been + changed to accept any ``Collection``, but this matched types that should be + treated as single values, such as ``bytes``. 
:issue:`2994` + Version 3.1.2 ------------- diff --git a/src/werkzeug/datastructures/headers.py b/src/werkzeug/datastructures/headers.py index e8cbdd8e8..1088e3bc9 100644 --- a/src/werkzeug/datastructures/headers.py +++ b/src/werkzeug/datastructures/headers.py @@ -62,7 +62,7 @@ def __init__( defaults: ( Headers | MultiDict[str, t.Any] - | cabc.Mapping[str, t.Any | cabc.Collection[t.Any]] + | cabc.Mapping[str, t.Any | list[t.Any] | tuple[t.Any, ...] | set[t.Any]] | cabc.Iterable[tuple[str, t.Any]] | None ) = None, @@ -227,7 +227,7 @@ def extend( arg: ( Headers | MultiDict[str, t.Any] - | cabc.Mapping[str, t.Any | cabc.Collection[t.Any]] + | cabc.Mapping[str, t.Any | list[t.Any] | tuple[t.Any, ...] | set[t.Any]] | cabc.Iterable[tuple[str, t.Any]] | None ) = None, @@ -491,12 +491,14 @@ def update( arg: ( Headers | MultiDict[str, t.Any] - | cabc.Mapping[str, t.Any | cabc.Collection[t.Any]] + | cabc.Mapping[ + str, t.Any | list[t.Any] | tuple[t.Any, ...] | cabc.Set[t.Any] + ] | cabc.Iterable[tuple[str, t.Any]] | None ) = None, /, - **kwargs: t.Any | cabc.Collection[t.Any], + **kwargs: t.Any | list[t.Any] | tuple[t.Any, ...] | cabc.Set[t.Any], ) -> None: """Replace headers in this object with items from another headers object and keyword arguments. @@ -516,9 +518,7 @@ def update( self.setlist(key, arg.getlist(key)) elif isinstance(arg, cabc.Mapping): for key, value in arg.items(): - if isinstance(value, cabc.Collection) and not isinstance( - value, str - ): + if isinstance(value, (list, tuple, set)): self.setlist(key, value) else: self.set(key, value) @@ -527,13 +527,16 @@ def update( self.set(key, value) for key, value in kwargs.items(): - if isinstance(value, cabc.Collection) and not isinstance(value, str): + if isinstance(value, (list, tuple, set)): self.setlist(key, value) else: self.set(key, value) def __or__( - self, other: cabc.Mapping[str, t.Any | cabc.Collection[t.Any]] + self, + other: cabc.Mapping[ + str, t.Any | list[t.Any] | tuple[t.Any, ...] | cabc.Set[t.Any] + ], ) -> te.Self: if not isinstance(other, cabc.Mapping): return NotImplemented @@ -545,13 +548,11 @@ def __or__( def __ior__( self, other: ( - cabc.Mapping[str, t.Any | cabc.Collection[t.Any]] + cabc.Mapping[str, t.Any | list[t.Any] | tuple[t.Any, ...] | cabc.Set[t.Any]] | cabc.Iterable[tuple[str, t.Any]] ), ) -> te.Self: - if not isinstance(other, (cabc.Mapping, cabc.Iterable)) or isinstance( - other, str - ): + if not isinstance(other, (cabc.Mapping, cabc.Iterable)): return NotImplemented self.update(other) diff --git a/src/werkzeug/datastructures/structures.py b/src/werkzeug/datastructures/structures.py index fcfa160da..dbb7e8048 100644 --- a/src/werkzeug/datastructures/structures.py +++ b/src/werkzeug/datastructures/structures.py @@ -22,7 +22,7 @@ def iter_multi_items( mapping: ( MultiDict[K, V] - | cabc.Mapping[K, V | cabc.Collection[V]] + | cabc.Mapping[K, V | list[V] | tuple[V, ...] | set[V]] | cabc.Iterable[tuple[K, V]] ), ) -> cabc.Iterator[tuple[K, V]]: @@ -33,11 +33,11 @@ def iter_multi_items( yield from mapping.items(multi=True) elif isinstance(mapping, cabc.Mapping): for key, value in mapping.items(): - if isinstance(value, cabc.Collection) and not isinstance(value, str): + if isinstance(value, (list, tuple, set)): for v in value: yield key, v else: - yield key, value # type: ignore[misc] + yield key, value else: yield from mapping @@ -182,7 +182,7 @@ def __init__( self, mapping: ( MultiDict[K, V] - | cabc.Mapping[K, V | cabc.Collection[V]] + | cabc.Mapping[K, V | list[V] | tuple[V, ...] 
| set[V]] | cabc.Iterable[tuple[K, V]] | None ) = None, @@ -194,7 +194,7 @@ def __init__( elif isinstance(mapping, cabc.Mapping): tmp = {} for key, value in mapping.items(): - if isinstance(value, cabc.Collection) and not isinstance(value, str): + if isinstance(value, (list, tuple, set)): value = list(value) if not value: @@ -419,7 +419,7 @@ def update( # type: ignore[override] self, mapping: ( MultiDict[K, V] - | cabc.Mapping[K, V | cabc.Collection[V]] + | cabc.Mapping[K, V | list[V] | tuple[V, ...] | set[V]] | cabc.Iterable[tuple[K, V]] ), ) -> None: @@ -444,7 +444,7 @@ def update( # type: ignore[override] self.add(key, value) def __or__( # type: ignore[override] - self, other: cabc.Mapping[K, V | cabc.Collection[V]] + self, other: cabc.Mapping[K, V | list[V] | tuple[V, ...] | set[V]] ) -> MultiDict[K, V]: if not isinstance(other, cabc.Mapping): return NotImplemented @@ -455,11 +455,12 @@ def __or__( # type: ignore[override] def __ior__( # type: ignore[override] self, - other: cabc.Mapping[K, V | cabc.Collection[V]] | cabc.Iterable[tuple[K, V]], + other: ( + cabc.Mapping[K, V | list[V] | tuple[V, ...] | set[V]] + | cabc.Iterable[tuple[K, V]] + ), ) -> te.Self: - if not isinstance(other, (cabc.Mapping, cabc.Iterable)) or isinstance( - other, str - ): + if not isinstance(other, (cabc.Mapping, cabc.Iterable)): return NotImplemented self.update(other) @@ -600,7 +601,7 @@ def __init__( self, mapping: ( MultiDict[K, V] - | cabc.Mapping[K, V | cabc.Collection[V]] + | cabc.Mapping[K, V | list[V] | tuple[V, ...] | set[V]] | cabc.Iterable[tuple[K, V]] | None ) = None, @@ -744,7 +745,7 @@ def update( # type: ignore[override] self, mapping: ( MultiDict[K, V] - | cabc.Mapping[K, V | cabc.Collection[V]] + | cabc.Mapping[K, V | list[V] | tuple[V, ...] | set[V]] | cabc.Iterable[tuple[K, V]] ), ) -> None: @@ -1009,7 +1010,7 @@ def __init__( self, mapping: ( MultiDict[K, V] - | cabc.Mapping[K, V | cabc.Collection[V]] + | cabc.Mapping[K, V | list[V] | tuple[V, ...] 
| set[V]] | cabc.Iterable[tuple[K, V]] | None ) = None, diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index dcbc79697..0cd497438 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -1,6 +1,9 @@ +from __future__ import annotations + import io import pickle import tempfile +import typing as t from contextlib import contextmanager from copy import copy from copy import deepcopy @@ -43,7 +46,7 @@ def items(self, multi=1): class _MutableMultiDictTests: - storage_class: type["ds.MultiDict"] + storage_class: type[ds.MultiDict] def test_pickle(self): cls = self.storage_class @@ -1280,3 +1283,15 @@ def test_range_to_header(ranges): def test_range_validates_ranges(ranges): with pytest.raises(ValueError): ds.Range("bytes", ranges) + + +@pytest.mark.parametrize( + ("value", "expect"), + [ + ({"a": "ab"}, [("a", "ab")]), + ({"a": ["a", "b"]}, [("a", "a"), ("a", "b")]), + ({"a": b"ab"}, [("a", b"ab")]), + ], +) +def test_iter_multi_data(value: t.Any, expect: list[tuple[t.Any, t.Any]]) -> None: + assert list(ds.iter_multi_items(value)) == expect From d99f72d12698d86e7ebcb894f3c6c729e2b6c067 Mon Sep 17 00:00:00 2001 From: David Lord Date: Thu, 7 Nov 2024 08:01:56 -0800 Subject: [PATCH 158/159] wrap IPv6 SERVER_NAME in [] --- CHANGES.rst | 3 +++ src/werkzeug/sansio/utils.py | 8 ++++++++ tests/sansio/test_utils.py | 4 ++++ 3 files changed, 15 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 6a2fd2ed4..27a89e83e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -9,6 +9,9 @@ Unreleased ``list``, ``tuple``, or ``set`` when passing multiple values. It had been changed to accept any ``Collection``, but this matched types that should be treated as single values, such as ``bytes``. :issue:`2994` +- When the ``Host`` header is not set and ``Request.host`` falls back to the + WSGI ``SERVER_NAME`` value, if that value is an IPv6 address it is wrapped + in ``[]`` to match the ``Host`` header. :issue:`2993` Version 3.1.2 diff --git a/src/werkzeug/sansio/utils.py b/src/werkzeug/sansio/utils.py index 14fa0ac88..ff7ceda34 100644 --- a/src/werkzeug/sansio/utils.py +++ b/src/werkzeug/sansio/utils.py @@ -71,6 +71,9 @@ def get_host( :return: Host, with port if necessary. :raise ~werkzeug.exceptions.SecurityError: If the host is not trusted. + + .. versionchanged:: 3.1.3 + If ``SERVER_NAME`` is IPv6, it is wrapped in ``[]``. """ host = "" @@ -79,6 +82,11 @@ def get_host( elif server is not None: host = server[0] + # If SERVER_NAME is IPv6, wrap it in [] to match Host header. + # Check for : because domain or IPv4 can't have that. 
+ if ":" in host and host[0] != "[": + host = f"[{host}]" + if server[1] is not None: host = f"{host}:{server[1]}" diff --git a/tests/sansio/test_utils.py b/tests/sansio/test_utils.py index d43de66c2..a63e7c660 100644 --- a/tests/sansio/test_utils.py +++ b/tests/sansio/test_utils.py @@ -14,12 +14,16 @@ ("https", "spam", None, "spam"), ("https", "spam:443", None, "spam"), ("http", "spam:8080", None, "spam:8080"), + ("http", "127.0.0.1:8080", None, "127.0.0.1:8080"), + ("http", "[::1]:8080", None, "[::1]:8080"), ("ws", "spam", None, "spam"), ("ws", "spam:80", None, "spam"), ("wss", "spam", None, "spam"), ("wss", "spam:443", None, "spam"), ("http", None, ("spam", 80), "spam"), ("http", None, ("spam", 8080), "spam:8080"), + ("http", None, ("127.0.0.1", 8080), "127.0.0.1:8080"), + ("http", None, ("::1", 8080), "[::1]:8080"), ("http", None, ("unix/socket", None), "unix/socket"), ("http", "spam", ("eggs", 80), "spam"), ], From 6389612fd1ee1bd93579eed5026e8fd471d04abd Mon Sep 17 00:00:00 2001 From: David Lord Date: Fri, 8 Nov 2024 07:46:09 -0800 Subject: [PATCH 159/159] release version 3.1.3 --- CHANGES.rst | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 27a89e83e..de3f2b7c9 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,7 @@ Version 3.1.3 ------------- -Unreleased +Released 2024-11-08 - Initial data passed to ``MultiDict`` and similar interfaces only accepts ``list``, ``tuple``, or ``set`` when passing multiple values. It had been diff --git a/pyproject.toml b/pyproject.toml index 8d4e687eb..2d5a6cee2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "Werkzeug" -version = "3.1.3.dev" +version = "3.1.3" description = "The comprehensive WSGI web application library." readme = "README.md" license = {file = "LICENSE.txt"}
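
A minimal doctest-style sketch of the request/response cache-control behaviour settled by the two cache-control patches above (Werkzeug 3.1). The expected values mirror the assertions added to tests/test_http.py; the variable names and the extra "max-stale" attribute are illustrative only.

>>> from werkzeug.http import parse_cache_control_header
>>> from werkzeug.datastructures import ResponseCacheControl
>>> cc = parse_cache_control_header("max-age=0, no-cache, max-stale")
>>> cc.max_age
0
>>> cc.no_cache
True
>>> cc.max_stale
True
>>> rcc = ResponseCacheControl()
>>> rcc.no_cache = True
>>> rcc.to_header()
'no-cache'
>>> rcc.max_age = 3.1
>>> rcc.max_age
3
>>> rcc["max-age"]
'3'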
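
A short sketch of the ``str(Request.headers)`` fix released as 3.1.1 above, assuming the same environ keys used in the new test: the string form now renders the WSGI headers instead of appearing empty.

>>> from werkzeug.datastructures import EnvironHeaders
>>> headers = EnvironHeaders({"CONTENT_LENGTH": "50", "HTTP_HOST": "test"})
>>> headers["Host"]
'test'
>>> str(headers)
'Content-Length: 50\r\nHost: test\r\n\r\n'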
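
The 3.1.2 change above only widens the type annotation of ``TypeConversionDict.get``; at runtime any callable has always been accepted for ``type``. A small sketch, with illustrative keys and a hypothetical clamping callable:

>>> from werkzeug.datastructures import TypeConversionDict
>>> args = TypeConversionDict(page="7", per_page="many")
>>> args.get("page", type=int)
7
>>> args.get("per_page", default=20, type=int)  # conversion fails, default returned
20
>>> args.get("page", type=lambda value: min(int(value), 5))  # any callable works
5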
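
A sketch of the 3.1.3 restriction above on which containers are treated as multiple values when building a ``MultiDict`` or iterating initial data; the first two results mirror the new test_iter_multi_data cases, and the ``MultiDict`` line is an illustrative assumption built on the same rule.

>>> from werkzeug import datastructures as ds
>>> list(ds.iter_multi_items({"a": ["a", "b"]}))
[('a', 'a'), ('a', 'b')]
>>> list(ds.iter_multi_items({"a": b"ab"}))
[('a', b'ab')]
>>> ds.MultiDict({"key": ["v1", "v2"], "raw": b"bytes"}).getlist("raw")
[b'bytes']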
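
Finally, a sketch of the 3.1.3 ``SERVER_NAME`` handling above: with no Host header, an IPv6 server address is wrapped in brackets to match the Host header form, while an explicit Host header still wins. Expected values follow the updated tests/sansio/test_utils.py table; the "ignored" server name is illustrative.

>>> from werkzeug.sansio.utils import get_host
>>> get_host("http", None, ("::1", 8080))
'[::1]:8080'
>>> get_host("http", None, ("127.0.0.1", 8080))
'127.0.0.1:8080'
>>> get_host("http", "[::1]:8080", ("ignored", 80))
'[::1]:8080'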