Import Upstream version 3.0.1

.devcontainer/devcontainer.json
@@ -0,0 +1,17 @@
{
    "name": "pallets/werkzeug",
    "image": "mcr.microsoft.com/devcontainers/python:3",
    "customizations": {
        "vscode": {
            "settings": {
                "python.defaultInterpreterPath": "${workspaceFolder}/.venv",
                "python.terminal.activateEnvInCurrentTerminal": true,
                "python.terminal.launchArgs": [
                    "-X",
                    "dev"
                ]
            }
        }
    },
    "onCreateCommand": ".devcontainer/on-create-command.sh"
}
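
The ``-X dev`` arguments passed through ``python.terminal.launchArgs`` start every terminal interpreter in Python development mode. A minimal sketch (not part of the diff) of how a session inside the container could confirm the flag took effect:

.. code-block:: python

    import sys

    # "-X dev" enables development mode: extra warnings are shown,
    # faulthandler is installed, and asyncio debug mode is turned on.
    if sys.flags.dev_mode:
        print("development mode is enabled")
    else:
        print("start the interpreter with 'python -X dev'")
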
.devcontainer/on-create-command.sh
@@ -0,0 +1,9 @@
#!/bin/bash
set -e

python3 -m venv .venv
. .venv/bin/activate
pip install -U pip
pip install -r requirements/dev.txt
pip install -e .
pre-commit install --install-hooks
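
For contributors who prefer to drive the same bootstrap from Python rather than a shell, a rough, hypothetical equivalent of the steps above (it assumes the ``requirements/dev.txt`` path and the editable install used by the script):

.. code-block:: python

    import subprocess
    import venv

    # Mirror "python3 -m venv .venv".
    venv.EnvBuilder(with_pip=True).create(".venv")

    # Call the environment's interpreter directly instead of sourcing "activate".
    # On Windows the path would be ".venv\Scripts\python.exe".
    py = ".venv/bin/python"
    for args in (
        ["-m", "pip", "install", "-U", "pip"],
        ["-m", "pip", "install", "-r", "requirements/dev.txt"],
        ["-m", "pip", "install", "-e", "."],
        ["-m", "pre_commit", "install", "--install-hooks"],
    ):
        subprocess.run([py, *args], check=True)
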
.flake8
@@ -0,0 +1,29 @@
[flake8]
extend-select =
    # bugbear
    B
    # bugbear opinions
    B9
    # implicit str concat
    ISC
extend-ignore =
    # slice notation whitespace, invalid
    E203
    # import at top, too many circular import fixes
    E402
    # line length, handled by bugbear B950
    E501
    # bare except, handled by bugbear B001
    E722
    # zip with strict=, requires python >= 3.10
    B905
    # string formatting opinion, B028 renamed to B907
    B028
    B907
# up to 88 allowed by bugbear B950
max-line-length = 80
per-file-ignores =
    # __init__ exports names
    **/__init__.py: F401
    # LocalProxy assigns lambdas
    src/werkzeug/local.py: E731
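
Two of the selected checks may be unfamiliar: ``ISC`` (flake8-implicit-str-concat) flags adjacent string literals, which usually indicate a missing comma, and long lines are left to bugbear's ``B950`` (about 10% slack over ``max-line-length``) instead of the hard ``E501`` limit. A small illustrative snippet, not taken from the codebase:

.. code-block:: python

    # ISC001: two string literals concatenated implicitly on one line.
    # The missing comma silently merges two list items into one string.
    fruits = ["apple" "banana", "cherry"]

    # Explicitly separating the items fixes the warning.
    fruits = ["apple", "banana", "cherry"]
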
.github/workflows/lock.yaml
@@ -1,15 +1,25 @@
 name: 'Lock threads'
+# Lock closed issues that have not received any further activity for
+# two weeks. This does not close open issues, only humans may do that.
+# We find that it is easier to respond to new issues with fresh examples
+# rather than continuing discussions on old issues.
 
 on:
   schedule:
     - cron: '0 0 * * *'
 
+permissions:
+  issues: write
+  pull-requests: write
+
+concurrency:
+  group: lock
+
 jobs:
   lock:
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v3
+      - uses: dessant/lock-threads@be8aa5be94131386884a6da4189effda9b14aa21
         with:
-          github-token: ${{ github.token }}
           issue-inactive-days: 14
           pr-inactive-days: 14
.github/workflows/publish.yaml
@@ -0,0 +1,72 @@
name: Publish
on:
  push:
    tags:
      - '*'
jobs:
  build:
    runs-on: ubuntu-latest
    outputs:
      hash: ${{ steps.hash.outputs.hash }}
    steps:
      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
        with:
          python-version: '3.x'
          cache: 'pip'
          cache-dependency-path: 'requirements/*.txt'
      - run: pip install -r requirements/build.txt
      # Use the commit date instead of the current date during the build.
      - run: echo "SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct)" >> $GITHUB_ENV
      - run: python -m build
      # Generate hashes used for provenance.
      - name: generate hash
        id: hash
        run: cd dist && echo "hash=$(sha256sum * | base64 -w0)" >> $GITHUB_OUTPUT
      - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
        with:
          path: ./dist
  provenance:
    needs: ['build']
    permissions:
      actions: read
      id-token: write
      contents: write
    # Can't pin with hash due to how this workflow works.
    uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.9.0
    with:
      base64-subjects: ${{ needs.build.outputs.hash }}
  create-release:
    # Upload the sdist, wheels, and provenance to a GitHub release. They remain
    # available as build artifacts for a while as well.
    needs: ['provenance']
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
      - name: create release
        run: >
          gh release create --draft --repo ${{ github.repository }}
          ${{ github.ref_name }}
          *.intoto.jsonl/* artifact/*
        env:
          GH_TOKEN: ${{ github.token }}
  publish-pypi:
    needs: ['provenance']
    # Wait for approval before attempting to upload to PyPI. This allows reviewing the
    # files in the draft release.
    environment: 'publish'
    runs-on: ubuntu-latest
    permissions:
      id-token: write
    steps:
      - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
      # Try uploading to Test PyPI first, in case something fails.
      - uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e
        with:
          repository-url: https://test.pypi.org/legacy/
          packages-dir: artifact/
      - uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e
        with:
          packages-dir: artifact/
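
The ``generate hash`` step encodes SHA-256 checksums of everything in ``dist/`` so the SLSA generator can attest to exactly those artifacts. A rough Python sketch of what that shell pipeline produces, shown only to clarify the format of the ``hash`` output:

.. code-block:: python

    import base64
    import hashlib
    from pathlib import Path

    # Approximates "sha256sum * | base64 -w0" run inside dist/: one
    # "<hex digest>  <file name>" line per artifact, base64-encoded as a whole.
    lines = [
        f"{hashlib.sha256(p.read_bytes()).hexdigest()}  {p.name}"
        for p in sorted(Path("dist").iterdir())
    ]
    print(base64.b64encode(("\n".join(lines) + "\n").encode()).decode())
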
.github/workflows/tests.yaml
@@ -24,32 +24,27 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - {name: Linux, python: '3.10', os: ubuntu-latest, tox: py310}
-          - {name: Windows, python: '3.10', os: windows-latest, tox: py310}
-          - {name: Mac, python: '3.10', os: macos-latest, tox: py310}
-          - {name: '3.11-dev', python: '3.11-dev', os: ubuntu-latest, tox: py311}
+          - {name: Linux, python: '3.11', os: ubuntu-latest, tox: py311}
+          - {name: Windows, python: '3.11', os: windows-latest, tox: py311}
+          - {name: Mac, python: '3.11', os: macos-latest, tox: py311}
+          - {name: '3.12-dev', python: '3.12-dev', os: ubuntu-latest, tox: py312}
+          - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310}
           - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39}
           - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38}
-          - {name: '3.7', python: '3.7', os: ubuntu-latest, tox: py37}
-          - {name: 'PyPy', python: 'pypy-3.7', os: ubuntu-latest, tox: pypy37}
-          - {name: Typing, python: '3.10', os: ubuntu-latest, tox: typing}
+          - {name: 'PyPy', python: 'pypy-3.10', os: ubuntu-latest, tox: pypy310}
+          - {name: Typing, python: '3.11', os: ubuntu-latest, tox: typing}
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v4
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
+      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
         with:
           python-version: ${{ matrix.python }}
           cache: 'pip'
           cache-dependency-path: 'requirements/*.txt'
-      - name: update pip
-        run: |
-          pip install -U wheel
-          pip install -U setuptools
-          python -m pip install -U pip
       - name: cache mypy
-        uses: actions/cache@v3.0.4
+        uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
        with:
          path: ./.mypy_cache
-          key: mypy|${{ matrix.python }}|${{ hashFiles('setup.cfg') }}
+          key: mypy|${{ matrix.python }}|${{ hashFiles('pyproject.toml') }}
        if: matrix.tox == 'typing'
       - run: pip install tox
-      - run: tox -e ${{ matrix.tox }}
+      - run: tox run -e ${{ matrix.tox }}
.gitignore
@@ -4,7 +4,7 @@ dist
 /src/Werkzeug.egg-info
 *.pyc
 *.pyo
-env
+.venv
 .DS_Store
 docs/_build
 bench/a
.pre-commit-config.yaml
@@ -1,42 +1,40 @@
 ci:
-  autoupdate_branch: "2.2.x"
+  autoupdate_branch: "2.3.x"
   autoupdate_schedule: monthly
 repos:
   - repo: https://github.com/asottile/pyupgrade
-    rev: v2.37.3
+    rev: v3.10.1
     hooks:
       - id: pyupgrade
-        args: ["--py37-plus"]
-  - repo: https://github.com/asottile/reorder_python_imports
-    rev: v3.8.2
+        args: ["--py38-plus"]
+  - repo: https://github.com/asottile/reorder-python-imports
+    rev: v3.10.0
     hooks:
       - id: reorder-python-imports
         name: Reorder Python imports (src, tests)
         files: "^(?!examples/)"
         args: ["--application-directories", ".:src"]
-        additional_dependencies: ["setuptools>60.9"]
       - id: reorder-python-imports
         name: Reorder Python imports (examples)
         files: "^examples/"
         args: ["--application-directories", "examples"]
-        additional_dependencies: ["setuptools>60.9"]
   - repo: https://github.com/psf/black
-    rev: 22.6.0
+    rev: 23.7.0
     hooks:
       - id: black
   - repo: https://github.com/PyCQA/flake8
-    rev: 5.0.4
+    rev: 6.1.0
     hooks:
       - id: flake8
         additional_dependencies:
           - flake8-bugbear
           - flake8-implicit-str-concat
   - repo: https://github.com/peterdemin/pip-compile-multi
-    rev: v2.4.6
+    rev: v2.6.3
     hooks:
       - id: pip-compile-multi-verify
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.3.0
+    rev: v4.4.0
     hooks:
       - id: fix-byte-order-marker
       - id: trailing-whitespace
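
As a reminder of what the updated hooks do, here is a hypothetical before/after of the kind of rewrite ``pyupgrade --py38-plus`` applies on commit; the class is invented for illustration:

.. code-block:: python

    # Before: Python 2 style base class and str.format().
    class Greeter(object):
        def greet(self, name):
            return "Hello, {}!".format(name)

    # After pyupgrade: redundant object base removed, f-string preferred.
    class Greeter:
        def greet(self, name):
            return f"Hello, {name}!"
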
CHANGES.rst (296 lines changed)
@@ -1,5 +1,276 @@
.. currentmodule:: werkzeug

Version 3.0.1
-------------

Released 2023-10-24

- Fix slow multipart parsing for large parts potentially enabling DoS
  attacks. :cwe:`CWE-407`


Version 3.0.0
-------------

Released 2023-09-30

- Remove previously deprecated code. :pr:`2768`
- Deprecate the ``__version__`` attribute. Use feature detection, or
  ``importlib.metadata.version("werkzeug")``, instead. :issue:`2770`
- ``generate_password_hash`` uses scrypt by default. :issue:`2769`
- Add the ``"werkzeug.profiler"`` item to the WSGI ``environ`` dictionary
  passed to `ProfilerMiddleware`'s `filename_format` function. It contains
  the ``elapsed`` and ``time`` values for the profiled request. :issue:`2775`
- Explicitly marked the PathConverter as non path isolating. :pr:`2784`
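
Since ``__version__`` is deprecated in 3.0.0, callers that need the installed version should use the standard library instead; a minimal sketch:

.. code-block:: python

    import importlib.metadata

    # Replaces the deprecated werkzeug.__version__ attribute.
    print(importlib.metadata.version("werkzeug"))  # e.g. "3.0.1"
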


Version 2.3.8
-------------

Unreleased


Version 2.3.7
-------------

Released 2023-08-14

- Use ``flit_core`` instead of ``setuptools`` as build backend.
- Fix parsing of multipart bodies. :issue:`2734` Adjust index of last newline
  in data start. :issue:`2761`
- Parsing ints from header values strips spacing first. :issue:`2734`
- Fix empty file streaming when testing. :issue:`2740`
- Clearer error message when URL rule does not start with slash. :pr:`2750`
- ``Accept`` ``q`` value can be a float without a decimal part. :issue:`2751`


Version 2.3.6
-------------

Released 2023-06-08

- ``FileStorage.content_length`` does not fail if the form data did not provide a
  value. :issue:`2726`


Version 2.3.5
-------------

Released 2023-06-07

- Python 3.12 compatibility. :issue:`2704`
- Fix handling of invalid base64 values in ``Authorization.from_header``. :issue:`2717`
- The debugger escapes the exception message in the page title. :pr:`2719`
- When binding ``routing.Map``, a long IDNA ``server_name`` with a port does not fail
  encoding. :issue:`2700`
- ``iri_to_uri`` shows a deprecation warning instead of an error when passing bytes.
  :issue:`2708`
- When parsing numbers in HTTP request headers such as ``Content-Length``, only ASCII
  digits are accepted rather than any format that Python's ``int`` and ``float``
  accept. :issue:`2716`


Version 2.3.4
-------------

Released 2023-05-08

- ``Authorization.from_header`` and ``WWWAuthenticate.from_header`` detects tokens
  that end with base64 padding (``=``). :issue:`2685`
- Remove usage of ``warnings.catch_warnings``. :issue:`2690`
- Remove ``max_form_parts`` restriction from standard form data parsing and only use
  it for multipart content. :pr:`2694`
- ``Response`` will avoid converting the ``Location`` header in some cases to preserve
  invalid URL schemes like ``itms-services``. :issue:`2691`
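
``Authorization.from_header`` (mentioned in 2.3.4 and 2.3.5 above) is the parsing entry point that replaced ``parse_authorization_header``. A small hedged sketch of its use; the attribute values shown in the comment are illustrative:

.. code-block:: python

    from werkzeug.datastructures import Authorization

    auth = Authorization.from_header("Bearer abc123")
    if auth is not None:
        # .type holds the scheme and .token holds the opaque credential for
        # single-token schemes such as Bearer.
        print(auth.type, auth.token)
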


Version 2.3.3
-------------

Released 2023-05-01

- Fix parsing of large multipart bodies. Remove invalid leading newline, and restore
  parsing speed. :issue:`2658, 2675`
- The cookie ``Path`` attribute is set to ``/`` by default again, to prevent clients
  from falling back to RFC 6265's ``default-path`` behavior. :issue:`2672, 2679`


Version 2.3.2
-------------

Released 2023-04-28

- Parse the cookie ``Expires`` attribute correctly in the test client. :issue:`2669`
- ``max_content_length`` can only be enforced on streaming requests if the server
  sets ``wsgi.input_terminated``. :issue:`2668`


Version 2.3.1
-------------

Released 2023-04-27

- Percent-encode plus (+) when building URLs and in test requests. :issue:`2657`
- Cookie values don't quote characters defined in RFC 6265. :issue:`2659`
- Include ``pyi`` files for ``datastructures`` type annotations. :issue:`2660`
- ``Authorization`` and ``WWWAuthenticate`` objects can be compared for equality.
  :issue:`2665`


Version 2.3.0
-------------

Released 2023-04-25

- Drop support for Python 3.7. :pr:`2648`
- Remove previously deprecated code. :pr:`2592`
- Passing bytes where strings are expected is deprecated, as well as the ``charset``
  and ``errors`` parameters in many places. Anywhere that was annotated, documented,
  or tested to accept bytes shows a warning. Removing this artifact of the transition
  from Python 2 to 3 removes a significant amount of overhead in instance checks and
  encoding cycles. In general, always work with UTF-8, the modern HTML, URL, and HTTP
  standards all strongly recommend this. :issue:`2602`
- Deprecate the ``werkzeug.urls`` module, except for the ``uri_to_iri`` and
  ``iri_to_uri`` functions. Use the ``urllib.parse`` library instead. :issue:`2600`
- Update which characters are considered safe when using percent encoding in URLs,
  based on the WhatWG URL Standard. :issue:`2601`
- Update which characters are considered safe when using percent encoding for Unicode
  filenames in downloads. :issue:`2598`
- Deprecate the ``safe_conversion`` parameter of ``iri_to_uri``. The ``Location``
  header is converted to IRI using the same process as everywhere else. :issue:`2609`
- Deprecate ``werkzeug.wsgi.make_line_iter`` and ``make_chunk_iter``. :pr:`2613`
- Use modern packaging metadata with ``pyproject.toml`` instead of ``setup.cfg``.
  :pr:`2574`
- ``Request.get_json()`` will raise a ``415 Unsupported Media Type`` error if the
  ``Content-Type`` header is not ``application/json``, instead of a generic 400.
  :issue:`2550`
- A URL converter's ``part_isolating`` defaults to ``False`` if its ``regex`` contains
  a ``/``. :issue:`2582`
- A custom converter's regex can have capturing groups without breaking the router.
  :pr:`2596`
- The reloader can pick up arguments to ``python`` like ``-X dev``, and does not
  require heuristics to determine how to reload the command. Only available
  on Python >= 3.10. :issue:`2589`
- The Watchdog reloader ignores file opened events. Bump the minimum version of
  Watchdog to 2.3.0. :issue:`2603`
- When using a Unix socket for the development server, the path can start with a dot.
  :issue:`2595`
- Increase default work factor for PBKDF2 to 600,000 iterations. :issue:`2611`
- ``parse_options_header`` is 2-3 times faster. It conforms to :rfc:`9110`, some
  invalid parts that were previously accepted are now ignored. :issue:`1628`
- The ``is_filename`` parameter to ``unquote_header_value`` is deprecated. :pr:`2614`
- Deprecate the ``extra_chars`` parameter and passing bytes to ``quote_header_value``,
  the ``allow_token`` parameter to ``dump_header``, and the ``cls`` parameter and
  passing bytes to ``parse_dict_header``. :pr:`2618`
- Improve ``parse_accept_header`` implementation. Parse according to :rfc:`9110`.
  Discard items with invalid ``q`` values. :issue:`1623`
- ``quote_header_value`` quotes the empty string. :pr:`2618`
- ``dump_options_header`` skips ``None`` values rather than using a bare key.
  :pr:`2618`
- ``dump_header`` and ``dump_options_header`` will not quote a value if the key ends
  with an asterisk ``*``.
- ``parse_dict_header`` will decode values with charsets. :pr:`2618`
- Refactor the ``Authorization`` and ``WWWAuthenticate`` header data structures.
  :issue:`1769`, :pr:`2619`

  - Both classes have ``type``, ``parameters``, and ``token`` attributes. The
    ``token`` attribute supports auth schemes that use a single opaque token rather
    than ``key=value`` parameters, such as ``Bearer``.
  - Neither class is a ``dict`` anymore, although they still implement getting,
    setting, and deleting ``auth[key]`` and ``auth.key`` syntax, as well as
    ``auth.get(key)`` and ``key in auth``.
  - Both classes have a ``from_header`` class method. ``parse_authorization_header``
    and ``parse_www_authenticate_header`` are deprecated.
  - The methods ``WWWAuthenticate.set_basic`` and ``set_digest`` are deprecated.
    Instead, an instance should be created and assigned to
    ``response.www_authenticate``.
  - A list of instances can be assigned to ``response.www_authenticate`` to set
    multiple header values. However, accessing the property only returns the first
    instance.

- Refactor ``parse_cookie`` and ``dump_cookie``. :pr:`2637`

  - ``parse_cookie`` is up to 40% faster, ``dump_cookie`` is up to 60% faster.
  - Passing bytes to ``parse_cookie`` and ``dump_cookie`` is deprecated. The
    ``dump_cookie`` ``charset`` parameter is deprecated.
  - ``dump_cookie`` allows ``domain`` values that do not include a dot ``.``, and
    strips off a leading dot.
  - ``dump_cookie`` does not set ``path="/"`` unnecessarily by default.

- Refactor the test client cookie implementation. :issue:`1060, 1680`

  - The ``cookie_jar`` attribute is deprecated. ``http.cookiejar`` is no longer used
    for storage.
  - Domain and path matching is used when sending cookies in requests. The
    ``domain`` and ``path`` parameters default to ``localhost`` and ``/``.
  - Added a ``get_cookie`` method to inspect cookies.
  - Cookies have ``decoded_key`` and ``decoded_value`` attributes to match what the
    app sees rather than the encoded values a client would see.
  - The first positional ``server_name`` parameter to ``set_cookie`` and
    ``delete_cookie`` is deprecated. Use the ``domain`` parameter instead.
  - Other parameters to ``delete_cookie`` besides ``domain``, ``path``, and
    ``value`` are deprecated.

- If ``request.max_content_length`` is set, it is checked immediately when accessing
  the stream, and while reading from the stream in general, rather than only during
  form parsing. :issue:`1513`
- The development server, which must not be used in production, will exhaust the
  request stream up to 10GB or 1000 reads. This allows clients to see a 413 error if
  ``max_content_length`` is exceeded, instead of a "connection reset" failure.
  :pr:`2620`
- The development server discards header keys that contain underscores ``_``, as they
  are ambiguous with dashes ``-`` in WSGI. :pr:`2622`
- ``secure_filename`` looks for more Windows reserved file names. :pr:`2623`
- Update type annotation for ``best_match`` to make ``default`` parameter clearer.
  :issue:`2625`
- Multipart parser handles empty fields correctly. :issue:`2632`
- The ``Map`` ``charset`` parameter and ``Request.url_charset`` property are
  deprecated. Percent encoding in URLs must always represent UTF-8 bytes. Invalid
  bytes are left percent encoded rather than replaced. :issue:`2602`
- The ``Request.charset``, ``Request.encoding_errors``, ``Response.charset``, and
  ``Client.charset`` attributes are deprecated. Request and response data must always
  use UTF-8. :issue:`2602`
- Header values that have charset information only allow ASCII, UTF-8, and ISO-8859-1.
  :pr:`2614, 2641`
- Update type annotation for ``ProfilerMiddleware`` ``stream`` parameter.
  :issue:`2642`
- Use postponed evaluation of annotations. :pr:`2644`
- The development server escapes ASCII control characters in decoded URLs before
  logging the request to the terminal. :pr:`2652`
- The ``FormDataParser`` ``parse_functions`` attribute and ``get_parse_func`` method,
  and the invalid ``application/x-url-encoded`` content type, are deprecated.
  :pr:`2653`
- ``generate_password_hash`` supports scrypt. Plain hash methods are deprecated, only
  scrypt and pbkdf2 are supported. :issue:`2654`
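
A minimal sketch of the password hashing change described in the last item: scrypt support arrived in 2.3 and became the default in 3.0, while verification is unchanged. The password string is only an example:

.. code-block:: python

    from werkzeug.security import check_password_hash, generate_password_hash

    # Default method (scrypt in Werkzeug 3.0); pbkdf2 can still be requested.
    h1 = generate_password_hash("example-password")
    h2 = generate_password_hash("example-password", method="pbkdf2")

    assert check_password_hash(h1, "example-password")
    assert check_password_hash(h2, "example-password")
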


Version 2.2.3
-------------

Released 2023-02-14

- Ensure that URL rules using path converters will redirect with strict slashes when
  the trailing slash is missing. :issue:`2533`
- Type signature for ``get_json`` specifies that return type is not optional when
  ``silent=False``. :issue:`2508`
- ``parse_content_range_header`` returns ``None`` for a value like ``bytes */-1``
  where the length is invalid, instead of raising an ``AssertionError``. :issue:`2531`
- Address remaining ``ResourceWarning`` related to the socket used by ``run_simple``.
  Remove ``prepare_socket``, which now happens when creating the server. :issue:`2421`
- Update pre-existing headers for ``multipart/form-data`` requests with the test
  client. :issue:`2549`
- Fix handling of header extended parameters such that they are no longer quoted.
  :issue:`2529`
- ``LimitedStream.read`` works correctly when wrapping a stream that may not return
  the requested size in one ``read`` call. :issue:`2558`
- A cookie header that starts with ``=`` is treated as an empty key and discarded,
  rather than stripping the leading ``==``.
- Specify a maximum number of multipart parts, default 1000, after which a
  ``RequestEntityTooLarge`` exception is raised on parsing. This mitigates a DoS
  attack where a larger number of form/file parts would result in disproportionate
  resource use.


Version 2.2.2
-------------

@@ -23,6 +294,7 @@ Released 2022-08-08
   ``run_simple``. :issue:`2421`
 
 
+
 Version 2.2.1
 -------------
 
@@ -54,8 +326,9 @@ Released 2022-07-23
   debug console. :pr:`2439`
 - Fix compatibility with Python 3.11 by ensuring that ``end_lineno``
   and ``end_col_offset`` are present on AST nodes. :issue:`2425`
-- Add a new faster matching router based on a state
-  machine. :pr:`2433`
+- Add a new faster URL matching router based on a state machine. If a custom converter
+  needs to match a ``/`` it must set the class variable ``part_isolating = False``.
+  :pr:`2433`
 - Fix branch leaf path masking branch paths when strict-slashes is
   disabled. :issue:`1074`
 - Names within options headers are always converted to lowercase. This
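
The faster URL matching router described in the hunk above requires any custom converter whose regex can match a ``/`` to declare ``part_isolating = False``. A hypothetical converter showing the required class variable (names are invented for illustration):

.. code-block:: python

    from werkzeug.routing import BaseConverter, Map, Rule

    class WikiPageConverter(BaseConverter):
        # The regex may span "/" separated segments, so opt out of the
        # per-segment matching used by the state machine router.
        regex = "[^/].*?"
        part_isolating = False

    url_map = Map(
        [Rule("/wiki/<wiki:page>", endpoint="wiki_page")],
        converters={"wiki": WikiPageConverter},
    )
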
@@ -775,7 +1048,7 @@ Released 2019-03-19
   (:pr:`1358`)
 - :func:`http.parse_cookie` ignores empty segments rather than
   producing a cookie with no key or value. (:issue:`1245`, :pr:`1301`)
-- :func:`~http.parse_authorization_header` (and
+- ``http.parse_authorization_header`` (and
   :class:`~datastructures.Authorization`,
   :attr:`~wrappers.Request.authorization`) treats the authorization
   header as UTF-8. On Python 2, basic auth username and password are
@@ -1540,8 +1813,8 @@ Version 0.9.2
 
 (bugfix release, released on July 18th 2013)
 
-- Added `unsafe` parameter to :func:`~werkzeug.urls.url_quote`.
-- Fixed an issue with :func:`~werkzeug.urls.url_quote_plus` not quoting
+- Added ``unsafe`` parameter to ``urls.url_quote``.
+- Fixed an issue with ``urls.url_quote_plus`` not quoting
   `'+'` correctly.
 - Ported remaining parts of :class:`~werkzeug.contrib.RedisCache` to
   Python 3.3.
@@ -1590,9 +1863,8 @@ Released on June 13nd 2013, codename Planierraupe.
   certificates easily and load them from files.
 - Refactored test client to invoke the open method on the class
   for redirects. This makes subclassing more powerful.
-- :func:`werkzeug.wsgi.make_chunk_iter` and
-  :func:`werkzeug.wsgi.make_line_iter` now support processing of
-  iterators and streams.
+- ``wsgi.make_chunk_iter`` and ``make_line_iter`` now support processing
+  of iterators and streams.
 - URL generation by the routing system now no longer quotes
   ``+``.
 - URL fixing now no longer quotes certain reserved characters.
@@ -1690,7 +1962,7 @@ Version 0.8.3
 
 (bugfix release, released on February 5th 2012)
 
-- Fixed another issue with :func:`werkzeug.wsgi.make_line_iter`
+- Fixed another issue with ``wsgi.make_line_iter``
   where lines longer than the buffer size were not handled
   properly.
 - Restore stdout after debug console finished executing so
@@ -1758,7 +2030,7 @@ Released on September 29th 2011, codename Lötkolben
 - Werkzeug now uses a new method to check that the length of incoming
   data is complete and will raise IO errors by itself if the server
   fails to do so.
-- :func:`~werkzeug.wsgi.make_line_iter` now requires a limit that is
+- ``wsgi.make_line_iter`` now requires a limit that is
   not higher than the length the stream can provide.
 - Refactored form parsing into a form parser class that makes it possible
   to hook into individual parts of the parsing process for debugging and
@@ -1958,7 +2230,7 @@ Released on Feb 19th 2010, codename Hammer.
 - the form data parser will now look at the filename instead the
   content type to figure out if it should treat the upload as regular
   form data or file upload. This fixes a bug with Google Chrome.
-- improved performance of `make_line_iter` and the multipart parser
+- improved performance of ``make_line_iter`` and the multipart parser
   for binary uploads.
 - fixed :attr:`~werkzeug.BaseResponse.is_streamed`
 - fixed a path quoting bug in `EnvironBuilder` that caused PATH_INFO and
@@ -2087,7 +2359,7 @@ Released on April 24th, codename Schlagbohrer.
 - added :mod:`werkzeug.contrib.lint`
 - added `passthrough_errors` to `run_simple`.
 - added `secure_filename`
-- added :func:`make_line_iter`
+- added ``make_line_iter``
 - :class:`MultiDict` copies now instead of revealing internal
   lists to the caller for `getlist` and iteration functions that
   return lists.
CONTRIBUTING.rst (131 lines changed)
@@ -7,19 +7,17 @@ Thank you for considering contributing to Werkzeug!
 Support questions
 -----------------
 
-Please don't use the issue tracker for this. The issue tracker is a
-tool to address bugs and feature requests in Werkzeug itself. Use one of
-the following resources for questions about using Werkzeug or issues
-with your own code:
+Please don't use the issue tracker for this. The issue tracker is a tool to address bugs
+and feature requests in Werkzeug itself. Use one of the following resources for
+questions about using Werkzeug or issues with your own code:
 
-- The ``#get-help`` channel on our Discord chat:
-  https://discord.gg/pallets
-- The mailing list flask@python.org for long term discussion or larger
-  issues.
+- The ``#questions`` channel on our Discord chat: https://discord.gg/pallets
 - Ask on `Stack Overflow`_. Search with Google first using:
   ``site:stackoverflow.com werkzeug {search term, exception message, etc.}``
+- Ask on our `GitHub Discussions`_ for long term discussion or larger questions.
 
 .. _Stack Overflow: https://stackoverflow.com/questions/tagged/werkzeug?tab=Frequent
+.. _GitHub Discussions: https://github.com/pallets/werkzeug/discussions
 
 
 Reporting issues
@@ -66,9 +64,30 @@ Include the following in your patch:
 .. _pre-commit: https://pre-commit.com
 
 
-First time setup
-~~~~~~~~~~~~~~~~
+First time setup using GitHub Codespaces
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
+`GitHub Codespaces`_ creates a development environment that is already set up for the
+project. By default it opens in Visual Studio Code for the Web, but this can
+be changed in your GitHub profile settings to use Visual Studio Code or JetBrains
+PyCharm on your local computer.
+
+- Make sure you have a `GitHub account`_.
+- From the project's repository page, click the green "Code" button and then "Create
+  codespace on main".
+- The codespace will be set up, then Visual Studio Code will open. However, you'll
+  need to wait a bit longer for the Python extension to be installed. You'll know it's
+  ready when the terminal at the bottom shows that the virtualenv was activated.
+- Check out a branch and `start coding`_.
+
+.. _GitHub Codespaces: https://docs.github.com/en/codespaces
+.. _devcontainer: https://docs.github.com/en/codespaces/setting-up-your-project-for-codespaces/adding-a-dev-container-configuration/introduction-to-dev-containers
+
+
+First time setup in your local environment
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Make sure you have a `GitHub account`_.
 - Download and install the `latest version of git`_.
 - Configure git with your `username`_ and `email`_.
@@ -77,99 +96,93 @@ First time setup
      $ git config --global user.name 'your name'
      $ git config --global user.email 'your email'
 
-- Make sure you have a `GitHub account`_.
 - Fork Werkzeug to your GitHub account by clicking the `Fork`_ button.
-- `Clone`_ the main repository locally.
+- `Clone`_ your fork locally, replacing ``your-username`` in the command below with
+  your actual username.
 
   .. code-block:: text
 
-      $ git clone https://github.com/pallets/werkzeug
+      $ git clone https://github.com/your-username/werkzeug
       $ cd werkzeug
 
-- Add your fork as a remote to push your work to. Replace
-  ``{username}`` with your username. This names the remote "fork", the
-  default Pallets remote is "origin".
-
-  .. code-block:: text
-
-      $ git remote add fork https://github.com/{username}/werkzeug
-
-- Create a virtualenv.
-
-  .. code-block:: text
-
-      $ python3 -m venv env
-      $ . env/bin/activate
-
-  On Windows, activating is different.
-
-  .. code-block:: text
-
-      > env\Scripts\activate
-
-- Upgrade pip and setuptools.
-
-  .. code-block:: text
-
-      $ python -m pip install --upgrade pip setuptools
-
-- Install the development dependencies, then install Werkzeug in
-  editable mode.
+- Create a virtualenv. Use the latest version of Python.
+
+  - Linux/macOS
+
+    .. code-block:: text
+
+        $ python3 -m venv .venv
+        $ . .venv/bin/activate
+
+  - Windows
+
+    .. code-block:: text
+
+        > py -3 -m venv .venv
+        > .venv\Scripts\activate
+
+- Install the development dependencies, then install Werkzeug in editable mode.
 
   .. code-block:: text
 
+      $ python -m pip install -U pip
       $ pip install -r requirements/dev.txt && pip install -e .
 
 - Install the pre-commit hooks.
 
   .. code-block:: text
 
-      $ pre-commit install
+      $ pre-commit install --install-hooks
 
+.. _GitHub account: https://github.com/join
 .. _latest version of git: https://git-scm.com/downloads
 .. _username: https://docs.github.com/en/github/using-git/setting-your-username-in-git
 .. _email: https://docs.github.com/en/github/setting-up-and-managing-your-github-user-account/setting-your-commit-email-address
-.. _GitHub account: https://github.com/join
 .. _Fork: https://github.com/pallets/werkzeug/fork
 .. _Clone: https://docs.github.com/en/github/getting-started-with-github/fork-a-repo#step-2-create-a-local-clone-of-your-fork
 
 
+.. _start coding:
+
 Start coding
 ~~~~~~~~~~~~
 
-- Create a branch to identify the issue you would like to work on. If
-  you're submitting a bug or documentation fix, branch off of the
-  latest ".x" branch.
+- Create a branch to identify the issue you would like to work on. If you're
+  submitting a bug or documentation fix, branch off of the latest ".x" branch.
 
   .. code-block:: text
 
       $ git fetch origin
-      $ git checkout -b your-branch-name origin/2.0.x
+      $ git checkout -b your-branch-name origin/2.2.x
 
-  If you're submitting a feature addition or change, branch off of the
-  "main" branch.
+  If you're submitting a feature addition or change, branch off of the "main" branch.
 
   .. code-block:: text
 
       $ git fetch origin
       $ git checkout -b your-branch-name origin/main
 
-- Using your favorite editor, make your changes,
-  `committing as you go`_.
-- Include tests that cover any code changes you make. Make sure the
-  test fails without your patch. Run the tests as described below.
-- Push your commits to your fork on GitHub and
-  `create a pull request`_. Link to the issue being addressed with
-  ``fixes #123`` in the pull request.
+- Using your favorite editor, make your changes, `committing as you go`_.
+- If you are in a codespace, you will be prompted to `create a fork`_ the first
+  time you make a commit. Enter ``Y`` to continue.
+- Include tests that cover any code changes you make. Make sure the test fails without
+  your patch. Run the tests as described below.
+- Push your commits to your fork on GitHub and `create a pull request`_. Link to the
+  issue being addressed with ``fixes #123`` in the pull request description.
 
   .. code-block:: text
 
-      $ git push --set-upstream fork your-branch-name
+      $ git push --set-upstream origin your-branch-name
 
-.. _committing as you go: https://dont-be-afraid-to-commit.readthedocs.io/en/latest/git/commandlinegit.html#commit-your-changes
+.. _committing as you go: https://afraid-to-commit.readthedocs.io/en/latest/git/commandlinegit.html#commit-your-changes
+.. _create a fork: https://docs.github.com/en/codespaces/developing-in-codespaces/using-source-control-in-your-codespace#about-automatic-forking
 .. _create a pull request: https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request
 
 
+.. _Running the tests:
+
 Running the tests
 ~~~~~~~~~~~~~~~~~
MANIFEST.in (removed, 12 lines)
@@ -1,12 +0,0 @@
include CHANGES.rst
include tox.ini
include requirements/*.txt
graft artwork
graft docs
prune docs/_build
graft examples
graft tests
include src/werkzeug/py.typed
include src/werkzeug/*.pyi
graft src/werkzeug/debug/shared
global-exclude *.pyc
README.rst
@@ -86,6 +86,4 @@ Links
 - PyPI Releases: https://pypi.org/project/Werkzeug/
 - Source Code: https://github.com/pallets/werkzeug/
 - Issue Tracker: https://github.com/pallets/werkzeug/issues/
-- Website: https://palletsprojects.com/p/werkzeug/
-- Twitter: https://twitter.com/PalletsTeam
 - Chat: https://discord.gg/pallets
artwork/logo.png (binary changed, 34 KiB to 16 KiB)
debian/changelog (removed, 29 lines)
@@ -1,29 +0,0 @@
python-werkzeug (2.2.2-ok5) yangtze; urgency=medium

  * Update version info.

 -- sufang <sufang@kylinos.cn>  Tue, 21 Mar 2023 14:06:59 +0800

python-werkzeug (2.2.2-ok4) yangtze; urgency=medium

  * Add python3-jinja2 to build-depends.

 -- sufang <sufang@kylinos.cn>  Tue, 14 Mar 2023 16:11:45 +0800

python-werkzeug (2.2.2-ok3) yangtze; urgency=medium

  * Fix command 'install' has no such option 'install_layout'.

 -- sufang <sufang@kylinos.cn>  Tue, 14 Mar 2023 14:53:39 +0800

python-werkzeug (2.2.2-ok2) yangtze; urgency=medium

  * Apply patch.

 -- sufang <sufang@kylinos.cn>  Tue, 14 Mar 2023 14:51:23 +0800

python-werkzeug (2.2.2-ok1) yangtze; urgency=medium

  * Build for openkylin.

 -- sufang <sufang@kylinos.cn>  Mon, 30 Jan 2023 17:20:54 +0800
debian/control (removed, 69 lines)
@@ -1,69 +0,0 @@
Source: python-werkzeug
Section: python
Priority: optional
Maintainer: OpenKylin Developers <packaging@lists.openkylin.top>
Standards-Version: 4.6.1
Build-Depends:
 debhelper-compat (= 13),
 dh-python,
 python3-all,
 python3-cryptography <!nocheck>,
 python3-doc,
 python3-ephemeral-port-reserve <!nocheck>,
 python3-greenlet <!nocheck>,
 python3-pallets-sphinx-themes <!nodoc>,
 python3-pytest <!nocheck>,
 python3-pytest-timeout <!nocheck>,
 python3-pytest-xprocess <!nocheck>,
 python3-setuptools,
 python3-sphinx <!nodoc>,
 python3-sphinx-issues <!nodoc>,
 python3-sphinxcontrib-log-cabinet <!nodoc>,
 python3-watchdog <!nocheck>,
 python3-jinja2
Homepage: http://werkzeug.pocoo.org/
Vcs-Git: https://gitee.com/openkylin/python-werkzeug.git
Vcs-Browser: https://gitee.com/openkylin/python-werkzeug
Testsuite: autopkgtest-pkg-python
Rules-Requires-Root: no

Package: python3-werkzeug
Architecture: all
Depends:
 libjs-jquery,
 ${misc:Depends},
 ${python3:Depends},
Recommends:
 python3-openssl,
 python3-pyinotify,
Suggests:
 ipython3,
 python-werkzeug-doc,
 python3-lxml,
 python3-pkg-resources,
 python3-watchdog,
Description: collection of utilities for WSGI applications (Python 3.x)
 The Web Server Gateway Interface (WSGI) is a standard interface between web
 server software and web applications written in Python.
 .
 Werkzeug is a lightweight library for interfacing with WSGI. It features
 request and response objects, an interactive debugging system and a powerful
 URI dispatcher. Combine with your choice of third party libraries and
 middleware to easily create a custom application framework.
 .
 This package contains the Python 3.x module.

Package: python-werkzeug-doc
Section: doc
Architecture: all
Depends:
 ${misc:Depends},
 ${sphinxdoc:Depends},
Multi-Arch: foreign
Description: documentation for the werkzeug Python library (docs)
 Werkzeug is a lightweight library for interfacing with WSGI. It features
 request and response objects, an interactive debugging system and a powerful
 URI dispatcher. Combine with your choice of third party libraries and
 middleware to easily create a custom application framework.
 .
 This package provides the Sphinx generated documentation for Werkzeug.
@ -1,369 +0,0 @@
|
||||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
|
||||||
Upstream-Name: python-werkzeug
|
|
||||||
Source: <url://example.com>
|
|
||||||
#
|
|
||||||
# Please double check copyright with the licensecheck(1) command.
|
|
||||||
|
|
||||||
Files: .editorconfig
|
|
||||||
.gitattributes
|
|
||||||
.github/ISSUE_TEMPLATE/bug-report.md
|
|
||||||
.github/ISSUE_TEMPLATE/config.yml
|
|
||||||
.github/ISSUE_TEMPLATE/feature-request.md
|
|
||||||
.github/dependabot.yml
|
|
||||||
.github/workflows/lock.yaml
|
|
||||||
.github/workflows/tests.yaml
|
|
||||||
.gitignore
|
|
||||||
.pre-commit-config.yaml
|
|
||||||
.readthedocs.yaml
|
|
||||||
CHANGES.rst
|
|
||||||
CODE_OF_CONDUCT.md
|
|
||||||
CONTRIBUTING.rst
|
|
||||||
MANIFEST.in
|
|
||||||
README.rst
|
|
||||||
artwork/logo.png
|
|
||||||
docs/Makefile
|
|
||||||
docs/_static/debug-screenshot.png
|
|
||||||
docs/_static/favicon.ico
|
|
||||||
docs/_static/shortly.png
|
|
||||||
docs/_static/werkzeug.png
|
|
||||||
docs/changes.rst
|
|
||||||
docs/conf.py
|
|
||||||
docs/datastructures.rst
|
|
||||||
docs/debug.rst
|
|
||||||
docs/deployment/apache-httpd.rst
|
|
||||||
docs/deployment/eventlet.rst
|
|
||||||
docs/deployment/gevent.rst
|
|
||||||
docs/deployment/gunicorn.rst
|
|
||||||
docs/deployment/index.rst
|
|
||||||
docs/deployment/mod_wsgi.rst
|
|
||||||
docs/deployment/nginx.rst
|
|
||||||
docs/deployment/proxy_fix.rst
|
|
||||||
docs/deployment/uwsgi.rst
|
|
||||||
docs/deployment/waitress.rst
|
|
||||||
docs/exceptions.rst
|
|
||||||
docs/http.rst
|
|
||||||
docs/index.rst
|
|
||||||
docs/installation.rst
|
|
||||||
docs/levels.rst
|
|
||||||
docs/license.rst
|
|
||||||
docs/local.rst
|
|
||||||
docs/make.bat
|
|
||||||
docs/middleware/dispatcher.rst
|
|
||||||
docs/middleware/http_proxy.rst
|
|
||||||
docs/middleware/index.rst
|
|
||||||
docs/middleware/lint.rst
|
|
||||||
docs/middleware/profiler.rst
|
|
||||||
docs/middleware/proxy_fix.rst
|
|
||||||
docs/middleware/shared_data.rst
|
|
||||||
docs/quickstart.rst
|
|
||||||
docs/request_data.rst
|
|
||||||
docs/routing.rst
|
|
||||||
docs/serving.rst
|
|
||||||
docs/terms.rst
|
|
||||||
docs/test.rst
|
|
||||||
docs/tutorial.rst
|
|
||||||
docs/unicode.rst
|
|
||||||
docs/urls.rst
|
|
||||||
docs/utils.rst
|
|
||||||
docs/wrappers.rst
|
|
||||||
docs/wsgi.rst
|
|
||||||
examples/README.rst
|
|
||||||
examples/coolmagic/__init__.py
|
|
||||||
examples/coolmagic/application.py
|
|
||||||
examples/coolmagic/helpers.py
|
|
||||||
examples/coolmagic/public/style.css
|
|
||||||
examples/coolmagic/templates/static/about.html
|
|
||||||
examples/coolmagic/templates/static/index.html
|
|
||||||
examples/coolmagic/templates/static/not_found.html
|
|
||||||
examples/coolmagic/utils.py
|
|
||||||
examples/coolmagic/views/__init__.py
|
|
||||||
examples/coolmagic/views/static.py
|
|
||||||
examples/couchy/README
|
|
||||||
examples/couchy/__init__.py
|
|
||||||
examples/couchy/application.py
|
|
||||||
examples/couchy/models.py
|
|
||||||
examples/couchy/static/style.css
|
|
||||||
examples/couchy/templates/display.html
|
|
||||||
examples/couchy/templates/list.html
|
|
||||||
examples/couchy/templates/new.html
|
|
||||||
examples/couchy/templates/not_found.html
|
|
||||||
examples/couchy/utils.py
|
|
||||||
examples/couchy/views.py
|
|
||||||
examples/cupoftee/__init__.py
|
|
||||||
examples/cupoftee/application.py
|
|
||||||
examples/cupoftee/db.py
|
|
||||||
examples/cupoftee/network.py
|
|
||||||
examples/cupoftee/pages.py
|
|
||||||
examples/cupoftee/shared/content.png
|
|
||||||
examples/cupoftee/shared/down.png
|
|
||||||
examples/cupoftee/shared/favicon.ico
|
|
||||||
examples/cupoftee/shared/header.png
|
|
||||||
examples/cupoftee/shared/logo.png
|
|
||||||
examples/cupoftee/shared/style.css
|
|
||||||
examples/cupoftee/shared/up.png
|
|
||||||
examples/cupoftee/templates/missingpage.html
|
|
||||||
examples/cupoftee/templates/search.html
|
|
||||||
examples/cupoftee/templates/server.html
|
|
||||||
examples/cupoftee/templates/serverlist.html
|
|
||||||
examples/cupoftee/utils.py
|
|
||||||
examples/httpbasicauth.py
|
|
||||||
examples/i18nurls/__init__.py
examples/i18nurls/application.py
examples/i18nurls/templates/about.html
examples/i18nurls/templates/blog.html
examples/i18nurls/templates/index.html
examples/i18nurls/urls.py
examples/i18nurls/views.py
examples/manage-coolmagic.py
examples/manage-couchy.py
examples/manage-cupoftee.py
examples/manage-i18nurls.py
examples/manage-plnt.py
examples/manage-shorty.py
examples/manage-simplewiki.py
examples/manage-webpylike.py
examples/partial/README
examples/partial/complex_routing.py
examples/plnt/__init__.py
examples/plnt/database.py
examples/plnt/shared/style.css
examples/plnt/sync.py
examples/plnt/templates/about.html
examples/plnt/templates/index.html
examples/plnt/utils.py
examples/plnt/views.py
examples/plnt/webapp.py
examples/shortly/shortly.py
examples/shortly/static/style.css
examples/shortly/templates/404.html
examples/shortly/templates/new_url.html
examples/shortly/templates/short_link_details.html
examples/shorty/__init__.py
examples/shorty/application.py
examples/shorty/models.py
examples/shorty/static/style.css
examples/shorty/templates/display.html
examples/shorty/templates/list.html
examples/shorty/templates/new.html
examples/shorty/templates/not_found.html
examples/shorty/utils.py
examples/shorty/views.py
examples/simplewiki/__init__.py
examples/simplewiki/actions.py
examples/simplewiki/application.py
examples/simplewiki/database.py
examples/simplewiki/shared/style.css
examples/simplewiki/specialpages.py
examples/simplewiki/utils.py
examples/upload.py
examples/webpylike/example.py
examples/webpylike/webpylike.py
examples/wsecho.py
requirements/dev.in
requirements/dev.txt
requirements/docs.in
requirements/docs.txt
requirements/tests.in
requirements/tests.txt
requirements/typing.in
requirements/typing.txt
setup.cfg
setup.py
src/werkzeug/__init__.py
src/werkzeug/_internal.py
src/werkzeug/_reloader.py
src/werkzeug/datastructures.py
src/werkzeug/datastructures.pyi
src/werkzeug/debug/__init__.py
src/werkzeug/debug/console.py
src/werkzeug/debug/repr.py
src/werkzeug/debug/shared/ICON_LICENSE.md
src/werkzeug/debug/shared/console.png
src/werkzeug/debug/shared/debugger.js
src/werkzeug/debug/shared/less.png
src/werkzeug/debug/shared/more.png
src/werkzeug/debug/shared/style.css
src/werkzeug/debug/tbtools.py
src/werkzeug/exceptions.py
src/werkzeug/formparser.py
src/werkzeug/http.py
src/werkzeug/local.py
src/werkzeug/middleware/__init__.py
src/werkzeug/middleware/dispatcher.py
src/werkzeug/middleware/http_proxy.py
src/werkzeug/middleware/lint.py
src/werkzeug/middleware/profiler.py
src/werkzeug/middleware/proxy_fix.py
src/werkzeug/middleware/shared_data.py
src/werkzeug/py.typed
src/werkzeug/routing/__init__.py
src/werkzeug/routing/converters.py
src/werkzeug/routing/exceptions.py
src/werkzeug/routing/map.py
src/werkzeug/routing/matcher.py
src/werkzeug/routing/rules.py
src/werkzeug/sansio/__init__.py
src/werkzeug/sansio/http.py
src/werkzeug/sansio/multipart.py
src/werkzeug/sansio/request.py
src/werkzeug/sansio/response.py
src/werkzeug/sansio/utils.py
src/werkzeug/security.py
src/werkzeug/serving.py
src/werkzeug/test.py
src/werkzeug/testapp.py
src/werkzeug/urls.py
src/werkzeug/user_agent.py
src/werkzeug/utils.py
src/werkzeug/wrappers/__init__.py
src/werkzeug/wrappers/request.py
src/werkzeug/wrappers/response.py
src/werkzeug/wsgi.py
tests/conftest.py
tests/live_apps/data_app.py
tests/live_apps/reloader_app.py
tests/live_apps/run.py
tests/live_apps/standard_app.py
tests/live_apps/streaming_app.py
tests/middleware/test_dispatcher.py
tests/middleware/test_http_proxy.py
tests/middleware/test_lint.py
tests/middleware/test_proxy_fix.py
tests/middleware/test_shared_data.py
tests/multipart/firefox3-2png1txt/file1.png
tests/multipart/firefox3-2png1txt/file2.png
tests/multipart/firefox3-2png1txt/request.http
tests/multipart/firefox3-2png1txt/text.txt
tests/multipart/firefox3-2pnglongtext/file1.png
tests/multipart/firefox3-2pnglongtext/file2.png
tests/multipart/firefox3-2pnglongtext/request.http
tests/multipart/firefox3-2pnglongtext/text.txt
tests/multipart/ie6-2png1txt/file1.png
tests/multipart/ie6-2png1txt/file2.png
tests/multipart/ie6-2png1txt/request.http
tests/multipart/ie6-2png1txt/text.txt
tests/multipart/ie7_full_path_request.http
tests/multipart/opera8-2png1txt/file1.png
tests/multipart/opera8-2png1txt/file2.png
tests/multipart/opera8-2png1txt/request.http
tests/multipart/opera8-2png1txt/text.txt
tests/multipart/webkit3-2png1txt/file1.png
tests/multipart/webkit3-2png1txt/file2.png
tests/multipart/webkit3-2png1txt/request.http
tests/multipart/webkit3-2png1txt/text.txt
tests/res/test.txt
tests/sansio/__init__.py
tests/sansio/test_multipart.py
tests/sansio/test_request.py
tests/sansio/test_utils.py
tests/test_datastructures.py
tests/test_debug.py
tests/test_exceptions.py
tests/test_formparser.py
tests/test_http.py
tests/test_internal.py
tests/test_local.py
tests/test_routing.py
tests/test_security.py
tests/test_send_file.py
tests/test_serving.py
tests/test_test.py
tests/test_urls.py
tests/test_utils.py
tests/test_wrappers.py
tests/test_wsgi.py
tox.ini
Copyright: __NO_COPYRIGHT_NOR_LICENSE__
License: __NO_COPYRIGHT_NOR_LICENSE__

Files: LICENSE.rst
Copyright: 2007 Pallets
License: BSD-3-Clause
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are
 met:
 .
 1. Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.
 .
 2. Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution.
 .
 3. Neither the name of the copyright holder nor the names of its
    contributors may be used to endorse or promote products derived from
    this software without specific prior written permission.
 .
 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
 PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 .
 On Debian systems, the complete text of the BSD 3-clause "New" or "Revised"
 License can be found in `/usr/share/common-licenses/BSD'.

#----------------------------------------------------------------------------
# xml and html files (skipped):
#  tests/res/index.html
#  examples/shortly/templates/layout.html
#  examples/coolmagic/templates/layout.html
#  examples/plnt/templates/layout.html
#  examples/simplewiki/templates/action_revert.html
#  examples/simplewiki/templates/page_index.html
#  examples/simplewiki/templates/page_missing.html
#  examples/simplewiki/templates/action_edit.html
#  examples/simplewiki/templates/action_log.html
#  examples/simplewiki/templates/action_show.html
#  examples/simplewiki/templates/macros.xml
#  examples/simplewiki/templates/recent_changes.html
#  examples/simplewiki/templates/missing_action.html
#  examples/simplewiki/templates/layout.html
#  examples/simplewiki/templates/action_diff.html
#  examples/i18nurls/templates/layout.html
#  examples/shorty/templates/layout.html
#  examples/cupoftee/templates/layout.html
#  examples/couchy/templates/layout.html
#  .github/pull_request_template.md
#  artwork/logo.svg

#----------------------------------------------------------------------------
# Files marked as NO_LICENSE_TEXT_FOUND may be covered by the following
# license/copyright files.

#----------------------------------------------------------------------------
# License file: LICENSE.rst
Copyright 2007 Pallets
.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
.
1. Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
.
2. Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in the
   documentation and/or other materials provided with the distribution.
.
3. Neither the name of the copyright holder nor the names of its
   contributors may be used to endorse or promote products derived from
   this software without specific prior written permission.
.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@ -1 +0,0 @@
# You must remove unused comment lines for the released package.

@ -1,10 +0,0 @@
Document: werkzeug
Title: Werkzeug Documentation
Author: Armin Ronacher
Abstract: This document describes Werkzeug - collection of utilities for WSGI
 applications written in Python.
Section: Programming/Python

Format: HTML
Index: /usr/share/doc/python-werkzeug-doc/html/index.html
Files: /usr/share/doc/python-werkzeug-doc/html/*.html

@ -1 +0,0 @@
examples/*

@ -1,6 +0,0 @@
/usr/share/doc/python-werkzeug-doc/examples /usr/share/doc/python-werkzeug/examples
/usr/share/doc/python-werkzeug-doc/examples /usr/share/doc/python3-werkzeug/examples
/usr/share/doc/python-werkzeug-doc/html /usr/share/doc/python-werkzeug/html
/usr/share/doc/python-werkzeug-doc/html /usr/share/doc/python3-werkzeug/html
/usr/share/doc/python-werkzeug-doc/html/_sources /usr/share/doc/python-werkzeug/rst
/usr/share/doc/python-werkzeug-doc/html/_sources /usr/share/doc/python3-werkzeug/rst

@ -1 +0,0 @@
/usr/share/javascript/jquery/jquery.js /usr/lib/python3/dist-packages/werkzeug/debug/shared/jquery.js

@ -1,39 +0,0 @@
#!/usr/bin/make -f

# Copyright 2009, Noah Slater <nslater@tumbolia.org>

# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and this
# notice are preserved.

export PYBUILD_NAME=werkzeug
export PYBUILD_TEST_PYTEST=1
export SETUPTOOLS_USE_DISTUTILS=stdlib

%:
	dh $@ --with python3,sphinxdoc --buildsystem pybuild

override_dh_auto_clean:
	make -C docs clean
	rm -rf build Werkzeug.egg-info/
	#find $(CURDIR) \( -name '\._*' -o -name '\.DS_Store' \) -delete
	find . -iname '__pycache__' -exec rm -rf {} \; || true
	rm -rf .pytest_cache
	dh_auto_clean

override_dh_fixperms:
	find debian/ -name '*\.png' -exec chmod -x '{}' \;
	dh_fixperms

override_dh_installdocs:
	dh_installdocs --doc-main-package=python-werkzeug-doc -ppython-werkzeug-doc
	dh_installdocs

override_dh_installexamples:
	dh_installexamples --doc-main-package=python-werkzeug-doc -ppython-werkzeug-doc

override_dh_sphinxdoc:
ifeq (,$(findstring nodocs, $(DEB_BUILD_OPTIONS)))
	PYTHONPATH=src python3 -m sphinx -b html docs/ debian/python-werkzeug-doc/usr/share/doc/python-werkzeug-doc/html/
	dh_sphinxdoc
endif

@ -1 +0,0 @@
3.0 (native)

@ -1,5 +0,0 @@
Tests: upstream
Depends:
 @,
 @builddeps@,
Restrictions: allow-stderr

@ -1,14 +0,0 @@
#!/bin/sh
set -eu

export LC_ALL=C.UTF-8
pyvers=$(py3versions -r 2>/dev/null)

cp -a tests "$AUTOPKGTEST_TMP"
cd "$AUTOPKGTEST_TMP"

for py in ${pyvers}; do
    echo "-=-=-=-=-=-=-=- running tests for ${py} -=-=-=-=-=-=-=-=-"
    printf '$ %s\n' "${py} -m pytest tests"
    ${py} -m pytest tests
done

@ -1,4 +0,0 @@
Bug-Database: https://github.com/pallets/werkzeug/issues
Bug-Submit: https://github.com/pallets/werkzeug/issues/new
Repository: https://github.com/pallets/werkzeug.git
Repository-Browse: https://github.com/pallets/werkzeug

@ -1,6 +0,0 @@
version=3
opts=uversionmangle=s/(rc|a|b|c)/~$1/,\
dversionmangle=auto,\
repack,\
filenamemangle=s/.+\/v?(\d\S*)\.tar\.gz/werkzeug-$1\.tar\.gz/ \
https://github.com/pallets/werkzeug/tags .*/v?(\d\S*)\.tar\.gz
[binary image changes, sizes only: before 1.1 KiB; after 2.3 KiB, 28 KiB, 15 KiB; before 19 KiB]
@ -26,14 +26,13 @@ issues_github_path = "pallets/werkzeug"

 # HTML -----------------------------------------------------------------

 html_theme = "werkzeug"
+html_theme_options = {"index_sidebar_logo": False}
 html_context = {
     "project_links": [
         ProjectLink("Donate", "https://palletsprojects.com/donate"),
         ProjectLink("PyPI Releases", "https://pypi.org/project/Werkzeug/"),
         ProjectLink("Source Code", "https://github.com/pallets/werkzeug/"),
         ProjectLink("Issue Tracker", "https://github.com/pallets/werkzeug/issues/"),
-        ProjectLink("Website", "https://palletsprojects.com/p/werkzeug/"),
-        ProjectLink("Twitter", "https://twitter.com/PalletsTeam"),
         ProjectLink("Chat", "https://discord.gg/pallets"),
     ]
 }
@ -43,8 +42,8 @@ html_sidebars = {
 }
 singlehtml_sidebars = {"index": ["project.html", "localtoc.html", "ethicalads.html"]}
 html_static_path = ["_static"]
-html_favicon = "_static/favicon.ico"
+html_favicon = "_static/shortcut-icon.png"
-html_logo = "_static/werkzeug.png"
+html_logo = "_static/werkzeug-vertical.png"
 html_title = f"Werkzeug Documentation ({version})"
 html_show_sourcelink = False
@ -53,10 +53,6 @@ by :rfc:`2616`, Werkzeug implements some custom data structures that are

 .. autofunction:: parse_cache_control_header

-.. autofunction:: parse_authorization_header
-
-.. autofunction:: parse_www_authenticate_header
-
 .. autofunction:: parse_if_range_header

 .. autofunction:: parse_range_header
@ -1,6 +1,12 @@
+.. rst-class:: hide-header
+
 Werkzeug
 ========

+.. image:: _static/werkzeug-horizontal.png
+    :align: center
+    :target: https://werkzeug.palletsprojects.com
+
 *werkzeug* German noun: "tool".
 Etymology: *werk* ("work"), *zeug* ("stuff")
@ -72,7 +78,6 @@ Additional Information
     :maxdepth: 2

     terms
-    unicode
     request_data
     license
     changes
@ -6,13 +6,7 @@ Python Version
 --------------

 We recommend using the latest version of Python. Werkzeug supports
-Python 3.7 and newer.
+Python 3.8 and newer.


-Dependencies
-------------
-
-Werkzeug does not have any direct dependencies.
-
-
 Optional dependencies
@ -1 +1,20 @@
-.. automodule:: werkzeug.middleware
+Middleware
+==========
+
+A WSGI middleware is a WSGI application that wraps another application
+in order to observe or change its behavior. Werkzeug provides some
+middleware for common use cases.
+
+.. toctree::
+    :maxdepth: 1
+
+    proxy_fix
+    shared_data
+    dispatcher
+    http_proxy
+    lint
+    profiler
+
+The :doc:`interactive debugger </debug>` is also a middleware that can
+be applied manually, although it is typically used automatically with
+the :doc:`development server </serving>`.
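As a rough, hypothetical sketch of the wrapping pattern the new middleware page
describes (not part of the upstream diff; the ``RequestCounter`` name is
invented for illustration), a minimal WSGI middleware can look like this:

.. code-block:: python

    from werkzeug.wrappers import Request, Response


    class RequestCounter:
        """Count requests, then delegate to the wrapped WSGI application."""

        def __init__(self, app):
            self.app = app
            self.count = 0

        def __call__(self, environ, start_response):
            self.count += 1
            # Pass the call through unchanged; a middleware may also rewrite
            # the environ or wrap start_response here.
            return self.app(environ, start_response)


    @Request.application
    def app(request):
        return Response("Hello, World!")


    app = RequestCounter(app)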
@ -43,9 +43,7 @@ there:

 >>> request = Request(environ)

 Now you can access the important variables and Werkzeug will parse them
-for you and decode them where it makes sense. The default charset for
-requests is set to `utf-8` but you can change that by subclassing
-:class:`Request`.
+for you and decode them where it makes sense.

 >>> request.path
 '/foo'
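For illustration, the parsing described above can be exercised without a real
server by building an environ with Werkzeug's test helpers (a small sketch;
the path and query values are made up):

.. code-block:: python

    from werkzeug.test import create_environ
    from werkzeug.wrappers import Request

    # Build a WSGI environ by hand, then wrap it in a Request.
    environ = create_environ("/foo", "http://localhost:8080/", query_string="a=1&b=2")
    request = Request(environ)

    print(request.path)       # '/foo'
    print(request.args["a"])  # '1', already parsed and decoded to str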
@ -73,23 +73,31 @@ read the stream *or* call :meth:`~Request.get_data`.

 Limiting Request Data
 ---------------------

-To avoid being the victim of a DDOS attack you can set the maximum
-accepted content length and request field sizes. The :class:`Request`
-class has two attributes for that: :attr:`~Request.max_content_length`
-and :attr:`~Request.max_form_memory_size`.
-
-The first one can be used to limit the total content length. For example
-by setting it to ``1024 * 1024 * 16`` the request won't accept more than
-16MB of transmitted data.
-
-Because certain data can't be moved to the hard disk (regular post data)
-whereas temporary files can, there is a second limit you can set. The
-:attr:`~Request.max_form_memory_size` limits the size of `POST`
-transmitted form data. By setting it to ``1024 * 1024 * 2`` you can make
-sure that all in memory-stored fields are not more than 2MB in size.
-
-This however does *not* affect in-memory stored files if the
-`stream_factory` used returns a in-memory file.
+The :class:`Request` class provides a few attributes to control how much data is
+processed from the request body. This can help mitigate DoS attacks that craft the
+request in such a way that the server uses too many resources to handle it. Each of
+these limits will raise a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` if they are
+exceeded.
+
+-   :attr:`~Request.max_content_length` Stop reading request data after this number
+    of bytes. It's better to configure this in the WSGI server or HTTP server, rather
+    than the WSGI application.
+-   :attr:`~Request.max_form_memory_size` Stop reading request data if any form part is
+    larger than this number of bytes. While file parts can be moved to disk, regular
+    form field data is stored in memory only.
+-   :attr:`~Request.max_form_parts` Stop reading request data if more than this number
+    of parts are sent in multipart form data. This is useful to stop a very large number
+    of very small parts, especially file parts. The default is 1000.
+
+Using Werkzeug to set these limits is only one layer of protection. WSGI servers
+and HTTPS servers should set their own limits on size and timeouts. The operating system
+or container manager should set limits on memory and processing time for server
+processes.
+
+If a 413 Content Too Large error is returned before the entire request is read, clients
+may show a "connection reset" failure instead of the 413 error. This is based on how the
+WSGI/HTTP server and client handle connections, it's not something the WSGI application
+(Werkzeug) has control over.


 How to extend Parsing?
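A minimal sketch of applying those limits on a request subclass; the values
below are only illustrative, not recommendations:

.. code-block:: python

    from werkzeug.wrappers import Request


    class LimitedRequest(Request):
        # Reject bodies larger than 16 MiB.
        max_content_length = 16 * 1024 * 1024
        # Reject any single in-memory form field larger than 2 MiB.
        max_form_memory_size = 2 * 1024 * 1024
        # Reject multipart bodies with more than 500 parts.
        max_form_parts = 500

Exceeding any of these limits while the form data is parsed results in a 413
response, as the section above notes.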
@ -18,8 +18,8 @@ requests.

 >>> response = c.get("/")
 >>> response.status_code
 200
->>> resp.headers
+>>> response.headers
-Headers([('Content-Type', 'text/html; charset=utf-8'), ('Content-Length', '6658')])
+Headers([('Content-Type', 'text/html; charset=utf-8'), ('Content-Length', '5211')])
 >>> response.get_data(as_text=True)
 '<!doctype html>...'
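The ``c`` in the doctest above is a test client. A self-contained version, with
a trivial application invented purely for illustration, might look like this:

.. code-block:: python

    from werkzeug.test import Client
    from werkzeug.wrappers import Request, Response


    @Request.application
    def app(request):
        return Response("<!doctype html>...", mimetype="text/html")


    c = Client(app)
    response = c.get("/")
    print(response.status_code)              # 200
    print(response.headers["Content-Type"])  # 'text/html; charset=utf-8'
    print(response.get_data(as_text=True))   # '<!doctype html>...'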
@ -102,6 +102,10 @@ API
     :members:
     :member-order: bysource

+.. autoclass:: Cookie
+    :members:
+    :member-order: bysource
+
 .. autoclass:: EnvironBuilder
     :members:
     :member-order: bysource
@ -1,76 +0,0 @@
Unicode
=======

.. currentmodule:: werkzeug

Werkzeug uses strings internally everwhere text data is assumed, even if
the HTTP standard is not Unicode aware. Basically all incoming data is
decoded from the charset (UTF-8 by default) so that you don't work with
bytes directly. Outgoing data is encoded into the target charset.


Unicode in Python
-----------------

Imagine you have the German Umlaut ``ö``. In ASCII you cannot represent
that character, but in the ``latin-1`` and ``utf-8`` character sets you
can represent it, but they look different when encoded:

>>> "ö".encode("latin1")
b'\xf6'
>>> "ö".encode("utf-8")
b'\xc3\xb6'

An ``ö`` looks different depending on the encoding which makes it hard
to work with it as bytes. Instead, Python treats strings as Unicode text
and stores the information ``LATIN SMALL LETTER O WITH DIAERESIS``
instead of the bytes for ``ö`` in a specific encoding. The length of a
string with 1 character will be 1, where the length of the bytes might
be some other value.


Unicode in HTTP
---------------

However, the HTTP spec was written in a time where ASCII bytes were the
common way data was represented. To work around this for the modern
web, Werkzeug decodes and encodes incoming and outgoing data
automatically. Data sent from the browser to the web application is
decoded from UTF-8 bytes into a string. Data sent from the application
back to the browser is encoded back to UTF-8.


Error Handling
--------------

Functions that do internal encoding or decoding accept an ``errors``
keyword argument that is passed to :meth:`str.decode` and
:meth:`str.encode`. The default is ``'replace'`` so that errors are easy
to spot. It might be useful to set it to ``'strict'`` in order to catch
the error and report the bad data to the client.


Request and Response Objects
----------------------------

In most cases, you should stick with Werkzeug's default encoding of
UTF-8. If you have a specific reason to, you can subclass
:class:`wrappers.Request` and :class:`wrappers.Response` to change the
encoding and error handling.

.. code-block:: python

    from werkzeug.wrappers.request import Request
    from werkzeug.wrappers.response import Response

    class Latin1Request(Request):
        charset = "latin1"
        encoding_errors = "strict"

    class Latin1Response(Response):
        charset = "latin1"

The error handling can only be changed for the request. Werkzeug will
always raise errors when encoding to bytes in the response. It's your
responsibility to not create data that is not present in the target
charset. This is not an issue for UTF-8.
@ -23,6 +23,8 @@ General Helpers

 .. autofunction:: send_file

+.. autofunction:: send_from_directory
+
 .. autofunction:: import_string

 .. autofunction:: find_modules
@ -22,10 +22,6 @@ iterator and the input stream.
 .. autoclass:: LimitedStream
     :members:

-.. autofunction:: make_line_iter
-
-.. autofunction:: make_chunk_iter
-
 .. autofunction:: wrap_file


@ -43,18 +39,6 @@ information or perform common manipulations:

 .. autofunction:: get_current_url

-.. autofunction:: get_query_string
-
-.. autofunction:: get_script_name
-
-.. autofunction:: get_path_info
-
-.. autofunction:: pop_path_info
-
-.. autofunction:: peek_path_info
-
-.. autofunction:: extract_path_info
-
 .. autofunction:: host_is_trusted
@ -1,6 +1,7 @@
 from os import path
 from random import randrange
 from random import sample
+from urllib.parse import urlsplit

 from jinja2 import Environment
 from jinja2 import FileSystemLoader
@ -8,7 +9,6 @@ from werkzeug.local import Local
 from werkzeug.local import LocalManager
 from werkzeug.routing import Map
 from werkzeug.routing import Rule
-from werkzeug.urls import url_parse
 from werkzeug.utils import cached_property
 from werkzeug.wrappers import Response

@ -49,7 +49,7 @@ def render_template(template, **context):


 def validate_url(url):
-    return url_parse(url)[0] in ALLOWED_SCHEMES
+    return urlsplit(url)[0] in ALLOWED_SCHEMES


 def get_random_uid():
@ -1,5 +1,6 @@
 """A simple URL shortener using Werkzeug and redis."""
 import os
+from urllib.parse import urlsplit

 import redis
 from jinja2 import Environment
@ -9,7 +10,6 @@ from werkzeug.exceptions import NotFound
 from werkzeug.middleware.shared_data import SharedDataMiddleware
 from werkzeug.routing import Map
 from werkzeug.routing import Rule
-from werkzeug.urls import url_parse
 from werkzeug.utils import redirect
 from werkzeug.wrappers import Request
 from werkzeug.wrappers import Response
@ -27,12 +27,12 @@ def base36_encode(number):


 def is_valid_url(url):
-    parts = url_parse(url)
+    parts = urlsplit(url)
     return parts.scheme in ("http", "https")


 def get_hostname(url):
-    return url_parse(url).netloc
+    return urlsplit(url).netloc


 class Shortly:
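These example changes drop the removed ``werkzeug.urls`` helpers in favour of
the standard library. Roughly, ``url_parse`` maps to ``urllib.parse.urlsplit``,
``url_quote`` to ``quote``, and ``url_encode`` to ``urlencode``; a quick sketch
with made-up values:

.. code-block:: python

    from urllib.parse import quote, urlencode, urlsplit

    parts = urlsplit("https://example.com/some path?x=1")
    print(parts.scheme)   # 'https'
    print(parts.netloc)   # 'example.com'

    print(quote("some path"))               # 'some%20path'
    print(urlencode({"q": "a b", "n": 1}))  # 'q=a+b&n=1'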
@ -1,6 +1,7 @@
 from os import path
 from random import randrange
 from random import sample
+from urllib.parse import urlsplit

 from jinja2 import Environment
 from jinja2 import FileSystemLoader
@ -11,7 +12,6 @@ from werkzeug.local import Local
 from werkzeug.local import LocalManager
 from werkzeug.routing import Map
 from werkzeug.routing import Rule
-from werkzeug.urls import url_parse
 from werkzeug.utils import cached_property
 from werkzeug.wrappers import Response

@ -59,7 +59,7 @@ def render_template(template, **context):


 def validate_url(url):
-    return url_parse(url)[0] in ALLOWED_SCHEMES
+    return urlsplit(url)[0] in ALLOWED_SCHEMES


 def get_random_uid():
@ -1,12 +1,12 @@
 from os import path
+from urllib.parse import quote
+from urllib.parse import urlencode

 import creoleparser
 from genshi import Stream
 from genshi.template import TemplateLoader
 from werkzeug.local import Local
 from werkzeug.local import LocalManager
-from werkzeug.urls import url_encode
-from werkzeug.urls import url_quote
 from werkzeug.utils import cached_property
 from werkzeug.wrappers import Request as BaseRequest
 from werkzeug.wrappers import Response as BaseResponse
@ -58,9 +58,9 @@ def href(*args, **kw):
     """
     result = [f"{request.script_root if request else ''}/"]
     for idx, arg in enumerate(args):
-        result.append(f"{'/' if idx else ''}{url_quote(arg)}")
+        result.append(f"{'/' if idx else ''}{quote(arg)}")
     if kw:
-        result.append(f"?{url_encode(kw)}")
+        result.append(f"?{urlencode(kw)}")
     return "".join(result)
@ -0,0 +1,104 @@
[project]
name = "Werkzeug"
version = "3.0.1"
description = "The comprehensive WSGI web application library."
readme = "README.rst"
license = {file = "LICENSE.rst"}
maintainers = [{name = "Pallets", email = "contact@palletsprojects.com"}]
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Environment :: Web Environment",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: BSD License",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
    "Topic :: Internet :: WWW/HTTP :: WSGI",
    "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
    "Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware",
    "Topic :: Software Development :: Libraries :: Application Frameworks",
]
requires-python = ">=3.8"
dependencies = ["MarkupSafe>=2.1.1"]

[project.urls]
Donate = "https://palletsprojects.com/donate"
Documentation = "https://werkzeug.palletsprojects.com/"
Changes = "https://werkzeug.palletsprojects.com/changes/"
"Source Code" = "https://github.com/pallets/werkzeug/"
"Issue Tracker" = "https://github.com/pallets/werkzeug/issues/"
Chat = "https://discord.gg/pallets"

[project.optional-dependencies]
watchdog = ["watchdog>=2.3"]

[build-system]
requires = ["flit_core<4"]
build-backend = "flit_core.buildapi"

[tool.flit.module]
name = "werkzeug"

[tool.flit.sdist]
include = [
    "docs/",
    "examples/",
    "requirements/",
    "tests/",
    "CHANGES.rst",
    "tox.ini",
]
exclude = [
    "docs/_build/",
]

[tool.pytest.ini_options]
testpaths = ["tests"]
filterwarnings = [
    "error",
]
markers = ["dev_server: tests that start the dev server"]

[tool.coverage.run]
branch = true
source = ["werkzeug", "tests"]

[tool.coverage.paths]
source = ["src", "*/site-packages"]

[tool.mypy]
python_version = "3.8"
files = ["src/werkzeug"]
show_error_codes = true
pretty = true
#strict = true
allow_redefinition = true
disallow_subclassing_any = true
#disallow_untyped_calls = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
no_implicit_optional = true
local_partial_types = true
no_implicit_reexport = true
strict_equality = true
warn_redundant_casts = true
warn_unused_configs = true
warn_unused_ignores = true
warn_return_any = true
#warn_unreachable = True

[[tool.mypy.overrides]]
module = ["werkzeug.wrappers"]
no_implicit_reexport = false

[[tool.mypy.overrides]]
module = [
    "colorama.*",
    "cryptography.*",
    "eventlet.*",
    "gevent.*",
    "greenlet.*",
    "watchdog.*",
    "xprocess.*",
]
ignore_missing_imports = true
@ -0,0 +1 @@
build
@ -0,0 +1,13 @@
# SHA1:80754af91bfb6d1073585b046fe0a474ce868509
#
# This file is autogenerated by pip-compile-multi
# To update, run:
#
#    pip-compile-multi
#
build==0.10.0
    # via -r requirements/build.in
packaging==23.1
    # via build
pyproject-hooks==1.0.0
    # via build
@ -8,55 +8,55 @@
|
||||||
-r docs.txt
|
-r docs.txt
|
||||||
-r tests.txt
|
-r tests.txt
|
||||||
-r typing.txt
|
-r typing.txt
|
||||||
build==0.8.0
|
build==0.10.0
|
||||||
# via pip-tools
|
# via pip-tools
|
||||||
|
cachetools==5.3.1
|
||||||
|
# via tox
|
||||||
cfgv==3.3.1
|
cfgv==3.3.1
|
||||||
# via pre-commit
|
# via pre-commit
|
||||||
|
chardet==5.1.0
|
||||||
|
# via tox
|
||||||
click==8.1.3
|
click==8.1.3
|
||||||
# via
|
# via
|
||||||
# pip-compile-multi
|
# pip-compile-multi
|
||||||
# pip-tools
|
# pip-tools
|
||||||
distlib==0.3.4
|
colorama==0.4.6
|
||||||
|
# via tox
|
||||||
|
distlib==0.3.6
|
||||||
# via virtualenv
|
# via virtualenv
|
||||||
filelock==3.7.1
|
filelock==3.12.2
|
||||||
# via
|
# via
|
||||||
# tox
|
# tox
|
||||||
# virtualenv
|
# virtualenv
|
||||||
greenlet==1.1.2 ; python_version < "3.11"
|
identify==2.5.24
|
||||||
# via -r requirements/tests.in
|
|
||||||
identify==2.5.1
|
|
||||||
# via pre-commit
|
# via pre-commit
|
||||||
nodeenv==1.7.0
|
nodeenv==1.8.0
|
||||||
# via pre-commit
|
# via pre-commit
|
||||||
pep517==0.12.0
|
pip-compile-multi==2.6.3
|
||||||
# via build
|
|
||||||
pip-compile-multi==2.4.5
|
|
||||||
# via -r requirements/dev.in
|
# via -r requirements/dev.in
|
||||||
pip-tools==6.8.0
|
pip-tools==6.13.0
|
||||||
# via pip-compile-multi
|
# via pip-compile-multi
|
||||||
platformdirs==2.5.2
|
platformdirs==3.8.0
|
||||||
# via virtualenv
|
# via
|
||||||
pre-commit==2.20.0
|
# tox
|
||||||
|
# virtualenv
|
||||||
|
pre-commit==3.3.3
|
||||||
# via -r requirements/dev.in
|
# via -r requirements/dev.in
|
||||||
|
pyproject-api==1.5.2
|
||||||
|
# via tox
|
||||||
|
pyproject-hooks==1.0.0
|
||||||
|
# via build
|
||||||
pyyaml==6.0
|
pyyaml==6.0
|
||||||
# via pre-commit
|
# via pre-commit
|
||||||
six==1.16.0
|
toposort==1.10
|
||||||
# via
|
|
||||||
# tox
|
|
||||||
# virtualenv
|
|
||||||
toml==0.10.2
|
|
||||||
# via
|
|
||||||
# pre-commit
|
|
||||||
# tox
|
|
||||||
toposort==1.7
|
|
||||||
# via pip-compile-multi
|
# via pip-compile-multi
|
||||||
tox==3.25.1
|
tox==4.6.3
|
||||||
# via -r requirements/dev.in
|
# via -r requirements/dev.in
|
||||||
virtualenv==20.15.1
|
virtualenv==20.23.1
|
||||||
# via
|
# via
|
||||||
# pre-commit
|
# pre-commit
|
||||||
# tox
|
# tox
|
||||||
wheel==0.37.1
|
wheel==0.40.0
|
||||||
# via pip-tools
|
# via pip-tools
|
||||||
|
|
||||||
# The following packages are considered to be unsafe in a requirements file:
|
# The following packages are considered to be unsafe in a requirements file:
|
||||||
|
|
|
@ -5,41 +5,37 @@
|
||||||
#
|
#
|
||||||
# pip-compile-multi
|
# pip-compile-multi
|
||||||
#
|
#
|
||||||
alabaster==0.7.12
|
alabaster==0.7.13
|
||||||
# via sphinx
|
# via sphinx
|
||||||
babel==2.10.3
|
babel==2.12.1
|
||||||
# via sphinx
|
# via sphinx
|
||||||
certifi==2022.6.15
|
certifi==2023.5.7
|
||||||
# via requests
|
# via requests
|
||||||
charset-normalizer==2.1.0
|
charset-normalizer==3.1.0
|
||||||
# via requests
|
# via requests
|
||||||
docutils==0.18.1
|
docutils==0.20.1
|
||||||
# via sphinx
|
# via sphinx
|
||||||
idna==3.3
|
idna==3.4
|
||||||
# via requests
|
# via requests
|
||||||
imagesize==1.4.1
|
imagesize==1.4.1
|
||||||
# via sphinx
|
# via sphinx
|
||||||
jinja2==3.1.2
|
jinja2==3.1.2
|
||||||
# via sphinx
|
# via sphinx
|
||||||
markupsafe==2.1.1
|
markupsafe==2.1.3
|
||||||
# via jinja2
|
# via jinja2
|
||||||
packaging==21.3
|
packaging==23.1
|
||||||
# via
|
# via
|
||||||
# pallets-sphinx-themes
|
# pallets-sphinx-themes
|
||||||
# sphinx
|
# sphinx
|
||||||
pallets-sphinx-themes==2.0.2
|
pallets-sphinx-themes==2.1.1
|
||||||
# via -r requirements/docs.in
|
# via -r requirements/docs.in
|
||||||
pygments==2.12.0
|
pygments==2.15.1
|
||||||
# via sphinx
|
# via sphinx
|
||||||
pyparsing==3.0.9
|
requests==2.31.0
|
||||||
# via packaging
|
|
||||||
pytz==2022.1
|
|
||||||
# via babel
|
|
||||||
requests==2.28.1
|
|
||||||
# via sphinx
|
# via sphinx
|
||||||
snowballstemmer==2.2.0
|
snowballstemmer==2.2.0
|
||||||
# via sphinx
|
# via sphinx
|
||||||
sphinx==5.0.2
|
sphinx==7.0.1
|
||||||
# via
|
# via
|
||||||
# -r requirements/docs.in
|
# -r requirements/docs.in
|
||||||
# pallets-sphinx-themes
|
# pallets-sphinx-themes
|
||||||
|
@ -47,11 +43,11 @@ sphinx==5.0.2
|
||||||
# sphinxcontrib-log-cabinet
|
# sphinxcontrib-log-cabinet
|
||||||
sphinx-issues==3.0.1
|
sphinx-issues==3.0.1
|
||||||
# via -r requirements/docs.in
|
# via -r requirements/docs.in
|
||||||
sphinxcontrib-applehelp==1.0.2
|
sphinxcontrib-applehelp==1.0.4
|
||||||
# via sphinx
|
# via sphinx
|
||||||
sphinxcontrib-devhelp==1.0.2
|
sphinxcontrib-devhelp==1.0.2
|
||||||
# via sphinx
|
# via sphinx
|
||||||
sphinxcontrib-htmlhelp==2.0.0
|
sphinxcontrib-htmlhelp==2.0.1
|
||||||
# via sphinx
|
# via sphinx
|
||||||
sphinxcontrib-jsmath==1.0.1
|
sphinxcontrib-jsmath==1.0.1
|
||||||
# via sphinx
|
# via sphinx
|
||||||
|
@ -61,5 +57,5 @@ sphinxcontrib-qthelp==1.0.3
|
||||||
# via sphinx
|
# via sphinx
|
||||||
sphinxcontrib-serializinghtml==1.1.5
|
sphinxcontrib-serializinghtml==1.1.5
|
||||||
# via sphinx
|
# via sphinx
|
||||||
urllib3==1.26.10
|
urllib3==2.0.3
|
||||||
# via requests
|
# via requests
|
||||||
|
|
|
@ -5,40 +5,32 @@
|
||||||
#
|
#
|
||||||
# pip-compile-multi
|
# pip-compile-multi
|
||||||
#
|
#
|
||||||
attrs==21.4.0
|
|
||||||
# via pytest
|
|
||||||
cffi==1.15.1
|
cffi==1.15.1
|
||||||
# via cryptography
|
# via cryptography
|
||||||
cryptography==37.0.4
|
cryptography==41.0.1
|
||||||
# via -r requirements/tests.in
|
# via -r requirements/tests.in
|
||||||
ephemeral-port-reserve==1.1.4
|
ephemeral-port-reserve==1.1.4
|
||||||
# via -r requirements/tests.in
|
# via -r requirements/tests.in
|
||||||
greenlet==1.1.2 ; python_version < "3.11"
|
iniconfig==2.0.0
|
||||||
# via -r requirements/tests.in
|
|
||||||
iniconfig==1.1.1
|
|
||||||
# via pytest
|
# via pytest
|
||||||
packaging==21.3
|
packaging==23.1
|
||||||
# via pytest
|
# via pytest
|
||||||
pluggy==1.0.0
|
pluggy==1.2.0
|
||||||
# via pytest
|
# via pytest
|
||||||
psutil==5.9.1
|
psutil==5.9.5
|
||||||
# via pytest-xprocess
|
# via pytest-xprocess
|
||||||
py==1.11.0
|
py==1.11.0
|
||||||
# via pytest
|
# via pytest-xprocess
|
||||||
pycparser==2.21
|
pycparser==2.21
|
||||||
# via cffi
|
# via cffi
|
||||||
pyparsing==3.0.9
|
pytest==7.4.0
|
||||||
# via packaging
|
|
||||||
pytest==7.1.2
|
|
||||||
# via
|
# via
|
||||||
# -r requirements/tests.in
|
# -r requirements/tests.in
|
||||||
# pytest-timeout
|
# pytest-timeout
|
||||||
# pytest-xprocess
|
# pytest-xprocess
|
||||||
pytest-timeout==2.1.0
|
pytest-timeout==2.1.0
|
||||||
# via -r requirements/tests.in
|
# via -r requirements/tests.in
|
||||||
pytest-xprocess==0.19.0
|
pytest-xprocess==0.22.2
|
||||||
# via -r requirements/tests.in
|
# via -r requirements/tests.in
|
||||||
tomli==2.0.1
|
watchdog==3.0.0
|
||||||
# via pytest
|
|
||||||
watchdog==2.1.9
|
|
||||||
# via -r requirements/tests.in
|
# via -r requirements/tests.in
|
||||||
|
|
|
@ -2,3 +2,4 @@ mypy
 types-contextvars
 types-dataclasses
 types-setuptools
+watchdog
@ -1,21 +1,21 @@
|
||||||
# SHA1:95499f7e92b572adde012b13e1ec99dbbb2f7089
|
# SHA1:162796b1b3ac7a29da65fe0e32278f14b68ed8c8
|
||||||
#
|
#
|
||||||
# This file is autogenerated by pip-compile-multi
|
# This file is autogenerated by pip-compile-multi
|
||||||
# To update, run:
|
# To update, run:
|
||||||
#
|
#
|
||||||
# pip-compile-multi
|
# pip-compile-multi
|
||||||
#
|
#
|
||||||
mypy==0.961
|
mypy==1.4.1
|
||||||
# via -r requirements/typing.in
|
# via -r requirements/typing.in
|
||||||
mypy-extensions==0.4.3
|
mypy-extensions==1.0.0
|
||||||
# via mypy
|
# via mypy
|
||||||
tomli==2.0.1
|
types-contextvars==2.4.7.2
|
||||||
# via mypy
|
|
||||||
types-contextvars==2.4.7
|
|
||||||
# via -r requirements/typing.in
|
# via -r requirements/typing.in
|
||||||
types-dataclasses==0.6.6
|
types-dataclasses==0.6.6
|
||||||
# via -r requirements/typing.in
|
# via -r requirements/typing.in
|
||||||
types-setuptools==62.6.1
|
types-setuptools==68.0.0.0
|
||||||
# via -r requirements/typing.in
|
# via -r requirements/typing.in
|
||||||
typing-extensions==4.3.0
|
typing-extensions==4.6.3
|
||||||
# via mypy
|
# via mypy
|
||||||
|
watchdog==3.0.0
|
||||||
|
# via -r requirements/typing.in
|
||||||
|
|
130
setup.cfg
|
@ -1,130 +0,0 @@
|
||||||
[metadata]
|
|
||||||
name = Werkzeug
|
|
||||||
version = attr: werkzeug.__version__
|
|
||||||
url = https://palletsprojects.com/p/werkzeug/
|
|
||||||
project_urls =
|
|
||||||
Donate = https://palletsprojects.com/donate
|
|
||||||
Documentation = https://werkzeug.palletsprojects.com/
|
|
||||||
Changes = https://werkzeug.palletsprojects.com/changes/
|
|
||||||
Source Code = https://github.com/pallets/werkzeug/
|
|
||||||
Issue Tracker = https://github.com/pallets/werkzeug/issues/
|
|
||||||
Twitter = https://twitter.com/PalletsTeam
|
|
||||||
Chat = https://discord.gg/pallets
|
|
||||||
license = BSD-3-Clause
|
|
||||||
author = Armin Ronacher
|
|
||||||
author_email = armin.ronacher@active-4.com
|
|
||||||
maintainer = Pallets
|
|
||||||
maintainer_email = contact@palletsprojects.com
|
|
||||||
description = The comprehensive WSGI web application library.
|
|
||||||
long_description = file: README.rst
|
|
||||||
long_description_content_type = text/x-rst
|
|
||||||
classifiers =
|
|
||||||
Development Status :: 5 - Production/Stable
|
|
||||||
Environment :: Web Environment
|
|
||||||
Intended Audience :: Developers
|
|
||||||
License :: OSI Approved :: BSD License
|
|
||||||
Operating System :: OS Independent
|
|
||||||
Programming Language :: Python
|
|
||||||
Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
|
||||||
Topic :: Internet :: WWW/HTTP :: WSGI
|
|
||||||
Topic :: Internet :: WWW/HTTP :: WSGI :: Application
|
|
||||||
Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware
|
|
||||||
Topic :: Software Development :: Libraries :: Application Frameworks
|
|
||||||
|
|
||||||
[options]
|
|
||||||
packages = find:
|
|
||||||
package_dir = = src
|
|
||||||
include_package_data = True
|
|
||||||
python_requires = >= 3.7
|
|
||||||
# Dependencies are in setup.py for GitHub's dependency graph.
|
|
||||||
|
|
||||||
[options.packages.find]
|
|
||||||
where = src
|
|
||||||
|
|
||||||
[tool:pytest]
|
|
||||||
testpaths = tests
|
|
||||||
filterwarnings =
|
|
||||||
error
|
|
||||||
markers =
|
|
||||||
dev_server: tests that start the dev server
|
|
||||||
|
|
||||||
[coverage:run]
|
|
||||||
branch = True
|
|
||||||
source =
|
|
||||||
werkzeug
|
|
||||||
tests
|
|
||||||
|
|
||||||
[coverage:paths]
|
|
||||||
source =
|
|
||||||
src
|
|
||||||
*/site-packages
|
|
||||||
|
|
||||||
[flake8]
|
|
||||||
# B = bugbear
|
|
||||||
# E = pycodestyle errors
|
|
||||||
# F = flake8 pyflakes
|
|
||||||
# W = pycodestyle warnings
|
|
||||||
# B9 = bugbear opinions
|
|
||||||
# ISC = implicit str concat
|
|
||||||
select = B, E, F, W, B9, ISC
|
|
||||||
ignore =
|
|
||||||
# slice notation whitespace, invalid
|
|
||||||
E203
|
|
||||||
# import at top, too many circular import fixes
|
|
||||||
E402
|
|
||||||
# line length, handled by bugbear B950
|
|
||||||
E501
|
|
||||||
# bare except, handled by bugbear B001
|
|
||||||
E722
|
|
||||||
# bin op line break, invalid
|
|
||||||
W503
|
|
||||||
# up to 88 allowed by bugbear B950
|
|
||||||
max-line-length = 80
|
|
||||||
per-file-ignores =
|
|
||||||
# __init__ exports names
|
|
||||||
**/__init__.py: F401
|
|
||||||
# LocalProxy assigns lambdas
|
|
||||||
src/werkzeug/local.py: E731
|
|
||||||
|
|
||||||
[mypy]
|
|
||||||
files = src/werkzeug
|
|
||||||
python_version = 3.7
|
|
||||||
show_error_codes = True
|
|
||||||
allow_redefinition = True
|
|
||||||
disallow_subclassing_any = True
|
|
||||||
# disallow_untyped_calls = True
|
|
||||||
disallow_untyped_defs = True
|
|
||||||
disallow_incomplete_defs = True
|
|
||||||
no_implicit_optional = True
|
|
||||||
local_partial_types = True
|
|
||||||
no_implicit_reexport = True
|
|
||||||
strict_equality = True
|
|
||||||
warn_redundant_casts = True
|
|
||||||
warn_unused_configs = True
|
|
||||||
warn_unused_ignores = True
|
|
||||||
warn_return_any = True
|
|
||||||
# warn_unreachable = True
|
|
||||||
|
|
||||||
[mypy-werkzeug.wrappers]
|
|
||||||
no_implicit_reexport = False
|
|
||||||
|
|
||||||
[mypy-colorama.*]
|
|
||||||
ignore_missing_imports = True
|
|
||||||
|
|
||||||
[mypy-cryptography.*]
|
|
||||||
ignore_missing_imports = True
|
|
||||||
|
|
||||||
[mypy-eventlet.*]
|
|
||||||
ignore_missing_imports = True
|
|
||||||
|
|
||||||
[mypy-gevent.*]
|
|
||||||
ignore_missing_imports = True
|
|
||||||
|
|
||||||
[mypy-greenlet.*]
|
|
||||||
ignore_missing_imports = True
|
|
||||||
|
|
||||||
[mypy-watchdog.*]
|
|
||||||
ignore_missing_imports = True
|
|
||||||
|
|
||||||
[mypy-xprocess.*]
|
|
||||||
ignore_missing_imports = True
|
|
setup.py
@ -1,9 +0,0 @@
#!/usr/bin/env python
from setuptools import setup

# Metadata goes in setup.cfg. These are here for GitHub's dependency graph.
setup(
    name="Werkzeug",
    install_requires=["MarkupSafe>=2.1.1"],
    extras_require={"watchdog": ["watchdog"]},
)
@ -1,6 +1,25 @@
+from __future__ import annotations
+
+import typing as t
+
 from .serving import run_simple as run_simple
 from .test import Client as Client
 from .wrappers import Request as Request
 from .wrappers import Response as Response

-__version__ = "2.2.2"
+
+def __getattr__(name: str) -> t.Any:
+    if name == "__version__":
+        import importlib.metadata
+        import warnings
+
+        warnings.warn(
+            "The '__version__' attribute is deprecated and will be removed in"
+            " Werkzeug 3.1. Use feature detection or"
+            " 'importlib.metadata.version(\"werkzeug\")' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return importlib.metadata.version("werkzeug")
+
+    raise AttributeError(name)
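With the module ``__getattr__`` above, ``werkzeug.__version__`` keeps working
but emits a warning. The forward-compatible lookup it points to is the standard
library metadata API (a small sketch):

.. code-block:: python

    import importlib.metadata

    # Preferred over werkzeug.__version__, which is deprecated in 3.0
    # and scheduled for removal in 3.1.
    version = importlib.metadata.version("werkzeug")
    print(version)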
@ -1,50 +1,17 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import operator
|
|
||||||
import re
|
import re
|
||||||
import string
|
|
||||||
import sys
|
import sys
|
||||||
import typing
|
|
||||||
import typing as t
|
import typing as t
|
||||||
from datetime import date
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from datetime import timezone
|
from datetime import timezone
|
||||||
from itertools import chain
|
|
||||||
from weakref import WeakKeyDictionary
|
|
||||||
|
|
||||||
if t.TYPE_CHECKING:
|
if t.TYPE_CHECKING:
|
||||||
from _typeshed.wsgi import StartResponse
|
|
||||||
from _typeshed.wsgi import WSGIApplication
|
|
||||||
from _typeshed.wsgi import WSGIEnvironment
|
from _typeshed.wsgi import WSGIEnvironment
|
||||||
from .wrappers.request import Request # noqa: F401
|
from .wrappers.request import Request
|
||||||
|
|
||||||
_logger: t.Optional[logging.Logger] = None
|
_logger: logging.Logger | None = None
|
||||||
_signature_cache = WeakKeyDictionary() # type: ignore
|
|
||||||
_epoch_ord = date(1970, 1, 1).toordinal()
|
|
||||||
_legal_cookie_chars = frozenset(
|
|
||||||
c.encode("ascii")
|
|
||||||
for c in f"{string.ascii_letters}{string.digits}/=!#$%&'*+-.^_`|~:"
|
|
||||||
)
|
|
||||||
|
|
||||||
_cookie_quoting_map = {b",": b"\\054", b";": b"\\073", b'"': b'\\"', b"\\": b"\\\\"}
|
|
||||||
for _i in chain(range(32), range(127, 256)):
|
|
||||||
_cookie_quoting_map[_i.to_bytes(1, sys.byteorder)] = f"\\{_i:03o}".encode("latin1")
|
|
||||||
|
|
||||||
_octal_re = re.compile(rb"\\[0-3][0-7][0-7]")
|
|
||||||
_quote_re = re.compile(rb"[\\].")
|
|
||||||
_legal_cookie_chars_re = rb"[\w\d!#%&\'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]"
|
|
||||||
_cookie_re = re.compile(
|
|
||||||
rb"""
|
|
||||||
(?P<key>[^=;]+)
|
|
||||||
(?:\s*=\s*
|
|
||||||
(?P<val>
|
|
||||||
"(?:[^\\"]|\\.)*" |
|
|
||||||
(?:.*?)
|
|
||||||
)
|
|
||||||
)?
|
|
||||||
\s*;
|
|
||||||
""",
|
|
||||||
flags=re.VERBOSE,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class _Missing:
|
class _Missing:
|
||||||
|
@ -58,110 +25,15 @@ class _Missing:
|
||||||
_missing = _Missing()
|
_missing = _Missing()
|
||||||
|
|
||||||
|
|
||||||
@typing.overload
|
def _wsgi_decoding_dance(s: str) -> str:
|
||||||
def _make_encode_wrapper(reference: str) -> t.Callable[[str], str]:
|
return s.encode("latin1").decode(errors="replace")
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@typing.overload
|
def _wsgi_encoding_dance(s: str) -> str:
|
||||||
def _make_encode_wrapper(reference: bytes) -> t.Callable[[str], bytes]:
|
return s.encode().decode("latin1")
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
def _make_encode_wrapper(reference: t.AnyStr) -> t.Callable[[str], t.AnyStr]:
|
def _get_environ(obj: WSGIEnvironment | Request) -> WSGIEnvironment:
|
||||||
"""Create a function that will be called with a string argument. If
|
|
||||||
the reference is bytes, values will be encoded to bytes.
|
|
||||||
"""
|
|
||||||
if isinstance(reference, str):
|
|
||||||
return lambda x: x
|
|
||||||
|
|
||||||
return operator.methodcaller("encode", "latin1")
|
|
||||||
|
|
||||||
|
|
||||||
def _check_str_tuple(value: t.Tuple[t.AnyStr, ...]) -> None:
|
|
||||||
"""Ensure tuple items are all strings or all bytes."""
|
|
||||||
if not value:
|
|
||||||
return
|
|
||||||
|
|
||||||
item_type = str if isinstance(value[0], str) else bytes
|
|
||||||
|
|
||||||
if any(not isinstance(item, item_type) for item in value):
|
|
||||||
raise TypeError(f"Cannot mix str and bytes arguments (got {value!r})")
|
|
||||||
|
|
||||||
|
|
||||||
_default_encoding = sys.getdefaultencoding()
|
|
||||||
|
|
||||||
|
|
||||||
def _to_bytes(
|
|
||||||
x: t.Union[str, bytes], charset: str = _default_encoding, errors: str = "strict"
|
|
||||||
) -> bytes:
|
|
||||||
if x is None or isinstance(x, bytes):
|
|
||||||
return x
|
|
||||||
|
|
||||||
if isinstance(x, (bytearray, memoryview)):
|
|
||||||
return bytes(x)
|
|
||||||
|
|
||||||
if isinstance(x, str):
|
|
||||||
return x.encode(charset, errors)
|
|
||||||
|
|
||||||
raise TypeError("Expected bytes")
|
|
||||||
|
|
||||||
|
|
||||||
@typing.overload
|
|
||||||
def _to_str( # type: ignore
|
|
||||||
x: None,
|
|
||||||
charset: t.Optional[str] = ...,
|
|
||||||
errors: str = ...,
|
|
||||||
allow_none_charset: bool = ...,
|
|
||||||
) -> None:
|
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@typing.overload
|
|
||||||
def _to_str(
|
|
||||||
x: t.Any,
|
|
||||||
charset: t.Optional[str] = ...,
|
|
||||||
errors: str = ...,
|
|
||||||
allow_none_charset: bool = ...,
|
|
||||||
) -> str:
|
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
def _to_str(
|
|
||||||
x: t.Optional[t.Any],
|
|
||||||
charset: t.Optional[str] = _default_encoding,
|
|
||||||
errors: str = "strict",
|
|
||||||
allow_none_charset: bool = False,
|
|
||||||
) -> t.Optional[t.Union[str, bytes]]:
|
|
||||||
if x is None or isinstance(x, str):
|
|
||||||
return x
|
|
||||||
|
|
||||||
if not isinstance(x, (bytes, bytearray)):
|
|
||||||
return str(x)
|
|
||||||
|
|
||||||
if charset is None:
|
|
||||||
if allow_none_charset:
|
|
||||||
return x
|
|
||||||
|
|
||||||
return x.decode(charset, errors) # type: ignore
|
|
||||||
|
|
||||||
|
|
||||||
def _wsgi_decoding_dance(
|
|
||||||
s: str, charset: str = "utf-8", errors: str = "replace"
|
|
||||||
) -> str:
|
|
||||||
return s.encode("latin1").decode(charset, errors)
|
|
||||||
|
|
||||||
|
|
||||||
def _wsgi_encoding_dance(
|
|
||||||
s: str, charset: str = "utf-8", errors: str = "replace"
|
|
||||||
) -> str:
|
|
||||||
if isinstance(s, bytes):
|
|
||||||
return s.decode("latin1", errors)
|
|
||||||
|
|
||||||
return s.encode(charset).decode("latin1", errors)
|
|
||||||
|
|
||||||
|
|
||||||
def _get_environ(obj: t.Union["WSGIEnvironment", "Request"]) -> "WSGIEnvironment":
|
|
||||||
env = getattr(obj, "environ", obj)
|
env = getattr(obj, "environ", obj)
|
||||||
assert isinstance(
|
assert isinstance(
|
||||||
env, dict
|
env, dict
|
||||||
|
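
The new one-line `_wsgi_decoding_dance` / `_wsgi_encoding_dance` in the hunk above implement the WSGI "latin-1 dance": PEP 3333 hands header and path values to the application as str decoded with latin-1, so the original bytes can be recovered and re-decoded as UTF-8, and the reverse when a value is placed back into the environ. A minimal self-contained sketch of the same round trip (illustrative names, not the Werkzeug helpers themselves):

def decoding_dance(s: str) -> str:
    # environ value -> text: latin-1 back to the raw bytes, then UTF-8
    return s.encode("latin1").decode(errors="replace")

def encoding_dance(s: str) -> str:
    # text -> environ-safe value: UTF-8 bytes re-read as latin-1, so every
    # byte maps to exactly one code point and nothing is lost
    return s.encode().decode("latin1")

original = "na\u00efve.txt"            # "naïve.txt"
wire = encoding_dance(original)        # what a WSGI server would carry
assert decoding_dance(wire) == original
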
@ -224,17 +96,17 @@ def _log(type: str, message: str, *args: t.Any, **kwargs: t.Any) -> None:
|
||||||
getattr(_logger, type)(message.rstrip(), *args, **kwargs)
|
getattr(_logger, type)(message.rstrip(), *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
@typing.overload
|
@t.overload
|
||||||
def _dt_as_utc(dt: None) -> None:
|
def _dt_as_utc(dt: None) -> None:
|
||||||
...
|
...
|
||||||
|
|
||||||
|
|
||||||
@typing.overload
|
@t.overload
|
||||||
def _dt_as_utc(dt: datetime) -> datetime:
|
def _dt_as_utc(dt: datetime) -> datetime:
|
||||||
...
|
...
|
||||||
|
|
||||||
|
|
||||||
def _dt_as_utc(dt: t.Optional[datetime]) -> t.Optional[datetime]:
|
def _dt_as_utc(dt: datetime | None) -> datetime | None:
|
||||||
if dt is None:
|
if dt is None:
|
||||||
return dt
|
return dt
|
||||||
|
|
||||||
|
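
The `@t.overload` pair above lets `_dt_as_utc` advertise that `None` passes through unchanged while a real `datetime` always comes back as a `datetime`. A hedged sketch of that pattern; the body here (treating naive datetimes as UTC) is an assumption for illustration, not necessarily the exact Werkzeug implementation:

from __future__ import annotations

import typing as t
from datetime import datetime, timezone

@t.overload
def dt_as_utc(dt: None) -> None: ...
@t.overload
def dt_as_utc(dt: datetime) -> datetime: ...

def dt_as_utc(dt: datetime | None) -> datetime | None:
    if dt is None:
        return dt
    if dt.tzinfo is None:
        # assumption: naive datetimes are interpreted as UTC
        return dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(timezone.utc)

print(dt_as_utc(datetime(2023, 9, 30, 12, 0)))  # 2023-09-30 12:00:00+00:00
print(dt_as_utc(None))                          # None
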
@ -257,11 +129,11 @@ class _DictAccessorProperty(t.Generic[_TAccessorValue]):
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
name: str,
|
name: str,
|
||||||
default: t.Optional[_TAccessorValue] = None,
|
default: _TAccessorValue | None = None,
|
||||||
load_func: t.Optional[t.Callable[[str], _TAccessorValue]] = None,
|
load_func: t.Callable[[str], _TAccessorValue] | None = None,
|
||||||
dump_func: t.Optional[t.Callable[[_TAccessorValue], str]] = None,
|
dump_func: t.Callable[[_TAccessorValue], str] | None = None,
|
||||||
read_only: t.Optional[bool] = None,
|
read_only: bool | None = None,
|
||||||
doc: t.Optional[str] = None,
|
doc: str | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
self.name = name
|
self.name = name
|
||||||
self.default = default
|
self.default = default
|
||||||
|
@ -274,19 +146,19 @@ class _DictAccessorProperty(t.Generic[_TAccessorValue]):
|
||||||
def lookup(self, instance: t.Any) -> t.MutableMapping[str, t.Any]:
|
def lookup(self, instance: t.Any) -> t.MutableMapping[str, t.Any]:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
@typing.overload
|
@t.overload
|
||||||
def __get__(
|
def __get__(
|
||||||
self, instance: None, owner: type
|
self, instance: None, owner: type
|
||||||
) -> "_DictAccessorProperty[_TAccessorValue]":
|
) -> _DictAccessorProperty[_TAccessorValue]:
|
||||||
...
|
...
|
||||||
|
|
||||||
@typing.overload
|
@t.overload
|
||||||
def __get__(self, instance: t.Any, owner: type) -> _TAccessorValue:
|
def __get__(self, instance: t.Any, owner: type) -> _TAccessorValue:
|
||||||
...
|
...
|
||||||
|
|
||||||
def __get__(
|
def __get__(
|
||||||
self, instance: t.Optional[t.Any], owner: type
|
self, instance: t.Any | None, owner: type
|
||||||
) -> t.Union[_TAccessorValue, "_DictAccessorProperty[_TAccessorValue]"]:
|
) -> _TAccessorValue | _DictAccessorProperty[_TAccessorValue]:
|
||||||
if instance is None:
|
if instance is None:
|
||||||
return self
|
return self
|
||||||
|
|
||||||
|
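
The `__get__` overloads above are the standard descriptor idiom: accessed on the class (`instance is None`) the property returns itself, accessed on an instance it returns a value pulled from a backing mapping. A toy illustration of the idiom; `DictAccessor` and `FakeRequest` are hypothetical names, not Werkzeug classes:

from __future__ import annotations

import typing as t

class DictAccessor:
    def __init__(self, name: str, default: t.Any = None) -> None:
        self.name = name
        self.default = default

    def __get__(self, instance: t.Any | None, owner: type) -> t.Any:
        if instance is None:
            return self  # class-level access yields the descriptor itself
        return instance.environ.get(self.name, self.default)

class FakeRequest:
    method = DictAccessor("REQUEST_METHOD", default="GET")

    def __init__(self, environ: dict) -> None:
        self.environ = environ

print(FakeRequest({"REQUEST_METHOD": "POST"}).method)  # POST
print(FakeRequest({}).method)                          # GET (default)
print(FakeRequest.method)                              # the descriptor object itself
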
@ -324,225 +196,19 @@ class _DictAccessorProperty(t.Generic[_TAccessorValue]):
|
||||||
return f"<{type(self).__name__} {self.name}>"
|
return f"<{type(self).__name__} {self.name}>"
|
||||||
|
|
||||||
|
|
||||||
def _cookie_quote(b: bytes) -> bytes:
|
_plain_int_re = re.compile(r"-?\d+", re.ASCII)
|
||||||
buf = bytearray()
|
|
||||||
all_legal = True
|
|
||||||
_lookup = _cookie_quoting_map.get
|
|
||||||
_push = buf.extend
|
|
||||||
|
|
||||||
for char_int in b:
|
|
||||||
char = char_int.to_bytes(1, sys.byteorder)
|
|
||||||
if char not in _legal_cookie_chars:
|
|
||||||
all_legal = False
|
|
||||||
char = _lookup(char, char)
|
|
||||||
_push(char)
|
|
||||||
|
|
||||||
if all_legal:
|
|
||||||
return bytes(buf)
|
|
||||||
return bytes(b'"' + buf + b'"')
|
|
||||||
|
|
||||||
|
|
||||||
def _cookie_unquote(b: bytes) -> bytes:
|
def _plain_int(value: str) -> int:
|
||||||
if len(b) < 2:
|
"""Parse an int only if it is only ASCII digits and ``-``.
|
||||||
return b
|
|
||||||
if b[:1] != b'"' or b[-1:] != b'"':
|
|
||||||
return b
|
|
||||||
|
|
||||||
b = b[1:-1]
|
This disallows ``+``, ``_``, and non-ASCII digits, which are accepted by ``int`` but
|
||||||
|
are not allowed in HTTP header values.
|
||||||
|
|
||||||
i = 0
|
Any leading or trailing whitespace is stripped.
|
||||||
n = len(b)
|
"""
|
||||||
rv = bytearray()
|
value = value.strip()
|
||||||
_push = rv.extend
|
if _plain_int_re.fullmatch(value) is None:
|
||||||
|
raise ValueError
|
||||||
|
|
||||||
while 0 <= i < n:
|
return int(value)
|
||||||
o_match = _octal_re.search(b, i)
|
|
||||||
q_match = _quote_re.search(b, i)
|
|
||||||
if not o_match and not q_match:
|
|
||||||
rv.extend(b[i:])
|
|
||||||
break
|
|
||||||
j = k = -1
|
|
||||||
if o_match:
|
|
||||||
j = o_match.start(0)
|
|
||||||
if q_match:
|
|
||||||
k = q_match.start(0)
|
|
||||||
if q_match and (not o_match or k < j):
|
|
||||||
_push(b[i:k])
|
|
||||||
_push(b[k + 1 : k + 2])
|
|
||||||
i = k + 2
|
|
||||||
else:
|
|
||||||
_push(b[i:j])
|
|
||||||
rv.append(int(b[j + 1 : j + 4], 8))
|
|
||||||
i = j + 4
|
|
||||||
|
|
||||||
return bytes(rv)
|
|
||||||
|
|
||||||
|
|
||||||
def _cookie_parse_impl(b: bytes) -> t.Iterator[t.Tuple[bytes, bytes]]:
|
|
||||||
"""Lowlevel cookie parsing facility that operates on bytes."""
|
|
||||||
i = 0
|
|
||||||
n = len(b)
|
|
||||||
|
|
||||||
while i < n:
|
|
||||||
match = _cookie_re.search(b + b";", i)
|
|
||||||
if not match:
|
|
||||||
break
|
|
||||||
|
|
||||||
key = match.group("key").strip()
|
|
||||||
value = match.group("val") or b""
|
|
||||||
i = match.end(0)
|
|
||||||
|
|
||||||
yield key, _cookie_unquote(value)
|
|
||||||
|
|
||||||
|
|
||||||
def _encode_idna(domain: str) -> bytes:
|
|
||||||
# If we're given bytes, make sure they fit into ASCII
|
|
||||||
if isinstance(domain, bytes):
|
|
||||||
domain.decode("ascii")
|
|
||||||
return domain
|
|
||||||
|
|
||||||
# Otherwise check if it's already ascii, then return
|
|
||||||
try:
|
|
||||||
return domain.encode("ascii")
|
|
||||||
except UnicodeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Otherwise encode each part separately
|
|
||||||
return b".".join(p.encode("idna") for p in domain.split("."))
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_idna(domain: t.Union[str, bytes]) -> str:
|
|
||||||
# If the input is a string try to encode it to ascii to do the idna
|
|
||||||
# decoding. If that fails because of a unicode error, then we
|
|
||||||
# already have a decoded idna domain.
|
|
||||||
if isinstance(domain, str):
|
|
||||||
try:
|
|
||||||
domain = domain.encode("ascii")
|
|
||||||
except UnicodeError:
|
|
||||||
return domain # type: ignore
|
|
||||||
|
|
||||||
# Decode each part separately. If a part fails, try to decode it
|
|
||||||
# with ascii and silently ignore errors. This makes sense because
|
|
||||||
# the idna codec does not have error handling.
|
|
||||||
def decode_part(part: bytes) -> str:
|
|
||||||
try:
|
|
||||||
return part.decode("idna")
|
|
||||||
except UnicodeError:
|
|
||||||
return part.decode("ascii", "ignore")
|
|
||||||
|
|
||||||
return ".".join(decode_part(p) for p in domain.split(b"."))
|
|
||||||
|
|
||||||
|
|
||||||
@typing.overload
|
|
||||||
def _make_cookie_domain(domain: None) -> None:
|
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@typing.overload
|
|
||||||
def _make_cookie_domain(domain: str) -> bytes:
|
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
def _make_cookie_domain(domain: t.Optional[str]) -> t.Optional[bytes]:
|
|
||||||
if domain is None:
|
|
||||||
return None
|
|
||||||
domain = _encode_idna(domain)
|
|
||||||
if b":" in domain:
|
|
||||||
domain = domain.split(b":", 1)[0]
|
|
||||||
if b"." in domain:
|
|
||||||
return domain
|
|
||||||
raise ValueError(
|
|
||||||
"Setting 'domain' for a cookie on a server running locally (ex: "
|
|
||||||
"localhost) is not supported by complying browsers. You should "
|
|
||||||
"have something like: '127.0.0.1 localhost dev.localhost' on "
|
|
||||||
"your hosts file and then point your server to run on "
|
|
||||||
"'dev.localhost' and also set 'domain' for 'dev.localhost'"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _easteregg(app: t.Optional["WSGIApplication"] = None) -> "WSGIApplication":
|
|
||||||
"""Like the name says. But who knows how it works?"""
|
|
||||||
|
|
||||||
def bzzzzzzz(gyver: bytes) -> str:
|
|
||||||
import base64
|
|
||||||
import zlib
|
|
||||||
|
|
||||||
return zlib.decompress(base64.b64decode(gyver)).decode("ascii")
|
|
||||||
|
|
||||||
gyver = "\n".join(
|
|
||||||
[
|
|
||||||
x + (77 - len(x)) * " "
|
|
||||||
for x in bzzzzzzz(
|
|
||||||
b"""
|
|
||||||
eJyFlzuOJDkMRP06xRjymKgDJCDQStBYT8BCgK4gTwfQ2fcFs2a2FzvZk+hvlcRvRJD148efHt9m
|
|
||||||
9Xz94dRY5hGt1nrYcXx7us9qlcP9HHNh28rz8dZj+q4rynVFFPdlY4zH873NKCexrDM6zxxRymzz
|
|
||||||
4QIxzK4bth1PV7+uHn6WXZ5C4ka/+prFzx3zWLMHAVZb8RRUxtFXI5DTQ2n3Hi2sNI+HK43AOWSY
|
|
||||||
jmEzE4naFp58PdzhPMdslLVWHTGUVpSxImw+pS/D+JhzLfdS1j7PzUMxij+mc2U0I9zcbZ/HcZxc
|
|
||||||
q1QjvvcThMYFnp93agEx392ZdLJWXbi/Ca4Oivl4h/Y1ErEqP+lrg7Xa4qnUKu5UE9UUA4xeqLJ5
|
|
||||||
jWlPKJvR2yhRI7xFPdzPuc6adXu6ovwXwRPXXnZHxlPtkSkqWHilsOrGrvcVWXgGP3daXomCj317
|
|
||||||
8P2UOw/NnA0OOikZyFf3zZ76eN9QXNwYdD8f8/LdBRFg0BO3bB+Pe/+G8er8tDJv83XTkj7WeMBJ
|
|
||||||
v/rnAfdO51d6sFglfi8U7zbnr0u9tyJHhFZNXYfH8Iafv2Oa+DT6l8u9UYlajV/hcEgk1x8E8L/r
|
|
||||||
XJXl2SK+GJCxtnyhVKv6GFCEB1OO3f9YWAIEbwcRWv/6RPpsEzOkXURMN37J0PoCSYeBnJQd9Giu
|
|
||||||
LxYQJNlYPSo/iTQwgaihbART7Fcyem2tTSCcwNCs85MOOpJtXhXDe0E7zgZJkcxWTar/zEjdIVCk
|
|
||||||
iXy87FW6j5aGZhttDBoAZ3vnmlkx4q4mMmCdLtnHkBXFMCReqthSGkQ+MDXLLCpXwBs0t+sIhsDI
|
|
||||||
tjBB8MwqYQpLygZ56rRHHpw+OAVyGgaGRHWy2QfXez+ZQQTTBkmRXdV/A9LwH6XGZpEAZU8rs4pE
|
|
||||||
1R4FQ3Uwt8RKEtRc0/CrANUoes3EzM6WYcFyskGZ6UTHJWenBDS7h163Eo2bpzqxNE9aVgEM2CqI
|
|
||||||
GAJe9Yra4P5qKmta27VjzYdR04Vc7KHeY4vs61C0nbywFmcSXYjzBHdiEjraS7PGG2jHHTpJUMxN
|
|
||||||
Jlxr3pUuFvlBWLJGE3GcA1/1xxLcHmlO+LAXbhrXah1tD6Ze+uqFGdZa5FM+3eHcKNaEarutAQ0A
|
|
||||||
QMAZHV+ve6LxAwWnXbbSXEG2DmCX5ijeLCKj5lhVFBrMm+ryOttCAeFpUdZyQLAQkA06RLs56rzG
|
|
||||||
8MID55vqr/g64Qr/wqwlE0TVxgoiZhHrbY2h1iuuyUVg1nlkpDrQ7Vm1xIkI5XRKLedN9EjzVchu
|
|
||||||
jQhXcVkjVdgP2O99QShpdvXWoSwkp5uMwyjt3jiWCqWGSiaaPAzohjPanXVLbM3x0dNskJsaCEyz
|
|
||||||
DTKIs+7WKJD4ZcJGfMhLFBf6hlbnNkLEePF8Cx2o2kwmYF4+MzAxa6i+6xIQkswOqGO+3x9NaZX8
|
|
||||||
MrZRaFZpLeVTYI9F/djY6DDVVs340nZGmwrDqTCiiqD5luj3OzwpmQCiQhdRYowUYEA3i1WWGwL4
|
|
||||||
GCtSoO4XbIPFeKGU13XPkDf5IdimLpAvi2kVDVQbzOOa4KAXMFlpi/hV8F6IDe0Y2reg3PuNKT3i
|
|
||||||
RYhZqtkQZqSB2Qm0SGtjAw7RDwaM1roESC8HWiPxkoOy0lLTRFG39kvbLZbU9gFKFRvixDZBJmpi
|
|
||||||
Xyq3RE5lW00EJjaqwp/v3EByMSpVZYsEIJ4APaHmVtpGSieV5CALOtNUAzTBiw81GLgC0quyzf6c
|
|
||||||
NlWknzJeCsJ5fup2R4d8CYGN77mu5vnO1UqbfElZ9E6cR6zbHjgsr9ly18fXjZoPeDjPuzlWbFwS
|
|
||||||
pdvPkhntFvkc13qb9094LL5NrA3NIq3r9eNnop9DizWOqCEbyRBFJTHn6Tt3CG1o8a4HevYh0XiJ
|
|
||||||
sR0AVVHuGuMOIfbuQ/OKBkGRC6NJ4u7sbPX8bG/n5sNIOQ6/Y/BX3IwRlTSabtZpYLB85lYtkkgm
|
|
||||||
p1qXK3Du2mnr5INXmT/78KI12n11EFBkJHHp0wJyLe9MvPNUGYsf+170maayRoy2lURGHAIapSpQ
|
|
||||||
krEDuNoJCHNlZYhKpvw4mspVWxqo415n8cD62N9+EfHrAvqQnINStetek7RY2Urv8nxsnGaZfRr/
|
|
||||||
nhXbJ6m/yl1LzYqscDZA9QHLNbdaSTTr+kFg3bC0iYbX/eQy0Bv3h4B50/SGYzKAXkCeOLI3bcAt
|
|
||||||
mj2Z/FM1vQWgDynsRwNvrWnJHlespkrp8+vO1jNaibm+PhqXPPv30YwDZ6jApe3wUjFQobghvW9p
|
|
||||||
7f2zLkGNv8b191cD/3vs9Q833z8t"""
|
|
||||||
).splitlines()
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
def easteregged(
|
|
||||||
environ: "WSGIEnvironment", start_response: "StartResponse"
|
|
||||||
) -> t.Iterable[bytes]:
|
|
||||||
def injecting_start_response(
|
|
||||||
status: str, headers: t.List[t.Tuple[str, str]], exc_info: t.Any = None
|
|
||||||
) -> t.Callable[[bytes], t.Any]:
|
|
||||||
headers.append(("X-Powered-By", "Werkzeug"))
|
|
||||||
return start_response(status, headers, exc_info)
|
|
||||||
|
|
||||||
if app is not None and environ.get("QUERY_STRING") != "macgybarchakku":
|
|
||||||
return app(environ, injecting_start_response)
|
|
||||||
injecting_start_response("200 OK", [("Content-Type", "text/html")])
|
|
||||||
return [
|
|
||||||
f"""\
|
|
||||||
<!doctype html>
|
|
||||||
<html lang=en>
|
|
||||||
<head>
|
|
||||||
<title>About Werkzeug</title>
|
|
||||||
<style type="text/css">
|
|
||||||
body {{ font: 15px Georgia, serif; text-align: center; }}
|
|
||||||
a {{ color: #333; text-decoration: none; }}
|
|
||||||
h1 {{ font-size: 30px; margin: 20px 0 10px 0; }}
|
|
||||||
p {{ margin: 0 0 30px 0; }}
|
|
||||||
pre {{ font: 11px 'Consolas', 'Monaco', monospace; line-height: 0.95; }}
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<h1><a href="http://werkzeug.pocoo.org/">Werkzeug</a></h1>
|
|
||||||
<p>the Swiss Army knife of Python web development.</p>
|
|
||||||
<pre>{gyver}\n\n\n</pre>
|
|
||||||
</body>
|
|
||||||
</html>""".encode(
|
|
||||||
"latin1"
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
return easteregged
|
|
||||||
|
|
|
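
Most of the large hunk above removes the old cookie and IDNA helpers; the one addition is `_plain_int`, which accepts only ASCII digits with an optional leading `-`. The point is that the builtin `int()` is far more permissive (underscore separators, `+`, non-ASCII digits), which is unwanted when parsing values taken from HTTP headers. A standalone sketch of the same check (the error message is an illustrative addition):

import re

_plain_int_re = re.compile(r"-?\d+", re.ASCII)

def plain_int(value: str) -> int:
    value = value.strip()
    if _plain_int_re.fullmatch(value) is None:
        raise ValueError(f"not a plain integer: {value!r}")
    return int(value)

print(plain_int(" 42 "))                  # 42
print(int("1_000"), int("\uff11\uff12"))  # 1000 12 -- what bare int() accepts
try:
    plain_int("1_000")
except ValueError as exc:
    print(exc)                            # not a plain integer: '1_000'
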
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import fnmatch
|
import fnmatch
|
||||||
import os
|
import os
|
||||||
import subprocess
|
import subprocess
|
||||||
|
@ -20,7 +22,7 @@ prefix = {*_ignore_always, sys.prefix, sys.exec_prefix}
|
||||||
|
|
||||||
if hasattr(sys, "real_prefix"):
|
if hasattr(sys, "real_prefix"):
|
||||||
# virtualenv < 20
|
# virtualenv < 20
|
||||||
prefix.add(sys.real_prefix) # type: ignore[attr-defined]
|
prefix.add(sys.real_prefix)
|
||||||
|
|
||||||
_stat_ignore_scan = tuple(prefix)
|
_stat_ignore_scan = tuple(prefix)
|
||||||
del prefix
|
del prefix
|
||||||
|
@ -55,13 +57,13 @@ def _iter_module_paths() -> t.Iterator[str]:
|
||||||
yield name
|
yield name
|
||||||
|
|
||||||
|
|
||||||
def _remove_by_pattern(paths: t.Set[str], exclude_patterns: t.Set[str]) -> None:
|
def _remove_by_pattern(paths: set[str], exclude_patterns: set[str]) -> None:
|
||||||
for pattern in exclude_patterns:
|
for pattern in exclude_patterns:
|
||||||
paths.difference_update(fnmatch.filter(paths, pattern))
|
paths.difference_update(fnmatch.filter(paths, pattern))
|
||||||
|
|
||||||
|
|
||||||
def _find_stat_paths(
|
def _find_stat_paths(
|
||||||
extra_files: t.Set[str], exclude_patterns: t.Set[str]
|
extra_files: set[str], exclude_patterns: set[str]
|
||||||
) -> t.Iterable[str]:
|
) -> t.Iterable[str]:
|
||||||
"""Find paths for the stat reloader to watch. Returns imported
|
"""Find paths for the stat reloader to watch. Returns imported
|
||||||
module files, Python files under non-system paths. Extra files and
|
module files, Python files under non-system paths. Extra files and
|
||||||
|
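
`_remove_by_pattern` above prunes the watch list in place with `fnmatch`-style globs. A quick illustration of the effect (the paths and pattern here are made up):

import fnmatch

def remove_by_pattern(paths: set, exclude_patterns: set) -> None:
    for pattern in exclude_patterns:
        paths.difference_update(fnmatch.filter(paths, pattern))

paths = {"/app/main.py", "/app/.venv/lib/site.py", "/app/tests/test_x.py"}
remove_by_pattern(paths, {"*/.venv/*"})
print(sorted(paths))  # ['/app/main.py', '/app/tests/test_x.py']
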
@ -115,7 +117,7 @@ def _find_stat_paths(
|
||||||
|
|
||||||
|
|
||||||
def _find_watchdog_paths(
|
def _find_watchdog_paths(
|
||||||
extra_files: t.Set[str], exclude_patterns: t.Set[str]
|
extra_files: set[str], exclude_patterns: set[str]
|
||||||
) -> t.Iterable[str]:
|
) -> t.Iterable[str]:
|
||||||
"""Find paths for the stat reloader to watch. Looks at the same
|
"""Find paths for the stat reloader to watch. Looks at the same
|
||||||
sources as the stat reloader, but watches everything under
|
sources as the stat reloader, but watches everything under
|
||||||
|
@ -139,7 +141,7 @@ def _find_watchdog_paths(
|
||||||
|
|
||||||
|
|
||||||
def _find_common_roots(paths: t.Iterable[str]) -> t.Iterable[str]:
|
def _find_common_roots(paths: t.Iterable[str]) -> t.Iterable[str]:
|
||||||
root: t.Dict[str, dict] = {}
|
root: dict[str, dict] = {}
|
||||||
|
|
||||||
for chunks in sorted((PurePath(x).parts for x in paths), key=len, reverse=True):
|
for chunks in sorted((PurePath(x).parts for x in paths), key=len, reverse=True):
|
||||||
node = root
|
node = root
|
||||||
|
@ -151,7 +153,7 @@ def _find_common_roots(paths: t.Iterable[str]) -> t.Iterable[str]:
|
||||||
|
|
||||||
rv = set()
|
rv = set()
|
||||||
|
|
||||||
def _walk(node: t.Mapping[str, dict], path: t.Tuple[str, ...]) -> None:
|
def _walk(node: t.Mapping[str, dict], path: tuple[str, ...]) -> None:
|
||||||
for prefix, child in node.items():
|
for prefix, child in node.items():
|
||||||
_walk(child, path + (prefix,))
|
_walk(child, path + (prefix,))
|
||||||
|
|
||||||
|
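
`_find_common_roots` (this hunk and the next) collapses a set of paths into the smallest set of directories that cover them, using a nested-dict trie built from `PurePath(...).parts`. The sketch below reconstructs the idea from the visible lines; the `node.clear()` marking step and the leaf collection inside `_walk` are assumptions filled in to make it runnable, and POSIX-style paths are assumed:

import os
from pathlib import PurePath

def find_common_roots(paths):
    root: dict = {}
    for chunks in sorted((PurePath(x).parts for x in paths), key=len, reverse=True):
        node = root
        for chunk in chunks:
            node = node.setdefault(chunk, {})
        node.clear()  # assumption: clearing marks this node as a covering root

    rv = set()

    def _walk(node, path):
        for prefix, child in node.items():
            _walk(child, path + (prefix,))
        if not node:  # leaf: everything deeper is covered by this path
            rv.add(os.path.join(*path))

    _walk(root, ())
    return rv

print(sorted(find_common_roots(["/srv/app", "/srv/app/static", "/home/user/project"])))
# ['/home/user/project', '/srv/app']
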
@ -162,10 +164,15 @@ def _find_common_roots(paths: t.Iterable[str]) -> t.Iterable[str]:
|
||||||
return rv
|
return rv
|
||||||
|
|
||||||
|
|
||||||
def _get_args_for_reloading() -> t.List[str]:
|
def _get_args_for_reloading() -> list[str]:
|
||||||
"""Determine how the script was executed, and return the args needed
|
"""Determine how the script was executed, and return the args needed
|
||||||
to execute it again in a new process.
|
to execute it again in a new process.
|
||||||
"""
|
"""
|
||||||
|
if sys.version_info >= (3, 10):
|
||||||
|
# sys.orig_argv, added in Python 3.10, contains the exact args used to invoke
|
||||||
|
# Python. Still replace argv[0] with sys.executable for accuracy.
|
||||||
|
return [sys.executable, *sys.orig_argv[1:]]
|
||||||
|
|
||||||
rv = [sys.executable]
|
rv = [sys.executable]
|
||||||
py_script = sys.argv[0]
|
py_script = sys.argv[0]
|
||||||
args = sys.argv[1:]
|
args = sys.argv[1:]
|
||||||
|
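
The new branch above uses `sys.orig_argv` (Python 3.10+) so a reloader restart re-runs the interpreter with exactly the flags it was started with (`-X dev`, `-m package`, and so on), only swapping in `sys.executable` as argv[0]. A small hedged sketch; the pre-3.10 fallback shown here is a simplification of the reconstruction the rest of the function performs:

import sys

def restart_command() -> list:
    if sys.version_info >= (3, 10):
        # exact interpreter invocation, argv[0] replaced by the executable path
        return [sys.executable, *sys.orig_argv[1:]]
    # simplified fallback: only covers plain "python script.py args" invocations
    return [sys.executable, *sys.argv]

print(restart_command())
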
@ -221,15 +228,15 @@ class ReloaderLoop:
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
extra_files: t.Optional[t.Iterable[str]] = None,
|
extra_files: t.Iterable[str] | None = None,
|
||||||
exclude_patterns: t.Optional[t.Iterable[str]] = None,
|
exclude_patterns: t.Iterable[str] | None = None,
|
||||||
interval: t.Union[int, float] = 1,
|
interval: int | float = 1,
|
||||||
) -> None:
|
) -> None:
|
||||||
self.extra_files: t.Set[str] = {os.path.abspath(x) for x in extra_files or ()}
|
self.extra_files: set[str] = {os.path.abspath(x) for x in extra_files or ()}
|
||||||
self.exclude_patterns: t.Set[str] = set(exclude_patterns or ())
|
self.exclude_patterns: set[str] = set(exclude_patterns or ())
|
||||||
self.interval = interval
|
self.interval = interval
|
||||||
|
|
||||||
def __enter__(self) -> "ReloaderLoop":
|
def __enter__(self) -> ReloaderLoop:
|
||||||
"""Do any setup, then run one step of the watch to populate the
|
"""Do any setup, then run one step of the watch to populate the
|
||||||
initial filesystem state.
|
initial filesystem state.
|
||||||
"""
|
"""
|
||||||
|
@ -281,7 +288,7 @@ class StatReloaderLoop(ReloaderLoop):
|
||||||
name = "stat"
|
name = "stat"
|
||||||
|
|
||||||
def __enter__(self) -> ReloaderLoop:
|
def __enter__(self) -> ReloaderLoop:
|
||||||
self.mtimes: t.Dict[str, float] = {}
|
self.mtimes: dict[str, float] = {}
|
||||||
return super().__enter__()
|
return super().__enter__()
|
||||||
|
|
||||||
def run_step(self) -> None:
|
def run_step(self) -> None:
|
||||||
|
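
`StatReloaderLoop` above keeps a `dict[str, float]` of modification times and re-stats the watched files on every step. A toy version of that strategy; `StatWatcher` is a hypothetical helper, not the Werkzeug class:

import os
import typing as t

class StatWatcher:
    def __init__(self, paths: t.Iterable) -> None:
        self.paths = list(paths)
        self.mtimes: dict = {}

    def changed(self) -> list:
        out = []
        for path in self.paths:
            try:
                mtime = os.stat(path).st_mtime
            except OSError:
                continue  # file vanished or is unreadable: skip silently
            old = self.mtimes.get(path)
            self.mtimes[path] = mtime
            if old is not None and mtime > old:
                out.append(path)
        return out

watcher = StatWatcher([__file__])
watcher.changed()  # first call only records the baseline mtimes
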
@ -305,15 +312,20 @@ class WatchdogReloaderLoop(ReloaderLoop):
|
||||||
def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
|
def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
|
||||||
from watchdog.observers import Observer
|
from watchdog.observers import Observer
|
||||||
from watchdog.events import PatternMatchingEventHandler
|
from watchdog.events import PatternMatchingEventHandler
|
||||||
|
from watchdog.events import EVENT_TYPE_OPENED
|
||||||
|
from watchdog.events import FileModifiedEvent
|
||||||
|
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
trigger_reload = self.trigger_reload
|
trigger_reload = self.trigger_reload
|
||||||
|
|
||||||
class EventHandler(PatternMatchingEventHandler): # type: ignore
|
class EventHandler(PatternMatchingEventHandler):
|
||||||
def on_any_event(self, event): # type: ignore
|
def on_any_event(self, event: FileModifiedEvent): # type: ignore
|
||||||
|
if event.event_type == EVENT_TYPE_OPENED:
|
||||||
|
return
|
||||||
|
|
||||||
trigger_reload(event.src_path)
|
trigger_reload(event.src_path)
|
||||||
|
|
||||||
reloader_name = Observer.__name__.lower()
|
reloader_name = Observer.__name__.lower() # type: ignore[attr-defined]
|
||||||
|
|
||||||
if reloader_name.endswith("observer"):
|
if reloader_name.endswith("observer"):
|
||||||
reloader_name = reloader_name[:-8]
|
reloader_name = reloader_name[:-8]
|
||||||
|
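
The handler above now imports `EVENT_TYPE_OPENED` and drops those events: on inotify backends merely reading a watched file emits an "opened" event, which would otherwise look like a change and restart the server in a loop. A minimal standalone handler with the same filter (requires a watchdog release recent enough to define `EVENT_TYPE_OPENED`, as in the imports above; the printing behaviour is illustrative):

from watchdog.events import EVENT_TYPE_OPENED, PatternMatchingEventHandler
from watchdog.observers import Observer

class PrintingHandler(PatternMatchingEventHandler):
    def on_any_event(self, event) -> None:
        if event.event_type == EVENT_TYPE_OPENED:
            return  # plain reads are not changes
        print("changed:", event.src_path)

if __name__ == "__main__":
    observer = Observer()
    observer.schedule(PrintingHandler(patterns=["*.py"]), ".", recursive=True)
    observer.start()
    try:
        observer.join()
    finally:
        observer.stop()
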
@ -343,7 +355,7 @@ class WatchdogReloaderLoop(ReloaderLoop):
|
||||||
self.log_reload(filename)
|
self.log_reload(filename)
|
||||||
|
|
||||||
def __enter__(self) -> ReloaderLoop:
|
def __enter__(self) -> ReloaderLoop:
|
||||||
self.watches: t.Dict[str, t.Any] = {}
|
self.watches: dict[str, t.Any] = {}
|
||||||
self.observer.start()
|
self.observer.start()
|
||||||
return super().__enter__()
|
return super().__enter__()
|
||||||
|
|
||||||
|
@ -382,7 +394,7 @@ class WatchdogReloaderLoop(ReloaderLoop):
|
||||||
self.observer.unschedule(watch)
|
self.observer.unschedule(watch)
|
||||||
|
|
||||||
|
|
||||||
reloader_loops: t.Dict[str, t.Type[ReloaderLoop]] = {
|
reloader_loops: dict[str, type[ReloaderLoop]] = {
|
||||||
"stat": StatReloaderLoop,
|
"stat": StatReloaderLoop,
|
||||||
"watchdog": WatchdogReloaderLoop,
|
"watchdog": WatchdogReloaderLoop,
|
||||||
}
|
}
|
||||||
|
@ -416,9 +428,9 @@ def ensure_echo_on() -> None:
|
||||||
|
|
||||||
def run_with_reloader(
|
def run_with_reloader(
|
||||||
main_func: t.Callable[[], None],
|
main_func: t.Callable[[], None],
|
||||||
extra_files: t.Optional[t.Iterable[str]] = None,
|
extra_files: t.Iterable[str] | None = None,
|
||||||
exclude_patterns: t.Optional[t.Iterable[str]] = None,
|
exclude_patterns: t.Iterable[str] | None = None,
|
||||||
interval: t.Union[int, float] = 1,
|
interval: int | float = 1,
|
||||||
reloader_type: str = "auto",
|
reloader_type: str = "auto",
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Run the given function in an independent Python interpreter."""
|
"""Run the given function in an independent Python interpreter."""
|
||||||
|
|
|
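
For context, this is how the `run_with_reloader` signature shown above is typically driven. `_reloader` is a private module, so the supported entry point is normally `werkzeug.serving.run_simple(..., use_reloader=True)`, which calls into it; the file and pattern values below are made-up examples:

from werkzeug._reloader import run_with_reloader

def main() -> None:
    print("serving; edit a watched file to trigger a restart")

if __name__ == "__main__":
    run_with_reloader(
        main,
        extra_files=["config.toml"],       # hypothetical extra file to watch
        exclude_patterns=["*/.venv/*"],
        interval=1,
        reloader_type="auto",              # or "stat" / "watchdog" to force one
    )
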
@ -1,921 +0,0 @@
|
||||||
from datetime import datetime
|
|
||||||
from os import PathLike
|
|
||||||
from typing import Any
|
|
||||||
from typing import Callable
|
|
||||||
from typing import Collection
|
|
||||||
from typing import Dict
|
|
||||||
from typing import FrozenSet
|
|
||||||
from typing import Generic
|
|
||||||
from typing import Hashable
|
|
||||||
from typing import IO
|
|
||||||
from typing import Iterable
|
|
||||||
from typing import Iterator
|
|
||||||
from typing import List
|
|
||||||
from typing import Mapping
|
|
||||||
from typing import NoReturn
|
|
||||||
from typing import Optional
|
|
||||||
from typing import overload
|
|
||||||
from typing import Set
|
|
||||||
from typing import Tuple
|
|
||||||
from typing import Type
|
|
||||||
from typing import TypeVar
|
|
||||||
from typing import Union
|
|
||||||
from _typeshed import SupportsKeysAndGetItem
|
|
||||||
from _typeshed.wsgi import WSGIEnvironment
|
|
||||||
|
|
||||||
from typing_extensions import Literal
|
|
||||||
from typing_extensions import SupportsIndex
|
|
||||||
|
|
||||||
K = TypeVar("K")
|
|
||||||
V = TypeVar("V")
|
|
||||||
T = TypeVar("T")
|
|
||||||
D = TypeVar("D")
|
|
||||||
_CD = TypeVar("_CD", bound="CallbackDict")
|
|
||||||
|
|
||||||
def is_immutable(self: object) -> NoReturn: ...
|
|
||||||
def iter_multi_items(
|
|
||||||
mapping: Union[Mapping[K, Union[V, Iterable[V]]], Iterable[Tuple[K, V]]]
|
|
||||||
) -> Iterator[Tuple[K, V]]: ...
|
|
||||||
|
|
||||||
class ImmutableListMixin(List[V]):
|
|
||||||
_hash_cache: Optional[int]
|
|
||||||
def __hash__(self) -> int: ... # type: ignore
|
|
||||||
def __delitem__(self, key: Union[SupportsIndex, slice]) -> NoReturn: ...
|
|
||||||
def __iadd__(self, other: t.Any) -> NoReturn: ... # type: ignore
|
|
||||||
def __imul__(self, other: SupportsIndex) -> NoReturn: ...
|
|
||||||
def __setitem__( # type: ignore
|
|
||||||
self, key: Union[int, slice], value: V
|
|
||||||
) -> NoReturn: ...
|
|
||||||
def append(self, value: V) -> NoReturn: ...
|
|
||||||
def remove(self, value: V) -> NoReturn: ...
|
|
||||||
def extend(self, values: Iterable[V]) -> NoReturn: ...
|
|
||||||
def insert(self, pos: SupportsIndex, value: V) -> NoReturn: ...
|
|
||||||
def pop(self, index: SupportsIndex = -1) -> NoReturn: ...
|
|
||||||
def reverse(self) -> NoReturn: ...
|
|
||||||
def sort(
|
|
||||||
self, key: Optional[Callable[[V], Any]] = None, reverse: bool = False
|
|
||||||
) -> NoReturn: ...
|
|
||||||
|
|
||||||
class ImmutableList(ImmutableListMixin[V]): ...
|
|
||||||
|
|
||||||
class ImmutableDictMixin(Dict[K, V]):
|
|
||||||
_hash_cache: Optional[int]
|
|
||||||
@classmethod
|
|
||||||
def fromkeys( # type: ignore
|
|
||||||
cls, keys: Iterable[K], value: Optional[V] = None
|
|
||||||
) -> ImmutableDictMixin[K, V]: ...
|
|
||||||
def _iter_hashitems(self) -> Iterable[Hashable]: ...
|
|
||||||
def __hash__(self) -> int: ... # type: ignore
|
|
||||||
def setdefault(self, key: K, default: Optional[V] = None) -> NoReturn: ...
|
|
||||||
def update(self, *args: Any, **kwargs: V) -> NoReturn: ...
|
|
||||||
def pop(self, key: K, default: Optional[V] = None) -> NoReturn: ... # type: ignore
|
|
||||||
def popitem(self) -> NoReturn: ...
|
|
||||||
def __setitem__(self, key: K, value: V) -> NoReturn: ...
|
|
||||||
def __delitem__(self, key: K) -> NoReturn: ...
|
|
||||||
def clear(self) -> NoReturn: ...
|
|
||||||
|
|
||||||
class ImmutableMultiDictMixin(ImmutableDictMixin[K, V]):
|
|
||||||
def _iter_hashitems(self) -> Iterable[Hashable]: ...
|
|
||||||
def add(self, key: K, value: V) -> NoReturn: ...
|
|
||||||
def popitemlist(self) -> NoReturn: ...
|
|
||||||
def poplist(self, key: K) -> NoReturn: ...
|
|
||||||
def setlist(self, key: K, new_list: Iterable[V]) -> NoReturn: ...
|
|
||||||
def setlistdefault(
|
|
||||||
self, key: K, default_list: Optional[Iterable[V]] = None
|
|
||||||
) -> NoReturn: ...
|
|
||||||
|
|
||||||
def _calls_update(name: str) -> Callable[[UpdateDictMixin[K, V]], Any]: ...
|
|
||||||
|
|
||||||
class UpdateDictMixin(Dict[K, V]):
|
|
||||||
on_update: Optional[Callable[[UpdateDictMixin[K, V]], None]]
|
|
||||||
def setdefault(self, key: K, default: Optional[V] = None) -> V: ...
|
|
||||||
@overload
|
|
||||||
def pop(self, key: K) -> V: ...
|
|
||||||
@overload
|
|
||||||
def pop(self, key: K, default: Union[V, T] = ...) -> Union[V, T]: ...
|
|
||||||
def __setitem__(self, key: K, value: V) -> None: ...
|
|
||||||
def __delitem__(self, key: K) -> None: ...
|
|
||||||
def clear(self) -> None: ...
|
|
||||||
def popitem(self) -> Tuple[K, V]: ...
|
|
||||||
@overload
|
|
||||||
def update(self, __m: SupportsKeysAndGetItem[K, V], **kwargs: V) -> None: ...
|
|
||||||
@overload
|
|
||||||
def update(self, __m: Iterable[Tuple[K, V]], **kwargs: V) -> None: ...
|
|
||||||
@overload
|
|
||||||
def update(self, **kwargs: V) -> None: ...
|
|
||||||
|
|
||||||
class TypeConversionDict(Dict[K, V]):
|
|
||||||
@overload
|
|
||||||
def get(self, key: K, default: None = ..., type: None = ...) -> Optional[V]: ...
|
|
||||||
@overload
|
|
||||||
def get(self, key: K, default: D, type: None = ...) -> Union[D, V]: ...
|
|
||||||
@overload
|
|
||||||
def get(self, key: K, default: D, type: Callable[[V], T]) -> Union[D, T]: ...
|
|
||||||
@overload
|
|
||||||
def get(self, key: K, type: Callable[[V], T]) -> Optional[T]: ...
|
|
||||||
|
|
||||||
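
The overloads above describe `TypeConversionDict.get`, whose `type=` argument converts the stored value and falls back to the default when conversion fails. The stub file is being deleted, but the class remains importable from `werkzeug.datastructures`; a brief usage sketch:

from werkzeug.datastructures import TypeConversionDict

d = TypeConversionDict(page="3", q="test")
print(d.get("page", type=int))        # 3    (converted)
print(d.get("missing", 1, type=int))  # 1    (default returned as-is)
print(d.get("q", type=int))           # None (int("test") fails, default used)
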
class ImmutableTypeConversionDict(ImmutableDictMixin[K, V], TypeConversionDict[K, V]):
|
|
||||||
def copy(self) -> TypeConversionDict[K, V]: ...
|
|
||||||
def __copy__(self) -> ImmutableTypeConversionDict: ...
|
|
||||||
|
|
||||||
class MultiDict(TypeConversionDict[K, V]):
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
mapping: Optional[
|
|
||||||
Union[Mapping[K, Union[Iterable[V], V]], Iterable[Tuple[K, V]]]
|
|
||||||
] = None,
|
|
||||||
) -> None: ...
|
|
||||||
def __getitem__(self, item: K) -> V: ...
|
|
||||||
def __setitem__(self, key: K, value: V) -> None: ...
|
|
||||||
def add(self, key: K, value: V) -> None: ...
|
|
||||||
@overload
|
|
||||||
def getlist(self, key: K) -> List[V]: ...
|
|
||||||
@overload
|
|
||||||
def getlist(self, key: K, type: Callable[[V], T] = ...) -> List[T]: ...
|
|
||||||
def setlist(self, key: K, new_list: Iterable[V]) -> None: ...
|
|
||||||
def setdefault(self, key: K, default: Optional[V] = None) -> V: ...
|
|
||||||
def setlistdefault(
|
|
||||||
self, key: K, default_list: Optional[Iterable[V]] = None
|
|
||||||
) -> List[V]: ...
|
|
||||||
def items(self, multi: bool = False) -> Iterator[Tuple[K, V]]: ... # type: ignore
|
|
||||||
def lists(self) -> Iterator[Tuple[K, List[V]]]: ...
|
|
||||||
def values(self) -> Iterator[V]: ... # type: ignore
|
|
||||||
def listvalues(self) -> Iterator[List[V]]: ...
|
|
||||||
def copy(self) -> MultiDict[K, V]: ...
|
|
||||||
def deepcopy(self, memo: Any = None) -> MultiDict[K, V]: ...
|
|
||||||
@overload
|
|
||||||
def to_dict(self) -> Dict[K, V]: ...
|
|
||||||
@overload
|
|
||||||
def to_dict(self, flat: Literal[False]) -> Dict[K, List[V]]: ...
|
|
||||||
def update( # type: ignore
|
|
||||||
self, mapping: Union[Mapping[K, Union[Iterable[V], V]], Iterable[Tuple[K, V]]]
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def pop(self, key: K) -> V: ...
|
|
||||||
@overload
|
|
||||||
def pop(self, key: K, default: Union[V, T] = ...) -> Union[V, T]: ...
|
|
||||||
def popitem(self) -> Tuple[K, V]: ...
|
|
||||||
def poplist(self, key: K) -> List[V]: ...
|
|
||||||
def popitemlist(self) -> Tuple[K, List[V]]: ...
|
|
||||||
def __copy__(self) -> MultiDict[K, V]: ...
|
|
||||||
def __deepcopy__(self, memo: Any) -> MultiDict[K, V]: ...
|
|
||||||
|
|
||||||
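
The `MultiDict` stub above models a mapping where one key may carry several values, which is how query strings and form data are represented. A short usage sketch of the methods listed:

from werkzeug.datastructures import MultiDict

md = MultiDict([("tag", "a"), ("tag", "b"), ("q", "x")])
print(md["tag"])               # 'a'  (plain indexing returns the first value)
print(md.getlist("tag"))       # ['a', 'b']
md.add("tag", "c")
print(md.to_dict())            # {'tag': 'a', 'q': 'x'}
print(md.to_dict(flat=False))  # {'tag': ['a', 'b', 'c'], 'q': ['x']}
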
class _omd_bucket(Generic[K, V]):
|
|
||||||
prev: Optional[_omd_bucket]
|
|
||||||
next: Optional[_omd_bucket]
|
|
||||||
key: K
|
|
||||||
value: V
|
|
||||||
def __init__(self, omd: OrderedMultiDict, key: K, value: V) -> None: ...
|
|
||||||
def unlink(self, omd: OrderedMultiDict) -> None: ...
|
|
||||||
|
|
||||||
class OrderedMultiDict(MultiDict[K, V]):
|
|
||||||
_first_bucket: Optional[_omd_bucket]
|
|
||||||
_last_bucket: Optional[_omd_bucket]
|
|
||||||
def __init__(self, mapping: Optional[Mapping[K, V]] = None) -> None: ...
|
|
||||||
def __eq__(self, other: object) -> bool: ...
|
|
||||||
def __getitem__(self, key: K) -> V: ...
|
|
||||||
def __setitem__(self, key: K, value: V) -> None: ...
|
|
||||||
def __delitem__(self, key: K) -> None: ...
|
|
||||||
def keys(self) -> Iterator[K]: ... # type: ignore
|
|
||||||
def __iter__(self) -> Iterator[K]: ...
|
|
||||||
def values(self) -> Iterator[V]: ... # type: ignore
|
|
||||||
def items(self, multi: bool = False) -> Iterator[Tuple[K, V]]: ... # type: ignore
|
|
||||||
def lists(self) -> Iterator[Tuple[K, List[V]]]: ...
|
|
||||||
def listvalues(self) -> Iterator[List[V]]: ...
|
|
||||||
def add(self, key: K, value: V) -> None: ...
|
|
||||||
@overload
|
|
||||||
def getlist(self, key: K) -> List[V]: ...
|
|
||||||
@overload
|
|
||||||
def getlist(self, key: K, type: Callable[[V], T] = ...) -> List[T]: ...
|
|
||||||
def setlist(self, key: K, new_list: Iterable[V]) -> None: ...
|
|
||||||
def setlistdefault(
|
|
||||||
self, key: K, default_list: Optional[Iterable[V]] = None
|
|
||||||
) -> List[V]: ...
|
|
||||||
def update( # type: ignore
|
|
||||||
self, mapping: Union[Mapping[K, V], Iterable[Tuple[K, V]]]
|
|
||||||
) -> None: ...
|
|
||||||
def poplist(self, key: K) -> List[V]: ...
|
|
||||||
@overload
|
|
||||||
def pop(self, key: K) -> V: ...
|
|
||||||
@overload
|
|
||||||
def pop(self, key: K, default: Union[V, T] = ...) -> Union[V, T]: ...
|
|
||||||
def popitem(self) -> Tuple[K, V]: ...
|
|
||||||
def popitemlist(self) -> Tuple[K, List[V]]: ...
|
|
||||||
|
|
||||||
def _options_header_vkw(
|
|
||||||
value: str, kw: Mapping[str, Optional[Union[str, int]]]
|
|
||||||
) -> str: ...
|
|
||||||
def _unicodify_header_value(value: Union[str, int]) -> str: ...
|
|
||||||
|
|
||||||
HV = Union[str, int]
|
|
||||||
|
|
||||||
class Headers(Dict[str, str]):
|
|
||||||
_list: List[Tuple[str, str]]
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
defaults: Optional[
|
|
||||||
Union[Mapping[str, Union[HV, Iterable[HV]]], Iterable[Tuple[str, HV]]]
|
|
||||||
] = None,
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __getitem__(self, key: str) -> str: ...
|
|
||||||
@overload
|
|
||||||
def __getitem__(self, key: int) -> Tuple[str, str]: ...
|
|
||||||
@overload
|
|
||||||
def __getitem__(self, key: slice) -> Headers: ...
|
|
||||||
@overload
|
|
||||||
def __getitem__(self, key: str, _get_mode: Literal[True] = ...) -> str: ...
|
|
||||||
def __eq__(self, other: object) -> bool: ...
|
|
||||||
@overload # type: ignore
|
|
||||||
def get(self, key: str, default: str) -> str: ...
|
|
||||||
@overload
|
|
||||||
def get(self, key: str, default: Optional[str] = None) -> Optional[str]: ...
|
|
||||||
@overload
|
|
||||||
def get(
|
|
||||||
self, key: str, default: Optional[T] = None, type: Callable[[str], T] = ...
|
|
||||||
) -> Optional[T]: ...
|
|
||||||
@overload
|
|
||||||
def getlist(self, key: str) -> List[str]: ...
|
|
||||||
@overload
|
|
||||||
def getlist(self, key: str, type: Callable[[str], T]) -> List[T]: ...
|
|
||||||
def get_all(self, name: str) -> List[str]: ...
|
|
||||||
def items( # type: ignore
|
|
||||||
self, lower: bool = False
|
|
||||||
) -> Iterator[Tuple[str, str]]: ...
|
|
||||||
def keys(self, lower: bool = False) -> Iterator[str]: ... # type: ignore
|
|
||||||
def values(self) -> Iterator[str]: ... # type: ignore
|
|
||||||
def extend(
|
|
||||||
self,
|
|
||||||
*args: Union[Mapping[str, Union[HV, Iterable[HV]]], Iterable[Tuple[str, HV]]],
|
|
||||||
**kwargs: Union[HV, Iterable[HV]],
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __delitem__(self, key: Union[str, int, slice]) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __delitem__(self, key: str, _index_operation: Literal[False]) -> None: ...
|
|
||||||
def remove(self, key: str) -> None: ...
|
|
||||||
@overload # type: ignore
|
|
||||||
def pop(self, key: str, default: Optional[str] = None) -> str: ...
|
|
||||||
@overload
|
|
||||||
def pop(
|
|
||||||
self, key: Optional[int] = None, default: Optional[Tuple[str, str]] = None
|
|
||||||
) -> Tuple[str, str]: ...
|
|
||||||
def popitem(self) -> Tuple[str, str]: ...
|
|
||||||
def __contains__(self, key: str) -> bool: ... # type: ignore
|
|
||||||
def has_key(self, key: str) -> bool: ...
|
|
||||||
def __iter__(self) -> Iterator[Tuple[str, str]]: ... # type: ignore
|
|
||||||
def add(self, _key: str, _value: HV, **kw: HV) -> None: ...
|
|
||||||
def _validate_value(self, value: str) -> None: ...
|
|
||||||
def add_header(self, _key: str, _value: HV, **_kw: HV) -> None: ...
|
|
||||||
def clear(self) -> None: ...
|
|
||||||
def set(self, _key: str, _value: HV, **kw: HV) -> None: ...
|
|
||||||
def setlist(self, key: str, values: Iterable[HV]) -> None: ...
|
|
||||||
def setdefault(self, key: str, default: HV) -> str: ... # type: ignore
|
|
||||||
def setlistdefault(self, key: str, default: Iterable[HV]) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __setitem__(self, key: str, value: HV) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __setitem__(self, key: int, value: Tuple[str, HV]) -> None: ...
|
|
||||||
@overload
|
|
||||||
def __setitem__(self, key: slice, value: Iterable[Tuple[str, HV]]) -> None: ...
|
|
||||||
@overload
|
|
||||||
def update(
|
|
||||||
self, __m: SupportsKeysAndGetItem[str, HV], **kwargs: Union[HV, Iterable[HV]]
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def update(
|
|
||||||
self, __m: Iterable[Tuple[str, HV]], **kwargs: Union[HV, Iterable[HV]]
|
|
||||||
) -> None: ...
|
|
||||||
@overload
|
|
||||||
def update(self, **kwargs: Union[HV, Iterable[HV]]) -> None: ...
|
|
||||||
def to_wsgi_list(self) -> List[Tuple[str, str]]: ...
|
|
||||||
def copy(self) -> Headers: ...
|
|
||||||
def __copy__(self) -> Headers: ...
|
|
||||||
|
|
||||||
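
`Headers` above is the ordered, case-insensitive, multi-value container used for both request and response headers. A usage sketch of the overloads listed:

from werkzeug.datastructures import Headers

h = Headers()
h.add("Set-Cookie", "a=1")
h.add("Set-Cookie", "b=2")
h["Content-Type"] = "text/plain"
print(h.get("content-type"))    # 'text/plain'  (lookup ignores case)
print(h.get_all("Set-Cookie"))  # ['a=1', 'b=2']
print(h.to_wsgi_list()[0])      # ('Set-Cookie', 'a=1')
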
class ImmutableHeadersMixin(Headers):
|
|
||||||
def __delitem__(self, key: Any, _index_operation: bool = True) -> NoReturn: ...
|
|
||||||
def __setitem__(self, key: Any, value: Any) -> NoReturn: ...
|
|
||||||
def set(self, _key: Any, _value: Any, **kw: Any) -> NoReturn: ...
|
|
||||||
def setlist(self, key: Any, values: Any) -> NoReturn: ...
|
|
||||||
def add(self, _key: Any, _value: Any, **kw: Any) -> NoReturn: ...
|
|
||||||
def add_header(self, _key: Any, _value: Any, **_kw: Any) -> NoReturn: ...
|
|
||||||
def remove(self, key: Any) -> NoReturn: ...
|
|
||||||
def extend(self, *args: Any, **kwargs: Any) -> NoReturn: ...
|
|
||||||
def update(self, *args: Any, **kwargs: Any) -> NoReturn: ...
|
|
||||||
def insert(self, pos: Any, value: Any) -> NoReturn: ...
|
|
||||||
def pop(self, key: Any = None, default: Any = ...) -> NoReturn: ...
|
|
||||||
def popitem(self) -> NoReturn: ...
|
|
||||||
def setdefault(self, key: Any, default: Any) -> NoReturn: ... # type: ignore
|
|
||||||
def setlistdefault(self, key: Any, default: Any) -> NoReturn: ...
|
|
||||||
|
|
||||||
class EnvironHeaders(ImmutableHeadersMixin, Headers):
|
|
||||||
environ: WSGIEnvironment
|
|
||||||
def __init__(self, environ: WSGIEnvironment) -> None: ...
|
|
||||||
def __eq__(self, other: object) -> bool: ...
|
|
||||||
def __getitem__( # type: ignore
|
|
||||||
self, key: str, _get_mode: Literal[False] = False
|
|
||||||
) -> str: ...
|
|
||||||
def __iter__(self) -> Iterator[Tuple[str, str]]: ... # type: ignore
|
|
||||||
def copy(self) -> NoReturn: ...
|
|
||||||
|
|
||||||
class CombinedMultiDict(ImmutableMultiDictMixin[K, V], MultiDict[K, V]): # type: ignore
|
|
||||||
dicts: List[MultiDict[K, V]]
|
|
||||||
def __init__(self, dicts: Optional[Iterable[MultiDict[K, V]]]) -> None: ...
|
|
||||||
@classmethod
|
|
||||||
def fromkeys(cls, keys: Any, value: Any = None) -> NoReturn: ...
|
|
||||||
def __getitem__(self, key: K) -> V: ...
|
|
||||||
@overload # type: ignore
|
|
||||||
def get(self, key: K) -> Optional[V]: ...
|
|
||||||
@overload
|
|
||||||
def get(self, key: K, default: Union[V, T] = ...) -> Union[V, T]: ...
|
|
||||||
@overload
|
|
||||||
def get(
|
|
||||||
self, key: K, default: Optional[T] = None, type: Callable[[V], T] = ...
|
|
||||||
) -> Optional[T]: ...
|
|
||||||
@overload
|
|
||||||
def getlist(self, key: K) -> List[V]: ...
|
|
||||||
@overload
|
|
||||||
def getlist(self, key: K, type: Callable[[V], T] = ...) -> List[T]: ...
|
|
||||||
def _keys_impl(self) -> Set[K]: ...
|
|
||||||
def keys(self) -> Set[K]: ... # type: ignore
|
|
||||||
def __iter__(self) -> Set[K]: ... # type: ignore
|
|
||||||
def items(self, multi: bool = False) -> Iterator[Tuple[K, V]]: ... # type: ignore
|
|
||||||
def values(self) -> Iterator[V]: ... # type: ignore
|
|
||||||
def lists(self) -> Iterator[Tuple[K, List[V]]]: ...
|
|
||||||
def listvalues(self) -> Iterator[List[V]]: ...
|
|
||||||
def copy(self) -> MultiDict[K, V]: ...
|
|
||||||
@overload
|
|
||||||
def to_dict(self) -> Dict[K, V]: ...
|
|
||||||
@overload
|
|
||||||
def to_dict(self, flat: Literal[False]) -> Dict[K, List[V]]: ...
|
|
||||||
def __contains__(self, key: K) -> bool: ... # type: ignore
|
|
||||||
def has_key(self, key: K) -> bool: ...
|
|
||||||
|
|
||||||
class FileMultiDict(MultiDict[str, "FileStorage"]):
|
|
||||||
def add_file(
|
|
||||||
self,
|
|
||||||
name: str,
|
|
||||||
file: Union[FileStorage, str, IO[bytes]],
|
|
||||||
filename: Optional[str] = None,
|
|
||||||
content_type: Optional[str] = None,
|
|
||||||
) -> None: ...
|
|
||||||
|
|
||||||
class ImmutableDict(ImmutableDictMixin[K, V], Dict[K, V]):
|
|
||||||
def copy(self) -> Dict[K, V]: ...
|
|
||||||
def __copy__(self) -> ImmutableDict[K, V]: ...
|
|
||||||
|
|
||||||
class ImmutableMultiDict( # type: ignore
|
|
||||||
ImmutableMultiDictMixin[K, V], MultiDict[K, V]
|
|
||||||
):
|
|
||||||
def copy(self) -> MultiDict[K, V]: ...
|
|
||||||
def __copy__(self) -> ImmutableMultiDict[K, V]: ...
|
|
||||||
|
|
||||||
class ImmutableOrderedMultiDict( # type: ignore
|
|
||||||
ImmutableMultiDictMixin[K, V], OrderedMultiDict[K, V]
|
|
||||||
):
|
|
||||||
def _iter_hashitems(self) -> Iterator[Tuple[int, Tuple[K, V]]]: ...
|
|
||||||
def copy(self) -> OrderedMultiDict[K, V]: ...
|
|
||||||
def __copy__(self) -> ImmutableOrderedMultiDict[K, V]: ...
|
|
||||||
|
|
||||||
class Accept(ImmutableList[Tuple[str, int]]):
|
|
||||||
provided: bool
|
|
||||||
def __init__(
|
|
||||||
self, values: Optional[Union[Accept, Iterable[Tuple[str, float]]]] = None
|
|
||||||
) -> None: ...
|
|
||||||
def _specificity(self, value: str) -> Tuple[bool, ...]: ...
|
|
||||||
def _value_matches(self, value: str, item: str) -> bool: ...
|
|
||||||
@overload # type: ignore
|
|
||||||
def __getitem__(self, key: str) -> int: ...
|
|
||||||
@overload
|
|
||||||
def __getitem__(self, key: int) -> Tuple[str, int]: ...
|
|
||||||
@overload
|
|
||||||
def __getitem__(self, key: slice) -> Iterable[Tuple[str, int]]: ...
|
|
||||||
def quality(self, key: str) -> int: ...
|
|
||||||
def __contains__(self, value: str) -> bool: ... # type: ignore
|
|
||||||
def index(self, key: str) -> int: ... # type: ignore
|
|
||||||
def find(self, key: str) -> int: ...
|
|
||||||
def values(self) -> Iterator[str]: ...
|
|
||||||
def to_header(self) -> str: ...
|
|
||||||
def _best_single_match(self, match: str) -> Optional[Tuple[str, int]]: ...
|
|
||||||
def best_match(
|
|
||||||
self, matches: Iterable[str], default: Optional[str] = None
|
|
||||||
) -> Optional[str]: ...
|
|
||||||
@property
|
|
||||||
def best(self) -> str: ...
|
|
||||||
|
|
||||||
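
`Accept` and its subclasses above wrap parsed Accept-* headers as `(value, quality)` pairs with quality-aware matching. A usage sketch (the header values are made up):

from werkzeug.datastructures import LanguageAccept, MIMEAccept

mime = MIMEAccept([("text/html", 1), ("application/json", 0.8), ("*/*", 0.1)])
print(mime.best_match(["application/json", "text/plain"]))  # 'application/json'
print(mime["text/html"])                                     # 1 (the quality)
print(mime.accept_json)                                      # True

langs = LanguageAccept([("en-GB", 1), ("de", 0.7)])
print(langs.best_match(["de", "fr"], default="en"))          # 'de'
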
def _normalize_mime(value: str) -> List[str]: ...
|
|
||||||
|
|
||||||
class MIMEAccept(Accept):
|
|
||||||
def _specificity(self, value: str) -> Tuple[bool, ...]: ...
|
|
||||||
def _value_matches(self, value: str, item: str) -> bool: ...
|
|
||||||
@property
|
|
||||||
def accept_html(self) -> bool: ...
|
|
||||||
@property
|
|
||||||
def accept_xhtml(self) -> bool: ...
|
|
||||||
@property
|
|
||||||
def accept_json(self) -> bool: ...
|
|
||||||
|
|
||||||
def _normalize_lang(value: str) -> List[str]: ...
|
|
||||||
|
|
||||||
class LanguageAccept(Accept):
|
|
||||||
def _value_matches(self, value: str, item: str) -> bool: ...
|
|
||||||
def best_match(
|
|
||||||
self, matches: Iterable[str], default: Optional[str] = None
|
|
||||||
) -> Optional[str]: ...
|
|
||||||
|
|
||||||
class CharsetAccept(Accept):
|
|
||||||
def _value_matches(self, value: str, item: str) -> bool: ...
|
|
||||||
|
|
||||||
_CPT = TypeVar("_CPT", str, int, bool)
|
|
||||||
_OptCPT = Optional[_CPT]
|
|
||||||
|
|
||||||
def cache_control_property(key: str, empty: _OptCPT, type: Type[_CPT]) -> property: ...
|
|
||||||
|
|
||||||
class _CacheControl(UpdateDictMixin[str, _OptCPT], Dict[str, _OptCPT]):
|
|
||||||
provided: bool
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
values: Union[Mapping[str, _OptCPT], Iterable[Tuple[str, _OptCPT]]] = (),
|
|
||||||
on_update: Optional[Callable[[_CacheControl], None]] = None,
|
|
||||||
) -> None: ...
|
|
||||||
@property
|
|
||||||
def no_cache(self) -> Optional[bool]: ...
|
|
||||||
@no_cache.setter
|
|
||||||
def no_cache(self, value: Optional[bool]) -> None: ...
|
|
||||||
@no_cache.deleter
|
|
||||||
def no_cache(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def no_store(self) -> Optional[bool]: ...
|
|
||||||
@no_store.setter
|
|
||||||
def no_store(self, value: Optional[bool]) -> None: ...
|
|
||||||
@no_store.deleter
|
|
||||||
def no_store(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def max_age(self) -> Optional[int]: ...
|
|
||||||
@max_age.setter
|
|
||||||
def max_age(self, value: Optional[int]) -> None: ...
|
|
||||||
@max_age.deleter
|
|
||||||
def max_age(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def no_transform(self) -> Optional[bool]: ...
|
|
||||||
@no_transform.setter
|
|
||||||
def no_transform(self, value: Optional[bool]) -> None: ...
|
|
||||||
@no_transform.deleter
|
|
||||||
def no_transform(self) -> None: ...
|
|
||||||
def _get_cache_value(self, key: str, empty: Optional[T], type: Type[T]) -> T: ...
|
|
||||||
def _set_cache_value(self, key: str, value: Optional[T], type: Type[T]) -> None: ...
|
|
||||||
def _del_cache_value(self, key: str) -> None: ...
|
|
||||||
def to_header(self) -> str: ...
|
|
||||||
@staticmethod
|
|
||||||
def cache_property(key: str, empty: _OptCPT, type: Type[_CPT]) -> property: ...
|
|
||||||
|
|
||||||
class RequestCacheControl(ImmutableDictMixin[str, _OptCPT], _CacheControl):
|
|
||||||
@property
|
|
||||||
def max_stale(self) -> Optional[int]: ...
|
|
||||||
@max_stale.setter
|
|
||||||
def max_stale(self, value: Optional[int]) -> None: ...
|
|
||||||
@max_stale.deleter
|
|
||||||
def max_stale(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def min_fresh(self) -> Optional[int]: ...
|
|
||||||
@min_fresh.setter
|
|
||||||
def min_fresh(self, value: Optional[int]) -> None: ...
|
|
||||||
@min_fresh.deleter
|
|
||||||
def min_fresh(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def only_if_cached(self) -> Optional[bool]: ...
|
|
||||||
@only_if_cached.setter
|
|
||||||
def only_if_cached(self, value: Optional[bool]) -> None: ...
|
|
||||||
@only_if_cached.deleter
|
|
||||||
def only_if_cached(self) -> None: ...
|
|
||||||
|
|
||||||
class ResponseCacheControl(_CacheControl):
|
|
||||||
@property
|
|
||||||
def public(self) -> Optional[bool]: ...
|
|
||||||
@public.setter
|
|
||||||
def public(self, value: Optional[bool]) -> None: ...
|
|
||||||
@public.deleter
|
|
||||||
def public(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def private(self) -> Optional[bool]: ...
|
|
||||||
@private.setter
|
|
||||||
def private(self, value: Optional[bool]) -> None: ...
|
|
||||||
@private.deleter
|
|
||||||
def private(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def must_revalidate(self) -> Optional[bool]: ...
|
|
||||||
@must_revalidate.setter
|
|
||||||
def must_revalidate(self, value: Optional[bool]) -> None: ...
|
|
||||||
@must_revalidate.deleter
|
|
||||||
def must_revalidate(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def proxy_revalidate(self) -> Optional[bool]: ...
|
|
||||||
@proxy_revalidate.setter
|
|
||||||
def proxy_revalidate(self, value: Optional[bool]) -> None: ...
|
|
||||||
@proxy_revalidate.deleter
|
|
||||||
def proxy_revalidate(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def s_maxage(self) -> Optional[int]: ...
|
|
||||||
@s_maxage.setter
|
|
||||||
def s_maxage(self, value: Optional[int]) -> None: ...
|
|
||||||
@s_maxage.deleter
|
|
||||||
def s_maxage(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def immutable(self) -> Optional[bool]: ...
|
|
||||||
@immutable.setter
|
|
||||||
def immutable(self, value: Optional[bool]) -> None: ...
|
|
||||||
@immutable.deleter
|
|
||||||
def immutable(self) -> None: ...
|
|
||||||
|
|
||||||
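
The long run of properties above is generated by the `cache_property` helper: each Cache-Control directive becomes a typed attribute backed by the underlying dict, and `to_header()` serializes the result. A usage sketch (directive order in the rendered header may vary):

from werkzeug.datastructures import ResponseCacheControl

cc = ResponseCacheControl()
cc.public = True
cc.max_age = 300
cc.no_transform = True
print(cc.to_header())  # e.g. 'public, max-age=300, no-transform'
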
def csp_property(key: str) -> property: ...
|
|
||||||
|
|
||||||
class ContentSecurityPolicy(UpdateDictMixin[str, str], Dict[str, str]):
|
|
||||||
@property
|
|
||||||
def base_uri(self) -> Optional[str]: ...
|
|
||||||
@base_uri.setter
|
|
||||||
def base_uri(self, value: Optional[str]) -> None: ...
|
|
||||||
@base_uri.deleter
|
|
||||||
def base_uri(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def child_src(self) -> Optional[str]: ...
|
|
||||||
@child_src.setter
|
|
||||||
def child_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@child_src.deleter
|
|
||||||
def child_src(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def connect_src(self) -> Optional[str]: ...
|
|
||||||
@connect_src.setter
|
|
||||||
def connect_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@connect_src.deleter
|
|
||||||
def connect_src(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def default_src(self) -> Optional[str]: ...
|
|
||||||
@default_src.setter
|
|
||||||
def default_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@default_src.deleter
|
|
||||||
def default_src(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def font_src(self) -> Optional[str]: ...
|
|
||||||
@font_src.setter
|
|
||||||
def font_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@font_src.deleter
|
|
||||||
def font_src(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def form_action(self) -> Optional[str]: ...
|
|
||||||
@form_action.setter
|
|
||||||
def form_action(self, value: Optional[str]) -> None: ...
|
|
||||||
@form_action.deleter
|
|
||||||
def form_action(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def frame_ancestors(self) -> Optional[str]: ...
|
|
||||||
@frame_ancestors.setter
|
|
||||||
def frame_ancestors(self, value: Optional[str]) -> None: ...
|
|
||||||
@frame_ancestors.deleter
|
|
||||||
def frame_ancestors(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def frame_src(self) -> Optional[str]: ...
|
|
||||||
@frame_src.setter
|
|
||||||
def frame_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@frame_src.deleter
|
|
||||||
def frame_src(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def img_src(self) -> Optional[str]: ...
|
|
||||||
@img_src.setter
|
|
||||||
def img_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@img_src.deleter
|
|
||||||
def img_src(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def manifest_src(self) -> Optional[str]: ...
|
|
||||||
@manifest_src.setter
|
|
||||||
def manifest_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@manifest_src.deleter
|
|
||||||
def manifest_src(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def media_src(self) -> Optional[str]: ...
|
|
||||||
@media_src.setter
|
|
||||||
def media_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@media_src.deleter
|
|
||||||
def media_src(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def navigate_to(self) -> Optional[str]: ...
|
|
||||||
@navigate_to.setter
|
|
||||||
def navigate_to(self, value: Optional[str]) -> None: ...
|
|
||||||
@navigate_to.deleter
|
|
||||||
def navigate_to(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def object_src(self) -> Optional[str]: ...
|
|
||||||
@object_src.setter
|
|
||||||
def object_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@object_src.deleter
|
|
||||||
def object_src(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def prefetch_src(self) -> Optional[str]: ...
|
|
||||||
@prefetch_src.setter
|
|
||||||
def prefetch_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@prefetch_src.deleter
|
|
||||||
def prefetch_src(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def plugin_types(self) -> Optional[str]: ...
|
|
||||||
@plugin_types.setter
|
|
||||||
def plugin_types(self, value: Optional[str]) -> None: ...
|
|
||||||
@plugin_types.deleter
|
|
||||||
def plugin_types(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def report_to(self) -> Optional[str]: ...
|
|
||||||
@report_to.setter
|
|
||||||
def report_to(self, value: Optional[str]) -> None: ...
|
|
||||||
@report_to.deleter
|
|
||||||
def report_to(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def report_uri(self) -> Optional[str]: ...
|
|
||||||
@report_uri.setter
|
|
||||||
def report_uri(self, value: Optional[str]) -> None: ...
|
|
||||||
@report_uri.deleter
|
|
||||||
def report_uri(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def sandbox(self) -> Optional[str]: ...
|
|
||||||
@sandbox.setter
|
|
||||||
def sandbox(self, value: Optional[str]) -> None: ...
|
|
||||||
@sandbox.deleter
|
|
||||||
def sandbox(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def script_src(self) -> Optional[str]: ...
|
|
||||||
@script_src.setter
|
|
||||||
def script_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@script_src.deleter
|
|
||||||
def script_src(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def script_src_attr(self) -> Optional[str]: ...
|
|
||||||
@script_src_attr.setter
|
|
||||||
def script_src_attr(self, value: Optional[str]) -> None: ...
|
|
||||||
@script_src_attr.deleter
|
|
||||||
def script_src_attr(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def script_src_elem(self) -> Optional[str]: ...
|
|
||||||
@script_src_elem.setter
|
|
||||||
def script_src_elem(self, value: Optional[str]) -> None: ...
|
|
||||||
@script_src_elem.deleter
|
|
||||||
def script_src_elem(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def style_src(self) -> Optional[str]: ...
|
|
||||||
@style_src.setter
|
|
||||||
def style_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@style_src.deleter
|
|
||||||
def style_src(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def style_src_attr(self) -> Optional[str]: ...
|
|
||||||
@style_src_attr.setter
|
|
||||||
def style_src_attr(self, value: Optional[str]) -> None: ...
|
|
||||||
@style_src_attr.deleter
|
|
||||||
def style_src_attr(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def style_src_elem(self) -> Optional[str]: ...
|
|
||||||
@style_src_elem.setter
|
|
||||||
def style_src_elem(self, value: Optional[str]) -> None: ...
|
|
||||||
@style_src_elem.deleter
|
|
||||||
def style_src_elem(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def worker_src(self) -> Optional[str]: ...
|
|
||||||
@worker_src.setter
|
|
||||||
def worker_src(self, value: Optional[str]) -> None: ...
|
|
||||||
@worker_src.deleter
|
|
||||||
def worker_src(self) -> None: ...
|
|
||||||
provided: bool
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
values: Union[Mapping[str, str], Iterable[Tuple[str, str]]] = (),
|
|
||||||
on_update: Optional[Callable[[ContentSecurityPolicy], None]] = None,
|
|
||||||
) -> None: ...
|
|
||||||
def _get_value(self, key: str) -> Optional[str]: ...
|
|
||||||
def _set_value(self, key: str, value: str) -> None: ...
|
|
||||||
def _del_value(self, key: str) -> None: ...
|
|
||||||
def to_header(self) -> str: ...
|
|
||||||
|
|
||||||
class CallbackDict(UpdateDictMixin[K, V], Dict[K, V]):
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
initial: Optional[Union[Mapping[K, V], Iterable[Tuple[K, V]]]] = None,
|
|
||||||
on_update: Optional[Callable[[_CD], None]] = None,
|
|
||||||
) -> None: ...
|
|
||||||
|
|
||||||
class HeaderSet(Set[str]):
|
|
||||||
_headers: List[str]
|
|
||||||
_set: Set[str]
|
|
||||||
on_update: Optional[Callable[[HeaderSet], None]]
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
headers: Optional[Iterable[str]] = None,
|
|
||||||
on_update: Optional[Callable[[HeaderSet], None]] = None,
|
|
||||||
) -> None: ...
|
|
||||||
def add(self, header: str) -> None: ...
|
|
||||||
def remove(self, header: str) -> None: ...
|
|
||||||
def update(self, iterable: Iterable[str]) -> None: ... # type: ignore
|
|
||||||
def discard(self, header: str) -> None: ...
|
|
||||||
def find(self, header: str) -> int: ...
|
|
||||||
def index(self, header: str) -> int: ...
|
|
||||||
def clear(self) -> None: ...
|
|
||||||
def as_set(self, preserve_casing: bool = False) -> Set[str]: ...
|
|
||||||
def to_header(self) -> str: ...
|
|
||||||
def __getitem__(self, idx: int) -> str: ...
|
|
||||||
def __delitem__(self, idx: int) -> None: ...
|
|
||||||
def __setitem__(self, idx: int, value: str) -> None: ...
|
|
||||||
def __contains__(self, header: str) -> bool: ... # type: ignore
|
|
||||||
def __len__(self) -> int: ...
|
|
||||||
def __iter__(self) -> Iterator[str]: ...
|
|
||||||
|
|
||||||
class ETags(Collection[str]):
|
|
||||||
_strong: FrozenSet[str]
|
|
||||||
_weak: FrozenSet[str]
|
|
||||||
star_tag: bool
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
strong_etags: Optional[Iterable[str]] = None,
|
|
||||||
weak_etags: Optional[Iterable[str]] = None,
|
|
||||||
star_tag: bool = False,
|
|
||||||
) -> None: ...
|
|
||||||
def as_set(self, include_weak: bool = False) -> Set[str]: ...
|
|
||||||
def is_weak(self, etag: str) -> bool: ...
|
|
||||||
def is_strong(self, etag: str) -> bool: ...
|
|
||||||
def contains_weak(self, etag: str) -> bool: ...
|
|
||||||
def contains(self, etag: str) -> bool: ...
|
|
||||||
def contains_raw(self, etag: str) -> bool: ...
|
|
||||||
def to_header(self) -> str: ...
|
|
||||||
def __call__(
|
|
||||||
self,
|
|
||||||
etag: Optional[str] = None,
|
|
||||||
data: Optional[bytes] = None,
|
|
||||||
include_weak: bool = False,
|
|
||||||
) -> bool: ...
|
|
||||||
def __len__(self) -> int: ...
|
|
||||||
def __iter__(self) -> Iterator[str]: ...
|
|
||||||
def __contains__(self, item: str) -> bool: ... # type: ignore
|
|
||||||
|
|
||||||
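
`ETags` above represents a parsed If-Match / If-None-Match header, split into strong tags, weak tags, and the `*` wildcard. A usage sketch via `werkzeug.http.parse_etags`:

from werkzeug.datastructures import ETags
from werkzeug.http import parse_etags

etags = parse_etags('"abc", W/"def"')
print("abc" in etags)                    # True  (strong match)
print(etags.contains_weak("def"))        # True
print(etags.is_weak("def"))              # True
print(isinstance(etags, ETags))          # True
print(ETags(star_tag=True)("anything"))  # True: "*" matches any tag
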
class IfRange:
|
|
||||||
etag: Optional[str]
|
|
||||||
date: Optional[datetime]
|
|
||||||
def __init__(
|
|
||||||
self, etag: Optional[str] = None, date: Optional[datetime] = None
|
|
||||||
) -> None: ...
|
|
||||||
def to_header(self) -> str: ...
|
|
||||||
|
|
||||||
class Range:
|
|
||||||
units: str
|
|
||||||
ranges: List[Tuple[int, Optional[int]]]
|
|
||||||
def __init__(self, units: str, ranges: List[Tuple[int, Optional[int]]]) -> None: ...
|
|
||||||
def range_for_length(self, length: Optional[int]) -> Optional[Tuple[int, int]]: ...
|
|
||||||
def make_content_range(self, length: Optional[int]) -> Optional[ContentRange]: ...
|
|
||||||
def to_header(self) -> str: ...
|
|
||||||
def to_content_range_header(self, length: Optional[int]) -> Optional[str]: ...
|
|
||||||
|
|
||||||
def _callback_property(name: str) -> property: ...
|
|
||||||
|
|
||||||
class ContentRange:
|
|
||||||
on_update: Optional[Callable[[ContentRange], None]]
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
units: Optional[str],
|
|
||||||
start: Optional[int],
|
|
||||||
stop: Optional[int],
|
|
||||||
length: Optional[int] = None,
|
|
||||||
on_update: Optional[Callable[[ContentRange], None]] = None,
|
|
||||||
) -> None: ...
|
|
||||||
@property
|
|
||||||
def units(self) -> Optional[str]: ...
|
|
||||||
@units.setter
|
|
||||||
def units(self, value: Optional[str]) -> None: ...
|
|
||||||
@property
|
|
||||||
def start(self) -> Optional[int]: ...
|
|
||||||
@start.setter
|
|
||||||
def start(self, value: Optional[int]) -> None: ...
|
|
||||||
@property
|
|
||||||
def stop(self) -> Optional[int]: ...
|
|
||||||
@stop.setter
|
|
||||||
def stop(self, value: Optional[int]) -> None: ...
|
|
||||||
@property
|
|
||||||
def length(self) -> Optional[int]: ...
|
|
||||||
@length.setter
|
|
||||||
def length(self, value: Optional[int]) -> None: ...
|
|
||||||
def set(
|
|
||||||
self,
|
|
||||||
start: Optional[int],
|
|
||||||
stop: Optional[int],
|
|
||||||
length: Optional[int] = None,
|
|
||||||
units: Optional[str] = "bytes",
|
|
||||||
) -> None: ...
|
|
||||||
def unset(self) -> None: ...
|
|
||||||
def to_header(self) -> str: ...
|
|
||||||
|
|
||||||
class Authorization(ImmutableDictMixin[str, str], Dict[str, str]):
|
|
||||||
type: str
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
auth_type: str,
|
|
||||||
data: Optional[Union[Mapping[str, str], Iterable[Tuple[str, str]]]] = None,
|
|
||||||
) -> None: ...
|
|
||||||
@property
|
|
||||||
def username(self) -> Optional[str]: ...
|
|
||||||
@property
|
|
||||||
def password(self) -> Optional[str]: ...
|
|
||||||
@property
|
|
||||||
def realm(self) -> Optional[str]: ...
|
|
||||||
@property
|
|
||||||
def nonce(self) -> Optional[str]: ...
|
|
||||||
@property
|
|
||||||
def uri(self) -> Optional[str]: ...
|
|
||||||
@property
|
|
||||||
def nc(self) -> Optional[str]: ...
|
|
||||||
@property
|
|
||||||
def cnonce(self) -> Optional[str]: ...
|
|
||||||
@property
|
|
||||||
def response(self) -> Optional[str]: ...
|
|
||||||
@property
|
|
||||||
def opaque(self) -> Optional[str]: ...
|
|
||||||
@property
|
|
||||||
def qop(self) -> Optional[str]: ...
|
|
||||||
def to_header(self) -> str: ...
|
|
||||||
|
|
||||||
def auth_property(name: str, doc: Optional[str] = None) -> property: ...
|
|
||||||
def _set_property(name: str, doc: Optional[str] = None) -> property: ...
|
|
||||||
|
|
||||||
class WWWAuthenticate(UpdateDictMixin[str, str], Dict[str, str]):
|
|
||||||
_require_quoting: FrozenSet[str]
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
auth_type: Optional[str] = None,
|
|
||||||
values: Optional[Union[Mapping[str, str], Iterable[Tuple[str, str]]]] = None,
|
|
||||||
on_update: Optional[Callable[[WWWAuthenticate], None]] = None,
|
|
||||||
) -> None: ...
|
|
||||||
def set_basic(self, realm: str = ...) -> None: ...
|
|
||||||
def set_digest(
|
|
||||||
self,
|
|
||||||
realm: str,
|
|
||||||
nonce: str,
|
|
||||||
qop: Iterable[str] = ("auth",),
|
|
||||||
opaque: Optional[str] = None,
|
|
||||||
algorithm: Optional[str] = None,
|
|
||||||
stale: bool = False,
|
|
||||||
) -> None: ...
|
|
||||||
def to_header(self) -> str: ...
|
|
||||||
@property
|
|
||||||
def type(self) -> Optional[str]: ...
|
|
||||||
@type.setter
|
|
||||||
def type(self, value: Optional[str]) -> None: ...
|
|
||||||
@property
|
|
||||||
def realm(self) -> Optional[str]: ...
|
|
||||||
@realm.setter
|
|
||||||
def realm(self, value: Optional[str]) -> None: ...
|
|
||||||
@property
|
|
||||||
def domain(self) -> HeaderSet: ...
|
|
||||||
@property
|
|
||||||
def nonce(self) -> Optional[str]: ...
|
|
||||||
@nonce.setter
|
|
||||||
def nonce(self, value: Optional[str]) -> None: ...
|
|
||||||
@property
|
|
||||||
def opaque(self) -> Optional[str]: ...
|
|
||||||
@opaque.setter
|
|
||||||
def opaque(self, value: Optional[str]) -> None: ...
|
|
||||||
@property
|
|
||||||
def algorithm(self) -> Optional[str]: ...
|
|
||||||
@algorithm.setter
|
|
||||||
def algorithm(self, value: Optional[str]) -> None: ...
|
|
||||||
@property
|
|
||||||
def qop(self) -> HeaderSet: ...
|
|
||||||
@property
|
|
||||||
def stale(self) -> Optional[bool]: ...
|
|
||||||
@stale.setter
|
|
||||||
def stale(self, value: Optional[bool]) -> None: ...
|
|
||||||
@staticmethod
|
|
||||||
def auth_property(name: str, doc: Optional[str] = None) -> property: ...
|
|
||||||
|
|
||||||
class FileStorage:
|
|
||||||
name: Optional[str]
|
|
||||||
stream: IO[bytes]
|
|
||||||
filename: Optional[str]
|
|
||||||
headers: Headers
|
|
||||||
_parsed_content_type: Tuple[str, Dict[str, str]]
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
stream: Optional[IO[bytes]] = None,
|
|
||||||
filename: Union[str, PathLike, None] = None,
|
|
||||||
name: Optional[str] = None,
|
|
||||||
content_type: Optional[str] = None,
|
|
||||||
content_length: Optional[int] = None,
|
|
||||||
headers: Optional[Headers] = None,
|
|
||||||
) -> None: ...
|
|
||||||
def _parse_content_type(self) -> None: ...
|
|
||||||
@property
|
|
||||||
def content_type(self) -> str: ...
|
|
||||||
@property
|
|
||||||
def content_length(self) -> int: ...
|
|
||||||
@property
|
|
||||||
def mimetype(self) -> str: ...
|
|
||||||
@property
|
|
||||||
def mimetype_params(self) -> Dict[str, str]: ...
|
|
||||||
def save(
|
|
||||||
self, dst: Union[str, PathLike, IO[bytes]], buffer_size: int = ...
|
|
||||||
) -> None: ...
|
|
||||||
def close(self) -> None: ...
|
|
||||||
def __bool__(self) -> bool: ...
|
|
||||||
def __getattr__(self, name: str) -> Any: ...
|
|
||||||
def __iter__(self) -> Iterator[bytes]: ...
|
|
||||||
def __repr__(self) -> str: ...
|
|
|
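The stub above only declares the ``FileStorage`` interface. As an illustrative sketch (the form field name "upload" is an assumption), a typical WSGI handler reads an uploaded file from ``request.files`` and persists it with ``save``:

from werkzeug.utils import secure_filename
from werkzeug.wrappers import Request, Response

@Request.application
def app(request: Request) -> Response:
    # request.files maps form field names to FileStorage instances.
    upload = request.files["upload"]  # "upload" is an assumed field name
    # save() streams the data to a path or an open binary file object.
    upload.save(f"/tmp/{secure_filename(upload.filename or 'upload.bin')}")
    return Response(f"received {upload.mimetype}", mimetype="text/plain")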
@ -0,0 +1,34 @@
|
||||||
|
from .accept import Accept as Accept
from .accept import CharsetAccept as CharsetAccept
from .accept import LanguageAccept as LanguageAccept
from .accept import MIMEAccept as MIMEAccept
from .auth import Authorization as Authorization
from .auth import WWWAuthenticate as WWWAuthenticate
from .cache_control import RequestCacheControl as RequestCacheControl
from .cache_control import ResponseCacheControl as ResponseCacheControl
from .csp import ContentSecurityPolicy as ContentSecurityPolicy
from .etag import ETags as ETags
from .file_storage import FileMultiDict as FileMultiDict
from .file_storage import FileStorage as FileStorage
from .headers import EnvironHeaders as EnvironHeaders
from .headers import Headers as Headers
from .mixins import ImmutableDictMixin as ImmutableDictMixin
from .mixins import ImmutableHeadersMixin as ImmutableHeadersMixin
from .mixins import ImmutableListMixin as ImmutableListMixin
from .mixins import ImmutableMultiDictMixin as ImmutableMultiDictMixin
from .mixins import UpdateDictMixin as UpdateDictMixin
from .range import ContentRange as ContentRange
from .range import IfRange as IfRange
from .range import Range as Range
from .structures import CallbackDict as CallbackDict
from .structures import CombinedMultiDict as CombinedMultiDict
from .structures import HeaderSet as HeaderSet
from .structures import ImmutableDict as ImmutableDict
from .structures import ImmutableList as ImmutableList
from .structures import ImmutableMultiDict as ImmutableMultiDict
from .structures import ImmutableOrderedMultiDict as ImmutableOrderedMultiDict
from .structures import ImmutableTypeConversionDict as ImmutableTypeConversionDict
from .structures import iter_multi_items as iter_multi_items
from .structures import MultiDict as MultiDict
from .structures import OrderedMultiDict as OrderedMultiDict
from .structures import TypeConversionDict as TypeConversionDict
|
|
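These ``name as name`` re-exports define the public import surface of ``werkzeug.datastructures``. For example (illustrative values only):

from werkzeug.datastructures import Headers, MultiDict

headers = Headers([("Content-Type", "text/plain")])
args = MultiDict([("a", "1"), ("a", "2")])
assert headers.get("content-type") == "text/plain"  # header lookup is case-insensitive
assert args.getlist("a") == ["1", "2"]               # MultiDict keeps repeated keys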
@ -0,0 +1,326 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import codecs
|
||||||
|
import re
|
||||||
|
|
||||||
|
from .structures import ImmutableList
|
||||||
|
|
||||||
|
|
||||||
|
class Accept(ImmutableList):
|
||||||
|
"""An :class:`Accept` object is just a list subclass for lists of
|
||||||
|
``(value, quality)`` tuples. It is automatically sorted by specificity
|
||||||
|
and quality.
|
||||||
|
|
||||||
|
All :class:`Accept` objects work like a list but provide extra
|
||||||
|
functionality for working with the data. Containment checks are
|
||||||
|
normalized to the rules of that header:
|
||||||
|
|
||||||
|
>>> a = CharsetAccept([('ISO-8859-1', 1), ('utf-8', 0.7)])
|
||||||
|
>>> a.best
|
||||||
|
'ISO-8859-1'
|
||||||
|
>>> 'iso-8859-1' in a
|
||||||
|
True
|
||||||
|
>>> 'UTF8' in a
|
||||||
|
True
|
||||||
|
>>> 'utf7' in a
|
||||||
|
False
|
||||||
|
|
||||||
|
To get the quality for an item you can use normal item lookup:
|
||||||
|
|
||||||
|
>>> print(a['utf-8'])
|
||||||
|
0.7
|
||||||
|
>>> a['utf7']
|
||||||
|
0
|
||||||
|
|
||||||
|
.. versionchanged:: 0.5
|
||||||
|
:class:`Accept` objects are forced immutable now.
|
||||||
|
|
||||||
|
.. versionchanged:: 1.0.0
|
||||||
|
:class:`Accept` internal values are no longer ordered
|
||||||
|
alphabetically for equal quality tags. Instead the initial
|
||||||
|
order is preserved.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, values=()):
|
||||||
|
if values is None:
|
||||||
|
list.__init__(self)
|
||||||
|
self.provided = False
|
||||||
|
elif isinstance(values, Accept):
|
||||||
|
self.provided = values.provided
|
||||||
|
list.__init__(self, values)
|
||||||
|
else:
|
||||||
|
self.provided = True
|
||||||
|
values = sorted(
|
||||||
|
values, key=lambda x: (self._specificity(x[0]), x[1]), reverse=True
|
||||||
|
)
|
||||||
|
list.__init__(self, values)
|
||||||
|
|
||||||
|
def _specificity(self, value):
|
||||||
|
"""Returns a tuple describing the value's specificity."""
|
||||||
|
return (value != "*",)
|
||||||
|
|
||||||
|
def _value_matches(self, value, item):
|
||||||
|
"""Check if a value matches a given accept item."""
|
||||||
|
return item == "*" or item.lower() == value.lower()
|
||||||
|
|
||||||
|
def __getitem__(self, key):
|
||||||
|
"""Besides index lookup (getting item n) you can also pass it a string
|
||||||
|
to get the quality for the item. If the item is not in the list, the
|
||||||
|
returned quality is ``0``.
|
||||||
|
"""
|
||||||
|
if isinstance(key, str):
|
||||||
|
return self.quality(key)
|
||||||
|
return list.__getitem__(self, key)
|
||||||
|
|
||||||
|
def quality(self, key):
|
||||||
|
"""Returns the quality of the key.
|
||||||
|
|
||||||
|
.. versionadded:: 0.6
|
||||||
|
In previous versions you had to use the item-lookup syntax
|
||||||
|
(eg: ``obj[key]`` instead of ``obj.quality(key)``)
|
||||||
|
"""
|
||||||
|
for item, quality in self:
|
||||||
|
if self._value_matches(key, item):
|
||||||
|
return quality
|
||||||
|
return 0
|
||||||
|
|
||||||
|
def __contains__(self, value):
|
||||||
|
for item, _quality in self:
|
||||||
|
if self._value_matches(value, item):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
pairs_str = ", ".join(f"({x!r}, {y})" for x, y in self)
|
||||||
|
return f"{type(self).__name__}([{pairs_str}])"
|
||||||
|
|
||||||
|
def index(self, key):
|
||||||
|
"""Get the position of an entry or raise :exc:`ValueError`.
|
||||||
|
|
||||||
|
:param key: The key to be looked up.
|
||||||
|
|
||||||
|
.. versionchanged:: 0.5
|
||||||
|
This used to raise :exc:`IndexError`, which was inconsistent
|
||||||
|
with the list API.
|
||||||
|
"""
|
||||||
|
if isinstance(key, str):
|
||||||
|
for idx, (item, _quality) in enumerate(self):
|
||||||
|
if self._value_matches(key, item):
|
||||||
|
return idx
|
||||||
|
raise ValueError(key)
|
||||||
|
return list.index(self, key)
|
||||||
|
|
||||||
|
def find(self, key):
|
||||||
|
"""Get the position of an entry or return -1.
|
||||||
|
|
||||||
|
:param key: The key to be looked up.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return self.index(key)
|
||||||
|
except ValueError:
|
||||||
|
return -1
|
||||||
|
|
||||||
|
def values(self):
|
||||||
|
"""Iterate over all values."""
|
||||||
|
for item in self:
|
||||||
|
yield item[0]
|
||||||
|
|
||||||
|
def to_header(self):
|
||||||
|
"""Convert the header set into an HTTP header string."""
|
||||||
|
result = []
|
||||||
|
for value, quality in self:
|
||||||
|
if quality != 1:
|
||||||
|
value = f"{value};q={quality}"
|
||||||
|
result.append(value)
|
||||||
|
return ",".join(result)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.to_header()
|
||||||
|
|
||||||
|
def _best_single_match(self, match):
|
||||||
|
for client_item, quality in self:
|
||||||
|
if self._value_matches(match, client_item):
|
||||||
|
# self is sorted by specificity descending, we can exit
|
||||||
|
return client_item, quality
|
||||||
|
return None
|
||||||
|
|
||||||
|
def best_match(self, matches, default=None):
|
||||||
|
"""Returns the best match from a list of possible matches based
|
||||||
|
on the specificity and quality of the client. If two items have the
|
||||||
|
same quality and specificity, the one that comes first is returned.
|
||||||
|
|
||||||
|
:param matches: a list of matches to check for
|
||||||
|
:param default: the value that is returned if none match
|
||||||
|
"""
|
||||||
|
result = default
|
||||||
|
best_quality = -1
|
||||||
|
best_specificity = (-1,)
|
||||||
|
for server_item in matches:
|
||||||
|
match = self._best_single_match(server_item)
|
||||||
|
if not match:
|
||||||
|
continue
|
||||||
|
client_item, quality = match
|
||||||
|
specificity = self._specificity(client_item)
|
||||||
|
if quality <= 0 or quality < best_quality:
|
||||||
|
continue
|
||||||
|
# better quality or same quality but more specific => better match
|
||||||
|
if quality > best_quality or specificity > best_specificity:
|
||||||
|
result = server_item
|
||||||
|
best_quality = quality
|
||||||
|
best_specificity = specificity
|
||||||
|
return result
|
||||||
|
|
||||||
|
@property
|
||||||
|
def best(self):
|
||||||
|
"""The best match as value."""
|
||||||
|
if self:
|
||||||
|
return self[0][0]
|
||||||
|
|
||||||
|
|
||||||
|
_mime_split_re = re.compile(r"/|(?:\s*;\s*)")
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_mime(value):
|
||||||
|
return _mime_split_re.split(value.lower())
|
||||||
|
|
||||||
|
|
||||||
|
class MIMEAccept(Accept):
|
||||||
|
"""Like :class:`Accept` but with special methods and behavior for
|
||||||
|
mimetypes.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _specificity(self, value):
|
||||||
|
return tuple(x != "*" for x in _mime_split_re.split(value))
|
||||||
|
|
||||||
|
def _value_matches(self, value, item):
|
||||||
|
# item comes from the client, can't match if it's invalid.
|
||||||
|
if "/" not in item:
|
||||||
|
return False
|
||||||
|
|
||||||
|
# value comes from the application, tell the developer when it
|
||||||
|
# doesn't look valid.
|
||||||
|
if "/" not in value:
|
||||||
|
raise ValueError(f"invalid mimetype {value!r}")
|
||||||
|
|
||||||
|
# Split the match value into type, subtype, and a sorted list of parameters.
|
||||||
|
normalized_value = _normalize_mime(value)
|
||||||
|
value_type, value_subtype = normalized_value[:2]
|
||||||
|
value_params = sorted(normalized_value[2:])
|
||||||
|
|
||||||
|
# "*/*" is the only valid value that can start with "*".
|
||||||
|
if value_type == "*" and value_subtype != "*":
|
||||||
|
raise ValueError(f"invalid mimetype {value!r}")
|
||||||
|
|
||||||
|
# Split the accept item into type, subtype, and parameters.
|
||||||
|
normalized_item = _normalize_mime(item)
|
||||||
|
item_type, item_subtype = normalized_item[:2]
|
||||||
|
item_params = sorted(normalized_item[2:])
|
||||||
|
|
||||||
|
# "*/not-*" from the client is invalid, can't match.
|
||||||
|
if item_type == "*" and item_subtype != "*":
|
||||||
|
return False
|
||||||
|
|
||||||
|
return (
|
||||||
|
(item_type == "*" and item_subtype == "*")
|
||||||
|
or (value_type == "*" and value_subtype == "*")
|
||||||
|
) or (
|
||||||
|
item_type == value_type
|
||||||
|
and (
|
||||||
|
item_subtype == "*"
|
||||||
|
or value_subtype == "*"
|
||||||
|
or (item_subtype == value_subtype and item_params == value_params)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def accept_html(self):
|
||||||
|
"""True if this object accepts HTML."""
|
||||||
|
return (
|
||||||
|
"text/html" in self or "application/xhtml+xml" in self or self.accept_xhtml
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def accept_xhtml(self):
|
||||||
|
"""True if this object accepts XHTML."""
|
||||||
|
return "application/xhtml+xml" in self or "application/xml" in self
|
||||||
|
|
||||||
|
@property
|
||||||
|
def accept_json(self):
|
||||||
|
"""True if this object accepts JSON."""
|
||||||
|
return "application/json" in self
|
||||||
|
|
||||||
|
|
||||||
|
_locale_delim_re = re.compile(r"[_-]")
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_lang(value):
|
||||||
|
"""Process a language tag for matching."""
|
||||||
|
return _locale_delim_re.split(value.lower())
|
||||||
|
|
||||||
|
|
||||||
|
class LanguageAccept(Accept):
|
||||||
|
"""Like :class:`Accept` but with normalization for language tags."""
|
||||||
|
|
||||||
|
def _value_matches(self, value, item):
|
||||||
|
return item == "*" or _normalize_lang(value) == _normalize_lang(item)
|
||||||
|
|
||||||
|
def best_match(self, matches, default=None):
|
||||||
|
"""Given a list of supported values, finds the best match from
|
||||||
|
the list of accepted values.
|
||||||
|
|
||||||
|
Language tags are normalized for the purpose of matching, but
|
||||||
|
are returned unchanged.
|
||||||
|
|
||||||
|
If no exact match is found, this will fall back to matching
|
||||||
|
the first subtag (primary language only), first with the
|
||||||
|
accepted values then with the match values. This partial matching is not
|
||||||
|
applied to any other language subtags.
|
||||||
|
|
||||||
|
The default is returned if no exact or fallback match is found.
|
||||||
|
|
||||||
|
:param matches: A list of supported languages to find a match.
|
||||||
|
:param default: The value that is returned if none match.
|
||||||
|
"""
|
||||||
|
# Look for an exact match first. If a client accepts "en-US",
|
||||||
|
# "en-US" is a valid match at this point.
|
||||||
|
result = super().best_match(matches)
|
||||||
|
|
||||||
|
if result is not None:
|
||||||
|
return result
|
||||||
|
|
||||||
|
# Fall back to accepting primary tags. If a client accepts
|
||||||
|
# "en-US", "en" is a valid match at this point. Need to use
|
||||||
|
# re.split to account for 2 or 3 letter codes.
|
||||||
|
fallback = Accept(
|
||||||
|
[(_locale_delim_re.split(item[0], 1)[0], item[1]) for item in self]
|
||||||
|
)
|
||||||
|
result = fallback.best_match(matches)
|
||||||
|
|
||||||
|
if result is not None:
|
||||||
|
return result
|
||||||
|
|
||||||
|
# Fall back to matching primary tags. If the client accepts
|
||||||
|
# "en", "en-US" is a valid match at this point.
|
||||||
|
fallback_matches = [_locale_delim_re.split(item, 1)[0] for item in matches]
|
||||||
|
result = super().best_match(fallback_matches)
|
||||||
|
|
||||||
|
# Return a value from the original match list. Find the first
|
||||||
|
# original value that starts with the matched primary tag.
|
||||||
|
if result is not None:
|
||||||
|
return next(item for item in matches if item.startswith(result))
|
||||||
|
|
||||||
|
return default
|
||||||
|
|
||||||
|
|
||||||
|
class CharsetAccept(Accept):
|
||||||
|
"""Like :class:`Accept` but with normalization for charsets."""
|
||||||
|
|
||||||
|
def _value_matches(self, value, item):
|
||||||
|
def _normalize(name):
|
||||||
|
try:
|
||||||
|
return codecs.lookup(name).name
|
||||||
|
except LookupError:
|
||||||
|
return name.lower()
|
||||||
|
|
||||||
|
return item == "*" or _normalize(value) == _normalize(item)
|
|
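A brief, illustrative sketch of the negotiation behaviour implemented above (the header values and server-side choices are made up):

from werkzeug.datastructures import LanguageAccept, MIMEAccept

accept = MIMEAccept([("text/html", 1), ("application/json", 0.5), ("*/*", 0.1)])
assert accept.best == "text/html"
# Quality decides between the concrete server-side options.
assert accept.best_match(["application/json", "text/plain"]) == "application/json"

# LanguageAccept falls back to the primary tag: an accepted "de-DE" still matches "de".
langs = LanguageAccept([("de-DE", 1), ("en", 0.5)])
assert langs.best_match(["de", "fr"]) == "de"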
@ -0,0 +1,54 @@
|
||||||
|
from collections.abc import Iterable
|
||||||
|
from collections.abc import Iterator
|
||||||
|
from typing import overload
|
||||||
|
|
||||||
|
from .structures import ImmutableList
|
||||||
|
|
||||||
|
class Accept(ImmutableList[tuple[str, int]]):
|
||||||
|
provided: bool
|
||||||
|
def __init__(
|
||||||
|
self, values: Accept | Iterable[tuple[str, float]] | None = None
|
||||||
|
) -> None: ...
|
||||||
|
def _specificity(self, value: str) -> tuple[bool, ...]: ...
|
||||||
|
def _value_matches(self, value: str, item: str) -> bool: ...
|
||||||
|
@overload # type: ignore
|
||||||
|
def __getitem__(self, key: str) -> int: ...
|
||||||
|
@overload
|
||||||
|
def __getitem__(self, key: int) -> tuple[str, int]: ...
|
||||||
|
@overload
|
||||||
|
def __getitem__(self, key: slice) -> Iterable[tuple[str, int]]: ...
|
||||||
|
def quality(self, key: str) -> int: ...
|
||||||
|
def __contains__(self, value: str) -> bool: ... # type: ignore
|
||||||
|
def index(self, key: str) -> int: ... # type: ignore
|
||||||
|
def find(self, key: str) -> int: ...
|
||||||
|
def values(self) -> Iterator[str]: ...
|
||||||
|
def to_header(self) -> str: ...
|
||||||
|
def _best_single_match(self, match: str) -> tuple[str, int] | None: ...
|
||||||
|
@overload
|
||||||
|
def best_match(self, matches: Iterable[str], default: str) -> str: ...
|
||||||
|
@overload
|
||||||
|
def best_match(
|
||||||
|
self, matches: Iterable[str], default: str | None = None
|
||||||
|
) -> str | None: ...
|
||||||
|
@property
|
||||||
|
def best(self) -> str: ...
|
||||||
|
|
||||||
|
def _normalize_mime(value: str) -> list[str]: ...
|
||||||
|
|
||||||
|
class MIMEAccept(Accept):
|
||||||
|
def _specificity(self, value: str) -> tuple[bool, ...]: ...
|
||||||
|
def _value_matches(self, value: str, item: str) -> bool: ...
|
||||||
|
@property
|
||||||
|
def accept_html(self) -> bool: ...
|
||||||
|
@property
|
||||||
|
def accept_xhtml(self) -> bool: ...
|
||||||
|
@property
|
||||||
|
def accept_json(self) -> bool: ...
|
||||||
|
|
||||||
|
def _normalize_lang(value: str) -> list[str]: ...
|
||||||
|
|
||||||
|
class LanguageAccept(Accept):
|
||||||
|
def _value_matches(self, value: str, item: str) -> bool: ...
|
||||||
|
|
||||||
|
class CharsetAccept(Accept):
|
||||||
|
def _value_matches(self, value: str, item: str) -> bool: ...
|
|
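The overloads above capture the dual ``__getitem__`` behaviour declared for ``Accept``: a string key returns the quality, while an integer key behaves like normal list indexing (illustrative):

from werkzeug.datastructures import CharsetAccept

a = CharsetAccept([("utf-8", 1), ("iso-8859-1", 0.5)])
assert a["utf-8"] == 1          # str key -> quality
assert a[0] == ("utf-8", 1)     # int key -> (value, quality) tuple
assert a.quality("ascii") == 0  # unknown values fall back to quality 0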
@ -0,0 +1,318 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import binascii
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ..http import dump_header
|
||||||
|
from ..http import parse_dict_header
|
||||||
|
from ..http import quote_header_value
|
||||||
|
from .structures import CallbackDict
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
import typing_extensions as te
|
||||||
|
|
||||||
|
|
||||||
|
class Authorization:
|
||||||
|
"""Represents the parts of an ``Authorization`` request header.
|
||||||
|
|
||||||
|
:attr:`.Request.authorization` returns an instance if the header is set.
|
||||||
|
|
||||||
|
An instance can be used with the test :class:`.Client` request methods' ``auth``
|
||||||
|
parameter to send the header in test requests.
|
||||||
|
|
||||||
|
Depending on the auth scheme, either :attr:`parameters` or :attr:`token` will be
|
||||||
|
set. The ``Basic`` scheme's token is decoded into the ``username`` and ``password``
|
||||||
|
parameters.
|
||||||
|
|
||||||
|
For convenience, ``auth["key"]`` and ``auth.key`` both access the key in the
|
||||||
|
:attr:`parameters` dict, along with ``auth.get("key")`` and ``"key" in auth``.
|
||||||
|
|
||||||
|
.. versionchanged:: 2.3
|
||||||
|
The ``token`` parameter and attribute was added to support auth schemes that use
|
||||||
|
a token instead of parameters, such as ``Bearer``.
|
||||||
|
|
||||||
|
.. versionchanged:: 2.3
|
||||||
|
The object is no longer a ``dict``.
|
||||||
|
|
||||||
|
.. versionchanged:: 0.5
|
||||||
|
The object is an immutable dict.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
auth_type: str,
|
||||||
|
data: dict[str, str | None] | None = None,
|
||||||
|
token: str | None = None,
|
||||||
|
) -> None:
|
||||||
|
self.type = auth_type
|
||||||
|
"""The authorization scheme, like ``basic``, ``digest``, or ``bearer``."""
|
||||||
|
|
||||||
|
if data is None:
|
||||||
|
data = {}
|
||||||
|
|
||||||
|
self.parameters = data
|
||||||
|
"""A dict of parameters parsed from the header. Either this or :attr:`token`
|
||||||
|
will have a value for a given scheme.
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.token = token
|
||||||
|
"""A token parsed from the header. Either this or :attr:`parameters` will have a
|
||||||
|
value for a given scheme.
|
||||||
|
|
||||||
|
.. versionadded:: 2.3
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __getattr__(self, name: str) -> str | None:
|
||||||
|
return self.parameters.get(name)
|
||||||
|
|
||||||
|
def __getitem__(self, name: str) -> str | None:
|
||||||
|
return self.parameters.get(name)
|
||||||
|
|
||||||
|
def get(self, key: str, default: str | None = None) -> str | None:
|
||||||
|
return self.parameters.get(key, default)
|
||||||
|
|
||||||
|
def __contains__(self, key: str) -> bool:
|
||||||
|
return key in self.parameters
|
||||||
|
|
||||||
|
def __eq__(self, other: object) -> bool:
|
||||||
|
if not isinstance(other, Authorization):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
return (
|
||||||
|
other.type == self.type
|
||||||
|
and other.token == self.token
|
||||||
|
and other.parameters == self.parameters
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_header(cls, value: str | None) -> te.Self | None:
|
||||||
|
"""Parse an ``Authorization`` header value and return an instance, or ``None``
|
||||||
|
if the value is empty.
|
||||||
|
|
||||||
|
:param value: The header value to parse.
|
||||||
|
|
||||||
|
.. versionadded:: 2.3
|
||||||
|
"""
|
||||||
|
if not value:
|
||||||
|
return None
|
||||||
|
|
||||||
|
scheme, _, rest = value.partition(" ")
|
||||||
|
scheme = scheme.lower()
|
||||||
|
rest = rest.strip()
|
||||||
|
|
||||||
|
if scheme == "basic":
|
||||||
|
try:
|
||||||
|
username, _, password = base64.b64decode(rest).decode().partition(":")
|
||||||
|
except (binascii.Error, UnicodeError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
return cls(scheme, {"username": username, "password": password})
|
||||||
|
|
||||||
|
if "=" in rest.rstrip("="):
|
||||||
|
# = that is not trailing, this is parameters.
|
||||||
|
return cls(scheme, parse_dict_header(rest), None)
|
||||||
|
|
||||||
|
# No = or only trailing =, this is a token.
|
||||||
|
return cls(scheme, None, rest)
|
||||||
|
|
||||||
|
def to_header(self) -> str:
|
||||||
|
"""Produce an ``Authorization`` header value representing this data.
|
||||||
|
|
||||||
|
.. versionadded:: 2.0
|
||||||
|
"""
|
||||||
|
if self.type == "basic":
|
||||||
|
value = base64.b64encode(
|
||||||
|
f"{self.username}:{self.password}".encode()
|
||||||
|
).decode("utf8")
|
||||||
|
return f"Basic {value}"
|
||||||
|
|
||||||
|
if self.token is not None:
|
||||||
|
return f"{self.type.title()} {self.token}"
|
||||||
|
|
||||||
|
return f"{self.type.title()} {dump_header(self.parameters)}"
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return self.to_header()
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"<{type(self).__name__} {self.to_header()}>"
|
||||||
|
|
||||||
|
|
||||||
|
class WWWAuthenticate:
|
||||||
|
"""Represents the parts of a ``WWW-Authenticate`` response header.
|
||||||
|
|
||||||
|
Set :attr:`.Response.www_authenticate` to an instance or a list of instances to set
|
||||||
|
values for this header in the response. Modifying this instance will modify the
|
||||||
|
header value.
|
||||||
|
|
||||||
|
Depending on the auth scheme, either :attr:`parameters` or :attr:`token` should be
|
||||||
|
set. The ``Basic`` scheme will encode ``username`` and ``password`` parameters to a
|
||||||
|
token.
|
||||||
|
|
||||||
|
For convenience, ``auth["key"]`` and ``auth.key`` both act on the :attr:`parameters`
|
||||||
|
dict, and can be used to get, set, or delete parameters. ``auth.get("key")`` and
|
||||||
|
``"key" in auth`` are also provided.
|
||||||
|
|
||||||
|
.. versionchanged:: 2.3
|
||||||
|
The ``token`` parameter and attribute was added to support auth schemes that use
|
||||||
|
a token instead of parameters, such as ``Bearer``.
|
||||||
|
|
||||||
|
.. versionchanged:: 2.3
|
||||||
|
The object is no longer a ``dict``.
|
||||||
|
|
||||||
|
.. versionchanged:: 2.3
|
||||||
|
The ``on_update`` parameter was removed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
auth_type: str,
|
||||||
|
values: dict[str, str | None] | None = None,
|
||||||
|
token: str | None = None,
|
||||||
|
):
|
||||||
|
self._type = auth_type.lower()
|
||||||
|
self._parameters: dict[str, str | None] = CallbackDict( # type: ignore[misc]
|
||||||
|
values, lambda _: self._trigger_on_update()
|
||||||
|
)
|
||||||
|
self._token = token
|
||||||
|
self._on_update: t.Callable[[WWWAuthenticate], None] | None = None
|
||||||
|
|
||||||
|
def _trigger_on_update(self) -> None:
|
||||||
|
if self._on_update is not None:
|
||||||
|
self._on_update(self)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def type(self) -> str:
|
||||||
|
"""The authorization scheme, like ``basic``, ``digest``, or ``bearer``."""
|
||||||
|
return self._type
|
||||||
|
|
||||||
|
@type.setter
|
||||||
|
def type(self, value: str) -> None:
|
||||||
|
self._type = value
|
||||||
|
self._trigger_on_update()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def parameters(self) -> dict[str, str | None]:
|
||||||
|
"""A dict of parameters for the header. Only one of this or :attr:`token` should
|
||||||
|
have a value for a given scheme.
|
||||||
|
"""
|
||||||
|
return self._parameters
|
||||||
|
|
||||||
|
@parameters.setter
|
||||||
|
def parameters(self, value: dict[str, str]) -> None:
|
||||||
|
self._parameters = CallbackDict( # type: ignore[misc]
|
||||||
|
value, lambda _: self._trigger_on_update()
|
||||||
|
)
|
||||||
|
self._trigger_on_update()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def token(self) -> str | None:
|
||||||
|
"""A dict of parameters for the header. Only one of this or :attr:`token` should
|
||||||
|
have a value for a given scheme.
|
||||||
|
"""
|
||||||
|
return self._token
|
||||||
|
|
||||||
|
@token.setter
|
||||||
|
def token(self, value: str | None) -> None:
|
||||||
|
"""A token for the header. Only one of this or :attr:`parameters` should have a
|
||||||
|
value for a given scheme.
|
||||||
|
|
||||||
|
.. versionadded:: 2.3
|
||||||
|
"""
|
||||||
|
self._token = value
|
||||||
|
self._trigger_on_update()
|
||||||
|
|
||||||
|
def __getitem__(self, key: str) -> str | None:
|
||||||
|
return self.parameters.get(key)
|
||||||
|
|
||||||
|
def __setitem__(self, key: str, value: str | None) -> None:
|
||||||
|
if value is None:
|
||||||
|
if key in self.parameters:
|
||||||
|
del self.parameters[key]
|
||||||
|
else:
|
||||||
|
self.parameters[key] = value
|
||||||
|
|
||||||
|
self._trigger_on_update()
|
||||||
|
|
||||||
|
def __delitem__(self, key: str) -> None:
|
||||||
|
if key in self.parameters:
|
||||||
|
del self.parameters[key]
|
||||||
|
self._trigger_on_update()
|
||||||
|
|
||||||
|
def __getattr__(self, name: str) -> str | None:
|
||||||
|
return self[name]
|
||||||
|
|
||||||
|
def __setattr__(self, name: str, value: str | None) -> None:
|
||||||
|
if name in {"_type", "_parameters", "_token", "_on_update"}:
|
||||||
|
super().__setattr__(name, value)
|
||||||
|
else:
|
||||||
|
self[name] = value
|
||||||
|
|
||||||
|
def __delattr__(self, name: str) -> None:
|
||||||
|
del self[name]
|
||||||
|
|
||||||
|
def __contains__(self, key: str) -> bool:
|
||||||
|
return key in self.parameters
|
||||||
|
|
||||||
|
def __eq__(self, other: object) -> bool:
|
||||||
|
if not isinstance(other, WWWAuthenticate):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
return (
|
||||||
|
other.type == self.type
|
||||||
|
and other.token == self.token
|
||||||
|
and other.parameters == self.parameters
|
||||||
|
)
|
||||||
|
|
||||||
|
def get(self, key: str, default: str | None = None) -> str | None:
|
||||||
|
return self.parameters.get(key, default)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_header(cls, value: str | None) -> te.Self | None:
|
||||||
|
"""Parse a ``WWW-Authenticate`` header value and return an instance, or ``None``
|
||||||
|
if the value is empty.
|
||||||
|
|
||||||
|
:param value: The header value to parse.
|
||||||
|
|
||||||
|
.. versionadded:: 2.3
|
||||||
|
"""
|
||||||
|
if not value:
|
||||||
|
return None
|
||||||
|
|
||||||
|
scheme, _, rest = value.partition(" ")
|
||||||
|
scheme = scheme.lower()
|
||||||
|
rest = rest.strip()
|
||||||
|
|
||||||
|
if "=" in rest.rstrip("="):
|
||||||
|
# = that is not trailing, this is parameters.
|
||||||
|
return cls(scheme, parse_dict_header(rest), None)
|
||||||
|
|
||||||
|
# No = or only trailing =, this is a token.
|
||||||
|
return cls(scheme, None, rest)
|
||||||
|
|
||||||
|
def to_header(self) -> str:
|
||||||
|
"""Produce a ``WWW-Authenticate`` header value representing this data."""
|
||||||
|
if self.token is not None:
|
||||||
|
return f"{self.type.title()} {self.token}"
|
||||||
|
|
||||||
|
if self.type == "digest":
|
||||||
|
items = []
|
||||||
|
|
||||||
|
for key, value in self.parameters.items():
|
||||||
|
if key in {"realm", "domain", "nonce", "opaque", "qop"}:
|
||||||
|
value = quote_header_value(value, allow_token=False)
|
||||||
|
else:
|
||||||
|
value = quote_header_value(value)
|
||||||
|
|
||||||
|
items.append(f"{key}={value}")
|
||||||
|
|
||||||
|
return f"Digest {', '.join(items)}"
|
||||||
|
|
||||||
|
return f"{self.type.title()} {dump_header(self.parameters)}"
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return self.to_header()
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"<{type(self).__name__} {self.to_header()}>"
|
|
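A short sketch of the parsing and serialisation round trip implemented above (the header values are examples only):

from werkzeug.datastructures import Authorization, WWWAuthenticate

# "dXNlcjpwYXNz" is base64 for "user:pass", so the Basic token is decoded
# into the username/password parameters.
auth = Authorization.from_header("Basic dXNlcjpwYXNz")
assert auth is not None and auth.type == "basic"
assert auth.username == "user" and auth.password == "pass"

bearer = Authorization.from_header("Bearer some-opaque-token")
assert bearer is not None and bearer.token == "some-opaque-token"

challenge = WWWAuthenticate("basic", {"realm": "example"})
print(challenge.to_header())  # roughly: Basic realm=example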
@ -0,0 +1,175 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from .mixins import ImmutableDictMixin
|
||||||
|
from .mixins import UpdateDictMixin
|
||||||
|
|
||||||
|
|
||||||
|
def cache_control_property(key, empty, type):
|
||||||
|
"""Return a new property object for a cache header. Useful if you
|
||||||
|
want to add support for a cache extension in a subclass.
|
||||||
|
|
||||||
|
.. versionchanged:: 2.0
|
||||||
|
Renamed from ``cache_property``.
|
||||||
|
"""
|
||||||
|
return property(
|
||||||
|
lambda x: x._get_cache_value(key, empty, type),
|
||||||
|
lambda x, v: x._set_cache_value(key, v, type),
|
||||||
|
lambda x: x._del_cache_value(key),
|
||||||
|
f"accessor for {key!r}",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class _CacheControl(UpdateDictMixin, dict):
|
||||||
|
"""Subclass of a dict that stores values for a Cache-Control header. It
|
||||||
|
has accessors for all the cache-control directives specified in RFC 2616.
|
||||||
|
The class does not differentiate between request and response directives.
|
||||||
|
|
||||||
|
Because the cache-control directives in the HTTP header use dashes, the
|
||||||
|
Python descriptors use underscores instead.
|
||||||
|
|
||||||
|
To get a header of the :class:`CacheControl` object again you can convert
|
||||||
|
the object into a string or call the :meth:`to_header` method. If you plan
|
||||||
|
to subclass it and add your own items, have a look at the source code for
|
||||||
|
that class.
|
||||||
|
|
||||||
|
.. versionchanged:: 2.1.0
|
||||||
|
Setting int properties such as ``max_age`` will convert the
|
||||||
|
value to an int.
|
||||||
|
|
||||||
|
.. versionchanged:: 0.4
|
||||||
|
|
||||||
|
Setting `no_cache` or `private` to boolean `True` will set the implicit
|
||||||
|
none-value which is ``*``:
|
||||||
|
|
||||||
|
>>> cc = ResponseCacheControl()
|
||||||
|
>>> cc.no_cache = True
|
||||||
|
>>> cc
|
||||||
|
<ResponseCacheControl 'no-cache'>
|
||||||
|
>>> cc.no_cache
|
||||||
|
'*'
|
||||||
|
>>> cc.no_cache = None
|
||||||
|
>>> cc
|
||||||
|
<ResponseCacheControl ''>
|
||||||
|
|
||||||
|
In versions before 0.5 the behavior documented here affected the now
|
||||||
|
no longer existing `CacheControl` class.
|
||||||
|
"""
|
||||||
|
|
||||||
|
no_cache = cache_control_property("no-cache", "*", None)
|
||||||
|
no_store = cache_control_property("no-store", None, bool)
|
||||||
|
max_age = cache_control_property("max-age", -1, int)
|
||||||
|
no_transform = cache_control_property("no-transform", None, None)
|
||||||
|
|
||||||
|
def __init__(self, values=(), on_update=None):
|
||||||
|
dict.__init__(self, values or ())
|
||||||
|
self.on_update = on_update
|
||||||
|
self.provided = values is not None
|
||||||
|
|
||||||
|
def _get_cache_value(self, key, empty, type):
|
||||||
|
"""Used internally by the accessor properties."""
|
||||||
|
if type is bool:
|
||||||
|
return key in self
|
||||||
|
if key in self:
|
||||||
|
value = self[key]
|
||||||
|
if value is None:
|
||||||
|
return empty
|
||||||
|
elif type is not None:
|
||||||
|
try:
|
||||||
|
value = type(value)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
return value
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _set_cache_value(self, key, value, type):
|
||||||
|
"""Used internally by the accessor properties."""
|
||||||
|
if type is bool:
|
||||||
|
if value:
|
||||||
|
self[key] = None
|
||||||
|
else:
|
||||||
|
self.pop(key, None)
|
||||||
|
else:
|
||||||
|
if value is None:
|
||||||
|
self.pop(key, None)
|
||||||
|
elif value is True:
|
||||||
|
self[key] = None
|
||||||
|
else:
|
||||||
|
if type is not None:
|
||||||
|
self[key] = type(value)
|
||||||
|
else:
|
||||||
|
self[key] = value
|
||||||
|
|
||||||
|
def _del_cache_value(self, key):
|
||||||
|
"""Used internally by the accessor properties."""
|
||||||
|
if key in self:
|
||||||
|
del self[key]
|
||||||
|
|
||||||
|
def to_header(self):
|
||||||
|
"""Convert the stored values into a cache control header."""
|
||||||
|
return http.dump_header(self)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.to_header()
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
kv_str = " ".join(f"{k}={v!r}" for k, v in sorted(self.items()))
|
||||||
|
return f"<{type(self).__name__} {kv_str}>"
|
||||||
|
|
||||||
|
cache_property = staticmethod(cache_control_property)
|
||||||
|
|
||||||
|
|
||||||
|
class RequestCacheControl(ImmutableDictMixin, _CacheControl):
|
||||||
|
"""A cache control for requests. This is immutable and gives access
|
||||||
|
to all the request-relevant cache control headers.
|
||||||
|
|
||||||
|
To get a header of the :class:`RequestCacheControl` object again you can
|
||||||
|
convert the object into a string or call the :meth:`to_header` method. If
|
||||||
|
you plan to subclass it and add your own items, have a look at the source code
|
||||||
|
for that class.
|
||||||
|
|
||||||
|
.. versionchanged:: 2.1.0
|
||||||
|
Setting int properties such as ``max_age`` will convert the
|
||||||
|
value to an int.
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
|
In previous versions a `CacheControl` class existed that was used
|
||||||
|
both for request and response.
|
||||||
|
"""
|
||||||
|
|
||||||
|
max_stale = cache_control_property("max-stale", "*", int)
|
||||||
|
min_fresh = cache_control_property("min-fresh", "*", int)
|
||||||
|
only_if_cached = cache_control_property("only-if-cached", None, bool)
|
||||||
|
|
||||||
|
|
||||||
|
class ResponseCacheControl(_CacheControl):
|
||||||
|
"""A cache control for responses. Unlike :class:`RequestCacheControl`
|
||||||
|
this is mutable and gives access to response-relevant cache control
|
||||||
|
headers.
|
||||||
|
|
||||||
|
To get a header of the :class:`ResponseCacheControl` object again you can
|
||||||
|
convert the object into a string or call the :meth:`to_header` method. If
|
||||||
|
you plan to subclass it and add your own items, have a look at the source code
|
||||||
|
for that class.
|
||||||
|
|
||||||
|
.. versionchanged:: 2.1.1
|
||||||
|
``s_maxage`` converts the value to an int.
|
||||||
|
|
||||||
|
.. versionchanged:: 2.1.0
|
||||||
|
Setting int properties such as ``max_age`` will convert the
|
||||||
|
value to an int.
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
|
In previous versions a `CacheControl` class existed that was used
|
||||||
|
both for request and response.
|
||||||
|
"""
|
||||||
|
|
||||||
|
public = cache_control_property("public", None, bool)
|
||||||
|
private = cache_control_property("private", "*", None)
|
||||||
|
must_revalidate = cache_control_property("must-revalidate", None, bool)
|
||||||
|
proxy_revalidate = cache_control_property("proxy-revalidate", None, bool)
|
||||||
|
s_maxage = cache_control_property("s-maxage", None, int)
|
||||||
|
immutable = cache_control_property("immutable", None, bool)
|
||||||
|
|
||||||
|
|
||||||
|
# circular dependencies
|
||||||
|
from .. import http
|
|
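An illustrative sketch of the accessor properties defined above (the exact serialisation comes from ``dump_header``):

from werkzeug.datastructures import ResponseCacheControl

cc = ResponseCacheControl()
cc.max_age = 3600    # int-typed directive, stored under "max-age"
cc.no_cache = True   # stores the valueless form of the directive
cc.public = True     # bool-typed directive
print(cc.to_header())  # roughly: max-age=3600, no-cache, public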
@ -0,0 +1,109 @@
|
||||||
|
from collections.abc import Callable
|
||||||
|
from collections.abc import Iterable
|
||||||
|
from collections.abc import Mapping
|
||||||
|
from typing import TypeVar
|
||||||
|
|
||||||
|
from .mixins import ImmutableDictMixin
|
||||||
|
from .mixins import UpdateDictMixin
|
||||||
|
|
||||||
|
T = TypeVar("T")
|
||||||
|
_CPT = TypeVar("_CPT", str, int, bool)
|
||||||
|
_OptCPT = _CPT | None
|
||||||
|
|
||||||
|
def cache_control_property(key: str, empty: _OptCPT, type: type[_CPT]) -> property: ...
|
||||||
|
|
||||||
|
class _CacheControl(UpdateDictMixin[str, _OptCPT], dict[str, _OptCPT]):
|
||||||
|
provided: bool
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
values: Mapping[str, _OptCPT] | Iterable[tuple[str, _OptCPT]] = (),
|
||||||
|
on_update: Callable[[_CacheControl], None] | None = None,
|
||||||
|
) -> None: ...
|
||||||
|
@property
|
||||||
|
def no_cache(self) -> bool | None: ...
|
||||||
|
@no_cache.setter
|
||||||
|
def no_cache(self, value: bool | None) -> None: ...
|
||||||
|
@no_cache.deleter
|
||||||
|
def no_cache(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def no_store(self) -> bool | None: ...
|
||||||
|
@no_store.setter
|
||||||
|
def no_store(self, value: bool | None) -> None: ...
|
||||||
|
@no_store.deleter
|
||||||
|
def no_store(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def max_age(self) -> int | None: ...
|
||||||
|
@max_age.setter
|
||||||
|
def max_age(self, value: int | None) -> None: ...
|
||||||
|
@max_age.deleter
|
||||||
|
def max_age(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def no_transform(self) -> bool | None: ...
|
||||||
|
@no_transform.setter
|
||||||
|
def no_transform(self, value: bool | None) -> None: ...
|
||||||
|
@no_transform.deleter
|
||||||
|
def no_transform(self) -> None: ...
|
||||||
|
def _get_cache_value(self, key: str, empty: T | None, type: type[T]) -> T: ...
|
||||||
|
def _set_cache_value(self, key: str, value: T | None, type: type[T]) -> None: ...
|
||||||
|
def _del_cache_value(self, key: str) -> None: ...
|
||||||
|
def to_header(self) -> str: ...
|
||||||
|
@staticmethod
|
||||||
|
def cache_property(key: str, empty: _OptCPT, type: type[_CPT]) -> property: ...
|
||||||
|
|
||||||
|
class RequestCacheControl(ImmutableDictMixin[str, _OptCPT], _CacheControl):
|
||||||
|
@property
|
||||||
|
def max_stale(self) -> int | None: ...
|
||||||
|
@max_stale.setter
|
||||||
|
def max_stale(self, value: int | None) -> None: ...
|
||||||
|
@max_stale.deleter
|
||||||
|
def max_stale(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def min_fresh(self) -> int | None: ...
|
||||||
|
@min_fresh.setter
|
||||||
|
def min_fresh(self, value: int | None) -> None: ...
|
||||||
|
@min_fresh.deleter
|
||||||
|
def min_fresh(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def only_if_cached(self) -> bool | None: ...
|
||||||
|
@only_if_cached.setter
|
||||||
|
def only_if_cached(self, value: bool | None) -> None: ...
|
||||||
|
@only_if_cached.deleter
|
||||||
|
def only_if_cached(self) -> None: ...
|
||||||
|
|
||||||
|
class ResponseCacheControl(_CacheControl):
|
||||||
|
@property
|
||||||
|
def public(self) -> bool | None: ...
|
||||||
|
@public.setter
|
||||||
|
def public(self, value: bool | None) -> None: ...
|
||||||
|
@public.deleter
|
||||||
|
def public(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def private(self) -> bool | None: ...
|
||||||
|
@private.setter
|
||||||
|
def private(self, value: bool | None) -> None: ...
|
||||||
|
@private.deleter
|
||||||
|
def private(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def must_revalidate(self) -> bool | None: ...
|
||||||
|
@must_revalidate.setter
|
||||||
|
def must_revalidate(self, value: bool | None) -> None: ...
|
||||||
|
@must_revalidate.deleter
|
||||||
|
def must_revalidate(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def proxy_revalidate(self) -> bool | None: ...
|
||||||
|
@proxy_revalidate.setter
|
||||||
|
def proxy_revalidate(self, value: bool | None) -> None: ...
|
||||||
|
@proxy_revalidate.deleter
|
||||||
|
def proxy_revalidate(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def s_maxage(self) -> int | None: ...
|
||||||
|
@s_maxage.setter
|
||||||
|
def s_maxage(self, value: int | None) -> None: ...
|
||||||
|
@s_maxage.deleter
|
||||||
|
def s_maxage(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def immutable(self) -> bool | None: ...
|
||||||
|
@immutable.setter
|
||||||
|
def immutable(self, value: bool | None) -> None: ...
|
||||||
|
@immutable.deleter
|
||||||
|
def immutable(self) -> None: ...
|
|
@ -0,0 +1,94 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from .mixins import UpdateDictMixin
|
||||||
|
|
||||||
|
|
||||||
|
def csp_property(key):
|
||||||
|
"""Return a new property object for a content security policy header.
|
||||||
|
Useful if you want to add support for a csp extension in a
|
||||||
|
subclass.
|
||||||
|
"""
|
||||||
|
return property(
|
||||||
|
lambda x: x._get_value(key),
|
||||||
|
lambda x, v: x._set_value(key, v),
|
||||||
|
lambda x: x._del_value(key),
|
||||||
|
f"accessor for {key!r}",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ContentSecurityPolicy(UpdateDictMixin, dict):
|
||||||
|
"""Subclass of a dict that stores values for a Content Security Policy
|
||||||
|
header. It has accessors for all the level 3 policies.
|
||||||
|
|
||||||
|
Because the CSP directives in the HTTP header use dashes, the
|
||||||
|
Python descriptors use underscores instead.
|
||||||
|
|
||||||
|
To get a header of the :class:`ContentSecurityPolicy` object again
|
||||||
|
you can convert the object into a string or call the
|
||||||
|
:meth:`to_header` method. If you plan to subclass it and add your
|
||||||
|
own items, have a look at the source code for that class.
|
||||||
|
|
||||||
|
.. versionadded:: 1.0.0
|
||||||
|
Support for Content Security Policy headers was added.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
base_uri = csp_property("base-uri")
|
||||||
|
child_src = csp_property("child-src")
|
||||||
|
connect_src = csp_property("connect-src")
|
||||||
|
default_src = csp_property("default-src")
|
||||||
|
font_src = csp_property("font-src")
|
||||||
|
form_action = csp_property("form-action")
|
||||||
|
frame_ancestors = csp_property("frame-ancestors")
|
||||||
|
frame_src = csp_property("frame-src")
|
||||||
|
img_src = csp_property("img-src")
|
||||||
|
manifest_src = csp_property("manifest-src")
|
||||||
|
media_src = csp_property("media-src")
|
||||||
|
navigate_to = csp_property("navigate-to")
|
||||||
|
object_src = csp_property("object-src")
|
||||||
|
prefetch_src = csp_property("prefetch-src")
|
||||||
|
plugin_types = csp_property("plugin-types")
|
||||||
|
report_to = csp_property("report-to")
|
||||||
|
report_uri = csp_property("report-uri")
|
||||||
|
sandbox = csp_property("sandbox")
|
||||||
|
script_src = csp_property("script-src")
|
||||||
|
script_src_attr = csp_property("script-src-attr")
|
||||||
|
script_src_elem = csp_property("script-src-elem")
|
||||||
|
style_src = csp_property("style-src")
|
||||||
|
style_src_attr = csp_property("style-src-attr")
|
||||||
|
style_src_elem = csp_property("style-src-elem")
|
||||||
|
worker_src = csp_property("worker-src")
|
||||||
|
|
||||||
|
def __init__(self, values=(), on_update=None):
|
||||||
|
dict.__init__(self, values or ())
|
||||||
|
self.on_update = on_update
|
||||||
|
self.provided = values is not None
|
||||||
|
|
||||||
|
def _get_value(self, key):
|
||||||
|
"""Used internally by the accessor properties."""
|
||||||
|
return self.get(key)
|
||||||
|
|
||||||
|
def _set_value(self, key, value):
|
||||||
|
"""Used internally by the accessor properties."""
|
||||||
|
if value is None:
|
||||||
|
self.pop(key, None)
|
||||||
|
else:
|
||||||
|
self[key] = value
|
||||||
|
|
||||||
|
def _del_value(self, key):
|
||||||
|
"""Used internally by the accessor properties."""
|
||||||
|
if key in self:
|
||||||
|
del self[key]
|
||||||
|
|
||||||
|
def to_header(self):
|
||||||
|
"""Convert the stored values into a cache control header."""
|
||||||
|
from ..http import dump_csp_header
|
||||||
|
|
||||||
|
return dump_csp_header(self)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.to_header()
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
kv_str = " ".join(f"{k}={v!r}" for k, v in sorted(self.items()))
|
||||||
|
return f"<{type(self).__name__} {kv_str}>"
|
|
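An illustrative sketch of the accessors above (the exact output format comes from ``dump_csp_header``; the source lists are assumptions):

from werkzeug.datastructures import ContentSecurityPolicy

csp = ContentSecurityPolicy()
csp.default_src = "'self'"
csp.img_src = "*"
csp.script_src = "'self' cdn.example.com"  # assumed source list
print(csp.to_header())  # roughly: default-src 'self'; img-src *; script-src 'self' cdn.example.com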
@ -0,0 +1,169 @@
|
||||||
|
from collections.abc import Callable
|
||||||
|
from collections.abc import Iterable
|
||||||
|
from collections.abc import Mapping
|
||||||
|
|
||||||
|
from .mixins import UpdateDictMixin
|
||||||
|
|
||||||
|
def csp_property(key: str) -> property: ...
|
||||||
|
|
||||||
|
class ContentSecurityPolicy(UpdateDictMixin[str, str], dict[str, str]):
|
||||||
|
@property
|
||||||
|
def base_uri(self) -> str | None: ...
|
||||||
|
@base_uri.setter
|
||||||
|
def base_uri(self, value: str | None) -> None: ...
|
||||||
|
@base_uri.deleter
|
||||||
|
def base_uri(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def child_src(self) -> str | None: ...
|
||||||
|
@child_src.setter
|
||||||
|
def child_src(self, value: str | None) -> None: ...
|
||||||
|
@child_src.deleter
|
||||||
|
def child_src(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def connect_src(self) -> str | None: ...
|
||||||
|
@connect_src.setter
|
||||||
|
def connect_src(self, value: str | None) -> None: ...
|
||||||
|
@connect_src.deleter
|
||||||
|
def connect_src(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def default_src(self) -> str | None: ...
|
||||||
|
@default_src.setter
|
||||||
|
def default_src(self, value: str | None) -> None: ...
|
||||||
|
@default_src.deleter
|
||||||
|
def default_src(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def font_src(self) -> str | None: ...
|
||||||
|
@font_src.setter
|
||||||
|
def font_src(self, value: str | None) -> None: ...
|
||||||
|
@font_src.deleter
|
||||||
|
def font_src(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def form_action(self) -> str | None: ...
|
||||||
|
@form_action.setter
|
||||||
|
def form_action(self, value: str | None) -> None: ...
|
||||||
|
@form_action.deleter
|
||||||
|
def form_action(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def frame_ancestors(self) -> str | None: ...
|
||||||
|
@frame_ancestors.setter
|
||||||
|
def frame_ancestors(self, value: str | None) -> None: ...
|
||||||
|
@frame_ancestors.deleter
|
||||||
|
def frame_ancestors(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def frame_src(self) -> str | None: ...
|
||||||
|
@frame_src.setter
|
||||||
|
def frame_src(self, value: str | None) -> None: ...
|
||||||
|
@frame_src.deleter
|
||||||
|
def frame_src(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def img_src(self) -> str | None: ...
|
||||||
|
@img_src.setter
|
||||||
|
def img_src(self, value: str | None) -> None: ...
|
||||||
|
@img_src.deleter
|
||||||
|
def img_src(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def manifest_src(self) -> str | None: ...
|
||||||
|
@manifest_src.setter
|
||||||
|
def manifest_src(self, value: str | None) -> None: ...
|
||||||
|
@manifest_src.deleter
|
||||||
|
def manifest_src(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def media_src(self) -> str | None: ...
|
||||||
|
@media_src.setter
|
||||||
|
def media_src(self, value: str | None) -> None: ...
|
||||||
|
@media_src.deleter
|
||||||
|
def media_src(self) -> None: ...
|
||||||
|
@property
|
||||||
|
def navigate_to(self) -> str | None: ...
|
||||||
|
@navigate_to.setter
|
||||||
|
def navigate_to(self, value: str | None) -> None: ...
|
||||||
|
@navigate_to.deleter
|
||||||
|
def navigate_to(self) -> None: ...
|
||||||
|
@property
|
||||||
|
    def object_src(self) -> str | None: ...
    @object_src.setter
    def object_src(self, value: str | None) -> None: ...
    @object_src.deleter
    def object_src(self) -> None: ...
    @property
    def prefetch_src(self) -> str | None: ...
    @prefetch_src.setter
    def prefetch_src(self, value: str | None) -> None: ...
    @prefetch_src.deleter
    def prefetch_src(self) -> None: ...
    @property
    def plugin_types(self) -> str | None: ...
    @plugin_types.setter
    def plugin_types(self, value: str | None) -> None: ...
    @plugin_types.deleter
    def plugin_types(self) -> None: ...
    @property
    def report_to(self) -> str | None: ...
    @report_to.setter
    def report_to(self, value: str | None) -> None: ...
    @report_to.deleter
    def report_to(self) -> None: ...
    @property
    def report_uri(self) -> str | None: ...
    @report_uri.setter
    def report_uri(self, value: str | None) -> None: ...
    @report_uri.deleter
    def report_uri(self) -> None: ...
    @property
    def sandbox(self) -> str | None: ...
    @sandbox.setter
    def sandbox(self, value: str | None) -> None: ...
    @sandbox.deleter
    def sandbox(self) -> None: ...
    @property
    def script_src(self) -> str | None: ...
    @script_src.setter
    def script_src(self, value: str | None) -> None: ...
    @script_src.deleter
    def script_src(self) -> None: ...
    @property
    def script_src_attr(self) -> str | None: ...
    @script_src_attr.setter
    def script_src_attr(self, value: str | None) -> None: ...
    @script_src_attr.deleter
    def script_src_attr(self) -> None: ...
    @property
    def script_src_elem(self) -> str | None: ...
    @script_src_elem.setter
    def script_src_elem(self, value: str | None) -> None: ...
    @script_src_elem.deleter
    def script_src_elem(self) -> None: ...
    @property
    def style_src(self) -> str | None: ...
    @style_src.setter
    def style_src(self, value: str | None) -> None: ...
    @style_src.deleter
    def style_src(self) -> None: ...
    @property
    def style_src_attr(self) -> str | None: ...
    @style_src_attr.setter
    def style_src_attr(self, value: str | None) -> None: ...
    @style_src_attr.deleter
    def style_src_attr(self) -> None: ...
    @property
    def style_src_elem(self) -> str | None: ...
    @style_src_elem.setter
    def style_src_elem(self, value: str | None) -> None: ...
    @style_src_elem.deleter
    def style_src_elem(self) -> None: ...
    @property
    def worker_src(self) -> str | None: ...
    @worker_src.setter
    def worker_src(self, value: str | None) -> None: ...
    @worker_src.deleter
    def worker_src(self) -> None: ...
    provided: bool
    def __init__(
        self,
        values: Mapping[str, str] | Iterable[tuple[str, str]] = (),
        on_update: Callable[[ContentSecurityPolicy], None] | None = None,
    ) -> None: ...
    def _get_value(self, key: str) -> str | None: ...
    def _set_value(self, key: str, value: str) -> None: ...
    def _del_value(self, key: str) -> None: ...
    def to_header(self) -> str: ...
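A short usage sketch for the stubbed class above (not part of the diff; the attribute names mirror the properties declared in the stub, and the exact header text shown in the comment is illustrative):

    from werkzeug.datastructures import ContentSecurityPolicy

    csp = ContentSecurityPolicy()
    csp.script_src = "'self'"
    csp.report_uri = "/csp-reports"
    # to_header() serializes the stored directives into a
    # Content-Security-Policy header value, roughly
    # "script-src 'self'; report-uri /csp-reports"
    print(csp.to_header())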
@ -0,0 +1,95 @@
from __future__ import annotations

from collections.abc import Collection


class ETags(Collection):
    """A set that can be used to check if one etag is present in a collection
    of etags.
    """

    def __init__(self, strong_etags=None, weak_etags=None, star_tag=False):
        if not star_tag and strong_etags:
            self._strong = frozenset(strong_etags)
        else:
            self._strong = frozenset()

        self._weak = frozenset(weak_etags or ())
        self.star_tag = star_tag

    def as_set(self, include_weak=False):
        """Convert the `ETags` object into a python set. Per default all the
        weak etags are not part of this set."""
        rv = set(self._strong)
        if include_weak:
            rv.update(self._weak)
        return rv

    def is_weak(self, etag):
        """Check if an etag is weak."""
        return etag in self._weak

    def is_strong(self, etag):
        """Check if an etag is strong."""
        return etag in self._strong

    def contains_weak(self, etag):
        """Check if an etag is part of the set including weak and strong tags."""
        return self.is_weak(etag) or self.contains(etag)

    def contains(self, etag):
        """Check if an etag is part of the set ignoring weak tags.
        It is also possible to use the ``in`` operator.
        """
        if self.star_tag:
            return True
        return self.is_strong(etag)

    def contains_raw(self, etag):
        """When passed a quoted tag it will check if this tag is part of the
        set. If the tag is weak it is checked against weak and strong tags,
        otherwise strong only."""
        from ..http import unquote_etag

        etag, weak = unquote_etag(etag)
        if weak:
            return self.contains_weak(etag)
        return self.contains(etag)

    def to_header(self):
        """Convert the etags set into a HTTP header string."""
        if self.star_tag:
            return "*"
        return ", ".join(
            [f'"{x}"' for x in self._strong] + [f'W/"{x}"' for x in self._weak]
        )

    def __call__(self, etag=None, data=None, include_weak=False):
        if [etag, data].count(None) != 1:
            raise TypeError("either tag or data required, but at least one")
        if etag is None:
            from ..http import generate_etag

            etag = generate_etag(data)
        if include_weak:
            if etag in self._weak:
                return True
        return etag in self._strong

    def __bool__(self):
        return bool(self.star_tag or self._strong or self._weak)

    def __str__(self):
        return self.to_header()

    def __len__(self):
        return len(self._strong)

    def __iter__(self):
        return iter(self._strong)

    def __contains__(self, etag):
        return self.contains(etag)

    def __repr__(self):
        return f"<{type(self).__name__} {str(self)!r}>"
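A brief usage sketch for the ETags class above (illustrative only, not part of the diff; the tag values are made up):

    etags = ETags(strong_etags=["abc123"], weak_etags=["xyz789"])
    assert "abc123" in etags             # __contains__ ignores weak tags
    assert etags.contains_weak("xyz789")
    assert etags.contains_raw('W/"xyz789"')
    print(etags.to_header())             # '"abc123", W/"xyz789"'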
@ -0,0 +1,30 @@
from collections.abc import Collection
from collections.abc import Iterable
from collections.abc import Iterator

class ETags(Collection[str]):
    _strong: frozenset[str]
    _weak: frozenset[str]
    star_tag: bool
    def __init__(
        self,
        strong_etags: Iterable[str] | None = None,
        weak_etags: Iterable[str] | None = None,
        star_tag: bool = False,
    ) -> None: ...
    def as_set(self, include_weak: bool = False) -> set[str]: ...
    def is_weak(self, etag: str) -> bool: ...
    def is_strong(self, etag: str) -> bool: ...
    def contains_weak(self, etag: str) -> bool: ...
    def contains(self, etag: str) -> bool: ...
    def contains_raw(self, etag: str) -> bool: ...
    def to_header(self) -> str: ...
    def __call__(
        self,
        etag: str | None = None,
        data: bytes | None = None,
        include_weak: bool = False,
    ) -> bool: ...
    def __len__(self) -> int: ...
    def __iter__(self) -> Iterator[str]: ...
    def __contains__(self, item: str) -> bool: ...  # type: ignore
@ -0,0 +1,196 @@
from __future__ import annotations

import mimetypes
from io import BytesIO
from os import fsdecode
from os import fspath

from .._internal import _plain_int
from .structures import MultiDict


class FileStorage:
    """The :class:`FileStorage` class is a thin wrapper over incoming files.
    It is used by the request object to represent uploaded files. All the
    attributes of the wrapper stream are proxied by the file storage so
    it's possible to do ``storage.read()`` instead of the long form
    ``storage.stream.read()``.
    """

    def __init__(
        self,
        stream=None,
        filename=None,
        name=None,
        content_type=None,
        content_length=None,
        headers=None,
    ):
        self.name = name
        self.stream = stream or BytesIO()

        # If no filename is provided, attempt to get the filename from
        # the stream object. Python names special streams like
        # ``<stderr>`` with angular brackets, skip these streams.
        if filename is None:
            filename = getattr(stream, "name", None)

            if filename is not None:
                filename = fsdecode(filename)

            if filename and filename[0] == "<" and filename[-1] == ">":
                filename = None
        else:
            filename = fsdecode(filename)

        self.filename = filename

        if headers is None:
            from .headers import Headers

            headers = Headers()
        self.headers = headers
        if content_type is not None:
            headers["Content-Type"] = content_type
        if content_length is not None:
            headers["Content-Length"] = str(content_length)

    def _parse_content_type(self):
        if not hasattr(self, "_parsed_content_type"):
            self._parsed_content_type = http.parse_options_header(self.content_type)

    @property
    def content_type(self):
        """The content-type sent in the header. Usually not available"""
        return self.headers.get("content-type")

    @property
    def content_length(self):
        """The content-length sent in the header. Usually not available"""
        if "content-length" in self.headers:
            try:
                return _plain_int(self.headers["content-length"])
            except ValueError:
                pass

        return 0

    @property
    def mimetype(self):
        """Like :attr:`content_type`, but without parameters (eg, without
        charset, type etc.) and always lowercase. For example if the content
        type is ``text/HTML; charset=utf-8`` the mimetype would be
        ``'text/html'``.

        .. versionadded:: 0.7
        """
        self._parse_content_type()
        return self._parsed_content_type[0].lower()

    @property
    def mimetype_params(self):
        """The mimetype parameters as dict. For example if the content
        type is ``text/html; charset=utf-8`` the params would be
        ``{'charset': 'utf-8'}``.

        .. versionadded:: 0.7
        """
        self._parse_content_type()
        return self._parsed_content_type[1]

    def save(self, dst, buffer_size=16384):
        """Save the file to a destination path or file object. If the
        destination is a file object you have to close it yourself after the
        call. The buffer size is the number of bytes held in memory during
        the copy process. It defaults to 16KB.

        For secure file saving also have a look at :func:`secure_filename`.

        :param dst: a filename, :class:`os.PathLike`, or open file
            object to write to.
        :param buffer_size: Passed as the ``length`` parameter of
            :func:`shutil.copyfileobj`.

        .. versionchanged:: 1.0
            Supports :mod:`pathlib`.
        """
        from shutil import copyfileobj

        close_dst = False

        if hasattr(dst, "__fspath__"):
            dst = fspath(dst)

        if isinstance(dst, str):
            dst = open(dst, "wb")
            close_dst = True

        try:
            copyfileobj(self.stream, dst, buffer_size)
        finally:
            if close_dst:
                dst.close()

    def close(self):
        """Close the underlying file if possible."""
        try:
            self.stream.close()
        except Exception:
            pass

    def __bool__(self):
        return bool(self.filename)

    def __getattr__(self, name):
        try:
            return getattr(self.stream, name)
        except AttributeError:
            # SpooledTemporaryFile doesn't implement IOBase, get the
            # attribute from its backing file instead.
            # https://github.com/python/cpython/pull/3249
            if hasattr(self.stream, "_file"):
                return getattr(self.stream._file, name)
            raise

    def __iter__(self):
        return iter(self.stream)

    def __repr__(self):
        return f"<{type(self).__name__}: {self.filename!r} ({self.content_type!r})>"


class FileMultiDict(MultiDict):
    """A special :class:`MultiDict` that has convenience methods to add
    files to it. This is used for :class:`EnvironBuilder` and generally
    useful for unittesting.

    .. versionadded:: 0.5
    """

    def add_file(self, name, file, filename=None, content_type=None):
        """Adds a new file to the dict. `file` can be a file name or
        a :class:`file`-like or a :class:`FileStorage` object.

        :param name: the name of the field.
        :param file: a filename or :class:`file`-like object
        :param filename: an optional filename
        :param content_type: an optional content type
        """
        if isinstance(file, FileStorage):
            value = file
        else:
            if isinstance(file, str):
                if filename is None:
                    filename = file
                file = open(file, "rb")
            if filename and content_type is None:
                content_type = (
                    mimetypes.guess_type(filename)[0] or "application/octet-stream"
                )
            value = FileStorage(file, filename, name, content_type)

        self.add(name, value)


# circular dependencies
from .. import http
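A minimal sketch of how FileStorage is typically used (illustrative, not part of the diff; the field contents, file name, and target path are made up):

    from io import BytesIO
    from werkzeug.datastructures import FileStorage

    fs = FileStorage(
        stream=BytesIO(b"hello"),
        filename="hello.txt",
        content_type="text/plain",
    )
    print(fs.mimetype)           # "text/plain"
    fs.save("/tmp/hello.txt")    # copies the wrapped stream to the given path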
@ -0,0 +1,47 @@
from collections.abc import Iterator
from os import PathLike
from typing import Any
from typing import IO

from .headers import Headers
from .structures import MultiDict

class FileStorage:
    name: str | None
    stream: IO[bytes]
    filename: str | None
    headers: Headers
    _parsed_content_type: tuple[str, dict[str, str]]
    def __init__(
        self,
        stream: IO[bytes] | None = None,
        filename: str | PathLike | None = None,
        name: str | None = None,
        content_type: str | None = None,
        content_length: int | None = None,
        headers: Headers | None = None,
    ) -> None: ...
    def _parse_content_type(self) -> None: ...
    @property
    def content_type(self) -> str: ...
    @property
    def content_length(self) -> int: ...
    @property
    def mimetype(self) -> str: ...
    @property
    def mimetype_params(self) -> dict[str, str]: ...
    def save(self, dst: str | PathLike | IO[bytes], buffer_size: int = ...) -> None: ...
    def close(self) -> None: ...
    def __bool__(self) -> bool: ...
    def __getattr__(self, name: str) -> Any: ...
    def __iter__(self) -> Iterator[bytes]: ...
    def __repr__(self) -> str: ...

class FileMultiDict(MultiDict[str, FileStorage]):
    def add_file(
        self,
        name: str,
        file: FileStorage | str | IO[bytes],
        filename: str | None = None,
        content_type: str | None = None,
    ) -> None: ...
@ -0,0 +1,515 @@
from __future__ import annotations

import re
import typing as t

from .._internal import _missing
from ..exceptions import BadRequestKeyError
from .mixins import ImmutableHeadersMixin
from .structures import iter_multi_items
from .structures import MultiDict


class Headers:
    """An object that stores some headers. It has a dict-like interface,
    but is ordered, can store the same key multiple times, and iterating
    yields ``(key, value)`` pairs instead of only keys.

    This data structure is useful if you want a nicer way to handle WSGI
    headers which are stored as tuples in a list.

    From Werkzeug 0.3 onwards, the :exc:`KeyError` raised by this class is
    also a subclass of the :class:`~exceptions.BadRequest` HTTP exception
    and will render a page for a ``400 BAD REQUEST`` if caught in a
    catch-all for HTTP exceptions.

    Headers is mostly compatible with the Python :class:`wsgiref.headers.Headers`
    class, with the exception of `__getitem__`. :mod:`wsgiref` will return
    `None` for ``headers['missing']``, whereas :class:`Headers` will raise
    a :class:`KeyError`.

    To create a new ``Headers`` object, pass it a list, dict, or
    other ``Headers`` object with default values. These values are
    validated the same way values added later are.

    :param defaults: The list of default values for the :class:`Headers`.

    .. versionchanged:: 2.1.0
        Default values are validated the same as values added later.

    .. versionchanged:: 0.9
        This data structure now stores unicode values similar to how the
        multi dicts do it. The main difference is that bytes can be set as
        well which will automatically be latin1 decoded.

    .. versionchanged:: 0.9
        The :meth:`linked` function was removed without replacement as it
        was an API that does not support the changes to the encoding model.
    """

    def __init__(self, defaults=None):
        self._list = []
        if defaults is not None:
            self.extend(defaults)

    def __getitem__(self, key, _get_mode=False):
        if not _get_mode:
            if isinstance(key, int):
                return self._list[key]
            elif isinstance(key, slice):
                return self.__class__(self._list[key])
        if not isinstance(key, str):
            raise BadRequestKeyError(key)
        ikey = key.lower()
        for k, v in self._list:
            if k.lower() == ikey:
                return v
        # micro optimization: if we are in get mode we will catch that
        # exception one stack level down so we can raise a standard
        # key error instead of our special one.
        if _get_mode:
            raise KeyError()
        raise BadRequestKeyError(key)

    def __eq__(self, other):
        def lowered(item):
            return (item[0].lower(),) + item[1:]

        return other.__class__ is self.__class__ and set(
            map(lowered, other._list)
        ) == set(map(lowered, self._list))

    __hash__ = None

    def get(self, key, default=None, type=None):
        """Return the default value if the requested data doesn't exist.
        If `type` is provided and is a callable it should convert the value,
        return it or raise a :exc:`ValueError` if that is not possible. In
        this case the function will return the default as if the value was not
        found:

        >>> d = Headers([('Content-Length', '42')])
        >>> d.get('Content-Length', type=int)
        42

        :param key: The key to be looked up.
        :param default: The default value to be returned if the key can't
                        be looked up. If not further specified `None` is
                        returned.
        :param type: A callable that is used to cast the value in the
                     :class:`Headers`. If a :exc:`ValueError` is raised
                     by this callable the default value is returned.

        .. versionchanged:: 3.0
            The ``as_bytes`` parameter was removed.

        .. versionchanged:: 0.9
            The ``as_bytes`` parameter was added.
        """
        try:
            rv = self.__getitem__(key, _get_mode=True)
        except KeyError:
            return default
        if type is None:
            return rv
        try:
            return type(rv)
        except ValueError:
            return default

    def getlist(self, key, type=None):
        """Return the list of items for a given key. If that key is not in the
        :class:`Headers`, the return value will be an empty list. Just like
        :meth:`get`, :meth:`getlist` accepts a `type` parameter. All items will
        be converted with the callable defined there.

        :param key: The key to be looked up.
        :param type: A callable that is used to cast the value in the
                     :class:`Headers`. If a :exc:`ValueError` is raised
                     by this callable the value will be removed from the list.
        :return: a :class:`list` of all the values for the key.

        .. versionchanged:: 3.0
            The ``as_bytes`` parameter was removed.

        .. versionchanged:: 0.9
            The ``as_bytes`` parameter was added.
        """
        ikey = key.lower()
        result = []
        for k, v in self:
            if k.lower() == ikey:
                if type is not None:
                    try:
                        v = type(v)
                    except ValueError:
                        continue
                result.append(v)
        return result

    def get_all(self, name):
        """Return a list of all the values for the named field.

        This method is compatible with the :mod:`wsgiref`
        :meth:`~wsgiref.headers.Headers.get_all` method.
        """
        return self.getlist(name)

    def items(self, lower=False):
        for key, value in self:
            if lower:
                key = key.lower()
            yield key, value

    def keys(self, lower=False):
        for key, _ in self.items(lower):
            yield key

    def values(self):
        for _, value in self.items():
            yield value

    def extend(self, *args, **kwargs):
        """Extend headers in this object with items from another object
        containing header items as well as keyword arguments.

        To replace existing keys instead of extending, use
        :meth:`update` instead.

        If provided, the first argument can be another :class:`Headers`
        object, a :class:`MultiDict`, :class:`dict`, or iterable of
        pairs.

        .. versionchanged:: 1.0
            Support :class:`MultiDict`. Allow passing ``kwargs``.
        """
        if len(args) > 1:
            raise TypeError(f"update expected at most 1 arguments, got {len(args)}")

        if args:
            for key, value in iter_multi_items(args[0]):
                self.add(key, value)

        for key, value in iter_multi_items(kwargs):
            self.add(key, value)

    def __delitem__(self, key, _index_operation=True):
        if _index_operation and isinstance(key, (int, slice)):
            del self._list[key]
            return
        key = key.lower()
        new = []
        for k, v in self._list:
            if k.lower() != key:
                new.append((k, v))
        self._list[:] = new

    def remove(self, key):
        """Remove a key.

        :param key: The key to be removed.
        """
        return self.__delitem__(key, _index_operation=False)

    def pop(self, key=None, default=_missing):
        """Removes and returns a key or index.

        :param key: The key to be popped. If this is an integer the item at
                    that position is removed, if it's a string the value for
                    that key is. If the key is omitted or `None` the last
                    item is removed.
        :return: an item.
        """
        if key is None:
            return self._list.pop()
        if isinstance(key, int):
            return self._list.pop(key)
        try:
            rv = self[key]
            self.remove(key)
        except KeyError:
            if default is not _missing:
                return default
            raise
        return rv

    def popitem(self):
        """Removes a key or index and returns a (key, value) item."""
        return self.pop()

    def __contains__(self, key):
        """Check if a key is present."""
        try:
            self.__getitem__(key, _get_mode=True)
        except KeyError:
            return False
        return True

    def __iter__(self):
        """Yield ``(key, value)`` tuples."""
        return iter(self._list)

    def __len__(self):
        return len(self._list)

    def add(self, _key, _value, **kw):
        """Add a new header tuple to the list.

        Keyword arguments can specify additional parameters for the header
        value, with underscores converted to dashes::

        >>> d = Headers()
        >>> d.add('Content-Type', 'text/plain')
        >>> d.add('Content-Disposition', 'attachment', filename='foo.png')

        The keyword argument dumping uses :func:`dump_options_header`
        behind the scenes.

        .. versionadded:: 0.4.1
            keyword arguments were added for :mod:`wsgiref` compatibility.
        """
        if kw:
            _value = _options_header_vkw(_value, kw)
        _value = _str_header_value(_value)
        self._list.append((_key, _value))

    def add_header(self, _key, _value, **_kw):
        """Add a new header tuple to the list.

        An alias for :meth:`add` for compatibility with the :mod:`wsgiref`
        :meth:`~wsgiref.headers.Headers.add_header` method.
        """
        self.add(_key, _value, **_kw)

    def clear(self):
        """Clears all headers."""
        del self._list[:]

    def set(self, _key, _value, **kw):
        """Remove all header tuples for `key` and add a new one. The newly
        added key either appears at the end of the list if there was no
        entry or replaces the first one.

        Keyword arguments can specify additional parameters for the header
        value, with underscores converted to dashes. See :meth:`add` for
        more information.

        .. versionchanged:: 0.6.1
            :meth:`set` now accepts the same arguments as :meth:`add`.

        :param key: The key to be inserted.
        :param value: The value to be inserted.
        """
        if kw:
            _value = _options_header_vkw(_value, kw)
        _value = _str_header_value(_value)
        if not self._list:
            self._list.append((_key, _value))
            return
        listiter = iter(self._list)
        ikey = _key.lower()
        for idx, (old_key, _old_value) in enumerate(listiter):
            if old_key.lower() == ikey:
                # replace first occurrence
                self._list[idx] = (_key, _value)
                break
        else:
            self._list.append((_key, _value))
            return
        self._list[idx + 1 :] = [t for t in listiter if t[0].lower() != ikey]

    def setlist(self, key, values):
        """Remove any existing values for a header and add new ones.

        :param key: The header key to set.
        :param values: An iterable of values to set for the key.

        .. versionadded:: 1.0
        """
        if values:
            values_iter = iter(values)
            self.set(key, next(values_iter))

            for value in values_iter:
                self.add(key, value)
        else:
            self.remove(key)

    def setdefault(self, key, default):
        """Return the first value for the key if it is in the headers,
        otherwise set the header to the value given by ``default`` and
        return that.

        :param key: The header key to get.
        :param default: The value to set for the key if it is not in the
            headers.
        """
        if key in self:
            return self[key]

        self.set(key, default)
        return default

    def setlistdefault(self, key, default):
        """Return the list of values for the key if it is in the
        headers, otherwise set the header to the list of values given
        by ``default`` and return that.

        Unlike :meth:`MultiDict.setlistdefault`, modifying the returned
        list will not affect the headers.

        :param key: The header key to get.
        :param default: An iterable of values to set for the key if it
            is not in the headers.

        .. versionadded:: 1.0
        """
        if key not in self:
            self.setlist(key, default)

        return self.getlist(key)

    def __setitem__(self, key, value):
        """Like :meth:`set` but also supports index/slice based setting."""
        if isinstance(key, (slice, int)):
            if isinstance(key, int):
                value = [value]
            value = [(k, _str_header_value(v)) for (k, v) in value]
            if isinstance(key, int):
                self._list[key] = value[0]
            else:
                self._list[key] = value
        else:
            self.set(key, value)

    def update(self, *args, **kwargs):
        """Replace headers in this object with items from another
        headers object and keyword arguments.

        To extend existing keys instead of replacing, use :meth:`extend`
        instead.

        If provided, the first argument can be another :class:`Headers`
        object, a :class:`MultiDict`, :class:`dict`, or iterable of
        pairs.

        .. versionadded:: 1.0
        """
        if len(args) > 1:
            raise TypeError(f"update expected at most 1 arguments, got {len(args)}")

        if args:
            mapping = args[0]

            if isinstance(mapping, (Headers, MultiDict)):
                for key in mapping.keys():
                    self.setlist(key, mapping.getlist(key))
            elif isinstance(mapping, dict):
                for key, value in mapping.items():
                    if isinstance(value, (list, tuple)):
                        self.setlist(key, value)
                    else:
                        self.set(key, value)
            else:
                for key, value in mapping:
                    self.set(key, value)

        for key, value in kwargs.items():
            if isinstance(value, (list, tuple)):
                self.setlist(key, value)
            else:
                self.set(key, value)

    def to_wsgi_list(self):
        """Convert the headers into a list suitable for WSGI.

        :return: list
        """
        return list(self)

    def copy(self):
        return self.__class__(self._list)

    def __copy__(self):
        return self.copy()

    def __str__(self):
        """Returns formatted headers suitable for HTTP transmission."""
        strs = []
        for key, value in self.to_wsgi_list():
            strs.append(f"{key}: {value}")
        strs.append("\r\n")
        return "\r\n".join(strs)

    def __repr__(self):
        return f"{type(self).__name__}({list(self)!r})"


def _options_header_vkw(value: str, kw: dict[str, t.Any]):
    return http.dump_options_header(
        value, {k.replace("_", "-"): v for k, v in kw.items()}
    )


_newline_re = re.compile(r"[\r\n]")


def _str_header_value(value: t.Any) -> str:
    if not isinstance(value, str):
        value = str(value)

    if _newline_re.search(value) is not None:
        raise ValueError("Header values must not contain newline characters.")

    return value


class EnvironHeaders(ImmutableHeadersMixin, Headers):
    """Read only version of the headers from a WSGI environment. This
    provides the same interface as `Headers` and is constructed from
    a WSGI environment.
    From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
    subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
    render a page for a ``400 BAD REQUEST`` if caught in a catch-all for
    HTTP exceptions.
    """

    def __init__(self, environ):
        self.environ = environ

    def __eq__(self, other):
        return self.environ is other.environ

    __hash__ = None

    def __getitem__(self, key, _get_mode=False):
        # _get_mode is a no-op for this class as there is no index but
        # used because get() calls it.
        if not isinstance(key, str):
            raise KeyError(key)
        key = key.upper().replace("-", "_")
        if key in {"CONTENT_TYPE", "CONTENT_LENGTH"}:
            return self.environ[key]
        return self.environ[f"HTTP_{key}"]

    def __len__(self):
        # the iter is necessary because otherwise list calls our
        # len which would call list again and so forth.
        return len(list(iter(self)))

    def __iter__(self):
        for key, value in self.environ.items():
            if key.startswith("HTTP_") and key not in {
                "HTTP_CONTENT_TYPE",
                "HTTP_CONTENT_LENGTH",
            }:
                yield key[5:].replace("_", "-").title(), value
            elif key in {"CONTENT_TYPE", "CONTENT_LENGTH"} and value:
                yield key.replace("_", "-").title(), value

    def copy(self):
        raise TypeError(f"cannot create {type(self).__name__!r} copies")


# circular dependencies
from .. import http
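A short usage sketch for the Headers class above (illustrative, not part of the diff; the header names and values are made up):

    h = Headers()
    h.add("Content-Type", "text/plain")
    h.add("X-Token", "a")
    h.add("X-Token", "b")
    print(h.get("content-type"))                      # lookups are case-insensitive
    print(h.getlist("X-Token"))                       # ['a', 'b']
    print(h.get("Content-Length", type=int, default=0))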
@ -0,0 +1,109 @@
from collections.abc import Callable
from collections.abc import Iterable
from collections.abc import Iterator
from collections.abc import Mapping
from typing import Literal
from typing import NoReturn
from typing import overload
from typing import TypeVar

from _typeshed import SupportsKeysAndGetItem
from _typeshed.wsgi import WSGIEnvironment

from .mixins import ImmutableHeadersMixin

D = TypeVar("D")
T = TypeVar("T")

class Headers(dict[str, str]):
    _list: list[tuple[str, str]]
    def __init__(
        self,
        defaults: Mapping[str, str | Iterable[str]]
        | Iterable[tuple[str, str]]
        | None = None,
    ) -> None: ...
    @overload
    def __getitem__(self, key: str) -> str: ...
    @overload
    def __getitem__(self, key: int) -> tuple[str, str]: ...
    @overload
    def __getitem__(self, key: slice) -> Headers: ...
    @overload
    def __getitem__(self, key: str, _get_mode: Literal[True] = ...) -> str: ...
    def __eq__(self, other: object) -> bool: ...
    @overload  # type: ignore
    def get(self, key: str, default: str) -> str: ...
    @overload
    def get(self, key: str, default: str | None = None) -> str | None: ...
    @overload
    def get(
        self, key: str, default: T | None = None, type: Callable[[str], T] = ...
    ) -> T | None: ...
    @overload
    def getlist(self, key: str) -> list[str]: ...
    @overload
    def getlist(self, key: str, type: Callable[[str], T]) -> list[T]: ...
    def get_all(self, name: str) -> list[str]: ...
    def items(  # type: ignore
        self, lower: bool = False
    ) -> Iterator[tuple[str, str]]: ...
    def keys(self, lower: bool = False) -> Iterator[str]: ...  # type: ignore
    def values(self) -> Iterator[str]: ...  # type: ignore
    def extend(
        self,
        *args: Mapping[str, str | Iterable[str]] | Iterable[tuple[str, str]],
        **kwargs: str | Iterable[str],
    ) -> None: ...
    @overload
    def __delitem__(self, key: str | int | slice) -> None: ...
    @overload
    def __delitem__(self, key: str, _index_operation: Literal[False]) -> None: ...
    def remove(self, key: str) -> None: ...
    @overload  # type: ignore
    def pop(self, key: str, default: str | None = None) -> str: ...
    @overload
    def pop(
        self, key: int | None = None, default: tuple[str, str] | None = None
    ) -> tuple[str, str]: ...
    def popitem(self) -> tuple[str, str]: ...
    def __contains__(self, key: str) -> bool: ...  # type: ignore
    def has_key(self, key: str) -> bool: ...
    def __iter__(self) -> Iterator[tuple[str, str]]: ...  # type: ignore
    def add(self, _key: str, _value: str, **kw: str) -> None: ...
    def _validate_value(self, value: str) -> None: ...
    def add_header(self, _key: str, _value: str, **_kw: str) -> None: ...
    def clear(self) -> None: ...
    def set(self, _key: str, _value: str, **kw: str) -> None: ...
    def setlist(self, key: str, values: Iterable[str]) -> None: ...
    def setdefault(self, key: str, default: str) -> str: ...
    def setlistdefault(self, key: str, default: Iterable[str]) -> None: ...
    @overload
    def __setitem__(self, key: str, value: str) -> None: ...
    @overload
    def __setitem__(self, key: int, value: tuple[str, str]) -> None: ...
    @overload
    def __setitem__(self, key: slice, value: Iterable[tuple[str, str]]) -> None: ...
    @overload
    def update(
        self, __m: SupportsKeysAndGetItem[str, str], **kwargs: str | Iterable[str]
    ) -> None: ...
    @overload
    def update(
        self, __m: Iterable[tuple[str, str]], **kwargs: str | Iterable[str]
    ) -> None: ...
    @overload
    def update(self, **kwargs: str | Iterable[str]) -> None: ...
    def to_wsgi_list(self) -> list[tuple[str, str]]: ...
    def copy(self) -> Headers: ...
    def __copy__(self) -> Headers: ...

class EnvironHeaders(ImmutableHeadersMixin, Headers):
    environ: WSGIEnvironment
    def __init__(self, environ: WSGIEnvironment) -> None: ...
    def __eq__(self, other: object) -> bool: ...
    def __getitem__(  # type: ignore
        self, key: str, _get_mode: Literal[False] = False
    ) -> str: ...
    def __iter__(self) -> Iterator[tuple[str, str]]: ...  # type: ignore
    def copy(self) -> NoReturn: ...
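And a sketch of EnvironHeaders over a WSGI environ dict (illustrative, not part of the diff; the environ keys follow the usual WSGI naming convention assumed by the class above):

    environ = {
        "CONTENT_TYPE": "text/html",
        "HTTP_X_FORWARDED_FOR": "127.0.0.1",
    }
    headers = EnvironHeaders(environ)
    print(headers["Content-Type"])        # "text/html"
    print(headers["X-Forwarded-For"])     # "127.0.0.1"
    # EnvironHeaders is read-only; assigning a header raises TypeError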
@ -0,0 +1,242 @@
from __future__ import annotations

from itertools import repeat

from .._internal import _missing


def is_immutable(self):
    raise TypeError(f"{type(self).__name__!r} objects are immutable")


class ImmutableListMixin:
    """Makes a :class:`list` immutable.

    .. versionadded:: 0.5

    :private:
    """

    _hash_cache = None

    def __hash__(self):
        if self._hash_cache is not None:
            return self._hash_cache
        rv = self._hash_cache = hash(tuple(self))
        return rv

    def __reduce_ex__(self, protocol):
        return type(self), (list(self),)

    def __delitem__(self, key):
        is_immutable(self)

    def __iadd__(self, other):
        is_immutable(self)

    def __imul__(self, other):
        is_immutable(self)

    def __setitem__(self, key, value):
        is_immutable(self)

    def append(self, item):
        is_immutable(self)

    def remove(self, item):
        is_immutable(self)

    def extend(self, iterable):
        is_immutable(self)

    def insert(self, pos, value):
        is_immutable(self)

    def pop(self, index=-1):
        is_immutable(self)

    def reverse(self):
        is_immutable(self)

    def sort(self, key=None, reverse=False):
        is_immutable(self)


class ImmutableDictMixin:
    """Makes a :class:`dict` immutable.

    .. versionadded:: 0.5

    :private:
    """

    _hash_cache = None

    @classmethod
    def fromkeys(cls, keys, value=None):
        instance = super().__new__(cls)
        instance.__init__(zip(keys, repeat(value)))
        return instance

    def __reduce_ex__(self, protocol):
        return type(self), (dict(self),)

    def _iter_hashitems(self):
        return self.items()

    def __hash__(self):
        if self._hash_cache is not None:
            return self._hash_cache
        rv = self._hash_cache = hash(frozenset(self._iter_hashitems()))
        return rv

    def setdefault(self, key, default=None):
        is_immutable(self)

    def update(self, *args, **kwargs):
        is_immutable(self)

    def pop(self, key, default=None):
        is_immutable(self)

    def popitem(self):
        is_immutable(self)

    def __setitem__(self, key, value):
        is_immutable(self)

    def __delitem__(self, key):
        is_immutable(self)

    def clear(self):
        is_immutable(self)


class ImmutableMultiDictMixin(ImmutableDictMixin):
    """Makes a :class:`MultiDict` immutable.

    .. versionadded:: 0.5

    :private:
    """

    def __reduce_ex__(self, protocol):
        return type(self), (list(self.items(multi=True)),)

    def _iter_hashitems(self):
        return self.items(multi=True)

    def add(self, key, value):
        is_immutable(self)

    def popitemlist(self):
        is_immutable(self)

    def poplist(self, key):
        is_immutable(self)

    def setlist(self, key, new_list):
        is_immutable(self)

    def setlistdefault(self, key, default_list=None):
        is_immutable(self)


class ImmutableHeadersMixin:
    """Makes a :class:`Headers` immutable. We do not mark them as
    hashable though since the only usecase for this datastructure
    in Werkzeug is a view on a mutable structure.

    .. versionadded:: 0.5

    :private:
    """

    def __delitem__(self, key, **kwargs):
        is_immutable(self)

    def __setitem__(self, key, value):
        is_immutable(self)

    def set(self, _key, _value, **kwargs):
        is_immutable(self)

    def setlist(self, key, values):
        is_immutable(self)

    def add(self, _key, _value, **kwargs):
        is_immutable(self)

    def add_header(self, _key, _value, **_kwargs):
        is_immutable(self)

    def remove(self, key):
        is_immutable(self)

    def extend(self, *args, **kwargs):
        is_immutable(self)

    def update(self, *args, **kwargs):
        is_immutable(self)

    def insert(self, pos, value):
        is_immutable(self)

    def pop(self, key=None, default=_missing):
        is_immutable(self)

    def popitem(self):
        is_immutable(self)

    def setdefault(self, key, default):
        is_immutable(self)

    def setlistdefault(self, key, default):
        is_immutable(self)


def _calls_update(name):
    def oncall(self, *args, **kw):
        rv = getattr(super(UpdateDictMixin, self), name)(*args, **kw)

        if self.on_update is not None:
            self.on_update(self)

        return rv

    oncall.__name__ = name
    return oncall


class UpdateDictMixin(dict):
    """Makes dicts call `self.on_update` on modifications.

    .. versionadded:: 0.5

    :private:
    """

    on_update = None

    def setdefault(self, key, default=None):
        modified = key not in self
        rv = super().setdefault(key, default)
        if modified and self.on_update is not None:
            self.on_update(self)
        return rv

    def pop(self, key, default=_missing):
        modified = key in self
        if default is _missing:
            rv = super().pop(key)
        else:
            rv = super().pop(key, default)
        if modified and self.on_update is not None:
            self.on_update(self)
        return rv

    __setitem__ = _calls_update("__setitem__")
    __delitem__ = _calls_update("__delitem__")
    clear = _calls_update("clear")
    popitem = _calls_update("popitem")
    update = _calls_update("update")
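A small sketch showing what UpdateDictMixin provides (illustrative, not part of the diff; the subclass name and callback are made up):

    class TrackedDict(UpdateDictMixin, dict):
        pass

    changes = []
    d = TrackedDict()
    d.on_update = changes.append   # called with the dict after each mutation
    d["a"] = 1
    d.pop("a")
    print(len(changes))            # 2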
@ -0,0 +1,97 @@
from collections.abc import Callable
from collections.abc import Hashable
from collections.abc import Iterable
from typing import Any
from typing import NoReturn
from typing import overload
from typing import SupportsIndex
from typing import TypeVar

from _typeshed import SupportsKeysAndGetItem

from .headers import Headers

K = TypeVar("K")
T = TypeVar("T")
V = TypeVar("V")

def is_immutable(self: object) -> NoReturn: ...

class ImmutableListMixin(list[V]):
    _hash_cache: int | None
    def __hash__(self) -> int: ...  # type: ignore
    def __delitem__(self, key: SupportsIndex | slice) -> NoReturn: ...
    def __iadd__(self, other: t.Any) -> NoReturn: ...  # type: ignore
    def __imul__(self, other: SupportsIndex) -> NoReturn: ...
    def __setitem__(self, key: int | slice, value: V) -> NoReturn: ...  # type: ignore
    def append(self, value: V) -> NoReturn: ...
    def remove(self, value: V) -> NoReturn: ...
    def extend(self, values: Iterable[V]) -> NoReturn: ...
    def insert(self, pos: SupportsIndex, value: V) -> NoReturn: ...
    def pop(self, index: SupportsIndex = -1) -> NoReturn: ...
    def reverse(self) -> NoReturn: ...
    def sort(
        self, key: Callable[[V], Any] | None = None, reverse: bool = False
    ) -> NoReturn: ...

class ImmutableDictMixin(dict[K, V]):
    _hash_cache: int | None
    @classmethod
    def fromkeys(  # type: ignore
        cls, keys: Iterable[K], value: V | None = None
    ) -> ImmutableDictMixin[K, V]: ...
    def _iter_hashitems(self) -> Iterable[Hashable]: ...
    def __hash__(self) -> int: ...  # type: ignore
    def setdefault(self, key: K, default: V | None = None) -> NoReturn: ...
    def update(self, *args: Any, **kwargs: V) -> NoReturn: ...
    def pop(self, key: K, default: V | None = None) -> NoReturn: ...  # type: ignore
    def popitem(self) -> NoReturn: ...
    def __setitem__(self, key: K, value: V) -> NoReturn: ...
    def __delitem__(self, key: K) -> NoReturn: ...
    def clear(self) -> NoReturn: ...

class ImmutableMultiDictMixin(ImmutableDictMixin[K, V]):
    def _iter_hashitems(self) -> Iterable[Hashable]: ...
    def add(self, key: K, value: V) -> NoReturn: ...
    def popitemlist(self) -> NoReturn: ...
    def poplist(self, key: K) -> NoReturn: ...
    def setlist(self, key: K, new_list: Iterable[V]) -> NoReturn: ...
    def setlistdefault(
        self, key: K, default_list: Iterable[V] | None = None
    ) -> NoReturn: ...

class ImmutableHeadersMixin(Headers):
    def __delitem__(self, key: Any, _index_operation: bool = True) -> NoReturn: ...
    def __setitem__(self, key: Any, value: Any) -> NoReturn: ...
    def set(self, _key: Any, _value: Any, **kw: Any) -> NoReturn: ...
    def setlist(self, key: Any, values: Any) -> NoReturn: ...
    def add(self, _key: Any, _value: Any, **kw: Any) -> NoReturn: ...
    def add_header(self, _key: Any, _value: Any, **_kw: Any) -> NoReturn: ...
    def remove(self, key: Any) -> NoReturn: ...
    def extend(self, *args: Any, **kwargs: Any) -> NoReturn: ...
    def update(self, *args: Any, **kwargs: Any) -> NoReturn: ...
    def insert(self, pos: Any, value: Any) -> NoReturn: ...
    def pop(self, key: Any = None, default: Any = ...) -> NoReturn: ...
    def popitem(self) -> NoReturn: ...
    def setdefault(self, key: Any, default: Any) -> NoReturn: ...
    def setlistdefault(self, key: Any, default: Any) -> NoReturn: ...

def _calls_update(name: str) -> Callable[[UpdateDictMixin[K, V]], Any]: ...

class UpdateDictMixin(dict[K, V]):
    on_update: Callable[[UpdateDictMixin[K, V] | None, None], None]
    def setdefault(self, key: K, default: V | None = None) -> V: ...
    @overload
    def pop(self, key: K) -> V: ...
    @overload
    def pop(self, key: K, default: V | T = ...) -> V | T: ...
    def __setitem__(self, key: K, value: V) -> None: ...
    def __delitem__(self, key: K) -> None: ...
    def clear(self) -> None: ...
    def popitem(self) -> tuple[K, V]: ...
    @overload
    def update(self, __m: SupportsKeysAndGetItem[K, V], **kwargs: V) -> None: ...
    @overload
    def update(self, __m: Iterable[tuple[K, V]], **kwargs: V) -> None: ...
    @overload
    def update(self, **kwargs: V) -> None: ...
@ -0,0 +1,180 @@
from __future__ import annotations


class IfRange:
    """Very simple object that represents the `If-Range` header in parsed
    form. It will either have neither a etag or date or one of either but
    never both.

    .. versionadded:: 0.7
    """

    def __init__(self, etag=None, date=None):
        #: The etag parsed and unquoted. Ranges always operate on strong
        #: etags so the weakness information is not necessary.
        self.etag = etag
        #: The date in parsed format or `None`.
        self.date = date

    def to_header(self):
        """Converts the object back into an HTTP header."""
        if self.date is not None:
            return http.http_date(self.date)
        if self.etag is not None:
            return http.quote_etag(self.etag)
        return ""

    def __str__(self):
        return self.to_header()

    def __repr__(self):
        return f"<{type(self).__name__} {str(self)!r}>"


class Range:
    """Represents a ``Range`` header. All methods only support only
    bytes as the unit. Stores a list of ranges if given, but the methods
    only work if only one range is provided.

    :raise ValueError: If the ranges provided are invalid.

    .. versionchanged:: 0.15
        The ranges passed in are validated.

    .. versionadded:: 0.7
    """

    def __init__(self, units, ranges):
        #: The units of this range. Usually "bytes".
        self.units = units
        #: A list of ``(begin, end)`` tuples for the range header provided.
        #: The ranges are non-inclusive.
        self.ranges = ranges

        for start, end in ranges:
            if start is None or (end is not None and (start < 0 or start >= end)):
                raise ValueError(f"{(start, end)} is not a valid range.")

    def range_for_length(self, length):
        """If the range is for bytes, the length is not None and there is
        exactly one range and it is satisfiable it returns a ``(start, stop)``
        tuple, otherwise `None`.
        """
        if self.units != "bytes" or length is None or len(self.ranges) != 1:
            return None
        start, end = self.ranges[0]
        if end is None:
            end = length
            if start < 0:
                start += length
        if http.is_byte_range_valid(start, end, length):
            return start, min(end, length)
        return None

    def make_content_range(self, length):
        """Creates a :class:`~werkzeug.datastructures.ContentRange` object
        from the current range and given content length.
        """
        rng = self.range_for_length(length)
        if rng is not None:
            return ContentRange(self.units, rng[0], rng[1], length)
        return None

    def to_header(self):
        """Converts the object back into an HTTP header."""
        ranges = []
        for begin, end in self.ranges:
            if end is None:
                ranges.append(f"{begin}-" if begin >= 0 else str(begin))
            else:
                ranges.append(f"{begin}-{end - 1}")
        return f"{self.units}={','.join(ranges)}"

    def to_content_range_header(self, length):
        """Converts the object into `Content-Range` HTTP header,
        based on given length
        """
        range = self.range_for_length(length)
        if range is not None:
            return f"{self.units} {range[0]}-{range[1] - 1}/{length}"
        return None

    def __str__(self):
        return self.to_header()

    def __repr__(self):
        return f"<{type(self).__name__} {str(self)!r}>"


def _callback_property(name):
    def fget(self):
        return getattr(self, name)

    def fset(self, value):
        setattr(self, name, value)
        if self.on_update is not None:
            self.on_update(self)

    return property(fget, fset)


class ContentRange:
    """Represents the content range header.

    .. versionadded:: 0.7
    """

    def __init__(self, units, start, stop, length=None, on_update=None):
        assert http.is_byte_range_valid(start, stop, length), "Bad range provided"
        self.on_update = on_update
        self.set(start, stop, length, units)

    #: The units to use, usually "bytes"
    units = _callback_property("_units")
    #: The start point of the range or `None`.
    start = _callback_property("_start")
    #: The stop point of the range (non-inclusive) or `None`. Can only be
    #: `None` if also start is `None`.
    stop = _callback_property("_stop")
    #: The length of the range or `None`.
    length = _callback_property("_length")

    def set(self, start, stop, length=None, units="bytes"):
        """Simple method to update the ranges."""
        assert http.is_byte_range_valid(start, stop, length), "Bad range provided"
        self._units = units
        self._start = start
        self._stop = stop
        self._length = length
        if self.on_update is not None:
            self.on_update(self)

    def unset(self):
        """Sets the units to `None` which indicates that the header should
        no longer be used.
        """
        self.set(None, None, units=None)

    def to_header(self):
        if self.units is None:
            return ""
        if self.length is None:
            length = "*"
        else:
            length = self.length
        if self.start is None:
            return f"{self.units} */{length}"
        return f"{self.units} {self.start}-{self.stop - 1}/{length}"

    def __bool__(self):
        return self.units is not None

    def __str__(self):
        return self.to_header()

    def __repr__(self):
        return f"<{type(self).__name__} {str(self)!r}>"


# circular dependencies
from .. import http
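A short sketch of Range and ContentRange for a 1000-byte resource (illustrative, not part of the diff; the numbers are made up):

    rng = Range("bytes", [(0, 500)])
    print(rng.to_header())                    # "bytes=0-499"
    print(rng.range_for_length(1000))         # (0, 500)
    print(rng.to_content_range_header(1000))  # "bytes 0-499/1000"

    cr = rng.make_content_range(1000)
    print(cr.to_header())                     # "bytes 0-499/1000"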
@ -0,0 +1,57 @@
from collections.abc import Callable
from datetime import datetime

class IfRange:
    etag: str | None
    date: datetime | None
    def __init__(
        self, etag: str | None = None, date: datetime | None = None
    ) -> None: ...
    def to_header(self) -> str: ...

class Range:
    units: str
    ranges: list[tuple[int, int | None]]
    def __init__(self, units: str, ranges: list[tuple[int, int | None]]) -> None: ...
    def range_for_length(self, length: int | None) -> tuple[int, int] | None: ...
    def make_content_range(self, length: int | None) -> ContentRange | None: ...
    def to_header(self) -> str: ...
    def to_content_range_header(self, length: int | None) -> str | None: ...

def _callback_property(name: str) -> property: ...

class ContentRange:
    on_update: Callable[[ContentRange], None] | None
    def __init__(
        self,
        units: str | None,
        start: int | None,
        stop: int | None,
        length: int | None = None,
        on_update: Callable[[ContentRange], None] | None = None,
    ) -> None: ...
    @property
    def units(self) -> str | None: ...
    @units.setter
    def units(self, value: str | None) -> None: ...
    @property
    def start(self) -> int | None: ...
    @start.setter
    def start(self, value: int | None) -> None: ...
    @property
    def stop(self) -> int | None: ...
    @stop.setter
    def stop(self, value: int | None) -> None: ...
    @property
    def length(self) -> int | None: ...
    @length.setter
    def length(self, value: int | None) -> None: ...
    def set(
        self,
        start: int | None,
        stop: int | None,
        length: int | None = None,
        units: str | None = "bytes",
    ) -> None: ...
    def unset(self) -> None: ...
    def to_header(self) -> str: ...
@@ -0,0 +1,208 @@
from collections.abc import Callable
from collections.abc import Iterable
from collections.abc import Iterator
from collections.abc import Mapping
from typing import Any
from typing import Generic
from typing import Literal
from typing import NoReturn
from typing import overload
from typing import TypeVar

from .mixins import (
    ImmutableDictMixin,
    ImmutableListMixin,
    ImmutableMultiDictMixin,
    UpdateDictMixin,
)

D = TypeVar("D")
K = TypeVar("K")
T = TypeVar("T")
V = TypeVar("V")
_CD = TypeVar("_CD", bound="CallbackDict")

def is_immutable(self: object) -> NoReturn: ...
def iter_multi_items(
    mapping: Mapping[K, V | Iterable[V]] | Iterable[tuple[K, V]]
) -> Iterator[tuple[K, V]]: ...

class ImmutableList(ImmutableListMixin[V]): ...

class TypeConversionDict(dict[K, V]):
    @overload
    def get(self, key: K, default: None = ..., type: None = ...) -> V | None: ...
    @overload
    def get(self, key: K, default: D, type: None = ...) -> D | V: ...
    @overload
    def get(self, key: K, default: D, type: Callable[[V], T]) -> D | T: ...
    @overload
    def get(self, key: K, type: Callable[[V], T]) -> T | None: ...

class ImmutableTypeConversionDict(ImmutableDictMixin[K, V], TypeConversionDict[K, V]):
    def copy(self) -> TypeConversionDict[K, V]: ...
    def __copy__(self) -> ImmutableTypeConversionDict: ...

class MultiDict(TypeConversionDict[K, V]):
    def __init__(
        self,
        mapping: Mapping[K, Iterable[V] | V] | Iterable[tuple[K, V]] | None = None,
    ) -> None: ...
    def __getitem__(self, item: K) -> V: ...
    def __setitem__(self, key: K, value: V) -> None: ...
    def add(self, key: K, value: V) -> None: ...
    @overload
    def getlist(self, key: K) -> list[V]: ...
    @overload
    def getlist(self, key: K, type: Callable[[V], T] = ...) -> list[T]: ...
    def setlist(self, key: K, new_list: Iterable[V]) -> None: ...
    def setdefault(self, key: K, default: V | None = None) -> V: ...
    def setlistdefault(
        self, key: K, default_list: Iterable[V] | None = None
    ) -> list[V]: ...
    def items(self, multi: bool = False) -> Iterator[tuple[K, V]]: ...  # type: ignore
    def lists(self) -> Iterator[tuple[K, list[V]]]: ...
    def values(self) -> Iterator[V]: ...  # type: ignore
    def listvalues(self) -> Iterator[list[V]]: ...
    def copy(self) -> MultiDict[K, V]: ...
    def deepcopy(self, memo: Any = None) -> MultiDict[K, V]: ...
    @overload
    def to_dict(self) -> dict[K, V]: ...
    @overload
    def to_dict(self, flat: Literal[False]) -> dict[K, list[V]]: ...
    def update(  # type: ignore
        self, mapping: Mapping[K, Iterable[V] | V] | Iterable[tuple[K, V]]
    ) -> None: ...
    @overload
    def pop(self, key: K) -> V: ...
    @overload
    def pop(self, key: K, default: V | T = ...) -> V | T: ...
    def popitem(self) -> tuple[K, V]: ...
    def poplist(self, key: K) -> list[V]: ...
    def popitemlist(self) -> tuple[K, list[V]]: ...
    def __copy__(self) -> MultiDict[K, V]: ...
    def __deepcopy__(self, memo: Any) -> MultiDict[K, V]: ...

class _omd_bucket(Generic[K, V]):
    prev: _omd_bucket | None
    next: _omd_bucket | None
    key: K
    value: V
    def __init__(self, omd: OrderedMultiDict, key: K, value: V) -> None: ...
    def unlink(self, omd: OrderedMultiDict) -> None: ...

class OrderedMultiDict(MultiDict[K, V]):
    _first_bucket: _omd_bucket | None
    _last_bucket: _omd_bucket | None
    def __init__(self, mapping: Mapping[K, V] | None = None) -> None: ...
    def __eq__(self, other: object) -> bool: ...
    def __getitem__(self, key: K) -> V: ...
    def __setitem__(self, key: K, value: V) -> None: ...
    def __delitem__(self, key: K) -> None: ...
    def keys(self) -> Iterator[K]: ...  # type: ignore
    def __iter__(self) -> Iterator[K]: ...
    def values(self) -> Iterator[V]: ...  # type: ignore
    def items(self, multi: bool = False) -> Iterator[tuple[K, V]]: ...  # type: ignore
    def lists(self) -> Iterator[tuple[K, list[V]]]: ...
    def listvalues(self) -> Iterator[list[V]]: ...
    def add(self, key: K, value: V) -> None: ...
    @overload
    def getlist(self, key: K) -> list[V]: ...
    @overload
    def getlist(self, key: K, type: Callable[[V], T] = ...) -> list[T]: ...
    def setlist(self, key: K, new_list: Iterable[V]) -> None: ...
    def setlistdefault(
        self, key: K, default_list: Iterable[V] | None = None
    ) -> list[V]: ...
    def update(  # type: ignore
        self, mapping: Mapping[K, V] | Iterable[tuple[K, V]]
    ) -> None: ...
    def poplist(self, key: K) -> list[V]: ...
    @overload
    def pop(self, key: K) -> V: ...
    @overload
    def pop(self, key: K, default: V | T = ...) -> V | T: ...
    def popitem(self) -> tuple[K, V]: ...
    def popitemlist(self) -> tuple[K, list[V]]: ...

class CombinedMultiDict(ImmutableMultiDictMixin[K, V], MultiDict[K, V]):  # type: ignore
    dicts: list[MultiDict[K, V]]
    def __init__(self, dicts: Iterable[MultiDict[K, V]] | None) -> None: ...
    @classmethod
    def fromkeys(cls, keys: Any, value: Any = None) -> NoReturn: ...
    def __getitem__(self, key: K) -> V: ...
    @overload  # type: ignore
    def get(self, key: K) -> V | None: ...
    @overload
    def get(self, key: K, default: V | T = ...) -> V | T: ...
    @overload
    def get(
        self, key: K, default: T | None = None, type: Callable[[V], T] = ...
    ) -> T | None: ...
    @overload
    def getlist(self, key: K) -> list[V]: ...
    @overload
    def getlist(self, key: K, type: Callable[[V], T] = ...) -> list[T]: ...
    def _keys_impl(self) -> set[K]: ...
    def keys(self) -> set[K]: ...  # type: ignore
    def __iter__(self) -> set[K]: ...  # type: ignore
    def items(self, multi: bool = False) -> Iterator[tuple[K, V]]: ...  # type: ignore
    def values(self) -> Iterator[V]: ...  # type: ignore
    def lists(self) -> Iterator[tuple[K, list[V]]]: ...
    def listvalues(self) -> Iterator[list[V]]: ...
    def copy(self) -> MultiDict[K, V]: ...
    @overload
    def to_dict(self) -> dict[K, V]: ...
    @overload
    def to_dict(self, flat: Literal[False]) -> dict[K, list[V]]: ...
    def __contains__(self, key: K) -> bool: ...  # type: ignore
    def has_key(self, key: K) -> bool: ...

class ImmutableDict(ImmutableDictMixin[K, V], dict[K, V]):
    def copy(self) -> dict[K, V]: ...
    def __copy__(self) -> ImmutableDict[K, V]: ...

class ImmutableMultiDict(  # type: ignore
    ImmutableMultiDictMixin[K, V], MultiDict[K, V]
):
    def copy(self) -> MultiDict[K, V]: ...
    def __copy__(self) -> ImmutableMultiDict[K, V]: ...

class ImmutableOrderedMultiDict(  # type: ignore
    ImmutableMultiDictMixin[K, V], OrderedMultiDict[K, V]
):
    def _iter_hashitems(self) -> Iterator[tuple[int, tuple[K, V]]]: ...
    def copy(self) -> OrderedMultiDict[K, V]: ...
    def __copy__(self) -> ImmutableOrderedMultiDict[K, V]: ...

class CallbackDict(UpdateDictMixin[K, V], dict[K, V]):
    def __init__(
        self,
        initial: Mapping[K, V] | Iterable[tuple[K, V]] | None = None,
        on_update: Callable[[_CD], None] | None = None,
    ) -> None: ...

class HeaderSet(set[str]):
    _headers: list[str]
    _set: set[str]
    on_update: Callable[[HeaderSet], None] | None
    def __init__(
        self,
        headers: Iterable[str] | None = None,
        on_update: Callable[[HeaderSet], None] | None = None,
    ) -> None: ...
    def add(self, header: str) -> None: ...
    def remove(self, header: str) -> None: ...
    def update(self, iterable: Iterable[str]) -> None: ...  # type: ignore
    def discard(self, header: str) -> None: ...
    def find(self, header: str) -> int: ...
    def index(self, header: str) -> int: ...
    def clear(self) -> None: ...
    def as_set(self, preserve_casing: bool = False) -> set[str]: ...
    def to_header(self) -> str: ...
    def __getitem__(self, idx: int) -> str: ...
    def __delitem__(self, idx: int) -> None: ...
    def __setitem__(self, idx: int, value: str) -> None: ...
    def __contains__(self, header: str) -> bool: ...  # type: ignore
    def __len__(self) -> int: ...
    def __iter__(self) -> Iterator[str]: ...
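Reviewer note, not part of the patch: the overloads in the stub above describe the existing runtime behaviour of TypeConversionDict and MultiDict, for example:

    from werkzeug.datastructures import MultiDict

    d = MultiDict([("page", "3"), ("tag", "a"), ("tag", "b")])
    d.get("page", type=int)   # 3, via the Callable[[V], T] overload
    d.get("missing", 0)       # 0, via the "default: D" overload
    d.getlist("tag")          # ['a', 'b']
    d.to_dict(flat=False)     # {'page': ['3'], 'tag': ['a', 'b']}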
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import getpass
 import hashlib
 import json
@@ -9,7 +11,6 @@ import time
 import typing as t
 import uuid
 from contextlib import ExitStack
-from contextlib import nullcontext
 from io import BytesIO
 from itertools import chain
 from os.path import basename
@@ -41,16 +42,16 @@ def hash_pin(pin: str) -> str:
     return hashlib.sha1(f"{pin} added salt".encode("utf-8", "replace")).hexdigest()[:12]


-_machine_id: t.Optional[t.Union[str, bytes]] = None
+_machine_id: str | bytes | None = None


-def get_machine_id() -> t.Optional[t.Union[str, bytes]]:
+def get_machine_id() -> str | bytes | None:
     global _machine_id

     if _machine_id is not None:
         return _machine_id

-    def _generate() -> t.Optional[t.Union[str, bytes]]:
+    def _generate() -> str | bytes | None:
         linux = b""

         # machine-id is stable across boots, boot_id is not.
@@ -104,7 +105,7 @@ def get_machine_id() -> t.Optional[t.Union[str, bytes]]:
                     0,
                     winreg.KEY_READ | winreg.KEY_WOW64_64KEY,
                 ) as rk:
-                    guid: t.Union[str, bytes]
+                    guid: str | bytes
                     guid_type: int
                     guid, guid_type = winreg.QueryValueEx(rk, "MachineGuid")

@@ -126,7 +127,7 @@ class _ConsoleFrame:
     standalone console.
     """

-    def __init__(self, namespace: t.Dict[str, t.Any]):
+    def __init__(self, namespace: dict[str, t.Any]):
         self.console = Console(namespace)
         self.id = 0

@ -135,8 +136,8 @@ class _ConsoleFrame:
|
||||||
|
|
||||||
|
|
||||||
def get_pin_and_cookie_name(
|
def get_pin_and_cookie_name(
|
||||||
app: "WSGIApplication",
|
app: WSGIApplication,
|
||||||
) -> t.Union[t.Tuple[str, str], t.Tuple[None, None]]:
|
) -> tuple[str, str] | tuple[None, None]:
|
||||||
"""Given an application object this returns a semi-stable 9 digit pin
|
"""Given an application object this returns a semi-stable 9 digit pin
|
||||||
code and a random key. The hope is that this is stable between
|
code and a random key. The hope is that this is stable between
|
||||||
restarts to not make debugging particularly frustrating. If the pin
|
restarts to not make debugging particularly frustrating. If the pin
|
||||||
|
@ -161,7 +162,7 @@ def get_pin_and_cookie_name(
|
||||||
num = pin
|
num = pin
|
||||||
|
|
||||||
modname = getattr(app, "__module__", t.cast(object, app).__class__.__module__)
|
modname = getattr(app, "__module__", t.cast(object, app).__class__.__module__)
|
||||||
username: t.Optional[str]
|
username: str | None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# getuser imports the pwd module, which does not exist in Google
|
# getuser imports the pwd module, which does not exist in Google
|
||||||
|
@ -229,8 +230,8 @@ class DebuggedApplication:
|
||||||
|
|
||||||
The ``evalex`` argument allows evaluating expressions in any frame
|
The ``evalex`` argument allows evaluating expressions in any frame
|
||||||
of a traceback. This works by preserving each frame with its local
|
of a traceback. This works by preserving each frame with its local
|
||||||
state. Some state, such as :doc:`local`, cannot be restored with the
|
state. Some state, such as context globals, cannot be restored with
|
||||||
frame by default. When ``evalex`` is enabled,
|
the frame by default. When ``evalex`` is enabled,
|
||||||
``environ["werkzeug.debug.preserve_context"]`` will be a callable
|
``environ["werkzeug.debug.preserve_context"]`` will be a callable
|
||||||
that takes a context manager, and can be called multiple times.
|
that takes a context manager, and can be called multiple times.
|
||||||
Each context manager will be entered before evaluating code in the
|
Each context manager will be entered before evaluating code in the
|
||||||
|
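Reviewer note, not part of the patch: a minimal sketch of wiring the debugger middleware described above around a plain WSGI app; it assumes werkzeug.serving.run_simple as shipped with upstream Werkzeug.

    from werkzeug.debug import DebuggedApplication
    from werkzeug.serving import run_simple

    def app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        1 / 0  # deliberate error to show the interactive traceback
        return [b"unreachable"]

    if __name__ == "__main__":
        # evalex=True enables evaluating expressions in traceback frames,
        # protected by the PIN derived in get_pin_and_cookie_name().
        run_simple("localhost", 5000, DebuggedApplication(app, evalex=True))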
@ -262,11 +263,11 @@ class DebuggedApplication:
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
app: "WSGIApplication",
|
app: WSGIApplication,
|
||||||
evalex: bool = False,
|
evalex: bool = False,
|
||||||
request_key: str = "werkzeug.request",
|
request_key: str = "werkzeug.request",
|
||||||
console_path: str = "/console",
|
console_path: str = "/console",
|
||||||
console_init_func: t.Optional[t.Callable[[], t.Dict[str, t.Any]]] = None,
|
console_init_func: t.Callable[[], dict[str, t.Any]] | None = None,
|
||||||
show_hidden_frames: bool = False,
|
show_hidden_frames: bool = False,
|
||||||
pin_security: bool = True,
|
pin_security: bool = True,
|
||||||
pin_logging: bool = True,
|
pin_logging: bool = True,
|
||||||
|
@ -275,8 +276,8 @@ class DebuggedApplication:
|
||||||
console_init_func = None
|
console_init_func = None
|
||||||
self.app = app
|
self.app = app
|
||||||
self.evalex = evalex
|
self.evalex = evalex
|
||||||
self.frames: t.Dict[int, t.Union[DebugFrameSummary, _ConsoleFrame]] = {}
|
self.frames: dict[int, DebugFrameSummary | _ConsoleFrame] = {}
|
||||||
self.frame_contexts: t.Dict[int, t.List[t.ContextManager[None]]] = {}
|
self.frame_contexts: dict[int, list[t.ContextManager[None]]] = {}
|
||||||
self.request_key = request_key
|
self.request_key = request_key
|
||||||
self.console_path = console_path
|
self.console_path = console_path
|
||||||
self.console_init_func = console_init_func
|
self.console_init_func = console_init_func
|
||||||
|
@ -297,7 +298,7 @@ class DebuggedApplication:
|
||||||
self.pin = None
|
self.pin = None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def pin(self) -> t.Optional[str]:
|
def pin(self) -> str | None:
|
||||||
if not hasattr(self, "_pin"):
|
if not hasattr(self, "_pin"):
|
||||||
pin_cookie = get_pin_and_cookie_name(self.app)
|
pin_cookie = get_pin_and_cookie_name(self.app)
|
||||||
self._pin, self._pin_cookie = pin_cookie # type: ignore
|
self._pin, self._pin_cookie = pin_cookie # type: ignore
|
||||||
|
@ -316,10 +317,10 @@ class DebuggedApplication:
|
||||||
return self._pin_cookie
|
return self._pin_cookie
|
||||||
|
|
||||||
def debug_application(
|
def debug_application(
|
||||||
self, environ: "WSGIEnvironment", start_response: "StartResponse"
|
self, environ: WSGIEnvironment, start_response: StartResponse
|
||||||
) -> t.Iterator[bytes]:
|
) -> t.Iterator[bytes]:
|
||||||
"""Run the application and conserve the traceback frames."""
|
"""Run the application and conserve the traceback frames."""
|
||||||
contexts: t.List[t.ContextManager[t.Any]] = []
|
contexts: list[t.ContextManager[t.Any]] = []
|
||||||
|
|
||||||
if self.evalex:
|
if self.evalex:
|
||||||
environ["werkzeug.debug.preserve_context"] = contexts.append
|
environ["werkzeug.debug.preserve_context"] = contexts.append
|
||||||
|
@ -329,7 +330,7 @@ class DebuggedApplication:
|
||||||
app_iter = self.app(environ, start_response)
|
app_iter = self.app(environ, start_response)
|
||||||
yield from app_iter
|
yield from app_iter
|
||||||
if hasattr(app_iter, "close"):
|
if hasattr(app_iter, "close"):
|
||||||
app_iter.close() # type: ignore
|
app_iter.close()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
if hasattr(app_iter, "close"):
|
if hasattr(app_iter, "close"):
|
||||||
app_iter.close() # type: ignore
|
app_iter.close() # type: ignore
|
||||||
|
@ -367,7 +368,7 @@ class DebuggedApplication:
|
||||||
self,
|
self,
|
||||||
request: Request,
|
request: Request,
|
||||||
command: str,
|
command: str,
|
||||||
frame: t.Union[DebugFrameSummary, _ConsoleFrame],
|
frame: DebugFrameSummary | _ConsoleFrame,
|
||||||
) -> Response:
|
) -> Response:
|
||||||
"""Execute a command in a console."""
|
"""Execute a command in a console."""
|
||||||
contexts = self.frame_contexts.get(id(frame), [])
|
contexts = self.frame_contexts.get(id(frame), [])
|
||||||
|
@ -410,7 +411,7 @@ class DebuggedApplication:
|
||||||
BytesIO(data), request.environ, download_name=filename, etag=etag
|
BytesIO(data), request.environ, download_name=filename, etag=etag
|
||||||
)
|
)
|
||||||
|
|
||||||
def check_pin_trust(self, environ: "WSGIEnvironment") -> t.Optional[bool]:
|
def check_pin_trust(self, environ: WSGIEnvironment) -> bool | None:
|
||||||
"""Checks if the request passed the pin test. This returns `True` if the
|
"""Checks if the request passed the pin test. This returns `True` if the
|
||||||
request is trusted on a pin/cookie basis and returns `False` if not.
|
request is trusted on a pin/cookie basis and returns `False` if not.
|
||||||
Additionally if the cookie's stored pin hash is wrong it will return
|
Additionally if the cookie's stored pin hash is wrong it will return
|
||||||
|
@ -497,7 +498,7 @@ class DebuggedApplication:
|
||||||
return Response("")
|
return Response("")
|
||||||
|
|
||||||
def __call__(
|
def __call__(
|
||||||
self, environ: "WSGIEnvironment", start_response: "StartResponse"
|
self, environ: WSGIEnvironment, start_response: StartResponse
|
||||||
) -> t.Iterable[bytes]:
|
) -> t.Iterable[bytes]:
|
||||||
"""Dispatch the requests."""
|
"""Dispatch the requests."""
|
||||||
# important: don't ever access a function here that reads the incoming
|
# important: don't ever access a function here that reads the incoming
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import code
|
import code
|
||||||
import sys
|
import sys
|
||||||
import typing as t
|
import typing as t
|
||||||
|
@ -10,10 +12,7 @@ from .repr import debug_repr
|
||||||
from .repr import dump
|
from .repr import dump
|
||||||
from .repr import helper
|
from .repr import helper
|
||||||
|
|
||||||
if t.TYPE_CHECKING:
|
_stream: ContextVar[HTMLStringO] = ContextVar("werkzeug.debug.console.stream")
|
||||||
import codeop # noqa: F401
|
|
||||||
|
|
||||||
_stream: ContextVar["HTMLStringO"] = ContextVar("werkzeug.debug.console.stream")
|
|
||||||
_ipy: ContextVar = ContextVar("werkzeug.debug.console.ipy")
|
_ipy: ContextVar = ContextVar("werkzeug.debug.console.ipy")
|
||||||
|
|
||||||
|
|
||||||
|
@ -21,7 +20,7 @@ class HTMLStringO:
|
||||||
"""A StringO version that HTML escapes on write."""
|
"""A StringO version that HTML escapes on write."""
|
||||||
|
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
self._buffer: t.List[str] = []
|
self._buffer: list[str] = []
|
||||||
|
|
||||||
def isatty(self) -> bool:
|
def isatty(self) -> bool:
|
||||||
return False
|
return False
|
||||||
|
@ -48,8 +47,6 @@ class HTMLStringO:
|
||||||
return val
|
return val
|
||||||
|
|
||||||
def _write(self, x: str) -> None:
|
def _write(self, x: str) -> None:
|
||||||
if isinstance(x, bytes):
|
|
||||||
x = x.decode("utf-8", "replace")
|
|
||||||
self._buffer.append(x)
|
self._buffer.append(x)
|
||||||
|
|
||||||
def write(self, x: str) -> None:
|
def write(self, x: str) -> None:
|
||||||
|
@ -94,7 +91,7 @@ class ThreadedStream:
|
||||||
def __setattr__(self, name: str, value: t.Any) -> None:
|
def __setattr__(self, name: str, value: t.Any) -> None:
|
||||||
raise AttributeError(f"read only attribute {name}")
|
raise AttributeError(f"read only attribute {name}")
|
||||||
|
|
||||||
def __dir__(self) -> t.List[str]:
|
def __dir__(self) -> list[str]:
|
||||||
return dir(sys.__stdout__)
|
return dir(sys.__stdout__)
|
||||||
|
|
||||||
def __getattribute__(self, name: str) -> t.Any:
|
def __getattribute__(self, name: str) -> t.Any:
|
||||||
|
@ -116,7 +113,7 @@ sys.displayhook = ThreadedStream.displayhook
|
||||||
|
|
||||||
class _ConsoleLoader:
|
class _ConsoleLoader:
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
self._storage: t.Dict[int, str] = {}
|
self._storage: dict[int, str] = {}
|
||||||
|
|
||||||
def register(self, code: CodeType, source: str) -> None:
|
def register(self, code: CodeType, source: str) -> None:
|
||||||
self._storage[id(code)] = source
|
self._storage[id(code)] = source
|
||||||
|
@ -125,7 +122,7 @@ class _ConsoleLoader:
|
||||||
if isinstance(var, CodeType):
|
if isinstance(var, CodeType):
|
||||||
self._storage[id(var)] = source
|
self._storage[id(var)] = source
|
||||||
|
|
||||||
def get_source_by_code(self, code: CodeType) -> t.Optional[str]:
|
def get_source_by_code(self, code: CodeType) -> str | None:
|
||||||
try:
|
try:
|
||||||
return self._storage[id(code)]
|
return self._storage[id(code)]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
|
@ -133,9 +130,9 @@ class _ConsoleLoader:
|
||||||
|
|
||||||
|
|
||||||
class _InteractiveConsole(code.InteractiveInterpreter):
|
class _InteractiveConsole(code.InteractiveInterpreter):
|
||||||
locals: t.Dict[str, t.Any]
|
locals: dict[str, t.Any]
|
||||||
|
|
||||||
def __init__(self, globals: t.Dict[str, t.Any], locals: t.Dict[str, t.Any]) -> None:
|
def __init__(self, globals: dict[str, t.Any], locals: dict[str, t.Any]) -> None:
|
||||||
self.loader = _ConsoleLoader()
|
self.loader = _ConsoleLoader()
|
||||||
locals = {
|
locals = {
|
||||||
**globals,
|
**globals,
|
||||||
|
@ -147,7 +144,7 @@ class _InteractiveConsole(code.InteractiveInterpreter):
|
||||||
super().__init__(locals)
|
super().__init__(locals)
|
||||||
original_compile = self.compile
|
original_compile = self.compile
|
||||||
|
|
||||||
def compile(source: str, filename: str, symbol: str) -> t.Optional[CodeType]:
|
def compile(source: str, filename: str, symbol: str) -> CodeType | None:
|
||||||
code = original_compile(source, filename, symbol)
|
code = original_compile(source, filename, symbol)
|
||||||
|
|
||||||
if code is not None:
|
if code is not None:
|
||||||
|
@ -157,7 +154,7 @@ class _InteractiveConsole(code.InteractiveInterpreter):
|
||||||
|
|
||||||
self.compile = compile # type: ignore[assignment]
|
self.compile = compile # type: ignore[assignment]
|
||||||
self.more = False
|
self.more = False
|
||||||
self.buffer: t.List[str] = []
|
self.buffer: list[str] = []
|
||||||
|
|
||||||
def runsource(self, source: str, **kwargs: t.Any) -> str: # type: ignore
|
def runsource(self, source: str, **kwargs: t.Any) -> str: # type: ignore
|
||||||
source = f"{source.rstrip()}\n"
|
source = f"{source.rstrip()}\n"
|
||||||
|
@ -188,7 +185,7 @@ class _InteractiveConsole(code.InteractiveInterpreter):
|
||||||
te = DebugTraceback(exc, skip=1)
|
te = DebugTraceback(exc, skip=1)
|
||||||
sys.stdout._write(te.render_traceback_html()) # type: ignore
|
sys.stdout._write(te.render_traceback_html()) # type: ignore
|
||||||
|
|
||||||
def showsyntaxerror(self, filename: t.Optional[str] = None) -> None:
|
def showsyntaxerror(self, filename: str | None = None) -> None:
|
||||||
from .tbtools import DebugTraceback
|
from .tbtools import DebugTraceback
|
||||||
|
|
||||||
exc = t.cast(BaseException, sys.exc_info()[1])
|
exc = t.cast(BaseException, sys.exc_info()[1])
|
||||||
|
@ -204,8 +201,8 @@ class Console:
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
globals: t.Optional[t.Dict[str, t.Any]] = None,
|
globals: dict[str, t.Any] | None = None,
|
||||||
locals: t.Optional[t.Dict[str, t.Any]] = None,
|
locals: dict[str, t.Any] | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
if locals is None:
|
if locals is None:
|
||||||
locals = {}
|
locals = {}
|
||||||
|
|
|
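Reviewer note, not part of the patch: HTMLStringO above is the escaping buffer behind the in-browser console; a quick sketch of the behaviour the diff touches (write() escapes, reset() drains the buffer):

    from werkzeug.debug.console import HTMLStringO

    buf = HTMLStringO()
    buf.write("<script>alert(1)</script>")  # stored HTML-escaped
    print(buf.reset())  # &lt;script&gt;alert(1)&lt;/script&gt; ; buffer is empty again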
@ -4,6 +4,8 @@ repr, these expose more information and produce HTML instead of ASCII.
|
||||||
Together with the CSS and JavaScript of the debugger this gives a
|
Together with the CSS and JavaScript of the debugger this gives a
|
||||||
colorful and more compact output.
|
colorful and more compact output.
|
||||||
"""
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import codecs
|
import codecs
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
@ -57,7 +59,7 @@ class _Helper:
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
return "Type help(object) for help about object."
|
return "Type help(object) for help about object."
|
||||||
|
|
||||||
def __call__(self, topic: t.Optional[t.Any] = None) -> None:
|
def __call__(self, topic: t.Any | None = None) -> None:
|
||||||
if topic is None:
|
if topic is None:
|
||||||
sys.stdout._write(f"<span class=help>{self!r}</span>") # type: ignore
|
sys.stdout._write(f"<span class=help>{self!r}</span>") # type: ignore
|
||||||
return
|
return
|
||||||
|
@ -65,8 +67,6 @@ class _Helper:
|
||||||
|
|
||||||
pydoc.help(topic)
|
pydoc.help(topic)
|
||||||
rv = sys.stdout.reset() # type: ignore
|
rv = sys.stdout.reset() # type: ignore
|
||||||
if isinstance(rv, bytes):
|
|
||||||
rv = rv.decode("utf-8", "ignore")
|
|
||||||
paragraphs = _paragraph_re.split(rv)
|
paragraphs = _paragraph_re.split(rv)
|
||||||
if len(paragraphs) > 1:
|
if len(paragraphs) > 1:
|
||||||
title = paragraphs[0]
|
title = paragraphs[0]
|
||||||
|
@ -81,7 +81,7 @@ helper = _Helper()
|
||||||
|
|
||||||
|
|
||||||
def _add_subclass_info(
|
def _add_subclass_info(
|
||||||
inner: str, obj: object, base: t.Union[t.Type, t.Tuple[t.Type, ...]]
|
inner: str, obj: object, base: t.Type | tuple[t.Type, ...]
|
||||||
) -> str:
|
) -> str:
|
||||||
if isinstance(base, tuple):
|
if isinstance(base, tuple):
|
||||||
for cls in base:
|
for cls in base:
|
||||||
|
@ -97,8 +97,8 @@ def _add_subclass_info(
|
||||||
|
|
||||||
def _sequence_repr_maker(
|
def _sequence_repr_maker(
|
||||||
left: str, right: str, base: t.Type, limit: int = 8
|
left: str, right: str, base: t.Type, limit: int = 8
|
||||||
) -> t.Callable[["DebugReprGenerator", t.Iterable, bool], str]:
|
) -> t.Callable[[DebugReprGenerator, t.Iterable, bool], str]:
|
||||||
def proxy(self: "DebugReprGenerator", obj: t.Iterable, recursive: bool) -> str:
|
def proxy(self: DebugReprGenerator, obj: t.Iterable, recursive: bool) -> str:
|
||||||
if recursive:
|
if recursive:
|
||||||
return _add_subclass_info(f"{left}...{right}", obj, base)
|
return _add_subclass_info(f"{left}...{right}", obj, base)
|
||||||
buf = [left]
|
buf = [left]
|
||||||
|
@ -120,7 +120,7 @@ def _sequence_repr_maker(
|
||||||
|
|
||||||
class DebugReprGenerator:
|
class DebugReprGenerator:
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
self._stack: t.List[t.Any] = []
|
self._stack: list[t.Any] = []
|
||||||
|
|
||||||
list_repr = _sequence_repr_maker("[", "]", list)
|
list_repr = _sequence_repr_maker("[", "]", list)
|
||||||
tuple_repr = _sequence_repr_maker("(", ")", tuple)
|
tuple_repr = _sequence_repr_maker("(", ")", tuple)
|
||||||
|
@ -132,11 +132,11 @@ class DebugReprGenerator:
|
||||||
|
|
||||||
def regex_repr(self, obj: t.Pattern) -> str:
|
def regex_repr(self, obj: t.Pattern) -> str:
|
||||||
pattern = repr(obj.pattern)
|
pattern = repr(obj.pattern)
|
||||||
pattern = codecs.decode(pattern, "unicode-escape", "ignore") # type: ignore
|
pattern = codecs.decode(pattern, "unicode-escape", "ignore")
|
||||||
pattern = f"r{pattern}"
|
pattern = f"r{pattern}"
|
||||||
return f're.compile(<span class="string regex">{pattern}</span>)'
|
return f're.compile(<span class="string regex">{pattern}</span>)'
|
||||||
|
|
||||||
def string_repr(self, obj: t.Union[str, bytes], limit: int = 70) -> str:
|
def string_repr(self, obj: str | bytes, limit: int = 70) -> str:
|
||||||
buf = ['<span class="string">']
|
buf = ['<span class="string">']
|
||||||
r = repr(obj)
|
r = repr(obj)
|
||||||
|
|
||||||
|
@ -165,7 +165,7 @@ class DebugReprGenerator:
|
||||||
|
|
||||||
def dict_repr(
|
def dict_repr(
|
||||||
self,
|
self,
|
||||||
d: t.Union[t.Dict[int, None], t.Dict[str, int], t.Dict[t.Union[str, int], int]],
|
d: dict[int, None] | dict[str, int] | dict[str | int, int],
|
||||||
recursive: bool,
|
recursive: bool,
|
||||||
limit: int = 5,
|
limit: int = 5,
|
||||||
) -> str:
|
) -> str:
|
||||||
|
@ -188,9 +188,7 @@ class DebugReprGenerator:
|
||||||
buf.append("}")
|
buf.append("}")
|
||||||
return _add_subclass_info("".join(buf), d, dict)
|
return _add_subclass_info("".join(buf), d, dict)
|
||||||
|
|
||||||
def object_repr(
|
def object_repr(self, obj: type[dict] | t.Callable | type[list] | None) -> str:
|
||||||
self, obj: t.Optional[t.Union[t.Type[dict], t.Callable, t.Type[list]]]
|
|
||||||
) -> str:
|
|
||||||
r = repr(obj)
|
r = repr(obj)
|
||||||
return f'<span class="object">{escape(r)}</span>'
|
return f'<span class="object">{escape(r)}</span>'
|
||||||
|
|
||||||
|
@ -244,7 +242,7 @@ class DebugReprGenerator:
|
||||||
|
|
||||||
def dump_object(self, obj: object) -> str:
|
def dump_object(self, obj: object) -> str:
|
||||||
repr = None
|
repr = None
|
||||||
items: t.Optional[t.List[t.Tuple[str, str]]] = None
|
items: list[tuple[str, str]] | None = None
|
||||||
|
|
||||||
if isinstance(obj, dict):
|
if isinstance(obj, dict):
|
||||||
title = "Contents of"
|
title = "Contents of"
|
||||||
|
@ -266,12 +264,12 @@ class DebugReprGenerator:
|
||||||
title += f" {object.__repr__(obj)[1:-1]}"
|
title += f" {object.__repr__(obj)[1:-1]}"
|
||||||
return self.render_object_dump(items, title, repr)
|
return self.render_object_dump(items, title, repr)
|
||||||
|
|
||||||
def dump_locals(self, d: t.Dict[str, t.Any]) -> str:
|
def dump_locals(self, d: dict[str, t.Any]) -> str:
|
||||||
items = [(key, self.repr(value)) for key, value in d.items()]
|
items = [(key, self.repr(value)) for key, value in d.items()]
|
||||||
return self.render_object_dump(items, "Local variables in frame")
|
return self.render_object_dump(items, "Local variables in frame")
|
||||||
|
|
||||||
def render_object_dump(
|
def render_object_dump(
|
||||||
self, items: t.List[t.Tuple[str, str]], title: str, repr: t.Optional[str] = None
|
self, items: list[tuple[str, str]], title: str, repr: str | None = None
|
||||||
) -> str:
|
) -> str:
|
||||||
html_items = []
|
html_items = []
|
||||||
for key, value in items:
|
for key, value in items:
|
||||||
|
|
|
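Reviewer note, not part of the patch: debug_repr() from the repr module above produces HTML markup rather than plain text, which is what the debugger frontend renders:

    from werkzeug.debug.repr import debug_repr

    print(debug_repr([1, 2, 3]))         # items wrapped in <span class=...> elements
    print(debug_repr({"key": "value"}))  # same idea for dict keys and values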
@@ -305,7 +305,8 @@ function handleConsoleSubmit(e, command, frameID) {
       wrapperSpan.append(spanToWrap);
       spanToWrap.hidden = true;

-      expansionButton.addEventListener("click", () => {
+      expansionButton.addEventListener("click", (event) => {
+        event.preventDefault();
         spanToWrap.hidden = !spanToWrap.hidden;
         expansionButton.classList.toggle("open");
         return false;
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import itertools
|
import itertools
|
||||||
import linecache
|
import linecache
|
||||||
import os
|
import os
|
||||||
|
@ -123,7 +125,7 @@ FRAME_HTML = """\
|
||||||
|
|
||||||
def _process_traceback(
|
def _process_traceback(
|
||||||
exc: BaseException,
|
exc: BaseException,
|
||||||
te: t.Optional[traceback.TracebackException] = None,
|
te: traceback.TracebackException | None = None,
|
||||||
*,
|
*,
|
||||||
skip: int = 0,
|
skip: int = 0,
|
||||||
hide: bool = True,
|
hide: bool = True,
|
||||||
|
@ -146,7 +148,7 @@ def _process_traceback(
|
||||||
frame_gen = itertools.islice(frame_gen, skip, None)
|
frame_gen = itertools.islice(frame_gen, skip, None)
|
||||||
del te.stack[:skip]
|
del te.stack[:skip]
|
||||||
|
|
||||||
new_stack: t.List[DebugFrameSummary] = []
|
new_stack: list[DebugFrameSummary] = []
|
||||||
hidden = False
|
hidden = False
|
||||||
|
|
||||||
# Match each frame with the FrameSummary that was generated.
|
# Match each frame with the FrameSummary that was generated.
|
||||||
|
@ -175,7 +177,7 @@ def _process_traceback(
|
||||||
elif hide_value or hidden:
|
elif hide_value or hidden:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
frame_args: t.Dict[str, t.Any] = {
|
frame_args: dict[str, t.Any] = {
|
||||||
"filename": fs.filename,
|
"filename": fs.filename,
|
||||||
"lineno": fs.lineno,
|
"lineno": fs.lineno,
|
||||||
"name": fs.name,
|
"name": fs.name,
|
||||||
|
@ -184,7 +186,7 @@ def _process_traceback(
|
||||||
}
|
}
|
||||||
|
|
||||||
if hasattr(fs, "colno"):
|
if hasattr(fs, "colno"):
|
||||||
frame_args["colno"] = fs.colno # type: ignore[attr-defined]
|
frame_args["colno"] = fs.colno
|
||||||
frame_args["end_colno"] = fs.end_colno # type: ignore[attr-defined]
|
frame_args["end_colno"] = fs.end_colno # type: ignore[attr-defined]
|
||||||
|
|
||||||
new_stack.append(DebugFrameSummary(**frame_args))
|
new_stack.append(DebugFrameSummary(**frame_args))
|
||||||
|
@ -221,7 +223,7 @@ class DebugTraceback:
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
exc: BaseException,
|
exc: BaseException,
|
||||||
te: t.Optional[traceback.TracebackException] = None,
|
te: traceback.TracebackException | None = None,
|
||||||
*,
|
*,
|
||||||
skip: int = 0,
|
skip: int = 0,
|
||||||
hide: bool = True,
|
hide: bool = True,
|
||||||
|
@ -234,7 +236,7 @@ class DebugTraceback:
|
||||||
@cached_property
|
@cached_property
|
||||||
def all_tracebacks(
|
def all_tracebacks(
|
||||||
self,
|
self,
|
||||||
) -> t.List[t.Tuple[t.Optional[str], traceback.TracebackException]]:
|
) -> list[tuple[str | None, traceback.TracebackException]]:
|
||||||
out = []
|
out = []
|
||||||
current = self._te
|
current = self._te
|
||||||
|
|
||||||
|
@ -261,7 +263,7 @@ class DebugTraceback:
|
||||||
return out
|
return out
|
||||||
|
|
||||||
@cached_property
|
@cached_property
|
||||||
def all_frames(self) -> t.List["DebugFrameSummary"]:
|
def all_frames(self) -> list[DebugFrameSummary]:
|
||||||
return [
|
return [
|
||||||
f for _, te in self.all_tracebacks for f in te.stack # type: ignore[misc]
|
f for _, te in self.all_tracebacks for f in te.stack # type: ignore[misc]
|
||||||
]
|
]
|
||||||
|
@ -325,7 +327,7 @@ class DebugTraceback:
|
||||||
"evalex": "true" if evalex else "false",
|
"evalex": "true" if evalex else "false",
|
||||||
"evalex_trusted": "true" if evalex_trusted else "false",
|
"evalex_trusted": "true" if evalex_trusted else "false",
|
||||||
"console": "false",
|
"console": "false",
|
||||||
"title": exc_lines[0],
|
"title": escape(exc_lines[0]),
|
||||||
"exception": escape("".join(exc_lines)),
|
"exception": escape("".join(exc_lines)),
|
||||||
"exception_type": escape(self._te.exc_type.__name__),
|
"exception_type": escape(self._te.exc_type.__name__),
|
||||||
"summary": self.render_traceback_html(include_title=False),
|
"summary": self.render_traceback_html(include_title=False),
|
||||||
|
@ -351,8 +353,8 @@ class DebugFrameSummary(traceback.FrameSummary):
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
*,
|
*,
|
||||||
locals: t.Dict[str, t.Any],
|
locals: dict[str, t.Any],
|
||||||
globals: t.Dict[str, t.Any],
|
globals: dict[str, t.Any],
|
||||||
**kwargs: t.Any,
|
**kwargs: t.Any,
|
||||||
) -> None:
|
) -> None:
|
||||||
super().__init__(locals=None, **kwargs)
|
super().__init__(locals=None, **kwargs)
|
||||||
|
@ -360,7 +362,7 @@ class DebugFrameSummary(traceback.FrameSummary):
|
||||||
self.global_ns = globals
|
self.global_ns = globals
|
||||||
|
|
||||||
@cached_property
|
@cached_property
|
||||||
def info(self) -> t.Optional[str]:
|
def info(self) -> str | None:
|
||||||
return self.local_ns.get("__traceback_info__")
|
return self.local_ns.get("__traceback_info__")
|
||||||
|
|
||||||
@cached_property
|
@cached_property
|
||||||
|
|
|
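Reviewer note, not part of the patch: a sketch of DebugTraceback as used by the debugger internals above; the class is internal, so treat anything beyond what the diff shows as an assumption.

    from werkzeug.debug.tbtools import DebugTraceback

    try:
        {}["missing"]
    except KeyError as exc:
        dt = DebugTraceback(exc)
        # render_traceback_html() is the fragment embedded in the debugger page.
        print(dt.render_traceback_html()[:80], "...")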
@ -43,6 +43,8 @@ code, you can add a second except for a specific subclass of an error:
|
||||||
return e
|
return e
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import typing as t
|
import typing as t
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
|
@ -52,13 +54,12 @@ from markupsafe import Markup
|
||||||
from ._internal import _get_environ
|
from ._internal import _get_environ
|
||||||
|
|
||||||
if t.TYPE_CHECKING:
|
if t.TYPE_CHECKING:
|
||||||
import typing_extensions as te
|
|
||||||
from _typeshed.wsgi import StartResponse
|
from _typeshed.wsgi import StartResponse
|
||||||
from _typeshed.wsgi import WSGIEnvironment
|
from _typeshed.wsgi import WSGIEnvironment
|
||||||
from .datastructures import WWWAuthenticate
|
from .datastructures import WWWAuthenticate
|
||||||
from .sansio.response import Response
|
from .sansio.response import Response
|
||||||
from .wrappers.request import Request as WSGIRequest # noqa: F401
|
from .wrappers.request import Request as WSGIRequest
|
||||||
from .wrappers.response import Response as WSGIResponse # noqa: F401
|
from .wrappers.response import Response as WSGIResponse
|
||||||
|
|
||||||
|
|
||||||
class HTTPException(Exception):
|
class HTTPException(Exception):
|
||||||
|
@ -70,13 +71,13 @@ class HTTPException(Exception):
|
||||||
Removed the ``wrap`` class method.
|
Removed the ``wrap`` class method.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
code: t.Optional[int] = None
|
code: int | None = None
|
||||||
description: t.Optional[str] = None
|
description: str | None = None
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
description: t.Optional[str] = None,
|
description: str | None = None,
|
||||||
response: t.Optional["Response"] = None,
|
response: Response | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
super().__init__()
|
super().__init__()
|
||||||
if description is not None:
|
if description is not None:
|
||||||
|
@ -92,14 +93,12 @@ class HTTPException(Exception):
|
||||||
|
|
||||||
def get_description(
|
def get_description(
|
||||||
self,
|
self,
|
||||||
environ: t.Optional["WSGIEnvironment"] = None,
|
environ: WSGIEnvironment | None = None,
|
||||||
scope: t.Optional[dict] = None,
|
scope: dict | None = None,
|
||||||
) -> str:
|
) -> str:
|
||||||
"""Get the description."""
|
"""Get the description."""
|
||||||
if self.description is None:
|
if self.description is None:
|
||||||
description = ""
|
description = ""
|
||||||
elif not isinstance(self.description, str):
|
|
||||||
description = str(self.description)
|
|
||||||
else:
|
else:
|
||||||
description = self.description
|
description = self.description
|
||||||
|
|
||||||
|
@ -108,8 +107,8 @@ class HTTPException(Exception):
|
||||||
|
|
||||||
def get_body(
|
def get_body(
|
||||||
self,
|
self,
|
||||||
environ: t.Optional["WSGIEnvironment"] = None,
|
environ: WSGIEnvironment | None = None,
|
||||||
scope: t.Optional[dict] = None,
|
scope: dict | None = None,
|
||||||
) -> str:
|
) -> str:
|
||||||
"""Get the HTML body."""
|
"""Get the HTML body."""
|
||||||
return (
|
return (
|
||||||
|
@ -122,17 +121,17 @@ class HTTPException(Exception):
|
||||||
|
|
||||||
def get_headers(
|
def get_headers(
|
||||||
self,
|
self,
|
||||||
environ: t.Optional["WSGIEnvironment"] = None,
|
environ: WSGIEnvironment | None = None,
|
||||||
scope: t.Optional[dict] = None,
|
scope: dict | None = None,
|
||||||
) -> t.List[t.Tuple[str, str]]:
|
) -> list[tuple[str, str]]:
|
||||||
"""Get a list of headers."""
|
"""Get a list of headers."""
|
||||||
return [("Content-Type", "text/html; charset=utf-8")]
|
return [("Content-Type", "text/html; charset=utf-8")]
|
||||||
|
|
||||||
def get_response(
|
def get_response(
|
||||||
self,
|
self,
|
||||||
environ: t.Optional[t.Union["WSGIEnvironment", "WSGIRequest"]] = None,
|
environ: WSGIEnvironment | WSGIRequest | None = None,
|
||||||
scope: t.Optional[dict] = None,
|
scope: dict | None = None,
|
||||||
) -> "Response":
|
) -> Response:
|
||||||
"""Get a response object. If one was passed to the exception
|
"""Get a response object. If one was passed to the exception
|
||||||
it's returned directly.
|
it's returned directly.
|
||||||
|
|
||||||
|
@ -151,7 +150,7 @@ class HTTPException(Exception):
|
||||||
return WSGIResponse(self.get_body(environ, scope), self.code, headers)
|
return WSGIResponse(self.get_body(environ, scope), self.code, headers)
|
||||||
|
|
||||||
def __call__(
|
def __call__(
|
||||||
self, environ: "WSGIEnvironment", start_response: "StartResponse"
|
self, environ: WSGIEnvironment, start_response: StartResponse
|
||||||
) -> t.Iterable[bytes]:
|
) -> t.Iterable[bytes]:
|
||||||
"""Call the exception as WSGI application.
|
"""Call the exception as WSGI application.
|
||||||
|
|
||||||
|
@ -196,7 +195,7 @@ class BadRequestKeyError(BadRequest, KeyError):
|
||||||
#: useful in a debug mode.
|
#: useful in a debug mode.
|
||||||
show_exception = False
|
show_exception = False
|
||||||
|
|
||||||
def __init__(self, arg: t.Optional[str] = None, *args: t.Any, **kwargs: t.Any):
|
def __init__(self, arg: str | None = None, *args: t.Any, **kwargs: t.Any):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
if arg is None:
|
if arg is None:
|
||||||
|
@ -205,7 +204,7 @@ class BadRequestKeyError(BadRequest, KeyError):
|
||||||
KeyError.__init__(self, arg)
|
KeyError.__init__(self, arg)
|
||||||
|
|
||||||
@property # type: ignore
|
@property # type: ignore
|
||||||
def description(self) -> str: # type: ignore
|
def description(self) -> str:
|
||||||
if self.show_exception:
|
if self.show_exception:
|
||||||
return (
|
return (
|
||||||
f"{self._description}\n"
|
f"{self._description}\n"
|
||||||
|
@ -297,11 +296,9 @@ class Unauthorized(HTTPException):
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
description: t.Optional[str] = None,
|
description: str | None = None,
|
||||||
response: t.Optional["Response"] = None,
|
response: Response | None = None,
|
||||||
www_authenticate: t.Optional[
|
www_authenticate: None | (WWWAuthenticate | t.Iterable[WWWAuthenticate]) = None,
|
||||||
t.Union["WWWAuthenticate", t.Iterable["WWWAuthenticate"]]
|
|
||||||
] = None,
|
|
||||||
) -> None:
|
) -> None:
|
||||||
super().__init__(description, response)
|
super().__init__(description, response)
|
||||||
|
|
||||||
|
@ -314,9 +311,9 @@ class Unauthorized(HTTPException):
|
||||||
|
|
||||||
def get_headers(
|
def get_headers(
|
||||||
self,
|
self,
|
||||||
environ: t.Optional["WSGIEnvironment"] = None,
|
environ: WSGIEnvironment | None = None,
|
||||||
scope: t.Optional[dict] = None,
|
scope: dict | None = None,
|
||||||
) -> t.List[t.Tuple[str, str]]:
|
) -> list[tuple[str, str]]:
|
||||||
headers = super().get_headers(environ, scope)
|
headers = super().get_headers(environ, scope)
|
||||||
if self.www_authenticate:
|
if self.www_authenticate:
|
||||||
headers.extend(("WWW-Authenticate", str(x)) for x in self.www_authenticate)
|
headers.extend(("WWW-Authenticate", str(x)) for x in self.www_authenticate)
|
||||||
|
@ -367,9 +364,9 @@ class MethodNotAllowed(HTTPException):
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
valid_methods: t.Optional[t.Iterable[str]] = None,
|
valid_methods: t.Iterable[str] | None = None,
|
||||||
description: t.Optional[str] = None,
|
description: str | None = None,
|
||||||
response: t.Optional["Response"] = None,
|
response: Response | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Takes an optional list of valid http methods
|
"""Takes an optional list of valid http methods
|
||||||
starting with werkzeug 0.3 the list will be mandatory."""
|
starting with werkzeug 0.3 the list will be mandatory."""
|
||||||
|
@ -378,9 +375,9 @@ class MethodNotAllowed(HTTPException):
|
||||||
|
|
||||||
def get_headers(
|
def get_headers(
|
||||||
self,
|
self,
|
||||||
environ: t.Optional["WSGIEnvironment"] = None,
|
environ: WSGIEnvironment | None = None,
|
||||||
scope: t.Optional[dict] = None,
|
scope: dict | None = None,
|
||||||
) -> t.List[t.Tuple[str, str]]:
|
) -> list[tuple[str, str]]:
|
||||||
headers = super().get_headers(environ, scope)
|
headers = super().get_headers(environ, scope)
|
||||||
if self.valid_methods:
|
if self.valid_methods:
|
||||||
headers.append(("Allow", ", ".join(self.valid_methods)))
|
headers.append(("Allow", ", ".join(self.valid_methods)))
|
||||||
|
@ -524,10 +521,10 @@ class RequestedRangeNotSatisfiable(HTTPException):
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
length: t.Optional[int] = None,
|
length: int | None = None,
|
||||||
units: str = "bytes",
|
units: str = "bytes",
|
||||||
description: t.Optional[str] = None,
|
description: str | None = None,
|
||||||
response: t.Optional["Response"] = None,
|
response: Response | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Takes an optional `Content-Range` header value based on ``length``
|
"""Takes an optional `Content-Range` header value based on ``length``
|
||||||
parameter.
|
parameter.
|
||||||
|
@ -538,9 +535,9 @@ class RequestedRangeNotSatisfiable(HTTPException):
|
||||||
|
|
||||||
def get_headers(
|
def get_headers(
|
||||||
self,
|
self,
|
||||||
environ: t.Optional["WSGIEnvironment"] = None,
|
environ: WSGIEnvironment | None = None,
|
||||||
scope: t.Optional[dict] = None,
|
scope: dict | None = None,
|
||||||
) -> t.List[t.Tuple[str, str]]:
|
) -> list[tuple[str, str]]:
|
||||||
headers = super().get_headers(environ, scope)
|
headers = super().get_headers(environ, scope)
|
||||||
if self.length is not None:
|
if self.length is not None:
|
||||||
headers.append(("Content-Range", f"{self.units} */{self.length}"))
|
headers.append(("Content-Range", f"{self.units} */{self.length}"))
|
||||||
|
@ -638,18 +635,18 @@ class _RetryAfter(HTTPException):
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
description: t.Optional[str] = None,
|
description: str | None = None,
|
||||||
response: t.Optional["Response"] = None,
|
response: Response | None = None,
|
||||||
retry_after: t.Optional[t.Union[datetime, int]] = None,
|
retry_after: datetime | int | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
super().__init__(description, response)
|
super().__init__(description, response)
|
||||||
self.retry_after = retry_after
|
self.retry_after = retry_after
|
||||||
|
|
||||||
def get_headers(
|
def get_headers(
|
||||||
self,
|
self,
|
||||||
environ: t.Optional["WSGIEnvironment"] = None,
|
environ: WSGIEnvironment | None = None,
|
||||||
scope: t.Optional[dict] = None,
|
scope: dict | None = None,
|
||||||
) -> t.List[t.Tuple[str, str]]:
|
) -> list[tuple[str, str]]:
|
||||||
headers = super().get_headers(environ, scope)
|
headers = super().get_headers(environ, scope)
|
||||||
|
|
||||||
if self.retry_after:
|
if self.retry_after:
|
||||||
|
@ -728,9 +725,9 @@ class InternalServerError(HTTPException):
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
description: t.Optional[str] = None,
|
description: str | None = None,
|
||||||
response: t.Optional["Response"] = None,
|
response: Response | None = None,
|
||||||
original_exception: t.Optional[BaseException] = None,
|
original_exception: BaseException | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
#: The original exception that caused this 500 error. Can be
|
#: The original exception that caused this 500 error. Can be
|
||||||
#: used by frameworks to provide context when handling
|
#: used by frameworks to provide context when handling
|
||||||
|
@ -809,7 +806,7 @@ class HTTPVersionNotSupported(HTTPException):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
default_exceptions: t.Dict[int, t.Type[HTTPException]] = {}
|
default_exceptions: dict[int, type[HTTPException]] = {}
|
||||||
|
|
||||||
|
|
||||||
def _find_exceptions() -> None:
|
def _find_exceptions() -> None:
|
||||||
|
@ -841,8 +838,8 @@ class Aborter:
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
mapping: t.Optional[t.Dict[int, t.Type[HTTPException]]] = None,
|
mapping: dict[int, type[HTTPException]] | None = None,
|
||||||
extra: t.Optional[t.Dict[int, t.Type[HTTPException]]] = None,
|
extra: dict[int, type[HTTPException]] | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
if mapping is None:
|
if mapping is None:
|
||||||
mapping = default_exceptions
|
mapping = default_exceptions
|
||||||
|
@ -851,8 +848,8 @@ class Aborter:
|
||||||
self.mapping.update(extra)
|
self.mapping.update(extra)
|
||||||
|
|
||||||
def __call__(
|
def __call__(
|
||||||
self, code: t.Union[int, "Response"], *args: t.Any, **kwargs: t.Any
|
self, code: int | Response, *args: t.Any, **kwargs: t.Any
|
||||||
) -> "te.NoReturn":
|
) -> t.NoReturn:
|
||||||
from .sansio.response import Response
|
from .sansio.response import Response
|
||||||
|
|
||||||
if isinstance(code, Response):
|
if isinstance(code, Response):
|
||||||
|
@ -864,9 +861,7 @@ class Aborter:
|
||||||
raise self.mapping[code](*args, **kwargs)
|
raise self.mapping[code](*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
def abort(
|
def abort(status: int | Response, *args: t.Any, **kwargs: t.Any) -> t.NoReturn:
|
||||||
status: t.Union[int, "Response"], *args: t.Any, **kwargs: t.Any
|
|
||||||
) -> "te.NoReturn":
|
|
||||||
"""Raises an :py:exc:`HTTPException` for the given status code or WSGI
|
"""Raises an :py:exc:`HTTPException` for the given status code or WSGI
|
||||||
application.
|
application.
|
||||||
|
|
||||||
|
|
|
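Reviewer note, not part of the patch: abort() and the default Aborter raise the HTTPException subclasses defined in this module, for example:

    from werkzeug.exceptions import HTTPException, abort

    def load(record):
        if record is None:
            abort(404)  # raises NotFound through the default Aborter mapping
        return record

    try:
        load(None)
    except HTTPException as e:
        print(e.code, e.name)  # 404 Not Found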
@ -1,13 +1,14 @@
|
||||||
import typing as t
|
from __future__ import annotations
|
||||||
from functools import update_wrapper
|
|
||||||
from io import BytesIO
|
|
||||||
from itertools import chain
|
|
||||||
from typing import Union
|
|
||||||
|
|
||||||
from . import exceptions
|
import typing as t
|
||||||
|
from io import BytesIO
|
||||||
|
from urllib.parse import parse_qsl
|
||||||
|
|
||||||
|
from ._internal import _plain_int
|
||||||
from .datastructures import FileStorage
|
from .datastructures import FileStorage
|
||||||
from .datastructures import Headers
|
from .datastructures import Headers
|
||||||
from .datastructures import MultiDict
|
from .datastructures import MultiDict
|
||||||
|
from .exceptions import RequestEntityTooLarge
|
||||||
from .http import parse_options_header
|
from .http import parse_options_header
|
||||||
from .sansio.multipart import Data
|
from .sansio.multipart import Data
|
||||||
from .sansio.multipart import Epilogue
|
from .sansio.multipart import Epilogue
|
||||||
|
@ -15,8 +16,6 @@ from .sansio.multipart import Field
|
||||||
from .sansio.multipart import File
|
from .sansio.multipart import File
|
||||||
from .sansio.multipart import MultipartDecoder
|
from .sansio.multipart import MultipartDecoder
|
||||||
from .sansio.multipart import NeedData
|
from .sansio.multipart import NeedData
|
||||||
from .urls import url_decode_stream
|
|
||||||
from .wsgi import _make_chunk_iter
|
|
||||||
from .wsgi import get_content_length
|
from .wsgi import get_content_length
|
||||||
from .wsgi import get_input_stream
|
from .wsgi import get_input_stream
|
||||||
|
|
||||||
|
@ -38,10 +37,10 @@ if t.TYPE_CHECKING:
|
||||||
class TStreamFactory(te.Protocol):
|
class TStreamFactory(te.Protocol):
|
||||||
def __call__(
|
def __call__(
|
||||||
self,
|
self,
|
||||||
total_content_length: t.Optional[int],
|
total_content_length: int | None,
|
||||||
content_type: t.Optional[str],
|
content_type: str | None,
|
||||||
filename: t.Optional[str],
|
filename: str | None,
|
||||||
content_length: t.Optional[int] = None,
|
content_length: int | None = None,
|
||||||
) -> t.IO[bytes]:
|
) -> t.IO[bytes]:
|
||||||
...
|
...
|
||||||
|
|
||||||
|
@ -49,17 +48,11 @@ if t.TYPE_CHECKING:
|
||||||
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
|
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
|
||||||
|
|
||||||
|
|
||||||
def _exhaust(stream: t.IO[bytes]) -> None:
|
|
||||||
bts = stream.read(64 * 1024)
|
|
||||||
while bts:
|
|
||||||
bts = stream.read(64 * 1024)
|
|
||||||
|
|
||||||
|
|
||||||
def default_stream_factory(
|
def default_stream_factory(
|
||||||
total_content_length: t.Optional[int],
|
total_content_length: int | None,
|
||||||
content_type: t.Optional[str],
|
content_type: str | None,
|
||||||
filename: t.Optional[str],
|
filename: str | None,
|
||||||
content_length: t.Optional[int] = None,
|
content_length: int | None = None,
|
||||||
) -> t.IO[bytes]:
|
) -> t.IO[bytes]:
|
||||||
max_size = 1024 * 500
|
max_size = 1024 * 500
|
||||||
|
|
||||||
|
@ -72,15 +65,15 @@ def default_stream_factory(
|
||||||
|
|
||||||
|
|
||||||
def parse_form_data(
|
def parse_form_data(
|
||||||
environ: "WSGIEnvironment",
|
environ: WSGIEnvironment,
|
||||||
stream_factory: t.Optional["TStreamFactory"] = None,
|
stream_factory: TStreamFactory | None = None,
|
||||||
charset: str = "utf-8",
|
max_form_memory_size: int | None = None,
|
||||||
errors: str = "replace",
|
max_content_length: int | None = None,
|
||||||
max_form_memory_size: t.Optional[int] = None,
|
cls: type[MultiDict] | None = None,
|
||||||
max_content_length: t.Optional[int] = None,
|
|
||||||
cls: t.Optional[t.Type[MultiDict]] = None,
|
|
||||||
silent: bool = True,
|
silent: bool = True,
|
||||||
) -> "t_parse_result":
|
*,
|
||||||
|
max_form_parts: int | None = None,
|
||||||
|
) -> t_parse_result:
|
||||||
"""Parse the form data in the environ and return it as tuple in the form
|
"""Parse the form data in the environ and return it as tuple in the form
|
||||||
``(stream, form, files)``. You should only call this method if the
|
``(stream, form, files)``. You should only call this method if the
|
||||||
transport method is `POST`, `PUT`, or `PATCH`.
|
transport method is `POST`, `PUT`, or `PATCH`.
|
||||||
|
@ -92,21 +85,10 @@ def parse_form_data(
|
||||||
|
|
||||||
This is a shortcut for the common usage of :class:`FormDataParser`.
|
This is a shortcut for the common usage of :class:`FormDataParser`.
|
||||||
|
|
||||||
Have a look at :doc:`/request_data` for more details.
|
|
||||||
|
|
||||||
.. versionadded:: 0.5
|
|
||||||
The `max_form_memory_size`, `max_content_length` and
|
|
||||||
`cls` parameters were added.
|
|
||||||
|
|
||||||
.. versionadded:: 0.5.1
|
|
||||||
The optional `silent` flag was added.
|
|
||||||
|
|
||||||
:param environ: the WSGI environment to be used for parsing.
|
:param environ: the WSGI environment to be used for parsing.
|
||||||
:param stream_factory: An optional callable that returns a new read and
|
:param stream_factory: An optional callable that returns a new read and
|
||||||
writeable file descriptor. This callable works
|
writeable file descriptor. This callable works
|
||||||
the same as :meth:`Response._get_file_stream`.
|
the same as :meth:`Response._get_file_stream`.
|
||||||
:param charset: The character set for URL and url encoded form data.
|
|
||||||
:param errors: The encoding error behavior.
|
|
||||||
:param max_form_memory_size: the maximum number of bytes to be accepted for
|
:param max_form_memory_size: the maximum number of bytes to be accepted for
|
||||||
in-memory stored form data. If the data
|
in-memory stored form data. If the data
|
||||||
exceeds the value specified an
|
exceeds the value specified an
|
||||||
|
@ -119,40 +101,33 @@ def parse_form_data(
|
||||||
:param cls: an optional dict class to use. If this is not specified
|
:param cls: an optional dict class to use. If this is not specified
|
||||||
or `None` the default :class:`MultiDict` is used.
|
or `None` the default :class:`MultiDict` is used.
|
||||||
:param silent: If set to False parsing errors will not be caught.
|
:param silent: If set to False parsing errors will not be caught.
|
||||||
|
:param max_form_parts: The maximum number of multipart parts to be parsed. If this
|
||||||
|
is exceeded, a :exc:`~exceptions.RequestEntityTooLarge` exception is raised.
|
||||||
:return: A tuple in the form ``(stream, form, files)``.
|
:return: A tuple in the form ``(stream, form, files)``.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.0
|
||||||
|
The ``charset`` and ``errors`` parameters were removed.
|
||||||
|
|
||||||
|
.. versionchanged:: 2.3
|
||||||
|
Added the ``max_form_parts`` parameter.
|
||||||
|
|
||||||
|
.. versionadded:: 0.5.1
|
||||||
|
Added the ``silent`` parameter.
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
|
Added the ``max_form_memory_size``, ``max_content_length``, and ``cls``
|
||||||
|
parameters.
|
||||||
"""
|
"""
|
||||||
return FormDataParser(
|
return FormDataParser(
|
||||||
stream_factory,
|
stream_factory=stream_factory,
|
||||||
charset,
|
max_form_memory_size=max_form_memory_size,
|
||||||
errors,
|
max_content_length=max_content_length,
|
||||||
max_form_memory_size,
|
max_form_parts=max_form_parts,
|
||||||
max_content_length,
|
silent=silent,
|
||||||
cls,
|
cls=cls,
|
||||||
silent,
|
|
||||||
).parse_from_environ(environ)
|
).parse_from_environ(environ)
|
||||||
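A minimal usage sketch for the reworked ``parse_form_data`` signature above; the size limits and the toy WSGI callable are assumptions for illustration, not part of the diff.

from werkzeug.formparser import parse_form_data

def upload_app(environ, start_response):
    # Limits are keyword arguments in the new signature; the values are examples.
    stream, form, files = parse_form_data(
        environ,
        max_content_length=16 * 1024 * 1024,  # reject request bodies over 16 MiB
        max_form_parts=1000,                  # cap the number of multipart parts
    )
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [f"{len(form)} fields, {len(files)} files".encode()]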
|
|
||||||
|
|
||||||
def exhaust_stream(f: F) -> F:
|
|
||||||
"""Helper decorator for methods that exhausts the stream on return."""
|
|
||||||
|
|
||||||
def wrapper(self, stream, *args, **kwargs): # type: ignore
|
|
||||||
try:
|
|
||||||
return f(self, stream, *args, **kwargs)
|
|
||||||
finally:
|
|
||||||
exhaust = getattr(stream, "exhaust", None)
|
|
||||||
|
|
||||||
if exhaust is not None:
|
|
||||||
exhaust()
|
|
||||||
else:
|
|
||||||
while True:
|
|
||||||
chunk = stream.read(1024 * 64)
|
|
||||||
|
|
||||||
if not chunk:
|
|
||||||
break
|
|
||||||
|
|
||||||
return update_wrapper(t.cast(F, wrapper), f)
|
|
||||||
|
|
||||||
|
|
||||||
class FormDataParser:
|
class FormDataParser:
|
||||||
"""This class implements parsing of form data for Werkzeug. By itself
|
"""This class implements parsing of form data for Werkzeug. By itself
|
||||||
it can parse multipart and url encoded form data. It can be subclassed
|
it can parse multipart and url encoded form data. It can be subclassed
|
||||||
|
@ -160,13 +135,9 @@ class FormDataParser:
|
||||||
untouched stream and expose it as separate attributes on a request
|
untouched stream and expose it as separate attributes on a request
|
||||||
object.
|
object.
|
||||||
|
|
||||||
.. versionadded:: 0.8
|
|
||||||
|
|
||||||
:param stream_factory: An optional callable that returns a new read and
|
:param stream_factory: An optional callable that returns a new read and
|
||||||
writeable file descriptor. This callable works
|
writeable file descriptor. This callable works
|
||||||
the same as :meth:`Response._get_file_stream`.
|
the same as :meth:`Response._get_file_stream`.
|
||||||
:param charset: The character set for URL and url encoded form data.
|
|
||||||
:param errors: The encoding error behavior.
|
|
||||||
:param max_form_memory_size: the maximum number of bytes to be accepted for
|
:param max_form_memory_size: the maximum number of bytes to be accepted for
|
||||||
in-memory stored form data. If the data
|
in-memory stored form data. If the data
|
||||||
exceeds the value specified an
|
exceeds the value specified an
|
||||||
|
@ -179,26 +150,38 @@ class FormDataParser:
|
||||||
:param cls: an optional dict class to use. If this is not specified
|
:param cls: an optional dict class to use. If this is not specified
|
||||||
or `None` the default :class:`MultiDict` is used.
|
or `None` the default :class:`MultiDict` is used.
|
||||||
:param silent: If set to False parsing errors will not be caught.
|
:param silent: If set to False parsing errors will not be caught.
|
||||||
|
:param max_form_parts: The maximum number of multipart parts to be parsed. If this
|
||||||
|
is exceeded, a :exc:`~exceptions.RequestEntityTooLarge` exception is raised.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.0
|
||||||
|
The ``charset`` and ``errors`` parameters were removed.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.0
|
||||||
|
The ``parse_functions`` attribute and ``get_parse_func`` methods were removed.
|
||||||
|
|
||||||
|
.. versionchanged:: 2.2.3
|
||||||
|
Added the ``max_form_parts`` parameter.
|
||||||
|
|
||||||
|
.. versionadded:: 0.8
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
stream_factory: t.Optional["TStreamFactory"] = None,
|
stream_factory: TStreamFactory | None = None,
|
||||||
charset: str = "utf-8",
|
max_form_memory_size: int | None = None,
|
||||||
errors: str = "replace",
|
max_content_length: int | None = None,
|
||||||
max_form_memory_size: t.Optional[int] = None,
|
cls: type[MultiDict] | None = None,
|
||||||
max_content_length: t.Optional[int] = None,
|
|
||||||
cls: t.Optional[t.Type[MultiDict]] = None,
|
|
||||||
silent: bool = True,
|
silent: bool = True,
|
||||||
|
*,
|
||||||
|
max_form_parts: int | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
if stream_factory is None:
|
if stream_factory is None:
|
||||||
stream_factory = default_stream_factory
|
stream_factory = default_stream_factory
|
||||||
|
|
||||||
self.stream_factory = stream_factory
|
self.stream_factory = stream_factory
|
||||||
self.charset = charset
|
|
||||||
self.errors = errors
|
|
||||||
self.max_form_memory_size = max_form_memory_size
|
self.max_form_memory_size = max_form_memory_size
|
||||||
self.max_content_length = max_content_length
|
self.max_content_length = max_content_length
|
||||||
|
self.max_form_parts = max_form_parts
|
||||||
|
|
||||||
if cls is None:
|
if cls is None:
|
||||||
cls = MultiDict
|
cls = MultiDict
|
||||||
|
@ -206,34 +189,29 @@ class FormDataParser:
|
||||||
self.cls = cls
|
self.cls = cls
|
||||||
self.silent = silent
|
self.silent = silent
|
||||||
|
|
||||||
def get_parse_func(
|
def parse_from_environ(self, environ: WSGIEnvironment) -> t_parse_result:
|
||||||
self, mimetype: str, options: t.Dict[str, str]
|
|
||||||
) -> t.Optional[
|
|
||||||
t.Callable[
|
|
||||||
["FormDataParser", t.IO[bytes], str, t.Optional[int], t.Dict[str, str]],
|
|
||||||
"t_parse_result",
|
|
||||||
]
|
|
||||||
]:
|
|
||||||
return self.parse_functions.get(mimetype)
|
|
||||||
|
|
||||||
def parse_from_environ(self, environ: "WSGIEnvironment") -> "t_parse_result":
|
|
||||||
"""Parses the information from the environment as form data.
|
"""Parses the information from the environment as form data.
|
||||||
|
|
||||||
:param environ: the WSGI environment to be used for parsing.
|
:param environ: the WSGI environment to be used for parsing.
|
||||||
:return: A tuple in the form ``(stream, form, files)``.
|
:return: A tuple in the form ``(stream, form, files)``.
|
||||||
"""
|
"""
|
||||||
content_type = environ.get("CONTENT_TYPE", "")
|
stream = get_input_stream(environ, max_content_length=self.max_content_length)
|
||||||
content_length = get_content_length(environ)
|
content_length = get_content_length(environ)
|
||||||
mimetype, options = parse_options_header(content_type)
|
mimetype, options = parse_options_header(environ.get("CONTENT_TYPE"))
|
||||||
return self.parse(get_input_stream(environ), mimetype, content_length, options)
|
return self.parse(
|
||||||
|
stream,
|
||||||
|
content_length=content_length,
|
||||||
|
mimetype=mimetype,
|
||||||
|
options=options,
|
||||||
|
)
|
||||||
|
|
||||||
def parse(
|
def parse(
|
||||||
self,
|
self,
|
||||||
stream: t.IO[bytes],
|
stream: t.IO[bytes],
|
||||||
mimetype: str,
|
mimetype: str,
|
||||||
content_length: t.Optional[int],
|
content_length: int | None,
|
||||||
options: t.Optional[t.Dict[str, str]] = None,
|
options: dict[str, str] | None = None,
|
||||||
) -> "t_parse_result":
|
) -> t_parse_result:
|
||||||
"""Parses the information from the given stream, mimetype,
|
"""Parses the information from the given stream, mimetype,
|
||||||
content length and mimetype parameters.
|
content length and mimetype parameters.
|
||||||
|
|
||||||
|
@ -243,43 +221,40 @@ class FormDataParser:
|
||||||
:param options: optional mimetype parameters (used for
|
:param options: optional mimetype parameters (used for
|
||||||
the multipart boundary for instance)
|
the multipart boundary for instance)
|
||||||
:return: A tuple in the form ``(stream, form, files)``.
|
:return: A tuple in the form ``(stream, form, files)``.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.0
|
||||||
|
The invalid ``application/x-url-encoded`` content type is not
|
||||||
|
treated as ``application/x-www-form-urlencoded``.
|
||||||
"""
|
"""
|
||||||
if (
|
if mimetype == "multipart/form-data":
|
||||||
self.max_content_length is not None
|
parse_func = self._parse_multipart
|
||||||
and content_length is not None
|
elif mimetype == "application/x-www-form-urlencoded":
|
||||||
and content_length > self.max_content_length
|
parse_func = self._parse_urlencoded
|
||||||
):
|
else:
|
||||||
# if the input stream is not exhausted, firefox reports Connection Reset
|
return stream, self.cls(), self.cls()
|
||||||
_exhaust(stream)
|
|
||||||
raise exceptions.RequestEntityTooLarge()
|
|
||||||
|
|
||||||
if options is None:
|
if options is None:
|
||||||
options = {}
|
options = {}
|
||||||
|
|
||||||
parse_func = self.get_parse_func(mimetype, options)
|
try:
|
||||||
|
return parse_func(stream, mimetype, content_length, options)
|
||||||
if parse_func is not None:
|
except ValueError:
|
||||||
try:
|
if not self.silent:
|
||||||
return parse_func(self, stream, mimetype, content_length, options)
|
raise
|
||||||
except ValueError:
|
|
||||||
if not self.silent:
|
|
||||||
raise
|
|
||||||
|
|
||||||
return stream, self.cls(), self.cls()
|
return stream, self.cls(), self.cls()
|
||||||
|
|
||||||
@exhaust_stream
|
|
||||||
def _parse_multipart(
|
def _parse_multipart(
|
||||||
self,
|
self,
|
||||||
stream: t.IO[bytes],
|
stream: t.IO[bytes],
|
||||||
mimetype: str,
|
mimetype: str,
|
||||||
content_length: t.Optional[int],
|
content_length: int | None,
|
||||||
options: t.Dict[str, str],
|
options: dict[str, str],
|
||||||
) -> "t_parse_result":
|
) -> t_parse_result:
|
||||||
parser = MultiPartParser(
|
parser = MultiPartParser(
|
||||||
self.stream_factory,
|
stream_factory=self.stream_factory,
|
||||||
self.charset,
|
|
||||||
self.errors,
|
|
||||||
max_form_memory_size=self.max_form_memory_size,
|
max_form_memory_size=self.max_form_memory_size,
|
||||||
|
max_form_parts=self.max_form_parts,
|
||||||
cls=self.cls,
|
cls=self.cls,
|
||||||
)
|
)
|
||||||
boundary = options.get("boundary", "").encode("ascii")
|
boundary = options.get("boundary", "").encode("ascii")
|
||||||
|
@ -290,66 +265,43 @@ class FormDataParser:
|
||||||
form, files = parser.parse(stream, boundary, content_length)
|
form, files = parser.parse(stream, boundary, content_length)
|
||||||
return stream, form, files
|
return stream, form, files
|
||||||
|
|
||||||
@exhaust_stream
|
|
||||||
def _parse_urlencoded(
|
def _parse_urlencoded(
|
||||||
self,
|
self,
|
||||||
stream: t.IO[bytes],
|
stream: t.IO[bytes],
|
||||||
mimetype: str,
|
mimetype: str,
|
||||||
content_length: t.Optional[int],
|
content_length: int | None,
|
||||||
options: t.Dict[str, str],
|
options: dict[str, str],
|
||||||
) -> "t_parse_result":
|
) -> t_parse_result:
|
||||||
if (
|
if (
|
||||||
self.max_form_memory_size is not None
|
self.max_form_memory_size is not None
|
||||||
and content_length is not None
|
and content_length is not None
|
||||||
and content_length > self.max_form_memory_size
|
and content_length > self.max_form_memory_size
|
||||||
):
|
):
|
||||||
# if the input stream is not exhausted, firefox reports Connection Reset
|
raise RequestEntityTooLarge()
|
||||||
_exhaust(stream)
|
|
||||||
raise exceptions.RequestEntityTooLarge()
|
|
||||||
|
|
||||||
form = url_decode_stream(stream, self.charset, errors=self.errors, cls=self.cls)
|
try:
|
||||||
return stream, form, self.cls()
|
items = parse_qsl(
|
||||||
|
stream.read().decode(),
|
||||||
|
keep_blank_values=True,
|
||||||
|
errors="werkzeug.url_quote",
|
||||||
|
)
|
||||||
|
except ValueError as e:
|
||||||
|
raise RequestEntityTooLarge() from e
|
||||||
|
|
||||||
#: mapping of mimetypes to parsing functions
|
return stream, self.cls(items), self.cls()
|
||||||
parse_functions: t.Dict[
|
|
||||||
str,
|
|
||||||
t.Callable[
|
|
||||||
["FormDataParser", t.IO[bytes], str, t.Optional[int], t.Dict[str, str]],
|
|
||||||
"t_parse_result",
|
|
||||||
],
|
|
||||||
] = {
|
|
||||||
"multipart/form-data": _parse_multipart,
|
|
||||||
"application/x-www-form-urlencoded": _parse_urlencoded,
|
|
||||||
"application/x-url-encoded": _parse_urlencoded,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _line_parse(line: str) -> t.Tuple[str, bool]:
|
|
||||||
"""Removes line ending characters and returns a tuple (`stripped_line`,
|
|
||||||
`is_terminated`).
|
|
||||||
"""
|
|
||||||
if line[-2:] == "\r\n":
|
|
||||||
return line[:-2], True
|
|
||||||
|
|
||||||
elif line[-1:] in {"\r", "\n"}:
|
|
||||||
return line[:-1], True
|
|
||||||
|
|
||||||
return line, False
|
|
||||||
|
|
||||||
|
|
||||||
class MultiPartParser:
|
class MultiPartParser:
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
stream_factory: t.Optional["TStreamFactory"] = None,
|
stream_factory: TStreamFactory | None = None,
|
||||||
charset: str = "utf-8",
|
max_form_memory_size: int | None = None,
|
||||||
errors: str = "replace",
|
cls: type[MultiDict] | None = None,
|
||||||
max_form_memory_size: t.Optional[int] = None,
|
|
||||||
cls: t.Optional[t.Type[MultiDict]] = None,
|
|
||||||
buffer_size: int = 64 * 1024,
|
buffer_size: int = 64 * 1024,
|
||||||
|
max_form_parts: int | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
self.charset = charset
|
|
||||||
self.errors = errors
|
|
||||||
self.max_form_memory_size = max_form_memory_size
|
self.max_form_memory_size = max_form_memory_size
|
||||||
|
self.max_form_parts = max_form_parts
|
||||||
|
|
||||||
if stream_factory is None:
|
if stream_factory is None:
|
||||||
stream_factory = default_stream_factory
|
stream_factory = default_stream_factory
|
||||||
|
@ -360,10 +312,9 @@ class MultiPartParser:
|
||||||
cls = MultiDict
|
cls = MultiDict
|
||||||
|
|
||||||
self.cls = cls
|
self.cls = cls
|
||||||
|
|
||||||
self.buffer_size = buffer_size
|
self.buffer_size = buffer_size
|
||||||
|
|
||||||
def fail(self, message: str) -> "te.NoReturn":
|
def fail(self, message: str) -> te.NoReturn:
|
||||||
raise ValueError(message)
|
raise ValueError(message)
|
||||||
|
|
||||||
def get_part_charset(self, headers: Headers) -> str:
|
def get_part_charset(self, headers: Headers) -> str:
|
||||||
|
@ -371,18 +322,23 @@ class MultiPartParser:
|
||||||
content_type = headers.get("content-type")
|
content_type = headers.get("content-type")
|
||||||
|
|
||||||
if content_type:
|
if content_type:
|
||||||
mimetype, ct_params = parse_options_header(content_type)
|
parameters = parse_options_header(content_type)[1]
|
||||||
return ct_params.get("charset", self.charset)
|
ct_charset = parameters.get("charset", "").lower()
|
||||||
|
|
||||||
return self.charset
|
# A safe list of encodings. Modern clients should only send ASCII or UTF-8.
|
||||||
|
# This list will not be extended further.
|
||||||
|
if ct_charset in {"ascii", "us-ascii", "utf-8", "iso-8859-1"}:
|
||||||
|
return ct_charset
|
||||||
|
|
||||||
|
return "utf-8"
|
||||||
|
|
||||||
def start_file_streaming(
|
def start_file_streaming(
|
||||||
self, event: File, total_content_length: t.Optional[int]
|
self, event: File, total_content_length: int | None
|
||||||
) -> t.IO[bytes]:
|
) -> t.IO[bytes]:
|
||||||
content_type = event.headers.get("content-type")
|
content_type = event.headers.get("content-type")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
content_length = int(event.headers["content-length"])
|
content_length = _plain_int(event.headers["content-length"])
|
||||||
except (KeyError, ValueError):
|
except (KeyError, ValueError):
|
||||||
content_length = 0
|
content_length = 0
|
||||||
|
|
||||||
|
@ -395,27 +351,22 @@ class MultiPartParser:
|
||||||
return container
|
return container
|
||||||
|
|
||||||
def parse(
|
def parse(
|
||||||
self, stream: t.IO[bytes], boundary: bytes, content_length: t.Optional[int]
|
self, stream: t.IO[bytes], boundary: bytes, content_length: int | None
|
||||||
) -> t.Tuple[MultiDict, MultiDict]:
|
) -> tuple[MultiDict, MultiDict]:
|
||||||
container: t.Union[t.IO[bytes], t.List[bytes]]
|
current_part: Field | File
|
||||||
|
container: t.IO[bytes] | list[bytes]
|
||||||
_write: t.Callable[[bytes], t.Any]
|
_write: t.Callable[[bytes], t.Any]
|
||||||
|
|
||||||
iterator = chain(
|
parser = MultipartDecoder(
|
||||||
_make_chunk_iter(
|
boundary,
|
||||||
stream,
|
max_form_memory_size=self.max_form_memory_size,
|
||||||
limit=content_length,
|
max_parts=self.max_form_parts,
|
||||||
buffer_size=self.buffer_size,
|
|
||||||
),
|
|
||||||
[None],
|
|
||||||
)
|
)
|
||||||
|
|
||||||
parser = MultipartDecoder(boundary, self.max_form_memory_size)
|
|
||||||
|
|
||||||
fields = []
|
fields = []
|
||||||
files = []
|
files = []
|
||||||
|
|
||||||
current_part: Union[Field, File]
|
for data in _chunk_iter(stream.read, self.buffer_size):
|
||||||
for data in iterator:
|
|
||||||
parser.receive_data(data)
|
parser.receive_data(data)
|
||||||
event = parser.next_event()
|
event = parser.next_event()
|
||||||
while not isinstance(event, (Epilogue, NeedData)):
|
while not isinstance(event, (Epilogue, NeedData)):
|
||||||
|
@ -432,7 +383,7 @@ class MultiPartParser:
|
||||||
if not event.more_data:
|
if not event.more_data:
|
||||||
if isinstance(current_part, Field):
|
if isinstance(current_part, Field):
|
||||||
value = b"".join(container).decode(
|
value = b"".join(container).decode(
|
||||||
self.get_part_charset(current_part.headers), self.errors
|
self.get_part_charset(current_part.headers), "replace"
|
||||||
)
|
)
|
||||||
fields.append((current_part.name, value))
|
fields.append((current_part.name, value))
|
||||||
else:
|
else:
|
||||||
|
@ -453,3 +404,18 @@ class MultiPartParser:
|
||||||
event = parser.next_event()
|
event = parser.next_event()
|
||||||
|
|
||||||
return self.cls(fields), self.cls(files)
|
return self.cls(fields), self.cls(files)
|
||||||
|
|
||||||
|
|
||||||
|
def _chunk_iter(read: t.Callable[[int], bytes], size: int) -> t.Iterator[bytes | None]:
|
||||||
|
"""Read data in chunks for multipart/form-data parsing. Stop if no data is read.
|
||||||
|
Yield ``None`` at the end to signal end of parsing.
|
||||||
|
"""
|
||||||
|
while True:
|
||||||
|
data = read(size)
|
||||||
|
|
||||||
|
if not data:
|
||||||
|
break
|
||||||
|
|
||||||
|
yield data
|
||||||
|
|
||||||
|
yield None
|
||||||
|
|
|
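An illustrative sketch (the body and boundary are invented) of the rewritten ``MultiPartParser.parse`` above, which now feeds the sans-IO ``MultipartDecoder`` through ``_chunk_iter``.

from io import BytesIO
from werkzeug.formparser import MultiPartParser

body = (
    b"--boundary\r\n"
    b'Content-Disposition: form-data; name="field"\r\n'
    b"\r\n"
    b"value\r\n"
    b"--boundary--\r\n"
)
# max_form_parts is the new per-request limit on multipart parts.
parser = MultiPartParser(max_form_parts=10)
form, files = parser.parse(BytesIO(body), b"boundary", len(body))
print(form["field"])  # expected output: value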
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import copy
|
import copy
|
||||||
import math
|
import math
|
||||||
import operator
|
import operator
|
||||||
|
@ -18,7 +20,7 @@ T = t.TypeVar("T")
|
||||||
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
|
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
|
||||||
|
|
||||||
|
|
||||||
def release_local(local: t.Union["Local", "LocalStack"]) -> None:
|
def release_local(local: Local | LocalStack) -> None:
|
||||||
"""Release the data for the current context in a :class:`Local` or
|
"""Release the data for the current context in a :class:`Local` or
|
||||||
:class:`LocalStack` without using a :class:`LocalManager`.
|
:class:`LocalStack` without using a :class:`LocalManager`.
|
||||||
|
|
||||||
|
@ -49,9 +51,7 @@ class Local:
|
||||||
|
|
||||||
__slots__ = ("__storage",)
|
__slots__ = ("__storage",)
|
||||||
|
|
||||||
def __init__(
|
def __init__(self, context_var: ContextVar[dict[str, t.Any]] | None = None) -> None:
|
||||||
self, context_var: t.Optional[ContextVar[t.Dict[str, t.Any]]] = None
|
|
||||||
) -> None:
|
|
||||||
if context_var is None:
|
if context_var is None:
|
||||||
# A ContextVar not created at global scope interferes with
|
# A ContextVar not created at global scope interferes with
|
||||||
# Python's garbage collection. However, a local only makes
|
# Python's garbage collection. However, a local only makes
|
||||||
|
@ -61,12 +61,10 @@ class Local:
|
||||||
|
|
||||||
object.__setattr__(self, "_Local__storage", context_var)
|
object.__setattr__(self, "_Local__storage", context_var)
|
||||||
|
|
||||||
def __iter__(self) -> t.Iterator[t.Tuple[str, t.Any]]:
|
def __iter__(self) -> t.Iterator[tuple[str, t.Any]]:
|
||||||
return iter(self.__storage.get({}).items())
|
return iter(self.__storage.get({}).items())
|
||||||
|
|
||||||
def __call__(
|
def __call__(self, name: str, *, unbound_message: str | None = None) -> LocalProxy:
|
||||||
self, name: str, *, unbound_message: t.Optional[str] = None
|
|
||||||
) -> "LocalProxy":
|
|
||||||
"""Create a :class:`LocalProxy` that access an attribute on this
|
"""Create a :class:`LocalProxy` that access an attribute on this
|
||||||
local namespace.
|
local namespace.
|
||||||
|
|
||||||
|
@ -124,7 +122,7 @@ class LocalStack(t.Generic[T]):
|
||||||
|
|
||||||
__slots__ = ("_storage",)
|
__slots__ = ("_storage",)
|
||||||
|
|
||||||
def __init__(self, context_var: t.Optional[ContextVar[t.List[T]]] = None) -> None:
|
def __init__(self, context_var: ContextVar[list[T]] | None = None) -> None:
|
||||||
if context_var is None:
|
if context_var is None:
|
||||||
# A ContextVar not created at global scope interferes with
|
# A ContextVar not created at global scope interferes with
|
||||||
# Python's garbage collection. However, a local only makes
|
# Python's garbage collection. However, a local only makes
|
||||||
|
@ -137,14 +135,14 @@ class LocalStack(t.Generic[T]):
|
||||||
def __release_local__(self) -> None:
|
def __release_local__(self) -> None:
|
||||||
self._storage.set([])
|
self._storage.set([])
|
||||||
|
|
||||||
def push(self, obj: T) -> t.List[T]:
|
def push(self, obj: T) -> list[T]:
|
||||||
"""Add a new item to the top of the stack."""
|
"""Add a new item to the top of the stack."""
|
||||||
stack = self._storage.get([]).copy()
|
stack = self._storage.get([]).copy()
|
||||||
stack.append(obj)
|
stack.append(obj)
|
||||||
self._storage.set(stack)
|
self._storage.set(stack)
|
||||||
return stack
|
return stack
|
||||||
|
|
||||||
def pop(self) -> t.Optional[T]:
|
def pop(self) -> T | None:
|
||||||
"""Remove the top item from the stack and return it. If the
|
"""Remove the top item from the stack and return it. If the
|
||||||
stack is empty, return ``None``.
|
stack is empty, return ``None``.
|
||||||
"""
|
"""
|
||||||
|
@ -158,7 +156,7 @@ class LocalStack(t.Generic[T]):
|
||||||
return rv
|
return rv
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def top(self) -> t.Optional[T]:
|
def top(self) -> T | None:
|
||||||
"""The topmost item on the stack. If the stack is empty,
|
"""The topmost item on the stack. If the stack is empty,
|
||||||
`None` is returned.
|
`None` is returned.
|
||||||
"""
|
"""
|
||||||
|
@ -170,8 +168,8 @@ class LocalStack(t.Generic[T]):
|
||||||
return stack[-1]
|
return stack[-1]
|
||||||
|
|
||||||
def __call__(
|
def __call__(
|
||||||
self, name: t.Optional[str] = None, *, unbound_message: t.Optional[str] = None
|
self, name: str | None = None, *, unbound_message: str | None = None
|
||||||
) -> "LocalProxy":
|
) -> LocalProxy:
|
||||||
"""Create a :class:`LocalProxy` that accesses the top of this
|
"""Create a :class:`LocalProxy` that accesses the top of this
|
||||||
local stack.
|
local stack.
|
||||||
|
|
||||||
|
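A short sketch of the ``Local`` / ``LocalStack`` API whose annotations are modernized above; the attribute names and stored values are illustrative.

from werkzeug.local import Local, LocalStack, release_local

ctx = Local()
ctx.user = "alice"            # stored per context via a ContextVar

stack = LocalStack()
stack.push({"request": 1})    # push() returns the updated stack list
current = stack.top           # {"request": 1}, or None when the stack is empty
stack.pop()

release_local(ctx)            # clear this context's data without a LocalManager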
@ -192,9 +190,8 @@ class LocalManager:
|
||||||
|
|
||||||
:param locals: A local or list of locals to manage.
|
:param locals: A local or list of locals to manage.
|
||||||
|
|
||||||
.. versionchanged:: 2.0
|
.. versionchanged:: 2.1
|
||||||
``ident_func`` is deprecated and will be removed in Werkzeug
|
The ``ident_func`` was removed.
|
||||||
2.1.
|
|
||||||
|
|
||||||
.. versionchanged:: 0.7
|
.. versionchanged:: 0.7
|
||||||
The ``ident_func`` parameter was added.
|
The ``ident_func`` parameter was added.
|
||||||
|
@ -208,9 +205,7 @@ class LocalManager:
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
locals: t.Optional[
|
locals: None | (Local | LocalStack | t.Iterable[Local | LocalStack]) = None,
|
||||||
t.Union[Local, LocalStack, t.Iterable[t.Union[Local, LocalStack]]]
|
|
||||||
] = None,
|
|
||||||
) -> None:
|
) -> None:
|
||||||
if locals is None:
|
if locals is None:
|
||||||
self.locals = []
|
self.locals = []
|
||||||
|
@ -226,19 +221,19 @@ class LocalManager:
|
||||||
for local in self.locals:
|
for local in self.locals:
|
||||||
release_local(local)
|
release_local(local)
|
||||||
|
|
||||||
def make_middleware(self, app: "WSGIApplication") -> "WSGIApplication":
|
def make_middleware(self, app: WSGIApplication) -> WSGIApplication:
|
||||||
"""Wrap a WSGI application so that local data is released
|
"""Wrap a WSGI application so that local data is released
|
||||||
automatically after the response has been sent for a request.
|
automatically after the response has been sent for a request.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def application(
|
def application(
|
||||||
environ: "WSGIEnvironment", start_response: "StartResponse"
|
environ: WSGIEnvironment, start_response: StartResponse
|
||||||
) -> t.Iterable[bytes]:
|
) -> t.Iterable[bytes]:
|
||||||
return ClosingIterator(app(environ, start_response), self.cleanup)
|
return ClosingIterator(app(environ, start_response), self.cleanup)
|
||||||
|
|
||||||
return application
|
return application
|
||||||
|
|
||||||
def middleware(self, func: "WSGIApplication") -> "WSGIApplication":
|
def middleware(self, func: WSGIApplication) -> WSGIApplication:
|
||||||
"""Like :meth:`make_middleware` but used as a decorator on the
|
"""Like :meth:`make_middleware` but used as a decorator on the
|
||||||
WSGI application function.
|
WSGI application function.
|
||||||
|
|
||||||
|
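A hedged example of ``LocalManager.make_middleware`` from the hunk above; the tiny WSGI app exists only for illustration.

from werkzeug.local import Local, LocalManager

request_local = Local()
manager = LocalManager([request_local])

def app(environ, start_response):
    request_local.path = environ.get("PATH_INFO", "/")
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [request_local.path.encode()]

# Locals are released automatically once the response iterator is closed.
app = manager.make_middleware(app)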
@ -274,24 +269,24 @@ class _ProxyLookup:
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
f: t.Optional[t.Callable] = None,
|
f: t.Callable | None = None,
|
||||||
fallback: t.Optional[t.Callable] = None,
|
fallback: t.Callable | None = None,
|
||||||
class_value: t.Optional[t.Any] = None,
|
class_value: t.Any | None = None,
|
||||||
is_attr: bool = False,
|
is_attr: bool = False,
|
||||||
) -> None:
|
) -> None:
|
||||||
bind_f: t.Optional[t.Callable[["LocalProxy", t.Any], t.Callable]]
|
bind_f: t.Callable[[LocalProxy, t.Any], t.Callable] | None
|
||||||
|
|
||||||
if hasattr(f, "__get__"):
|
if hasattr(f, "__get__"):
|
||||||
# A Python function, can be turned into a bound method.
|
# A Python function, can be turned into a bound method.
|
||||||
|
|
||||||
def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable:
|
def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable:
|
||||||
return f.__get__(obj, type(obj)) # type: ignore
|
return f.__get__(obj, type(obj)) # type: ignore
|
||||||
|
|
||||||
elif f is not None:
|
elif f is not None:
|
||||||
# A C function, use partial to bind the first argument.
|
# A C function, use partial to bind the first argument.
|
||||||
|
|
||||||
def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable:
|
def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable:
|
||||||
return partial(f, obj) # type: ignore
|
return partial(f, obj)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# Use getattr, which will produce a bound method.
|
# Use getattr, which will produce a bound method.
|
||||||
|
@ -302,10 +297,10 @@ class _ProxyLookup:
|
||||||
self.class_value = class_value
|
self.class_value = class_value
|
||||||
self.is_attr = is_attr
|
self.is_attr = is_attr
|
||||||
|
|
||||||
def __set_name__(self, owner: "LocalProxy", name: str) -> None:
|
def __set_name__(self, owner: LocalProxy, name: str) -> None:
|
||||||
self.name = name
|
self.name = name
|
||||||
|
|
||||||
def __get__(self, instance: "LocalProxy", owner: t.Optional[type] = None) -> t.Any:
|
def __get__(self, instance: LocalProxy, owner: type | None = None) -> t.Any:
|
||||||
if instance is None:
|
if instance is None:
|
||||||
if self.class_value is not None:
|
if self.class_value is not None:
|
||||||
return self.class_value
|
return self.class_value
|
||||||
|
@ -313,7 +308,7 @@ class _ProxyLookup:
|
||||||
return self
|
return self
|
||||||
|
|
||||||
try:
|
try:
|
||||||
obj = instance._get_current_object() # type: ignore[misc]
|
obj = instance._get_current_object()
|
||||||
except RuntimeError:
|
except RuntimeError:
|
||||||
if self.fallback is None:
|
if self.fallback is None:
|
||||||
raise
|
raise
|
||||||
|
@ -335,7 +330,7 @@ class _ProxyLookup:
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
return f"proxy {self.name}"
|
return f"proxy {self.name}"
|
||||||
|
|
||||||
def __call__(self, instance: "LocalProxy", *args: t.Any, **kwargs: t.Any) -> t.Any:
|
def __call__(self, instance: LocalProxy, *args: t.Any, **kwargs: t.Any) -> t.Any:
|
||||||
"""Support calling unbound methods from the class. For example,
|
"""Support calling unbound methods from the class. For example,
|
||||||
this happens with ``copy.copy``, which does
|
this happens with ``copy.copy``, which does
|
||||||
``type(x).__copy__(x)``. ``type(x)`` can't be proxied, so it
|
``type(x).__copy__(x)``. ``type(x)`` can't be proxied, so it
|
||||||
|
@ -352,12 +347,12 @@ class _ProxyIOp(_ProxyLookup):
|
||||||
__slots__ = ()
|
__slots__ = ()
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self, f: t.Optional[t.Callable] = None, fallback: t.Optional[t.Callable] = None
|
self, f: t.Callable | None = None, fallback: t.Callable | None = None
|
||||||
) -> None:
|
) -> None:
|
||||||
super().__init__(f, fallback)
|
super().__init__(f, fallback)
|
||||||
|
|
||||||
def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable:
|
def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable:
|
||||||
def i_op(self: t.Any, other: t.Any) -> "LocalProxy":
|
def i_op(self: t.Any, other: t.Any) -> LocalProxy:
|
||||||
f(self, other) # type: ignore
|
f(self, other) # type: ignore
|
||||||
return instance
|
return instance
|
||||||
|
|
||||||
|
@ -471,10 +466,10 @@ class LocalProxy(t.Generic[T]):
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
local: t.Union[ContextVar[T], Local, LocalStack[T], t.Callable[[], T]],
|
local: ContextVar[T] | Local | LocalStack[T] | t.Callable[[], T],
|
||||||
name: t.Optional[str] = None,
|
name: str | None = None,
|
||||||
*,
|
*,
|
||||||
unbound_message: t.Optional[str] = None,
|
unbound_message: str | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
if name is None:
|
if name is None:
|
||||||
get_name = _identity
|
get_name = _identity
|
||||||
|
@ -497,7 +492,7 @@ class LocalProxy(t.Generic[T]):
|
||||||
elif isinstance(local, LocalStack):
|
elif isinstance(local, LocalStack):
|
||||||
|
|
||||||
def _get_current_object() -> T:
|
def _get_current_object() -> T:
|
||||||
obj = local.top # type: ignore[union-attr]
|
obj = local.top
|
||||||
|
|
||||||
if obj is None:
|
if obj is None:
|
||||||
raise RuntimeError(unbound_message)
|
raise RuntimeError(unbound_message)
|
||||||
|
@ -508,7 +503,7 @@ class LocalProxy(t.Generic[T]):
|
||||||
|
|
||||||
def _get_current_object() -> T:
|
def _get_current_object() -> T:
|
||||||
try:
|
try:
|
||||||
obj = local.get() # type: ignore[union-attr]
|
obj = local.get()
|
||||||
except LookupError:
|
except LookupError:
|
||||||
raise RuntimeError(unbound_message) from None
|
raise RuntimeError(unbound_message) from None
|
||||||
|
|
||||||
|
@ -517,7 +512,7 @@ class LocalProxy(t.Generic[T]):
|
||||||
elif callable(local):
|
elif callable(local):
|
||||||
|
|
||||||
def _get_current_object() -> T:
|
def _get_current_object() -> T:
|
||||||
return get_name(local()) # type: ignore
|
return get_name(local())
|
||||||
|
|
||||||
else:
|
else:
|
||||||
raise TypeError(f"Don't know how to proxy '{type(local)}'.")
|
raise TypeError(f"Don't know how to proxy '{type(local)}'.")
|
||||||
|
|
|
@ -1,22 +0,0 @@
|
||||||
"""
|
|
||||||
Middleware
|
|
||||||
==========
|
|
||||||
|
|
||||||
A WSGI middleware is a WSGI application that wraps another application
|
|
||||||
in order to observe or change its behavior. Werkzeug provides some
|
|
||||||
middleware for common use cases.
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 1
|
|
||||||
|
|
||||||
proxy_fix
|
|
||||||
shared_data
|
|
||||||
dispatcher
|
|
||||||
http_proxy
|
|
||||||
lint
|
|
||||||
profiler
|
|
||||||
|
|
||||||
The :doc:`interactive debugger </debug>` is also a middleware that can
|
|
||||||
be applied manually, although it is typically used automatically with
|
|
||||||
the :doc:`development server </serving>`.
|
|
||||||
"""
|
|
|
@ -30,6 +30,8 @@ and the static files would be served directly by the HTTP server.
|
||||||
:copyright: 2007 Pallets
|
:copyright: 2007 Pallets
|
||||||
:license: BSD-3-Clause
|
:license: BSD-3-Clause
|
||||||
"""
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import typing as t
|
import typing as t
|
||||||
|
|
||||||
if t.TYPE_CHECKING:
|
if t.TYPE_CHECKING:
|
||||||
|
@ -50,14 +52,14 @@ class DispatcherMiddleware:
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
app: "WSGIApplication",
|
app: WSGIApplication,
|
||||||
mounts: t.Optional[t.Dict[str, "WSGIApplication"]] = None,
|
mounts: dict[str, WSGIApplication] | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
self.app = app
|
self.app = app
|
||||||
self.mounts = mounts or {}
|
self.mounts = mounts or {}
|
||||||
|
|
||||||
def __call__(
|
def __call__(
|
||||||
self, environ: "WSGIEnvironment", start_response: "StartResponse"
|
self, environ: WSGIEnvironment, start_response: StartResponse
|
||||||
) -> t.Iterable[bytes]:
|
) -> t.Iterable[bytes]:
|
||||||
script = environ.get("PATH_INFO", "")
|
script = environ.get("PATH_INFO", "")
|
||||||
path_info = ""
|
path_info = ""
|
||||||
|
|
|
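A usage sketch for ``DispatcherMiddleware`` with the updated annotations above; the mount path and both toy apps are assumptions.

from werkzeug.middleware.dispatcher import DispatcherMiddleware

def main_app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"main"]

def api_app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"api"]

# Requests under /api are dispatched to api_app; everything else hits main_app.
app = DispatcherMiddleware(main_app, {"/api": api_app})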
@ -7,13 +7,15 @@ Basic HTTP Proxy
|
||||||
:copyright: 2007 Pallets
|
:copyright: 2007 Pallets
|
||||||
:license: BSD-3-Clause
|
:license: BSD-3-Clause
|
||||||
"""
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import typing as t
|
import typing as t
|
||||||
from http import client
|
from http import client
|
||||||
|
from urllib.parse import quote
|
||||||
|
from urllib.parse import urlsplit
|
||||||
|
|
||||||
from ..datastructures import EnvironHeaders
|
from ..datastructures import EnvironHeaders
|
||||||
from ..http import is_hop_by_hop_header
|
from ..http import is_hop_by_hop_header
|
||||||
from ..urls import url_parse
|
|
||||||
from ..urls import url_quote
|
|
||||||
from ..wsgi import get_input_stream
|
from ..wsgi import get_input_stream
|
||||||
|
|
||||||
if t.TYPE_CHECKING:
|
if t.TYPE_CHECKING:
|
||||||
|
@ -78,12 +80,12 @@ class ProxyMiddleware:
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
app: "WSGIApplication",
|
app: WSGIApplication,
|
||||||
targets: t.Mapping[str, t.Dict[str, t.Any]],
|
targets: t.Mapping[str, dict[str, t.Any]],
|
||||||
chunk_size: int = 2 << 13,
|
chunk_size: int = 2 << 13,
|
||||||
timeout: int = 10,
|
timeout: int = 10,
|
||||||
) -> None:
|
) -> None:
|
||||||
def _set_defaults(opts: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
|
def _set_defaults(opts: dict[str, t.Any]) -> dict[str, t.Any]:
|
||||||
opts.setdefault("remove_prefix", False)
|
opts.setdefault("remove_prefix", False)
|
||||||
opts.setdefault("host", "<auto>")
|
opts.setdefault("host", "<auto>")
|
||||||
opts.setdefault("headers", {})
|
opts.setdefault("headers", {})
|
||||||
|
@ -98,13 +100,14 @@ class ProxyMiddleware:
|
||||||
self.timeout = timeout
|
self.timeout = timeout
|
||||||
|
|
||||||
def proxy_to(
|
def proxy_to(
|
||||||
self, opts: t.Dict[str, t.Any], path: str, prefix: str
|
self, opts: dict[str, t.Any], path: str, prefix: str
|
||||||
) -> "WSGIApplication":
|
) -> WSGIApplication:
|
||||||
target = url_parse(opts["target"])
|
target = urlsplit(opts["target"])
|
||||||
host = t.cast(str, target.ascii_host)
|
# socket can handle unicode host, but header must be ascii
|
||||||
|
host = target.hostname.encode("idna").decode("ascii")
|
||||||
|
|
||||||
def application(
|
def application(
|
||||||
environ: "WSGIEnvironment", start_response: "StartResponse"
|
environ: WSGIEnvironment, start_response: StartResponse
|
||||||
) -> t.Iterable[bytes]:
|
) -> t.Iterable[bytes]:
|
||||||
headers = list(EnvironHeaders(environ).items())
|
headers = list(EnvironHeaders(environ).items())
|
||||||
headers[:] = [
|
headers[:] = [
|
||||||
|
@ -157,7 +160,9 @@ class ProxyMiddleware:
|
||||||
)
|
)
|
||||||
|
|
||||||
con.connect()
|
con.connect()
|
||||||
remote_url = url_quote(remote_path)
|
# safe = https://url.spec.whatwg.org/#url-path-segment-string
|
||||||
|
# as well as percent for things that are already quoted
|
||||||
|
remote_url = quote(remote_path, safe="!$&'()*+,/:;=@%")
|
||||||
querystring = environ["QUERY_STRING"]
|
querystring = environ["QUERY_STRING"]
|
||||||
|
|
||||||
if querystring:
|
if querystring:
|
||||||
|
@ -217,7 +222,7 @@ class ProxyMiddleware:
|
||||||
return application
|
return application
|
||||||
|
|
||||||
def __call__(
|
def __call__(
|
||||||
self, environ: "WSGIEnvironment", start_response: "StartResponse"
|
self, environ: WSGIEnvironment, start_response: StartResponse
|
||||||
) -> t.Iterable[bytes]:
|
) -> t.Iterable[bytes]:
|
||||||
path = environ["PATH_INFO"]
|
path = environ["PATH_INFO"]
|
||||||
app = self.app
|
app = self.app
|
||||||
|
|
|
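A configuration sketch for ``ProxyMiddleware`` as rewritten above with ``urlsplit`` and ``quote``; the backend URL and prefix are assumptions.

from werkzeug.middleware.http_proxy import ProxyMiddleware

def fallback_app(environ, start_response):
    start_response("404 Not Found", [("Content-Type", "text/plain")])
    return [b"not proxied"]

app = ProxyMiddleware(fallback_app, {
    "/backend/": {
        "target": "http://127.0.0.1:8000/",  # assumed upstream server
        "remove_prefix": True,               # strip /backend/ before forwarding
    },
})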
@ -12,6 +12,8 @@ common HTTP errors such as non-empty responses for 304 status codes.
|
||||||
:copyright: 2007 Pallets
|
:copyright: 2007 Pallets
|
||||||
:license: BSD-3-Clause
|
:license: BSD-3-Clause
|
||||||
"""
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import typing as t
|
import typing as t
|
||||||
from types import TracebackType
|
from types import TracebackType
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
|
@ -117,7 +119,7 @@ class ErrorStream:
|
||||||
|
|
||||||
|
|
||||||
class GuardedWrite:
|
class GuardedWrite:
|
||||||
def __init__(self, write: t.Callable[[bytes], object], chunks: t.List[int]) -> None:
|
def __init__(self, write: t.Callable[[bytes], object], chunks: list[int]) -> None:
|
||||||
self._write = write
|
self._write = write
|
||||||
self._chunks = chunks
|
self._chunks = chunks
|
||||||
|
|
||||||
|
@ -131,8 +133,8 @@ class GuardedIterator:
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
iterator: t.Iterable[bytes],
|
iterator: t.Iterable[bytes],
|
||||||
headers_set: t.Tuple[int, Headers],
|
headers_set: tuple[int, Headers],
|
||||||
chunks: t.List[int],
|
chunks: list[int],
|
||||||
) -> None:
|
) -> None:
|
||||||
self._iterator = iterator
|
self._iterator = iterator
|
||||||
self._next = iter(iterator).__next__
|
self._next = iter(iterator).__next__
|
||||||
|
@ -140,7 +142,7 @@ class GuardedIterator:
|
||||||
self.headers_set = headers_set
|
self.headers_set = headers_set
|
||||||
self.chunks = chunks
|
self.chunks = chunks
|
||||||
|
|
||||||
def __iter__(self) -> "GuardedIterator":
|
def __iter__(self) -> GuardedIterator:
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def __next__(self) -> bytes:
|
def __next__(self) -> bytes:
|
||||||
|
@ -164,7 +166,7 @@ class GuardedIterator:
|
||||||
self.closed = True
|
self.closed = True
|
||||||
|
|
||||||
if hasattr(self._iterator, "close"):
|
if hasattr(self._iterator, "close"):
|
||||||
self._iterator.close() # type: ignore
|
self._iterator.close()
|
||||||
|
|
||||||
if self.headers_set:
|
if self.headers_set:
|
||||||
status_code, headers = self.headers_set
|
status_code, headers = self.headers_set
|
||||||
|
@ -230,10 +232,10 @@ class LintMiddleware:
|
||||||
app = LintMiddleware(app)
|
app = LintMiddleware(app)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, app: "WSGIApplication") -> None:
|
def __init__(self, app: WSGIApplication) -> None:
|
||||||
self.app = app
|
self.app = app
|
||||||
|
|
||||||
def check_environ(self, environ: "WSGIEnvironment") -> None:
|
def check_environ(self, environ: WSGIEnvironment) -> None:
|
||||||
if type(environ) is not dict:
|
if type(environ) is not dict:
|
||||||
warn(
|
warn(
|
||||||
"WSGI environment is not a standard Python dict.",
|
"WSGI environment is not a standard Python dict.",
|
||||||
|
@ -280,11 +282,9 @@ class LintMiddleware:
|
||||||
def check_start_response(
|
def check_start_response(
|
||||||
self,
|
self,
|
||||||
status: str,
|
status: str,
|
||||||
headers: t.List[t.Tuple[str, str]],
|
headers: list[tuple[str, str]],
|
||||||
exc_info: t.Optional[
|
exc_info: None | (tuple[type[BaseException], BaseException, TracebackType]),
|
||||||
t.Tuple[t.Type[BaseException], BaseException, TracebackType]
|
) -> tuple[int, Headers]:
|
||||||
],
|
|
||||||
) -> t.Tuple[int, Headers]:
|
|
||||||
check_type("status", status, str)
|
check_type("status", status, str)
|
||||||
status_code_str = status.split(None, 1)[0]
|
status_code_str = status.split(None, 1)[0]
|
||||||
|
|
||||||
|
@ -359,9 +359,9 @@ class LintMiddleware:
|
||||||
)
|
)
|
||||||
|
|
||||||
def check_iterator(self, app_iter: t.Iterable[bytes]) -> None:
|
def check_iterator(self, app_iter: t.Iterable[bytes]) -> None:
|
||||||
if isinstance(app_iter, bytes):
|
if isinstance(app_iter, str):
|
||||||
warn(
|
warn(
|
||||||
"The application returned a bytestring. The response will send one"
|
"The application returned a string. The response will send one"
|
||||||
" character at a time to the client, which will kill performance."
|
" character at a time to the client, which will kill performance."
|
||||||
" Return a list or iterable instead.",
|
" Return a list or iterable instead.",
|
||||||
WSGIWarning,
|
WSGIWarning,
|
||||||
|
@ -377,8 +377,8 @@ class LintMiddleware:
|
||||||
"A WSGI app does not take keyword arguments.", WSGIWarning, stacklevel=2
|
"A WSGI app does not take keyword arguments.", WSGIWarning, stacklevel=2
|
||||||
)
|
)
|
||||||
|
|
||||||
environ: "WSGIEnvironment" = args[0]
|
environ: WSGIEnvironment = args[0]
|
||||||
start_response: "StartResponse" = args[1]
|
start_response: StartResponse = args[1]
|
||||||
|
|
||||||
self.check_environ(environ)
|
self.check_environ(environ)
|
||||||
environ["wsgi.input"] = InputStream(environ["wsgi.input"])
|
environ["wsgi.input"] = InputStream(environ["wsgi.input"])
|
||||||
|
@ -388,8 +388,8 @@ class LintMiddleware:
|
||||||
# iterate to the end and we can check the content length.
|
# iterate to the end and we can check the content length.
|
||||||
environ["wsgi.file_wrapper"] = FileWrapper
|
environ["wsgi.file_wrapper"] = FileWrapper
|
||||||
|
|
||||||
headers_set: t.List[t.Any] = []
|
headers_set: list[t.Any] = []
|
||||||
chunks: t.List[int] = []
|
chunks: list[int] = []
|
||||||
|
|
||||||
def checking_start_response(
|
def checking_start_response(
|
||||||
*args: t.Any, **kwargs: t.Any
|
*args: t.Any, **kwargs: t.Any
|
||||||
|
@ -405,10 +405,10 @@ class LintMiddleware:
|
||||||
warn("'start_response' does not take keyword arguments.", WSGIWarning)
|
warn("'start_response' does not take keyword arguments.", WSGIWarning)
|
||||||
|
|
||||||
status: str = args[0]
|
status: str = args[0]
|
||||||
headers: t.List[t.Tuple[str, str]] = args[1]
|
headers: list[tuple[str, str]] = args[1]
|
||||||
exc_info: t.Optional[
|
exc_info: None | (
|
||||||
t.Tuple[t.Type[BaseException], BaseException, TracebackType]
|
tuple[type[BaseException], BaseException, TracebackType]
|
||||||
] = (args[2] if len(args) == 3 else None)
|
) = (args[2] if len(args) == 3 else None)
|
||||||
|
|
||||||
headers_set[:] = self.check_start_response(status, headers, exc_info)
|
headers_set[:] = self.check_start_response(status, headers, exc_info)
|
||||||
return GuardedWrite(start_response(status, headers, exc_info), chunks)
|
return GuardedWrite(start_response(status, headers, exc_info), chunks)
|
||||||
|
|
|
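The lint docstring above already shows the one-liner; here it is as a self-contained sketch with an assumed application.

from werkzeug.middleware.lint import LintMiddleware

def app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"ok"]

# Emits WSGIWarning/HTTPWarning when the app or server violates the WSGI spec.
app = LintMiddleware(app)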
@ -11,6 +11,8 @@ that may be slowing down your application.
|
||||||
:copyright: 2007 Pallets
|
:copyright: 2007 Pallets
|
||||||
:license: BSD-3-Clause
|
:license: BSD-3-Clause
|
||||||
"""
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import os.path
|
import os.path
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
|
@ -42,11 +44,16 @@ class ProfilerMiddleware:
|
||||||
|
|
||||||
- ``{method}`` - The request method; GET, POST, etc.
|
- ``{method}`` - The request method; GET, POST, etc.
|
||||||
- ``{path}`` - The request path or 'root' should one not exist.
|
- ``{path}`` - The request path or 'root' should one not exist.
|
||||||
- ``{elapsed}`` - The elapsed time of the request.
|
- ``{elapsed}`` - The elapsed time of the request in milliseconds.
|
||||||
- ``{time}`` - The time of the request.
|
- ``{time}`` - The time of the request.
|
||||||
|
|
||||||
If it is a callable, it will be called with the WSGI ``environ``
|
If it is a callable, it will be called with the WSGI ``environ`` and
|
||||||
dict and should return a filename.
|
be expected to return a filename string. The ``environ`` dictionary
|
||||||
|
will also have the ``"werkzeug.profiler"`` key populated with a
|
||||||
|
dictionary containing the following fields (more may be added in the
|
||||||
|
future):
|
||||||
|
- ``{elapsed}`` - The elapsed time of the request in milliseconds.
|
||||||
|
- ``{time}`` - The time of the request.
|
||||||
|
|
||||||
:param app: The WSGI application to wrap.
|
:param app: The WSGI application to wrap.
|
||||||
:param stream: Write stats to this stream. Disable with ``None``.
|
:param stream: Write stats to this stream. Disable with ``None``.
|
||||||
|
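A sketch (directory and naming scheme are assumptions) of a callable ``filename_format`` that reads the new ``werkzeug.profiler`` environ key described above; the key is only populated when ``profile_dir`` is set.

from werkzeug.middleware.profiler import ProfilerMiddleware

def app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"ok"]

def profile_name(environ):
    info = environ["werkzeug.profiler"]  # {"elapsed": ms, "time": unix timestamp}
    return f"{environ['REQUEST_METHOD']}.{info['elapsed']:.0f}ms.prof"

app = ProfilerMiddleware(app, profile_dir="./profiles", filename_format=profile_name)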
@ -63,6 +70,10 @@ class ProfilerMiddleware:
|
||||||
from werkzeug.middleware.profiler import ProfilerMiddleware
|
from werkzeug.middleware.profiler import ProfilerMiddleware
|
||||||
app = ProfilerMiddleware(app)
|
app = ProfilerMiddleware(app)
|
||||||
|
|
||||||
|
.. versionchanged:: 3.0
|
||||||
|
Added the ``"werkzeug.profiler"`` key to the ``filename_format(environ)``
|
||||||
|
parameter with the ``elapsed`` and ``time`` fields.
|
||||||
|
|
||||||
.. versionchanged:: 0.15
|
.. versionchanged:: 0.15
|
||||||
Stats are written even if ``profile_dir`` is given, and can be
|
Stats are written even if ``profile_dir`` is given, and can be
|
||||||
disabled by passing ``stream=None``.
|
disabled by passing ``stream=None``.
|
||||||
|
@ -76,11 +87,11 @@ class ProfilerMiddleware:
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
app: "WSGIApplication",
|
app: WSGIApplication,
|
||||||
stream: t.IO[str] = sys.stdout,
|
stream: t.IO[str] | None = sys.stdout,
|
||||||
sort_by: t.Iterable[str] = ("time", "calls"),
|
sort_by: t.Iterable[str] = ("time", "calls"),
|
||||||
restrictions: t.Iterable[t.Union[str, int, float]] = (),
|
restrictions: t.Iterable[str | int | float] = (),
|
||||||
profile_dir: t.Optional[str] = None,
|
profile_dir: str | None = None,
|
||||||
filename_format: str = "{method}.{path}.{elapsed:.0f}ms.{time:.0f}.prof",
|
filename_format: str = "{method}.{path}.{elapsed:.0f}ms.{time:.0f}.prof",
|
||||||
) -> None:
|
) -> None:
|
||||||
self._app = app
|
self._app = app
|
||||||
|
@ -91,9 +102,9 @@ class ProfilerMiddleware:
|
||||||
self._filename_format = filename_format
|
self._filename_format = filename_format
|
||||||
|
|
||||||
def __call__(
|
def __call__(
|
||||||
self, environ: "WSGIEnvironment", start_response: "StartResponse"
|
self, environ: WSGIEnvironment, start_response: StartResponse
|
||||||
) -> t.Iterable[bytes]:
|
) -> t.Iterable[bytes]:
|
||||||
response_body: t.List[bytes] = []
|
response_body: list[bytes] = []
|
||||||
|
|
||||||
def catching_start_response(status, headers, exc_info=None): # type: ignore
|
def catching_start_response(status, headers, exc_info=None): # type: ignore
|
||||||
start_response(status, headers, exc_info)
|
start_response(status, headers, exc_info)
|
||||||
|
@ -106,7 +117,7 @@ class ProfilerMiddleware:
|
||||||
response_body.extend(app_iter)
|
response_body.extend(app_iter)
|
||||||
|
|
||||||
if hasattr(app_iter, "close"):
|
if hasattr(app_iter, "close"):
|
||||||
app_iter.close() # type: ignore
|
app_iter.close()
|
||||||
|
|
||||||
profile = Profile()
|
profile = Profile()
|
||||||
start = time.time()
|
start = time.time()
|
||||||
|
@ -116,6 +127,10 @@ class ProfilerMiddleware:
|
||||||
|
|
||||||
if self._profile_dir is not None:
|
if self._profile_dir is not None:
|
||||||
if callable(self._filename_format):
|
if callable(self._filename_format):
|
||||||
|
environ["werkzeug.profiler"] = {
|
||||||
|
"elapsed": elapsed * 1000.0,
|
||||||
|
"time": time.time(),
|
||||||
|
}
|
||||||
filename = self._filename_format(environ)
|
filename = self._filename_format(environ)
|
||||||
else:
|
else:
|
||||||
filename = self._filename_format.format(
|
filename = self._filename_format.format(
|
||||||
|
|
|
@ -21,6 +21,8 @@ setting each header so the middleware knows what to trust.
|
||||||
:copyright: 2007 Pallets
|
:copyright: 2007 Pallets
|
||||||
:license: BSD-3-Clause
|
:license: BSD-3-Clause
|
||||||
"""
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import typing as t
|
import typing as t
|
||||||
|
|
||||||
from ..http import parse_list_header
|
from ..http import parse_list_header
|
||||||
|
@ -64,23 +66,16 @@ class ProxyFix:
|
||||||
app = ProxyFix(app, x_for=1, x_host=1)
|
app = ProxyFix(app, x_for=1, x_host=1)
|
||||||
|
|
||||||
.. versionchanged:: 1.0
|
.. versionchanged:: 1.0
|
||||||
Deprecated code has been removed:
|
The ``num_proxies`` argument and attribute; the ``get_remote_addr`` method; and
|
||||||
|
the environ keys ``orig_remote_addr``, ``orig_wsgi_url_scheme``, and
|
||||||
* The ``num_proxies`` argument and attribute.
|
``orig_http_host`` were removed.
|
||||||
* The ``get_remote_addr`` method.
|
|
||||||
* The environ keys ``orig_remote_addr``,
|
|
||||||
``orig_wsgi_url_scheme``, and ``orig_http_host``.
|
|
||||||
|
|
||||||
.. versionchanged:: 0.15
|
.. versionchanged:: 0.15
|
||||||
All headers support multiple values. The ``num_proxies``
|
All headers support multiple values. Each header is configured with a separate
|
||||||
argument is deprecated. Each header is configured with a
|
number of trusted proxies.
|
||||||
separate number of trusted proxies.
|
|
||||||
|
|
||||||
.. versionchanged:: 0.15
|
.. versionchanged:: 0.15
|
||||||
Original WSGI environ values are stored in the
|
Original WSGI environ values are stored in the ``werkzeug.proxy_fix.orig`` dict.
|
||||||
``werkzeug.proxy_fix.orig`` dict. ``orig_remote_addr``,
|
|
||||||
``orig_wsgi_url_scheme``, and ``orig_http_host`` are deprecated
|
|
||||||
and will be removed in 1.0.
|
|
||||||
|
|
||||||
.. versionchanged:: 0.15
|
.. versionchanged:: 0.15
|
||||||
Support ``X-Forwarded-Port`` and ``X-Forwarded-Prefix``.
|
Support ``X-Forwarded-Port`` and ``X-Forwarded-Prefix``.
|
||||||
|
@ -92,7 +87,7 @@ class ProxyFix:
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
app: "WSGIApplication",
|
app: WSGIApplication,
|
||||||
x_for: int = 1,
|
x_for: int = 1,
|
||||||
x_proto: int = 1,
|
x_proto: int = 1,
|
||||||
x_host: int = 0,
|
x_host: int = 0,
|
||||||
|
@ -106,7 +101,7 @@ class ProxyFix:
|
||||||
self.x_port = x_port
|
self.x_port = x_port
|
||||||
self.x_prefix = x_prefix
|
self.x_prefix = x_prefix
|
||||||
|
|
||||||
def _get_real_value(self, trusted: int, value: t.Optional[str]) -> t.Optional[str]:
|
def _get_real_value(self, trusted: int, value: str | None) -> str | None:
|
||||||
"""Get the real value from a list header based on the configured
|
"""Get the real value from a list header based on the configured
|
||||||
number of trusted proxies.
|
number of trusted proxies.
|
||||||
|
|
||||||
|
@ -128,7 +123,7 @@ class ProxyFix:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def __call__(
|
def __call__(
|
||||||
self, environ: "WSGIEnvironment", start_response: "StartResponse"
|
self, environ: WSGIEnvironment, start_response: StartResponse
|
||||||
) -> t.Iterable[bytes]:
|
) -> t.Iterable[bytes]:
|
||||||
"""Modify the WSGI environ based on the various ``Forwarded``
|
"""Modify the WSGI environ based on the various ``Forwarded``
|
||||||
headers before calling the wrapped application. Store the
|
headers before calling the wrapped application. Store the
|
||||||
|
|
|
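The trusted-proxy setup from the ``ProxyFix`` docstring above, spelled out with an assumed app; the counts must match the real proxy chain.

from werkzeug.middleware.proxy_fix import ProxyFix

def app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [environ["REMOTE_ADDR"].encode()]

# Trust exactly one proxy for X-Forwarded-For, X-Forwarded-Proto, X-Forwarded-Host.
app = ProxyFix(app, x_for=1, x_proto=1, x_host=1)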
@ -8,9 +8,11 @@ Serve Shared Static Files
|
||||||
:copyright: 2007 Pallets
|
:copyright: 2007 Pallets
|
||||||
:license: BSD-3-Clause
|
:license: BSD-3-Clause
|
||||||
"""
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import importlib.util
|
||||||
import mimetypes
|
import mimetypes
|
||||||
import os
|
import os
|
||||||
import pkgutil
|
|
||||||
import posixpath
|
import posixpath
|
||||||
import typing as t
|
import typing as t
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
@@ -99,18 +101,18 @@ class SharedDataMiddleware:
 
     def __init__(
        self,
-        app: "WSGIApplication",
-        exports: t.Union[
-            t.Dict[str, t.Union[str, t.Tuple[str, str]]],
-            t.Iterable[t.Tuple[str, t.Union[str, t.Tuple[str, str]]]],
-        ],
+        app: WSGIApplication,
+        exports: (
+            dict[str, str | tuple[str, str]]
+            | t.Iterable[tuple[str, str | tuple[str, str]]]
+        ),
         disallow: None = None,
         cache: bool = True,
         cache_timeout: int = 60 * 60 * 12,
         fallback_mimetype: str = "application/octet-stream",
     ) -> None:
         self.app = app
-        self.exports: t.List[t.Tuple[str, _TLoader]] = []
+        self.exports: list[tuple[str, _TLoader]] = []
         self.cache = cache
         self.cache_timeout = cache_timeout
 
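As a usage sketch for the two ``exports`` shapes the new annotation spells
out (a mapping or an iterable of pairs, where each target is either a
directory path or a ``(package, path)`` tuple); the directory, package name
and timeout below are placeholders::

    import os

    from werkzeug.middleware.shared_data import SharedDataMiddleware

    def my_app(environ, start_response):
        # Placeholder WSGI application being wrapped.
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [b"Hello from the wrapped app\n"]

    my_app = SharedDataMiddleware(
        my_app,
        {
            # URL prefix -> filesystem directory
            "/static": os.path.join(os.path.dirname(__file__), "static"),
            # URL prefix -> (importable package, package-relative directory),
            # e.g. "/shared": ("myapp", "shared"),
        },
        cache_timeout=60 * 60,  # one hour; the default shown above is 12 hours
    )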
@@ -156,12 +158,12 @@ class SharedDataMiddleware:
 
     def get_package_loader(self, package: str, package_path: str) -> _TLoader:
         load_time = datetime.now(timezone.utc)
-        provider = pkgutil.get_loader(package)
-        reader = provider.get_resource_reader(package)  # type: ignore
+        spec = importlib.util.find_spec(package)
+        reader = spec.loader.get_resource_reader(package)  # type: ignore[union-attr]
 
         def loader(
-            path: t.Optional[str],
-        ) -> t.Tuple[t.Optional[str], t.Optional[_TOpener]]:
+            path: str | None,
+        ) -> tuple[str | None, _TOpener | None]:
             if path is None:
                 return None, None
 
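The hunk above replaces the deprecated ``pkgutil.get_loader()`` with
``importlib.util.find_spec()``. A rough stand-alone sketch of the new lookup,
using ``werkzeug`` itself as the package so the snippet can run; the
defensive checks are mine, not the middleware's::

    import importlib.util

    spec = importlib.util.find_spec("werkzeug")  # any importable package works

    # spec.loader can be None (namespace packages), and not every loader
    # offers a resource reader, hence the guards below.
    reader = None
    if spec is not None and spec.loader is not None:
        get_reader = getattr(spec.loader, "get_resource_reader", None)
        reader = get_reader("werkzeug") if get_reader is not None else None

    if reader is not None:
        print(sorted(reader.contents())[:5])  # a few bundled resource names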
@@ -198,8 +200,8 @@ class SharedDataMiddleware:
 
     def get_directory_loader(self, directory: str) -> _TLoader:
         def loader(
-            path: t.Optional[str],
-        ) -> t.Tuple[t.Optional[str], t.Optional[_TOpener]]:
+            path: str | None,
+        ) -> tuple[str | None, _TOpener | None]:
             if path is not None:
                 path = safe_join(directory, path)
 
@@ -222,7 +224,7 @@ class SharedDataMiddleware:
         return f"wzsdm-{timestamp}-{file_size}-{checksum}"
 
     def __call__(
-        self, environ: "WSGIEnvironment", start_response: "StartResponse"
+        self, environ: WSGIEnvironment, start_response: StartResponse
     ) -> t.Iterable[bytes]:
         path = get_path_info(environ)
         file_loader = None
@@ -1,8 +1,9 @@
+from __future__ import annotations
+
 import re
 import typing as t
 import uuid
-
-from ..urls import _fast_url_quote
+from urllib.parse import quote
 
 if t.TYPE_CHECKING:
     from .map import Map
@@ -15,22 +16,33 @@ class ValidationError(ValueError):
 
 
 class BaseConverter:
-    """Base class for all converters."""
+    """Base class for all converters.
+
+    .. versionchanged:: 2.3
+        ``part_isolating`` defaults to ``False`` if ``regex`` contains a ``/``.
+    """
 
     regex = "[^/]+"
     weight = 100
     part_isolating = True
 
-    def __init__(self, map: "Map", *args: t.Any, **kwargs: t.Any) -> None:
+    def __init_subclass__(cls, **kwargs: t.Any) -> None:
+        super().__init_subclass__(**kwargs)
+
+        # If the converter isn't inheriting its regex, disable part_isolating by default
+        # if the regex contains a / character.
+        if "regex" in cls.__dict__ and "part_isolating" not in cls.__dict__:
+            cls.part_isolating = "/" not in cls.regex
+
+    def __init__(self, map: Map, *args: t.Any, **kwargs: t.Any) -> None:
         self.map = map
 
     def to_python(self, value: str) -> t.Any:
         return value
 
     def to_url(self, value: t.Any) -> str:
-        if isinstance(value, (bytes, bytearray)):
-            return _fast_url_quote(value)
-        return _fast_url_quote(str(value).encode(self.map.charset))
+        # safe = https://url.spec.whatwg.org/#url-path-segment-string
+        return quote(str(value), safe="!$&'()*+,/:;=@")
 
 
 class UnicodeConverter(BaseConverter):
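The effect of the new ``__init_subclass__`` hook, sketched with two made-up
converters: a subclass that overrides ``regex`` with a pattern containing
``/`` now gets ``part_isolating = False`` automatically, unless it sets the
attribute itself::

    from werkzeug.routing import BaseConverter

    class WikiPageConverter(BaseConverter):
        # regex is overridden and contains "/", so __init_subclass__ flips
        # part_isolating to False for this class.
        regex = r"[A-Za-z0-9_/-]+"

    class ColorConverter(BaseConverter):
        # No "/" in the overridden regex, so part_isolating stays True.
        regex = r"(?:red|green|blue)"

    print(WikiPageConverter.part_isolating)  # False
    print(ColorConverter.part_isolating)     # True

Separately, ``to_url`` now routes every value through ``urllib.parse.quote``
with the WHATWG path-segment safe set, so values are str-ified and
percent-encoded the same way regardless of the map's charset.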
@@ -51,14 +63,12 @@ class UnicodeConverter(BaseConverter):
     :param length: the exact length of the string.
     """
 
-    part_isolating = True
-
     def __init__(
         self,
-        map: "Map",
+        map: Map,
         minlength: int = 1,
-        maxlength: t.Optional[int] = None,
-        length: t.Optional[int] = None,
+        maxlength: int | None = None,
+        length: int | None = None,
     ) -> None:
         super().__init__(map)
         if length is not None:
@@ -86,9 +96,7 @@ class AnyConverter(BaseConverter):
     Value is validated when building a URL.
     """
 
-    part_isolating = True
-
-    def __init__(self, map: "Map", *items: str) -> None:
+    def __init__(self, map: Map, *items: str) -> None:
         super().__init__(map)
         self.items = set(items)
         self.regex = f"(?:{'|'.join([re.escape(x) for x in items])})"
@@ -111,9 +119,9 @@ class PathConverter(BaseConverter):
     :param map: the :class:`Map`.
     """
 
+    part_isolating = False
     regex = "[^/].*?"
     weight = 200
-    part_isolating = False
 
 
 class NumberConverter(BaseConverter):
@@ -124,14 +132,13 @@ class NumberConverter(BaseConverter):
 
     weight = 50
     num_convert: t.Callable = int
-    part_isolating = True
 
     def __init__(
         self,
-        map: "Map",
+        map: Map,
         fixed_digits: int = 0,
-        min: t.Optional[int] = None,
-        max: t.Optional[int] = None,
+        min: int | None = None,
+        max: int | None = None,
         signed: bool = False,
     ) -> None:
         if signed:
@@ -186,7 +193,6 @@ class IntegerConverter(NumberConverter):
     """
 
     regex = r"\d+"
-    part_isolating = True
 
 
 class FloatConverter(NumberConverter):
@@ -210,13 +216,12 @@ class FloatConverter(NumberConverter):
 
     regex = r"\d+\.\d+"
     num_convert = float
-    part_isolating = True
 
     def __init__(
         self,
-        map: "Map",
-        min: t.Optional[float] = None,
-        max: t.Optional[float] = None,
+        map: Map,
+        min: float | None = None,
+        max: float | None = None,
         signed: bool = False,
     ) -> None:
         super().__init__(map, min=min, max=max, signed=signed)  # type: ignore
@@ -236,7 +241,6 @@ class UUIDConverter(BaseConverter):
         r"[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-"
         r"[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}"
     )
-    part_isolating = True
 
     def to_python(self, value: str) -> uuid.UUID:
         return uuid.UUID(value)
@@ -246,7 +250,7 @@ class UUIDConverter(BaseConverter):
 
 
 #: the default converter mapping for the map.
-DEFAULT_CONVERTERS: t.Mapping[str, t.Type[BaseConverter]] = {
+DEFAULT_CONVERTERS: t.Mapping[str, type[BaseConverter]] = {
     "default": UnicodeConverter,
     "string": UnicodeConverter,
     "any": AnyConverter,
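For reference (not part of the patch), the keys of ``DEFAULT_CONVERTERS`` are
the names that rule strings refer to; a small sketch of matching with the
built-in converters, using placeholder endpoints::

    from werkzeug.routing import Map, Rule

    url_map = Map(
        [
            Rule("/page/<int(min=1):page>", endpoint="page"),
            Rule("/wiki/<path:name>", endpoint="wiki"),  # "path" allows "/"
        ]
    )

    urls = url_map.bind("example.invalid")
    print(urls.match("/page/3"))    # ('page', {'page': 3})
    print(urls.match("/wiki/a/b"))  # ('wiki', {'name': 'a/b'})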