diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml index a47ab4ffc7..105837dbfa 100644 --- a/.github/workflows/changelog.yml +++ b/.github/workflows/changelog.yml @@ -13,7 +13,7 @@ jobs: steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 with: # `towncrier check` runs `git diff --name-only origin/main...`, which # needs a non-shallow clone. diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1ced0d9fe2..b1f3723b6d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,6 +1,6 @@ name: CI -on: [push, pull_request] +on: [push, pull_request, workflow_dispatch] permissions: "read-all" @@ -15,10 +15,10 @@ jobs: steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Setup Python" - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: "3.x" cache: "pip" @@ -103,10 +103,10 @@ jobs: timeout-minutes: 30 steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Setup Python ${{ matrix.python-version }}" - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -115,10 +115,10 @@ jobs: run: python -m pip install --upgrade pip setuptools nox - name: "Install Chrome" - uses: browser-actions/setup-chrome@52f10de5479c69bcbbab2eab094c9d373148005e # v1.4.0 + uses: browser-actions/setup-chrome@db1b524c26f20a8d1a10f7fc385c92387e2d0477 # v1.7.1 if: ${{ matrix.nox-session == 'emscripten' }} - name: "Install Firefox" - uses: browser-actions/setup-firefox@29a706787c6fb2196f091563261e1273bf379ead # v1.4.0 + uses: browser-actions/setup-firefox@233224b712fc07910ded8c15fb95a555c86da76f # v1.5.0 if: ${{ matrix.nox-session == 'emscripten' }} - name: "Run tests" # If no explicit NOX_SESSION is set, run the default tests for the chosen Python version @@ -127,10 +127,10 @@ jobs: PYTHON_VERSION: ${{ matrix.python-version }} NOX_SESSION: ${{ matrix.nox-session }} - - name: "Upload artifact" - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + - name: "Upload coverage data" + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: - name: coverage-data + name: coverage-data-${{ matrix.python-version }}-${{ matrix.os }}-${{ matrix.experimental }}-${{ matrix.nox-session }} path: ".coverage.*" if-no-files-found: error @@ -141,20 +141,21 @@ jobs: needs: test steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Setup Python" - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: "3.x" - name: "Install coverage" run: "python -m pip install -r dev-requirements.txt" - - name: "Download artifact" - uses: 
actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 + - name: "Download coverage data" + uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 with: - name: coverage-data + pattern: coverage-data-* + merge-multiple: true - name: "Combine & check coverage" run: | @@ -164,7 +165,7 @@ jobs: - if: ${{ failure() }} name: "Upload report if check failed" - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: name: coverage-report path: htmlcov diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index a0de4fcc59..ad45723b99 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -7,6 +7,7 @@ on: branches: ["main"] schedule: - cron: "0 0 * * 5" + workflow_dispatch: permissions: "read-all" @@ -21,16 +22,16 @@ jobs: security-events: write steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Run CodeQL init" - uses: github/codeql-action/init@cdcdbb579706841c47f7063dda365e292e5cad7a # v2.13.4 + uses: github/codeql-action/init@9fdb3e49720b44c48891d036bb502feb25684276 # v3.25.6 with: config-file: "./.github/codeql.yml" languages: "python" - name: "Run CodeQL autobuild" - uses: github/codeql-action/autobuild@cdcdbb579706841c47f7063dda365e292e5cad7a # v2.13.4 + uses: github/codeql-action/autobuild@9fdb3e49720b44c48891d036bb502feb25684276 # v3.25.6 - name: "Run CodeQL analyze" - uses: github/codeql-action/analyze@cdcdbb579706841c47f7063dda365e292e5cad7a # v2.13.4 + uses: github/codeql-action/analyze@9fdb3e49720b44c48891d036bb502feb25684276 # v3.25.6 diff --git a/.github/workflows/downstream.yml b/.github/workflows/downstream.yml index b0fba2da8a..8f4206bb06 100644 --- a/.github/workflows/downstream.yml +++ b/.github/workflows/downstream.yml @@ -1,6 +1,6 @@ name: Downstream -on: [push, pull_request] +on: [push, pull_request, workflow_dispatch] permissions: "read-all" @@ -15,10 +15,10 @@ jobs: steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Setup Python" - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: "3.x" diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index f7375a4d38..d28ea81816 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,6 +1,6 @@ name: lint -on: [push, pull_request] +on: [push, pull_request, workflow_dispatch] permissions: "read-all" @@ -11,10 +11,10 @@ jobs: steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Setup Python" - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: "3.x" cache: pip diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index e79066829d..d020de0515 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -19,10 +19,10 @@ jobs: steps: - name: "Checkout repository" - uses: 
actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Setup Python" - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: "3.x" @@ -53,7 +53,7 @@ jobs: actions: read contents: write id-token: write # Needed to access the workflow's OIDC identity. - uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.9.0 + uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.10.0 with: base64-subjects: "${{ needs.build.outputs.hashes }}" upload-assets: true diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml index 356be2d94d..2714f1056d 100644 --- a/.github/workflows/scorecards.yml +++ b/.github/workflows/scorecards.yml @@ -21,7 +21,7 @@ jobs: steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 with: persist-credentials: false diff --git a/CHANGES.rst b/CHANGES.rst index 319accb223..0f4e5cc581 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,9 +1,16 @@ +2.2.2 (2024-06-17) +================== + +- Added the ``Proxy-Authorization`` header to the list of headers to strip from requests when redirecting to a different host. As before, different headers can be set via ``Retry.remove_headers_on_redirect``. +- Allowed passing negative integers as ``amt`` to read methods of ``http.client.HTTPResponse`` as an alternative to ``None``. (`#3122 `__) +- Fixed return types representing copying actions to use ``typing.Self``. (`#3363 `__) + 2.2.1 (2024-02-16) ================== - Fixed issue where ``InsecureRequestWarning`` was emitted for HTTPS connections when using Emscripten. (`#3331 `__) - Fixed ``HTTPConnectionPool.urlopen`` to stop automatically casting non-proxy headers to ``HTTPHeaderDict``. This change was premature as it did not apply to proxy headers and ``HTTPHeaderDict`` does not handle byte header values correctly yet. (`#3343 `__) -- Changed ``ProtocolError`` to ``InvalidChunkLength`` when response terminates before the chunk length is sent. (`#2860 `__) +- Changed ``InvalidChunkLength`` to ``ProtocolError`` when response terminates before the chunk length is sent. (`#2860 `__) - Changed ``ProtocolError`` to be more verbose on incomplete reads with excess content. (`#3261 `__) diff --git a/dev-requirements.txt b/dev-requirements.txt index 9ac700bbe1..f712fe2a9e 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,19 +1,19 @@ h2==4.1.0 coverage==7.4.1 PySocks==1.7.1 -pytest==7.4.4 +pytest==8.0.2 pytest-timeout==2.1.0 pyOpenSSL==24.0.0 -idna==3.4 -trustme==1.1.0 -cryptography==42.0.2 +idna==3.7 +# As of v1.1.0, child CA certificates generated by trustme fail +# verification by CPython 3.13. +# https://github.com/python-trio/trustme/pull/642 +trustme @ git+https://github.com/python-trio/trustme@b3a767f336e20600f30c9ff78385a58352ff6ee3 +cryptography==42.0.4 backports.zoneinfo==0.2.1;python_version<"3.9" towncrier==23.6.0 pytest-memray==1.5.0;python_version<"3.13" and sys_platform!="win32" and implementation_name=="cpython" -trio==0.23.1;python_version<"3.13" -# We need a release of Trio newer than 0.24.0 to support CPython 3.13. 
-# https://github.com/python-trio/trio/issues/2903 -trio @ git+https://github.com/python-trio/trio@e4c8eb2d7ef59eeea1441656e392fe1b0870a374; python_version == "3.13" +trio==0.25.0 Quart==0.19.4 quart-trio==0.11.1 # https://github.com/pgjones/hypercorn/issues/62 @@ -22,8 +22,5 @@ quart-trio==0.11.1 hypercorn @ git+https://github.com/urllib3/hypercorn@urllib3-changes httpx==0.25.2 pytest-socket==0.7.0 -# CFFI is not going to support CPython 3.13 in an actual release until -# there is a release candidate for 3.13. -# https://github.com/python-cffi/cffi/issues/23#issuecomment-1845861410 -cffi @ git+https://github.com/python-cffi/cffi@14723b0bbd127790c450945099db31018d80fa83; python_version == "3.13" +cffi==1.17.0rc1 diff --git a/docs/conf.py b/docs/conf.py index 138f99abbe..0ab1b3a245 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -110,7 +110,6 @@ ("py:class", "_HttplibHTTPResponse"), ("py:class", "_HttplibHTTPMessage"), ("py:class", "TracebackType"), - ("py:class", "Literal"), ("py:class", "email.errors.MessageDefect"), ("py:class", "MessageDefect"), ("py:class", "http.client.HTTPMessage"), diff --git a/docs/user-guide.rst b/docs/user-guide.rst index 9416fe1263..5c78c8af1c 100644 --- a/docs/user-guide.rst +++ b/docs/user-guide.rst @@ -99,6 +99,8 @@ The :class:`~response.HTTPResponse` object provides print(resp.headers) # HTTPHeaderDict({"Content-Length": "32", ...}) +.. _json_content: + JSON Content ~~~~~~~~~~~~ JSON content can be loaded by :meth:`~response.HTTPResponse.json` diff --git a/dummyserver/app.py b/dummyserver/app.py index 692c31441b..9fc9d1b7ff 100644 --- a/dummyserver/app.py +++ b/dummyserver/app.py @@ -12,10 +12,8 @@ from typing import Iterator import trio -from quart import make_response, request - -# TODO switch to Response if https://github.com/pallets/quart/issues/288 is fixed -from quart.typing import ResponseTypes +from quart import Response, make_response, request +from quart.typing import ResponseReturnValue from quart_trio import QuartTrio hypercorn_app = QuartTrio(__name__) @@ -39,19 +37,19 @@ @hypercorn_app.route("/") @pyodide_testing_app.route("/") @pyodide_testing_app.route("/index") -async def index() -> ResponseTypes: +async def index() -> ResponseReturnValue: return await make_response("Dummy server!") @hypercorn_app.route("/alpn_protocol") -async def alpn_protocol() -> ResponseTypes: +async def alpn_protocol() -> ResponseReturnValue: """Return the requester's certificate.""" alpn_protocol = request.scope["extensions"]["tls"]["alpn_protocol"] return await make_response(alpn_protocol) @hypercorn_app.route("/certificate") -async def certificate() -> ResponseTypes: +async def certificate() -> ResponseReturnValue: """Return the requester's certificate.""" print("scope", request.scope) subject = request.scope["extensions"]["tls"]["client_cert_name"] @@ -61,7 +59,7 @@ async def certificate() -> ResponseTypes: @hypercorn_app.route("/specific_method", methods=["GET", "POST", "PUT"]) @pyodide_testing_app.route("/specific_method", methods=["GET", "POST", "PUT"]) -async def specific_method() -> ResponseTypes: +async def specific_method() -> ResponseReturnValue: "Confirm that the request matches the desired method type" method_param = (await request.values).get("method", "") @@ -74,7 +72,7 @@ async def specific_method() -> ResponseTypes: @hypercorn_app.route("/upload", methods=["POST"]) -async def upload() -> ResponseTypes: +async def upload() -> ResponseReturnValue: "Confirm that the uploaded file conforms to specification" params = await request.form param = 
params.get("upload_param") @@ -105,7 +103,7 @@ async def upload() -> ResponseTypes: @hypercorn_app.route("/chunked") -async def chunked() -> ResponseTypes: +async def chunked() -> ResponseReturnValue: def generate() -> Iterator[str]: for _ in range(4): yield "123" @@ -114,7 +112,7 @@ def generate() -> Iterator[str]: @hypercorn_app.route("/chunked_gzip") -async def chunked_gzip() -> ResponseTypes: +async def chunked_gzip() -> ResponseReturnValue: def generate() -> Iterator[bytes]: compressor = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) @@ -126,7 +124,7 @@ def generate() -> Iterator[bytes]: @hypercorn_app.route("/keepalive") -async def keepalive() -> ResponseTypes: +async def keepalive() -> ResponseReturnValue: if request.args.get("close", b"0") == b"1": headers = [("Connection", "close")] return await make_response("Closing", 200, headers) @@ -136,7 +134,7 @@ async def keepalive() -> ResponseTypes: @hypercorn_app.route("/echo", methods=["GET", "POST", "PUT"]) -async def echo() -> ResponseTypes: +async def echo() -> ResponseReturnValue: "Echo back the params" if request.method == "GET": return await make_response(request.query_string) @@ -146,7 +144,7 @@ async def echo() -> ResponseTypes: @hypercorn_app.route("/echo_json", methods=["POST"]) @pyodide_testing_app.route("/echo_json", methods=["POST", "OPTIONS"]) -async def echo_json() -> ResponseTypes: +async def echo_json() -> ResponseReturnValue: "Echo back the JSON" if request.method == "OPTIONS": return await make_response("", 200) @@ -156,14 +154,14 @@ async def echo_json() -> ResponseTypes: @hypercorn_app.route("/echo_uri/") @hypercorn_app.route("/echo_uri", defaults={"rest": ""}) -async def echo_uri(rest: str) -> ResponseTypes: +async def echo_uri(rest: str) -> ResponseReturnValue: "Echo back the requested URI" assert request.full_path is not None return await make_response(request.full_path) @hypercorn_app.route("/echo_params") -async def echo_params() -> ResponseTypes: +async def echo_params() -> ResponseReturnValue: "Echo back the query parameters" await request.get_data() echod = sorted((k, v) for k, v in request.args.items()) @@ -171,12 +169,12 @@ async def echo_params() -> ResponseTypes: @hypercorn_app.route("/headers", methods=["GET", "POST"]) -async def headers() -> ResponseTypes: +async def headers() -> ResponseReturnValue: return await make_response(dict(request.headers.items())) @hypercorn_app.route("/headers_and_params") -async def headers_and_params() -> ResponseTypes: +async def headers_and_params() -> ResponseReturnValue: return await make_response( { "headers": dict(request.headers), @@ -186,12 +184,12 @@ async def headers_and_params() -> ResponseTypes: @hypercorn_app.route("/multi_headers", methods=["GET", "POST"]) -async def multi_headers() -> ResponseTypes: +async def multi_headers() -> ResponseReturnValue: return await make_response({"headers": list(request.headers)}) @hypercorn_app.route("/multi_redirect") -async def multi_redirect() -> ResponseTypes: +async def multi_redirect() -> ResponseReturnValue: "Performs a redirect chain based on ``redirect_codes``" params = request.args codes = params.get("redirect_codes", "200") @@ -206,7 +204,7 @@ async def multi_redirect() -> ResponseTypes: @hypercorn_app.route("/encodingrequest") -async def encodingrequest() -> ResponseTypes: +async def encodingrequest() -> ResponseReturnValue: "Check for UA accepting gzip/deflate encoding" data = b"hello, world!" 
encoding = request.headers.get("Accept-Encoding", "") @@ -230,7 +228,7 @@ async def encodingrequest() -> ResponseTypes: @hypercorn_app.route("/redirect", methods=["GET", "POST", "PUT"]) -async def redirect() -> ResponseTypes: +async def redirect() -> ResponseReturnValue: "Perform a redirect to ``target``" values = await request.values target = values.get("target", "/") @@ -242,7 +240,7 @@ async def redirect() -> ResponseTypes: @hypercorn_app.route("/redirect_after") -async def redirect_after() -> ResponseTypes: +async def redirect_after() -> ResponseReturnValue: "Perform a redirect to ``target``" params = request.args date = params.get("date") @@ -258,7 +256,7 @@ async def redirect_after() -> ResponseTypes: @hypercorn_app.route("/retry_after") -async def retry_after() -> ResponseTypes: +async def retry_after() -> ResponseReturnValue: global LAST_RETRY_AFTER_REQ params = request.args if datetime.datetime.now() - LAST_RETRY_AFTER_REQ < datetime.timedelta(seconds=1): @@ -273,7 +271,7 @@ async def retry_after() -> ResponseTypes: @hypercorn_app.route("/status") @pyodide_testing_app.route("/status") -async def status() -> ResponseTypes: +async def status() -> ResponseReturnValue: values = await request.values status = values.get("status", "200 OK") status_code = status.split(" ")[0] @@ -281,13 +279,13 @@ async def status() -> ResponseTypes: @hypercorn_app.route("/source_address") -async def source_address() -> ResponseTypes: +async def source_address() -> ResponseReturnValue: """Return the requester's IP address.""" return await make_response(request.remote_addr) @hypercorn_app.route("/successful_retry", methods=["GET", "PUT"]) -async def successful_retry() -> ResponseTypes: +async def successful_retry() -> ResponseReturnValue: """First return an error and then success It's not currently very flexible as the number of retries is hard-coded. 
@@ -305,20 +303,20 @@ async def successful_retry() -> ResponseTypes: @pyodide_testing_app.after_request -def apply_caching(response: ResponseTypes) -> ResponseTypes: +def apply_caching(response: Response) -> ResponseReturnValue: for header, value in DEFAULT_HEADERS: response.headers[header] = value return response @pyodide_testing_app.route("/slow") -async def slow() -> ResponseTypes: +async def slow() -> ResponseReturnValue: await trio.sleep(10) return await make_response("TEN SECONDS LATER", 200) @pyodide_testing_app.route("/bigfile") -async def bigfile() -> ResponseTypes: +async def bigfile() -> ResponseReturnValue: # great big text file, should force streaming # if supported bigdata = 1048576 * b"WOOO YAY BOOYAKAH" @@ -326,14 +324,14 @@ async def bigfile() -> ResponseTypes: @pyodide_testing_app.route("/mediumfile") -async def mediumfile() -> ResponseTypes: +async def mediumfile() -> ResponseReturnValue: # quite big file bigdata = 1024 * b"WOOO YAY BOOYAKAH" return await make_response(bigdata, 200) @pyodide_testing_app.route("/upload", methods=["POST", "OPTIONS"]) -async def pyodide_upload() -> ResponseTypes: +async def pyodide_upload() -> ResponseReturnValue: if request.method == "OPTIONS": return await make_response("", 200) spare_data = await request.get_data(parse_form_data=True) @@ -356,7 +354,7 @@ async def pyodide_upload() -> ResponseTypes: @pyodide_testing_app.route("/pyodide/") -async def pyodide(py_file: str) -> ResponseTypes: +async def pyodide(py_file: str) -> ResponseReturnValue: file_path = Path(pyodide_testing_app.config["pyodide_dist_dir"], py_file) if file_path.exists(): mime_type, encoding = mimetypes.guess_type(file_path) @@ -370,7 +368,7 @@ async def pyodide(py_file: str) -> ResponseTypes: @pyodide_testing_app.route("/wheel/dist.whl") -async def wheel() -> ResponseTypes: +async def wheel() -> ResponseReturnValue: # serve our wheel wheel_folder = Path(__file__).parent.parent / "dist" wheels = list(wheel_folder.glob("*.whl")) diff --git a/dummyserver/socketserver.py b/dummyserver/socketserver.py index 202915ce88..b8524b914d 100755 --- a/dummyserver/socketserver.py +++ b/dummyserver/socketserver.py @@ -108,6 +108,7 @@ def __init__( socket_handler: typing.Callable[[socket.socket], None], host: str = "localhost", ready_event: threading.Event | None = None, + quit_event: threading.Event | None = None, ) -> None: super().__init__() self.daemon = True @@ -115,6 +116,7 @@ def __init__( self.socket_handler = socket_handler self.host = host self.ready_event = ready_event + self.quit_event = quit_event def _start_server(self) -> None: if self.USE_IPV6: diff --git a/dummyserver/testcase.py b/dummyserver/testcase.py index 7eed47668b..66a43606a5 100644 --- a/dummyserver/testcase.py +++ b/dummyserver/testcase.py @@ -5,6 +5,7 @@ import ssl import threading import typing +from test import LONG_TIMEOUT import hypercorn import pytest @@ -19,11 +20,19 @@ def consume_socket( - sock: SSLTransport | socket.socket, chunks: int = 65536 + sock: SSLTransport | socket.socket, + chunks: int = 65536, + quit_event: threading.Event | None = None, ) -> bytearray: consumed = bytearray() + sock.settimeout(LONG_TIMEOUT) while True: - b = sock.recv(chunks) + if quit_event and quit_event.is_set(): + break + try: + b = sock.recv(chunks) + except (TimeoutError, socket.timeout): + continue assert isinstance(b, bytes) consumed += b if b.endswith(b"\r\n\r\n"): @@ -57,11 +66,16 @@ class SocketDummyServerTestCase: @classmethod def _start_server( - cls, socket_handler: typing.Callable[[socket.socket], None] + 
cls, + socket_handler: typing.Callable[[socket.socket], None], + quit_event: threading.Event | None = None, ) -> None: ready_event = threading.Event() cls.server_thread = SocketServerThread( - socket_handler=socket_handler, ready_event=ready_event, host=cls.host + socket_handler=socket_handler, + ready_event=ready_event, + host=cls.host, + quit_event=quit_event, ) cls.server_thread.start() ready_event.wait(5) @@ -71,23 +85,41 @@ def _start_server( @classmethod def start_response_handler( - cls, response: bytes, num: int = 1, block_send: threading.Event | None = None + cls, + response: bytes, + num: int = 1, + block_send: threading.Event | None = None, ) -> threading.Event: ready_event = threading.Event() + quit_event = threading.Event() def socket_handler(listener: socket.socket) -> None: for _ in range(num): ready_event.set() - sock = listener.accept()[0] - consume_socket(sock) + listener.settimeout(LONG_TIMEOUT) + while True: + if quit_event.is_set(): + return + try: + sock = listener.accept()[0] + break + except (TimeoutError, socket.timeout): + continue + consume_socket(sock, quit_event=quit_event) + if quit_event.is_set(): + sock.close() + return if block_send: - block_send.wait() + while not block_send.wait(LONG_TIMEOUT): + if quit_event.is_set(): + sock.close() + return block_send.clear() sock.send(response) sock.close() - cls._start_server(socket_handler) + cls._start_server(socket_handler, quit_event=quit_event) return ready_event @classmethod @@ -100,10 +132,25 @@ def start_basic_handler( block_send, ) + @staticmethod + def quit_server_thread(server_thread: SocketServerThread) -> None: + if server_thread.quit_event: + server_thread.quit_event.set() + # in principle the maximum time that the thread can take to notice + # the quit_event is LONG_TIMEOUT and the thread should terminate + # shortly after that, we give 5 seconds leeway just in case + server_thread.join(LONG_TIMEOUT * 2 + 5.0) + if server_thread.is_alive(): + raise Exception("server_thread did not exit") + @classmethod def teardown_class(cls) -> None: if hasattr(cls, "server_thread"): - cls.server_thread.join(0.1) + cls.quit_server_thread(cls.server_thread) + + def teardown_method(self) -> None: + if hasattr(self, "server_thread"): + self.quit_server_thread(self.server_thread) def assert_header_received( self, @@ -128,11 +175,16 @@ def assert_header_received( class IPV4SocketDummyServerTestCase(SocketDummyServerTestCase): @classmethod def _start_server( - cls, socket_handler: typing.Callable[[socket.socket], None] + cls, + socket_handler: typing.Callable[[socket.socket], None], + quit_event: threading.Event | None = None, ) -> None: ready_event = threading.Event() cls.server_thread = SocketServerThread( - socket_handler=socket_handler, ready_event=ready_event, host=cls.host + socket_handler=socket_handler, + ready_event=ready_event, + host=cls.host, + quit_event=quit_event, ) cls.server_thread.USE_IPV6 = False cls.server_thread.start() diff --git a/mypy-requirements.txt b/mypy-requirements.txt index 50105b8f3a..d6dae313d3 100644 --- a/mypy-requirements.txt +++ b/mypy-requirements.txt @@ -11,3 +11,4 @@ httpx==0.25.2 types-backports types-requests nox +zstandard diff --git a/noxfile.py b/noxfile.py index 3b25894a31..92565613f1 100644 --- a/noxfile.py +++ b/noxfile.py @@ -178,9 +178,6 @@ def downstream_requests(session: nox.Session) -> None: session.install(".[socks]", silent=False) session.install("-r", "requirements-dev.txt", silent=False) - # Workaround until https://github.com/psf/httpbin/pull/29 gets released - 
session.install("flask<3", "werkzeug<3", silent=False) - session.cd(root) session.install(".", silent=False) session.cd(f"{tmp_dir}/requests") diff --git a/src/urllib3/_base_connection.py b/src/urllib3/_base_connection.py index bb349c744b..29ca334879 100644 --- a/src/urllib3/_base_connection.py +++ b/src/urllib3/_base_connection.py @@ -12,7 +12,7 @@ class ProxyConfig(typing.NamedTuple): ssl_context: ssl.SSLContext | None use_forwarding_for_https: bool - assert_hostname: None | str | Literal[False] + assert_hostname: None | str | typing.Literal[False] assert_fingerprint: str | None @@ -28,7 +28,7 @@ class _ResponseOptions(typing.NamedTuple): if typing.TYPE_CHECKING: import ssl - from typing import Literal, Protocol + from typing import Protocol from .response import BaseHTTPResponse @@ -124,7 +124,7 @@ class BaseHTTPSConnection(BaseHTTPConnection, Protocol): # Certificate verification methods cert_reqs: int | str | None - assert_hostname: None | str | Literal[False] + assert_hostname: None | str | typing.Literal[False] assert_fingerprint: str | None ssl_context: ssl.SSLContext | None @@ -155,7 +155,7 @@ def __init__( proxy: Url | None = None, proxy_config: ProxyConfig | None = None, cert_reqs: int | str | None = None, - assert_hostname: None | str | Literal[False] = None, + assert_hostname: None | str | typing.Literal[False] = None, assert_fingerprint: str | None = None, server_hostname: str | None = None, ssl_context: ssl.SSLContext | None = None, diff --git a/src/urllib3/_collections.py b/src/urllib3/_collections.py index 55b0324797..8a4409a122 100644 --- a/src/urllib3/_collections.py +++ b/src/urllib3/_collections.py @@ -427,7 +427,7 @@ def _copy_from(self, other: HTTPHeaderDict) -> None: val = other.getlist(key) self._container[key.lower()] = [key, *val] - def copy(self) -> HTTPHeaderDict: + def copy(self) -> Self: clone = type(self)() clone._copy_from(self) return clone @@ -462,7 +462,7 @@ def __ior__(self, other: object) -> HTTPHeaderDict: self.extend(maybe_constructable) return self - def __or__(self, other: object) -> HTTPHeaderDict: + def __or__(self, other: object) -> Self: # Supports merging header dicts using operator | # combining items with add instead of __setitem__ maybe_constructable = ensure_can_construct_http_header_dict(other) @@ -472,7 +472,7 @@ def __or__(self, other: object) -> HTTPHeaderDict: result.extend(maybe_constructable) return result - def __ror__(self, other: object) -> HTTPHeaderDict: + def __ror__(self, other: object) -> Self: # Supports merging header dicts using operator | when other is on left side # combining items with add instead of __setitem__ maybe_constructable = ensure_can_construct_http_header_dict(other) diff --git a/src/urllib3/_version.py b/src/urllib3/_version.py index 095cf3c16b..7442f2b842 100644 --- a/src/urllib3/_version.py +++ b/src/urllib3/_version.py @@ -1,4 +1,4 @@ # This file is protected via CODEOWNERS from __future__ import annotations -__version__ = "2.2.1" +__version__ = "2.2.2" diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py index aa5c547c66..1b16279209 100644 --- a/src/urllib3/connection.py +++ b/src/urllib3/connection.py @@ -14,8 +14,6 @@ from socket import timeout as SocketTimeout if typing.TYPE_CHECKING: - from typing import Literal - from .response import HTTPResponse from .util.ssl_ import _TYPE_PEER_CERT_RET_DICT from .util.ssltransport import SSLTransport @@ -482,6 +480,7 @@ def getresponse( # type: ignore[override] headers=headers, status=httplib_response.status, version=httplib_response.version, + 
version_string=getattr(self, "_http_vsn_str", "HTTP/?"), reason=httplib_response.reason, preload_content=resp_options.preload_content, decode_content=resp_options.decode_content, @@ -523,7 +522,7 @@ def __init__( proxy: Url | None = None, proxy_config: ProxyConfig | None = None, cert_reqs: int | str | None = None, - assert_hostname: None | str | Literal[False] = None, + assert_hostname: None | str | typing.Literal[False] = None, assert_fingerprint: str | None = None, server_hostname: str | None = None, ssl_context: ssl.SSLContext | None = None, @@ -577,7 +576,7 @@ def set_cert( cert_reqs: int | str | None = None, key_password: str | None = None, ca_certs: str | None = None, - assert_hostname: None | str | Literal[False] = None, + assert_hostname: None | str | typing.Literal[False] = None, assert_fingerprint: str | None = None, ca_cert_dir: str | None = None, ca_cert_data: None | str | bytes = None, @@ -742,7 +741,7 @@ def _ssl_wrap_socket_and_match_hostname( ca_certs: str | None, ca_cert_dir: str | None, ca_cert_data: None | str | bytes, - assert_hostname: None | str | Literal[False], + assert_hostname: None | str | typing.Literal[False], assert_fingerprint: str | None, server_hostname: str | None, ssl_context: ssl.SSLContext | None, diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py index bd58ff14dd..a2c3cf6098 100644 --- a/src/urllib3/connectionpool.py +++ b/src/urllib3/connectionpool.py @@ -53,7 +53,8 @@ if typing.TYPE_CHECKING: import ssl - from typing import Literal + + from typing_extensions import Self from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection @@ -61,8 +62,6 @@ _TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None] -_SelfT = typing.TypeVar("_SelfT") - # Pool objects class ConnectionPool: @@ -95,7 +94,7 @@ def __init__(self, host: str, port: int | None = None) -> None: def __str__(self) -> str: return f"{type(self).__name__}(host={self.host!r}, port={self.port!r})" - def __enter__(self: _SelfT) -> _SelfT: + def __enter__(self) -> Self: return self def __exit__( @@ -103,7 +102,7 @@ def __exit__( exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None, - ) -> Literal[False]: + ) -> typing.Literal[False]: self.close() # Return False to re-raise any potential exceptions return False @@ -544,17 +543,14 @@ def _make_request( response._connection = response_conn # type: ignore[attr-defined] response._pool = self # type: ignore[attr-defined] - # emscripten connection doesn't have _http_vsn_str - http_version = getattr(conn, "_http_vsn_str", "HTTP/?") log.debug( - '%s://%s:%s "%s %s %s" %s %s', + '%s://%s:%s "%s %s HTTP/%s" %s %s', self.scheme, self.host, self.port, method, url, - # HTTP version - http_version, + response.version, response.status, response.length_remaining, ) @@ -1002,7 +998,7 @@ def __init__( ssl_version: int | str | None = None, ssl_minimum_version: ssl.TLSVersion | None = None, ssl_maximum_version: ssl.TLSVersion | None = None, - assert_hostname: str | Literal[False] | None = None, + assert_hostname: str | typing.Literal[False] | None = None, assert_fingerprint: str | None = None, ca_cert_dir: str | None = None, **conn_kw: typing.Any, diff --git a/src/urllib3/contrib/emscripten/response.py b/src/urllib3/contrib/emscripten/response.py index 303b4ee011..cd3d80e430 100644 --- a/src/urllib3/contrib/emscripten/response.py +++ b/src/urllib3/contrib/emscripten/response.py @@ -45,6 +45,7 @@ def __init__( status=internal_response.status_code, request_url=url, version=0, + 
version_string="HTTP/?", reason="", decode_content=True, ) @@ -155,7 +156,7 @@ def read( self.length_is_certain = True # wrap body in IOStream self._response.body = BytesIO(self._response.body) - if amt is not None: + if amt is not None and amt >= 0: # don't cache partial content cache_content = False data = self._response.body.read(amt) @@ -211,13 +212,21 @@ def data(self) -> bytes: def json(self) -> typing.Any: """ - Parses the body of the HTTP response as JSON. + Deserializes the body of the HTTP response as a Python object. - To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to the decoder. + The body of the HTTP response must be encoded using UTF-8, as per + `RFC 8529 Section 8.1 `_. - This method can raise either `UnicodeDecodeError` or `json.JSONDecodeError`. + To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to + your custom decoder instead. - Read more :ref:`here `. + If the body of the HTTP response is not decodable to UTF-8, a + `UnicodeDecodeError` will be raised. If the body of the HTTP response is not a + valid JSON document, a `json.JSONDecodeError` will be raised. + + Read more :ref:`here `. + + :returns: The body of the HTTP response as a Python object. """ data = self.data.decode("utf-8") return _json.loads(data) diff --git a/src/urllib3/contrib/socks.py b/src/urllib3/contrib/socks.py index 5a803916b0..c62b5e0332 100644 --- a/src/urllib3/contrib/socks.py +++ b/src/urllib3/contrib/socks.py @@ -71,10 +71,8 @@ except ImportError: ssl = None # type: ignore[assignment] -from typing import TypedDict - -class _TYPE_SOCKS_OPTIONS(TypedDict): +class _TYPE_SOCKS_OPTIONS(typing.TypedDict): socks_version: int proxy_host: str | None proxy_port: str | None diff --git a/src/urllib3/http2.py b/src/urllib3/http2.py index 15fa9d9157..ceb40602da 100644 --- a/src/urllib3/http2.py +++ b/src/urllib3/http2.py @@ -195,6 +195,7 @@ def __init__( headers=headers, # Following CPython, we map HTTP versions to major * 10 + minor integers version=20, + version_string="HTTP/2", # No reason phrase in HTTP/2 reason=None, decode_content=decode_content, diff --git a/src/urllib3/poolmanager.py b/src/urllib3/poolmanager.py index 32da0a00ab..085d1dbafd 100644 --- a/src/urllib3/poolmanager.py +++ b/src/urllib3/poolmanager.py @@ -26,7 +26,8 @@ if typing.TYPE_CHECKING: import ssl - from typing import Literal + + from typing_extensions import Self __all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] @@ -51,8 +52,6 @@ # http.client.HTTPConnection & http.client.HTTPSConnection in Python 3.7 _DEFAULT_BLOCKSIZE = 16384 -_SelfT = typing.TypeVar("_SelfT") - class PoolKey(typing.NamedTuple): """ @@ -214,7 +213,7 @@ def __init__( self.pool_classes_by_scheme = pool_classes_by_scheme self.key_fn_by_scheme = key_fn_by_scheme.copy() - def __enter__(self: _SelfT) -> _SelfT: + def __enter__(self) -> Self: return self def __exit__( @@ -222,7 +221,7 @@ def __exit__( exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None, - ) -> Literal[False]: + ) -> typing.Literal[False]: self.clear() # Return False to re-raise any potential exceptions return False @@ -553,7 +552,7 @@ def __init__( proxy_headers: typing.Mapping[str, str] | None = None, proxy_ssl_context: ssl.SSLContext | None = None, use_forwarding_for_https: bool = False, - proxy_assert_hostname: None | str | Literal[False] = None, + proxy_assert_hostname: None | str | typing.Literal[False] = None, proxy_assert_fingerprint: str | None = None, **connection_pool_kw: typing.Any, ) -> 
None: diff --git a/src/urllib3/response.py b/src/urllib3/response.py index d31fac9ba0..a0273d65b0 100644 --- a/src/urllib3/response.py +++ b/src/urllib3/response.py @@ -26,20 +26,21 @@ brotli = None try: - import zstandard as zstd # type: ignore[import-not-found] - + import zstandard as zstd +except (AttributeError, ImportError, ValueError): # Defensive: + HAS_ZSTD = False +else: # The package 'zstandard' added the 'eof' property starting # in v0.18.0 which we require to ensure a complete and # valid zstd stream was fed into the ZstdDecoder. # See: https://github.com/urllib3/urllib3/pull/2624 - _zstd_version = _zstd_version = tuple( + _zstd_version = tuple( map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr] ) if _zstd_version < (0, 18): # Defensive: - zstd = None - -except (AttributeError, ImportError, ValueError): # Defensive: - zstd = None + HAS_ZSTD = False + else: + HAS_ZSTD = True from . import util from ._base_connection import _TYPE_BODY @@ -61,8 +62,6 @@ from .util.retry import Retry if typing.TYPE_CHECKING: - from typing import Literal - from .connectionpool import HTTPConnectionPool log = logging.getLogger(__name__) @@ -163,7 +162,7 @@ def flush(self) -> bytes: return b"" -if zstd is not None: +if HAS_ZSTD: class ZstdDecoder(ContentDecoder): def __init__(self) -> None: @@ -183,7 +182,7 @@ def flush(self) -> bytes: ret = self._obj.flush() # note: this is a no-op if not self._obj.eof: raise DecodeError("Zstandard data is incomplete") - return ret # type: ignore[no-any-return] + return ret class MultiDecoder(ContentDecoder): @@ -219,7 +218,7 @@ def _get_decoder(mode: str) -> ContentDecoder: if brotli is not None and mode == "br": return BrotliDecoder() - if zstd is not None and mode == "zstd": + if HAS_ZSTD and mode == "zstd": return ZstdDecoder() return DeflateDecoder() @@ -302,7 +301,7 @@ class BaseHTTPResponse(io.IOBase): CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"] if brotli is not None: CONTENT_DECODERS += ["br"] - if zstd is not None: + if HAS_ZSTD: CONTENT_DECODERS += ["zstd"] REDIRECT_STATUSES = [301, 302, 303, 307, 308] @@ -310,7 +309,7 @@ class BaseHTTPResponse(io.IOBase): if brotli is not None: DECODER_ERROR_CLASSES += (brotli.error,) - if zstd is not None: + if HAS_ZSTD: DECODER_ERROR_CLASSES += (zstd.ZstdError,) def __init__( @@ -319,6 +318,7 @@ def __init__( headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None, status: int, version: int, + version_string: str, reason: str | None, decode_content: bool, request_url: str | None, @@ -330,6 +330,7 @@ def __init__( self.headers = HTTPHeaderDict(headers) # type: ignore[arg-type] self.status = status self.version = version + self.version_string = version_string self.reason = reason self.decode_content = decode_content self._has_decoded_content = False @@ -346,7 +347,7 @@ def __init__( self._decoder: ContentDecoder | None = None self.length_remaining: int | None - def get_redirect_location(self) -> str | None | Literal[False]: + def get_redirect_location(self) -> str | None | typing.Literal[False]: """ Should we redirect and where to? @@ -364,13 +365,21 @@ def data(self) -> bytes: def json(self) -> typing.Any: """ - Parses the body of the HTTP response as JSON. + Deserializes the body of the HTTP response as a Python object. + + The body of the HTTP response must be encoded using UTF-8, as per + `RFC 8259 Section 8.1 <https://www.rfc-editor.org/rfc/rfc8259#section-8.1>`_. - To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to the decoder. 
+ To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to + your custom decoder instead. - This method can raise either `UnicodeDecodeError` or `json.JSONDecodeError`. + If the body of the HTTP response is not decodable to UTF-8, a + `UnicodeDecodeError` will be raised. If the body of the HTTP response is not a + valid JSON document, a `json.JSONDecodeError` will be raised. - Read more :ref:`here `. + Read more :ref:`here `. + + :returns: The body of the HTTP response as a Python object. """ data = self.data.decode("utf-8") return _json.loads(data) @@ -567,6 +576,7 @@ def __init__( headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None, status: int = 0, version: int = 0, + version_string: str = "HTTP/?", reason: str | None = None, preload_content: bool = True, decode_content: bool = True, @@ -584,6 +594,7 @@ def __init__( headers=headers, status=status, version=version, + version_string=version_string, reason=reason, decode_content=decode_content, request_url=request_url, @@ -926,7 +937,10 @@ def read( if decode_content is None: decode_content = self.decode_content - if amt is not None: + if amt and amt < 0: + # Negative numbers and `None` should be treated the same. + amt = None + elif amt is not None: cache_content = False if len(self._decoded_buffer) >= amt: @@ -986,6 +1000,9 @@ def read1( """ if decode_content is None: decode_content = self.decode_content + if amt and amt < 0: + # Negative numbers and `None` should be treated the same. + amt = None # try and respond without going to the network if self._has_decoded_content: if not decode_content: @@ -1180,6 +1197,11 @@ def read_chunked( if self._fp.fp is None: # type: ignore[union-attr] return None + if amt and amt < 0: + # Negative numbers and `None` should be treated the same, + # but httplib handles only `None` correctly. + amt = None + while True: self._update_chunk_length() if self.chunk_left == 0: diff --git a/src/urllib3/util/request.py b/src/urllib3/util/request.py index fe0e3485e8..859597e276 100644 --- a/src/urllib3/util/request.py +++ b/src/urllib3/util/request.py @@ -29,7 +29,7 @@ else: ACCEPT_ENCODING += ",br" try: - import zstandard as _unused_module_zstd # type: ignore[import-not-found] # noqa: F401 + import zstandard as _unused_module_zstd # noqa: F401 except ImportError: pass else: diff --git a/src/urllib3/util/retry.py b/src/urllib3/util/retry.py index 7572bfd26a..0456cceba4 100644 --- a/src/urllib3/util/retry.py +++ b/src/urllib3/util/retry.py @@ -21,6 +21,8 @@ from .util import reraise if typing.TYPE_CHECKING: + from typing_extensions import Self + from ..connectionpool import ConnectionPool from ..response import BaseHTTPResponse @@ -187,7 +189,9 @@ class Retry: RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) #: Default headers to be used for ``remove_headers_on_redirect`` - DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"]) + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset( + ["Cookie", "Authorization", "Proxy-Authorization"] + ) #: Default maximum backoff time. DEFAULT_BACKOFF_MAX = 120 @@ -240,7 +244,7 @@ def __init__( ) self.backoff_jitter = backoff_jitter - def new(self, **kw: typing.Any) -> Retry: + def new(self, **kw: typing.Any) -> Self: params = dict( total=self.total, connect=self.connect, @@ -429,7 +433,7 @@ def increment( error: Exception | None = None, _pool: ConnectionPool | None = None, _stacktrace: TracebackType | None = None, - ) -> Retry: + ) -> Self: """Return a new Retry object with incremented retry counters. 
:param response: A response object, or None, if the server did not diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py index b14cf27b61..c46dd83e53 100644 --- a/src/urllib3/util/ssl_.py +++ b/src/urllib3/util/ssl_.py @@ -78,7 +78,7 @@ def _is_has_never_check_common_name_reliable( if typing.TYPE_CHECKING: from ssl import VerifyMode - from typing import Literal, TypedDict + from typing import TypedDict from .ssltransport import SSLTransport as SSLTransportType @@ -365,7 +365,7 @@ def ssl_wrap_socket( ca_cert_dir: str | None = ..., key_password: str | None = ..., ca_cert_data: None | str | bytes = ..., - tls_in_tls: Literal[False] = ..., + tls_in_tls: typing.Literal[False] = ..., ) -> ssl.SSLSocket: ... diff --git a/src/urllib3/util/ssltransport.py b/src/urllib3/util/ssltransport.py index fa9f2b37c5..b52c477c77 100644 --- a/src/urllib3/util/ssltransport.py +++ b/src/urllib3/util/ssltransport.py @@ -8,12 +8,11 @@ from ..exceptions import ProxySchemeUnsupported if typing.TYPE_CHECKING: - from typing import Literal + from typing_extensions import Self from .ssl_ import _TYPE_PEER_CERT_RET, _TYPE_PEER_CERT_RET_DICT -_SelfT = typing.TypeVar("_SelfT", bound="SSLTransport") _WriteBuffer = typing.Union[bytearray, memoryview] _ReturnValue = typing.TypeVar("_ReturnValue") @@ -70,7 +69,7 @@ def __init__( # Perform initial handshake. self._ssl_io_loop(self.sslobj.do_handshake) - def __enter__(self: _SelfT) -> _SelfT: + def __enter__(self) -> Self: return self def __exit__(self, *_: typing.Any) -> None: @@ -174,12 +173,12 @@ def close(self) -> None: @typing.overload def getpeercert( - self, binary_form: Literal[False] = ... + self, binary_form: typing.Literal[False] = ... ) -> _TYPE_PEER_CERT_RET_DICT | None: ... @typing.overload - def getpeercert(self, binary_form: Literal[True]) -> bytes | None: + def getpeercert(self, binary_form: typing.Literal[True]) -> bytes | None: ... 
def getpeercert(self, binary_form: bool = False) -> _TYPE_PEER_CERT_RET: diff --git a/test/__init__.py b/test/__init__.py index 12c0055493..62c26330e6 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -26,9 +26,11 @@ brotli = None try: - import zstandard as zstd # type: ignore[import-not-found] + import zstandard as _unused_module_zstd # noqa: F401 except ImportError: - zstd = None + HAS_ZSTD = False +else: + HAS_ZSTD = True from urllib3 import util from urllib3.connectionpool import ConnectionPool @@ -42,7 +44,6 @@ if typing.TYPE_CHECKING: import ssl - from typing import Literal _RT = typing.TypeVar("_RT") # return type @@ -144,13 +145,13 @@ def notBrotli() -> typing.Callable[[_TestFuncT], _TestFuncT]: def onlyZstd() -> typing.Callable[[_TestFuncT], _TestFuncT]: return pytest.mark.skipif( - zstd is None, reason="only run if a python-zstandard library is installed" + not HAS_ZSTD, reason="only run if a python-zstandard library is installed" ) def notZstd() -> typing.Callable[[_TestFuncT], _TestFuncT]: return pytest.mark.skipif( - zstd is not None, + HAS_ZSTD, reason="only run if a python-zstandard library is not installed", ) @@ -264,7 +265,7 @@ def __exit__( exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, - ) -> Literal[False]: + ) -> typing.Literal[False]: self.uninstall() return False diff --git a/test/test_response.py b/test/test_response.py index 6a3c50e147..c0062771ec 100644 --- a/test/test_response.py +++ b/test/test_response.py @@ -30,7 +30,6 @@ BytesQueueBuffer, HTTPResponse, brotli, - zstd, ) from urllib3.util.response import is_fp_closed from urllib3.util.retry import RequestHistory, Retry @@ -218,6 +217,12 @@ def test_reference_read(self) -> None: assert r.read() == b"" assert r.read() == b"" + @pytest.mark.parametrize("read_args", ((), (None,), (-1,))) + def test_reference_read_until_eof(self, read_args: tuple[typing.Any, ...]) -> None: + fp = BytesIO(b"foo") + r = HTTPResponse(fp, preload_content=False) + assert r.read(*read_args) == b"foo" + def test_reference_read1(self) -> None: fp = BytesIO(b"foobar") r = HTTPResponse(fp, preload_content=False) @@ -228,6 +233,14 @@ def test_reference_read1(self) -> None: assert r.read1() == b"bar" assert r.read1() == b"" + @pytest.mark.parametrize("read1_args", ((), (None,), (-1,))) + def test_reference_read1_without_limit( + self, read1_args: tuple[typing.Any, ...] 
+ ) -> None: + fp = BytesIO(b"foo") + r = HTTPResponse(fp, preload_content=False) + assert r.read1(*read1_args) == b"foo" + def test_reference_read1_nodecode(self) -> None: fp = BytesIO(b"foobar") r = HTTPResponse(fp, preload_content=False, decode_content=False) @@ -389,6 +402,8 @@ def test_decode_brotli_error(self) -> None: @onlyZstd() def test_decode_zstd(self) -> None: + import zstandard as zstd + data = zstd.compress(b"foo") fp = BytesIO(data) @@ -397,6 +412,8 @@ def test_decode_zstd(self) -> None: @onlyZstd() def test_decode_multiframe_zstd(self) -> None: + import zstandard as zstd + data = ( # Zstandard frame zstd.compress(b"foo") @@ -416,6 +433,8 @@ def test_decode_multiframe_zstd(self) -> None: @onlyZstd() def test_chunked_decoding_zstd(self) -> None: + import zstandard as zstd + data = zstd.compress(b"foobarbaz") fp = BytesIO(data) @@ -447,6 +466,8 @@ def test_decode_zstd_error(self, data: bytes) -> None: @onlyZstd() @pytest.mark.parametrize("data", decode_param_set) def test_decode_zstd_incomplete_preload_content(self, data: bytes) -> None: + import zstandard as zstd + data = zstd.compress(data) fp = BytesIO(data[:-1]) @@ -456,6 +477,8 @@ def test_decode_zstd_incomplete_preload_content(self, data: bytes) -> None: @onlyZstd() @pytest.mark.parametrize("data", decode_param_set) def test_decode_zstd_incomplete_read(self, data: bytes) -> None: + import zstandard as zstd + data = zstd.compress(data) fp = BytesIO(data[:-1]) # shorten the data to trigger DecodeError @@ -471,6 +494,8 @@ def test_decode_zstd_incomplete_read(self, data: bytes) -> None: @onlyZstd() @pytest.mark.parametrize("data", decode_param_set) def test_decode_zstd_incomplete_read1(self, data: bytes) -> None: + import zstandard as zstd + data = zstd.compress(data) fp = BytesIO(data[:-1]) @@ -489,6 +514,8 @@ def test_decode_zstd_incomplete_read1(self, data: bytes) -> None: @onlyZstd() @pytest.mark.parametrize("data", decode_param_set) def test_decode_zstd_read1(self, data: bytes) -> None: + import zstandard as zstd + encoded_data = zstd.compress(data) fp = BytesIO(encoded_data) @@ -564,6 +591,7 @@ def test_base_io(self) -> None: resp = BaseHTTPResponse( status=200, version=11, + version_string="HTTP/1.1", reason=None, decode_content=False, request_url=None, @@ -1249,7 +1277,10 @@ def test_mock_transfer_encoding_chunked_custom_read(self) -> None: response = list(resp.read_chunked(2)) assert expected_response == response - def test_mock_transfer_encoding_chunked_unlmtd_read(self) -> None: + @pytest.mark.parametrize("read_chunked_args", ((), (None,), (-1,))) + def test_mock_transfer_encoding_chunked_unlmtd_read( + self, read_chunked_args: tuple[typing.Any, ...] 
+ ) -> None: stream = [b"foooo", b"bbbbaaaaar"] fp = MockChunkedEncodingResponse(stream) r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] @@ -1259,7 +1290,7 @@ def test_mock_transfer_encoding_chunked_unlmtd_read(self) -> None: resp = HTTPResponse( r, preload_content=False, headers={"transfer-encoding": "chunked"} ) - assert stream == list(resp.read_chunked()) + assert stream == list(resp.read_chunked(*read_chunked_args)) def test_read_not_chunked_response_as_chunks(self) -> None: fp = BytesIO(b"foo") diff --git a/test/test_retry.py b/test/test_retry.py index f71e7acc9e..ac3ce4ca73 100644 --- a/test/test_retry.py +++ b/test/test_retry.py @@ -334,7 +334,11 @@ def test_retry_method_not_allowed(self) -> None: def test_retry_default_remove_headers_on_redirect(self) -> None: retry = Retry() - assert retry.remove_headers_on_redirect == {"authorization", "cookie"} + assert retry.remove_headers_on_redirect == { + "authorization", + "proxy-authorization", + "cookie", + } def test_retry_set_remove_headers_on_redirect(self) -> None: retry = Retry(remove_headers_on_redirect=["X-API-Secret"]) diff --git a/test/test_ssltransport.py b/test/test_ssltransport.py index 4cce4def02..b6d1f861eb 100644 --- a/test/test_ssltransport.py +++ b/test/test_ssltransport.py @@ -4,6 +4,7 @@ import select import socket import ssl +import threading import typing from unittest import mock @@ -14,9 +15,6 @@ from urllib3.util import ssl_ from urllib3.util.ssltransport import SSLTransport -if typing.TYPE_CHECKING: - from typing import Literal - # consume_socket can iterate forever, we add timeouts to prevent halting. PER_TEST_TIMEOUT = 60 @@ -34,12 +32,12 @@ def server_client_ssl_contexts() -> tuple[ssl.SSLContext, ssl.SSLContext]: @typing.overload -def sample_request(binary: Literal[True] = ...) -> bytes: +def sample_request(binary: typing.Literal[True] = ...) -> bytes: ... @typing.overload -def sample_request(binary: Literal[False]) -> str: +def sample_request(binary: typing.Literal[False]) -> str: ... @@ -54,7 +52,7 @@ def sample_request(binary: bool = True) -> bytes | str: def validate_request( - provided_request: bytearray, binary: Literal[False, True] = True + provided_request: bytearray, binary: typing.Literal[False, True] = True ) -> None: assert provided_request is not None expected_request = sample_request(binary) @@ -62,12 +60,12 @@ def validate_request( @typing.overload -def sample_response(binary: Literal[True] = ...) -> bytes: +def sample_response(binary: typing.Literal[True] = ...) -> bytes: ... @typing.overload -def sample_response(binary: Literal[False]) -> str: +def sample_response(binary: typing.Literal[False]) -> str: ... 
@@ -111,20 +109,29 @@ def setup_class(cls) -> None: cls.server_context, cls.client_context = server_client_ssl_contexts() def start_dummy_server( - self, handler: typing.Callable[[socket.socket], None] | None = None + self, + handler: typing.Callable[[socket.socket], None] | None = None, + validate: bool = True, ) -> None: + quit_event = threading.Event() + def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] try: with self.server_context.wrap_socket(sock, server_side=True) as ssock: - request = consume_socket(ssock) + request = consume_socket( + ssock, + quit_event=quit_event, + ) + if not validate: + return validate_request(request) ssock.send(sample_response()) except (ConnectionAbortedError, ConnectionResetError): return chosen_handler = handler if handler else socket_handler - self._start_server(chosen_handler) + self._start_server(chosen_handler, quit_event=quit_event) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_start_closed_socket(self) -> None: @@ -138,7 +145,7 @@ def test_start_closed_socket(self) -> None: @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_close_after_handshake(self) -> None: """Socket errors should be bubbled up""" - self.start_dummy_server() + self.start_dummy_server(validate=False) sock = socket.create_connection((self.host, self.port)) with SSLTransport( diff --git a/test/test_util.py b/test/test_util.py index 8ed92ee189..268f79f0dc 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -41,9 +41,6 @@ from . import clear_warnings -if typing.TYPE_CHECKING: - from typing import Literal - # This number represents a time in seconds, it doesn't mean anything in # isolation. Setting to a high-ish value to avoid conflicts with the smaller # numbers used for timeouts @@ -516,7 +513,7 @@ def test_netloc(self, url: str, expected_netloc: str | None) -> None: @pytest.mark.parametrize("url, expected_url", url_vulnerabilities) def test_url_vulnerabilities( - self, url: str, expected_url: Literal[False] | Url + self, url: str, expected_url: typing.Literal[False] | Url ) -> None: if expected_url is False: with pytest.raises(LocationParseError): @@ -748,7 +745,7 @@ def test_timeout_elapsed(self, time_monotonic: MagicMock) -> None: def test_is_fp_closed_object_supports_closed(self) -> None: class ClosedFile: @property - def closed(self) -> Literal[True]: + def closed(self) -> typing.Literal[True]: return True assert is_fp_closed(ClosedFile()) @@ -764,7 +761,7 @@ def fp(self) -> None: def test_is_fp_closed_object_has_fp(self) -> None: class FpFile: @property - def fp(self) -> Literal[True]: + def fp(self) -> typing.Literal[True]: return True assert not is_fp_closed(FpFile()) diff --git a/test/with_dummyserver/test_poolmanager.py b/test/with_dummyserver/test_poolmanager.py index 4fa9ec850a..af77241d6c 100644 --- a/test/with_dummyserver/test_poolmanager.py +++ b/test/with_dummyserver/test_poolmanager.py @@ -144,7 +144,11 @@ def test_redirect_cross_host_remove_headers(self) -> None: "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, - headers={"Authorization": "foo", "Cookie": "foo=bar"}, + headers={ + "Authorization": "foo", + "Proxy-Authorization": "bar", + "Cookie": "foo=bar", + }, ) assert r.status == 200 @@ -152,13 +156,18 @@ def test_redirect_cross_host_remove_headers(self) -> None: data = r.json() assert "Authorization" not in data + assert "Proxy-Authorization" not in data assert "Cookie" not in data r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, - 
headers={"authorization": "foo", "cookie": "foo=bar"}, + headers={ + "authorization": "foo", + "proxy-authorization": "baz", + "cookie": "foo=bar", + }, ) assert r.status == 200 @@ -167,6 +176,8 @@ def test_redirect_cross_host_remove_headers(self) -> None: assert "authorization" not in data assert "Authorization" not in data + assert "proxy-authorization" not in data + assert "Proxy-Authorization" not in data assert "cookie" not in data assert "Cookie" not in data @@ -176,7 +187,11 @@ def test_redirect_cross_host_no_remove_headers(self) -> None: "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, - headers={"Authorization": "foo", "Cookie": "foo=bar"}, + headers={ + "Authorization": "foo", + "Proxy-Authorization": "bar", + "Cookie": "foo=bar", + }, retries=Retry(remove_headers_on_redirect=[]), ) @@ -185,6 +200,7 @@ def test_redirect_cross_host_no_remove_headers(self) -> None: data = r.json() assert data["Authorization"] == "foo" + assert data["Proxy-Authorization"] == "bar" assert data["Cookie"] == "foo=bar" def test_redirect_cross_host_set_removed_headers(self) -> None: @@ -196,6 +212,7 @@ def test_redirect_cross_host_set_removed_headers(self) -> None: headers={ "X-API-Secret": "foo", "Authorization": "bar", + "Proxy-Authorization": "baz", "Cookie": "foo=bar", }, retries=Retry(remove_headers_on_redirect=["X-API-Secret"]), @@ -207,11 +224,13 @@ def test_redirect_cross_host_set_removed_headers(self) -> None: assert "X-API-Secret" not in data assert data["Authorization"] == "bar" + assert data["Proxy-Authorization"] == "baz" assert data["Cookie"] == "foo=bar" headers = { "x-api-secret": "foo", "authorization": "bar", + "proxy-authorization": "baz", "cookie": "foo=bar", } r = http.request( @@ -229,12 +248,14 @@ def test_redirect_cross_host_set_removed_headers(self) -> None: assert "x-api-secret" not in data assert "X-API-Secret" not in data assert data["Authorization"] == "bar" + assert data["Proxy-Authorization"] == "baz" assert data["Cookie"] == "foo=bar" # Ensure the header argument itself is not modified in-place. 
assert headers == { "x-api-secret": "foo", "authorization": "bar", + "proxy-authorization": "baz", "cookie": "foo=bar", } diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py index 69d8070b8b..dceb5ee0ee 100644 --- a/test/with_dummyserver/test_socketlevel.py +++ b/test/with_dummyserver/test_socketlevel.py @@ -12,6 +12,7 @@ import socket import ssl import tempfile +import threading import typing import zlib from collections import OrderedDict @@ -955,7 +956,11 @@ def socket_handler(listener: socket.socket) -> None: assert response.connection is None def test_socket_close_socket_then_file(self) -> None: - def consume_ssl_socket(listener: socket.socket) -> None: + quit_event = threading.Event() + + def consume_ssl_socket( + listener: socket.socket, + ) -> None: try: with listener.accept()[0] as sock, original_ssl_wrap_socket( sock, @@ -964,11 +969,11 @@ def consume_ssl_socket(listener: socket.socket) -> None: certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) as ssl_sock: - consume_socket(ssl_sock) + consume_socket(ssl_sock, quit_event=quit_event) except (ConnectionResetError, ConnectionAbortedError, OSError): pass - self._start_server(consume_ssl_socket) + self._start_server(consume_ssl_socket, quit_event=quit_event) with socket.create_connection( (self.host, self.port) ) as sock, contextlib.closing( @@ -983,6 +988,8 @@ def consume_ssl_socket(listener: socket.socket) -> None: assert ssl_sock.fileno() == -1 def test_socket_close_stays_open_with_makefile_open(self) -> None: + quit_event = threading.Event() + def consume_ssl_socket(listener: socket.socket) -> None: try: with listener.accept()[0] as sock, original_ssl_wrap_socket( @@ -992,11 +999,11 @@ def consume_ssl_socket(listener: socket.socket) -> None: certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) as ssl_sock: - consume_socket(ssl_sock) + consume_socket(ssl_sock, quit_event=quit_event) except (ConnectionResetError, ConnectionAbortedError, OSError): pass - self._start_server(consume_ssl_socket) + self._start_server(consume_ssl_socket, quit_event=quit_event) with socket.create_connection( (self.host, self.port) ) as sock, contextlib.closing( @@ -2232,11 +2239,28 @@ def socket_handler(listener: socket.socket) -> None: class TestMultipartResponse(SocketDummyServerTestCase): def test_multipart_assert_header_parsing_no_defects(self) -> None: + quit_event = threading.Event() + def socket_handler(listener: socket.socket) -> None: for _ in range(2): - sock = listener.accept()[0] - while not sock.recv(65536).endswith(b"\r\n\r\n"): - pass + listener.settimeout(LONG_TIMEOUT) + + while True: + if quit_event and quit_event.is_set(): + return + try: + sock = listener.accept()[0] + break + except (TimeoutError, socket.timeout): + continue + + sock.settimeout(LONG_TIMEOUT) + while True: + if quit_event and quit_event.is_set(): + sock.close() + return + if sock.recv(65536).endswith(b"\r\n\r\n"): + break sock.sendall( b"HTTP/1.1 404 Not Found\r\n" @@ -2252,7 +2276,7 @@ def socket_handler(listener: socket.socket) -> None: ) sock.close() - self._start_server(socket_handler) + self._start_server(socket_handler, quit_event=quit_event) from urllib3.connectionpool import log with mock.patch.object(log, "warning") as log_warning: @@ -2308,15 +2332,26 @@ def socket_handler(listener: socket.socket) -> None: def test_chunked_specified( self, method: str, chunked: bool, body_type: str ) -> None: + quit_event = threading.Event() buffer = bytearray() expected_bytes = 
b"\r\n\r\na\r\nxxxxxxxxxx\r\n0\r\n\r\n" def socket_handler(listener: socket.socket) -> None: nonlocal buffer - sock = listener.accept()[0] - sock.settimeout(0) + listener.settimeout(LONG_TIMEOUT) + while True: + if quit_event.is_set(): + return + try: + sock = listener.accept()[0] + break + except (TimeoutError, socket.timeout): + continue + sock.settimeout(LONG_TIMEOUT) while expected_bytes not in buffer: + if quit_event.is_set(): + return with contextlib.suppress(BlockingIOError): buffer += sock.recv(65536) @@ -2327,7 +2362,7 @@ def socket_handler(listener: socket.socket) -> None: ) sock.close() - self._start_server(socket_handler) + self._start_server(socket_handler, quit_event=quit_event) body: typing.Any if body_type == "generator":