From 17383faf18ed742a69a4288c6cd7ca16c40d11cb Mon Sep 17 00:00:00 2001 From: TAHRI Ahmed R Date: Mon, 1 Jan 2024 05:36:40 +0100 Subject: [PATCH] :bookmark: Release 3.4.0 (#59) 3.4.0 (2024-01-01) ------------------ **Added** - Support for specifying a custom DNS resolver in `Session`. - Support for passing the source address in `Session`. - Support for disabling either IPv4 or IPv6 within a `Session`. **Changed** - PySocks is no longer used for SOCKS proxies. Replaced by **python-socks** instead. - urllib3.future minimal version raised to 2.4+ to leverage newly added features. - Improve compatibility when end-user inadvertently pass a `Timeout` or `Retry` instance from the legacy `urllib3` instead of `urllib3_future`. **Fixed** - Blocking the event loop when closing the `AsyncSession` using `with`. - Rare exception on older PyPy interpreters due to Generic having unsupported type variable in `extensions._sync_to_async` module. **Misc** - Project extras aligned with **urllib3.future**. - Using nox instead of tox. - Switch to ruff instead of black and isort. --- .github/workflows/lint.yml | 8 +- .github/workflows/run-tests.yml | 4 +- .gitignore | 2 + .pre-commit-config.yaml | 24 ++- HISTORY.md | 22 +++ Makefile | 10 +- README.md | 16 +- docs/api.rst | 2 +- docs/index.rst | 45 +++--- docs/user/advanced.rst | 86 ++++++++++- docs/user/authentication.rst | 22 +++ docs/user/quickstart.rst | 84 +++++++++++ noxfile.py | 60 ++++++++ pyproject.toml | 8 +- setup.cfg | 4 - src/niquests/__init__.py | 1 + src/niquests/__version__.py | 4 +- src/niquests/_async.py | 8 +- src/niquests/_compat.py | 52 +++++++ src/niquests/_typing.py | 14 +- src/niquests/adapters.py | 40 ++++- src/niquests/extensions/_ocsp.py | 11 +- src/niquests/extensions/_sync_to_async.py | 15 +- src/niquests/models.py | 5 +- src/niquests/sessions.py | 145 +++++++++++++++++-- src/niquests/utils.py | 89 +++++++++++- tests/test_live.py | 92 ++++++++++++ tests/test_lowlevel.py | 169 +++++++++++----------- tests/test_requests.py | 9 +- tox.ini | 11 -- 30 files changed, 876 insertions(+), 186 deletions(-) create mode 100644 noxfile.py delete mode 100644 setup.cfg create mode 100644 tests/test_live.py delete mode 100644 tox.ini diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 427f9d9f08..c6a6c210be 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -11,7 +11,7 @@ permissions: jobs: lint: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest timeout-minutes: 10 steps: @@ -20,5 +20,7 @@ jobs: uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 with: python-version: "3.x" - - name: Run pre-commit - uses: pre-commit/action@646c83fcd040023954eafda54b4db0192ce70507 # v3.0.0 + - name: Install nox + run: python -m pip install nox + - name: run pre-commit + run: nox -s lint diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 480b720243..9aff71be6f 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -33,7 +33,7 @@ jobs: cache: 'pip' - name: Install dependencies run: | - make + pip install nox - name: Run tests run: | - make ci + nox -s "test-${{ startsWith(matrix.python-version, 'pypy') && 'pypy' || matrix.python-version }}" diff --git a/.gitignore b/.gitignore index 34741a7354..52fc689163 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,8 @@ env/ .pytest_cache/ .vscode/ .eggs/ +.nox/ +.ruff_cache/ .workon diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ff06607d60..2e2aee6585 100644 --- 
a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,28 +8,24 @@ repos: - id: debug-statements - id: end-of-file-fixer - id: trailing-whitespace -- repo: https://github.com/PyCQA/isort - rev: 5.12.0 - hooks: - - id: isort -- repo: https://github.com/psf/black - rev: 23.7.0 - hooks: - - id: black - exclude: tests/test_lowlevel.py - repo: https://github.com/asottile/pyupgrade rev: v3.10.1 hooks: - id: pyupgrade args: [--py37-plus] -- repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 +- repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.1.7 hooks: - - id: flake8 + # Run the linter. + - id: ruff + args: [ --fix ] + # Run the formatter. + - id: ruff-format - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.5.1 hooks: - id: mypy args: [--check-untyped-defs] - exclude: 'tests/' - additional_dependencies: ['charset_normalizer', 'urllib3.future>=2.3.900', 'wassima>=1.0.1', 'idna', 'kiss_headers'] + exclude: 'tests/|noxfile.py' + additional_dependencies: ['charset_normalizer', 'urllib3.future>=2.4.900', 'wassima>=1.0.1', 'idna', 'kiss_headers'] diff --git a/HISTORY.md b/HISTORY.md index 272a9711d8..cdf8d1a43f 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,6 +1,28 @@ Release History =============== +3.4.0 (2024-01-01) +------------------ + +**Added** +- Support for specifying a custom DNS resolver in `Session`. +- Support for passing the source address in `Session`. +- Support for disabling either IPv4 or IPv6 within a `Session`. + +**Changed** +- PySocks is no longer used for SOCKS proxies. Replaced by **python-socks** instead. +- urllib3.future minimal version raised to 2.4+ to leverage newly added features. +- Improve compatibility when end-user inadvertently pass a `Timeout` or `Retry` instance from the legacy `urllib3` instead of `urllib3_future`. + +**Fixed** +- Blocking the event loop when closing the `AsyncSession` using `with`. +- Rare exception on older PyPy interpreters due to Generic having unsupported type variable in `extensions._sync_to_async` module. + +**Misc** +- Project extras aligned with **urllib3.future**. +- Using nox instead of tox. +- Switch to ruff instead of black and isort. + 3.3.4 (2023-12-03) ------------------ diff --git a/Makefile b/Makefile index e2f6c31acb..d3d2ad385b 100644 --- a/Makefile +++ b/Makefile @@ -1,15 +1,15 @@ .PHONY: docs init: - python -m pip install -r requirements-dev.txt + python -m pip install nox coverage test: # This runs all of the tests on all supported Python versions. - tox -p + nox -s test ci: - python -m pytest tests --verbose --junitxml=report.xml + nox -s test coverage: - python -m pytest --cov-config .coveragerc --verbose --cov-report term --cov-report xml --cov=niquests tests + python -m coverage combine && python -m coverage report --ignore-errors --show-missing docs: - cd docs && make html + nox -s docs @echo "\033[95m\n\nBuild successful! 
View the docs homepage at docs/_build/html/index.html.\n\033[0m" diff --git a/README.md b/README.md index 4ed4c3c0ed..1bd56390cc 100644 --- a/README.md +++ b/README.md @@ -12,11 +12,14 @@ Niquests, is the “**Safest**, **Fastest***, **Easiest**, and **Most ```python >>> import niquests ->>> r = niquests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass')) +>>> s = niquests.Session(resolver="doh+google://", multiplexed=True) +>>> r = s.get('https://pie.dev/basic-auth/user/pass', auth=('user', 'pass')) >>> r.status_code 200 >>> r.headers['content-type'] 'application/json; charset=utf8' +>>> r.oheaders.content_type.charset +'utf8' >>> r.encoding 'utf-8' >>> r.text @@ -24,9 +27,11 @@ Niquests, is the “**Safest**, **Fastest***, **Easiest**, and **Most >>> r.json() {'authenticated': True, ...} >>> r - + >>> r.ocsp_verified True +>>> r.conn_info.established_latency +datetime.timedelta(microseconds=38) ``` Niquests allows you to send HTTP requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method! @@ -48,9 +53,11 @@ Niquests officially supports Python or PyPy 3.7+. Niquests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today. +- DNS over HTTPS, DNS over QUIC, DNS over TLS, and DNS over UDP - Automatic Content Decompression and Decoding - OS truststore by default, no more certifi! - OCSP Certificate Revocation Verification +- Advanced connection timings inspection - In-memory certificates (CAs, and mTLS) - Browser-style TLS/SSL Verification - Sessions with Cookie Persistence @@ -59,6 +66,7 @@ Niquests is ready for the demands of building robust and reliable HTTP–speakin - Automatic honoring of `.netrc` - Basic & Digest Authentication - Familiar `dict`–like Cookies +- Network settings fine-tuning - Object-oriented headers - Multi-part File Uploads - Chunked HTTP Requests @@ -69,8 +77,12 @@ Niquests is ready for the demands of building robust and reliable HTTP–speakin - HTTP/2 by default - HTTP/3 over QUIC - Multiplexed! +- Thread-safe! +- DNSSEC! - Async! +Need something more? Create an issue, we listen. + ## Why did we pursue this? For many years now, **Requests** has been frozen and blocked millions of developers, left in a vegetative state diff --git a/docs/api.rst b/docs/api.rst index a3e0ae1010..f397dba02c 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -142,7 +142,7 @@ Removed * Dependency check at runtime for ``urllib3``. There's no more check and warnings at runtime for that subject. Ever. Behavioural Changes -~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~ * Niquests negotiate for a HTTP/2 connection by default, fallback to HTTP/1.1 if not available. * Support for HTTP/3 can be present by default if your platform support the pre-built wheel for qh3. diff --git a/docs/index.rst b/docs/index.rst index 11137710a4..ab6dae09d1 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -28,21 +28,27 @@ is designed to be a drop-in replacement for **Requests** that is no longer under **Behold, the power of Niquests**:: - >>> r = niquests.get('https://api.github.com/user', auth=('user', 'pass')) - >>> r.status_code - 200 - >>> r.headers['content-type'] - 'application/json; charset=utf8' - >>> r.encoding - 'utf-8' - >>> r.text - '{"type":"User"...' 
- >>> r.json() - {'private_gists': 419, 'total_private_repos': 77, ...} - >>> r - - >>> r.ocsp_verified - True + >>> import niquests + >>> s = niquests.Session(resolver="doh+google://", multiplexed=True) + >>> r = s.get('https://pie.dev/basic-auth/user/pass', auth=('user', 'pass')) + >>> r.status_code + 200 + >>> r.headers['content-type'] + 'application/json; charset=utf8' + >>> r.oheaders.content_type.charset + 'utf8' + >>> r.encoding + 'utf-8' + >>> r.text + '{"authenticated": true, ...' + >>> r.json() + {'authenticated': True, ...} + >>> r + + >>> r.ocsp_verified + True + >>> r.conn_info.established_latency + datetime.timedelta(microseconds=38) See `similar code, sans Niquests `_. @@ -57,16 +63,19 @@ Beloved Features Niquests is ready for today's web. +- DNS over HTTPS, DNS over QUIC, DNS over TLS, and DNS over UDP - Automatic Content Decompression and Decoding - OS truststore by default, no more certifi! - OCSP Certificate Revocation Verification +- Advanced connection timings inspection +- In-memory certificates (CAs, and mTLS) - Browser-style TLS/SSL Verification - Sessions with Cookie Persistence - Keep-Alive & Connection Pooling - International Domains and URLs -- Automatic honoring of ``.netrc`` +- Automatic honoring of `.netrc` - Basic & Digest Authentication -- Familiar ``dict``–like Cookies +- Familiar `dict`–like Cookies - Object-oriented headers - Multi-part File Uploads - Chunked HTTP Requests @@ -77,6 +86,8 @@ Niquests is ready for today's web. - HTTP/2 by default - HTTP/3 over QUIC - Multiplexed! +- Thread-safe! +- DNSSEC! - Async! Niquests officially supports Python 3.7+, and runs great on PyPy. diff --git a/docs/user/advanced.rst b/docs/user/advanced.rst index a8bcf7fa21..aec5aa1a3a 100644 --- a/docs/user/advanced.rst +++ b/docs/user/advanced.rst @@ -286,6 +286,27 @@ You may specify the private key passphrase using the following example:: >>> niquests.get('https://kennethreitz.org', cert=('/path/client.cert', '/path/client.key', 'my_key_password')) +DNS with mTLS +~~~~~~~~~~~~~ + +You can pass your client side certificate to authenticate yourself against the given resolver. +To do so, you will have to do as follow:: + + from urllib3 import ResolverDescription + from niquests import Session + + rd = ResolverDescription.from_url("doq://my-resolver.tld") + rd["cert_data"] = in_memory_cert # not a path, it should contain your cert content PEM format directly + rd["cert_key"] = ... + rd["key_password"] = ... + + with Session(resolver=rd) as s: + ... + +.. note:: Instead of in-memory cert, you can pass file path instead with ``cert_file``, ``key_file``. + +This method of authentication is broadly used with DNS over TLS, QUIC, and HTTPS. + In-memory Certificates ---------------------- @@ -345,7 +366,7 @@ make the request within a ``with`` statement to ensure it's always closed:: Keep-Alive ---------- -Excellent news — thanks to urllib3, keep-alive is 100% automatic within a session! +Excellent news — thanks to urllib3.future, keep-alive is 100% automatic within a session! Any requests that you make within a session will automatically reuse the appropriate connection! @@ -365,9 +386,7 @@ file-like object for your body:: with open('massive-body', 'rb') as f: niquests.post('http://some.url/streamed', data=f) -.. warning:: It is recommended that you open files in :ref:`binary - mode `. Errors may occur if you open the file in *text mode*. - due to the fact that this will be re-encoded later in the process. +.. warning:: It is recommended that you open files in binary mode. .. 
_chunk-encoding: @@ -500,11 +519,18 @@ You can find a example of how to retrieve the connection information just before Here, ``r`` is the ``PreparedRequest`` and ``conn_info`` contains a ``ConnectionInfo``. You can explore the following data in it. -- **certificate_der**: The certificate in DER format (binary) -- **certificate_dict**: The certificate as a dictionary like ``ssl.SSLSocket.getpeercert(binary_from=False)`` output it. +- **certificate_der**: The peer certificate in DER format (binary) +- **certificate_dict**: The peer certificate as a dictionary like ``ssl.SSLSocket.getpeercert(binary_form=False)`` outputs it. - **tls_version**: TLS version. - **cipher**: Cipher used. - **http_version**: Http version that is about to be used. +- **destination_address**: The remote peer address given to us by the DNS resolver. +- **issuer_certificate_der**: Immediate issuer (in the TLS certificate chain) in DER format (binary) +- **issuer_certificate_dict**: Immediate issuer (in the TLS certificate chain) as a dictionary +- **established_latency**: The amount of time consumed to get an ESTABLISHED network link. +- **resolution_latency**: The amount of time consumed for the hostname resolution. +- **tls_handshake_latency**: The amount of time consumed for the TLS handshake completion. +- **request_sent_latency**: The amount of time consumed to encode and send the whole request through the socket. .. warning:: Depending on your platform and interpreter, some key element might not be available and be assigned ``None`` everytime. Like **certificate_dict** on MacOS. @@ -1290,4 +1316,50 @@ Thread Safety Niquests is meant to be thread-safe. Any error or unattended behaviors are covered by our support for bug policy. Both main scenarios are eligible, meaning Thread and Async, with Thread and Sync. -Support include notable performance issues like abusive lock. \ No newline at end of file +Support includes notable performance issues like abusive locking. + +Use a custom CA without losing the official ones +------------------------------------------------- + +There's an interesting use-case where a user may want to be able to request both private +and public HTTP endpoints without doing some gymnastics with ``verify=...``. + +Thanks to our underlying library ``wassima``, you can globally register your own set +of certificate authorities like so:: + + import wassima + + wassima.register_ca(my_own_ca_pem_str) + +That's it! Niquests will now automatically recognize it and use it to verify your secure endpoints. +You'll have to register it prior to your HTTP requests. + +.. note:: While doing local development with HTTPS, we recommend using a tool like ``mkcert`` that will register the CA into your local machine trust store. Niquests is natively capable of picking them up. + +Disable either IPv4 or IPv6 +--------------------------- + +You may be interested in controlling what kind of address you are willing to connect to. +Since Niquests 3.4+, you can configure that aspect per ``Session`` instance. + +Creating a session with IPv6 disabled can be done this way:: + + import niquests + + session = niquests.Session(disable_ipv6=True) + +.. warning:: You cannot set both ``disable_ipv4`` and ``disable_ipv6``; doing so raises a ``RuntimeError``. + +Setting the source network adapter +---------------------------------- + +In a complex scenario, you could face the following: "I have multiple network adapters, some can access this and others that.."
+Since Niquests 3.4+, you can configure that aspect per ``Session`` instance. + +Binding the session to a specific source address (and port) can be done this way:: + + import niquests + + session = niquests.Session(source_address=("10.10.4.1", 4444)) + +It will be passed down to the lower stack. No effort required. diff --git a/docs/user/authentication.rst b/docs/user/authentication.rst index f8bfa42ffc..cc5666292b 100644 --- a/docs/user/authentication.rst +++ b/docs/user/authentication.rst @@ -32,6 +32,17 @@ for using it:: Providing the credentials in a tuple like this is exactly the same as the ``HTTPBasicAuth`` example above. +For DNS +~~~~~~~ + +Doing basic authorization against a DNS over HTTPS resolver can be done easily. +You must provide the user and password in the DNS url as such:: + + from niquests import Session + + with Session(resolver="doh://user:pass@my-resolver.tld") as s: + resp = s.get("pie.dev/get") + Passing a bearer token ---------------------- @@ -40,6 +51,17 @@ get, post, request, etc... .. note:: If you pass a token with its custom prefix, it will be taken and passed as-is. e.g. ``auth="NotBearer eyDdx.."`` +For DNS +~~~~~~~ + +Passing a bearer token to a DNS over HTTPS resolver can be done easily. +You must provide the token directly in the DNS url as such:: + + from niquests import Session + + with Session(resolver="doh://token@my-resolver.tld") as s: + resp = s.get("pie.dev/get") + netrc Authentication ~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/user/quickstart.rst b/docs/user/quickstart.rst index 666e8181d3..13d4bf9b2c 100644 --- a/docs/user/quickstart.rst +++ b/docs/user/quickstart.rst @@ -133,6 +133,8 @@ The ``gzip`` and ``deflate`` transfer-encodings are automatically decoded for yo The ``br`` transfer-encoding is automatically decoded for you if a Brotli library like `brotli `_ or `brotlicffi `_ is installed. +The ``zstd`` transfer-encoding is automatically decoded for you if the zstandard library `zstandard `_ is installed. + For example, to create an image from binary data returned by a request, you can use the following code:: @@ -159,6 +161,8 @@ attempting ``r.json()`` raises ``niquests.exceptions.JSONDecodeError``. This wra provides interoperability for multiple exceptions that may be thrown by different python versions and json serialization libraries. +.. warning:: This method will also raise ``niquests.exceptions.JSONDecodeError`` if the Content-Type isn't set to anything that refers to JSON. + It should be noted that the success of the call to ``r.json()`` does **not** indicate the success of the response. Some servers may return a JSON object in a failed response (e.g. error details with HTTP 500). Such JSON will be decoded @@ -622,6 +626,8 @@ It is saved in-memory by Niquests. You may also run the following command ``python -m niquests.help`` to find out if you support HTTP/3. In 95 percents of the case, the answer is yes! +.. note:: Since urllib3.future version 2.4+, we support negotiating HTTP/3 without a first TCP connection if the remote peer indicated in a HTTPS (DNS) record that the server supports HTTP/3. + Multiplexed Connection ---------------------- @@ -780,6 +786,84 @@ Look at this basic sample:: if __name__ == "__main__": asyncio.run(main()) + +DNS Resolution +-------------- + +Niquests has built-in support for DNS over HTTPS, DNS over TLS, DNS over UDP, and DNS over QUIC. +Thanks to our built-in system trust store access, you don't have to worry one bit about certificate validation.
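+
+For a quick taste, here is a minimal sketch (it assumes the ``doh+cloudflare://`` shortcut and the ``conn_info``
+timing attributes described in the advanced section) that resolves a host through DNS over HTTPS and prints how
+long the resolution and the connection establishment took::
+
+    from niquests import Session
+
+    with Session(resolver="doh+cloudflare://") as s:
+        r = s.get("https://pie.dev/get")
+        # both values are datetime.timedelta instances, or None when unavailable
+        print(r.conn_info.resolution_latency, r.conn_info.established_latency)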
+ +This feature is based on the native implementation brought to you by the awesome **urllib3.future**. +Once you have specified a custom resolver (e.g. not the system default), you will automatically be protected with +DNSSEC in addition to specific security perks of the chosen protocol. + +Specify your own resolver +~~~~~~~~~~~~~~~~~~~~~~~~~ + +In order to specify a resolver, you have to use a ``Session``. Each ``Session`` can have a different resolver. +Here is a basic example that leverages Google public DNS over HTTPS:: + + from niquests import Session + + with Session(resolver="doh+google://") as s: + resp = s.get("pie.dev/get") + +Here, the domain name (**pie.dev**) will be resolved using the provided DNS url. + +.. note:: By default, Niquests still uses the good old, often insecure, system DNS. + +Use multiple resolvers +~~~~~~~~~~~~~~~~~~~~~~ + +You may specify a list of resolvers to be tested in order:: + + from niquests import Session + + with Session(resolver=["doh+google://", "doh://cloudflare-dns.com"]) as s: + resp = s.get("pie.dev/get") + +The second entry ``doh://cloudflare-dns.com`` will only be tested if ``doh+google://`` fails to provide a usable answer. + +.. note:: In a multi-threaded context, both resolvers are going to be used in order to improve performance. + +Supported DNS url +~~~~~~~~~~~~~~~~~ + +Niquests supports a wide range of DNS protocols. Here are a few examples:: + + "doh+google://" # shortcut url for Google DNS over HTTPS + "dot+google://" # shortcut url for Google DNS over TLS + "doh+cloudflare://" # shortcut url for Cloudflare DNS over HTTPS + "doq+adguard://" # shortcut url for Adguard DNS over QUIC + "dou://1.1.1.1" # url for DNS over UDP (Plain resolver) + "dou://1.1.1.1:8853" # url for DNS over UDP using port 8853 (Plain resolver) + "doh://my-resolver.tld" # url for DNS over HTTPS using server my-resolver.tld + +.. note:: Learn more by looking at the **urllib3.future** documentation: https://urllib3future.readthedocs.io/en/latest/advanced-usage.html#using-a-custom-dns-resolver + +Set DNS via environment +~~~~~~~~~~~~~~~~~~~~~~~ + +You can set the ``NIQUESTS_DNS_URL`` environment variable with the desired resolver; it will be +used in every Session **that does not manually specify a resolver.** + +Example:: + + export NIQUESTS_DNS_URL="doh://google.dns" + +Disable DNS certificate verification +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Simply add ``verify=false`` to your DNS url to do so:: + + from niquests import Session + + with Session(resolver="doh+google://default/?verify=false") as s: + resp = s.get("pie.dev/get") + + +.. warning:: Doing a ``s.get("pie.dev/get", verify=False)`` does not impact the resolver. + ----------------------- Ready for more? Check out the :ref:`advanced ` section. diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 0000000000..af0c97c301 --- /dev/null +++ b/noxfile.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +import os +import shutil + +import nox + + +def tests_impl( + session: nox.Session, + extras: str = "socks", +) -> None: + # Install deps and the package itself. + session.install("-r", "requirements-dev.txt") + session.install(f".[{extras}]", silent=False) + + # Show the pip version.
+ session.run("pip", "--version") + session.run("python", "--version") + + session.run( + "python", + "-m", + "coverage", + "run", + "--parallel-mode", + "-m", + "pytest", + "-v", + "-ra", + f"--color={'yes' if 'GITHUB_ACTIONS' in os.environ else 'auto'}", + "--tb=native", + "--durations=10", + "--strict-config", + "--strict-markers", + *(session.posargs or ("tests/",)), + env={"PYTHONWARNINGS": "always::DeprecationWarning"}, + ) + + +@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "pypy"]) +def test(session: nox.Session) -> None: + tests_impl(session) + + +@nox.session +def lint(session: nox.Session) -> None: + session.install("pre-commit") + session.run("pre-commit", "run", "--all-files") + + +@nox.session +def docs(session: nox.Session) -> None: + session.install("-r", "docs/requirements.txt") + session.install(".[socks]") + + session.chdir("docs") + if os.path.exists("_build"): + shutil.rmtree("_build") + session.run("sphinx-build", "-b", "html", ".", "_build/html") diff --git a/pyproject.toml b/pyproject.toml index 32354c74f0..6044b392aa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ description = "Python HTTP for Humans." readme = "README.md" license-files = { paths = ["LICENSE"] } license = "Apache-2.0" -keywords = ["requests", "http/2", "http/3", "QUIC", "http", "https", "http client", "http/1.1", "ocsp", "revocation", "tls", "multiplexed"] +keywords = ["requests", "http/2", "http/3", "QUIC", "http", "https", "http client", "http/1.1", "ocsp", "revocation", "tls", "multiplexed", "dns-over-quic", "doq", "dns-over-tls", "dot", "dns-over-https", "doh", "dnssec"] authors = [ {name = "Kenneth Reitz", email = "me@kennethreitz.org"} ] @@ -41,17 +41,17 @@ dynamic = ["version"] dependencies = [ "charset_normalizer>=2,<4", "idna>=2.5,<4", - "urllib3.future>=2.3.900,<3", + "urllib3.future>=2.4.901,<3", "wassima>=1.0.1,<2", "kiss_headers>=2,<4", ] [project.optional-dependencies] socks = [ - "PySocks>=1.5.6, !=1.5.7", + "urllib3.future[socks]>=2.4.901,<3", ] http3 = [ - "qh3<1.0.0,>=0.14.0" + "urllib3.future[qh3]>=2.4.901,<3" ] ocsp = [ "cryptography<42.0.0,>=41.0.0" diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 35b3de22ec..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,4 +0,0 @@ -[flake8] -ignore = E203, E501, W503 -per-file-ignores = - src/niquests/__init__.py:E402, F401 diff --git a/src/niquests/__init__.py b/src/niquests/__init__.py index b9353ae3be..a3ed5097ad 100644 --- a/src/niquests/__init__.py +++ b/src/niquests/__init__.py @@ -59,6 +59,7 @@ "ignore", "Parsed a negative serial number", module="cryptography" ) +# ruff: noqa: E402 from . 
import utils from .__version__ import ( __author__, diff --git a/src/niquests/__version__.py b/src/niquests/__version__.py index 3ae6112d4a..245287598f 100644 --- a/src/niquests/__version__.py +++ b/src/niquests/__version__.py @@ -9,9 +9,9 @@ __url__: str = "https://niquests.readthedocs.io" __version__: str -__version__ = "3.3.4" +__version__ = "3.4.0" -__build__: int = 0x030304 +__build__: int = 0x030400 __author__: str = "Kenneth Reitz" __author_email__: str = "me@kennethreitz.org" __license__: str = "Apache-2.0" diff --git a/src/niquests/_async.py b/src/niquests/_async.py index 4f8842efda..22db239dcc 100644 --- a/src/niquests/_async.py +++ b/src/niquests/_async.py @@ -36,7 +36,9 @@ async def __aenter__(self): return self async def __aexit__(self, exc, value, tb): - super().__exit__() + await sync_to_async( + super().__exit__, thread_sensitive=AsyncSession.disable_thread + )() async def send(self, request: PreparedRequest, **kwargs: typing.Any) -> Response: # type: ignore[override] return await sync_to_async( @@ -81,7 +83,9 @@ async def request( # type: ignore[override] ) prep: PreparedRequest = dispatch_hook( - "pre_request", hooks, self.prepare_request(req) # type: ignore[arg-type] + "pre_request", + hooks, # type: ignore[arg-type] + self.prepare_request(req), ) assert prep.url is not None diff --git a/src/niquests/_compat.py b/src/niquests/_compat.py index 1bee261e26..856363f497 100644 --- a/src/niquests/_compat.py +++ b/src/niquests/_compat.py @@ -1,5 +1,7 @@ from __future__ import annotations +import typing + try: from urllib3._version import __version__ @@ -9,3 +11,53 @@ # 1) urllib3 does not exist -> fallback to urllib3_future # 2) urllib3 exist but not fork -> fallback to urllib3_future HAS_LEGACY_URLLIB3 = True + +if HAS_LEGACY_URLLIB3: + import urllib3_future +else: + urllib3_future = None # type: ignore[assignment] + +try: + import urllib3 +except ImportError: + urllib3 = None # type: ignore[assignment] + +T = typing.TypeVar("T", urllib3.Timeout, urllib3.Retry) + + +def urllib3_ensure_type(o: T) -> T: + """Retry, Timeout must be the one in urllib3_future.""" + if urllib3 is None: + return o + + if HAS_LEGACY_URLLIB3: + if "urllib3_future" not in str(type(o)): + assert urllib3_future is not None + + if isinstance(o, urllib3.Timeout): + return urllib3_future.Timeout( # type: ignore[return-value] + o.total, # type: ignore[arg-type] + o.connect_timeout, # type: ignore[arg-type] + o.read_timeout, # type: ignore[arg-type] + ) + if isinstance(o, urllib3.Retry): + return urllib3_future.Retry( # type: ignore[return-value] + o.total, + o.connect, + o.read, + redirect=o.redirect, + status=o.status, + other=o.other, + allowed_methods=o.allowed_methods, + status_forcelist=o.status_forcelist, + backoff_factor=o.backoff_factor, + backoff_max=o.backoff_max, + raise_on_redirect=o.raise_on_redirect, + raise_on_status=o.raise_on_status, + history=o.history, # type: ignore[arg-type] + respect_retry_after_header=o.respect_retry_after_header, + remove_headers_on_redirect=o.remove_headers_on_redirect, + backoff_jitter=o.backoff_jitter, + ) + + return o diff --git a/src/niquests/_typing.py b/src/niquests/_typing.py index b8c0d5cb03..36d51b4697 100644 --- a/src/niquests/_typing.py +++ b/src/niquests/_typing.py @@ -8,11 +8,13 @@ from ._compat import HAS_LEGACY_URLLIB3 if HAS_LEGACY_URLLIB3 is False: - from urllib3 import Retry, Timeout + from urllib3 import ResolverDescription, Retry, Timeout + from urllib3.contrib.resolver import BaseResolver from urllib3.fields import RequestField else: - from 
urllib3_future import Retry, Timeout # type: ignore[assignment] + from urllib3_future import Retry, Timeout, ResolverDescription # type: ignore[assignment] from urllib3_future.fields import RequestField # type: ignore[assignment] + from urllib3_future.contrib.resolver import BaseResolver # type: ignore[assignment] from .auth import AuthBase from .structures import CaseInsensitiveDict @@ -146,3 +148,11 @@ ] RetryType: typing.TypeAlias = typing.Union[bool, int, Retry] + +ResolverType: typing.TypeAlias = typing.Union[ + str, + ResolverDescription, + BaseResolver, + typing.List[str], + typing.List[ResolverDescription], +] diff --git a/src/niquests/adapters.py b/src/niquests/adapters.py index 37ad7c8e4b..62f4032ce6 100644 --- a/src/niquests/adapters.py +++ b/src/niquests/adapters.py @@ -28,7 +28,12 @@ from ._compat import HAS_LEGACY_URLLIB3 if HAS_LEGACY_URLLIB3 is False: - from urllib3 import ConnectionInfo, HTTPConnectionPool, HTTPSConnectionPool + from urllib3 import ( + ConnectionInfo, + HTTPConnectionPool, + HTTPSConnectionPool, + ResolverDescription, + ) from urllib3.backend import HttpVersion, ResponsePromise from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError from urllib3.exceptions import HTTPError as _HTTPError @@ -48,7 +53,12 @@ from urllib3.util import parse_url from urllib3.util.retry import Retry else: - from urllib3_future import ConnectionInfo, HTTPConnectionPool, HTTPSConnectionPool # type: ignore[assignment] + from urllib3_future import ( # type: ignore[assignment] + ConnectionInfo, + HTTPConnectionPool, + HTTPSConnectionPool, + ResolverDescription, + ) from urllib3_future.backend import HttpVersion, ResponsePromise # type: ignore[assignment] from urllib3_future.exceptions import ClosedPoolError, ConnectTimeoutError # type: ignore[assignment] from urllib3_future.exceptions import HTTPError as _HTTPError # type: ignore[assignment] @@ -73,6 +83,7 @@ CacheLayerAltSvcType, HookType, ProxyType, + ResolverType, RetryType, TLSClientCertType, TLSVerifyType, @@ -102,6 +113,7 @@ prepend_scheme_if_needed, select_proxy, urldefragauth, + resolve_socket_family, ) try: @@ -165,6 +177,7 @@ def gather(self, *responses: Response, max_fetch: int | None = None) -> None: """ Load responses that are still 'lazy'. This method is meant for a multiplexed connection. Implementation is not mandatory. + :param max_fetch: Maximal number of response to be fetched before exiting the loop. By default, it waits until all pending (lazy) response are resolved. 
""" @@ -207,6 +220,9 @@ class HTTPAdapter(BaseAdapter): "_quic_cache_layer", "_disable_http2", "_disable_http3", + "_source_address", + "_disable_ipv4", + "_disable_ipv6", ] def __init__( @@ -219,6 +235,10 @@ def __init__( disable_http2: bool = False, disable_http3: bool = False, max_in_flight_multiplexed: int | None = None, + resolver: ResolverType | None = None, + source_address: tuple[str, int] | None = None, + disable_ipv4: bool = False, + disable_ipv6: bool = False, ): if isinstance(max_retries, bool): self.max_retries: RetryType = False @@ -244,6 +264,10 @@ def __init__( self._quic_cache_layer = quic_cache_layer self._disable_http2 = disable_http2 self._disable_http3 = disable_http3 + self._resolver = resolver + self._source_address = source_address + self._disable_ipv4 = disable_ipv4 + self._disable_ipv6 = disable_ipv6 #: we keep a list of pending (lazy) response self._promises: dict[str, Response] = {} @@ -267,6 +291,9 @@ def __init__( block=pool_block, quic_cache_layer=quic_cache_layer, disabled_svn=disabled_svn, + resolver=resolver, + source_address=source_address, + socket_family=resolve_socket_family(disable_ipv4, disable_ipv6), ) def __getstate__(self) -> dict[str, typing.Any | None]: @@ -281,6 +308,8 @@ def __setstate__(self, state): for attr, value in state.items(): setattr(self, attr, value) + self._resolver = ResolverDescription.from_url("system://").new() + disabled_svn = set() if self._disable_http2: @@ -294,6 +323,8 @@ def __setstate__(self, state): block=self._pool_block, quic_cache_layer=self._quic_cache_layer, disabled_svn=disabled_svn, + source_address=self._source_address, + socket_family=resolve_socket_family(self._disable_ipv4, self._disable_ipv6), ) def init_poolmanager( @@ -346,6 +377,9 @@ def proxy_manager_for(self, proxy: str, **proxy_kwargs: typing.Any) -> ProxyMana if self._disable_http2: disabled_svn.add(HttpVersion.h2) + if self._source_address and "source_address" not in proxy_kwargs: + proxy_kwargs["source_address"] = self._source_address + if proxy in self.proxy_manager: manager = self.proxy_manager[proxy] elif proxy.lower().startswith("socks"): @@ -358,6 +392,7 @@ def proxy_manager_for(self, proxy: str, **proxy_kwargs: typing.Any) -> ProxyMana maxsize=self._pool_maxsize, block=self._pool_block, disabled_svn=disabled_svn, + resolver=self._resolver, **proxy_kwargs, ) else: @@ -369,6 +404,7 @@ def proxy_manager_for(self, proxy: str, **proxy_kwargs: typing.Any) -> ProxyMana maxsize=self._pool_maxsize, block=self._pool_block, disabled_svn=disabled_svn, + resolver=self._resolver, **proxy_kwargs, ) diff --git a/src/niquests/extensions/_ocsp.py b/src/niquests/extensions/_ocsp.py index 7c7a577b4d..c090d3a78e 100644 --- a/src/niquests/extensions/_ocsp.py +++ b/src/niquests/extensions/_ocsp.py @@ -25,10 +25,12 @@ from urllib3 import ConnectionInfo from urllib3.exceptions import SecurityWarning from urllib3.util.url import parse_url + from urllib3.contrib.resolver import BaseResolver else: from urllib3_future import ConnectionInfo # type: ignore[assignment] from urllib3_future.exceptions import SecurityWarning # type: ignore[assignment] from urllib3_future.util.url import parse_url # type: ignore[assignment] + from urllib3_future.contrib.resolver import BaseResolver # type: ignore[assignment] from .._typing import ProxyType from ..exceptions import RequestException, SSLError @@ -313,6 +315,7 @@ def verify( strict: bool = False, timeout: float | int = 0.2, proxies: ProxyType | None = None, + resolver: BaseResolver | None = None, ) -> None: conn_info: 
ConnectionInfo | None = r.conn_info @@ -381,7 +384,7 @@ def verify( from ..sessions import Session - with Session() as session: + with Session(resolver=resolver) as session: session.trust_env = False session.proxies = proxies @@ -424,7 +427,11 @@ def verify( except ValueError: issuer_certificate = None - hint_ca_issuers: list[str] = [ep for ep in list(conn_info.certificate_dict.get("caIssuers", [])) if ep.startswith("http://")] # type: ignore + hint_ca_issuers: list[str] = [ + ep # type: ignore + for ep in list(conn_info.certificate_dict.get("caIssuers", [])) # type: ignore + if ep.startswith("http://") # type: ignore + ] if issuer_certificate is None and hint_ca_issuers: try: diff --git a/src/niquests/extensions/_sync_to_async.py b/src/niquests/extensions/_sync_to_async.py index 404f8f911b..9ca66e5c99 100644 --- a/src/niquests/extensions/_sync_to_async.py +++ b/src/niquests/extensions/_sync_to_async.py @@ -45,7 +45,7 @@ if sys.version_info >= (3, 10): from typing import ParamSpec else: - from typing import _type_check + from typing import _type_check # type: ignore[attr-defined] class _Immutable: """Mixin to indicate that object should not be copied.""" @@ -120,7 +120,7 @@ class _DefaultMixin: """Mixin for TypeVarLike defaults.""" __slots__ = () - __init__ = _set_default + __init__ = _set_default # type: ignore[assignment] def _caller(depth=2): try: @@ -135,7 +135,7 @@ def __repr__(self): _marker = _Sentinel() # Inherits from list as a workaround for Callable checks in Python < 3.9.2. - class ParamSpec(list, _DefaultMixin): + class ParamSpec(list, _DefaultMixin): # type: ignore[no-redef] """Parameter specification variable. Usage:: @@ -183,7 +183,7 @@ def add_two(x: float, y: float) -> float: """ # Trick Generic __parameters__. - __class__ = TypeVar + __class__ = TypeVar # type: ignore[assignment] @property def args(self): @@ -246,6 +246,13 @@ def __call__(self, *args, **kwargs): _F = TypeVar("_F", bound=Callable[..., Any]) _P = ParamSpec("_P") + +# Circumvent strict check at runtime for PyPy +# TypeError: Parameters to Generic[...] must all be type variables +# TODO: Find a better way. +if sys.implementation.name == "pypy" and (3, 10) > sys.version_info: + _P = TypeVar("_P") # type: ignore + _R = TypeVar("_R") diff --git a/src/niquests/models.py b/src/niquests/models.py index 1e1966d172..4b7fe971c7 100644 --- a/src/niquests/models.py +++ b/src/niquests/models.py @@ -478,7 +478,7 @@ def prepare_body( if not data and json is not None: # urllib3 requires a bytes-like body. Python 2's json.dumps # provides this natively, but Python 3 gives a Unicode string. 
- content_type = "application/json" + content_type = 'application/json; charset="utf-8"' try: body = _json.dumps(json, allow_nan=False) @@ -748,7 +748,8 @@ def _encode_params( ) if boundary_for_multipart: return encode_multipart_formdata( - result, boundary=boundary_for_multipart # type: ignore[arg-type] + result, # type: ignore[arg-type] + boundary=boundary_for_multipart, )[0] return urlencode(result, doseq=True) else: diff --git a/src/niquests/sessions.py b/src/niquests/sessions.py index ba44229f50..50507df5a4 100644 --- a/src/niquests/sessions.py +++ b/src/niquests/sessions.py @@ -11,6 +11,7 @@ import sys import time import typing +import warnings from collections import OrderedDict from collections.abc import Mapping from datetime import timedelta @@ -18,7 +19,7 @@ from http.cookiejar import CookieJar from urllib.parse import urljoin, urlparse -from ._compat import HAS_LEGACY_URLLIB3 +from ._compat import HAS_LEGACY_URLLIB3, urllib3_ensure_type if HAS_LEGACY_URLLIB3 is False: from urllib3 import ConnectionInfo @@ -38,6 +39,7 @@ MultiPartFilesType, ProxyType, QueryParameterType, + ResolverType, RetryType, TimeoutType, TLSClientCertType, @@ -87,6 +89,7 @@ rewind_body, should_bypass_proxies, to_key_val_list, + create_resolver, ) # Preferred clock, based on which one is more accurate on a given system. @@ -203,20 +206,40 @@ class Session: "max_redirects", "retries", "multiplexed", + "source_address", + "_disable_ipv4", + "_disable_ipv6", + "_disable_http2", + "_disable_http3", ] def __init__( self, *, + resolver: ResolverType | None = None, + source_address: tuple[str, int] | None = None, quic_cache_layer: CacheLayerAltSvcType | None = None, retries: RetryType = DEFAULT_RETRIES, multiplexed: bool = False, disable_http2: bool = False, disable_http3: bool = False, + disable_ipv6: bool = False, + disable_ipv4: bool = False, ): + if [disable_ipv4, disable_ipv6].count(True) == 2: + raise RuntimeError("Cannot disable both IPv4 and IPv6") + #: Configured retries for current Session self.retries = retries + if ( + self.retries + and HAS_LEGACY_URLLIB3 + and hasattr(self.retries, "total") + and "urllib3_future" not in str(type(self.retries)) + ): + self.retries = urllib3_ensure_type(self.retries) # type: ignore[type-var] + #: A case-insensitive dictionary of headers to be sent on each #: :class:`Request ` sent from this #: :class:`Session `. @@ -245,6 +268,20 @@ def __init__( #: Toggle to leverage multiplexed connection. self.multiplexed = multiplexed + #: Custom DNS resolution method. + self.resolver = create_resolver(resolver) + #: Internal use, know whether we should/can close it on session close. + self._own_resolver: bool = resolver != self.resolver + + #: Bind to address/network adapter + self.source_address = source_address + + self._disable_http2 = disable_http2 + self._disable_http3 = disable_http3 + + self._disable_ipv4 = disable_ipv4 + self._disable_ipv6 = disable_ipv6 + #: SSL Verification default. #: Defaults to `True`, requiring requests to verify the TLS certificate at the #: remote end. 
@@ -293,9 +330,22 @@ def __init__( max_retries=retries, disable_http2=disable_http2, disable_http3=disable_http3, + resolver=resolver, + source_address=source_address, + disable_ipv4=disable_ipv4, + disable_ipv6=disable_ipv6, + ), + ) + self.mount( + "http://", + HTTPAdapter( + max_retries=retries, + resolver=resolver, + source_address=source_address, + disable_ipv4=disable_ipv4, + disable_ipv6=disable_ipv6, ), ) - self.mount("http://", HTTPAdapter(max_retries=retries)) def __enter__(self): return self @@ -429,7 +479,9 @@ def request( ) prep: PreparedRequest = dispatch_hook( - "pre_request", hooks, self.prepare_request(req) # type: ignore[arg-type] + "pre_request", + hooks, # type: ignore[arg-type] + self.prepare_request(req), ) assert prep.url is not None @@ -955,9 +1007,19 @@ def send(self, request: PreparedRequest, **kwargs: typing.Any) -> Response: kwargs.setdefault("stream", self.stream) kwargs.setdefault("verify", self.verify) kwargs.setdefault("cert", self.cert) + if "proxies" not in kwargs: kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env) + if ( + "timeout" in kwargs + and kwargs["timeout"] + and HAS_LEGACY_URLLIB3 + and hasattr(kwargs["timeout"], "total") + and "urllib3_future" not in str(type(kwargs["timeout"])) + ): + kwargs["timeout"] = urllib3_ensure_type(kwargs["timeout"]) + # It's possible that users might accidentally send a Request object. # Guard against that specific failure case. if isinstance(request, Request): @@ -990,6 +1052,7 @@ def on_post_connection(conn_info: ConnectionInfo) -> None: strict_ocsp_enabled, 0.2 if not strict_ocsp_enabled else 1.0, kwargs["proxies"], + resolver=self.resolver, ) # don't trigger pre_send for redirects @@ -1021,6 +1084,40 @@ def handle_upload_progress( assert request.url is not None + # Recycle the resolver if unavailable + if not self.resolver.is_available(): + if not self._own_resolver: + warnings.warn( + "A externally instantiated resolver was closed. Attempt to recycling it internally, " + "the Session will detach itself from given resolver.", + ResourceWarning, + ) + self.close() + self.resolver = self.resolver.recycle() + self.mount( + "https://", + HTTPAdapter( + quic_cache_layer=self.quic_cache_layer, + max_retries=self.retries, + disable_http2=self._disable_http2, + disable_http3=self._disable_http3, + resolver=self.resolver, + source_address=self.source_address, + disable_ipv4=self._disable_ipv4, + disable_ipv6=self._disable_ipv6, + ), + ) + self.mount( + "http://", + HTTPAdapter( + max_retries=self.retries, + resolver=self.resolver, + source_address=self.source_address, + disable_ipv4=self._disable_ipv4, + disable_ipv6=self._disable_ipv6, + ), + ) + # Get the appropriate adapter to use adapter = self.get_adapter(url=request.url) @@ -1033,7 +1130,10 @@ def handle_upload_progress( # We are leveraging a multiplexed connection if r.raw is None: r._gather = lambda: adapter.gather(r) - r._resolve_redirect = lambda x, y: next(self.resolve_redirects(x, y, yield_requests=True, **kwargs), None) # type: ignore[assignment, arg-type] + r._resolve_redirect = lambda x, y: next( + self.resolve_redirects(x, y, yield_requests=True, **kwargs), # type: ignore + None, + ) # in multiplexed mode, we are unable to forward this local function for safety reasons. 
kwargs["on_post_connection"] = None @@ -1097,7 +1197,9 @@ def handle_upload_progress( if r.is_redirect: try: r._next = next( - self.resolve_redirects(r, request, yield_requests=True, **kwargs) # type: ignore[assignment] + self.resolve_redirects( + r, request, yield_requests=True, **kwargs + ) # type: ignore[assignment] ) except StopIteration: pass @@ -1110,10 +1212,11 @@ def handle_upload_progress( def gather(self, *responses: Response, max_fetch: int | None = None) -> None: """ Call this method to make sure in-flight responses are retrieved efficiently. This is a no-op - if multiplexed is set to False (which is the default value). - Passing a limited set of responses will wait for given promises and discard others for later. - :param max_fetch: Maximal number of response to be fetched before exiting the loop. By default, - it waits until all pending (lazy) response are resolved. + if multiplexed is set to False (which is the default value). Passing a limited set of responses + will wait for given promises and discard others for later. + + :param max_fetch: Maximal number of response to be fetched before exiting the loop. + By default, it waits until all pending (lazy) response are resolved. """ if self.multiplexed is False: return @@ -1171,6 +1274,8 @@ def close(self) -> None: """Closes all adapters and as such the session""" for v in self.adapters.values(): v.close() + if self._own_resolver: + self.resolver.close() def mount(self, prefix: str, adapter: BaseAdapter) -> None: """Registers a connection adapter to a prefix. @@ -1192,15 +1297,33 @@ def __setstate__(self, state): setattr(self, attr, value) self.quic_cache_layer = QuicSharedCache(max_size=12_288) + self.resolver = create_resolver(None) + self._own_resolver = True self.adapters = OrderedDict() self.mount( "https://", HTTPAdapter( - quic_cache_layer=self.quic_cache_layer, max_retries=self.retries + quic_cache_layer=self.quic_cache_layer, + max_retries=self.retries, + disable_http2=self._disable_http2, + disable_http3=self._disable_http3, + source_address=self.source_address, + disable_ipv4=self._disable_ipv4, + disable_ipv6=self._disable_ipv6, + resolver=self.resolver, + ), + ) + self.mount( + "http://", + HTTPAdapter( + max_retries=self.retries, + source_address=self.source_address, + disable_ipv4=self._disable_ipv4, + disable_ipv6=self._disable_ipv6, + resolver=self.resolver, ), ) - self.mount("http://", HTTPAdapter(max_retries=self.retries)) def get_redirect_target(self, resp: Response) -> str | None: """Receives a Response. 
Returns a redirect URI or ``None``""" diff --git a/src/niquests/utils.py b/src/niquests/utils.py index d57f2ab724..d44a763c3e 100644 --- a/src/niquests/utils.py +++ b/src/niquests/utils.py @@ -17,6 +17,7 @@ import sys import tempfile import typing +import wassima from collections import OrderedDict from functools import lru_cache from http.cookiejar import CookieJar @@ -35,8 +36,20 @@ if HAS_LEGACY_URLLIB3 is False: from urllib3.util import make_headers, parse_url + from urllib3.contrib.resolver import ( + BaseResolver, + ProtocolResolver, + ResolverDescription, + ManyResolver, + ) else: from urllib3_future.util import make_headers, parse_url # type: ignore[assignment] + from urllib3_future.contrib.resolver import ( # type: ignore[assignment] + BaseResolver, + ProtocolResolver, + ResolverDescription, + ManyResolver, + ) from .__version__ import __version__ from .cookies import cookiejar_from_dict @@ -46,6 +59,7 @@ if typing.TYPE_CHECKING: from .cookies import RequestsCookieJar from .models import PreparedRequest, Request, Response + from ._typing import ResolverType getproxies = lru_cache()(getproxies) @@ -285,7 +299,7 @@ def from_key_val_list(value: typing.Any | None) -> OrderedDict | None: def to_key_val_list( - value: dict[_KT, _VT] | typing.Mapping[_KT, _VT] | typing.Iterable[tuple[_KT, _VT]] + value: dict[_KT, _VT] | typing.Mapping[_KT, _VT] | typing.Iterable[tuple[_KT, _VT]], ) -> list[tuple[_KT, _VT]]: """Take an object and test to see if it can be represented as a dictionary. If it can be, return a list of tuples, e.g., @@ -947,3 +961,76 @@ def rewind_body(prepared_request: PreparedRequest) -> None: ) else: raise UnrewindableBodyError("Unable to rewind request body for redirect.") + + +def create_resolver(definition: ResolverType | None) -> BaseResolver: + """Instantiate a unique resolver, reusable across the Session scope.""" + if definition is None: + overrule_dns = os.environ.get("NIQUESTS_DNS_URL", None) + if overrule_dns is not None: + definition = ResolverDescription.from_url(overrule_dns) + else: + return ResolverDescription(ProtocolResolver.SYSTEM).new() + + if isinstance(definition, BaseResolver): + return definition + + if isinstance(definition, str): + resolver = [ResolverDescription.from_url(definition)] + elif isinstance(definition, ResolverDescription): + resolver = [definition] + else: + raise ValueError("invalid resolver definition given") + + resolvers: list[ResolverDescription] = [] + + can_resolve_localhost: bool = False + + for resolver_description in resolver: + if isinstance(resolver_description, str): + resolvers.append(ResolverDescription.from_url(resolver_description)) + + if resolvers[-1].protocol == ProtocolResolver.SYSTEM: + can_resolve_localhost = True + + if "verify" in resolvers[-1] and resolvers[-1].kwargs["verify"] is False: + resolvers[-1]["cert_reqs"] = 0 + del resolvers[-1].kwargs["verify"] + + continue + + resolvers.append(resolver_description) + + if "verify" in resolvers[-1] and resolvers[-1].kwargs["verify"] is False: + resolvers[-1]["cert_reqs"] = 0 + del resolvers[-1].kwargs["verify"] + + if resolvers[-1].protocol == ProtocolResolver.SYSTEM: + can_resolve_localhost = True + + if not can_resolve_localhost: + resolvers.append( + ResolverDescription.from_url("system://default?hosts=localhost") + ) + + #: We want to automatically forward ca_cert_data, ca_cert_dir, and ca_certs. + for rd in resolvers: + # If no CA bundle is provided, inject the system's default! 
+ if ( + "ca_cert_data" not in rd + and "ca_cert_dir" not in rd + and "ca_certs" not in rd + ): + rd["ca_cert_data"] = wassima.generate_ca_bundle() + + return ManyResolver(*[r.new() for r in resolvers]) + + +def resolve_socket_family( + disable_ipv4: bool, disable_ipv6: bool +) -> socket.AddressFamily: + if disable_ipv4: + return socket.AF_INET6 + if disable_ipv6: + return socket.AF_INET + return socket.AF_UNSPEC diff --git a/tests/test_live.py b/tests/test_live.py new file mode 100644 index 0000000000..fe032247e0 --- /dev/null +++ b/tests/test_live.py @@ -0,0 +1,92 @@ +from __future__ import annotations + +import os +from unittest.mock import patch, MagicMock + +import pytest + +from niquests import Session +from niquests.utils import is_ipv4_address, is_ipv6_address +from niquests.exceptions import ConnectionError +from urllib3 import HttpVersion, ResolverDescription + +try: + import qh3 +except ImportError: + qh3 = None + + +@pytest.mark.usefixtures("requires_wan") +class TestLiveStandardCase: + def test_ensure_ipv4(self) -> None: + with Session(disable_ipv6=True, resolver="doh+google://") as s: + r = s.get("https://pie.dev/get") + + assert r.conn_info.destination_address is not None + assert is_ipv4_address(r.conn_info.destination_address[0]) + + def test_ensure_ipv6(self) -> None: + if os.environ.get("CI", None) is not None: + # GitHub hosted runner can't reach external IPv6... + with pytest.raises(ConnectionError, match="No route to host|unreachable"): + with Session(disable_ipv4=True, resolver="doh+google://") as s: + s.get("https://pie.dev/get") + return + + with Session(disable_ipv4=True, resolver="doh+google://") as s: + r = s.get("https://pie.dev/get") + + assert r.conn_info.destination_address is not None + assert is_ipv6_address(r.conn_info.destination_address[0]) + + def test_ensure_http2(self) -> None: + with Session(disable_http3=True) as s: + r = s.get("https://pie.dev/get") + assert r.conn_info.http_version is not None + assert r.conn_info.http_version == HttpVersion.h2 + + @pytest.mark.skipif(qh3 is None, reason="qh3 unavailable") + def test_ensure_http3_default(self) -> None: + with Session(resolver="doh+cloudflare://") as s: + r = s.get("https://pie.dev/get") + assert r.conn_info.http_version is not None + assert r.conn_info.http_version == HttpVersion.h3 + + @patch("urllib3.contrib.resolver.doh.HTTPSResolver.getaddrinfo") + def test_manual_resolver(self, getaddrinfo_mock: MagicMock) -> None: + with Session(resolver="doh+cloudflare://") as s: + with pytest.raises(ConnectionError): + s.get("https://pie.dev/get") + + assert getaddrinfo_mock.call_count + + def test_not_owned_resolver(self) -> None: + resolver = ResolverDescription.from_url("doh+cloudflare://").new() + + with Session(resolver=resolver) as s: + s.get("https://pie.dev/get") + + assert resolver.is_available() + + assert resolver.is_available() + + def test_owned_resolver_must_close(self) -> None: + with Session(resolver="doh+cloudflare://") as s: + s.get("https://pie.dev/get") + + assert s.resolver.is_available() + + assert not s.resolver.is_available() + + def test_owned_resolver_must_recycle(self) -> None: + s = Session(resolver="doh+cloudflare://") + + s.get("https://pie.dev/get") + + s.resolver.close() + + assert not s.resolver.is_available() + + s.get("https://pie.dev/get") + + assert s.resolver.is_available() diff --git a/tests/test_lowlevel.py b/tests/test_lowlevel.py index 6c27f1bf44..f5939fcf7a 100644 --- a/tests/test_lowlevel.py +++ b/tests/test_lowlevel.py @@ -15,11 +15,10 @@ def 
echo_response_handler(sock): """Simple handler that will take request and echo it back to requester.""" request_content = consume_socket_content(sock, timeout=0.5) - text_200 = ( - b"HTTP/1.1 200 OK\r\n" - b"Content-Length: %d\r\n\r\n" - b"%s" - ) % (len(request_content), request_content) + text_200 = (b"HTTP/1.1 200 OK\r\n" b"Content-Length: %d\r\n\r\n" b"%s") % ( + len(request_content), + request_content, + ) sock.send(text_200) @@ -45,10 +44,7 @@ def incomplete_chunked_response_handler(sock): request_content = consume_socket_content(sock, timeout=0.5) # The server never ends the request and doesn't provide any valid chunks - sock.send( - b"HTTP/1.1 200 OK\r\n" - b"Transfer-Encoding: chunked\r\n\r\n" - ) + sock.send(b"HTTP/1.1 200 OK\r\n" b"Transfer-Encoding: chunked\r\n\r\n") return request_content @@ -132,24 +128,25 @@ def test_digestauth_401_count_reset_on_redirect(): See https://github.com/psf/requests/issues/1979. """ - text_401 = (b'HTTP/1.1 401 UNAUTHORIZED\r\n' - b'Content-Length: 0\r\n' - b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' - b', opaque="372825293d1c26955496c80ed6426e9e", ' - b'realm="me@kennethreitz.com", qop=auth\r\n\r\n') + text_401 = ( + b"HTTP/1.1 401 UNAUTHORIZED\r\n" + b"Content-Length: 0\r\n" + b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' + b', opaque="372825293d1c26955496c80ed6426e9e", ' + b'realm="me@kennethreitz.com", qop=auth\r\n\r\n' + ) - text_302 = (b'HTTP/1.1 302 FOUND\r\n' - b'Content-Length: 0\r\n' - b'Location: /\r\n\r\n') + text_302 = b"HTTP/1.1 302 FOUND\r\n" b"Content-Length: 0\r\n" b"Location: /\r\n\r\n" - text_200 = (b'HTTP/1.1 200 OK\r\n' - b'Content-Length: 0\r\n\r\n') + text_200 = b"HTTP/1.1 200 OK\r\n" b"Content-Length: 0\r\n\r\n" - expected_digest = (b'Authorization: Digest username="user", ' - b'realm="me@kennethreitz.com", ' - b'nonce="6bf5d6e4da1ce66918800195d6b9130d", uri="/"') + expected_digest = ( + b'Authorization: Digest username="user", ' + b'realm="me@kennethreitz.com", ' + b'nonce="6bf5d6e4da1ce66918800195d6b9130d", uri="/"' + ) - auth = niquests.auth.HTTPDigestAuth('user', 'pass') + auth = niquests.auth.HTTPDigestAuth("user", "pass") def digest_response_handler(sock): # Respond to initial GET with a challenge. @@ -165,7 +162,7 @@ def digest_response_handler(sock): # Verify Authorization isn't sent to the redirected host, # then send another challenge. request_content = consume_socket_content(sock, timeout=0.5) - assert b'Authorization:' not in request_content + assert b"Authorization:" not in request_content sock.send(text_401) # Verify Authorization is sent correctly again, and return 200 OK. @@ -179,13 +176,13 @@ def digest_response_handler(sock): server = Server(digest_response_handler, wait_to_close_event=close_server) with server as (host, port): - url = f'http://{host}:{port}/' + url = f"http://{host}:{port}/" r = niquests.get(url, auth=auth) # Verify server succeeded in authenticating. assert r.status_code == 200 # Verify Authorization was sent in final request. - assert 'Authorization' in r.request.headers - assert r.request.headers['Authorization'].startswith('Digest ') + assert "Authorization" in r.request.headers + assert r.request.headers["Authorization"].startswith("Digest ") # Verify redirect happened as we expected. assert r.history[0].status_code == 302 close_server.set() @@ -195,17 +192,21 @@ def test_digestauth_401_only_sent_once(): """Ensure we correctly respond to a 401 challenge once, and then stop responding if challenged again. 
""" - text_401 = (b'HTTP/1.1 401 UNAUTHORIZED\r\n' - b'Content-Length: 0\r\n' - b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' - b', opaque="372825293d1c26955496c80ed6426e9e", ' - b'realm="me@kennethreitz.com", qop=auth\r\n\r\n') - - expected_digest = (b'Authorization: Digest username="user", ' - b'realm="me@kennethreitz.com", ' - b'nonce="6bf5d6e4da1ce66918800195d6b9130d", uri="/"') - - auth = niquests.auth.HTTPDigestAuth('user', 'pass') + text_401 = ( + b"HTTP/1.1 401 UNAUTHORIZED\r\n" + b"Content-Length: 0\r\n" + b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' + b', opaque="372825293d1c26955496c80ed6426e9e", ' + b'realm="me@kennethreitz.com", qop=auth\r\n\r\n' + ) + + expected_digest = ( + b'Authorization: Digest username="user", ' + b'realm="me@kennethreitz.com", ' + b'nonce="6bf5d6e4da1ce66918800195d6b9130d", uri="/"' + ) + + auth = niquests.auth.HTTPDigestAuth("user", "pass") def digest_failed_response_handler(sock): # Respond to initial GET with a challenge. @@ -221,7 +222,7 @@ def digest_failed_response_handler(sock): # Verify the client didn't respond to second challenge. request_content = consume_socket_content(sock, timeout=0.5) - assert request_content == b'' + assert request_content == b"" return request_content @@ -229,7 +230,7 @@ def digest_failed_response_handler(sock): server = Server(digest_failed_response_handler, wait_to_close_event=close_server) with server as (host, port): - url = f'http://{host}:{port}/' + url = f"http://{host}:{port}/" r = niquests.get(url, auth=auth) # Verify server didn't authenticate us. assert r.status_code == 401 @@ -242,13 +243,15 @@ def test_digestauth_only_on_4xx(): See https://github.com/psf/requests/issues/3772. """ - text_200_chal = (b'HTTP/1.1 200 OK\r\n' - b'Content-Length: 0\r\n' - b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' - b', opaque="372825293d1c26955496c80ed6426e9e", ' - b'realm="me@kennethreitz.com", qop=auth\r\n\r\n') + text_200_chal = ( + b"HTTP/1.1 200 OK\r\n" + b"Content-Length: 0\r\n" + b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' + b', opaque="372825293d1c26955496c80ed6426e9e", ' + b'realm="me@kennethreitz.com", qop=auth\r\n\r\n' + ) - auth = niquests.auth.HTTPDigestAuth('user', 'pass') + auth = niquests.auth.HTTPDigestAuth("user", "pass") def digest_response_handler(sock): # Respond to GET with a 200 containing www-authenticate header. @@ -258,7 +261,7 @@ def digest_response_handler(sock): # Verify the client didn't respond with auth. request_content = consume_socket_content(sock, timeout=0.5) - assert request_content == b'' + assert request_content == b"" return request_content @@ -266,7 +269,7 @@ def digest_response_handler(sock): server = Server(digest_response_handler, wait_to_close_event=close_server) with server as (host, port): - url = f'http://{host}:{port}/' + url = f"http://{host}:{port}/" r = niquests.get(url, auth=auth) # Verify server didn't receive auth from us. 
assert r.status_code == 200 @@ -275,9 +278,9 @@ def digest_response_handler(sock): _schemes_by_var_prefix = [ - ('http', ['http']), - ('https', ['https']), - ('all', ['http', 'https']), + ("http", ["http"]), + ("https", ["https"]), + ("all", ["http", "https"]), ] _proxy_combos = [] @@ -308,35 +311,36 @@ def test_use_proxy_from_environment(httpbin, var, scheme): def test_redirect_rfc1808_to_non_ascii_location(): - path = 'š' - expected_path = b'%C5%A1' + path = "š" + expected_path = b"%C5%A1" redirect_request = [] # stores the second request to the server def redirect_resp_handler(sock): consume_socket_content(sock, timeout=0.5) - location = f'//{host}:{port}/{path}' + location = f"//{host}:{port}/{path}" sock.send( ( - b'HTTP/1.1 301 Moved Permanently\r\n' - b'Content-Length: 0\r\n' - b'Location: %s\r\n' - b'\r\n' - ) % location.encode('utf8') + b"HTTP/1.1 301 Moved Permanently\r\n" + b"Content-Length: 0\r\n" + b"Location: %s\r\n" + b"\r\n" + ) + % location.encode("utf8") ) redirect_request.append(consume_socket_content(sock, timeout=0.5)) - sock.send(b'HTTP/1.1 200 OK\r\n\r\n') + sock.send(b"HTTP/1.1 200 OK\r\n\r\n") close_server = threading.Event() server = Server(redirect_resp_handler, wait_to_close_event=close_server) with server as (host, port): - url = f'http://{host}:{port}' + url = f"http://{host}:{port}" r = niquests.get(url=url, allow_redirects=True) assert r.status_code == 200 assert len(r.history) == 1 assert r.history[0].status_code == 301 - assert redirect_request[0].startswith(b'GET /' + expected_path + b' HTTP/1.1') - assert r.url == '{}/{}'.format(url, expected_path.decode('ascii')) + assert redirect_request[0].startswith(b"GET /" + expected_path + b" HTTP/1.1") + assert r.url == "{}/{}".format(url, expected_path.decode("ascii")) close_server.set() @@ -347,16 +351,16 @@ def test_fragment_not_sent_with_request(): server = Server(echo_response_handler, wait_to_close_event=close_server) with server as (host, port): - url = f'http://{host}:{port}/path/to/thing/#view=edit&token=hunter2' + url = f"http://{host}:{port}/path/to/thing/#view=edit&token=hunter2" r = niquests.get(url) raw_request = r.content assert r.status_code == 200 - headers, body = raw_request.split(b'\r\n\r\n', 1) - status_line, headers = headers.split(b'\r\n', 1) + headers, body = raw_request.split(b"\r\n\r\n", 1) + status_line, headers = headers.split(b"\r\n", 1) - assert status_line == b'GET /path/to/thing/ HTTP/1.1' - for frag in (b'view', b'edit', b'token', b'hunter2'): + assert status_line == b"GET /path/to/thing/ HTTP/1.1" + for frag in (b"view", b"edit", b"token", b"hunter2"): assert frag not in headers assert frag not in body @@ -372,26 +376,24 @@ def test_fragment_update_on_redirect(): def response_handler(sock): consume_socket_content(sock, timeout=0.5) sock.send( - b'HTTP/1.1 302 FOUND\r\n' - b'Content-Length: 0\r\n' - b'Location: /get#relevant-section\r\n\r\n' + b"HTTP/1.1 302 FOUND\r\n" + b"Content-Length: 0\r\n" + b"Location: /get#relevant-section\r\n\r\n" ) consume_socket_content(sock, timeout=0.5) sock.send( - b'HTTP/1.1 302 FOUND\r\n' - b'Content-Length: 0\r\n' - b'Location: /final-url/\r\n\r\n' + b"HTTP/1.1 302 FOUND\r\n" + b"Content-Length: 0\r\n" + b"Location: /final-url/\r\n\r\n" ) consume_socket_content(sock, timeout=0.5) - sock.send( - b'HTTP/1.1 200 OK\r\n\r\n' - ) + sock.send(b"HTTP/1.1 200 OK\r\n\r\n") close_server = threading.Event() server = Server(response_handler, wait_to_close_event=close_server) with server as (host, port): - url = 
f'http://{host}:{port}/path/to/thing/#view=edit&token=hunter2' + url = f"http://{host}:{port}/path/to/thing/#view=edit&token=hunter2" r = niquests.get(url) assert r.status_code == 200 @@ -399,20 +401,19 @@ def response_handler(sock): assert r.history[0].request.url == url # Verify we haven't overwritten the location with our previous fragment. - assert r.history[1].request.url == f'http://{host}:{port}/get#relevant-section' + assert r.history[1].request.url == f"http://{host}:{port}/get#relevant-section" # Verify previous fragment is used and not the original. - assert r.url == f'http://{host}:{port}/final-url/#relevant-section' + assert r.url == f"http://{host}:{port}/final-url/#relevant-section" close_server.set() def test_json_decode_compatibility_for_alt_utf_encodings(): - def response_handler(sock): consume_socket_content(sock, timeout=0.5) sock.send( - b'HTTP/1.1 200 OK\r\n' - b'Content-Length: 18\r\n\r\n' + b"HTTP/1.1 200 OK\r\n" + b"Content-Length: 18\r\n\r\n" b'\xff\xfe{\x00"\x00K0"\x00=\x00"\x00\xab0"\x00\r\n' ) @@ -420,7 +421,7 @@ def response_handler(sock): server = Server(response_handler, wait_to_close_event=close_server) with server as (host, port): - url = f'http://{host}:{port}/' + url = f"http://{host}:{port}/" r = niquests.get(url) r.encoding = None with pytest.raises(niquests.exceptions.JSONDecodeError) as excinfo: diff --git a/tests/test_requests.py b/tests/test_requests.py index d81a1cb629..3f81c33af0 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -931,10 +931,11 @@ def test_invalid_ca_certificate_path(self, httpbin_secure): INVALID_PATH = "/garbage" with pytest.raises(IOError) as e: niquests.get(httpbin_secure(), verify=INVALID_PATH) - assert str( - e.value - ) == "Could not find a suitable TLS CA certificate bundle, invalid path: {}".format( - INVALID_PATH + assert ( + str(e.value) + == "Could not find a suitable TLS CA certificate bundle, invalid path: {}".format( + INVALID_PATH + ) ) def test_invalid_ssl_certificate_files(self, httpbin_secure): diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 5a33735c4e..0000000000 --- a/tox.ini +++ /dev/null @@ -1,11 +0,0 @@ -[tox] -envlist = py{37,38,39,310,311,312}-{default} - -[testenv] -deps = -rrequirements-dev.txt -extras = - socks -commands = - pytest tests - -[testenv:default]
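
A short usage sketch for reviewers (not part of the patch itself): it is distilled from the live tests added above in tests/test_live.py and only uses calls that appear there. It assumes outbound network access; the pie.dev endpoint and the doh+google:// / doh+cloudflare:// resolver URLs are simply the ones the tests rely on, not requirements of the API.

    from niquests import Session
    from niquests.utils import is_ipv4_address
    from urllib3 import ResolverDescription

    # Force IPv4 and resolve host names through DNS-over-HTTPS (Google),
    # then inspect the negotiated protocol and peer address via conn_info.
    with Session(disable_ipv6=True, resolver="doh+google://") as s:
        r = s.get("https://pie.dev/get")
        print(r.conn_info.http_version)
        assert r.conn_info.destination_address is not None
        assert is_ipv4_address(r.conn_info.destination_address[0])

    # A resolver instantiated by the caller is not owned by the Session and
    # stays usable after the Session closes; a resolver built from a URL
    # string is owned by the Session and is closed together with it.
    resolver = ResolverDescription.from_url("doh+cloudflare://").new()
    with Session(resolver=resolver) as s:
        s.get("https://pie.dev/get")
    assert resolver.is_available()

The closing assert mirrors test_not_owned_resolver and test_owned_resolver_must_close above: a Session only closes resolvers it created itself.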