Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-urllib3 for openSUSE:Factory 
checked in at 2025-06-24 20:45:47
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-urllib3 (Old)
 and      /work/SRC/openSUSE:Factory/.python-urllib3.new.7067 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-urllib3"

Tue Jun 24 20:45:47 2025 rev:72 rq:1287779 version:2.5.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-urllib3/python-urllib3.changes    2025-05-31 19:14:11.944143225 +0200
+++ /work/SRC/openSUSE:Factory/.python-urllib3.new.7067/python-urllib3.changes  2025-06-24 20:46:12.164975997 +0200
@@ -1,0 +2,18 @@
+Mon Jun 23 02:03:12 UTC 2025 - Steve Kowalik <steven.kowa...@suse.com>
+
+- Update to 2.5.0:
+  * Security issues
+    Pool managers now properly control redirects when retries is passed
+    (CVE-2025-50181, GHSA-pq67-6m6q-mj2v, bsc#1244925)
+    Redirects are now controlled by urllib3 in the Node.js runtime
+    (CVE-2025-50182, GHSA-48p4-8xcf-vxj5, bsc#1244924)
+  * Features
+    Added support for the compression.zstd module that is new in Python 3.14.
+    Added support for version 0.5 of hatch-vcs
+  * Bugfixes
+    Raised exception for HTTPResponse.shutdown on a connection already
+    released to the pool.
+    Fixed incorrect CONNECT statement when using an IPv6 proxy with
+    connection_from_host; previously the host would not be wrapped in [].
+
+-------------------------------------------------------------------
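
A minimal sketch of the behaviour restored by the CVE-2025-50181 fix, modelled
on the new test added in test_emscripten.py further down; the redirect URL is a
placeholder for any endpoint that answers with a 3xx:

.. code-block:: python

    # Sketch only: redirect limits passed to PoolManager via ``retries`` are
    # honoured again as of 2.5.0.
    from urllib3 import PoolManager
    from urllib3.exceptions import MaxRetryError

    redirect_url = "http://127.0.0.1:8080/redirect"  # placeholder endpoint

    with PoolManager(retries=0) as http:
        try:
            http.request("GET", redirect_url)
        except MaxRetryError:
            print("redirect refused, as configured")

    # retries=False returns the 3xx response instead of following it.
    with PoolManager(retries=False) as http:
        print(http.request("GET", redirect_url).status)  # e.g. 303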

Old:
----
  urllib3-2.4.0.tar.gz

New:
----
  urllib3-2.5.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-urllib3.spec ++++++
--- /var/tmp/diff_new_pack.gZnFZ6/_old  2025-06-24 20:46:12.732999545 +0200
+++ /var/tmp/diff_new_pack.gZnFZ6/_new  2025-06-24 20:46:12.736999711 +0200
@@ -18,8 +18,6 @@
 
 %global flavor @BUILD_FLAVOR@%{nil}
 %if "%{flavor}" == "test"
-# No Quart for Python 3.10
-%define skip_python310 1
 %define psuffix -test
 %bcond_without test
 %else
@@ -28,7 +26,7 @@
 %endif
 %{?sle15_python_module_pythons}
 Name:           python-urllib3%{psuffix}
-Version:        2.4.0
+Version:        2.5.0
 Release:        0
 Summary:        HTTP library with thread-safe connection pooling, file post, and more
 License:        MIT

++++++ urllib3-2.4.0.tar.gz -> urllib3-2.5.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/CHANGES.rst 
new/urllib3-2.5.0/CHANGES.rst
--- old/urllib3-2.4.0/CHANGES.rst       2025-04-10 16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/CHANGES.rst       2025-06-18 15:55:24.000000000 +0200
@@ -1,3 +1,26 @@
+2.5.0 (2025-06-18)
+==================
+
+Features
+--------
+
+- Added support for the ``compression.zstd`` module that is new in Python 3.14.
+  See `PEP 784 <https://peps.python.org/pep-0784/>`_ for more information. 
(`#3610 <https://github.com/urllib3/urllib3/issues/3610>`__)
+- Added support for version 0.5 of ``hatch-vcs`` (`#3612 
<https://github.com/urllib3/urllib3/issues/3612>`__)
+
+
+Bugfixes
+--------
+
+- Fixed a security issue where restricting the maximum number of followed
+  redirects at the ``urllib3.PoolManager`` level via the ``retries`` parameter
+  did not work.
+- Made the Node.js runtime respect redirect parameters such as ``retries``
+  and ``redirects``.
+- Raised exception for ``HTTPResponse.shutdown`` on a connection already 
released to the pool. (`#3581 
<https://github.com/urllib3/urllib3/issues/3581>`__)
+- Fixed incorrect `CONNECT` statement when using an IPv6 proxy with 
`connection_from_host`. Previously would not be wrapped in `[]`. (`#3615 
<https://github.com/urllib3/urllib3/issues/3615>`__)
+
+
 2.4.0 (2025-04-10)
 ==================
 
@@ -29,7 +52,7 @@
 --------
 
 - Added ``HTTPResponse.shutdown()`` to stop any ongoing or future reads for a 
specific response. It calls ``shutdown(SHUT_RD)`` on the underlying socket. 
This feature was `sponsored by LaunchDarkly 
<https://opencollective.com/urllib3/contributions/815307>`__. (`#2868 
<https://github.com/urllib3/urllib3/issues/2868>`__)
-- Added support for JavaScript Promise Integration on Emscripten. This enables 
more efficient WebAssembly 
+- Added support for JavaScript Promise Integration on Emscripten. This enables 
more efficient WebAssembly
   requests and streaming, and makes it possible to use in Node.js if you 
launch it as  ``node --experimental-wasm-stack-switching``. (`#3400 
<https://github.com/urllib3/urllib3/issues/3400>`__)
 - Added the ``proxy_is_tunneling`` property to ``HTTPConnection`` and 
``HTTPSConnection``. (`#3285 
<https://github.com/urllib3/urllib3/issues/3285>`__)
 - Added pickling support to ``NewConnectionError`` and 
``NameResolutionError``. (`#3480 
<https://github.com/urllib3/urllib3/issues/3480>`__)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/PKG-INFO new/urllib3-2.5.0/PKG-INFO
--- old/urllib3-2.4.0/PKG-INFO  2025-04-10 16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/PKG-INFO  2025-06-18 15:55:24.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: urllib3
-Version: 2.4.0
+Version: 2.5.0
 Summary: HTTP library with thread-safe connection pooling, file post, and more.
 Project-URL: Changelog, 
https://github.com/urllib3/urllib3/blob/main/CHANGES.rst
 Project-URL: Documentation, https://urllib3.readthedocs.io
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/docs/advanced-usage.rst 
new/urllib3-2.5.0/docs/advanced-usage.rst
--- old/urllib3-2.4.0/docs/advanced-usage.rst   2025-04-10 16:55:56.000000000 
+0200
+++ new/urllib3-2.5.0/docs/advanced-usage.rst   2025-06-18 15:55:24.000000000 
+0200
@@ -561,11 +561,14 @@
 `Zstandard <https://datatracker.ietf.org/doc/html/rfc8878>`_
 is a compression algorithm created by Facebook with better compression
 than brotli, gzip and deflate (see `benchmarks 
<https://facebook.github.io/zstd/#benchmarks>`_)
-and is supported by urllib3 if the `zstandard package 
<https://pypi.org/project/zstandard/>`_ is installed.
+and is supported by urllib3 in Python 3.14+ using the `compression.zstd 
<https://peps.python.org/pep-0784/>`_ standard library module
+and for Python 3.13 and earlier if the `zstandard package 
<https://pypi.org/project/zstandard/>`_ is installed.
 You may also request the package be installed via the ``urllib3[zstd]`` extra:
 
 .. code-block:: bash
 
+    # This is only necessary on Python 3.13 and earlier.
+    # Otherwise zstandard support is included in the Python standard library.
     $ python -m pip install urllib3[zstd]
 
 .. note::
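
A minimal sketch of the detection order the updated documentation describes
(and which src/urllib3/util/request.py implements later in this diff): prefer
the standard-library module on Python 3.14+, fall back to the PyPI package
otherwise.

.. code-block:: python

    # Which zstd backend will this interpreter use?
    try:
        from compression import zstd  # Python 3.14+ standard library
        backend = "compression.zstd (stdlib)"
    except ImportError:
        try:
            import zstandard as zstd  # installed via the 'urllib3[zstd]' extra
            backend = "zstandard (PyPI)"
        except ImportError:
            backend = None  # zstd decoding unavailable

    print("zstd backend:", backend)
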
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/docs/reference/contrib/emscripten.rst 
new/urllib3-2.5.0/docs/reference/contrib/emscripten.rst
--- old/urllib3-2.4.0/docs/reference/contrib/emscripten.rst     2025-04-10 
16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/docs/reference/contrib/emscripten.rst     2025-06-18 
15:55:24.000000000 +0200
@@ -65,7 +65,7 @@
 * Timeouts
 * Retries
 * Streaming (with Web Workers and Cross-Origin Isolation)
-* Redirects
+* Redirects (urllib3 controls redirects in Node.js but not in browsers where 
behavior is determined by runtime)
 * Decompressing response bodies
 
 Features which don't work with Emscripten:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/docs/v2-migration-guide.rst 
new/urllib3-2.5.0/docs/v2-migration-guide.rst
--- old/urllib3-2.4.0/docs/v2-migration-guide.rst       2025-04-10 
16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/docs/v2-migration-guide.rst       2025-06-18 
15:55:24.000000000 +0200
@@ -1,9 +1,9 @@
 v2.0 Migration Guide
 ====================
 
-**urllib3 v2.0 is now available!** Read below for how to get started and what 
is contained in the new major release.
+**urllib3 2.x is now available!** Read below for how to get started and what 
is contained in the new major release.
 
-**🚀 Migrating from 1.x to 2.0**
+**🚀 Migrating from 1.x to 2.x**
 --------------------------------
 
 We're maintaining **functional API compatibility for most users** to make the
@@ -13,44 +13,25 @@
 
 .. note::
 
-  If you have difficulty migrating to v2.0 or following this guide
+  If you have difficulty migrating to 2.x or following this guide
   you can `open an issue on GitHub 
<https://github.com/urllib3/urllib3/issues>`_
   or reach out in `our community Discord channel 
<https://discord.gg/urllib3>`_.
 
 
-Timeline for deprecations and breaking changes
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The 2.x initial release schedule will look like this:
-
-* urllib3 ``v2.0.0-alpha1`` will be released in November 2022. This release
-  contains **minor breaking changes and deprecation warnings for other 
breaking changes**.
-  There may be other pre-releases to address fixes before v2.0.0 is released.
-* urllib3 ``v2.0.0`` will be released in early 2023 after some initial 
integration testing
-  against dependent packages and fixing of bug reports.
-* urllib3 ``v2.1.0`` will be released in the summer of 2023 with **all 
breaking changes
-  being warned about in v2.0.0**.
-
-.. warning::
-
-  Please take the ``DeprecationWarnings`` you receive when migrating from v1.x 
to v2.0 seriously
-  as they will become errors after 2.1.0 is released.
-
-
 What are the important changes?
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Here's a short summary of which changes in urllib3 v2.0 are most important:
+Here's a short summary of which changes in urllib3 2.x are most important:
 
-- Python version must be **3.7 or later** (previously supported Python 2.7, 
3.5, and 3.6).
+- Python version must be **3.9 or later** (previously supported Python 2.7, 
and 3.5 to 3.8).
 - Removed support for non-OpenSSL TLS libraries (like LibreSSL and wolfSSL).
 - Removed support for OpenSSL versions older than 1.1.1.
 - Removed support for Python implementations that aren't CPython or PyPy3 
(previously supported Google App Engine, Jython).
 - Removed the ``urllib3.contrib.ntlmpool`` module.
-- Deprecated the ``urllib3.contrib.pyopenssl``, 
``urllib3.contrib.securetransport`` modules, will be removed in v2.1.0.
-- Deprecated the ``urllib3[secure]`` extra, will be removed in v2.1.0.
-- Deprecated the ``HTTPResponse.getheaders()`` method in favor of 
``HTTPResponse.headers``, will be removed in v2.1.0.
-- Deprecated the ``HTTPResponse.getheader(name, default)`` method in favor of 
``HTTPResponse.headers.get(name, default)``, will be removed in v2.1.0.
+- Removed the ``urllib3.contrib.securetransport`` module.
+- Removed the ``urllib3[secure]`` extra.
+- Deprecated the ``HTTPResponse.getheaders()`` method in favor of 
``HTTPResponse.headers``, will be removed in 2.6.0.
+- Deprecated the ``HTTPResponse.getheader(name, default)`` method in favor of 
``HTTPResponse.headers.get(name, default)``, will be removed in 2.6.0.
 - Deprecated URLs without a scheme (ie 'https://') and will be raising an 
error in a future version of urllib3.
 - Changed the default minimum TLS version to TLS 1.2 (previously was TLS 1.0).
 - Changed the default request body encoding from 'ISO-8859-1' to 'UTF-8'.
@@ -60,129 +41,22 @@
 For a full list of changes you can look at `the changelog 
<https://github.com/urllib3/urllib3/blob/main/CHANGES.rst>`_.
 
 
-Migrating as a package maintainer?
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-If you're a maintainer of a package that uses urllib3 under the hood then this 
section is for you.
-You may have already seen an issue opened from someone on our team about the 
upcoming release.
-
-The primary goal for migrating to urllib3 v2.x should be to ensure your 
package supports **both urllib3 v1.26.x and v2.0 for some time**.
-This is to reduce the chance that diamond dependencies are introduced into 
your users' dependencies which will then cause issues
-with them upgrading to the latest version of **your package**.
-
-The first step to supporting urllib3 v2.0 is to make sure the version v2.x not 
being excluded by ``install_requires``. You should
-ensure your package allows for both urllib3 1.26.x and 2.0 to be used:
-
-.. code-block:: python
-
-  # setup.py (setuptools)
-  setup(
-    ...
-    install_requires=["urllib3>=1.26,<3"]
-  )
-
-  # pyproject.toml (hatch)
-  [project]
-  dependencies = [
-    "urllib3>=1.26,<3"
-  ]
-
-Next you should try installing urllib3 v2.0 locally and run your test suite.
-
-.. code-block:: bash
-
-  $ python -m pip install -U --pre 'urllib3>=2.0.0a1'
-
-
-Because there are many ``DeprecationWarnings`` you should ensure that you're
-able to see those warnings when running your test suite. To do so you can add
-the following to your test setup to ensure even ``DeprecationWarnings`` are
-output to the terminal:
-
-.. code-block:: bash
-
-  # Set PYTHONWARNING=default to show all warnings.
-  $ export PYTHONWARNINGS="default"
-
-  # Run your test suite and look for failures.
-  # Pytest automatically prints all warnings.
-  $ pytest tests/
-
-or you can opt-in within your Python code:
-
-.. code-block:: python
-
-  # You can change warning filters according to the filter rules:
-  # https://docs.python.org/3/library/warnings.html#warning-filter
-  import warnings
-  warnings.filterwarnings("default", category=DeprecationWarning)
-
-Any failures or deprecation warnings you receive should be fixed as urllib3 
v2.1.0 will remove all
-deprecated features. Many deprecation warnings will make suggestions about 
what to do to avoid the deprecated feature.
-
-Warnings will look something like this:
-
-.. code-block:: bash
-
-  DeprecationWarning: 'ssl_version' option is deprecated and will be removed
-  in urllib3 v2.1.0. Instead use 'ssl_minimum_version'
-
-Continue removing deprecation warnings until there are no more. After this you 
can publish a new release of your package
-that supports both urllib3 v1.26.x and v2.x.
-
-.. note::
-
-  If you're not able to support both 1.26.x and v2.0 of urllib3 at the same 
time with your package please
-  `open an issue on GitHub <https://github.com/urllib3/urllib3/issues>`_ or 
reach out in
-  `our community Discord channel <https://discord.gg/urllib3>`_.
-
-
-Migrating as an application developer?
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-If you're someone who writes Python but doesn't ship as a package (things like 
web services, data science, tools, and more) this section is for you.
-
-Python environments only allow for one version of a dependency to be installed 
per environment which means
-that **all of your dependencies using urllib3 need to support v2.0 for you to 
upgrade**.
-
-The best way to visualize relationships between your dependencies is using 
`pipdeptree <https://pypi.org/project/pipdeptree>`_ and ``$ pipdeptree 
--reverse``:
-
-.. code-block:: bash
-
-  # From inside your Python environment:
-  $ python -m pip install pipdeptree
-  # We only care about packages requiring urllib3
-  $ pipdeptree --reverse | grep "requires: urllib3"
-
-  - botocore==1.29.8 [requires: urllib3>=1.25.4,<2]
-  - requests==2.28.1 [requires: urllib3>=1.21.1,<2]
-
-Reading the output from above, there are two packages which depend on urllib3: 
``botocore`` and ``requests``.
-The versions of these two packages both require urllib3 that is less than v2.0 
(ie ``<2``).
-
-Because both of these packages require urllib3 before v2.0 the new version of 
urllib3 can't be installed
-by default. There are ways to force installing the newer version of urllib3 
v2.0 (ie pinning to ``urllib3==2.0.0``)
-which you can do to test your application.
 
-It's important to know that even if you don't upgrade all of your services to 
2.x
-immediately you will `receive security fixes on the 1.26.x release stream 
<#security-fixes-for-urllib3-v1-26-x>` for some time.
+Sunsetting urllib3 1.26.x
+~~~~~~~~~~~~~~~~~~~~~~~~~
 
-
-Security fixes for urllib3 v1.26.x
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Thanks to support from `Tidelift 
<https://tidelift.com/subscription/pkg/pypi-urllib3>`_
-we're able to continue supporting the v1.26.x release stream with
-security fixes for the foreseeable future 💖
+urllib3 1.26.x is not currently maintained. urllib3 2.x is the best version of urllib3
+and is widely supported by the larger Python ecosystem. That said, urllib3 1.26.x still
+sees significant download numbers, mainly because the botocore package still requires
+urllib3 1.26.x for Python 3.9 and earlier. If your organization would benefit from the
+continued support of urllib3 1.26.x, please contact sethmichaellar...@gmail.com to
+discuss sponsorship or contribution opportunities.
 
 However, upgrading is still recommended as **no new feature developments or 
non-critical
 bug fixes will be shipped to the 1.26.x release stream**.
 
-If your organization relies on urllib3 and is interested in continuing support 
you can learn
-more about the `Tidelift Subscription for Enterprise 
<https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=docs>`_.
-
 **🤔 Common upgrading issues**
--------------------------------
+------------------------------
 
 ssl module is compiled with OpenSSL 1.0.2.k-fips
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -194,7 +68,7 @@
 
 Remediation depends on your system:
 
-- **AWS Lambda**: Upgrade to the Python3.10 runtime as it uses OpenSSL 1.1.1. 
Alternatively, you can
+- **AWS Lambda**: Upgrade to the Python 3.10 (or later) runtime as it uses 
OpenSSL 1.1.1. Alternatively, you can
   use a `custom Docker image
   
<https://aws.amazon.com/blogs/aws/new-for-aws-lambda-container-image-support/>`_
 and ensure you
   use a Python build that uses OpenSSL 1.1.1 or later.
@@ -202,7 +76,7 @@
   <https://aws.amazon.com/linux/amazon-linux-2023/>`_. Alternatively, you can 
install OpenSSL 1.1.1
   on Amazon Linux 2 using ``yum install openssl11 openssl11-devel`` and then 
install Python with a
   tool like pyenv.
-- **Red Hat Enterpritse Linux 7 (RHEL 7)**: Upgrade to RHEL 8 or RHEL 9.
+- **Red Hat Enterprise Linux 7 (RHEL 7)**: Upgrade to RHEL 8 or later.
 - **Read the Docs**: Upgrade your `configuration file to use Ubuntu 22.04
   <https://docs.readthedocs.io/en/stable/config-file/v2.html>`_ by using ``os: 
ubuntu-22.04`` in the
   ``build`` section. Feel free to use the `urllib3 configuration
@@ -228,12 +102,12 @@
 This likely happens because you're using botocore which `does not support 
urllib3 2.0 yet
 <https://github.com/boto/botocore/issues/2921>`_. The good news is that 
botocore explicitly declares
 in its dependencies that it only supports ``urllib3<2``. Make sure to use a 
recent pip. That way, pip
-will install urllib3 1.26.x until botocore starts supporting urllib3 2.0.
+will install urllib3 1.26.x for versions of botocore that do not support 
urllib3 2.0.
 
-If you're deploying to an AWS environment such as Lambda or a host using 
Amazon Linux 2,
-you'll need to explicitly pin to ``urllib3<2`` in your project to ensure 
urllib3 2.0 isn't
-brought into your environment. Otherwise, this may result in unintended side 
effects with
-the default boto3 installation.
+If you're deploying to an AWS environment such as Lambda with the Python 3.9 runtime or a host
+using Amazon Linux 2, you'll need to explicitly pin to ``urllib3<2`` in your project to ensure
+urllib3 2.0 isn't brought into your environment. Otherwise, this may result in unintended side
+effects with the default boto3 installation.
 
 AttributeError: module 'urllib3.connectionpool' has no attribute 
'VerifiedHTTPSConnection'
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -251,18 +125,108 @@
 
 The ``strict`` parameter is unneeded with Python 3 and should be removed.
 
-Pinning urllib3<2
-~~~~~~~~~~~~~~~~~
 
-If the advice from the above sections did not help, you can pin urllib3 to 
1.26.x by installing
-``urllib3<2``. Please do **not** specify ``urllib3==1.26.15`` to make sure you 
continue getting
-1.26.x updates!
-
-While urllib3 1.26.x is still supported, it won't get new features or bug 
fixes, just security
-updates. Consider opening a tracking issue to unpin urllib3 in the future to 
not stay on 1.26.x
-indefinitely.  For more details on the recommended way to handle your 
dependencies in general, see
-`Semantic Versioning Will Not Save You 
<https://hynek.me/articles/semver-will-not-save-you/>`_. The
-second half even uses urllib3 2.0 as an example!
+
+Migrating as an application developer?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you're someone who writes Python but doesn't ship as a package (things like 
web services, data science, tools, and more) this section is for you.
+
+Python environments only allow for one version of a dependency to be installed 
per environment which means
+that **all of your dependencies using urllib3 need to support 2.x for you to 
upgrade**.
+
+The best way to visualize relationships between your dependencies is using 
`pipdeptree <https://pypi.org/project/pipdeptree>`_ and ``$ pipdeptree 
--reverse``:
+
+.. code-block:: bash
+
+  # From inside your Python environment:
+  $ python -m pip install pipdeptree
+  # We only care about packages requiring urllib3
+  $ pipdeptree --reverse | grep "requires: urllib3"
+  ├── botocore==1.38.36 [requires: urllib3>=1.25.4,<1.27]
+  └── requests==2.32.4 [requires: urllib3>=1.21.1,<3]
+
+Reading the output from above, there are two packages which depend on urllib3: 
``botocore`` and ``requests``.
+While requests supports urllib3 2.x (with the ``<3`` specified), botocore 
requires urllib3 1.26.x (with the ``<1.27`` version specifier).
+Note that botocore does support urllib3 2.x, but it only supports it on Python 
3.10 and later, which mandates OpenSSL 1.1.1+.
+
+It's important to know `urllib3 1.26.x does not receive security fixes at the moment <#sunsetting-urllib3-1-26-x>`.
+
+
+Migrating as a package maintainer?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you're a maintainer of a package that uses urllib3 under the hood then this 
section is for you.
+You may have already seen an issue opened from someone on our team about the 
upcoming release.
+
+The primary goal for migrating to urllib3 2.x should be to ensure your package 
supports **both urllib3 v1.26.x and 2.x for some time**.
+This is to reduce the chance that diamond dependencies are introduced into 
your users' dependencies which will then cause issues
+with them upgrading to the latest version of **your package**.
+
+The first step to supporting urllib3 2.x is to make sure version 2.x is not being excluded by ``install_requires``. You should
+ensure your package allows for both urllib3 1.26.x and 2.x to be used:
+
+.. code-block:: python
+
+  # setup.py (setuptools)
+  setup(
+    ...
+    install_requires=["urllib3>=1.26,<3"]
+  )
+
+  # pyproject.toml (hatch)
+  [project]
+  dependencies = [
+    "urllib3>=1.26,<3"
+  ]
+
+Next you should try installing urllib3 v2.0 locally and run your test suite.
+
+.. code-block:: bash
+
+  $ python -m pip install -U 'urllib3>=2'
+
+Because there are new ``DeprecationWarnings`` you should ensure that you're
+able to see those warnings when running your test suite. To do so you can add
+the following to your test setup to ensure even ``DeprecationWarnings`` are
+output to the terminal:
+
+.. code-block:: bash
+
+  # Set PYTHONWARNING=default to show all warnings.
+  $ export PYTHONWARNINGS="default"
+
+  # Run your test suite and look for failures.
+  # Pytest automatically prints all warnings.
+  $ pytest tests/
+
+or you can opt-in within your Python code:
+
+.. code-block:: python
+
+  # You can change warning filters according to the filter rules:
+  # https://docs.python.org/3/library/warnings.html#warning-filter
+  import warnings
+  warnings.filterwarnings("default", category=DeprecationWarning)
+
+Any failures or deprecation warnings you receive should be fixed as urllib3 
v2.1.0 will remove all
+deprecated features. Many deprecation warnings will make suggestions about 
what to do to avoid the deprecated feature.
+
+Warnings will look something like this:
+
+.. code-block:: bash
+
+  DeprecationWarning: 'ssl_version' option is deprecated and will be removed
+  in urllib3 v2.6.0. Instead use 'ssl_minimum_version'
+
+Continue removing deprecation warnings until there are no more. After this you 
can publish a new release of your package
+that supports both urllib3 1.26.x and 2.x.
+
+.. note::
+
+  If you're not able to support both 1.26.x and 2.x of urllib3 at the same 
time with your package please
+  `open an issue on GitHub <https://github.com/urllib3/urllib3/issues>`_ or 
reach out in
+  `our community Discord channel <https://discord.gg/urllib3>`_.
 
 
 **💪 User-friendly features**
@@ -321,15 +285,14 @@
   }
 
 
-**✨ Optimized for Python 3.7+**
+**✨ Optimized for Python 3.9+**
 --------------------------------
 
-In v2.0 we'll be specifically targeting
-CPython 3.7+ and PyPy 7.0+ (compatible with CPython 3.7)
-and dropping support for Python versions 2.7, 3.5, and 3.6.
-
+urllib3 2.x specifically targets CPython 3.9+ and PyPy 7.3.17+ (compatible with CPython 3.10)
+and drops support for Python versions 2.7 and 3.5 to 3.8.
+
 By dropping end-of-life Python versions we're able to optimize
-the codebase for Python 3.7+ by using new features to improve
+the codebase for Python 3.9+ by using new features to improve
 performance and reduce the amount of code that needs to be executed
 in order to support legacy versions.
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/dummyserver/app.py 
new/urllib3-2.5.0/dummyserver/app.py
--- old/urllib3-2.4.0/dummyserver/app.py        2025-04-10 16:55:56.000000000 
+0200
+++ new/urllib3-2.5.0/dummyserver/app.py        2025-06-18 15:55:24.000000000 
+0200
@@ -227,6 +227,7 @@
 
 
 @hypercorn_app.route("/redirect", methods=["GET", "POST", "PUT"])
+@pyodide_testing_app.route("/redirect", methods=["GET", "POST", "PUT"])
 async def redirect() -> ResponseReturnValue:
     "Perform a redirect to ``target``"
     values = await request.values
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/pyproject.toml 
new/urllib3-2.5.0/pyproject.toml
--- old/urllib3-2.4.0/pyproject.toml    2025-04-10 16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/pyproject.toml    2025-06-18 15:55:24.000000000 +0200
@@ -1,7 +1,7 @@
 # This file is protected via CODEOWNERS
 
 [build-system]
-requires = ["hatchling>=1.6.0,<2", "hatch-vcs==0.4.0"]
+requires = ["hatchling>=1.6.0,<2", "hatch-vcs>=0.4.0,<0.6.0", 
"setuptools-scm>=8,<9"]
 build-backend = "hatchling.build"
 
 [project]
@@ -44,6 +44,10 @@
   "brotli>=1.0.9; platform_python_implementation == 'CPython'",
   "brotlicffi>=0.8.0; platform_python_implementation != 'CPython'"
 ]
+# Once we drop support for Python 3.13 this extra can be removed.
+# We'll need a deprecation period for the 'zstandard' module support
+# so that folks using Python without the 'compression.zstd' module
+# compiled will know to start doing so (although it'll likely be rare).
 zstd = [
   "zstandard>=0.18.0",
 ]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/src/urllib3/_version.py 
new/urllib3-2.5.0/src/urllib3/_version.py
--- old/urllib3-2.4.0/src/urllib3/_version.py   2025-04-10 16:55:56.000000000 
+0200
+++ new/urllib3-2.5.0/src/urllib3/_version.py   2025-06-18 15:55:24.000000000 
+0200
@@ -17,5 +17,5 @@
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '2.4.0'
-__version_tuple__ = version_tuple = (2, 4, 0)
+__version__ = version = '2.5.0'
+__version_tuple__ = version_tuple = (2, 5, 0)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/src/urllib3/connection.py 
new/urllib3-2.5.0/src/urllib3/connection.py
--- old/urllib3-2.4.0/src/urllib3/connection.py 2025-04-10 16:55:56.000000000 
+0200
+++ new/urllib3-2.5.0/src/urllib3/connection.py 2025-06-18 15:55:24.000000000 
+0200
@@ -74,7 +74,7 @@
 
 # When it comes time to update this value as a part of regular maintenance
 # (ie test_recent_date is failing) update it to ~6 months before the current 
date.
-RECENT_DATE = datetime.date(2023, 6, 1)
+RECENT_DATE = datetime.date(2025, 1, 1)
 
 _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
 
@@ -232,45 +232,94 @@
         super().set_tunnel(host, port=port, headers=headers)
         self._tunnel_scheme = scheme
 
-    if sys.version_info < (3, 11, 4):
+    if sys.version_info < (3, 11, 9) or ((3, 12) <= sys.version_info < (3, 12, 
3)):
+        # Taken from python/cpython#100986 which was backported in 3.11.9 and 
3.12.3.
+        # When using connection_from_host, host will come without brackets.
+        def _wrap_ipv6(self, ip: bytes) -> bytes:
+            if b":" in ip and ip[0] != b"["[0]:
+                return b"[" + ip + b"]"
+            return ip
+
+        if sys.version_info < (3, 11, 9):
+            # `_tunnel` copied from 3.11.13 backporting
+            # 
https://github.com/python/cpython/commit/0d4026432591d43185568dd31cef6a034c4b9261
+            # and 
https://github.com/python/cpython/commit/6fbc61070fda2ffb8889e77e3b24bca4249ab4d1
+            def _tunnel(self) -> None:
+                _MAXLINE = http.client._MAXLINE  # type: ignore[attr-defined]
+                connect = b"CONNECT %s:%d HTTP/1.0\r\n" % (  # type: 
ignore[str-format]
+                    self._wrap_ipv6(self._tunnel_host.encode("ascii")),  # 
type: ignore[union-attr]
+                    self._tunnel_port,
+                )
+                headers = [connect]
+                for header, value in self._tunnel_headers.items():  # type: 
ignore[attr-defined]
+                    headers.append(f"{header}: {value}\r\n".encode("latin-1"))
+                headers.append(b"\r\n")
+                # Making a single send() call instead of one per line 
encourages
+                # the host OS to use a more optimal packet size instead of
+                # potentially emitting a series of small packets.
+                self.send(b"".join(headers))
+                del headers
+
+                response = self.response_class(self.sock, method=self._method) 
 # type: ignore[attr-defined]
+                try:
+                    (version, code, message) = response._read_status()  # 
type: ignore[attr-defined]
+
+                    if code != http.HTTPStatus.OK:
+                        self.close()
+                        raise OSError(
+                            f"Tunnel connection failed: {code} 
{message.strip()}"
+                        )
+                    while True:
+                        line = response.fp.readline(_MAXLINE + 1)
+                        if len(line) > _MAXLINE:
+                            raise http.client.LineTooLong("header line")
+                        if not line:
+                            # for sites which EOF without sending a trailer
+                            break
+                        if line in (b"\r\n", b"\n", b""):
+                            break
+
+                        if self.debuglevel > 0:
+                            print("header:", line.decode())
+                finally:
+                    response.close()
+
+        elif (3, 12) <= sys.version_info < (3, 12, 3):
+            # `_tunnel` copied from 3.12.11 backporting
+            # 
https://github.com/python/cpython/commit/23aef575c7629abcd4aaf028ebd226fb41a4b3c8
+            def _tunnel(self) -> None:  # noqa: F811
+                connect = b"CONNECT %s:%d HTTP/1.1\r\n" % (  # type: 
ignore[str-format]
+                    self._wrap_ipv6(self._tunnel_host.encode("idna")),  # 
type: ignore[union-attr]
+                    self._tunnel_port,
+                )
+                headers = [connect]
+                for header, value in self._tunnel_headers.items():  # type: 
ignore[attr-defined]
+                    headers.append(f"{header}: {value}\r\n".encode("latin-1"))
+                headers.append(b"\r\n")
+                # Making a single send() call instead of one per line 
encourages
+                # the host OS to use a more optimal packet size instead of
+                # potentially emitting a series of small packets.
+                self.send(b"".join(headers))
+                del headers
+
+                response = self.response_class(self.sock, method=self._method) 
 # type: ignore[attr-defined]
+                try:
+                    (version, code, message) = response._read_status()  # 
type: ignore[attr-defined]
 
-        def _tunnel(self) -> None:
-            _MAXLINE = http.client._MAXLINE  # type: ignore[attr-defined]
-            connect = b"CONNECT %s:%d HTTP/1.0\r\n" % (  # type: 
ignore[str-format]
-                self._tunnel_host.encode("ascii"),  # type: ignore[union-attr]
-                self._tunnel_port,
-            )
-            headers = [connect]
-            for header, value in self._tunnel_headers.items():  # type: 
ignore[attr-defined]
-                headers.append(f"{header}: {value}\r\n".encode("latin-1"))
-            headers.append(b"\r\n")
-            # Making a single send() call instead of one per line encourages
-            # the host OS to use a more optimal packet size instead of
-            # potentially emitting a series of small packets.
-            self.send(b"".join(headers))
-            del headers
-
-            response = self.response_class(self.sock, method=self._method)  # 
type: ignore[attr-defined]
-            try:
-                (version, code, message) = response._read_status()  # type: 
ignore[attr-defined]
-
-                if code != http.HTTPStatus.OK:
-                    self.close()
-                    raise OSError(f"Tunnel connection failed: {code} 
{message.strip()}")
-                while True:
-                    line = response.fp.readline(_MAXLINE + 1)
-                    if len(line) > _MAXLINE:
-                        raise http.client.LineTooLong("header line")
-                    if not line:
-                        # for sites which EOF without sending a trailer
-                        break
-                    if line in (b"\r\n", b"\n", b""):
-                        break
+                    self._raw_proxy_headers = 
http.client._read_headers(response.fp)  # type: ignore[attr-defined]
 
                     if self.debuglevel > 0:
-                        print("header:", line.decode())
-            finally:
-                response.close()
+                        for header in self._raw_proxy_headers:
+                            print("header:", header.decode())
+
+                    if code != http.HTTPStatus.OK:
+                        self.close()
+                        raise OSError(
+                            f"Tunnel connection failed: {code} 
{message.strip()}"
+                        )
+
+                finally:
+                    response.close()
 
     def connect(self) -> None:
         self.sock = self._new_conn()
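
To make the backport above easier to follow, here is a standalone sketch of the
bracket handling that ``_wrap_ipv6`` adds to the CONNECT target; the addresses
are illustrative:

.. code-block:: python

    # An IPv6 proxy host coming from connection_from_host carries no brackets,
    # so the CONNECT target must add them; bracketed or non-IPv6 hosts pass
    # through unchanged.
    def wrap_ipv6(ip: bytes) -> bytes:
        if b":" in ip and ip[0] != b"["[0]:
            return b"[" + ip + b"]"
        return ip

    assert wrap_ipv6(b"2001:db8::1") == b"[2001:db8::1]"
    assert wrap_ipv6(b"[2001:db8::1]") == b"[2001:db8::1]"
    assert wrap_ipv6(b"proxy.example.com") == b"proxy.example.com"

    # Yields "CONNECT [2001:db8::1]:8080 HTTP/1.0" rather than the previously
    # malformed "CONNECT 2001:db8::1:8080 HTTP/1.0".
    print(b"CONNECT %s:%d HTTP/1.0" % (wrap_ipv6(b"2001:db8::1"), 8080))
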
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/urllib3-2.4.0/src/urllib3/contrib/emscripten/fetch.py 
new/urllib3-2.5.0/src/urllib3/contrib/emscripten/fetch.py
--- old/urllib3-2.4.0/src/urllib3/contrib/emscripten/fetch.py   2025-04-10 
16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/src/urllib3/contrib/emscripten/fetch.py   2025-06-18 
15:55:24.000000000 +0200
@@ -573,6 +573,11 @@
         "method": request.method,
         "signal": js_abort_controller.signal,
     }
+    # Node.js returns the whole response (unlike opaqueredirect in browsers),
+    # so urllib3 can set `redirect: manual` to control redirects itself.
+    # https://stackoverflow.com/a/78524615
+    if _is_node_js():
+        fetch_data["redirect"] = "manual"
     # Call JavaScript fetch (async api, returns a promise)
     fetcher_promise_js = js.fetch(request.url, _obj_from_dict(fetch_data))
     # Now suspend WebAssembly until we resolve that promise
@@ -693,6 +698,21 @@
         return False
 
 
+def _is_node_js() -> bool:
+    """
+    Check if we are in Node.js.
+
+    :return: True if we are in Node.js.
+    :rtype: bool
+    """
+    return (
+        hasattr(js, "process")
+        and hasattr(js.process, "release")
+        # According to the Node.js documentation, the release name is always 
"node".
+        and js.process.release.name == "node"
+    )
+
+
 def streaming_ready() -> bool | None:
     if _fetcher:
         return _fetcher.streaming_ready
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/src/urllib3/poolmanager.py 
new/urllib3-2.5.0/src/urllib3/poolmanager.py
--- old/urllib3-2.4.0/src/urllib3/poolmanager.py        2025-04-10 
16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/src/urllib3/poolmanager.py        2025-06-18 
15:55:24.000000000 +0200
@@ -203,6 +203,22 @@
         **connection_pool_kw: typing.Any,
     ) -> None:
         super().__init__(headers)
+        if "retries" in connection_pool_kw:
+            retries = connection_pool_kw["retries"]
+            if not isinstance(retries, Retry):
+                # When Retry is initialized, raise_on_redirect is based
+                # on a redirect boolean value.
+                # But requests made via a pool manager always set
+                # redirect to False, and raise_on_redirect always ends
+                # up being False consequently.
+                # Here we fix the issue by setting raise_on_redirect to
+                # a value needed by the pool manager without considering
+                # the redirect boolean.
+                raise_on_redirect = retries is not False
+                retries = Retry.from_int(retries, redirect=False)
+                retries.raise_on_redirect = raise_on_redirect
+                connection_pool_kw = connection_pool_kw.copy()
+                connection_pool_kw["retries"] = retries
         self.connection_pool_kw = connection_pool_kw
 
         self.pools: RecentlyUsedContainer[PoolKey, HTTPConnectionPool]
@@ -456,7 +472,7 @@
             kw["body"] = None
             kw["headers"] = 
HTTPHeaderDict(kw["headers"])._prepare_for_method_change()
 
-        retries = kw.get("retries")
+        retries = kw.get("retries", response.retries)
         if not isinstance(retries, Retry):
             retries = Retry.from_int(retries, redirect=redirect)
 
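A condensed standalone sketch of the normalisation the new ``__init__`` code
performs when a plain int or bool is passed as ``retries`` (the value 3 is
illustrative):

.. code-block:: python

    # raise_on_redirect is decided from the caller's value *before*
    # Retry.from_int() forces redirect=False for pool-manager requests.
    from urllib3.util.retry import Retry

    retries = 3                               # as passed by the caller
    raise_on_redirect = retries is not False  # only retries=False disables raising
    retries = Retry.from_int(retries, redirect=False)
    retries.raise_on_redirect = raise_on_redirect
    print(retries.raise_on_redirect, retries.redirect)
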
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/src/urllib3/response.py 
new/urllib3-2.5.0/src/urllib3/response.py
--- old/urllib3-2.4.0/src/urllib3/response.py   2025-04-10 16:55:56.000000000 
+0200
+++ new/urllib3-2.5.0/src/urllib3/response.py   2025-06-18 15:55:24.000000000 
+0200
@@ -26,23 +26,6 @@
 except ImportError:
     brotli = None
 
-try:
-    import zstandard as zstd
-except (AttributeError, ImportError, ValueError):  # Defensive:
-    HAS_ZSTD = False
-else:
-    # The package 'zstandard' added the 'eof' property starting
-    # in v0.18.0 which we require to ensure a complete and
-    # valid zstd stream was fed into the ZstdDecoder.
-    # See: https://github.com/urllib3/urllib3/pull/2624
-    _zstd_version = tuple(
-        map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) 
 # type: ignore[union-attr]
-    )
-    if _zstd_version < (0, 18):  # Defensive:
-        HAS_ZSTD = False
-    else:
-        HAS_ZSTD = True
-
 from . import util
 from ._base_connection import _TYPE_BODY
 from ._collections import HTTPHeaderDict
@@ -163,11 +146,15 @@
             return b""
 
 
-if HAS_ZSTD:
+try:
+    # Python 3.14+
+    from compression import zstd  # type: ignore[import-not-found] # noqa: F401
+
+    HAS_ZSTD = True
 
     class ZstdDecoder(ContentDecoder):
         def __init__(self) -> None:
-            self._obj = zstd.ZstdDecompressor().decompressobj()
+            self._obj = zstd.ZstdDecompressor()
 
         def decompress(self, data: bytes) -> bytes:
             if not data:
@@ -175,15 +162,53 @@
             data_parts = [self._obj.decompress(data)]
             while self._obj.eof and self._obj.unused_data:
                 unused_data = self._obj.unused_data
-                self._obj = zstd.ZstdDecompressor().decompressobj()
+                self._obj = zstd.ZstdDecompressor()
                 data_parts.append(self._obj.decompress(unused_data))
             return b"".join(data_parts)
 
         def flush(self) -> bytes:
-            ret = self._obj.flush()  # note: this is a no-op
             if not self._obj.eof:
                 raise DecodeError("Zstandard data is incomplete")
-            return ret
+            return b""
+
+except ImportError:
+    try:
+        # Python 3.13 and earlier require the 'zstandard' module.
+        import zstandard as zstd
+
+        # The package 'zstandard' added the 'eof' property starting
+        # in v0.18.0 which we require to ensure a complete and
+        # valid zstd stream was fed into the ZstdDecoder.
+        # See: https://github.com/urllib3/urllib3/pull/2624
+        _zstd_version = tuple(
+            map(int, re.search(r"^([0-9]+)\.([0-9]+)", 
zstd.__version__).groups())  # type: ignore[union-attr]
+        )
+        if _zstd_version < (0, 18):  # Defensive:
+            raise ImportError("zstandard module doesn't have eof")
+    except (AttributeError, ImportError, ValueError):  # Defensive:
+        HAS_ZSTD = False
+    else:
+        HAS_ZSTD = True
+
+        class ZstdDecoder(ContentDecoder):  # type: ignore[no-redef]
+            def __init__(self) -> None:
+                self._obj = zstd.ZstdDecompressor().decompressobj()
+
+            def decompress(self, data: bytes) -> bytes:
+                if not data:
+                    return b""
+                data_parts = [self._obj.decompress(data)]
+                while self._obj.eof and self._obj.unused_data:
+                    unused_data = self._obj.unused_data
+                    self._obj = zstd.ZstdDecompressor().decompressobj()
+                    data_parts.append(self._obj.decompress(unused_data))
+                return b"".join(data_parts)
+
+            def flush(self) -> bytes:
+                ret = self._obj.flush()  # note: this is a no-op
+                if not self._obj.eof:
+                    raise DecodeError("Zstandard data is incomplete")
+                return ret  # type: ignore[no-any-return]
 
 
 class MultiDecoder(ContentDecoder):
@@ -518,7 +543,7 @@
     def getheaders(self) -> HTTPHeaderDict:
         warnings.warn(
             "HTTPResponse.getheaders() is deprecated and will be removed "
-            "in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.",
+            "in urllib3 v2.6.0. Instead access HTTPResponse.headers directly.",
             category=DeprecationWarning,
             stacklevel=2,
         )
@@ -527,7 +552,7 @@
     def getheader(self, name: str, default: str | None = None) -> str | None:
         warnings.warn(
             "HTTPResponse.getheader() is deprecated and will be removed "
-            "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, 
default).",
+            "in urllib3 v2.6.0. Instead use HTTPResponse.headers.get(name, 
default).",
             category=DeprecationWarning,
             stacklevel=2,
         )
@@ -1075,6 +1100,10 @@
     def shutdown(self) -> None:
         if not self._sock_shutdown:
             raise ValueError("Cannot shutdown socket as self._sock_shutdown is 
not set")
+        if self._connection is None:
+            raise RuntimeError(
+                "Cannot shutdown as connection has already been released to 
the pool"
+            )
         self._sock_shutdown(socket.SHUT_RD)
 
     def close(self) -> None:
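
A short sketch of the intended call order for the new guard (host and port are
placeholders for a reachable server): ``shutdown()`` is valid while the response
still owns its connection, and now raises ``RuntimeError`` once the connection
has gone back to the pool.

.. code-block:: python

    from urllib3 import HTTPConnectionPool

    with HTTPConnectionPool("localhost", 8080) as pool:  # placeholder endpoint
        resp = pool.urlopen("GET", "/", preload_content=False)
        resp.shutdown()      # fine: the response still holds its connection
        resp.release_conn()
        # Calling resp.shutdown() again here raises RuntimeError in 2.5.0,
        # because the connection has already been released to the pool.
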
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/src/urllib3/util/request.py 
new/urllib3-2.5.0/src/urllib3/util/request.py
--- old/urllib3-2.4.0/src/urllib3/util/request.py       2025-04-10 
16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/src/urllib3/util/request.py       2025-06-18 
15:55:24.000000000 +0200
@@ -28,12 +28,20 @@
     pass
 else:
     ACCEPT_ENCODING += ",br"
+
 try:
-    import zstandard as _unused_module_zstd  # noqa: F401
-except ImportError:
-    pass
-else:
+    from compression import (  # type: ignore[import-not-found] # noqa: F401
+        zstd as _unused_module_zstd,
+    )
+
     ACCEPT_ENCODING += ",zstd"
+except ImportError:
+    try:
+        import zstandard as _unused_module_zstd  # noqa: F401
+
+        ACCEPT_ENCODING += ",zstd"
+    except ImportError:
+        pass
 
 
 class _TYPE_FAILEDTELL(Enum):
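
The constant assembled above becomes the default ``Accept-Encoding`` header; a
quick way to see what a given interpreter will advertise:

.. code-block:: python

    # Prints e.g. "gzip,deflate,br,zstd" when brotli and a zstd backend are
    # importable, or just "gzip,deflate" on a minimal install.
    from urllib3.util.request import ACCEPT_ENCODING

    print(ACCEPT_ENCODING)
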
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/src/urllib3/util/ssl_.py 
new/urllib3-2.5.0/src/urllib3/util/ssl_.py
--- old/urllib3-2.4.0/src/urllib3/util/ssl_.py  2025-04-10 16:55:56.000000000 
+0200
+++ new/urllib3-2.5.0/src/urllib3/util/ssl_.py  2025-06-18 15:55:24.000000000 
+0200
@@ -289,7 +289,7 @@
             # keep the maximum version to be it's default value: 
'TLSVersion.MAXIMUM_SUPPORTED'
             warnings.warn(
                 "'ssl_version' option is deprecated and will be "
-                "removed in urllib3 v2.1.0. Instead use 'ssl_minimum_version'",
+                "removed in urllib3 v2.6.0. Instead use 'ssl_minimum_version'",
                 category=DeprecationWarning,
                 stacklevel=2,
             )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/test/__init__.py 
new/urllib3-2.5.0/test/__init__.py
--- old/urllib3-2.4.0/test/__init__.py  2025-04-10 16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/test/__init__.py  2025-06-18 15:55:24.000000000 +0200
@@ -26,9 +26,18 @@
     brotli = None
 
 try:
-    import zstandard as _unused_module_zstd  # noqa: F401
+    # Python 3.14
+    from compression import (  # type: ignore[import-not-found] # noqa: F401
+        zstd as _unused_module_zstd,
+    )
 except ImportError:
-    HAS_ZSTD = False
+    # Python 3.13 and earlier require the 'zstandard' module.
+    try:
+        import zstandard as _unused_module_zstd  # noqa: F401
+    except ImportError:
+        HAS_ZSTD = False
+    else:
+        HAS_ZSTD = True
 else:
     HAS_ZSTD = True
 
@@ -127,14 +136,15 @@
 
 def onlyZstd() -> typing.Callable[[_TestFuncT], _TestFuncT]:
     return pytest.mark.skipif(
-        not HAS_ZSTD, reason="only run if a python-zstandard library is 
installed"
+        not HAS_ZSTD,
+        reason="only run if a python-zstandard library is installed or Python 
3.14 and later",
     )
 
 
 def notZstd() -> typing.Callable[[_TestFuncT], _TestFuncT]:
     return pytest.mark.skipif(
         HAS_ZSTD,
-        reason="only run if a python-zstandard library is not installed",
+        reason="only run if a python-zstandard library is not installed or 
Python 3.13 and earlier",
     )
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/test/contrib/emscripten/conftest.py 
new/urllib3-2.5.0/test/contrib/emscripten/conftest.py
--- old/urllib3-2.4.0/test/contrib/emscripten/conftest.py       2025-04-10 
16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/test/contrib/emscripten/conftest.py       2025-06-18 
15:55:24.000000000 +0200
@@ -84,6 +84,19 @@
         result = self.run_js(
             f'await 
pyodide.loadPackage("http://{testserver_http.http_host}:{testserver_http.http_port}/dist/urllib3.whl";)'
         )
+        if not self.with_jspi:
+            # force Chrome to execute the current test without JSPI
+            # even though it is always enabled in
+            # chrome >= 137. We do this by monkeypatching
+            # pyodide.ffi.can_run_sync
+            self.run_async(
+                """
+                import pyodide.ffi
+                if pyodide.ffi.can_run_sync():
+                    pyodide.ffi.can_run_sync = lambda: False
+                """
+            )
+
         print("Installed package:", result)
         self.run_js(
             """
@@ -128,11 +141,14 @@
 
 
 class ServerRunnerInfo:
-    def __init__(self, host: str, port: int, selenium: Any, dist_dir: Path) -> 
None:
+    def __init__(
+        self, host: str, port: int, selenium: Any, dist_dir: Path, has_jspi: 
bool
+    ) -> None:
         self.host = host
         self.port = port
         self.selenium = selenium
         self.dist_dir = dist_dir
+        self.has_jspi = has_jspi
 
     def run_webworker(self, code: str) -> Any:
         if isinstance(code, str) and code.startswith("\n"):
@@ -148,6 +164,19 @@
             """
         )
 
+        # Monkeypatch pyodide to force disable JSPI in newer chrome
+        # so those code paths get tested
+        if self.has_jspi is False:
+            jspi_fix_code = textwrap.dedent(
+                """
+                import pyodide.ffi
+                if pyodide.ffi.can_run_sync():
+                    pyodide.ffi.can_run_sync = lambda: False
+                """
+            )
+        else:
+            jspi_fix_code = ""
+
         coverage_end_code = textwrap.dedent(
             """
             _coverage.stop()
@@ -164,7 +193,15 @@
 
         # the ordering of these code blocks is important - makes sure
         # that the first thing that happens is our wheel is loaded
-        code = coverage_init_code + "\n" + code + "\n" + coverage_end_code
+        code = (
+            coverage_init_code
+            + "\n"
+            + jspi_fix_code
+            + "\n"
+            + code
+            + "\n"
+            + coverage_end_code
+        )
 
         if self.selenium.browser == "firefox":
             # running in worker is SLOW on firefox
@@ -232,6 +269,7 @@
         testserver_http.https_port,
         selenium_coverage,
         dist_dir,
+        selenium_coverage.with_jspi,
     )
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/urllib3-2.4.0/test/contrib/emscripten/test_emscripten.py 
new/urllib3-2.5.0/test/contrib/emscripten/test_emscripten.py
--- old/urllib3-2.4.0/test/contrib/emscripten/test_emscripten.py        
2025-04-10 16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/test/contrib/emscripten/test_emscripten.py        
2025-06-18 15:55:24.000000000 +0200
@@ -944,6 +944,68 @@
     pyodide_test(selenium_coverage, testserver_http.http_host, 
find_unused_port())
 
 
+def test_redirects(
+    selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+    @run_in_pyodide  # type: ignore[misc]
+    def pyodide_test(selenium_coverage: typing.Any, host: str, port: int) -> 
None:
+        from urllib3 import request
+
+        redirect_url = f"http://{host}:{port}/redirect";
+        response = request("GET", redirect_url)
+        assert response.status == 200
+
+    pyodide_test(
+        selenium_coverage, testserver_http.http_host, testserver_http.http_port
+    )
+
+
+@pytest.mark.with_jspi
+def test_disabled_redirects(
+    selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+    """
+    Test that urllib3 can control redirects in Node.js.
+    """
+
+    @run_in_pyodide  # type: ignore[misc]
+    def pyodide_test(selenium_coverage: typing.Any, host: str, port: int) -> 
None:
+        import pytest
+
+        from urllib3 import PoolManager, request
+        from urllib3.contrib.emscripten.fetch import _is_node_js
+        from urllib3.exceptions import MaxRetryError
+
+        if not _is_node_js():
+            pytest.skip("urllib3 does not control redirects in browsers.")
+
+        redirect_url = f"http://{host}:{port}/redirect";
+
+        with PoolManager(retries=0) as http:
+            with pytest.raises(MaxRetryError):
+                http.request("GET", redirect_url)
+
+            response = http.request("GET", redirect_url, redirect=False)
+            assert response.status == 303
+
+        with PoolManager(retries=False) as http:
+            response = http.request("GET", redirect_url)
+            assert response.status == 303
+
+        with pytest.raises(MaxRetryError):
+            request("GET", redirect_url, retries=0)
+
+        response = request("GET", redirect_url, redirect=False)
+        assert response.status == 303
+
+        response = request("GET", redirect_url, retries=0, redirect=False)
+        assert response.status == 303
+
+    pyodide_test(
+        selenium_coverage, testserver_http.http_host, testserver_http.http_port
+    )
+
+
 def test_insecure_requests_warning(
     selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
 ) -> None:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/test/test_poolmanager.py 
new/urllib3-2.5.0/test/test_poolmanager.py
--- old/urllib3-2.4.0/test/test_poolmanager.py  2025-04-10 16:55:56.000000000 
+0200
+++ new/urllib3-2.5.0/test/test_poolmanager.py  2025-06-18 15:55:24.000000000 
+0200
@@ -379,9 +379,10 @@
 
     def test_merge_pool_kwargs(self) -> None:
         """Assert _merge_pool_kwargs works in the happy case"""
-        p = PoolManager(retries=100)
+        retries = retry.Retry(total=100)
+        p = PoolManager(retries=retries)
         merged = p._merge_pool_kwargs({"new_key": "value"})
-        assert {"retries": 100, "new_key": "value"} == merged
+        assert {"retries": retries, "new_key": "value"} == merged
 
     def test_merge_pool_kwargs_none(self) -> None:
         """Assert false-y values to _merge_pool_kwargs result in defaults"""
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/urllib3-2.4.0/test/test_response.py 
new/urllib3-2.5.0/test/test_response.py
--- old/urllib3-2.4.0/test/test_response.py     2025-04-10 16:55:56.000000000 
+0200
+++ new/urllib3-2.5.0/test/test_response.py     2025-06-18 15:55:24.000000000 
+0200
@@ -35,6 +35,14 @@
 from urllib3.util.retry import RequestHistory, Retry
 
 
+def zstd_compress(data: bytes) -> bytes:
+    try:
+        from compression import zstd  # type: ignore[import-not-found] # noqa: 
F401
+    except ImportError:
+        import zstandard as zstd
+    return zstd.compress(data)  # type: ignore[no-any-return]
+
+
 class TestBytesQueueBuffer:
     def test_single_chunk(self) -> None:
         buffer = BytesQueueBuffer()
@@ -411,9 +419,7 @@
 
     @onlyZstd()
     def test_decode_zstd(self) -> None:
-        import zstandard as zstd
-
-        data = zstd.compress(b"foo")
+        data = zstd_compress(b"foo")
 
         fp = BytesIO(data)
         r = HTTPResponse(fp, headers={"content-encoding": "zstd"})
@@ -421,11 +427,9 @@
 
     @onlyZstd()
     def test_decode_multiframe_zstd(self) -> None:
-        import zstandard as zstd
-
         data = (
             # Zstandard frame
-            zstd.compress(b"foo")
+            zstd_compress(b"foo")
             # skippable frame (must be ignored)
             + bytes.fromhex(
                 "50 2A 4D 18"  # Magic_Number (little-endian)
@@ -433,7 +437,7 @@
                 "00 00 00 00 00 00 00"  # User_Data
             )
             # Zstandard frame
-            + zstd.compress(b"bar")
+            + zstd_compress(b"bar")
         )
 
         fp = BytesIO(data)
@@ -442,9 +446,7 @@
 
     @onlyZstd()
     def test_chunked_decoding_zstd(self) -> None:
-        import zstandard as zstd
-
-        data = zstd.compress(b"foobarbaz")
+        data = zstd_compress(b"foobarbaz")
 
         fp = BytesIO(data)
         r = HTTPResponse(
@@ -475,9 +477,7 @@
     @onlyZstd()
     @pytest.mark.parametrize("data", decode_param_set)
     def test_decode_zstd_incomplete_preload_content(self, data: bytes) -> None:
-        import zstandard as zstd
-
-        data = zstd.compress(data)
+        data = zstd_compress(data)
         fp = BytesIO(data[:-1])
 
         with pytest.raises(DecodeError):
@@ -486,9 +486,7 @@
     @onlyZstd()
     @pytest.mark.parametrize("data", decode_param_set)
     def test_decode_zstd_incomplete_read(self, data: bytes) -> None:
-        import zstandard as zstd
-
-        data = zstd.compress(data)
+        data = zstd_compress(data)
         fp = BytesIO(data[:-1])  # shorten the data to trigger DecodeError
 
         # create response object without(!) reading/decoding the content
@@ -503,9 +501,7 @@
     @onlyZstd()
     @pytest.mark.parametrize("data", decode_param_set)
     def test_decode_zstd_incomplete_read1(self, data: bytes) -> None:
-        import zstandard as zstd
-
-        data = zstd.compress(data)
+        data = zstd_compress(data)
         fp = BytesIO(data[:-1])
 
         r = HTTPResponse(
@@ -523,9 +519,7 @@
     @onlyZstd()
     @pytest.mark.parametrize("data", decode_param_set)
     def test_decode_zstd_read1(self, data: bytes) -> None:
-        import zstandard as zstd
-
-        encoded_data = zstd.compress(data)
+        encoded_data = zstd_compress(data)
         fp = BytesIO(encoded_data)
 
         r = HTTPResponse(
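
The new zstd_compress helper mirrors the import fallback urllib3 itself uses; a minimal decoding sketch, assuming a zstd backend is available (Python 3.14's compression.zstd or the third-party zstandard package):

    from io import BytesIO

    from urllib3.response import HTTPResponse

    try:
        from compression import zstd  # Python 3.14+, PEP 784
    except ImportError:
        import zstandard as zstd  # fallback on older interpreters

    payload = zstd.compress(b"foo")
    r = HTTPResponse(BytesIO(payload), headers={"content-encoding": "zstd"})
    assert r.data == b"foo"  # transparently decompressed
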
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/urllib3-2.4.0/test/test_ssl.py new/urllib3-2.5.0/test/test_ssl.py
--- old/urllib3-2.4.0/test/test_ssl.py  2025-04-10 16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/test/test_ssl.py  2025-06-18 15:55:24.000000000 +0200
@@ -241,7 +241,7 @@
         with pytest.warns(
             DeprecationWarning,
             match=r"'ssl_version' option is deprecated and will be removed in "
-            r"urllib3 v2\.1\.0\. Instead use 'ssl_minimum_version'",
+            r"urllib3 v2\.6\.0\. Instead use 'ssl_minimum_version'",
         ):
             ssl_.create_urllib3_context(**kwargs)
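
The bumped version string only moves the removal target; the supported replacement stays the same. A short sketch of the migration, using the public ssl_minimum_version option:

    import ssl

    from urllib3.util import ssl_

    # Deprecated: ssl_.create_urllib3_context(ssl_version=...) now warns that
    # the option will be removed in urllib3 v2.6.0 instead of v2.1.0.
    # Supported replacement:
    ctx = ssl_.create_urllib3_context(ssl_minimum_version=ssl.TLSVersion.TLSv1_2)
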
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/urllib3-2.4.0/test/with_dummyserver/test_connectionpool.py new/urllib3-2.5.0/test/with_dummyserver/test_connectionpool.py
--- old/urllib3-2.4.0/test/with_dummyserver/test_connectionpool.py      2025-04-10 16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/test/with_dummyserver/test_connectionpool.py      2025-06-18 15:55:24.000000000 +0200
@@ -912,6 +912,18 @@
             # the pool should still contain poolsize elements
             assert http.pool.qsize() == http.pool.maxsize
 
+    def test_shutdown_on_connection_released_to_pool(self) -> None:
+        with HTTPConnectionPool(self.host, self.port) as pool:
+            resp = pool.urlopen("GET", "/", preload_content=False)
+            resp.drain_conn()
+            resp.release_conn()
+
+        with pytest.raises(
+            RuntimeError,
+            match="Cannot shutdown as connection has already been released to 
the pool",
+        ):
+            resp.shutdown()
+
     def test_mixed_case_hostname(self) -> None:
         with HTTPConnectionPool("LoCaLhOsT", self.port) as pool:
             response = pool.request("GET", f"http://LoCaLhOsT:{self.port}/")
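
A sketch of the guard the new test exercises; host and port are placeholders and the request only succeeds if something is listening there:

    from urllib3 import HTTPConnectionPool

    with HTTPConnectionPool("localhost", 8080) as pool:
        resp = pool.urlopen("GET", "/", preload_content=False)
        resp.drain_conn()
        resp.release_conn()
        # shutdown() on a response whose connection is already back in the
        # pool now raises RuntimeError instead of touching the pooled socket.
        # resp.shutdown()
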
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/urllib3-2.4.0/test/with_dummyserver/test_https.py new/urllib3-2.5.0/test/with_dummyserver/test_https.py
--- old/urllib3-2.4.0/test/with_dummyserver/test_https.py       2025-04-10 16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/test/with_dummyserver/test_https.py       2025-06-18 15:55:24.000000000 +0200
@@ -762,7 +762,7 @@
                 cmgr = pytest.warns(
                     DeprecationWarning,
                     match=r"'ssl_version' option is deprecated and will be 
removed "
-                    r"in urllib3 v2\.1\.0\. Instead use 'ssl_minimum_version'",
+                    r"in urllib3 v2\.6\.0\. Instead use 'ssl_minimum_version'",
                 )
             with cmgr:
                 r = https_pool.request("GET", "/")
@@ -836,7 +836,7 @@
             str(x.message)
             == (
                 "'ssl_version' option is deprecated and will be removed in "
-                "urllib3 v2.1.0. Instead use 'ssl_minimum_version'"
+                "urllib3 v2.6.0. Instead use 'ssl_minimum_version'"
             )
             for x in w
         )
@@ -1132,7 +1132,7 @@
         with pytest.warns(
             DeprecationWarning,
             match=r"'ssl_version' option is deprecated and will be removed in "
-            r"urllib3 v2\.1\.0\. Instead use 'ssl_minimum_version'",
+            r"urllib3 v2\.6\.0\. Instead use 'ssl_minimum_version'",
         ):
             ctx = urllib3.util.ssl_.create_urllib3_context(
                 ssl_version=self.ssl_version()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/urllib3-2.4.0/test/with_dummyserver/test_poolmanager.py new/urllib3-2.5.0/test/with_dummyserver/test_poolmanager.py
--- old/urllib3-2.4.0/test/with_dummyserver/test_poolmanager.py 2025-04-10 16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/test/with_dummyserver/test_poolmanager.py 2025-06-18 15:55:24.000000000 +0200
@@ -84,6 +84,89 @@
             assert r.status == 200
             assert r.data == b"Dummy server!"
 
+    @pytest.mark.parametrize(
+        "retries",
+        (0, Retry(total=0), Retry(redirect=0), Retry(total=0, redirect=0)),
+    )
+    def test_redirects_disabled_for_pool_manager_with_0(
+        self, retries: typing.Literal[0] | Retry
+    ) -> None:
+        """
+        Check handling redirects when retries is set to 0 on the pool
+        manager.
+        """
+        with PoolManager(retries=retries) as http:
+            with pytest.raises(MaxRetryError):
+                http.request("GET", f"{self.base_url}/redirect")
+
+            # Setting redirect=True should not change the behavior.
+            with pytest.raises(MaxRetryError):
+                http.request("GET", f"{self.base_url}/redirect", redirect=True)
+
+            # Setting redirect=False should not make it follow the redirect,
+            # but MaxRetryError should not be raised.
+            response = http.request("GET", f"{self.base_url}/redirect", redirect=False)
+            assert response.status == 303
+
+    @pytest.mark.parametrize(
+        "retries",
+        (
+            False,
+            Retry(total=False),
+            Retry(redirect=False),
+            Retry(total=False, redirect=False),
+        ),
+    )
+    def test_redirects_disabled_for_pool_manager_with_false(
+        self, retries: typing.Literal[False] | Retry
+    ) -> None:
+        """
+        Check that setting retries set to False on the pool manager disables
+        raising MaxRetryError and redirect=True does not change the
+        behavior.
+        """
+        with PoolManager(retries=retries) as http:
+            response = http.request("GET", f"{self.base_url}/redirect")
+            assert response.status == 303
+
+            response = http.request("GET", f"{self.base_url}/redirect", redirect=True)
+            assert response.status == 303
+
+            response = http.request("GET", f"{self.base_url}/redirect", redirect=False)
+            assert response.status == 303
+
+    def test_redirects_disabled_for_individual_request(self) -> None:
+        """
+        Check handling redirects when they are meant to be disabled
+        on the request level.
+        """
+        with PoolManager() as http:
+            # Check when redirect is not passed.
+            with pytest.raises(MaxRetryError):
+                http.request("GET", f"{self.base_url}/redirect", retries=0)
+            response = http.request("GET", f"{self.base_url}/redirect", retries=False)
+            assert response.status == 303
+
+            # Check when redirect=True.
+            with pytest.raises(MaxRetryError):
+                http.request(
+                    "GET", f"{self.base_url}/redirect", retries=0, 
redirect=True
+                )
+            response = http.request(
+                "GET", f"{self.base_url}/redirect", retries=False, 
redirect=True
+            )
+            assert response.status == 303
+
+            # Check when redirect=False.
+            response = http.request(
+                "GET", f"{self.base_url}/redirect", retries=0, redirect=False
+            )
+            assert response.status == 303
+            response = http.request(
+                "GET", f"{self.base_url}/redirect", retries=False, 
redirect=False
+            )
+            assert response.status == 303
+
     def test_cross_host_redirect(self) -> None:
         with PoolManager() as http:
             cross_host_location = f"{self.base_url_alt}/echo?a=b"
@@ -138,6 +221,24 @@
             pool = http.connection_from_host(self.host, self.port)
             assert pool.num_connections == 1
 
+        # Check when retries are configured for the pool manager.
+        with PoolManager(retries=1) as http:
+            with pytest.raises(MaxRetryError):
+                http.request(
+                    "GET",
+                    f"{self.base_url}/redirect",
+                    fields={"target": f"/redirect?target={self.base_url}/"},
+                )
+
+            # Here we allow more retries for the request.
+            response = http.request(
+                "GET",
+                f"{self.base_url}/redirect",
+                fields={"target": f"/redirect?target={self.base_url}/"},
+                retries=2,
+            )
+            assert response.status == 200
+
     def test_redirect_cross_host_remove_headers(self) -> None:
         with PoolManager() as http:
             r = http.request(
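
Taken together, these tests pin down the redirect semantics after the CVE-2025-50181 fix; a short sketch (no request is issued here):

    from urllib3 import PoolManager
    from urllib3.util.retry import Retry

    # A redirect cap configured on the manager is now honoured for every
    # request it makes: following a redirect raises MaxRetryError.
    strict = PoolManager(retries=Retry(redirect=0))

    # retries=False still returns the 3xx response instead of raising,
    # matching the assertions above.
    lenient = PoolManager(retries=False)
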
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/urllib3-2.4.0/test/with_dummyserver/test_socketlevel.py new/urllib3-2.5.0/test/with_dummyserver/test_socketlevel.py
--- old/urllib3-2.4.0/test/with_dummyserver/test_socketlevel.py 2025-04-10 16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/test/with_dummyserver/test_socketlevel.py 2025-06-18 15:55:24.000000000 +0200
@@ -22,6 +22,7 @@
 from test import LONG_TIMEOUT, SHORT_TIMEOUT, notWindows, resolvesLocalhostFQDN
 from threading import Event
 from unittest import mock
+from urllib.parse import urlparse
 
 import pytest
 import trustme
@@ -1289,7 +1290,7 @@
             r = conn.urlopen("GET", url, retries=0)
             assert r.status == 200
 
-    def test_connect_ipv6_addr(self) -> None:
+    def test_connect_ipv6_addr_from_host(self) -> None:
         ipv6_addr = "2001:4998:c:a06::2:4008"
 
         def echo_socket_handler(listener: socket.socket) -> None:
@@ -1329,13 +1330,75 @@
 
         with proxy_from_url(base_url, cert_reqs="NONE") as proxy:
             url = f"https://[{ipv6_addr}]";
+
+            # Try with connection_from_host
+            parsed_request_url = urlparse(url)
+
+            conn = proxy.connection_from_host(
+                scheme=parsed_request_url.scheme.lower(),
+                host=parsed_request_url.hostname,
+                port=parsed_request_url.port,
+            )
+            try:
+                with pytest.warns(InsecureRequestWarning):
+                    r = conn.urlopen("GET", url, retries=0)
+                assert r.status == 200
+            except MaxRetryError:
+                pytest.fail(
+                    "Invalid IPv6 format in HTTP CONNECT request when using 
connection_from_host"
+                )
+
+    def test_connect_ipv6_addr_from_url(self) -> None:
+        ipv6_addr = "2001:4998:c:a06::2:4008"
+
+        def echo_socket_handler(listener: socket.socket) -> None:
+            sock = listener.accept()[0]
+
+            buf = b""
+            while not buf.endswith(b"\r\n\r\n"):
+                buf += sock.recv(65536)
+            s = buf.decode("utf-8")
+
+            if s.startswith(f"CONNECT [{ipv6_addr}]:443"):
+                sock.send(b"HTTP/1.1 200 Connection Established\r\n\r\n")
+                ssl_sock = original_ssl_wrap_socket(
+                    sock,
+                    server_side=True,
+                    keyfile=DEFAULT_CERTS["keyfile"],
+                    certfile=DEFAULT_CERTS["certfile"],
+                )
+                buf = b""
+                while not buf.endswith(b"\r\n\r\n"):
+                    buf += ssl_sock.recv(65536)
+
+                ssl_sock.send(
+                    b"HTTP/1.1 200 OK\r\n"
+                    b"Content-Type: text/plain\r\n"
+                    b"Content-Length: 2\r\n"
+                    b"Connection: close\r\n"
+                    b"\r\n"
+                    b"Hi"
+                )
+                ssl_sock.close()
+            else:
+                sock.close()
+
+        self._start_server(echo_socket_handler)
+        base_url = f"http://{self.host}:{self.port}"
+
+        with proxy_from_url(base_url, cert_reqs="NONE") as proxy:
+            url = f"https://[{ipv6_addr}]"
+
+            # Try with connection_from_url
             conn = proxy.connection_from_url(url)
             try:
                 with pytest.warns(InsecureRequestWarning):
                     r = conn.urlopen("GET", url, retries=0)
                 assert r.status == 200
             except MaxRetryError:
-                pytest.fail("Invalid IPv6 format in HTTP CONNECT request")
+                pytest.fail(
+                    "Invalid IPv6 format in HTTP CONNECT request when using 
connection_from_url"
+                )
 
     @pytest.mark.parametrize("target_scheme", ["http", "https"])
     def test_https_proxymanager_connected_to_http_proxy(
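
The renamed and added tests cover both pool-lookup paths for an IPv6 proxy target; a sketch of the connection_from_host path (proxy URL and address are placeholders, and nothing is sent until urlopen is called):

    from urllib.parse import urlparse

    from urllib3 import proxy_from_url

    proxy = proxy_from_url("http://proxy.example:3128")
    target = "https://[2001:db8::1]"
    parsed = urlparse(target)
    # urlparse strips the brackets from the IPv6 literal; the fix re-adds them
    # so the proxy receives "CONNECT [2001:db8::1]:443" rather than a bare
    # colon-separated address.
    conn = proxy.connection_from_host(
        scheme=parsed.scheme, host=parsed.hostname, port=parsed.port
    )
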
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/urllib3-2.4.0/uv.lock new/urllib3-2.5.0/uv.lock
--- old/urllib3-2.4.0/uv.lock   2025-04-10 16:55:56.000000000 +0200
+++ new/urllib3-2.5.0/uv.lock   2025-06-18 15:55:24.000000000 +0200
@@ -1,4 +1,5 @@
 version = 1
+revision = 1
 requires-python = ">=3.9"
 resolution-markers = [
     "python_full_version >= '3.10'",
@@ -698,15 +699,15 @@
 
 [[package]]
 name = "httpcore"
-version = "1.0.7"
+version = "1.0.8"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "certifi" },
     { name = "h11" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 }
+sdist = { url = "https://files.pythonhosted.org/packages/9f/45/ad3e1b4d448f22c0cff4f5692f5ed0666658578e358b8d58a19846048059/httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad", size = 85385 }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 },
+    { url = "https://files.pythonhosted.org/packages/18/8d/f052b1e336bb2c1fc7ed1aaed898aa570c0b61a09707b108979d9fc6e308/httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be", size = 78732 },
 ]
 
 [[package]]
@@ -1826,6 +1827,7 @@
     { name = "pysocks", marker = "extra == 'socks'", specifier = 
">=1.5.6,!=1.5.7,<2.0" },
     { name = "zstandard", marker = "extra == 'zstd'", specifier = ">=0.18.0" },
 ]
+provides-extras = ["brotli", "h2", "socks", "zstd"]
 
 [package.metadata.requires-dev]
 dev = [
