Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-Scrapy for openSUSE:Factory 
checked in at 2026-05-04 12:54:47
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-Scrapy (Old)
 and      /work/SRC/openSUSE:Factory/.python-Scrapy.new.30200 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-Scrapy"

Mon May  4 12:54:47 2026 rev:28 rq:1350589 version:2.15.2

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-Scrapy/python-Scrapy.changes      2026-04-23 17:13:15.110006862 +0200
+++ /work/SRC/openSUSE:Factory/.python-Scrapy.new.30200/python-Scrapy.changes   2026-05-04 12:58:27.241670835 +0200
@@ -1,0 +2,9 @@
+Sun May  3 20:49:17 UTC 2026 - Dirk Müller <[email protected]>
+
+- update to 2.15.2:
+  * Fixed links in https://docs.scrapy.org/llms.txt
+    (:issue:`7467`)
+  * Reverted the SSL context sharing between multiple connections,
+    introduced in 2.15.0, as it caused problems and wasn't needed
+
+-------------------------------------------------------------------

Old:
----
  scrapy-2.15.0.tar.gz

New:
----
  scrapy-2.15.2.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-Scrapy.spec ++++++
--- /var/tmp/diff_new_pack.um9kTW/_old  2026-05-04 12:58:27.849695859 +0200
+++ /var/tmp/diff_new_pack.um9kTW/_new  2026-05-04 12:58:27.849695859 +0200
@@ -35,7 +35,7 @@
 %define skip_python314 1
 %{?sle15_python_module_pythons}
 Name:           python-Scrapy%{?psuffix}
-Version:        2.15.0
+Version:        2.15.2
 Release:        0
 Summary:        A high-level Python Screen Scraping framework
 License:        BSD-3-Clause

++++++ scrapy-2.15.0.tar.gz -> scrapy-2.15.2.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/PKG-INFO new/scrapy-2.15.2/PKG-INFO
--- old/scrapy-2.15.0/PKG-INFO  2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/PKG-INFO  2020-02-02 01:00:00.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: Scrapy
-Version: 2.15.0
+Version: 2.15.2
 Summary: A high-level Web Crawling and Web Scraping framework
 Project-URL: Homepage, https://scrapy.org/
 Project-URL: Documentation, https://docs.scrapy.org/
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/docs/news.rst 
new/scrapy-2.15.2/docs/news.rst
--- old/scrapy-2.15.0/docs/news.rst     2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/docs/news.rst     2020-02-02 01:00:00.000000000 +0100
@@ -3,6 +3,41 @@
 Release notes
 =============
 
+.. _release-2.15.2:
+
+Scrapy 2.15.2 (2026-04-28)
+--------------------------
+
+Bug fixes
+~~~~~~~~~
+
+-   Fixed links in https://docs.scrapy.org/llms.txt (:issue:`7467`)
+
+.. _release-2.15.1:
+
+Scrapy 2.15.1 (2026-04-23)
+--------------------------
+
+Bug fixes
+~~~~~~~~~
+
+-   Sharing of the SSL context between multiple connections, introduced in
+    Scrapy 2.15.0, is reverted as it caused problems and wasn't actually
+    needed.
+    (:issue:`7445`, :issue:`7450`)
+
+-   Fixed :meth:`scrapy.settings.BaseSettings.getwithbase` failing on keys with
+    dots that aren't import names. It now works the way it worked before Scrapy
+    2.15.0, without trying to match class objects and import paths. A separate
+    method,
+    :func:`~scrapy.settings.BaseSettings.get_component_priority_dict_with_base`,
+    was added that does that, and it is now used for :ref:`component priority
+    dictionaries <component-priority-dictionaries>`.
+    (:issue:`7426`, :issue:`7449`)
+
+-   Documentation rendering improvements.
+    (:issue:`7452`, :issue:`7454`)
+
 .. _release-2.15.0:
 
 Scrapy 2.15.0 (2026-04-09)
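
A minimal sketch of the getwithbase() split described in the 2.15.1 notes
above; the setting names and expected values are taken from the new tests
in tests/test_settings/__init__.py further down this diff:

    from scrapy.settings import BaseSettings

    settings = BaseSettings(
        {
            "FEED_EXPORTERS_BASE": BaseSettings({"json": "foo"}),
            # dotted key that is not an import path
            "FEED_EXPORTERS": BaseSettings({"csv.gz": "bar"}),
        }
    )

    # getwithbase() is again a plain merge of a setting with its _BASE
    # counterpart; it no longer tries to import "csv.gz":
    merged = settings.getwithbase("FEED_EXPORTERS")
    assert dict(merged) == {"json": "foo", "csv.gz": "bar"}

    # The new method resolves keys to import paths for deduplication, so
    # two spellings of the same class collapse (and a warning is logged):
    settings["FOO"] = BaseSettings(
        {"scrapy.http.request.Request": 1, "scrapy.http.Request": 2}
    )
    value = settings.get_component_priority_dict_with_base("FOO")
    assert dict(value) == {"scrapy.http.Request": 2}
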
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/docs/requirements.in 
new/scrapy-2.15.2/docs/requirements.in
--- old/scrapy-2.15.0/docs/requirements.in      2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/docs/requirements.in      2020-02-02 01:00:00.000000000 +0100
@@ -5,4 +5,4 @@
 sphinx-notfound-page
 sphinx-rtd-theme
 sphinx-rtd-dark-mode
-sphinx-scrapy @ git+https://github.com/scrapy/[email protected]
+sphinx-scrapy @ git+https://github.com/scrapy/[email protected]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/docs/requirements.txt 
new/scrapy-2.15.2/docs/requirements.txt
--- old/scrapy-2.15.0/docs/requirements.txt     2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/docs/requirements.txt     2020-02-02 01:00:00.000000000 +0100
@@ -1,5 +1,5 @@
 # This file was autogenerated by uv via the following command:
-#    uv pip compile requirements.in -o requirements.txt
+#    uv pip compile -p 3.13 requirements.in -o requirements.txt
 alabaster==1.0.0
     # via sphinx
 annotated-types==0.7.0
@@ -130,6 +130,7 @@
     # via
     #   -r requirements.in
     #   sphinx-copybutton
+    #   sphinx-last-updated-by-git
     #   sphinx-llms-txt
     #   sphinx-markdown-builder
     #   sphinx-notfound-page
@@ -138,9 +139,11 @@
     #   sphinxcontrib-jquery
 sphinx-copybutton==0.5.2
     # via sphinx-scrapy
+sphinx-last-updated-by-git==0.3.8
+    # via sphinx-sitemap
 sphinx-llms-txt @ git+https://github.com/zytedata/sphinx-llms-txt.git@5e8866cb0cc249aa2017ad9050b3b83a7ca16f69
     # via sphinx-scrapy
-sphinx-markdown-builder @ git+https://github.com/zytedata/sphinx-markdown-builder.git@ac9f8babfe622e4300099ab44b96d9d9228e742e
+sphinx-markdown-builder @ git+https://github.com/zytedata/sphinx-markdown-builder.git@cfe4c0bfd7b4542f7e6b65a58cdf9ec765829940
     # via sphinx-scrapy
 sphinx-notfound-page==1.1.0
     # via -r requirements.in
@@ -150,8 +153,10 @@
     # via
     #   -r requirements.in
     #   sphinx-rtd-dark-mode
-sphinx-scrapy @ git+https://github.com/scrapy/sphinx-scrapy.git@2b5f6c7de64c8317cb771fdeb2e5020d1c9c9dcf
+sphinx-scrapy @ git+https://github.com/scrapy/sphinx-scrapy.git@eef1f8c3ab3b74b6891752b8f4624373345bae26
     # via -r requirements.in
+sphinx-sitemap==2.9.0
+    # via sphinx-scrapy
 sphinxcontrib-applehelp==2.0.0
     # via sphinx
 sphinxcontrib-devhelp==2.0.0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/pyproject.toml 
new/scrapy-2.15.2/pyproject.toml
--- old/scrapy-2.15.0/pyproject.toml    2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/pyproject.toml    2020-02-02 01:00:00.000000000 +0100
@@ -154,7 +154,7 @@
 ignore_missing_imports = true
 
 [tool.bumpversion]
-current_version = "2.15.0"
+current_version = "2.15.2"
 commit = true
 tag = true
 tag_name = "{new_version}"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/scrapy/VERSION 
new/scrapy-2.15.2/scrapy/VERSION
--- old/scrapy-2.15.0/scrapy/VERSION    2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/scrapy/VERSION    2020-02-02 01:00:00.000000000 +0100
@@ -1 +1 @@
-2.15.0
+2.15.2
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/scrapy/commands/check.py 
new/scrapy-2.15.2/scrapy/commands/check.py
--- old/scrapy-2.15.0/scrapy/commands/check.py  2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/scrapy/commands/check.py  2020-02-02 01:00:00.000000000 +0100
@@ -73,7 +73,9 @@
     def run(self, args: list[str], opts: argparse.Namespace) -> None:
         # load contracts
         assert self.settings is not None
-        contracts = build_component_list(self.settings.getwithbase("SPIDER_CONTRACTS"))
+        contracts = build_component_list(
+            self.settings.get_component_priority_dict_with_base("SPIDER_CONTRACTS")
+        )
         conman = ContractsManager(load_object(c) for c in contracts)
         runner = TextTestRunner(verbosity=2 if opts.verbose else 1)
         result = TextTestResult(runner.stream, runner.descriptions, runner.verbosity)
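
The same mechanical swap of getwithbase() for
get_component_priority_dict_with_base() repeats below for the downloader
middleware, spider middleware, extension and item pipeline managers.
build_component_list() itself is unchanged; a hedged sketch of its
semantics (the component paths here are made up):

    from scrapy.utils.conf import build_component_list

    # Priorities sort ascending and a None value disables a component,
    # per Scrapy's component-priority-dictionary semantics.
    assert build_component_list(
        {"myproject.mw.B": 200, "myproject.mw.A": 100, "myproject.mw.C": None}
    ) == ["myproject.mw.A", "myproject.mw.B"]
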
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/scrapy-2.15.0/scrapy/core/downloader/contextfactory.py 
new/scrapy-2.15.2/scrapy/core/downloader/contextfactory.py
--- old/scrapy-2.15.0/scrapy/core/downloader/contextfactory.py  2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/scrapy/core/downloader/contextfactory.py  2020-02-02 01:00:00.000000000 +0100
@@ -78,13 +78,6 @@
             self.tls_ciphers = AcceptableCiphers.fromOpenSSLCipherString(tls_ciphers)
         else:
             self.tls_ciphers = DEFAULT_CIPHERS
-        with _filter_method_warning():
-            self._certificate_options = CertificateOptions(
-                method=self._ssl_method,
-                fixBrokenPeers=True,
-                acceptableCiphers=self.tls_ciphers,
-            )
-        self._ctx = self._get_context()
         self._verify_certificates = verify_certificates
 
     @classmethod
@@ -109,23 +102,38 @@
             **kwargs,
         )
 
+    # should be removed together with ScrapyClientContextFactory
     def getCertificateOptions(self) -> CertificateOptions:  # pragma: no cover
-        return self._certificate_options
+        return self._get_cert_options()
+
+    def _get_cert_options(self) -> CertificateOptions:
+        with _filter_method_warning():
+            return CertificateOptions(
+                method=self._ssl_method,
+                fixBrokenPeers=True,
+                acceptableCiphers=self.tls_ciphers,
+            )
 
     # kept for old-style HTTP/1.0 downloader context twisted calls,
     # e.g. connectSSL()
+    # should be removed together with ScrapyClientContextFactory
     def getContext(self, hostname: Any = None, port: Any = None) -> SSL.Context:
-        return self._ctx
+        return self._get_context()
 
     def _get_context(self) -> SSL.Context:
-        ctx = self._certificate_options.getContext()
+        cert_options = self._get_cert_options()
+        ctx = cert_options.getContext()
         ctx.set_options(0x4)  # OP_LEGACY_SERVER_CONNECT
         return ctx
 
     def creatorForNetloc(self, hostname: bytes, port: int) -> ClientTLSOptions:
         if not self._verify_certificates:
-            return _ScrapyClientTLSOptions(hostname.decode("ascii"), self._ctx)  # type: ignore[no-untyped-call]
-        # Note that this doesn't use self._ctx
+            # _ScrapyClientTLSOptions is needed to skip verification errors
+            return _ScrapyClientTLSOptions(
+                hostname.decode("ascii"), self._get_context()
+            )  # type: ignore[no-untyped-call]
+        # Otherwise use the normal Twisted function.
+        # Note that this doesn't use self._get_context().
         with _filter_method_warning():
             return optionsForClientTLS(
                 hostname=hostname.decode("ascii"),
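
Net effect of the contextfactory.py revert: the CertificateOptions and the
SSL context are no longer built once in __init__() and shared, but built on
demand for each connection. A condensed sketch of the shape of the change
(not the real class; it assumes only pyOpenSSL's SSL.Context):

    from OpenSSL import SSL

    class Shared:  # 2.15.0 behaviour, now reverted
        def __init__(self):
            self._ctx = SSL.Context(SSL.TLS_METHOD)  # built once

        def creatorForNetloc(self, hostname, port):
            return self._ctx  # same object for every connection

    class Fresh:  # 2.15.1+ behaviour
        def creatorForNetloc(self, hostname, port):
            return SSL.Context(SSL.TLS_METHOD)  # fresh context per call

    f = Fresh()
    assert f.creatorForNetloc(b"a.tld", 443) is not f.creatorForNetloc(b"b.tld", 443)
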
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/scrapy/core/downloader/middleware.py 
new/scrapy-2.15.2/scrapy/core/downloader/middleware.py
--- old/scrapy-2.15.0/scrapy/core/downloader/middleware.py      2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/scrapy/core/downloader/middleware.py      2020-02-02 01:00:00.000000000 +0100
@@ -36,7 +36,9 @@
 
     @classmethod
     def _get_mwlist_from_settings(cls, settings: BaseSettings) -> list[Any]:
-        return build_component_list(settings.getwithbase("DOWNLOADER_MIDDLEWARES"))
+        return build_component_list(
+            settings.get_component_priority_dict_with_base("DOWNLOADER_MIDDLEWARES")
+        )
 
     def _add_middleware(self, mw: Any) -> None:
         if hasattr(mw, "process_request"):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/scrapy/core/spidermw.py 
new/scrapy-2.15.2/scrapy/core/spidermw.py
--- old/scrapy-2.15.0/scrapy/core/spidermw.py   2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/scrapy/core/spidermw.py   2020-02-02 01:00:00.000000000 +0100
@@ -56,7 +56,9 @@
 
     @classmethod
     def _get_mwlist_from_settings(cls, settings: BaseSettings) -> list[Any]:
-        return build_component_list(settings.getwithbase("SPIDER_MIDDLEWARES"))
+        return build_component_list(
+            settings.get_component_priority_dict_with_base("SPIDER_MIDDLEWARES")
+        )
 
     def __init__(self, *middlewares: Any, crawler: Crawler | None = None) -> None:
         self._check_deprecated_process_start_requests_use(middlewares)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/scrapy/extension.py 
new/scrapy-2.15.2/scrapy/extension.py
--- old/scrapy-2.15.0/scrapy/extension.py       2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/scrapy/extension.py       2020-02-02 01:00:00.000000000 +0100
@@ -20,4 +20,6 @@
 
     @classmethod
     def _get_mwlist_from_settings(cls, settings: Settings) -> list[Any]:
-        return build_component_list(settings.getwithbase("EXTENSIONS"))
+        return build_component_list(
+            settings.get_component_priority_dict_with_base("EXTENSIONS")
+        )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/scrapy/pipelines/__init__.py 
new/scrapy-2.15.2/scrapy/pipelines/__init__.py
--- old/scrapy-2.15.0/scrapy/pipelines/__init__.py      2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/scrapy/pipelines/__init__.py      2020-02-02 01:00:00.000000000 +0100
@@ -33,7 +33,9 @@
 
     @classmethod
     def _get_mwlist_from_settings(cls, settings: Settings) -> list[Any]:
-        return build_component_list(settings.getwithbase("ITEM_PIPELINES"))
+        return build_component_list(
+            settings.get_component_priority_dict_with_base("ITEM_PIPELINES")
+        )
 
     def _add_middleware(self, pipe: Any) -> None:
         if hasattr(pipe, "open_spider"):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/scrapy/settings/__init__.py 
new/scrapy-2.15.2/scrapy/settings/__init__.py
--- old/scrapy-2.15.0/scrapy/settings/__init__.py       2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/scrapy/settings/__init__.py       2020-02-02 01:00:00.000000000 +0100
@@ -323,14 +323,36 @@
         return copy.deepcopy(value)
 
     def getwithbase(self, name: _SettingsKey) -> BaseSettings:
-        """Get a composition of a dictionary-like setting and its `_BASE`
+        """Get a composition of a dictionary-like setting and its ``_BASE``
         counterpart.
 
+        Use
+        :meth:`~scrapy.settings.BaseSettings.get_component_priority_dict_with_base`
+        instead if the setting is a :ref:`component priority dictionary
+        <component-priority-dictionaries>`.
+
         :param name: name of the dictionary-like setting
         :type name: str
         """
         if not isinstance(name, str):
             raise ValueError(f"Base setting key must be a string, got {name}")
+        compbs = BaseSettings()
+        compbs.update(self[name + "_BASE"])
+        compbs.update(self[name])
+        return compbs
+
+    def get_component_priority_dict_with_base(self, name: _SettingsKey) -> BaseSettings:
+        """Get a composition of a component priority dictionary setting and
+        its ``_BASE`` counterpart.
+
+        Keys are resolved to their import path for deduplication and then
+        restored to their latest input representation.
+
+        :param name: name of the component priority dictionary setting
+        :type name: str
+        """
+        if not isinstance(name, str):
+            raise ValueError(f"Base setting key must be a string, got {name}")
 
         normalized_keys = {}
         obj_keys = set()
@@ -345,10 +367,10 @@
                 f"be kept."
             )
 
-        def normalize_key(key: Any) -> str:
+        def normalize_key(key: Any) -> Any:
             try:
                 loaded_key = load_object(key)
-            except (AttributeError, TypeError, ValueError):
+            except (NameError, TypeError, ValueError):
                 loaded_key = key
             else:
                 import_path = global_object_name(loaded_key)
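
The normalize_key() hunk above swaps AttributeError for NameError because
scrapy.utils.misc.load_object() raises NameError when the module half of a
dotted name imports but the attribute is missing. The new parametrized test
further down pins all three failure modes:

    import pytest

    from scrapy.utils.misc import load_object

    with pytest.raises(TypeError):
        load_object(1)  # not a string (and not callable)
    with pytest.raises(ValueError):
        load_object("foo")  # not a full dotted path
    with pytest.raises(NameError):
        load_object("csv.gz")  # "csv" imports, but defines no "gz"
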
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/scrapy/utils/_deps_compat.py 
new/scrapy-2.15.2/scrapy/utils/_deps_compat.py
--- old/scrapy-2.15.0/scrapy/utils/_deps_compat.py      2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/scrapy/utils/_deps_compat.py      2020-02-02 01:00:00.000000000 +0100
@@ -6,5 +6,7 @@
 TWISTED_FAILURE_HAS_STACK = TWISTED_VERSION < TxVersion("twisted", 24, 10, 0)
 
 PYOPENSSL_VERSION = Version(PYOPENSSL_VERSION_STRING)
-# SSL.Context.use_certificate wants an X509 object, SSL.Context.use_privatekey wants a PKey object
+# SSL.Context.use_certificate() wants an X509 object, SSL.Context.use_privatekey() wants a PKey object
 PYOPENSSL_WANTS_X509_PKEY = PYOPENSSL_VERSION < Version("24.3.0")
+# SSL.Context.set_cipher_list() creates a temporary connection, making the context immutable
+PYOPENSSL_SET_CIPHER_LIST_TMP_CONN = PYOPENSSL_VERSION < Version("25.2.0")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/tests/test_command_check.py 
new/scrapy-2.15.2/tests/test_command_check.py
--- old/scrapy-2.15.0/tests/test_command_check.py       2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/tests/test_command_check.py       2020-02-02 01:00:00.000000000 +0100
@@ -186,7 +186,9 @@
         output = StringIO()
         sys.stdout = output
         cmd = Command()
-        cmd.settings = Mock(getwithbase=Mock(return_value={}))
+        cmd.settings = Mock(
+            get_component_priority_dict_with_base=Mock(return_value={}),
+        )
         cm_cls_mock.return_value = cm_mock = Mock()
         spider_loader_mock = Mock()
         cmd.crawler_process = Mock(spider_loader=spider_loader_mock)
@@ -211,7 +213,9 @@
         self, cm_cls_mock
     ) -> None:
         cmd = Command()
-        cmd.settings = Mock(getwithbase=Mock(return_value={}))
+        cmd.settings = Mock(
+            get_component_priority_dict_with_base=Mock(return_value={}),
+        )
         cm_cls_mock.return_value = cm_mock = Mock()
         spider_loader_mock = Mock()
         cmd.crawler_process = Mock(spider_loader=spider_loader_mock)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/tests/test_core_downloader.py 
new/scrapy-2.15.2/tests/test_core_downloader.py
--- old/scrapy-2.15.0/tests/test_core_downloader.py     2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/tests/test_core_downloader.py     2020-02-02 01:00:00.000000000 +0100
@@ -17,6 +17,7 @@
 )
 from scrapy.core.downloader.handlers.http11 import _RequestBodyProducer
 from scrapy.exceptions import ScrapyDeprecationWarning
+from scrapy.utils._deps_compat import PYOPENSSL_SET_CIPHER_LIST_TMP_CONN
 from scrapy.utils.defer import maybe_deferred_to_future
 from scrapy.utils.misc import build_from_crawler
 from scrapy.utils.python import to_bytes
@@ -98,7 +99,7 @@
 
 class TestContextFactory(TestContextFactoryBase):
     @coroutine_test
-    async def testPayload(self, server_url: str) -> None:
+    async def test_payload(self, server_url: str) -> None:
         s = "0123456789" * 10
         crawler = get_crawler()
         client_context_factory = _load_context_factory_from_settings(crawler)
@@ -107,6 +108,41 @@
         )
         assert body == to_bytes(s)
 
+    def test_no_context_sharing(self) -> None:
+        """Every call to creatorForNetloc() should give a fresh context."""
+        crawler = get_crawler()
+        client_context_factory: _ScrapyClientContextFactory = (
+            _load_context_factory_from_settings(crawler)
+        )
+        creator1 = client_context_factory.creatorForNetloc(b"website1.tld", 443)
+        assert creator1._hostnameBytes == b"website1.tld"
+        creator2 = client_context_factory.creatorForNetloc(b"website2.tld", 443)
+        assert creator2._hostnameBytes == b"website2.tld"
+        assert creator1._ctx is not creator2._ctx
+
+    @pytest.mark.skipif(
+        PYOPENSSL_SET_CIPHER_LIST_TMP_CONN,
+        reason="Fails or doesn't make sense on this pyOpenSSL version",
+    )
+    def test_no_immutable_ctx_warning(self) -> None:
+        """There should be no pyOpenSSL context modification warning.
+
+        pyOpenSSL < 25.1.0 doesn't produce this warning, and on 25.1.0 it's
+        always produced due to
+        https://github.com/scrapy/scrapy/issues/6859#issuecomment-4294917851.
+        """
+        crawler = get_crawler()
+        client_context_factory: _ScrapyClientContextFactory = (
+            _load_context_factory_from_settings(crawler)
+        )
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                "error",
+                category=DeprecationWarning,
+                message="Attempting to mutate a Context after a Connection was created",
+            )
+            client_context_factory.creatorForNetloc(b"website.tld", 443)
+
 
 class TestContextFactoryTLSMethod(TestContextFactoryBase):
     async def _assert_factory_works(
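
The catch_warnings()/filterwarnings("error", ...) idiom in
test_no_immutable_ctx_warning above escalates one specific
DeprecationWarning into an exception while leaving all other warnings
alone. Its generic form (do_request() is a hypothetical stand-in for the
code under test):

    import warnings

    with warnings.catch_warnings():
        warnings.filterwarnings(
            "error",
            category=DeprecationWarning,
            # a regex, matched against the start of the warning message
            message="Attempting to mutate a Context",
        )
        do_request()  # raises DeprecationWarning as an error if emitted
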
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/tests/test_settings/__init__.py 
new/scrapy-2.15.2/tests/test_settings/__init__.py
--- old/scrapy-2.15.0/tests/test_settings/__init__.py   2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/tests/test_settings/__init__.py   2020-02-02 01:00:00.000000000 +0100
@@ -410,7 +410,45 @@
         assert frozencopy.frozen
         assert frozencopy is not self.settings
 
-    def test_getwithbase_override_none_by_type(self):
+    def test_getwithbase_for_dotted_keys(self):
+        settings = BaseSettings(
+            {
+                "FEED_EXPORTERS_BASE": BaseSettings({"json": "foo"}),
+                "FEED_EXPORTERS": BaseSettings({"csv.gz": "bar"}),
+            }
+        )
+        value = settings.getwithbase("FEED_EXPORTERS")
+        assert isinstance(value, BaseSettings)
+        assert dict(value) == {
+            "json": "foo",
+            "csv.gz": "bar",
+        }
+
+    @pytest.mark.parametrize(
+        ("key", "exception"),
+        [
+            pytest.param(1, TypeError, id="type-error"),
+            pytest.param("foo", ValueError, id="value-error"),
+            pytest.param("csv.gz", NameError, id="name-error"),
+        ],
+    )
+    def test_get_component_priority_dict_with_base_handles_load_object_exceptions(
+        self, key, exception
+    ):
+        with pytest.raises(exception):
+            load_object(key)
+
+        settings = BaseSettings(
+            {
+                "FOO": BaseSettings({key: 1}),
+            }
+        )
+        value = settings.get_component_priority_dict_with_base("FOO")
+
+        assert isinstance(value, BaseSettings)
+        assert dict(value) == {key: 1}
+
+    def test_get_component_priority_dict_with_base_override_none_by_type(self):
         settings = BaseSettings()
         setting_names = set()
         for k, v in scrapy_default_settings.__dict__.items():
@@ -426,10 +464,10 @@
                 load_object(import_path): None for import_path in v
             }
         for setting_name in setting_names:
-            value = settings.getwithbase(setting_name)
+            value = settings.get_component_priority_dict_with_base(setting_name)
             assert not dict(value)
 
-    def test_getwithbase_override_value_by_type(self):
+    def test_get_component_priority_dict_with_base_override_value_by_type(self):
         settings = BaseSettings()
         setting_names = set()
         value = 0
@@ -446,7 +484,10 @@
                 load_object(import_path): value for import_path in v
             }
         for setting_name in setting_names:
-            assert settings.getwithbase(setting_name) == settings[setting_name]
+            assert (
+                settings.get_component_priority_dict_with_base(setting_name)
+                == settings[setting_name]
+            )
 
     def test_getwithbase_for_non_component_priority_dicts(self):
         settings = BaseSettings()
@@ -465,7 +506,9 @@
             assert isinstance(value, BaseSettings)
             assert dict(value) == expected
 
-    def test_getwithbase_warns_on_duplicate_import_paths(self, caplog):
+    def test_get_component_priority_dict_with_base_warns_on_duplicate_import_paths(
+        self, caplog
+    ):
         settings = BaseSettings()
         settings["FOO"] = BaseSettings(
             {
@@ -474,20 +517,22 @@
             }
         )
         with caplog.at_level(logging.WARNING):
-            value = settings.getwithbase("FOO")
+            value = settings.get_component_priority_dict_with_base("FOO")
         assert isinstance(value, BaseSettings)
         assert dict(value) == {"scrapy.http.Request": 2}
         assert caplog.records, "Expected a warning to be logged"
         msg = caplog.records[0].message
         assert "scrapy.http.request.Request" in msg
 
-    def test_getwithbase_warns_on_duplicate_mixed_type_and_path(self, caplog):
+    def test_get_component_priority_dict_with_base_warns_on_duplicate_mixed_type_and_path(
+        self, caplog
+    ):
         settings = BaseSettings()
         settings["FOO"] = BaseSettings(
             {Component1: 1, "tests.test_settings.Component1": 2}
         )
         with caplog.at_level(logging.WARNING):
-            value = settings.getwithbase("FOO")
+            value = settings.get_component_priority_dict_with_base("FOO")
         assert isinstance(value, BaseSettings)
         assert dict(value) == {"tests.test_settings.Component1": 2}
         assert caplog.records, "Expected a warning to be logged"
@@ -501,6 +546,13 @@
         ):
             settings.getwithbase(123)
 
+    def test_get_component_priority_dict_with_base_invalid_setting_name(self):
+        settings = BaseSettings()
+        with pytest.raises(
+            ValueError, match="Base setting key must be a string, got 123"
+        ):
+            settings.get_component_priority_dict_with_base(123)
+
 
 class TestSettings:
     def setup_method(self):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/scrapy-2.15.0/tox.ini new/scrapy-2.15.2/tox.ini
--- old/scrapy-2.15.0/tox.ini   2020-02-02 01:00:00.000000000 +0100
+++ new/scrapy-2.15.2/tox.ini   2020-02-02 01:00:00.000000000 +0100
@@ -5,7 +5,7 @@
 
 [tox]
 requires =
-    sphinx-scrapy @ git+https://github.com/scrapy/[email protected]
+    sphinx-scrapy[tox] @ git+https://github.com/scrapy/[email protected]
 envlist = pre-commit,pylint,typing,py,docs
 minversion = 1.7.0
 
