Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-asgiref for openSUSE:Factory 
checked in at 2024-04-09 16:46:23
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-asgiref (Old)
 and      /work/SRC/openSUSE:Factory/.python-asgiref.new.29460 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-asgiref"

Tue Apr  9 16:46:23 2024 rev:11 rq:1165940 version:3.8.1

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-asgiref/python-asgiref.changes    
2023-11-30 22:00:40.927647124 +0100
+++ /work/SRC/openSUSE:Factory/.python-asgiref.new.29460/python-asgiref.changes 
2024-04-09 16:46:35.802677633 +0200
@@ -1,0 +2,14 @@
+Sun Apr  7 08:43:16 UTC 2024 - Dirk Müller <dmuel...@suse.com>
+
+- update to 3.8.1:
+  * Fixes a regression in 3.8.0 affecting nested task cancellation
+    inside sync_to_async.
+  * Adds support for Python 3.12.
+  * Drops support for (end-of-life) Python 3.7.
+  * Fixes task cancellation propagation to subtasks when using
+    synchronous Django middleware.
+  * Allows nesting ``sync_to_async`` via ``asyncio.wait_for``.
+  * Corrects WSGI adapter handling of root path.
+  * Handles case where ``"client"`` is ``None`` in WsgiToAsgi adapter.
+
+-------------------------------------------------------------------

Old:
----
  asgiref-3.7.2.tar.gz

New:
----
  asgiref-3.8.1.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-asgiref.spec ++++++
--- /var/tmp/diff_new_pack.rcrUpL/_old  2024-04-09 16:46:37.278731919 +0200
+++ /var/tmp/diff_new_pack.rcrUpL/_new  2024-04-09 16:46:37.302732802 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package python-asgiref
 #
-# Copyright (c) 2023 SUSE LLC
+# Copyright (c) 2024 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -18,7 +18,7 @@
 
 %{?sle15_python_module_pythons}
 Name:           python-asgiref
-Version:        3.7.2
+Version:        3.8.1
 Release:        0
 Summary:        ASGI specs, helper code, and adapters
 License:        BSD-3-Clause

++++++ asgiref-3.7.2.tar.gz -> asgiref-3.8.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/MANIFEST.in 
new/asgiref-3.8.1/MANIFEST.in
--- old/asgiref-3.7.2/MANIFEST.in       2023-05-23 18:35:51.000000000 +0200
+++ new/asgiref-3.8.1/MANIFEST.in       2024-03-21 14:33:51.000000000 +0100
@@ -1,3 +1,4 @@
 include LICENSE
 include asgiref/py.typed
+include tox.ini
 recursive-include tests *.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/PKG-INFO new/asgiref-3.8.1/PKG-INFO
--- old/asgiref-3.7.2/PKG-INFO  2023-05-27 19:21:19.747680700 +0200
+++ new/asgiref-3.8.1/PKG-INFO  2024-03-22 15:39:12.874578500 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: asgiref
-Version: 3.7.2
+Version: 3.8.1
 Summary: ASGI specs, helper code, and adapters
 Home-page: https://github.com/django/asgiref/
 Author: Django Software Foundation
@@ -17,21 +17,25 @@
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: Internet :: WWW/HTTP
-Requires-Python: >=3.7
-Provides-Extra: tests
+Requires-Python: >=3.8
 License-File: LICENSE
+Requires-Dist: typing_extensions>=4; python_version < "3.11"
+Provides-Extra: tests
+Requires-Dist: pytest; extra == "tests"
+Requires-Dist: pytest-asyncio; extra == "tests"
+Requires-Dist: mypy>=0.800; extra == "tests"
 
 asgiref
 =======
 
-.. image:: https://api.travis-ci.org/django/asgiref.svg
-    :target: https://travis-ci.org/django/asgiref
+.. image:: 
https://github.com/django/asgiref/actions/workflows/tests.yml/badge.svg
+    :target: https://github.com/django/asgiref/actions/workflows/tests.yml
 
 .. image:: https://img.shields.io/pypi/v/asgiref.svg
     :target: https://pypi.python.org/pypi/asgiref
@@ -125,7 +129,7 @@
 Dependencies
 ------------
 
-``asgiref`` requires Python 3.7 or higher.
+``asgiref`` requires Python 3.8 or higher.
 
 
 Contributing
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/README.rst new/asgiref-3.8.1/README.rst
--- old/asgiref-3.7.2/README.rst        2023-05-23 18:35:51.000000000 +0200
+++ new/asgiref-3.8.1/README.rst        2024-03-21 14:08:55.000000000 +0100
@@ -1,8 +1,8 @@
 asgiref
 =======
 
-.. image:: https://api.travis-ci.org/django/asgiref.svg
-    :target: https://travis-ci.org/django/asgiref
+.. image:: 
https://github.com/django/asgiref/actions/workflows/tests.yml/badge.svg
+    :target: https://github.com/django/asgiref/actions/workflows/tests.yml
 
 .. image:: https://img.shields.io/pypi/v/asgiref.svg
     :target: https://pypi.python.org/pypi/asgiref
@@ -96,7 +96,7 @@
 Dependencies
 ------------
 
-``asgiref`` requires Python 3.7 or higher.
+``asgiref`` requires Python 3.8 or higher.
 
 
 Contributing
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/asgiref/__init__.py 
new/asgiref-3.8.1/asgiref/__init__.py
--- old/asgiref-3.7.2/asgiref/__init__.py       2023-05-27 19:20:44.000000000 
+0200
+++ new/asgiref-3.8.1/asgiref/__init__.py       2024-03-22 15:26:51.000000000 
+0100
@@ -1 +1 @@
-__version__ = "3.7.2"
+__version__ = "3.8.1"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/asgiref/current_thread_executor.py 
new/asgiref-3.8.1/asgiref/current_thread_executor.py
--- old/asgiref-3.7.2/asgiref/current_thread_executor.py        2023-05-23 
18:35:51.000000000 +0200
+++ new/asgiref-3.8.1/asgiref/current_thread_executor.py        2024-03-21 
14:08:55.000000000 +0100
@@ -106,9 +106,9 @@
 
     # Python 3.9+ has a new signature for submit with a "/" after `fn`, to 
enforce
     # it to be a positional argument. If we ignore[override] mypy on 3.9+ will 
be
-    # happy but 3.7/3.8 will say that the ignore comment is unused, even when
+    # happy but 3.8 will say that the ignore comment is unused, even when
     # defining them differently based on sys.version_info.
-    # We should be able to remove this when we drop support for 3.7/3.8.
+    # We should be able to remove this when we drop support for 3.8.
     if not TYPE_CHECKING:
 
         def submit(self, fn, *args, **kwargs):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/asgiref/local.py 
new/asgiref-3.8.1/asgiref/local.py
--- old/asgiref-3.7.2/asgiref/local.py  2023-05-23 18:35:51.000000000 +0200
+++ new/asgiref-3.8.1/asgiref/local.py  2024-03-21 14:08:55.000000000 +0100
@@ -1,120 +1,128 @@
-import random
-import string
-import sys
+import asyncio
+import contextlib
+import contextvars
 import threading
-import weakref
+from typing import Any, Dict, Union
+
+
+class _CVar:
+    """Storage utility for Local."""
+
+    def __init__(self) -> None:
+        self._data: "contextvars.ContextVar[Dict[str, Any]]" = 
contextvars.ContextVar(
+            "asgiref.local"
+        )
+
+    def __getattr__(self, key):
+        storage_object = self._data.get({})
+        try:
+            return storage_object[key]
+        except KeyError:
+            raise AttributeError(f"{self!r} object has no attribute {key!r}")
+
+    def __setattr__(self, key: str, value: Any) -> None:
+        if key == "_data":
+            return super().__setattr__(key, value)
+
+        storage_object = self._data.get({})
+        storage_object[key] = value
+        self._data.set(storage_object)
+
+    def __delattr__(self, key: str) -> None:
+        storage_object = self._data.get({})
+        if key in storage_object:
+            del storage_object[key]
+            self._data.set(storage_object)
+        else:
+            raise AttributeError(f"{self!r} object has no attribute {key!r}")
 
 
 class Local:
-    """
-    A drop-in replacement for threading.locals that also works with asyncio
-    Tasks (via the current_task asyncio method), and passes locals through
-    sync_to_async and async_to_sync.
-
-    Specifically:
-     - Locals work per-coroutine on any thread not spawned using asgiref
-     - Locals work per-thread on any thread not spawned using asgiref
-     - Locals are shared with the parent coroutine when using sync_to_async
-     - Locals are shared with the parent thread when using async_to_sync
-       (and if that thread was launched using sync_to_async, with its parent
-       coroutine as well, with this working for indefinite levels of nesting)
-
-    Set thread_critical to True to not allow locals to pass from an async Task
-    to a thread it spawns. This is needed for code that truly needs
-    thread-safety, as opposed to things used for helpful context (e.g. sqlite
-    does not like being called from a different thread to the one it is from).
-    Thread-critical code will still be differentiated per-Task within a thread
-    as it is expected it does not like concurrent access.
+    """Local storage for async tasks.
 
-    This doesn't use contextvars as it needs to support 3.6. Once it can 
support
-    3.7 only, we can then reimplement the storage more nicely.
+    This is a namespace object (similar to `threading.local`) where data is
+    also local to the current async task (if there is one).
+
+    In async threads, local means in the same sense as the `contextvars`
+    module - i.e. a value set in an async frame will be visible:
+
+    - to other async code `await`-ed from this frame.
+    - to tasks spawned using `asyncio` utilities (`create_task`, `wait_for`,
+      `gather` and probably others).
+    - to code scheduled in a sync thread using `sync_to_async`
+
+    In "sync" threads (a thread with no async event loop running), the
+    data is thread-local, but additionally shared with async code executed
+    via the `async_to_sync` utility, which schedules async code in a new thread
+    and copies context across to that thread.
+
+    If `thread_critical` is True, then the local will only be visible 
per-thread,
+    behaving exactly like `threading.local` if the thread is sync, and as
+    `contextvars` if the thread is async. This allows genuinely 
thread-sensitive
+code (such as DB handles) to be kept strictly to their initial thread and
+    disable the sharing across `sync_to_async` and `async_to_sync` wrapped 
calls.
+
+    Unlike plain `contextvars` objects, this utility is threadsafe.
     """
 
     def __init__(self, thread_critical: bool = False) -> None:
         self._thread_critical = thread_critical
         self._thread_lock = threading.RLock()
-        self._context_refs: "weakref.WeakSet[object]" = weakref.WeakSet()
-        # Random suffixes stop accidental reuse between different Locals,
-        # though we try to force deletion as well.
-        self._attr_name = "_asgiref_local_impl_{}_{}".format(
-            id(self),
-            "".join(random.choice(string.ascii_letters) for i in range(8)),
-        )
 
-    def _get_context_id(self):
-        """
-        Get the ID we should use for looking up variables
-        """
-        # Prevent a circular reference
-        from .sync import AsyncToSync, SyncToAsync
-
-        # First, pull the current task if we can
-        context_id = SyncToAsync.get_current_task()
-        context_is_async = True
-        # OK, let's try for a thread ID
-        if context_id is None:
-            context_id = threading.current_thread()
-            context_is_async = False
-        # If we're thread-critical, we stop here, as we can't share contexts.
+        self._storage: "Union[threading.local, _CVar]"
+
+        if thread_critical:
+            # Thread-local storage
+            self._storage = threading.local()
+        else:
+            # Contextvar storage
+            self._storage = _CVar()
+
+    @contextlib.contextmanager
+    def _lock_storage(self):
+        # Thread safe access to storage
         if self._thread_critical:
-            return context_id
-        # Now, take those and see if we can resolve them through the launch 
maps
-        for i in range(sys.getrecursionlimit()):
             try:
-                if context_is_async:
-                    # Tasks have a source thread in AsyncToSync
-                    context_id = AsyncToSync.launch_map[context_id]
-                    context_is_async = False
-                else:
-                    # Threads have a source task in SyncToAsync
-                    context_id = SyncToAsync.launch_map[context_id]
-                    context_is_async = True
-            except KeyError:
-                break
+                # This is a test for whether we are in an async or sync
+                # thread - will raise RuntimeError if there is
+                # no current loop
+                asyncio.get_running_loop()
+            except RuntimeError:
+                # We are in a sync thread, the storage is
+                # just the plain thread local (i.e, "global within
+                # this thread" - it doesn't matter where you are
+                # in a call stack you see the same storage)
+                yield self._storage
+            else:
+                # We are in an async thread - storage is still
+                # local to this thread, but additionally should
+                # behave like a context var (is only visible with
+                # the same async call stack)
+
+                # Ensure context exists in the current thread
+                if not hasattr(self._storage, "cvar"):
+                    self._storage.cvar = _CVar()
+
+                # self._storage is a thread local, so the members
+                # can't be accessed in another thread (we don't
+                # need any locks)
+                yield self._storage.cvar
         else:
-            # Catch infinite loops (they happen if you are screwing around
-            # with AsyncToSync implementations)
-            raise RuntimeError("Infinite launch_map loops")
-        return context_id
-
-    def _get_storage(self):
-        context_obj = self._get_context_id()
-        if not hasattr(context_obj, self._attr_name):
-            setattr(context_obj, self._attr_name, {})
-            self._context_refs.add(context_obj)
-        return getattr(context_obj, self._attr_name)
-
-    def __del__(self):
-        try:
-            for context_obj in self._context_refs:
-                try:
-                    delattr(context_obj, self._attr_name)
-                except AttributeError:
-                    pass
-        except TypeError:
-            # WeakSet.__iter__ can crash when interpreter is shutting down due
-            # to _IterationGuard being None.
-            pass
+            # Lock for thread_critical=False as other threads
+            # can access the exact same storage object
+            with self._thread_lock:
+                yield self._storage
 
     def __getattr__(self, key):
-        with self._thread_lock:
-            storage = self._get_storage()
-            if key in storage:
-                return storage[key]
-            else:
-                raise AttributeError(f"{self!r} object has no attribute 
{key!r}")
+        with self._lock_storage() as storage:
+            return getattr(storage, key)
 
     def __setattr__(self, key, value):
-        if key in ("_context_refs", "_thread_critical", "_thread_lock", 
"_attr_name"):
+        if key in ("_local", "_storage", "_thread_critical", "_thread_lock"):
             return super().__setattr__(key, value)
-        with self._thread_lock:
-            storage = self._get_storage()
-            storage[key] = value
+        with self._lock_storage() as storage:
+            setattr(storage, key, value)
 
     def __delattr__(self, key):
-        with self._thread_lock:
-            storage = self._get_storage()
-            if key in storage:
-                del storage[key]
-            else:
-                raise AttributeError(f"{self!r} object has no attribute 
{key!r}")
+        with self._lock_storage() as storage:
+            delattr(storage, key)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/asgiref/sync.py 
new/asgiref-3.8.1/asgiref/sync.py
--- old/asgiref-3.7.2/asgiref/sync.py   2023-05-27 19:19:57.000000000 +0200
+++ new/asgiref-3.8.1/asgiref/sync.py   2024-03-22 15:15:14.000000000 +0100
@@ -57,7 +57,6 @@
 # inspect.iscoroutinefunction(), whilst also removing the _is_coroutine marker.
 # The latter is replaced with the inspect.markcoroutinefunction decorator.
 # Until 3.12 is the minimum supported Python version, provide a shim.
-# Django 4.0 only supports 3.8+, so don't concern with the _or_partial 
backport.
 
 if hasattr(inspect, "markcoroutinefunction"):
     iscoroutinefunction = inspect.iscoroutinefunction
@@ -70,22 +69,6 @@
         return func
 
 
-if sys.version_info >= (3, 8):
-    _iscoroutinefunction_or_partial = iscoroutinefunction
-else:
-
-    def _iscoroutinefunction_or_partial(func: Any) -> bool:
-        # Python < 3.8 does not correctly determine partially wrapped
-        # coroutine functions are coroutine functions, hence the need for
-        # this to exist. Code taken from CPython.
-        while inspect.ismethod(func):
-            func = func.__func__
-        while isinstance(func, functools.partial):
-            func = func.func
-
-        return iscoroutinefunction(func)
-
-
 class ThreadSensitiveContext:
     """Async context manager to manage context for thread sensitive mode
 
@@ -93,8 +76,8 @@
     thread sensitive mode. By default, a single thread pool executor is shared
     within a process.
 
-    In Python 3.7+, the ThreadSensitiveContext() context manager may be used to
-    specify a thread pool per context.
+    The ThreadSensitiveContext() context manager may be used to specify a
+    thread pool per context.
 
     This context manager is re-entrant, so only the outer-most call to
     ThreadSensitiveContext will set the context.
@@ -140,12 +123,9 @@
     finally exiting once the async task returns.
     """
 
-    # Maps launched Tasks to the threads that launched them (for locals impl)
-    launch_map: "Dict[asyncio.Task[object], threading.Thread]" = {}
-
-    # Keeps track of which CurrentThreadExecutor to use. This uses an asgiref
-    # Local, not a threadlocal, so that tasks can work out what their parent 
used.
-    executors = Local()
+    # Keeps a reference to the CurrentThreadExecutor in local context, so that
+    # any sync_to_async inside the wrapped code can find it.
+    executors: "Local" = Local()
 
     # When we can't find a CurrentThreadExecutor from the context, such as
     # inside create_task, we'll look it up here from the running event loop.
@@ -160,10 +140,8 @@
         force_new_loop: bool = False,
     ):
         if not callable(awaitable) or (
-            not _iscoroutinefunction_or_partial(awaitable)
-            and not _iscoroutinefunction_or_partial(
-                getattr(awaitable, "__call__", awaitable)
-            )
+            not iscoroutinefunction(awaitable)
+            and not iscoroutinefunction(getattr(awaitable, "__call__", 
awaitable))
         ):
             # Python does not have very reliable detection of async functions
             # (lots of false negatives) so this is just a warning.
@@ -175,30 +153,30 @@
             self.__self__ = self.awaitable.__self__  # type: ignore[union-attr]
         except AttributeError:
             pass
-        if force_new_loop:
-            # They have asked that we always run in a new sub-loop.
-            self.main_event_loop = None
-        else:
-            try:
-                self.main_event_loop = asyncio.get_running_loop()
-            except RuntimeError:
-                # There's no event loop in this thread. Look for the 
threadlocal if
-                # we're inside SyncToAsync
-                main_event_loop_pid = getattr(
-                    SyncToAsync.threadlocal, "main_event_loop_pid", None
-                )
-                # We make sure the parent loop is from the same process - if
-                # they've forked, this is not going to be valid any more (#194)
-                if main_event_loop_pid and main_event_loop_pid == os.getpid():
-                    self.main_event_loop = getattr(
-                        SyncToAsync.threadlocal, "main_event_loop", None
-                    )
-                else:
-                    self.main_event_loop = None
+        self.force_new_loop = force_new_loop
+        self.main_event_loop = None
+        try:
+            self.main_event_loop = asyncio.get_running_loop()
+        except RuntimeError:
+            # There's no event loop in this thread.
+            pass
 
     def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R:
         __traceback_hide__ = True  # noqa: F841
 
+        if not self.force_new_loop and not self.main_event_loop:
+            # There's no event loop in this thread. Look for the threadlocal if
+            # we're inside SyncToAsync
+            main_event_loop_pid = getattr(
+                SyncToAsync.threadlocal, "main_event_loop_pid", None
+            )
+            # We make sure the parent loop is from the same process - if
+            # they've forked, this is not going to be valid any more (#194)
+            if main_event_loop_pid and main_event_loop_pid == os.getpid():
+                self.main_event_loop = getattr(
+                    SyncToAsync.threadlocal, "main_event_loop", None
+                )
+
         # You can't call AsyncToSync from a thread with a running event loop
         try:
             event_loop = asyncio.get_running_loop()
@@ -211,23 +189,24 @@
                     "just await the async function directly."
                 )
 
-        # Wrapping context in list so it can be reassigned from within
-        # `main_wrap`.
-        context = [contextvars.copy_context()]
-
         # Make a future for the return information
         call_result: "Future[_R]" = Future()
-        # Get the source thread
-        source_thread = threading.current_thread()
+
         # Make a CurrentThreadExecutor we'll use to idle in this thread - we
         # need one for every sync frame, even if there's one above us in the
         # same thread.
-        if hasattr(self.executors, "current"):
-            old_current_executor = self.executors.current
-        else:
-            old_current_executor = None
+        old_executor = getattr(self.executors, "current", None)
         current_executor = CurrentThreadExecutor()
         self.executors.current = current_executor
+
+        # Wrapping context in list so it can be reassigned from within
+        # `main_wrap`.
+        context = [contextvars.copy_context()]
+
+        # Get task context so that parent task knows which task to propagate
+        # an asyncio.CancelledError to.
+        task_context = getattr(SyncToAsync.threadlocal, "task_context", None)
+
         loop = None
         # Use call_soon_threadsafe to schedule a synchronous callback on the
         # main event loop's thread if it's there, otherwise make a new loop
@@ -235,8 +214,8 @@
         try:
             awaitable = self.main_wrap(
                 call_result,
-                source_thread,
                 sys.exc_info(),
+                task_context,
                 context,
                 *args,
                 **kwargs,
@@ -267,11 +246,9 @@
             # Clean up any executor we were running
             if loop is not None:
                 del self.loop_thread_executors[loop]
-            if hasattr(self.executors, "current"):
-                del self.executors.current
-            if old_current_executor:
-                self.executors.current = old_current_executor
             _restore_context(context[0])
+            # Restore old current thread executor state
+            self.executors.current = old_executor
 
         # Wait for results from the future.
         return call_result.result()
@@ -322,8 +299,8 @@
     async def main_wrap(
         self,
         call_result: "Future[_R]",
-        source_thread: threading.Thread,
         exc_info: "OptExcInfo",
+        task_context: "Optional[List[asyncio.Task[Any]]]",
         context: List[contextvars.Context],
         *args: _P.args,
         **kwargs: _P.kwargs,
@@ -338,9 +315,10 @@
         if context is not None:
             _restore_context(context[0])
 
-        current_task = SyncToAsync.get_current_task()
-        assert current_task is not None
-        self.launch_map[current_task] = source_thread
+        current_task = asyncio.current_task()
+        if current_task is not None and task_context is not None:
+            task_context.append(current_task)
+
         try:
             # If we have an exception, run the function inside the except block
             # after raising it so exc_info is correctly populated.
@@ -356,8 +334,8 @@
         else:
             call_result.set_result(result)
         finally:
-            del self.launch_map[current_task]
-
+            if current_task is not None and task_context is not None:
+                task_context.remove(current_task)
             context[0] = contextvars.copy_context()
 
 
@@ -383,9 +361,6 @@
     a TypeError will be raised.
     """
 
-    # Maps launched threads to the coroutines that spawned them
-    launch_map: "Dict[threading.Thread, asyncio.Task[object]]" = {}
-
     # Storage for main event loop references
     threadlocal = threading.local()
 
@@ -418,8 +393,8 @@
     ) -> None:
         if (
             not callable(func)
-            or _iscoroutinefunction_or_partial(func)
-            or _iscoroutinefunction_or_partial(getattr(func, "__call__", func))
+            or iscoroutinefunction(func)
+            or iscoroutinefunction(getattr(func, "__call__", func))
         ):
             raise TypeError("sync_to_async can only be applied to sync 
functions.")
         self.func = func
@@ -440,9 +415,10 @@
 
         # Work out what thread to run the code in
         if self._thread_sensitive:
-            if hasattr(AsyncToSync.executors, "current"):
+            current_thread_executor = getattr(AsyncToSync.executors, 
"current", None)
+            if current_thread_executor:
                 # If we have a parent sync thread above somewhere, use that
-                executor = AsyncToSync.executors.current
+                executor = current_thread_executor
             elif self.thread_sensitive_context.get(None):
                 # If we have a way of retrieving the current context, attempt
                 # to use a per-context thread pool executor
@@ -473,21 +449,40 @@
         context = contextvars.copy_context()
         child = functools.partial(self.func, *args, **kwargs)
         func = context.run
+        task_context: List[asyncio.Task[Any]] = []
 
+        # Run the code in the right thread
+        exec_coro = loop.run_in_executor(
+            executor,
+            functools.partial(
+                self.thread_handler,
+                loop,
+                sys.exc_info(),
+                task_context,
+                func,
+                child,
+            ),
+        )
+        ret: _R
         try:
-            # Run the code in the right thread
-            ret: _R = await loop.run_in_executor(
-                executor,
-                functools.partial(
-                    self.thread_handler,
-                    loop,
-                    self.get_current_task(),
-                    sys.exc_info(),
-                    func,
-                    child,
-                ),
-            )
-
+            ret = await asyncio.shield(exec_coro)
+        except asyncio.CancelledError:
+            cancel_parent = True
+            try:
+                task = task_context[0]
+                task.cancel()
+                try:
+                    await task
+                    cancel_parent = False
+                except asyncio.CancelledError:
+                    pass
+            except IndexError:
+                pass
+            if exec_coro.done():
+                raise
+            if cancel_parent:
+                exec_coro.cancel()
+            ret = await exec_coro
         finally:
             _restore_context(context)
             self.deadlock_context.set(False)
@@ -503,7 +498,7 @@
         func = functools.partial(self.__call__, parent)
         return functools.update_wrapper(func, self.func)
 
-    def thread_handler(self, loop, source_task, exc_info, func, *args, 
**kwargs):
+    def thread_handler(self, loop, exc_info, task_context, func, *args, 
**kwargs):
         """
         Wraps the sync application with exception handling.
         """
@@ -513,45 +508,18 @@
         # Set the threadlocal for AsyncToSync
         self.threadlocal.main_event_loop = loop
         self.threadlocal.main_event_loop_pid = os.getpid()
-        # Set the task mapping (used for the locals module)
-        current_thread = threading.current_thread()
-        if AsyncToSync.launch_map.get(source_task) == current_thread:
-            # Our parent task was launched from this same thread, so don't make
-            # a launch map entry - let it shortcut over us! (and stop infinite 
loops)
-            parent_set = False
-        else:
-            self.launch_map[current_thread] = source_task
-            parent_set = True
-        source_task = (
-            None  # allow the task to be garbage-collected in case of 
exceptions
-        )
+        self.threadlocal.task_context = task_context
+
         # Run the function
-        try:
-            # If we have an exception, run the function inside the except block
-            # after raising it so exc_info is correctly populated.
-            if exc_info[1]:
-                try:
-                    raise exc_info[1]
-                except BaseException:
-                    return func(*args, **kwargs)
-            else:
+        # If we have an exception, run the function inside the except block
+        # after raising it so exc_info is correctly populated.
+        if exc_info[1]:
+            try:
+                raise exc_info[1]
+            except BaseException:
                 return func(*args, **kwargs)
-        finally:
-            # Only delete the launch_map parent if we set it, otherwise it is
-            # from someone else.
-            if parent_set:
-                del self.launch_map[current_thread]
-
-    @staticmethod
-    def get_current_task() -> Optional["asyncio.Task[Any]"]:
-        """
-        Implementation of asyncio.current_task()
-        that returns None if there is no task.
-        """
-        try:
-            return asyncio.current_task()
-        except RuntimeError:
-            return None
+        else:
+            return func(*args, **kwargs)
 
 
 @overload
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/asgiref/testing.py 
new/asgiref-3.8.1/asgiref/testing.py
--- old/asgiref-3.7.2/asgiref/testing.py        2023-05-23 18:35:51.000000000 
+0200
+++ new/asgiref-3.8.1/asgiref/testing.py        2024-03-21 14:08:55.000000000 
+0100
@@ -1,4 +1,5 @@
 import asyncio
+import contextvars
 import time
 
 from .compatibility import guarantee_single_callable
@@ -16,8 +17,13 @@
         self.scope = scope
         self.input_queue = asyncio.Queue()
         self.output_queue = asyncio.Queue()
-        self.future = asyncio.ensure_future(
-            self.application(scope, self.input_queue.get, 
self.output_queue.put)
+        # Clear context - this ensures that context vars set in the testing 
scope
+        # are not "leaked" into the application which would normally begin with
+        # an empty context. In Python >= 3.11 this could also be written as:
+        # asyncio.create_task(..., context=contextvars.Context())
+        self.future = contextvars.Context().run(
+            asyncio.create_task,
+            self.application(scope, self.input_queue.get, 
self.output_queue.put),
         )
 
     async def wait(self, timeout=1):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/asgiref/typing.py 
new/asgiref-3.8.1/asgiref/typing.py
--- old/asgiref-3.7.2/asgiref/typing.py 2023-05-23 18:35:51.000000000 +0200
+++ new/asgiref-3.8.1/asgiref/typing.py 2024-03-21 14:08:55.000000000 +0100
@@ -5,17 +5,15 @@
     Callable,
     Dict,
     Iterable,
+    Literal,
     Optional,
+    Protocol,
     Tuple,
     Type,
+    TypedDict,
     Union,
 )
 
-if sys.version_info >= (3, 8):
-    from typing import Literal, Protocol, TypedDict
-else:
-    from typing_extensions import Literal, Protocol, TypedDict
-
 if sys.version_info >= (3, 11):
     from typing import NotRequired
 else:
@@ -32,6 +30,7 @@
     "HTTPResponseStartEvent",
     "HTTPResponseBodyEvent",
     "HTTPResponseTrailersEvent",
+    "HTTPResponsePathsendEvent",
     "HTTPServerPushEvent",
     "HTTPDisconnectEvent",
     "WebSocketConnectEvent",
@@ -138,6 +137,11 @@
     more_trailers: bool
 
 
+class HTTPResponsePathsendEvent(TypedDict):
+    type: Literal["http.response.pathsend"]
+    path: str
+
+
 class HTTPServerPushEvent(TypedDict):
     type: Literal["http.response.push"]
     path: str
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/asgiref/wsgi.py 
new/asgiref-3.8.1/asgiref/wsgi.py
--- old/asgiref-3.7.2/asgiref/wsgi.py   2023-05-23 18:35:51.000000000 +0200
+++ new/asgiref-3.8.1/asgiref/wsgi.py   2024-03-21 14:33:45.000000000 +0100
@@ -54,10 +54,14 @@
         """
         Builds a scope and request body into a WSGI environ object.
         """
+        script_name = scope.get("root_path", 
"").encode("utf8").decode("latin1")
+        path_info = scope["path"].encode("utf8").decode("latin1")
+        if path_info.startswith(script_name):
+            path_info = path_info[len(script_name) :]
         environ = {
             "REQUEST_METHOD": scope["method"],
-            "SCRIPT_NAME": scope.get("root_path", 
"").encode("utf8").decode("latin1"),
-            "PATH_INFO": scope["path"].encode("utf8").decode("latin1"),
+            "SCRIPT_NAME": script_name,
+            "PATH_INFO": path_info,
             "QUERY_STRING": scope["query_string"].decode("ascii"),
             "SERVER_PROTOCOL": "HTTP/%s" % scope["http_version"],
             "wsgi.version": (1, 0),
@@ -76,7 +80,7 @@
             environ["SERVER_NAME"] = "localhost"
             environ["SERVER_PORT"] = "80"
 
-        if "client" in scope:
+        if scope.get("client") is not None:
             environ["REMOTE_ADDR"] = scope["client"][0]
 
         # Go through headers and make them into environ entries
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/asgiref.egg-info/PKG-INFO 
new/asgiref-3.8.1/asgiref.egg-info/PKG-INFO
--- old/asgiref-3.7.2/asgiref.egg-info/PKG-INFO 2023-05-27 19:21:19.000000000 
+0200
+++ new/asgiref-3.8.1/asgiref.egg-info/PKG-INFO 2024-03-22 15:39:12.000000000 
+0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: asgiref
-Version: 3.7.2
+Version: 3.8.1
 Summary: ASGI specs, helper code, and adapters
 Home-page: https://github.com/django/asgiref/
 Author: Django Software Foundation
@@ -17,21 +17,25 @@
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: Internet :: WWW/HTTP
-Requires-Python: >=3.7
-Provides-Extra: tests
+Requires-Python: >=3.8
 License-File: LICENSE
+Requires-Dist: typing_extensions>=4; python_version < "3.11"
+Provides-Extra: tests
+Requires-Dist: pytest; extra == "tests"
+Requires-Dist: pytest-asyncio; extra == "tests"
+Requires-Dist: mypy>=0.800; extra == "tests"
 
 asgiref
 =======
 
-.. image:: https://api.travis-ci.org/django/asgiref.svg
-    :target: https://travis-ci.org/django/asgiref
+.. image:: 
https://github.com/django/asgiref/actions/workflows/tests.yml/badge.svg
+    :target: https://github.com/django/asgiref/actions/workflows/tests.yml
 
 .. image:: https://img.shields.io/pypi/v/asgiref.svg
     :target: https://pypi.python.org/pypi/asgiref
@@ -125,7 +129,7 @@
 Dependencies
 ------------
 
-``asgiref`` requires Python 3.7 or higher.
+``asgiref`` requires Python 3.8 or higher.
 
 
 Contributing
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/asgiref.egg-info/SOURCES.txt 
new/asgiref-3.8.1/asgiref.egg-info/SOURCES.txt
--- old/asgiref-3.7.2/asgiref.egg-info/SOURCES.txt      2023-05-27 
19:21:19.000000000 +0200
+++ new/asgiref-3.8.1/asgiref.egg-info/SOURCES.txt      2024-03-22 
15:39:12.000000000 +0100
@@ -3,6 +3,7 @@
 README.rst
 setup.cfg
 setup.py
+tox.ini
 asgiref/__init__.py
 asgiref/compatibility.py
 asgiref/current_thread_executor.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/setup.cfg new/asgiref-3.8.1/setup.cfg
--- old/asgiref-3.7.2/setup.cfg 2023-05-27 19:21:19.747680700 +0200
+++ new/asgiref-3.8.1/setup.cfg 2024-03-22 15:39:12.875497800 +0100
@@ -16,11 +16,11 @@
        Programming Language :: Python
        Programming Language :: Python :: 3
        Programming Language :: Python :: 3 :: Only
-       Programming Language :: Python :: 3.7
        Programming Language :: Python :: 3.8
        Programming Language :: Python :: 3.9
        Programming Language :: Python :: 3.10
        Programming Language :: Python :: 3.11
+       Programming Language :: Python :: 3.12
        Topic :: Internet :: WWW/HTTP
 project_urls = 
        Documentation = https://asgi.readthedocs.io/
@@ -28,7 +28,7 @@
        Changelog = https://github.com/django/asgiref/blob/master/CHANGELOG.txt
 
 [options]
-python_requires = >=3.7
+python_requires = >=3.8
 packages = find:
 include_package_data = true
 install_requires = 
@@ -47,7 +47,7 @@
 
 [flake8]
 exclude = venv/*,tox/*,specs/*
-ignore = E123,E128,E266,E402,W503,E731,W601
+ignore = E123,E128,E266,E402,W503,E731,W601,E203
 max-line-length = 119
 
 [isort]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/setup.py new/asgiref-3.8.1/setup.py
--- old/asgiref-3.7.2/setup.py  2023-05-23 18:35:51.000000000 +0200
+++ new/asgiref-3.8.1/setup.py  2024-03-21 14:08:55.000000000 +0100
@@ -1,3 +1,3 @@
-from setuptools import setup  # type: ignore[import]
+from setuptools import setup  # type: ignore[import-untyped]
 
 setup()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/tests/test_local.py 
new/asgiref-3.8.1/tests/test_local.py
--- old/asgiref-3.7.2/tests/test_local.py       2023-05-23 18:35:51.000000000 
+0200
+++ new/asgiref-3.8.1/tests/test_local.py       2024-03-21 14:08:55.000000000 
+0100
@@ -1,3 +1,4 @@
+import asyncio
 import gc
 import threading
 
@@ -307,18 +308,33 @@
     assert test_local.counter == 6
 
 
-def test_local_del_swallows_type_error(monkeypatch):
-    test_local = Local()
-
-    blow_up_calls = 0
+def test_thread_critical_local_not_context_dependent_in_sync_thread():
+    # Test function is sync, thread critical local should
+    # be visible everywhere in the sync thread, even if set
+    # from inside a sync_to_async/async_to_sync stack (so
+    # long as it was set in sync code)
+    test_local_tc = Local(thread_critical=True)
+    test_local_not_tc = Local(thread_critical=False)
+    test_thread = threading.current_thread()
+
+    @sync_to_async
+    def inner_sync_function():
+        # sync_to_async should run this code inside the original
+        # sync thread, confirm this here
+        assert test_thread == threading.current_thread()
+        test_local_tc.test_value = "_123_"
+        test_local_not_tc.test_value = "_456_"
 
-    def blow_up(self):
-        nonlocal blow_up_calls
-        blow_up_calls += 1
-        raise TypeError()
-
-    monkeypatch.setattr("weakref.WeakSet.__iter__", blow_up)
+    @async_to_sync
+    async def async_function():
+        await asyncio.create_task(inner_sync_function())
 
-    test_local.__del__()
+    async_function()
 
-    assert blow_up_calls == 1
+    # assert: the inner_sync_function should have set a value
+    # visible here
+    assert test_local_tc.test_value == "_123_"
+    # however, if the local was non-thread-critical, then the
+    # inner value was set inside a new async context, meaning that
+    # we do not see it, as context vars don't propagate up the stack
+    assert not hasattr(test_local_not_tc, "test_value")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/tests/test_sync.py 
new/asgiref-3.8.1/tests/test_sync.py
--- old/asgiref-3.7.2/tests/test_sync.py        2023-05-23 18:35:51.000000000 
+0200
+++ new/asgiref-3.8.1/tests/test_sync.py        2024-03-21 15:29:47.000000000 
+0100
@@ -1,6 +1,7 @@
 import asyncio
 import functools
 import multiprocessing
+import sys
 import threading
 import time
 import warnings
@@ -223,6 +224,58 @@
     assert result["thread"] == threading.current_thread()
 
 
+@pytest.mark.asyncio
+async def test_async_to_sync_to_async_decorator():
+    """
+    Test async_to_sync as a function decorator uses the outer thread
+    when used inside sync_to_async.
+    """
+    result = {}
+
+    # Define async function
+    @async_to_sync
+    async def inner_async_function():
+        result["worked"] = True
+        result["thread"] = threading.current_thread()
+        return 42
+
+    # Define sync function
+    @sync_to_async
+    def sync_function():
+        return inner_async_function()
+
+    # Check it works right
+    number = await sync_function()
+    assert number == 42
+    assert result["worked"]
+    # Make sure that it didn't needlessly make a new async loop
+    assert result["thread"] == threading.current_thread()
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(sys.version_info < (3, 9), reason="requires python3.9")
+async def test_async_to_sync_to_thread_decorator():
+    """
+    Test async_to_sync as a function decorator uses the outer thread
+    when used inside another sync thread.
+    """
+    result = {}
+
+    # Define async function
+    @async_to_sync
+    async def inner_async_function():
+        result["worked"] = True
+        result["thread"] = threading.current_thread()
+        return 42
+
+    # Check it works right
+    number = await asyncio.to_thread(inner_async_function)
+    assert number == 42
+    assert result["worked"]
+    # Make sure that it didn't needlessly make a new async loop
+    assert result["thread"] == threading.current_thread()
+
+
 def test_async_to_sync_fail_non_function():
     """
     async_to_sync raises a TypeError when applied to a non-function.
@@ -721,29 +774,6 @@
     )
 
 
-def test_sync_to_async_deadlock_raises():
-    def db_write():
-        pass
-
-    async def io_task():
-        await sync_to_async(db_write)()
-
-    async def do_io_tasks():
-        t = asyncio.create_task(io_task())
-        await t
-        # await asyncio.gather(io_task()) # Also deadlocks
-        # await io_task() # Works
-
-    def view():
-        async_to_sync(do_io_tasks)()
-
-    async def server_entry():
-        await sync_to_async(view)()
-
-    with pytest.raises(RuntimeError):
-        asyncio.run(server_entry())
-
-
 def test_sync_to_async_deadlock_ignored_with_exception():
     """
     Ensures that throwing an exception from inside a deadlock-protected block
@@ -836,3 +866,311 @@
         raise
     finally:
         await trigger_task
+
+
+@pytest.mark.asyncio
+async def test_sync_to_async_within_create_task():
+    """
+    Test a stack of sync_to_async/async_to_sync/sync_to_async works even when 
last
+    sync_to_async is wrapped in asyncio.wait_for.
+    """
+    main_thread = threading.current_thread()
+    sync_thread = None
+
+    # Hypothetical Django scenario - middleware function is sync and will run
+    # in a new thread created by sync_to_async
+    def sync_middleware():
+        nonlocal sync_thread
+        sync_thread = threading.current_thread()
+        assert sync_thread != main_thread
+        # View is async and wrapped with async_to_sync.
+        async_to_sync(async_view)()
+
+    async def async_view():
+        # Call a sync function using sync_to_async, but asyncio.wait_for it
+        # rather than directly await it.
+        await asyncio.wait_for(sync_to_async(sync_task)(), timeout=1)
+
+    task_executed = False
+
+    def sync_task():
+        nonlocal task_executed, sync_thread
+        assert sync_thread == threading.current_thread()
+        task_executed = True
+
+    async with ThreadSensitiveContext():
+        await sync_to_async(sync_middleware)()
+
+    assert task_executed
+
+
+@pytest.mark.asyncio
+async def test_inner_shield_sync_middleware():
+    """
+    Tests that asyncio.shield is capable of preventing http.disconnect from
+    cancelling a django request task when using sync middleware.
+    """
+
+    # Hypothetical Django scenario - middleware function is sync
+    def sync_middleware():
+        async_to_sync(async_view)()
+
+    task_complete = False
+    task_cancel_caught = False
+
+    # Future that completes when subtask cancellation attempt is caught
+    task_blocker = asyncio.Future()
+
+    async def async_view():
+        """Async view with a task that is shielded from cancellation."""
+        nonlocal task_complete, task_cancel_caught, task_blocker
+        task = asyncio.create_task(async_task())
+        try:
+            await asyncio.shield(task)
+        except asyncio.CancelledError:
+            task_cancel_caught = True
+            task_blocker.set_result(True)
+            await task
+            task_complete = True
+
+    task_executed = False
+
+    # Future that completes after subtask is created
+    task_started_future = asyncio.Future()
+
+    async def async_task():
+        """Async subtask that should not be canceled when parent is 
canceled."""
+        nonlocal task_started_future, task_executed, task_blocker
+        task_started_future.set_result(True)
+        await task_blocker
+        task_executed = True
+
+    task_cancel_propagated = False
+
+    async with ThreadSensitiveContext():
+        task = asyncio.create_task(sync_to_async(sync_middleware)())
+        await task_started_future
+        task.cancel()
+        try:
+            await task
+        except asyncio.CancelledError:
+            task_cancel_propagated = True
+        assert not task_cancel_propagated
+        assert task_cancel_caught
+        assert task_complete
+
+    assert task_executed
+
+
+@pytest.mark.asyncio
+async def test_inner_shield_async_middleware():
+    """
+    Tests that asyncio.shield is capable of preventing http.disconnect from
+    cancelling a django request task when using async middleware.
+    """
+
+    # Hypothetical Django scenario - middleware function is async
+    async def async_middleware():
+        await async_view()
+
+    task_complete = False
+    task_cancel_caught = False
+
+    # Future that completes when subtask cancellation attempt is caught
+    task_blocker = asyncio.Future()
+
+    async def async_view():
+        """Async view with a task that is shielded from cancellation."""
+        nonlocal task_complete, task_cancel_caught, task_blocker
+        task = asyncio.create_task(async_task())
+        try:
+            await asyncio.shield(task)
+        except asyncio.CancelledError:
+            task_cancel_caught = True
+            task_blocker.set_result(True)
+            await task
+            task_complete = True
+
+    task_executed = False
+
+    # Future that completes after subtask is created
+    task_started_future = asyncio.Future()
+
+    async def async_task():
+        """Async subtask that should not be canceled when parent is 
canceled."""
+        nonlocal task_started_future, task_executed, task_blocker
+        task_started_future.set_result(True)
+        await task_blocker
+        task_executed = True
+
+    task_cancel_propagated = False
+
+    async with ThreadSensitiveContext():
+        task = asyncio.create_task(async_middleware())
+        await task_started_future
+        task.cancel()
+        try:
+            await task
+        except asyncio.CancelledError:
+            task_cancel_propagated = True
+        assert not task_cancel_propagated
+        assert task_cancel_caught
+        assert task_complete
+
+    assert task_executed
+
+
+@pytest.mark.asyncio
+async def test_inner_shield_sync_and_async_middleware():
+    """
+    Tests that asyncio.shield is capable of preventing http.disconnect from
+    cancelling a django request task when using sync and middleware chained
+    together.
+    """
+
+    # Hypothetical Django scenario - middleware function is sync
+    def sync_middleware_1():
+        async_to_sync(async_middleware_2)()
+
+    # Hypothetical Django scenario - middleware function is async
+    async def async_middleware_2():
+        await sync_to_async(sync_middleware_3)()
+
+    # Hypothetical Django scenario - middleware function is sync
+    def sync_middleware_3():
+        async_to_sync(async_middleware_4)()
+
+    # Hypothetical Django scenario - middleware function is async
+    async def async_middleware_4():
+        await sync_to_async(sync_middleware_5)()
+
+    # Hypothetical Django scenario - middleware function is sync
+    def sync_middleware_5():
+        async_to_sync(async_view)()
+
+    task_complete = False
+    task_cancel_caught = False
+
+    # Future that completes when subtask cancellation attempt is caught
+    task_blocker = asyncio.Future()
+
+    async def async_view():
+        """Async view with a task that is shielded from cancellation."""
+        nonlocal task_complete, task_cancel_caught, task_blocker
+        task = asyncio.create_task(async_task())
+        try:
+            await asyncio.shield(task)
+        except asyncio.CancelledError:
+            task_cancel_caught = True
+            task_blocker.set_result(True)
+            await task
+            task_complete = True
+
+    task_executed = False
+
+    # Future that completes after subtask is created
+    task_started_future = asyncio.Future()
+
+    async def async_task():
+        """Async subtask that should not be canceled when parent is 
canceled."""
+        nonlocal task_started_future, task_executed, task_blocker
+        task_started_future.set_result(True)
+        await task_blocker
+        task_executed = True
+
+    task_cancel_propagated = False
+
+    async with ThreadSensitiveContext():
+        task = asyncio.create_task(sync_to_async(sync_middleware_1)())
+        await task_started_future
+        task.cancel()
+        try:
+            await task
+        except asyncio.CancelledError:
+            task_cancel_propagated = True
+        assert not task_cancel_propagated
+        assert task_cancel_caught
+        assert task_complete
+
+    assert task_executed
+
+
+@pytest.mark.asyncio
+async def test_inner_shield_sync_and_async_middleware_sync_task():
+    """
+    Tests that asyncio.shield is capable of preventing http.disconnect from
+    cancelling a django request task when using sync and middleware chained
+    together with an async view calling a sync function calling an async task.
+
+    This test ensures that a parent initiated task cancellation will not
+    propagate to a shielded subtask.
+    """
+
+    # Hypothetical Django scenario - middleware function is sync
+    def sync_middleware_1():
+        async_to_sync(async_middleware_2)()
+
+    # Hypothetical Django scenario - middleware function is async
+    async def async_middleware_2():
+        await sync_to_async(sync_middleware_3)()
+
+    # Hypothetical Django scenario - middleware function is sync
+    def sync_middleware_3():
+        async_to_sync(async_middleware_4)()
+
+    # Hypothetical Django scenario - middleware function is async
+    async def async_middleware_4():
+        await sync_to_async(sync_middleware_5)()
+
+    # Hypothetical Django scenario - middleware function is sync
+    def sync_middleware_5():
+        async_to_sync(async_view)()
+
+    task_complete = False
+    task_cancel_caught = False
+
+    # Future that completes when subtask cancellation attempt is caught
+    task_blocker = asyncio.Future()
+
+    async def async_view():
+        """Async view with a task that is shielded from cancellation."""
+        nonlocal task_complete, task_cancel_caught, task_blocker
+        task = asyncio.create_task(sync_to_async(sync_parent)())
+        try:
+            await asyncio.shield(task)
+        except asyncio.CancelledError:
+            task_cancel_caught = True
+            task_blocker.set_result(True)
+            await task
+            task_complete = True
+
+    task_executed = False
+
+    # Future that completes after subtask is created
+    task_started_future = asyncio.Future()
+
+    def sync_parent():
+        async_to_sync(async_task)()
+
+    async def async_task():
+        """Async subtask that should not be canceled when parent is 
canceled."""
+        nonlocal task_started_future, task_executed, task_blocker
+        task_started_future.set_result(True)
+        await task_blocker
+        task_executed = True
+
+    task_cancel_propagated = False
+
+    async with ThreadSensitiveContext():
+        task = asyncio.create_task(sync_to_async(sync_middleware_1)())
+        await task_started_future
+        task.cancel()
+        try:
+            await task
+        except asyncio.CancelledError:
+            task_cancel_propagated = True
+        assert not task_cancel_propagated
+        assert task_cancel_caught
+        assert task_complete
+
+    assert task_executed
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/tests/test_wsgi.py 
new/asgiref-3.8.1/tests/test_wsgi.py
--- old/asgiref-3.7.2/tests/test_wsgi.py        2023-05-23 18:35:51.000000000 
+0200
+++ new/asgiref-3.8.1/tests/test_wsgi.py        2024-03-21 14:33:45.000000000 
+0100
@@ -3,7 +3,7 @@
 import pytest
 
 from asgiref.testing import ApplicationCommunicator
-from asgiref.wsgi import WsgiToAsgi
+from asgiref.wsgi import WsgiToAsgi, WsgiToAsgiInstance
 
 
 @pytest.mark.asyncio
@@ -11,6 +11,7 @@
     """
     Makes sure the WSGI wrapper has basic functionality.
     """
+
     # Define WSGI app
     def wsgi_application(environ, start_response):
         assert environ["HTTP_TEST_HEADER"] == "test value 1,test value 2"
@@ -55,11 +56,32 @@
     assert (await instance.receive_output(1)) == {"type": "http.response.body"}
 
 
+def test_script_name():
+    scope = {
+        "type": "http",
+        "http_version": "1.0",
+        "method": "GET",
+        "root_path": "/base",
+        "path": "/base/foo/",
+        "query_string": b"bar=baz",
+        "headers": [
+            [b"test-header", b"test value 1"],
+            [b"test-header", b"test value 2"],
+        ],
+    }
+    adapter = WsgiToAsgiInstance(None)
+    adapter.scope = scope
+    environ = adapter.build_environ(scope, None)
+    assert environ["SCRIPT_NAME"] == "/base"
+    assert environ["PATH_INFO"] == "/foo/"
+
+
 @pytest.mark.asyncio
 async def test_wsgi_path_encoding():
     """
     Makes sure the WSGI wrapper has basic functionality.
     """
+
     # Define WSGI app
     def wsgi_application(environ, start_response):
         assert environ["SCRIPT_NAME"] == "/中国".encode().decode("latin-1")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/asgiref-3.7.2/tox.ini new/asgiref-3.8.1/tox.ini
--- old/asgiref-3.7.2/tox.ini   1970-01-01 01:00:00.000000000 +0100
+++ new/asgiref-3.8.1/tox.ini   2024-03-21 14:08:55.000000000 +0100
@@ -0,0 +1,20 @@
+[tox]
+envlist =
+    py{38,39,310,311,312}-{test,mypy}
+    qa
+
+[testenv]
+usedevelop = true
+extras = tests
+commands =
+    test: pytest -v {posargs}
+    mypy: mypy . {posargs}
+deps =
+    setuptools
+
+[testenv:qa]
+skip_install = true
+deps =
+    pre-commit
+commands =
+    pre-commit {posargs:run --all-files --show-diff-on-failure}

Reply via email to