Add adaptation for the orbiting reconnaissance scenario

2026-01-08 15:44:38 +08:00
parent 3eba1f962b
commit 10c5bb5a8a
5441 changed files with 40219 additions and 379695 deletions

View File

@@ -4,6 +4,7 @@ from ._core._contextmanagers import AsyncContextManagerMixin as AsyncContextManagerMixin
from ._core._contextmanagers import ContextManagerMixin as ContextManagerMixin
from ._core._eventloop import current_time as current_time
from ._core._eventloop import get_all_backends as get_all_backends
from ._core._eventloop import get_available_backends as get_available_backends
from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class
from ._core._eventloop import run as run
from ._core._eventloop import sleep as sleep
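A quick usage sketch of the newly re-exported helper (assuming anyio 4.12+ with this change applied): get_available_backends() returns only the built-in backends whose packages import successfully, so a script can probe them before calling anyio.run().

import anyio

async def main() -> None:
    await anyio.sleep(0)

# Unlike get_all_backends(), this skips backends whose modules fail to import
# (e.g. "trio" when trio is not installed).
for backend in anyio.get_available_backends():
    anyio.run(main, backend=backend)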

View File

@@ -34,7 +34,7 @@ from collections.abc import (
from concurrent.futures import Future
from contextlib import AbstractContextManager, suppress
from contextvars import Context, copy_context
from dataclasses import dataclass
from dataclasses import dataclass, field
from functools import partial, wraps
from inspect import (
CORO_RUNNING,
@@ -59,8 +59,6 @@ from typing import (
)
from weakref import WeakKeyDictionary
import sniffio
from .. import (
CapacityLimiterStatistics,
EventStatistics,
@@ -68,7 +66,11 @@ from .. import (
TaskInfo,
abc,
)
from .._core._eventloop import claim_worker_thread, threadlocals
from .._core._eventloop import (
claim_worker_thread,
set_current_async_library,
threadlocals,
)
from .._core._exceptions import (
BrokenResourceError,
BusyResourceError,
@@ -151,18 +153,18 @@ else:
def __exit__(
self,
exc_type: type[BaseException],
exc_val: BaseException,
exc_tb: TracebackType,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
self.close()
def close(self) -> None:
"""Shutdown and close event loop."""
if self._state is not _State.INITIALIZED:
loop = self._loop
if self._state is not _State.INITIALIZED or loop is None:
return
try:
loop = self._loop
_cancel_all_tasks(loop)
loop.run_until_complete(loop.shutdown_asyncgens())
if hasattr(loop, "shutdown_default_executor"):
@@ -801,6 +803,11 @@ class TaskGroup(abc.TaskGroup):
task_status_future: asyncio.Future | None = None,
) -> asyncio.Task:
def task_done(_task: asyncio.Task) -> None:
if sys.version_info >= (3, 14) and self.cancel_scope._host_task is not None:
asyncio.future_discard_from_awaited_by(
_task, self.cancel_scope._host_task
)
task_state = _task_states[_task]
assert task_state.cancel_scope is not None
assert _task in task_state.cancel_scope._tasks
@@ -882,6 +889,9 @@ class TaskGroup(abc.TaskGroup):
)
self.cancel_scope._tasks.add(task)
self._tasks.add(task)
if sys.version_info >= (3, 14) and self.cancel_scope._host_task is not None:
asyncio.future_add_to_awaited_by(task, self.cancel_scope._host_task)
task.add_done_callback(task_done)
return task
@@ -1005,29 +1015,6 @@ _threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar(
_threadpool_workers: RunVar[set[WorkerThread]] = RunVar("_threadpool_workers")
class BlockingPortal(abc.BlockingPortal):
def __new__(cls) -> BlockingPortal:
return object.__new__(cls)
def __init__(self) -> None:
super().__init__()
self._loop = get_running_loop()
def _spawn_task_from_thread(
self,
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval],
args: tuple[Unpack[PosArgsT]],
kwargs: dict[str, Any],
name: object,
future: Future[T_Retval],
) -> None:
AsyncIOBackend.run_sync_from_thread(
partial(self._task_group.start_soon, name=name),
(self._call_func, func, args, kwargs, future),
self._loop,
)
#
# Subprocesses
#
@@ -1052,12 +1039,30 @@ class StreamReaderWrapper(abc.ByteReceiveStream):
@dataclass(eq=False)
class StreamWriterWrapper(abc.ByteSendStream):
_stream: asyncio.StreamWriter
_closed: bool = field(init=False, default=False)
async def send(self, item: bytes) -> None:
self._stream.write(item)
await self._stream.drain()
await AsyncIOBackend.checkpoint_if_cancelled()
stream_paused = self._stream._protocol._paused # type: ignore[attr-defined]
try:
self._stream.write(item)
await self._stream.drain()
except (ConnectionResetError, BrokenPipeError, RuntimeError) as exc:
# If closed by us and/or the peer:
# * on stdlib, drain() raises ConnectionResetError or BrokenPipeError
# * on uvloop and Winloop, write() eventually starts raising RuntimeError
if self._closed:
raise ClosedResourceError from exc
elif self._stream.is_closing():
raise BrokenResourceError from exc
raise
if not stream_paused:
await AsyncIOBackend.cancel_shielded_checkpoint()
async def aclose(self) -> None:
self._closed = True
self._stream.close()
await AsyncIOBackend.checkpoint()
@@ -1125,7 +1130,7 @@ def _forcibly_shutdown_process_pool_on_exit(
) -> None:
"""
Forcibly shuts down worker processes belonging to this event loop."""
child_watcher: asyncio.AbstractChildWatcher | None = None
child_watcher: asyncio.AbstractChildWatcher | None = None # type: ignore[name-defined]
if sys.version_info < (3, 12):
try:
child_watcher = asyncio.get_event_loop_policy().get_child_watcher()
@@ -1133,7 +1138,7 @@ def _forcibly_shutdown_process_pool_on_exit(
pass
# Close as much as possible (w/o async/await) to avoid warnings
for process in workers:
for process in workers.copy():
if process.returncode is None:
continue
@@ -1157,6 +1162,7 @@ async def _shutdown_process_pool_on_exit(workers: set[abc.Process]) -> None:
try:
await sleep(math.inf)
except asyncio.CancelledError:
workers = workers.copy()
for process in workers:
if process.returncode is None:
process.kill()
@@ -1599,8 +1605,8 @@ class UDPSocket(abc.UDPSocket):
return self._transport.get_extra_info("socket")
async def aclose(self) -> None:
self._closed = True
if not self._transport.is_closing():
self._closed = True
self._transport.close()
async def receive(self) -> tuple[bytes, IPSockAddrType]:
@@ -1647,8 +1653,8 @@ class ConnectedUDPSocket(abc.ConnectedUDPSocket):
return self._transport.get_extra_info("socket")
async def aclose(self) -> None:
self._closed = True
if not self._transport.is_closing():
self._closed = True
self._transport.close()
async def receive(self) -> bytes:
@@ -1971,8 +1977,9 @@ class CapacityLimiter(BaseCapacityLimiter):
def total_tokens(self, value: float) -> None:
if not isinstance(value, int) and not math.isinf(value):
raise TypeError("total_tokens must be an int or math.inf")
if value < 1:
raise ValueError("total_tokens must be >= 1")
if value < 0:
raise ValueError("total_tokens must be >= 0")
waiters_to_notify = max(value - self._total_tokens, 0)
self._total_tokens = value
@@ -2158,9 +2165,14 @@ class TestRunner(abc.TestRunner):
loop_factory: Callable[[], AbstractEventLoop] | None = None,
) -> None:
if use_uvloop and loop_factory is None:
import uvloop
if sys.platform != "win32":
import uvloop
loop_factory = uvloop.new_event_loop
loop_factory = uvloop.new_event_loop
else:
import winloop
loop_factory = winloop.new_event_loop
self._runner = Runner(debug=debug, loop_factory=loop_factory)
self._exceptions: list[BaseException] = []
@@ -2317,9 +2329,14 @@ class AsyncIOBackend(AsyncBackend):
debug = options.get("debug", None)
loop_factory = options.get("loop_factory", None)
if loop_factory is None and options.get("use_uvloop", False):
import uvloop
if sys.platform != "win32":
import uvloop
loop_factory = uvloop.new_event_loop
loop_factory = uvloop.new_event_loop
else:
import winloop
loop_factory = winloop.new_event_loop
with Runner(debug=debug, loop_factory=loop_factory) as runner:
return runner.run(wrapper())
@@ -2475,7 +2492,7 @@ class AsyncIOBackend(AsyncBackend):
expired_worker.stop()
context = copy_context()
context.run(sniffio.current_async_library_cvar.set, None)
context.run(set_current_async_library, None)
if abandon_on_cancel or scope._parent_scope is None:
worker_scope = scope
else:
@@ -2524,7 +2541,7 @@ class AsyncIOBackend(AsyncBackend):
raise RunFinishedError
context = copy_context()
context.run(sniffio.current_async_library_cvar.set, "asyncio")
context.run(set_current_async_library, "asyncio")
scope = getattr(threadlocals, "current_cancel_scope", None)
f: concurrent.futures.Future[T_Retval] = context.run(
asyncio.run_coroutine_threadsafe, task_wrapper(), loop=loop
@@ -2541,7 +2558,7 @@ class AsyncIOBackend(AsyncBackend):
@wraps(func)
def wrapper() -> None:
try:
sniffio.current_async_library_cvar.set("asyncio")
set_current_async_library("asyncio")
f.set_result(func(*args))
except BaseException as exc:
f.set_exception(exc)
@@ -2558,10 +2575,6 @@ class AsyncIOBackend(AsyncBackend):
loop.call_soon_threadsafe(wrapper)
return f.result()
@classmethod
def create_blocking_portal(cls) -> abc.BlockingPortal:
return BlockingPortal()
@classmethod
async def open_process(
cls,
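The use_uvloop handling above now branches on the platform; a hedged sketch of how a caller exercises it (assumes uvloop is installed on POSIX, or winloop on Windows):

import anyio

async def main() -> None:
    await anyio.sleep(0)

# On non-Windows platforms this selects uvloop.new_event_loop as the loop
# factory; on Windows it now selects winloop.new_event_loop instead.
anyio.run(main, backend="asyncio", backend_options={"use_uvloop": True})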

View File

@@ -17,10 +17,8 @@ from collections.abc import (
Iterable,
Sequence,
)
from concurrent.futures import Future
from contextlib import AbstractContextManager
from dataclasses import dataclass
from functools import partial
from io import IOBase
from os import PathLike
from signal import Signals
@@ -224,38 +222,6 @@ class TaskGroup(abc.TaskGroup):
return await self._nursery.start(func, *args, name=name)
#
# Threads
#
class BlockingPortal(abc.BlockingPortal):
def __new__(cls) -> BlockingPortal:
return object.__new__(cls)
def __init__(self) -> None:
super().__init__()
self._token = trio.lowlevel.current_trio_token()
def _spawn_task_from_thread(
self,
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval],
args: tuple[Unpack[PosArgsT]],
kwargs: dict[str, Any],
name: object,
future: Future[T_Retval],
) -> None:
trio.from_thread.run_sync(
partial(self._task_group.start_soon, name=name),
self._call_func,
func,
args,
kwargs,
future,
trio_token=self._token,
)
#
# Subprocesses
#
@@ -1114,10 +1080,6 @@ class TrioBackend(AsyncBackend):
except trio.RunFinishedError:
raise RunFinishedError from None
@classmethod
def create_blocking_portal(cls) -> abc.BlockingPortal:
return BlockingPortal()
@classmethod
async def open_process(
cls,

View File

@@ -5,16 +5,23 @@ import sys
import threading
from collections.abc import Awaitable, Callable, Generator
from contextlib import contextmanager
from contextvars import Token
from importlib import import_module
from typing import TYPE_CHECKING, Any, TypeVar
import sniffio
from ._exceptions import NoEventLoopError
if sys.version_info >= (3, 11):
from typing import TypeVarTuple, Unpack
else:
from typing_extensions import TypeVarTuple, Unpack
sniffio: Any
try:
import sniffio
except ModuleNotFoundError:
sniffio = None
if TYPE_CHECKING:
from ..abc import AsyncBackend
@@ -51,11 +58,7 @@ def run(
:raises LookupError: if the named backend is not found
"""
try:
asynclib_name = sniffio.current_async_library()
except sniffio.AsyncLibraryNotFoundError:
pass
else:
if asynclib_name := current_async_library():
raise RuntimeError(f"Already running {asynclib_name} in this thread")
try:
@@ -64,17 +67,16 @@ def run(
raise LookupError(f"No such backend: {backend}") from exc
token = None
if sniffio.current_async_library_cvar.get(None) is None:
if asynclib_name is None:
# Since we're in control of the event loop, we can cache the name of the async
# library
token = sniffio.current_async_library_cvar.set(backend)
token = set_current_async_library(backend)
try:
backend_options = backend_options or {}
return async_backend.run(func, args, {}, backend_options)
finally:
if token:
sniffio.current_async_library_cvar.reset(token)
reset_current_async_library(token)
async def sleep(delay: float) -> None:
@@ -118,6 +120,8 @@ def current_time() -> float:
Return the current value of the event loop's internal clock.
:return: the clock value (seconds)
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
return get_async_backend().current_time()
@@ -128,8 +132,35 @@ def get_all_backends() -> tuple[str, ...]:
return BACKENDS
def get_available_backends() -> tuple[str, ...]:
"""
Test for the availability of built-in backends.
:return: a tuple of the built-in backend names that were successfully imported
.. versionadded:: 4.12
"""
available_backends: list[str] = []
for backend_name in get_all_backends():
try:
get_async_backend(backend_name)
except ImportError:
continue
available_backends.append(backend_name)
return tuple(available_backends)
def get_cancelled_exc_class() -> type[BaseException]:
"""Return the current async library's cancellation exception class."""
"""
Return the current async library's cancellation exception class.
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
return get_async_backend().cancelled_exception_class()
@@ -153,7 +184,12 @@ def claim_worker_thread(
def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]:
if asynclib_name is None:
asynclib_name = sniffio.current_async_library()
asynclib_name = current_async_library()
if not asynclib_name:
raise NoEventLoopError(
f"Not currently running on any asynchronous event loop. "
f"Available async backends: {', '.join(get_all_backends())}"
)
# We use our own dict instead of sys.modules to get the already imported back-end
# class because the appropriate modules in sys.modules could potentially be only
@@ -164,3 +200,35 @@ def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]:
module = import_module(f"anyio._backends._{asynclib_name}")
loaded_backends[asynclib_name] = module.backend_class
return module.backend_class
def current_async_library() -> str | None:
if sniffio is None:
# If sniffio is not installed, we assume we're either running asyncio or nothing
import asyncio
try:
asyncio.get_running_loop()
return "asyncio"
except RuntimeError:
pass
else:
try:
return sniffio.current_async_library()
except sniffio.AsyncLibraryNotFoundError:
pass
return None
def set_current_async_library(asynclib_name: str | None) -> Token | None:
# no-op if sniffio is not installed
if sniffio is None:
return None
return sniffio.current_async_library_cvar.set(asynclib_name)
def reset_current_async_library(token: Token | None) -> None:
if token is not None:
sniffio.current_async_library_cvar.reset(token)
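The docstring additions above advertise NoEventLoopError for calls made without a running event loop; a minimal sketch, assuming the exception is re-exported at the top-level anyio namespace like the other anyio exceptions:

import anyio

try:
    anyio.current_time()  # no event loop is running in this thread
except anyio.NoEventLoopError as exc:  # assumed top-level re-export
    print("no event loop running:", exc)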

View File

@@ -136,8 +136,11 @@ class WouldBlock(Exception):
class NoEventLoopError(RuntimeError):
"""
Raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync` if
not calling from an AnyIO worker thread, and no ``token`` was passed.
Raised by several functions that require an event loop to be running in the current
thread when there is no running event loop.
This is also raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync`
if not calling from an AnyIO worker thread, and no ``token`` was passed.
"""

View File

@@ -479,9 +479,39 @@ class Path:
await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True)
)
def glob(self, pattern: str) -> AsyncIterator[Path]:
gen = self._path.glob(pattern)
return _PathIterator(gen)
if sys.version_info < (3, 12):
# Python 3.11 and earlier
def glob(self, pattern: str) -> AsyncIterator[Path]:
gen = self._path.glob(pattern)
return _PathIterator(gen)
elif (3, 12) <= sys.version_info < (3, 13):
# Changed in Python 3.12:
# - The case_sensitive parameter was added.
def glob(
self,
pattern: str,
*,
case_sensitive: bool | None = None,
) -> AsyncIterator[Path]:
gen = self._path.glob(pattern, case_sensitive=case_sensitive)
return _PathIterator(gen)
elif sys.version_info >= (3, 13):
# Changed in Python 3.13:
# - The recurse_symlinks parameter was added.
# - The pattern parameter accepts a path-like object.
def glob( # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block
self,
pattern: str | PathLike[str],
*,
case_sensitive: bool | None = None,
recurse_symlinks: bool = False,
) -> AsyncIterator[Path]:
gen = self._path.glob(
pattern, # type: ignore[arg-type]
case_sensitive=case_sensitive,
recurse_symlinks=recurse_symlinks,
)
return _PathIterator(gen)
async def group(self) -> str:
return await to_thread.run_sync(self._path.group, abandon_on_cancel=True)
@@ -643,9 +673,36 @@ class Path:
func = partial(self._path.resolve, strict=strict)
return Path(await to_thread.run_sync(func, abandon_on_cancel=True))
def rglob(self, pattern: str) -> AsyncIterator[Path]:
gen = self._path.rglob(pattern)
return _PathIterator(gen)
if sys.version_info < (3, 12):
# Python 3.11 and earlier
def rglob(self, pattern: str) -> AsyncIterator[Path]:
gen = self._path.rglob(pattern)
return _PathIterator(gen)
elif (3, 12) <= sys.version_info < (3, 13):
# Changed in Python 3.12:
# - The case_sensitive parameter was added.
def rglob(
self, pattern: str, *, case_sensitive: bool | None = None
) -> AsyncIterator[Path]:
gen = self._path.rglob(pattern, case_sensitive=case_sensitive)
return _PathIterator(gen)
elif sys.version_info >= (3, 13):
# Changed in Python 3.13:
# - The recurse_symlinks parameter was added.
# - The pattern parameter accepts a path-like object.
def rglob( # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block
self,
pattern: str | PathLike[str],
*,
case_sensitive: bool | None = None,
recurse_symlinks: bool = False,
) -> AsyncIterator[Path]:
gen = self._path.rglob(
pattern, # type: ignore[arg-type]
case_sensitive=case_sensitive,
recurse_symlinks=recurse_symlinks,
)
return _PathIterator(gen)
async def rmdir(self) -> None:
await to_thread.run_sync(self._path.rmdir)
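A small sketch of the version-dependent glob() signatures above; case_sensitive is assumed to be accepted only on Python 3.12+, and recurse_symlinks only on 3.13+:

import sys
import anyio

async def main() -> None:
    base = anyio.Path(".")
    if sys.version_info >= (3, 12):
        paths = base.glob("*.PY", case_sensitive=False)
    else:
        paths = base.glob("*.py")
    async for path in paths:
        print(path)

anyio.run(main)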

View File

@@ -16,6 +16,8 @@ def open_signal_receiver(
:param signals: signals to receive (e.g. ``signal.SIGINT``)
:return: an asynchronous context manager for an asynchronous iterator which yields
signal numbers
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
.. warning:: Windows does not support signals natively so it is best to avoid
relying on this in cross-platform applications.

View File

@@ -664,6 +664,8 @@ def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str
:param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
:param flags: flags to pass to upstream ``getnameinfo()``
:return: a tuple of (host name, service name)
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
.. seealso:: :func:`socket.getnameinfo`
@@ -687,6 +689,8 @@ def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
socket to become readable
:raises ~anyio.BusyResourceError: if another task is already waiting for the socket
to become readable
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
return get_async_backend().wait_readable(sock.fileno())
@@ -711,6 +715,8 @@ def wait_socket_writable(sock: socket.socket) -> Awaitable[None]:
socket to become writable
:raises ~anyio.BusyResourceError: if another task is already waiting for the socket
to become writable
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
return get_async_backend().wait_writable(sock.fileno())
@@ -742,6 +748,8 @@ def wait_readable(obj: FileDescriptorLike) -> Awaitable[None]:
object to become readable
:raises ~anyio.BusyResourceError: if another task is already waiting for the object
to become readable
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
return get_async_backend().wait_readable(obj)
@@ -756,6 +764,8 @@ def wait_writable(obj: FileDescriptorLike) -> Awaitable[None]:
object to become writable
:raises ~anyio.BusyResourceError: if another task is already waiting for the object
to become writable
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
.. seealso:: See the documentation of :func:`wait_readable` for the definition of
``obj`` and notes on backend compatibility.
@@ -792,6 +802,8 @@ def notify_closing(obj: FileDescriptorLike) -> None:
in anyway.
:param obj: an object with a ``.fileno()`` method or an integer handle
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
get_async_backend().notify_closing(obj)

View File

@@ -7,7 +7,7 @@ from warnings import warn
from ..streams.memory import (
MemoryObjectReceiveStream,
MemoryObjectSendStream,
MemoryObjectStreamState,
_MemoryObjectStreamState,
)
T_Item = TypeVar("T_Item")
@@ -48,5 +48,5 @@ class create_memory_object_stream(
stacklevel=2,
)
state = MemoryObjectStreamState[T_Item](max_buffer_size)
state = _MemoryObjectStreamState[T_Item](max_buffer_size)
return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state))
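The stream state class is renamed to a private name here, but the public construction path is unchanged; a sketch:

import anyio

async def main() -> None:
    send, receive = anyio.create_memory_object_stream[int](max_buffer_size=1)
    async with send, receive:
        await send.send(42)
        print(await receive.receive())

anyio.run(main)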

View File

@@ -7,11 +7,9 @@ from dataclasses import dataclass
from types import TracebackType
from typing import TypeVar
from sniffio import AsyncLibraryNotFoundError
from ..lowlevel import checkpoint_if_cancelled
from ._eventloop import get_async_backend
from ._exceptions import BusyResourceError
from ._exceptions import BusyResourceError, NoEventLoopError
from ._tasks import CancelScope
from ._testing import TaskInfo, get_current_task
@@ -85,7 +83,7 @@ class Event:
def __new__(cls) -> Event:
try:
return get_async_backend().create_event()
except AsyncLibraryNotFoundError:
except NoEventLoopError:
return EventAdapter()
def set(self) -> None:
@@ -153,7 +151,7 @@ class Lock:
def __new__(cls, *, fast_acquire: bool = False) -> Lock:
try:
return get_async_backend().create_lock(fast_acquire=fast_acquire)
except AsyncLibraryNotFoundError:
except NoEventLoopError:
return LockAdapter(fast_acquire=fast_acquire)
async def __aenter__(self) -> None:
@@ -380,7 +378,7 @@ class Semaphore:
return get_async_backend().create_semaphore(
initial_value, max_value=max_value, fast_acquire=fast_acquire
)
except AsyncLibraryNotFoundError:
except NoEventLoopError:
return SemaphoreAdapter(initial_value, max_value=max_value)
def __init__(
@@ -515,7 +513,7 @@ class CapacityLimiter:
def __new__(cls, total_tokens: float) -> CapacityLimiter:
try:
return get_async_backend().create_capacity_limiter(total_tokens)
except AsyncLibraryNotFoundError:
except NoEventLoopError:
return CapacityLimiterAdapter(total_tokens)
async def __aenter__(self) -> None:
@@ -540,6 +538,8 @@ class CapacityLimiter:
.. versionchanged:: 3.0
The property is now writable.
.. versionchanged:: 4.12
The value can now be set to 0.
"""
raise NotImplementedError
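Per the versionchanged note above, total_tokens may now be set to 0; a hedged sketch of what that allows (assuming anyio 4.12+):

import anyio

async def main() -> None:
    limiter = anyio.CapacityLimiter(1)
    limiter.total_tokens = 0   # temporarily block all new borrowers
    limiter.total_tokens = 5   # raise the limit again, waking waiters
    async with limiter:
        print("borrowed one of", limiter.total_tokens, "tokens")

anyio.run(main)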

View File

@@ -23,6 +23,8 @@ class CancelScope:
:param deadline: The time (clock value) when this scope is cancelled automatically
:param shield: ``True`` to shield the cancel scope from external cancellation
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
def __new__(
@@ -110,6 +112,8 @@ def fail_after(
:param shield: ``True`` to shield the cancel scope from external cancellation
:return: a context manager that yields a cancel scope
:rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\]
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
current_time = get_async_backend().current_time
@@ -131,6 +135,8 @@ def move_on_after(delay: float | None, shield: bool = False) -> CancelScope:
``None`` to disable the timeout
:param shield: ``True`` to shield the cancel scope from external cancellation
:return: a cancel scope
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
deadline = (
@@ -148,6 +154,8 @@ def current_effective_deadline() -> float:
there is no deadline in effect, or ``float('-inf')`` if the current scope has
been cancelled)
:rtype: float
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
return get_async_backend().current_effective_deadline()
@@ -158,6 +166,8 @@ def create_task_group() -> TaskGroup:
Create a task group.
:return: a task group
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
return get_async_backend().create_task_group()

View File

@@ -58,6 +58,8 @@ def get_current_task() -> TaskInfo:
Return the current task.
:return: a representation of the current task
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
return get_async_backend().get_current_task()
@@ -68,6 +70,8 @@ def get_running_tasks() -> list[TaskInfo]:
Return a list of running tasks in the current event loop.
:return: a list of task info objects
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
return cast("list[TaskInfo]", get_async_backend().get_running_tasks())

View File

@@ -33,7 +33,6 @@ if TYPE_CHECKING:
from .._core._synchronization import CapacityLimiter, Event, Lock, Semaphore
from .._core._tasks import CancelScope
from .._core._testing import TaskInfo
from ..from_thread import BlockingPortal
from ._sockets import (
ConnectedUDPSocket,
ConnectedUNIXDatagramSocket,
@@ -231,11 +230,6 @@ class AsyncBackend(metaclass=ABCMeta):
) -> T_Retval:
pass
@classmethod
@abstractmethod
def create_blocking_portal(cls) -> BlockingPortal:
pass
@classmethod
@abstractmethod
async def open_process(

View File

@@ -1,5 +1,14 @@
from __future__ import annotations
__all__ = (
"BlockingPortal",
"BlockingPortalProvider",
"check_cancelled",
"run",
"run_sync",
"start_blocking_portal",
)
import sys
from collections.abc import Awaitable, Callable, Generator
from concurrent.futures import Future
@@ -9,6 +18,7 @@ from contextlib import (
contextmanager,
)
from dataclasses import dataclass, field
from functools import partial
from inspect import isawaitable
from threading import Lock, Thread, current_thread, get_ident
from types import TracebackType
@@ -21,7 +31,6 @@ from typing import (
)
from ._core._eventloop import (
get_async_backend,
get_cancelled_exc_class,
threadlocals,
)
@@ -30,7 +39,7 @@ from ._core._exceptions import NoEventLoopError
from ._core._synchronization import Event
from ._core._tasks import CancelScope, create_task_group
from .abc._tasks import TaskStatus
from .lowlevel import EventLoopToken
from .lowlevel import EventLoopToken, current_token
if sys.version_info >= (3, 11):
from typing import TypeVarTuple, Unpack
@@ -174,16 +183,18 @@ class _BlockingPortalTaskStatus(TaskStatus):
class BlockingPortal:
"""An object that lets external threads run code in an asynchronous event loop."""
"""
An object that lets external threads run code in an asynchronous event loop.
def __new__(cls) -> BlockingPortal:
return get_async_backend().create_blocking_portal()
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
def __init__(self) -> None:
self._token = current_token()
self._event_loop_thread_id: int | None = get_ident()
self._stop_event = Event()
self._task_group = create_task_group()
self._cancelled_exc_class = get_cancelled_exc_class()
async def __aenter__(self) -> BlockingPortal:
await self._task_group.__aenter__()
@@ -234,25 +245,21 @@ class BlockingPortal:
future: Future[T_Retval],
) -> None:
def callback(f: Future[T_Retval]) -> None:
if f.cancelled() and self._event_loop_thread_id not in (
None,
get_ident(),
):
self.call(scope.cancel, "the future was cancelled")
if f.cancelled():
if self._event_loop_thread_id == get_ident():
scope.cancel("the future was cancelled")
elif self._event_loop_thread_id is not None:
self.call(scope.cancel, "the future was cancelled")
try:
retval_or_awaitable = func(*args, **kwargs)
if isawaitable(retval_or_awaitable):
with CancelScope() as scope:
if future.cancelled():
scope.cancel("the future was cancelled")
else:
future.add_done_callback(callback)
future.add_done_callback(callback)
retval = await retval_or_awaitable
else:
retval = retval_or_awaitable
except self._cancelled_exc_class:
except get_cancelled_exc_class():
future.cancel()
future.set_running_or_notify_cancel()
except BaseException as exc:
@@ -279,8 +286,6 @@ class BlockingPortal:
"""
Spawn a new task using the given callable.
Implementers must ensure that the future is resolved when the task finishes.
:param func: a callable
:param args: positional arguments to be passed to the callable
:param kwargs: keyword arguments to be passed to the callable
@@ -289,7 +294,15 @@ class BlockingPortal:
or the exception raised during its execution
"""
raise NotImplementedError
run_sync(
partial(self._task_group.start_soon, name=name),
self._call_func,
func,
args,
kwargs,
future,
token=self._token,
)
@overload
def call(
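With create_blocking_portal() removed from the backend classes, BlockingPortal now captures the current event loop token directly in __init__; the public entry point is unchanged. A usage sketch:

import anyio
from anyio.from_thread import start_blocking_portal

async def greet(name: str) -> str:
    await anyio.sleep(0)
    return f"hello, {name}"

# Runs an event loop in a worker thread and returns a portal bound to it.
with start_blocking_portal(backend="asyncio") as portal:
    print(portal.call(greet, "world"))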

View File

@@ -1,7 +1,18 @@
from __future__ import annotations
__all__ = (
"EventLoopToken",
"RunvarToken",
"RunVar",
"checkpoint",
"checkpoint_if_cancelled",
"cancel_shielded_checkpoint",
"current_token",
)
import enum
from dataclasses import dataclass
from types import TracebackType
from typing import Any, Generic, Literal, TypeVar, final, overload
from weakref import WeakKeyDictionary
@@ -21,7 +32,6 @@ async def checkpoint() -> None:
await checkpoint_if_cancelled()
await cancel_shielded_checkpoint()
.. versionadded:: 3.0
"""
@@ -49,7 +59,6 @@ async def cancel_shielded_checkpoint() -> None:
with CancelScope(shield=True):
await checkpoint()
.. versionadded:: 3.0
"""
@@ -74,6 +83,9 @@ def current_token() -> EventLoopToken:
Return a token object that can be used to call code in the current event loop from
another thread.
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
.. versionadded:: 4.11.0
"""
@@ -97,10 +109,24 @@ class RunvarToken(Generic[T]):
self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value
self._redeemed = False
def __enter__(self) -> RunvarToken[T]:
return self
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
self._var.reset(self)
class RunVar(Generic[T]):
"""
Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop.
Can be used as a context manager, just like :class:`~contextvars.ContextVar`: the
variable is reset to its previous value when the context block is exited.
"""
__slots__ = "_name", "_default"

View File

@@ -8,11 +8,16 @@ from inspect import isasyncgenfunction, iscoroutinefunction, ismethod
from typing import Any, cast
import pytest
import sniffio
from _pytest.fixtures import SubRequest
from _pytest.outcomes import Exit
from ._core._eventloop import get_all_backends, get_async_backend
from . import get_available_backends
from ._core._eventloop import (
current_async_library,
get_async_backend,
reset_current_async_library,
set_current_async_library,
)
from ._core._exceptions import iterate_exceptions
from .abc import TestRunner
@@ -42,11 +47,11 @@ def get_runner(
if _current_runner is None:
asynclib = get_async_backend(backend_name)
_runner_stack = ExitStack()
if sniffio.current_async_library_cvar.get(None) is None:
if current_async_library() is None:
# Since we're in control of the event loop, we can cache the name of the
# async library
token = sniffio.current_async_library_cvar.set(backend_name)
_runner_stack.callback(sniffio.current_async_library_cvar.reset, token)
token = set_current_async_library(backend_name)
_runner_stack.callback(reset_current_async_library, token)
backend_options = backend_options or {}
_current_runner = _runner_stack.enter_context(
@@ -69,7 +74,6 @@ def pytest_addoption(parser: pytest.Parser) -> None:
"anyio_mode",
default="strict",
help='AnyIO plugin mode (either "strict" or "auto")',
type="string",
)
@@ -196,7 +200,7 @@ def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None:
return None
@pytest.fixture(scope="module", params=get_all_backends())
@pytest.fixture(scope="module", params=get_available_backends())
def anyio_backend(request: Any) -> Any:
return request.param
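The module-scoped anyio_backend fixture is now parametrized over get_available_backends() instead of get_all_backends(), so tests are only collected for importable backends. Overriding it per module still works the usual way; a sketch:

import pytest
import anyio

@pytest.fixture
def anyio_backend():
    return "asyncio"   # or ("asyncio", {"use_uvloop": True})

@pytest.mark.anyio
async def test_sleep():
    await anyio.sleep(0)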

View File

@@ -1,5 +1,11 @@
from __future__ import annotations
__all__ = (
"BufferedByteReceiveStream",
"BufferedByteStream",
"BufferedConnectable",
)
import sys
from collections.abc import Callable, Iterable, Mapping
from dataclasses import dataclass, field

View File

@@ -1,5 +1,11 @@
from __future__ import annotations
__all__ = (
"FileReadStream",
"FileStreamAttribute",
"FileWriteStream",
)
from collections.abc import Callable, Mapping
from io import SEEK_SET, UnsupportedOperation
from os import PathLike

View File

@@ -1,5 +1,11 @@
from __future__ import annotations
__all__ = (
"MemoryObjectReceiveStream",
"MemoryObjectSendStream",
"MemoryObjectStreamStatistics",
)
import warnings
from collections import OrderedDict, deque
from dataclasses import dataclass, field
@@ -34,7 +40,7 @@ class MemoryObjectStreamStatistics(NamedTuple):
@dataclass(eq=False)
class MemoryObjectItemReceiver(Generic[T_Item]):
class _MemoryObjectItemReceiver(Generic[T_Item]):
task_info: TaskInfo = field(init=False, default_factory=get_current_task)
item: T_Item = field(init=False)
@@ -46,12 +52,12 @@ class MemoryObjectItemReceiver(Generic[T_Item]):
@dataclass(eq=False)
class MemoryObjectStreamState(Generic[T_Item]):
class _MemoryObjectStreamState(Generic[T_Item]):
max_buffer_size: float = field()
buffer: deque[T_Item] = field(init=False, default_factory=deque)
open_send_channels: int = field(init=False, default=0)
open_receive_channels: int = field(init=False, default=0)
waiting_receivers: OrderedDict[Event, MemoryObjectItemReceiver[T_Item]] = field(
waiting_receivers: OrderedDict[Event, _MemoryObjectItemReceiver[T_Item]] = field(
init=False, default_factory=OrderedDict
)
waiting_senders: OrderedDict[Event, T_Item] = field(
@@ -71,7 +77,7 @@ class MemoryObjectStreamState(Generic[T_Item]):
@dataclass(eq=False)
class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]):
_state: MemoryObjectStreamState[T_co]
_state: _MemoryObjectStreamState[T_co]
_closed: bool = field(init=False, default=False)
def __post_init__(self) -> None:
@@ -112,7 +118,7 @@ class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]):
except WouldBlock:
# Add ourselves in the queue
receive_event = Event()
receiver = MemoryObjectItemReceiver[T_co]()
receiver = _MemoryObjectItemReceiver[T_co]()
self._state.waiting_receivers[receive_event] = receiver
try:
@@ -190,7 +196,7 @@ class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]):
@dataclass(eq=False)
class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]):
_state: MemoryObjectStreamState[T_contra]
_state: _MemoryObjectStreamState[T_contra]
_closed: bool = field(init=False, default=False)
def __post_init__(self) -> None:

View File

@@ -1,5 +1,11 @@
from __future__ import annotations
__all__ = (
"MultiListener",
"StapledByteStream",
"StapledObjectStream",
)
from collections.abc import Callable, Mapping, Sequence
from dataclasses import dataclass
from typing import Any, Generic, TypeVar

View File

@@ -1,5 +1,12 @@
from __future__ import annotations
__all__ = (
"TextConnectable",
"TextReceiveStream",
"TextSendStream",
"TextStream",
)
import codecs
import sys
from collections.abc import Callable, Mapping

View File

@@ -1,5 +1,12 @@
from __future__ import annotations
__all__ = (
"TLSAttribute",
"TLSConnectable",
"TLSListener",
"TLSStream",
)
import logging
import re
import ssl

View File

@@ -1,5 +1,10 @@
from __future__ import annotations
__all__ = (
"run_sync",
"current_default_interpreter_limiter",
)
import atexit
import os
import sys
@@ -20,7 +25,9 @@ else:
if sys.version_info >= (3, 14):
from concurrent.interpreters import ExecutionFailed, create
def _interp_call(func: Callable[..., Any], args: tuple[Any, ...]):
def _interp_call(
func: Callable[..., Any], args: tuple[Any, ...]
) -> tuple[Any, bool]:
try:
retval = func(*args)
except BaseException as exc:
@@ -28,7 +35,7 @@ if sys.version_info >= (3, 14):
else:
return retval, False
class Worker:
class _Worker:
last_used: float = 0
def __init__(self) -> None:
@@ -90,7 +97,7 @@ except NotShareableError:
"exec",
)
class Worker:
class _Worker:
last_used: float = 0
def __init__(self) -> None:
@@ -128,7 +135,7 @@ except NotShareableError:
return res
else:
class Worker:
class _Worker:
last_used: float = 0
def __init__(self) -> None:
@@ -153,11 +160,11 @@ MAX_WORKER_IDLE_TIME = (
T_Retval = TypeVar("T_Retval")
PosArgsT = TypeVarTuple("PosArgsT")
_idle_workers = RunVar[deque[Worker]]("_available_workers")
_idle_workers = RunVar[deque[_Worker]]("_available_workers")
_default_interpreter_limiter = RunVar[CapacityLimiter]("_default_interpreter_limiter")
def _stop_workers(workers: deque[Worker]) -> None:
def _stop_workers(workers: deque[_Worker]) -> None:
for worker in workers:
worker.destroy()
@@ -199,7 +206,7 @@ async def run_sync(
try:
worker = idle_workers.pop()
except IndexError:
worker = Worker()
worker = _Worker()
try:
return await to_thread.run_sync(
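The worker class renames here are internal; the public entry point is unchanged. A sketch (assuming a Python version with subinterpreter support, as required by anyio.to_interpreter):

import anyio
from anyio import to_interpreter

def cpu_bound(x: int) -> int:
    return x * x

async def main() -> None:
    # Dispatches the call to a pooled worker subinterpreter.
    print(await to_interpreter.run_sync(cpu_bound, 7))

anyio.run(main)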

View File

@@ -1,5 +1,11 @@
from __future__ import annotations
__all__ = (
"current_default_process_limiter",
"process_worker",
"run_sync",
)
import os
import pickle
import subprocess
@@ -54,6 +60,8 @@ async def run_sync( # type: ignore[return]
running
:param limiter: capacity limiter to use to limit the total amount of processes
running (if omitted, the default limiter is used)
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
:return: an awaitable that yields the return value of the function.
"""

View File

@@ -1,5 +1,10 @@
from __future__ import annotations
__all__ = (
"run_sync",
"current_default_thread_limiter",
)
import sys
from collections.abc import Callable
from typing import TypeVar
@@ -41,6 +46,8 @@ async def run_sync(
``abandon_on_cancel`` if both parameters are passed
:param limiter: capacity limiter to use to limit the total amount of threads running
(if omitted, the default limiter is used)
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
:return: an awaitable that yields the return value of the function.
"""
@@ -64,6 +71,8 @@ def current_default_thread_limiter() -> CapacityLimiter:
concurrent threads.
:return: a capacity limiter object
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
current thread
"""
return get_async_backend().current_default_thread_limiter()
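A short sketch tying the docstring additions above together: run_sync() dispatches to a worker thread and, per the new :raises: notes, both functions need a supported event loop running in the current thread.

import time
import anyio
from anyio import to_thread

async def main() -> None:
    await to_thread.run_sync(time.sleep, 0.1)
    limiter = to_thread.current_default_thread_limiter()
    print("default thread tokens:", limiter.total_tokens)

anyio.run(main)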