sphinx documentation; adjusted all docstrings; moved some modules to non-public subpackage
@@ -14,10 +14,5 @@ See the GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License along with asyncio-taskpool.
If not, see <https://www.gnu.org/licenses/>."""

__doc__ = """
Brings the main classes up to package level for import convenience.
"""

from .control.server import TCPControlServer, UnixControlServer
from .pool import TaskPool, SimpleTaskPool

@@ -0,0 +1,2 @@
from .server import TCPControlServer, UnixControlServer
from .client import TCPControlClient, UnixControlClient
@@ -15,7 +15,7 @@ You should have received a copy of the GNU Lesser General Public License along w
If not, see <https://www.gnu.org/licenses/>."""

__doc__ = """
CLI client entry point.
CLI entry point script for a :class:`ControlClient`.
"""

@@ -24,12 +24,15 @@ from asyncio import run
from pathlib import Path
from typing import Any, Dict, Sequence

from ..constants import PACKAGE_NAME
from ..internals.constants import PACKAGE_NAME
from ..pool import TaskPool
from .client import ControlClient, TCPControlClient, UnixControlClient
from .client import TCPControlClient, UnixControlClient
from .server import TCPControlServer, UnixControlServer

__all__ = []

CLIENT_CLASS = 'client_class'
UNIX, TCP = 'unix', 'tcp'
SOCKET_PATH = 'path'

@@ -39,7 +42,7 @@ HOST, PORT = 'host', 'port'
def parse_cli(args: Sequence[str] = None) -> Dict[str, Any]:
parser = ArgumentParser(
prog=f'{PACKAGE_NAME}.control',
description=f"Simple CLI based {ControlClient.__name__} for {PACKAGE_NAME}"
description=f"Simple CLI based control client for {PACKAGE_NAME}"
)
subparsers = parser.add_subparsers(title="Connection types")
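For orientation, a minimal sketch of how the parsed result could drive a client. It assumes the `unix`/`tcp` subparsers store the matching client class under the `CLIENT_CLASS` key; the socket path is invented for the example:

    from asyncio import run

    def _demo() -> None:
        kwargs = parse_cli(['unix', '/tmp/example_pool.sock'])
        client_cls = kwargs.pop(CLIENT_CLASS)  # assumed to be set as a default by the chosen subparser
        run(client_cls(**kwargs).start())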
@@ -27,13 +27,24 @@ from asyncio.streams import StreamReader, StreamWriter, open_connection
from pathlib import Path
from typing import Optional, Union

from ..constants import CLIENT_EXIT, CLIENT_INFO, SESSION_MSG_BYTES
from ..types import ClientConnT, PathT
from ..internals.constants import CLIENT_INFO, SESSION_MSG_BYTES
from ..internals.types import ClientConnT, PathT

__all__ = [
'ControlClient',
'TCPControlClient',
'UnixControlClient',
'CLIENT_EXIT'
]

CLIENT_EXIT = 'exit'

class ControlClient(ABC):
"""
Abstract base class for a simple implementation of a task pool control client.
Abstract base class for a simple implementation of a pool control client.

Since the server's control interface is simply expecting commands to be sent, any process able to connect to the
TCP or UNIX socket and issue the relevant commands (and optionally read the responses) will work just as well.

@@ -58,7 +69,7 @@ class ControlClient(ABC):
raise NotImplementedError

def __init__(self, **conn_kwargs) -> None:
"""Simply stores the connection keyword-arguments necessary for opening the connection."""
"""Simply stores the keyword-arguments for opening the connection."""
self._conn_kwargs = conn_kwargs
self._connected: bool = False

@@ -91,7 +102,7 @@ class ControlClient(ABC):
"""
try:
msg = input("> ").strip().lower()
except EOFError: # Ctrl+D shall be equivalent to the `CLIENT_EXIT` command.
except EOFError: # Ctrl+D shall be equivalent to the :const:`CLIENT_EXIT` command.
msg = CLIENT_EXIT
except KeyboardInterrupt: # Ctrl+C shall simply reset to the input prompt.
print()

@@ -129,11 +140,14 @@ class ControlClient(ABC):

async def start(self) -> None:
"""
This method opens the pre-defined connection, performs the server-handshake, and enters the interaction loop.
Opens connection, performs handshake, and enters interaction loop.

An input prompt is presented to the user and any input is sent (encoded) to the connected server.
One exception is the :const:`CLIENT_EXIT` command (equivalent to Ctrl+D), which merely closes the connection.

If the connection can not be established, an error message is printed to `stderr` and the method returns.
If the `_connected` flag is set to `False` during the interaction loop, the method returns and prints out a
disconnected-message.
If either the exit command is issued or the connection to the server is lost during the interaction loop,
the method returns and prints out a disconnected-message.
"""
reader, writer = await self._open_connection(**self._conn_kwargs)
if reader is None:
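A minimal usage sketch for the interaction loop described above (the socket path is invented and a control server is assumed to be listening on it):

    from asyncio import run
    from asyncio_taskpool.control import UnixControlClient

    run(UnixControlClient(socket_path='/tmp/example_pool.sock').start())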
@@ -146,10 +160,10 @@ class ControlClient(ABC):

class TCPControlClient(ControlClient):
"""Task pool control client that expects a TCP socket to be exposed by the control server."""
"""Task pool control client for connecting to a :class:`TCPControlServer`."""

def __init__(self, host: str, port: Union[int, str], **conn_kwargs) -> None:
"""In addition to what the base class does, `host` and `port` are expected as non-optional arguments."""
"""`host` and `port` are expected as non-optional connection arguments."""
self._host = host
self._port = port
super().__init__(**conn_kwargs)

@@ -169,10 +183,10 @@ class TCPControlClient(ControlClient):

class UnixControlClient(ControlClient):
"""Task pool control client that expects a unix socket to be exposed by the control server."""
"""Task pool control client for connecting to a :class:`UnixControlServer`."""

def __init__(self, socket_path: PathT, **conn_kwargs) -> None:
"""In addition to what the base class does, the `socket_path` is expected as a non-optional argument."""
"""`socket_path` is expected as a non-optional connection argument."""
from asyncio.streams import open_unix_connection
self._open_unix_connection = open_unix_connection
self._socket_path = Path(socket_path)
@@ -15,7 +15,8 @@ You should have received a copy of the GNU Lesser General Public License along w
If not, see <https://www.gnu.org/licenses/>."""

__doc__ = """
This module contains the the definition of the `ControlParser` class used by a control server.
Definition of the :class:`ControlParser` used in a
:class:`ControlSession <asyncio_taskpool.control.session.ControlSession>`.
"""

@@ -26,10 +27,13 @@ from inspect import Parameter, getmembers, isfunction, signature
from shutil import get_terminal_size
from typing import Any, Callable, Container, Dict, Iterable, Set, Type, TypeVar

from ..constants import CLIENT_INFO, CMD, STREAM_WRITER
from ..exceptions import HelpRequested, ParserError
from ..helpers import get_first_doc_line, resolve_dotted_path
from ..types import ArgsT, CancelCB, CoroutineFunc, EndCB, KwArgsT
from ..internals.constants import CLIENT_INFO, CMD, STREAM_WRITER
from ..internals.helpers import get_first_doc_line, resolve_dotted_path
from ..internals.types import ArgsT, CancelCB, CoroutineFunc, EndCB, KwArgsT

__all__ = ['ControlParser']

FmtCls = TypeVar('FmtCls', bound=Type[HelpFormatter])

@@ -42,7 +46,7 @@ NAME, PROG, HELP, DESCRIPTION = 'name', 'prog', 'help', 'description'

class ControlParser(ArgumentParser):
"""
Subclass of the standard `argparse.ArgumentParser` for remote interaction.
Subclass of the standard :code:`argparse.ArgumentParser` for pool control.

Such a parser is not supposed to ever print to stdout/stderr, but instead direct all messages to a `StreamWriter`
instance passed to it during initialization.

@@ -54,16 +58,18 @@ class ControlParser(ArgumentParser):
@staticmethod
def help_formatter_factory(terminal_width: int, base_cls: FmtCls = None) -> FmtCls:
"""
Constructs and returns a subclass of `argparse.HelpFormatter` with a fixed terminal width argument.
Constructs and returns a subclass of :class:`argparse.HelpFormatter`

Although a custom formatter class can be explicitly passed into the `ArgumentParser` constructor, this is not
as convenient, when making use of sub-parsers.
The formatter class will have the defined `terminal_width`.

Although a custom formatter class can be explicitly passed into the :class:`ArgumentParser` constructor,
this is not as convenient, when making use of sub-parsers.

Args:
terminal_width:
The number of columns of the terminal to which to adjust help formatting.
base_cls (optional):
The base class to use for inheritance. By default `argparse.ArgumentDefaultsHelpFormatter` is used.
Base class to use for inheritance. By default :class:`argparse.ArgumentDefaultsHelpFormatter` is used.

Returns:
The subclass of `base_cls` which fixes the constructor's `width` keyword-argument to `terminal_width`.
@@ -77,21 +83,19 @@ class ControlParser(ArgumentParser):
super().__init__(*args, **kwargs)
return ClientHelpFormatter

def __init__(self, stream_writer: StreamWriter, terminal_width: int = None,
**kwargs) -> None:
def __init__(self, stream_writer: StreamWriter, terminal_width: int = None, **kwargs) -> None:
"""
Subclass of the `ArgumentParser` geared towards asynchronous interaction with an object "from the outside".

Allows directing output to a specified writer rather than stdout/stderr and setting terminal width explicitly.
Sets some internal attributes in addition to the base class.

Args:
stream_writer:
The instance of the `asyncio.StreamWriter` to use for message output.
The instance of the :class:`asyncio.StreamWriter` to use for message output.
terminal_width (optional):
The terminal width to use for all message formatting. Defaults to `shutil.get_terminal_size().columns`.
The terminal width to use for all message formatting. By default the :code:`columns` attribute from
:func:`shutil.get_terminal_size` is taken.
**kwargs(optional):
Passed to the parent class constructor. The exception is the `formatter_class` parameter: Even if a
class is specified, it will always be subclassed in the `help_formatter_factory`.
class is specified, it will always be subclassed in the :meth:`help_formatter_factory`.
Also, by default, `exit_on_error` is set to `False` (as opposed to how the parent class handles it).
"""
self._stream_writer: StreamWriter = stream_writer
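A rough reconstruction of what `help_formatter_factory` amounts to, based only on the docstring above (the hunk shows just the tail of the method); it pins the `width` keyword-argument and otherwise defers to the chosen base class:

    from argparse import ArgumentDefaultsHelpFormatter

    def help_formatter_factory(terminal_width, base_cls=None):
        if base_cls is None:
            base_cls = ArgumentDefaultsHelpFormatter  # default named in the docstring

        class ClientHelpFormatter(base_cls):
            def __init__(self, *args, **kwargs) -> None:
                kwargs['width'] = terminal_width  # fix the width keyword-argument
                super().__init__(*args, **kwargs)

        return ClientHelpFormatter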
@@ -105,12 +109,12 @@ class ControlParser(ArgumentParser):
def add_function_command(self, function: Callable, omit_params: Container[str] = OMIT_PARAMS_DEFAULT,
**subparser_kwargs) -> 'ControlParser':
"""
Takes a function along with its parameters and adds a corresponding (sub-)command to the parser.
Takes a function and adds a corresponding (sub-)command to the parser.

The `add_subparsers` method must have been called prior to this.
The :meth:`add_subparsers` method must have been called prior to this.

NOTE: Currently, only a limited spectrum of parameters can be accurately converted to a parser argument.
This method works correctly with any public method of the `SimpleTaskPool` class.
NOTE: Currently, only a limited spectrum of parameters can be accurately converted to parser arguments.
This method works correctly with any public method of any task pool class.

Args:
function:

@@ -118,7 +122,7 @@ class ControlParser(ArgumentParser):
omit_params (optional):
Names of function parameters not to add as parser arguments.
**subparser_kwargs (optional):
Passed directly to the `add_parser` method.
Passed directly to the :meth:`add_parser` method.

Returns:
The subparser instance created from the function.

@@ -133,7 +137,7 @@ class ControlParser(ArgumentParser):

def add_property_command(self, prop: property, cls_name: str = '', **subparser_kwargs) -> 'ControlParser':
"""
Same as the `add_function_command` method, but for properties.
Same as the :meth:`add_function_command` method, but for properties.

Args:
prop:

@@ -141,7 +145,7 @@ class ControlParser(ArgumentParser):
cls_name (optional):
Name of the class the property is defined on to appear in the command help text.
**subparser_kwargs (optional):
Passed directly to the `add_parser` method.
Passed directly to the :meth:`add_parser` method.

Returns:
The subparser instance created from the property.

@@ -164,12 +168,12 @@ class ControlParser(ArgumentParser):
def add_class_commands(self, cls: Type, public_only: bool = True, omit_members: Container[str] = (),
member_arg_name: str = CMD) -> ParsersDict:
"""
Takes a class and adds its methods and properties as (sub-)commands to the parser.
Adds methods/properties of a class as (sub-)commands to the parser.

The `add_subparsers` method must have been called prior to this.
The :meth:`add_subparsers` method must have been called prior to this.

NOTE: Currently, only a limited spectrum of function parameters can be accurately converted to parser arguments.
This method works correctly with the `SimpleTaskPool` class.
This method works correctly with any task pool class.

Args:
cls:

@@ -181,7 +185,6 @@ class ControlParser(ArgumentParser):
member_arg_name (optional):
After parsing the arguments, depending on which command was invoked by the user, the corresponding
method/property will be stored as an extra argument in the parsed namespace under this attribute name.
Defaults to `constants.CMD`.

Returns:
Dictionary mapping class member names to the (sub-)parsers created from them.

@@ -202,7 +205,7 @@ class ControlParser(ArgumentParser):
return parsers

def add_subparsers(self, *args, **kwargs):
"""Adds the subparsers action as an internal attribute before returning it."""
"""Adds the subparsers action as an attribute before returning it."""
self._commands = super().add_subparsers(*args, **kwargs)
return self._commands
@@ -217,28 +220,28 @@ class ControlParser(ArgumentParser):
self._print_message(message)

def error(self, message: str) -> None:
"""This just adds the custom `HelpRequested` exception after the parent class' method."""
"""Raises the :exc:`ParserError <asyncio_taskpool.exceptions.ParserError>` exception at the end."""
super().error(message=message)
raise ParserError

def print_help(self, file=None) -> None:
"""This just adds the custom `HelpRequested` exception after the parent class' method."""
"""Raises the :exc:`HelpRequested <asyncio_taskpool.exceptions.HelpRequested>` exception at the end."""
super().print_help(file)
raise HelpRequested

def add_function_arg(self, parameter: Parameter, **kwargs) -> Action:
"""
Takes an `inspect.Parameter` of a function and adds a corresponding argument to the parser.
Takes an :class:`inspect.Parameter` and adds a corresponding parser argument.

NOTE: Currently, only a limited spectrum of parameters can be accurately converted to a parser argument.
This method works correctly with any parameter of any public method of the `SimpleTaskPool` class.
This method works correctly with any parameter of any public method of any task pool class.

Args:
parameter: The `inspect.Parameter` object to be converted to a parser argument.
**kwargs: Passed to the `add_argument` method of the base class.
parameter: The :class:`inspect.Parameter` object to be converted to a parser argument.
**kwargs: Passed to the :meth:`add_argument` method of the base class.

Returns:
The `argparse.Action` returned by the `add_argument` method.
The :class:`argparse.Action` returned by the :meth:`add_argument` method.
"""
if parameter.default is Parameter.empty:
# A non-optional function parameter should correspond to a positional argument.

@@ -273,10 +276,10 @@ class ControlParser(ArgumentParser):

def add_function_args(self, function: Callable, omit: Container[str] = OMIT_PARAMS_DEFAULT) -> None:
"""
Takes a function reference and adds its parameters as arguments to the parser.
Takes a function and adds its parameters as arguments to the parser.

NOTE: Currently, only a limited spectrum of parameters can be accurately converted to a parser argument.
This method works correctly with any public method of the `SimpleTaskPool` class.
This method works correctly with any public method of any task pool class.

Args:
function:
@@ -305,6 +308,16 @@ def _get_arg_type_wrapper(cls: Type) -> Callable[[Any], Any]:

def _get_type_from_annotation(annotation: Type) -> Callable[[Any], Any]:
"""
Returns a type conversion function based on the `annotation` passed.

Required to properly convert parsed arguments to the type expected by certain pool methods.
Each conversion function is wrapped by `_get_arg_type_wrapper`.

`Callable`-type annotations give the `resolve_dotted_path` function.
`Iterable`- or args/kwargs-type annotations give the `ast.literal_eval` function.
Others pass unchanged (but still wrapped with `_get_arg_type_wrapper`).
"""
if any(annotation is t for t in {CoroutineFunc, EndCB, CancelCB}):
annotation = resolve_dotted_path
if any(annotation is t for t in {ArgsT, KwArgsT, Iterable[ArgsT], Iterable[KwArgsT]}):
@@ -15,7 +15,7 @@ You should have received a copy of the GNU Lesser General Public License along w
If not, see <https://www.gnu.org/licenses/>."""

__doc__ = """
This module contains the task pool control server class definitions.
Task pool control server class definitions.
"""

@@ -28,10 +28,13 @@ from asyncio.tasks import Task, create_task
from pathlib import Path
from typing import Optional, Union

from ..pool import TaskPool, SimpleTaskPool
from ..types import ConnectedCallbackT
from .client import ControlClient, TCPControlClient, UnixControlClient
from .session import ControlSession
from ..pool import AnyTaskPoolT
from ..internals.types import ConnectedCallbackT, PathT

__all__ = ['ControlServer', 'TCPControlServer', 'UnixControlServer']

log = logging.getLogger(__name__)
@@ -41,17 +44,52 @@ class ControlServer(ABC):
"""
Abstract base class for a task pool control server.

This class acts as a wrapper around an async server instance and initializes a `ControlSession` upon a client
connecting to it. The entire interface is defined within that session class.
This class acts as a wrapper around an async server instance and initializes a
:class:`ControlSession <asyncio_taskpool.control.session.ControlSession>` once a client connects to it.
The interface is defined within the session class.
"""
_client_class = ControlClient

@classmethod
@property
def client_class_name(cls) -> str:
"""Returns the name of the control client class matching the server class."""
"""Returns the name of the matching control client class."""
return cls._client_class.__name__

def __init__(self, pool: AnyTaskPoolT, **server_kwargs) -> None:
"""
Merely sets internal attributes, but does not start the server yet.
The task pool must be passed here and can not be set/changed afterwards. This means a control server is always
tied to one specific task pool.

Args:
pool:
An instance of a `BaseTaskPool` subclass to tie the server to.
**server_kwargs (optional):
Keyword arguments that will be passed into the function that starts the server.
"""
self._pool: AnyTaskPoolT = pool
self._server_kwargs = server_kwargs
self._server: Optional[AbstractServer] = None

@property
def pool(self) -> AnyTaskPoolT:
"""The task pool instance controlled by the server."""
return self._pool

def is_serving(self) -> bool:
"""Wrapper around the `asyncio.Server.is_serving` method."""
return self._server.is_serving()

async def _client_connected_cb(self, reader: StreamReader, writer: StreamWriter) -> None:
"""
The universal client callback that will be passed into the `_get_server_instance` method.
Instantiates a control session, performs the client handshake, and enters the session's `listen` loop.
"""
session = ControlSession(self, reader, writer)
await session.client_handshake()
await session.listen()

@abstractmethod
async def _get_server_instance(self, client_connected_cb: ConnectedCallbackT, **kwargs) -> AbstractServer:
"""
@@ -74,40 +112,6 @@ class ControlServer(ABC):
"""The method to run after the server's `serve_forever` methods ends for whatever reason."""
raise NotImplementedError

def __init__(self, pool: Union[TaskPool, SimpleTaskPool], **server_kwargs) -> None:
"""
Initializes by merely saving the internal attributes, but without starting the server yet.
The task pool must be passed here and can not be set/changed afterwards. This means a control server is always
tied to one specific task pool.

Args:
pool:
An instance of a `BaseTaskPool` subclass to tie the server to.
**server_kwargs (optional):
Keyword arguments that will be passed into the function that starts the server.
"""
self._pool: Union[TaskPool, SimpleTaskPool] = pool
self._server_kwargs = server_kwargs
self._server: Optional[AbstractServer] = None

@property
def pool(self) -> Union[TaskPool, SimpleTaskPool]:
"""Read-only property for accessing the task pool instance controlled by the server."""
return self._pool

def is_serving(self) -> bool:
"""Wrapper around the `asyncio.Server.is_serving` method."""
return self._server.is_serving()

async def _client_connected_cb(self, reader: StreamReader, writer: StreamWriter) -> None:
"""
The universal client callback that will be passed into the `_get_server_instance` method.
Instantiates a control session, performs the client handshake, and enters the session's `listen` loop.
"""
session = ControlSession(self, reader, writer)
await session.client_handshake()
await session.listen()

async def _serve_forever(self) -> None:
"""
To be run as an `asyncio.Task` by the following method.
@@ -124,9 +128,12 @@ class ControlServer(ABC):

async def serve_forever(self) -> Task:
"""
This method actually starts the server and begins listening to client connections on the specified interface.
Starts the server and begins listening to client connections.

It should never block because the serving will be performed in a separate task.

Returns:
The forever serving task. To stop the server, this task should be cancelled.
"""
log.debug("Starting %s...", self.__class__.__name__)
self._server = await self._get_server_instance(self._client_connected_cb, **self._server_kwargs)

@@ -134,12 +141,13 @@ class ControlServer(ABC):

class TCPControlServer(ControlServer):
"""Task pool control server class that exposes a TCP socket for control clients to connect to."""
"""Exposes a TCP socket for control clients to connect to."""
_client_class = TCPControlClient

def __init__(self, pool: Union[TaskPool, SimpleTaskPool], **server_kwargs) -> None:
self._host = server_kwargs.pop('host')
self._port = server_kwargs.pop('port')
def __init__(self, pool: AnyTaskPoolT, host: str, port: Union[int, str], **server_kwargs) -> None:
"""`host` and `port` are expected as non-optional server arguments."""
self._host = host
self._port = port
super().__init__(pool, **server_kwargs)

async def _get_server_instance(self, client_connected_cb: ConnectedCallbackT, **kwargs) -> AbstractServer:

@@ -152,13 +160,14 @@ class TCPControlServer(ControlServer):

class UnixControlServer(ControlServer):
"""Task pool control server class that exposes a unix socket for control clients to connect to."""
"""Exposes a unix socket for control clients to connect to."""
_client_class = UnixControlClient

def __init__(self, pool: Union[TaskPool, SimpleTaskPool], **server_kwargs) -> None:
def __init__(self, pool: AnyTaskPoolT, socket_path: PathT, **server_kwargs) -> None:
"""`socket_path` is expected as a non-optional server argument."""
from asyncio.streams import start_unix_server
self._start_unix_server = start_unix_server
self._socket_path = Path(server_kwargs.pop('path'))
self._socket_path = Path(socket_path)
super().__init__(pool, **server_kwargs)

async def _get_server_instance(self, client_connected_cb: ConnectedCallbackT, **kwargs) -> AbstractServer:
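Putting server, pool and client class together, a hedged usage sketch (the worker coroutine, its arguments and the socket path are invented; `SimpleTaskPool`'s constructor arguments are an assumption, not shown in this hunk):

    from asyncio import run, sleep
    from asyncio_taskpool import SimpleTaskPool
    from asyncio_taskpool.control import UnixControlServer

    async def work(seconds: int) -> None:
        await sleep(seconds)  # stand-in workload

    async def main() -> None:
        pool = SimpleTaskPool(work, args=(60,))
        server = UnixControlServer(pool, socket_path='/tmp/example_pool.sock')
        serving_task = await server.serve_forever()  # returns the serving task, does not block
        await serving_task  # keep serving until this task is cancelled

    run(main())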
@@ -15,7 +15,7 @@ You should have received a copy of the GNU Lesser General Public License along w
If not, see <https://www.gnu.org/licenses/>."""

__doc__ = """
This module contains the the definition of the `ControlSession` class used by the control server.
Definition of the :class:`ControlSession` used by a :class:`ControlServer`.
"""

@@ -26,30 +26,33 @@ from asyncio.streams import StreamReader, StreamWriter
from inspect import isfunction, signature
from typing import Callable, Optional, Union, TYPE_CHECKING

from ..constants import CLIENT_INFO, CMD, CMD_OK, SESSION_MSG_BYTES, STREAM_WRITER
from ..exceptions import CommandError, HelpRequested, ParserError
from ..helpers import return_or_exception
from ..pool import TaskPool, SimpleTaskPool
from .parser import ControlParser
from ..exceptions import CommandError, HelpRequested, ParserError
from ..pool import TaskPool, SimpleTaskPool
from ..internals.constants import CLIENT_INFO, CMD, CMD_OK, SESSION_MSG_BYTES, STREAM_WRITER
from ..internals.helpers import return_or_exception

if TYPE_CHECKING:
from .server import ControlServer

__all__ = ['ControlSession']

log = logging.getLogger(__name__)

class ControlSession:
"""
This class defines the API for controlling a task pool instance from the outside.
Manages a single control session between a server and a client.

The commands received from a connected client are translated into method calls on the task pool instance.
A subclass of the standard `argparse.ArgumentParser` is used to handle the input read from the stream.
A subclass of the standard :class:`argparse.ArgumentParser` is used to handle the input read from the stream.
"""

def __init__(self, server: 'ControlServer', reader: StreamReader, writer: StreamWriter) -> None:
"""
Instantiation should happen once a client connection to the control server has already been established.
Connection to the control server should already have been established.

For more convenient/efficient access, some of the server's properties are saved in separate attributes.
The argument parser is _not_ instantiated in the constructor. It requires a bit of client information during
@@ -57,7 +60,7 @@ class ControlSession:

Args:
server:
The instance of a `ControlServer` subclass starting the session.
The instance of a :class:`ControlServer` subclass starting the session.
reader:
The `asyncio.StreamReader` created when a client connected to the server.
writer:

@@ -75,8 +78,9 @@ class ControlSession:
Takes a pool method reference, executes it, and writes a response accordingly.

If the first parameter is named `self`, the method will be called with the `_pool` instance as its first
positional argument. If it returns nothing, the response upon successful execution will be `constants.CMD_OK`,
otherwise the response written to the stream will be its return value (as an encoded string).
positional argument.
If it returns nothing, the response upon successful execution will be :const:`constants.CMD_OK`, otherwise the
response written to the stream will be its return value (as an encoded string).

Args:
prop:

@@ -108,7 +112,7 @@ class ControlSession:
The reference to the property defined on the `_pool` instance's class.
**kwargs (optional):
If not empty, the property setter is executed and the keyword arguments are passed along to it; the
response upon successful execution will be `constants.CMD_OK`. Otherwise the property getter is
response upon successful execution will be :const:`constants.CMD_OK`. Otherwise the property getter is
executed and the response written to the stream will be its return value (as an encoded string).
"""
if kwargs:

@@ -121,9 +125,10 @@ class ControlSession:

async def client_handshake(self) -> None:
"""
This method must be invoked before starting any other client interaction.
Must be invoked before starting any other client interaction.

Client info is retrieved, server info is sent back, and the `ControlParser` is initialized and configured.
Client info is retrieved, server info is sent back, and the
:class:`ControlParser <asyncio_taskpool.control.parser.ControlParser>` is set up.
"""
client_info = json.loads((await self._reader.read(SESSION_MSG_BYTES)).decode().strip())
log.debug("%s connected", self._client_class_name)

@@ -144,9 +149,9 @@ class ControlSession:
"""
Takes a message from the client and attempts to parse it.

If a parsing error occurs, it is returned to the client. If the `HelpRequested` exception was raised by the
`ControlParser`, nothing else happens. Otherwise, the appropriate `_exec...` method is called with the entire
dictionary of keyword-arguments returned by the `ControlParser` passed into it.
If a parsing error occurs, it is returned to the client. If the :exc:`HelpRequested` exception was raised by the
:class:`ControlParser`, nothing else happens. Otherwise, the appropriate `_exec...` method is called with the
entire dictionary of keyword-arguments returned by the :class:`ControlParser` passed into it.

Args:
msg: The non-empty string read from the client stream.

@@ -170,9 +175,10 @@ class ControlSession:

async def listen(self) -> None:
"""
Enters the main control loop that only ends if either the server or the client disconnect.
Enters the main control loop listening to client input.

Messages from the client are read and passed into the `_parse_command` method, which handles the rest.
This method only returns if either the server or the client disconnect.
Messages from the client are read, parsed, and turned into pool commands (if possible).
This method should be called, when the client connection was established and the handshake was successful.
It will obviously block indefinitely.
"""
src/asyncio_taskpool/internals/__init__.py (new file, 0 lines)
@@ -16,15 +16,16 @@ If not, see <https://www.gnu.org/licenses/>."""

__doc__ = """
Constants used by more than one module in the package.

This module should **not** be considered part of the public API.
"""

PACKAGE_NAME = 'asyncio_taskpool'

DEFAULT_TASK_GROUP = ''
DATETIME_FORMAT = '%Y-%m-%d_%H-%M-%S'
DEFAULT_TASK_GROUP = 'default'

CLIENT_EXIT = 'exit'
DATETIME_FORMAT = '%Y-%m-%d_%H-%M-%S'

SESSION_MSG_BYTES = 1024 * 100
@@ -15,7 +15,9 @@ You should have received a copy of the GNU Lesser General Public License along w
If not, see <https://www.gnu.org/licenses/>."""

__doc__ = """
This module contains the definition of the `TaskGroupRegister` class.
Definition of :class:`TaskGroupRegister`.

It should not be considered part of the public API.
"""

@@ -26,9 +28,9 @@ from typing import Iterator, Set

class TaskGroupRegister(MutableSet):
"""
This class combines the interface of a regular `set` with that of the `asyncio.Lock`.
Combines the interface of a regular `set` with that of the `asyncio.Lock`.

It serves simultaneously as a container of IDs of tasks that belong to the same group, and as a mechanism for
Serves simultaneously as a container of IDs of tasks that belong to the same group, and as a mechanism for
preventing race conditions within a task group. The lock should be acquired before cancelling the entire group of
tasks, as well as before starting a task within the group.
"""
@@ -29,6 +29,22 @@ from .types import T, AnyCallableT, ArgsT, KwArgsT

async def execute_optional(function: AnyCallableT, args: ArgsT = (), kwargs: KwArgsT = None) -> Optional[T]:
"""
Runs `function` with `args` and `kwargs` and returns its output.

Args:
function:
Any callable that accepts the provided positional and keyword-arguments.
If it is a coroutine function, it will be awaited.
If it is not a callable, nothing is returned.
*args (optional):
Positional arguments to pass to `function`.
**kwargs (optional):
Keyword-arguments to pass to `function`.

Returns:
Whatever `function` returns (possibly after being awaited) or `None` if `function` is not callable.
"""
if not callable(function):
return
if kwargs is None:
@@ -39,6 +55,28 @@ async def execute_optional(function: AnyCallableT, args: ArgsT = (), kwargs: KwA

def star_function(function: AnyCallableT, arg: Any, arg_stars: int = 0) -> T:
"""
Calls `function` passing `arg` to it, optionally unpacking it first.

Args:
function:
Any callable that accepts the provided argument(s).
arg:
The single positional argument that `function` expects; in this case `arg_stars` should be 0.
Or the iterable of positional arguments that `function` expects; in this case `arg_stars` should be 1.
Or the mapping of keyword-arguments that `function` expects; in this case `arg_stars` should be 2.
arg_stars (optional):
Determines if and how to unpack `arg`.
0 means no unpacking, i.e. `arg` is passed into `function` directly as `function(arg)`.
1 means unpacking to an arbitrary number of positional arguments, i.e. as `function(*arg)`.
2 means unpacking to an arbitrary number of keyword-arguments, i.e. as `function(**arg)`.

Returns:
Whatever `function` returns.

Raises:
`ValueError`: `arg_stars` is something other than 0, 1, or 2.
"""
if arg_stars == 0:
return function(arg)
if arg_stars == 1:
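To make the three `arg_stars` modes concrete, a small illustration (the callable is invented):

    def combine(a, b=0):
        return a + b

    star_function(combine, 5)                              # arg_stars=0 -> combine(5)
    star_function(combine, (2, 3), arg_stars=1)            # -> combine(2, 3)
    star_function(combine, {'a': 2, 'b': 3}, arg_stars=2)  # -> combine(a=2, b=3)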
@@ -49,14 +87,30 @@ def star_function(function: AnyCallableT, arg: Any, arg_stars: int = 0) -> T:

async def join_queue(q: Queue) -> None:
"""Wrapper function around the join method of an `asyncio.Queue` instance."""
await q.join()

def get_first_doc_line(obj: object) -> str:
"""Takes an object and returns the first (non-empty) line of its docstring."""
return getdoc(obj).strip().split("\n", 1)[0].strip()

async def return_or_exception(_function_to_execute: AnyCallableT, *args, **kwargs) -> Union[T, Exception]:
"""
Returns the output of a function or the exception thrown during its execution.

Args:
_function_to_execute:
Any callable that accepts the provided positional and keyword-arguments.
*args (optional):
Positional arguments to pass to `_function_to_execute`.
**kwargs (optional):
Keyword-arguments to pass to `_function_to_execute`.

Returns:
Whatever `_function_to_execute` returns or throws. (An exception is not raised, but returned!)
"""
try:
if iscoroutinefunction(_function_to_execute):
return await _function_to_execute(*args, **kwargs)

@@ -16,6 +16,8 @@ If not, see <https://www.gnu.org/licenses/>."""

__doc__ = """
Custom type definitions used in various modules.

This module should **not** be considered part of the public API.
"""
@@ -15,19 +15,15 @@ You should have received a copy of the GNU Lesser General Public License along w
If not, see <https://www.gnu.org/licenses/>."""

__doc__ = """
This module contains the definitions of the task pool classes.
Definitions of the task pool classes.

A task pool is an object with a simple interface for aggregating and dynamically managing asynchronous tasks.
Generally speaking, a task is added to a pool by providing it with a coroutine function reference as well as the
arguments for that function.

The `BaseTaskPool` class is a parent class and not intended for direct use.
The `TaskPool` and `SimpleTaskPool` are subclasses intended for direct use.
The :class:`BaseTaskPool` is a parent class and not intended for direct use.
The :class:`TaskPool` and :class:`SimpleTaskPool` are subclasses intended for direct use.
While the former allows for heterogeneous collections of tasks that can be entirely unrelated to one another, the
latter requires a preemptive decision about the function **and** its arguments upon initialization and only allows
to dynamically control the **number** of tasks running at any point in time.

For further details about the classes check their respective docstrings.
For further details about the classes check their respective documentation.
"""

@@ -40,14 +36,22 @@ from asyncio.tasks import Task, create_task, gather
from contextlib import suppress
from datetime import datetime
from math import inf
from typing import Any, Awaitable, Dict, Iterable, Iterator, List, Set
from typing import Any, Awaitable, Dict, Iterable, Iterator, List, Set, Union

from . import exceptions
from .constants import DEFAULT_TASK_GROUP, DATETIME_FORMAT
from .group_register import TaskGroupRegister
from .helpers import execute_optional, star_function, join_queue
from .queue_context import Queue
from .types import ArgsT, KwArgsT, CoroutineFunc, EndCB, CancelCB
from .internals.constants import DEFAULT_TASK_GROUP, DATETIME_FORMAT
from .internals.group_register import TaskGroupRegister
from .internals.helpers import execute_optional, star_function, join_queue
from .internals.types import ArgsT, KwArgsT, CoroutineFunc, EndCB, CancelCB

__all__ = [
'BaseTaskPool',
'TaskPool',
'SimpleTaskPool',
'AnyTaskPoolT'
]

log = logging.getLogger(__name__)
@@ -109,7 +113,7 @@ class BaseTaskPool:
value: A non-negative integer.

Raises:
`ValueError` if `value` is less than 0.
`ValueError`: `value` is less than 0.
"""
if value < 0:
raise ValueError("Pool size can not be less than 0")

@@ -165,7 +169,7 @@ class BaseTaskPool:
@property
def is_full(self) -> bool:
"""
`False` if the number of running tasks is less than the `pool_size`.
`False` if the number of running tasks is less than the pool size.

When the pool is full, any call to start a new task within it will block, until there is enough room for it.
"""

@@ -182,7 +186,7 @@ class BaseTaskPool:
Set of integers representing the task IDs belonging to the specified groups.

Raises:
`InvalidGroupName` if one of the specified `group_names` does not exist in the pool.
`InvalidGroupName`: One of the specified `group_names` does not exist in the pool.
"""
ids = set()
for name in group_names:

@@ -206,10 +210,10 @@ class BaseTaskPool:
ignore_lock (optional): If `True`, a locked pool will produce no error here.

Raises:
`AssertionError` if both or neither of `awaitable` and `function` were passed.
`asyncio_taskpool.exceptions.PoolIsClosed` if the pool is closed.
`asyncio_taskpool.exceptions.NotCoroutine` if `awaitable` is not a cor. / `function` not a cor. func.
`asyncio_taskpool.exceptions.PoolIsLocked` if the pool has been locked and `ignore_lock` is `False`.
`AssertionError`: Both or neither of `awaitable` and `function` were passed.
`asyncio_taskpool.exceptions.PoolIsClosed`: The pool is closed.
`asyncio_taskpool.exceptions.NotCoroutine`: `awaitable` is not a cor. / `function` not a cor. func.
`asyncio_taskpool.exceptions.PoolIsLocked`: The pool has been locked and `ignore_lock` is `False`.
"""
assert (awaitable is None) != (function is None)
if awaitable and not iscoroutine(awaitable):
@@ -300,8 +304,8 @@ class BaseTaskPool:
"""
Starts a coroutine as a new task in the pool.

This method can block for a significant amount of time, **only if** the pool is full.
Otherwise it merely needs to acquire the `TaskGroupRegister` lock, which should never be held for a long time.
This method can block for a significant amount of time, **only if** the pool is full. Otherwise it merely needs
to acquire the :class:`TaskGroupRegister` lock, which should never be held for a long time.

Args:
awaitable:

@@ -341,9 +345,9 @@ class BaseTaskPool:
task_id: The ID of a task still running within the pool.

Raises:
`asyncio_taskpool.exceptions.AlreadyCancelled` if the task with `task_id` has been (recently) cancelled.
`asyncio_taskpool.exceptions.AlreadyEnded` if the task with `task_id` has ended (recently).
`asyncio_taskpool.exceptions.InvalidTaskID` if no task with `task_id` is known to the pool.
`asyncio_taskpool.exceptions.AlreadyCancelled`: The task with `task_id` has been (recently) cancelled.
`asyncio_taskpool.exceptions.AlreadyEnded`: The task with `task_id` has ended (recently).
`asyncio_taskpool.exceptions.InvalidTaskID`: No task with `task_id` is known to the pool.
"""
try:
return self._tasks_running[task_id]

@@ -358,16 +362,18 @@ class BaseTaskPool:
"""
Cancels the tasks with the specified IDs.

Each task ID must belong to a task still running within the pool. Otherwise one of the following exceptions will
be raised:
- `AlreadyCancelled` if one of the `task_ids` belongs to a task that has been (recently) cancelled.
- `AlreadyEnded` if one of the `task_ids` belongs to a task that has ended (recently).
- `InvalidTaskID` if any of the `task_ids` is not known to the pool.
Each task ID must belong to a task still running within the pool.

Note that once a pool has been flushed (see below), IDs of tasks that have ended previously will be forgotten.

Args:
task_ids: Arbitrary number of integers. Each must be an ID of a task still running within the pool.
*task_ids: Arbitrary number of integers. Each must be an ID of a task still running within the pool.
msg (optional): Passed to the `Task.cancel()` method of every task specified by the `task_ids`.

Raises:
`AlreadyCancelled`: One of the `task_ids` belongs to a task that has been (recently) cancelled.
`AlreadyEnded`: One of the `task_ids` belongs to a task that has ended (recently).
`InvalidTaskID`: One of the `task_ids` is not known to the pool.
"""
tasks = [self._get_running_task(task_id) for task_id in task_ids]
for task in tasks:
@@ -402,7 +408,7 @@ class BaseTaskPool:
msg (optional): Passed to the `Task.cancel()` method of every task specified by the `task_ids`.

Raises:
`InvalidGroupName` if no task group named `group_name` exists in the pool.
`InvalidGroupName`: No task group named `group_name` exists in the pool.
"""
log.debug("%s cancelling tasks in group %s", str(self), group_name)
try:

@@ -428,7 +434,7 @@ class BaseTaskPool:

async def flush(self, return_exceptions: bool = False):
"""
Calls `asyncio.gather` on all ended/cancelled tasks in the pool.
Gathers (i.e. awaits) all ended/cancelled tasks in the pool.

The tasks are subsequently forgotten by the pool. This method exists mainly to free up memory of unneeded
`Task` objects.

@@ -445,11 +451,11 @@ class BaseTaskPool:

async def gather_and_close(self, return_exceptions: bool = False):
"""
Calls `asyncio.gather` on **all** tasks in the pool, then closes it.
Gathers (i.e. awaits) **all** tasks in the pool, then closes it.

After this method returns, no more tasks can be started in the pool.

The `lock()` method must have been called prior to this.
:meth:`lock` must have been called prior to this.

This method may block, if one of the tasks blocks while catching a `asyncio.CancelledError` or if any of the
callbacks registered for a task blocks for whatever reason.

@@ -458,7 +464,7 @@ class BaseTaskPool:
return_exceptions (optional): Passed directly into `gather`.

Raises:
`PoolStillUnlocked` if the pool has not been locked yet.
`PoolStillUnlocked`: The pool has not been locked yet.
"""
if not self._locked:
raise exceptions.PoolStillUnlocked("Pool must be locked, before tasks can be gathered")
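In practice the locking requirement described here boils down to a shutdown sequence like the following sketch, where `pool` is any task pool instance and the code runs inside a coroutine:

    pool.lock()                    # no new tasks can be started from here on
    await pool.gather_and_close()  # await everything still in the pool, then close it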
@@ -519,9 +525,9 @@ class TaskPool(BaseTaskPool):

The task group is subsequently forgotten by the pool.

If any methods such as `map()` launched meta tasks belonging to that group, these meta tasks are cancelled
If any methods such as :meth:`map` launched meta tasks belonging to that group, these meta tasks are cancelled
before the actual tasks are cancelled. This means that any tasks "queued" to be started by a meta task will
**never even start**. In the case of `map()` this would mean that the `arg_iter` may be abandoned before it
**never even start**. In the case of :meth:`map` this would mean that its `arg_iter` may be abandoned before it
was fully consumed (if that is even possible).

Args:

@@ -529,7 +535,7 @@ class TaskPool(BaseTaskPool):
msg (optional): Passed to the `Task.cancel()` method of every task specified by the `task_ids`.

Raises:
`InvalidGroupName` if no task group named `group_name` exists in the pool.
`InvalidGroupName`: No task group named `group_name` exists in the pool.
"""
await super().cancel_group(group_name=group_name, msg=msg)

@@ -537,10 +543,10 @@ class TaskPool(BaseTaskPool):
"""
Cancels all tasks still running within the pool (including meta tasks).

If any methods such as `map()` launched meta tasks, these meta tasks are cancelled before the actual tasks are
cancelled. This means that any tasks "queued" to be started by a meta task will **never even start**. In the
case of `map()` this would mean that the `arg_iter` may be abandoned before it was fully consumed (if that is
even possible).
If any methods such as :meth:`map` launched meta tasks, these meta tasks are cancelled before the actual tasks
are cancelled. This means that any tasks "queued" to be started by a meta task will **never even start**. In the
case of :meth:`map` this would mean that its `arg_iter` may be abandoned before it was fully consumed (if that
is even possible).

Args:
msg (optional): Passed to the `Task.cancel()` method of every task specified by the `task_ids`.

@@ -575,7 +581,7 @@ class TaskPool(BaseTaskPool):

async def flush(self, return_exceptions: bool = False):
"""
Calls `asyncio.gather` on all ended/cancelled tasks in the pool.
Gathers (i.e. awaits) all ended/cancelled tasks in the pool.

The tasks are subsequently forgotten by the pool. This method exists mainly to free up memory of unneeded
`Task` objects. It also gets rid of unneeded meta tasks.

@@ -594,16 +600,16 @@ class TaskPool(BaseTaskPool):

async def gather_and_close(self, return_exceptions: bool = False):
"""
Calls `asyncio.gather` on **all** tasks in the pool, then closes it.
Gathers (i.e. awaits) **all** tasks in the pool, then closes it.

After this method returns, no more tasks can be started in the pool.

The `lock()` method must have been called prior to this.

Note that this method may block indefinitely as long as any task in the pool is not done. This includes meta
tasks launched my methods such as `map()`, which ends by itself, only once the `arg_iter` is fully consumed,
tasks launched by methods such as :meth:`map`, which ends by itself, only once its `arg_iter` is fully consumed,
which may not even be possible (depending on what the iterable of arguments represents). If you want to avoid
this, make sure to call `cancel_all()` prior to this.
this, make sure to call :meth:`cancel_all` prior to this.

This method may also block, if one of the tasks blocks while catching a `asyncio.CancelledError` or if any of
the callbacks registered for a task blocks for whatever reason.
@@ -612,8 +618,9 @@ class TaskPool(BaseTaskPool):
return_exceptions (optional): Passed directly into `gather`.

Raises:
`PoolStillUnlocked` if the pool has not been locked yet.
`PoolStillUnlocked`: The pool has not been locked yet.
"""
# TODO: It probably makes sense to put this superclass method call at the end (see TODO in `_map`).
await super().gather_and_close(return_exceptions=return_exceptions)
not_cancelled_meta_tasks = set()
while self._group_meta_tasks_running:

@@ -701,9 +708,9 @@ class TaskPool(BaseTaskPool):
The name of the task group that the newly spawned tasks have been added to.

Raises:
`PoolIsClosed` if the pool is closed.
`NotCoroutine` if `func` is not a coroutine function.
`PoolIsLocked` if the pool has been locked.
`PoolIsClosed`: The pool is closed.
`NotCoroutine`: `func` is not a coroutine function.
`PoolIsLocked`: The pool is currently locked.
"""
self._check_start(function=func)
if group_name is None:

@@ -717,7 +724,7 @@ class TaskPool(BaseTaskPool):
@classmethod
async def _queue_producer(cls, arg_queue: Queue, arg_iter: Iterator[Any], group_name: str) -> None:
"""
Keeps the arguments queue from `_map()` full as long as the iterator has elements.
Keeps the arguments queue from :meth:`_map` full as long as the iterator has elements.

Intended to be run as a meta task of a specific group.

@@ -744,7 +751,7 @@ class TaskPool(BaseTaskPool):

@staticmethod
def _get_map_end_callback(map_semaphore: Semaphore, actual_end_callback: EndCB) -> EndCB:
"""Returns a wrapped `end_callback` for each `_queue_consumer()` task that will release the `map_semaphore`."""
"""Returns a wrapped `end_callback` for each :meth:`_queue_consumer` task that releases the `map_semaphore`."""
async def release_callback(task_id: int) -> None:
map_semaphore.release()
await execute_optional(actual_end_callback, args=(task_id,))
@@ -753,7 +760,7 @@ class TaskPool(BaseTaskPool):
async def _queue_consumer(self, arg_queue: Queue, group_name: str, func: CoroutineFunc, arg_stars: int = 0,
end_callback: EndCB = None, cancel_callback: CancelCB = None) -> None:
"""
Consumes arguments from the queue from `_map()` and keeps a limited number of tasks working on them.
Consumes arguments from the queue from :meth:`_map` and keeps a limited number of tasks working on them.

The queue's maximum size is taken as the limiting value of an internal semaphore, which must be acquired before
a new task can be started, and which must be released when one of these tasks ends.

@@ -764,7 +771,7 @@ class TaskPool(BaseTaskPool):
arg_queue:
The queue of function arguments to consume for starting a new task.
group_name:
Name of the associated task group; passed into the `_start_task()` method.
Name of the associated task group; passed into :meth:`_start_task`.
func:
The coroutine function to use for spawning the new tasks within the task pool.
arg_stars (optional):

@@ -776,16 +783,16 @@ class TaskPool(BaseTaskPool):
The callback that was specified to execute after cancellation of the task (and the next one).
It is run with the task's ID as its only positional argument.
"""
map_semaphore = Semaphore(arg_queue.maxsize) # value determined by `group_size` in `_map()`
map_semaphore = Semaphore(arg_queue.maxsize) # value determined by `group_size` in :meth:`_map`
release_cb = self._get_map_end_callback(map_semaphore, actual_end_callback=end_callback)
while True:
# The following line blocks **only if** the number of running tasks spawned by this method has reached the
# specified maximum as determined in the `_map()` method.
# specified maximum as determined in :meth:`_map`.
await map_semaphore.acquire()
# We await the queue's `get()` coroutine and subsequently ensure that its `task_done()` method is called.
async with arg_queue as next_arg:
if next_arg is self._QUEUE_END_SENTINEL:
# The `_queue_producer()` either reached the last argument or was cancelled.
# The :meth:`_queue_producer` either reached the last argument or was cancelled.
return
try:
await self._start_task(star_function(func, next_arg, arg_stars=arg_stars), group_name=group_name,
@ -816,7 +823,7 @@ class TaskPool(BaseTaskPool):
Because this method delegates the spawning of the tasks to two meta tasks (a producer and a consumer of the
aforementioned queue), it **never blocks**. However, just because this method returns immediately, this does
not mean that any task was started or that any number of tasks will start soon, as this is solely determined by
the `pool_size` and the `group_size`.
the :attr:`BaseTaskPool.pool_size` and the `group_size`.

Args:
group_name:
@ -837,8 +844,8 @@ class TaskPool(BaseTaskPool):
It is run with the task's ID as its only positional argument.

Raises:
`ValueError` if `group_size` is less than 1.
`asyncio_taskpool.exceptions.InvalidGroupName` if a group named `group_name` exists in the pool.
`ValueError`: `group_size` is less than 1.
`asyncio_taskpool.exceptions.InvalidGroupName`: A group named `group_name` exists in the pool.
"""
self._check_start(function=func)
if group_size < 1:
@ -850,6 +857,11 @@ class TaskPool(BaseTaskPool):
# Set up internal arguments queue. We limit its maximum size to enable lazy consumption of `arg_iter` by the
# `_queue_producer()`; that way an argument
arg_queue = Queue(maxsize=group_size)
# TODO: This is the wrong thing to await before gathering!
# Since the queue producer and consumer operate in separate tasks, it is possible that the consumer
# "finishes" the entire queue before the producer manages to put more items in it, thus returning
# the `join` call before the arguments iterator was fully consumed.
# Probably the queue producer task should be awaited before gathering instead.
self._before_gathering.append(join_queue(arg_queue))
meta_tasks = self._group_meta_tasks_running.setdefault(group_name, set())
# Start the producer and consumer meta tasks.
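To make the TODO above concrete: `Queue.join()` only waits for items that have already been put and not yet marked done, so a fast consumer can let it return while the producer still holds unconsumed arguments. A toy timing demonstration (purely illustrative, not part of the library):

import asyncio

async def demo() -> None:
    queue: asyncio.Queue = asyncio.Queue(maxsize=1)
    await queue.put("first")

    async def slow_producer() -> None:
        await asyncio.sleep(1)       # producer is held up...
        await queue.put("second")    # ...and only refills the queue later

    producer = asyncio.create_task(slow_producer())
    queue.get_nowait()
    queue.task_done()
    await queue.join()               # returns right away: nothing is pending at this instant
    print("join() returned although 'second' was not even produced yet")
    await producer

asyncio.run(demo())
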
@ -877,7 +889,7 @@ class TaskPool(BaseTaskPool):
Because this method delegates the spawning of the tasks to two meta tasks (a producer and a consumer of the
aforementioned queue), it **never blocks**. However, just because this method returns immediately, this does
not mean that any task was started or that any number of tasks will start soon, as this is solely determined by
the `pool_size` and the `group_size`.
the :attr:`BaseTaskPool.pool_size` and the `group_size`.

Args:
func:
@ -899,11 +911,11 @@ class TaskPool(BaseTaskPool):
The name of the task group that the newly spawned tasks will be added to.

Raises:
`PoolIsClosed` if the pool is closed.
`NotCoroutine` if `func` is not a coroutine function.
`PoolIsLocked` if the pool has been locked.
`ValueError` if `group_size` is less than 1.
`InvalidGroupName` if a group named `group_name` exists in the pool.
`PoolIsClosed`: The pool is closed.
`NotCoroutine`: `func` is not a coroutine function.
`PoolIsLocked`: The pool is currently locked.
`ValueError`: `group_size` is less than 1.
`InvalidGroupName`: A group named `group_name` exists in the pool.
"""
if group_name is None:
group_name = self._generate_group_name('map', func)
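A hedged usage sketch of the behaviour documented above (the `fetch` coroutine and the numbers are invented; the call pattern follows the docstrings, but treat the exact keywords as assumptions): the call returns as soon as the two meta tasks are set up, while at most `group_size` of the spawned tasks run concurrently.

import asyncio
from asyncio_taskpool import TaskPool

async def fetch(item: int) -> None:
    # hypothetical workload standing in for a real coroutine function
    await asyncio.sleep(0.1)
    print("processed", item)

async def main() -> None:
    pool = TaskPool()
    # Returns immediately with the new group's name; at most 5 `fetch` tasks
    # spawned from this call are running at any one time.
    group = await pool.map(fetch, range(100), group_size=5)
    print("group started:", group)
    ...  # eventually wait for the remaining tasks and shut the pool down (not shown here)

asyncio.run(main())
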
@ -914,8 +926,8 @@ class TaskPool(BaseTaskPool):
async def starmap(self, func: CoroutineFunc, args_iter: Iterable[ArgsT], group_size: int = 1,
group_name: str = None, end_callback: EndCB = None, cancel_callback: CancelCB = None) -> str:
"""
Like `map()` except that the elements of `args_iter` are expected to be iterables themselves to be unpacked as
positional arguments to the function.
Like :meth:`map` except that the elements of `args_iter` are expected to be iterables themselves to be unpacked
as positional arguments to the function.
Each coroutine then looks like `func(*args)`, `args` being an element from `args_iter`.
"""
if group_name is None:
@ -928,8 +940,8 @@ class TaskPool(BaseTaskPool):
group_name: str = None, end_callback: EndCB = None,
cancel_callback: CancelCB = None) -> str:
"""
Like `map()` except that the elements of `kwargs_iter` are expected to be iterables themselves to be unpacked as
keyword-arguments to the function.
Like :meth:`map` except that the elements of `kwargs_iter` are expected to be iterables themselves to be
unpacked as keyword-arguments to the function.
Each coroutine then looks like `func(**kwargs)`, `kwargs` being an element from `kwargs_iter`.
"""
if group_name is None:
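Put differently, the variants only differ in how each element of the iterable is handed to `func` (a hedged sketch with invented data; the keyword-unpacking method whose body is shown in the hunk above is assumed to be the package's `doublestarmap`):

import asyncio
from asyncio_taskpool import TaskPool

async def add(x: int, y: int = 0) -> None:
    print(x + y)

async def main() -> None:
    pool = TaskPool()
    await pool.map(add, [1, 2, 3])                     # spawns add(1), add(2), add(3)
    await pool.starmap(add, [(1, 2), (3, 4)])          # spawns add(1, 2), add(3, 4)
    await pool.doublestarmap(add, [{"x": 1, "y": 2}])  # spawns add(x=1, y=2)
    ...  # wait for the spawned tasks and close the pool (not shown here)

asyncio.run(main())
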
@ -951,7 +963,7 @@ class SimpleTaskPool(BaseTaskPool):
As long as there is room in the pool, more tasks can be added. (By default, there is no pool size limit.)
Each task started in the pool receives a unique ID, which can be used to cancel specific tasks at any moment.
However, since all tasks come from the same function-arguments-combination, the specificity of the `cancel()` method
is probably unnecessary. Instead, a simpler `stop()` method is introduced.
is probably unnecessary. Instead, a simpler :meth:`stop` method is introduced.

Adding tasks blocks **only if** the pool is full at that moment.
"""
@ -981,7 +993,7 @@ class SimpleTaskPool(BaseTaskPool):
An optional name for the pool.

Raises:
`NotCoroutine` if `func` is not a coroutine function.
`NotCoroutine`: `func` is not a coroutine function.
"""
if not iscoroutinefunction(func):
raise exceptions.NotCoroutine(f"Not a coroutine function: {func}")
@ -1003,17 +1015,32 @@ class SimpleTaskPool(BaseTaskPool):
end_callback=self._end_callback, cancel_callback=self._cancel_callback)

async def start(self, num: int) -> List[int]:
"""Starts `num` new tasks within the pool and returns their IDs."""
"""
Starts specified number of new tasks in the pool and returns their IDs.

This method may block if there is less room in the pool than the desired number of new tasks.

Args:
num: The number of new tasks to start.

Returns:
List of IDs of the new tasks that have been started (not necessarily in the order they were started).
"""
ids = await gather(*(self._start_one() for _ in range(num)))
assert isinstance(ids, list) # for PyCharm
return ids

def stop(self, num: int) -> List[int]:
"""
Cancels `num` running tasks within the pool and returns their IDs.
Cancels specified number of tasks in the pool and returns their IDs.

The tasks are canceled in LIFO order, meaning tasks started later will be stopped before those started earlier.
If `num` is greater than or equal to the number of currently running tasks, all tasks are cancelled.

Args:
num: The number of tasks to cancel; if `num` >= :attr:`BaseTaskPool.num_running`, all tasks are cancelled.

Returns:
List of IDs of the tasks that have been cancelled (in the order they were cancelled).
"""
ids = []
for i, task_id in enumerate(reversed(self._tasks_running)):
@ -1026,3 +1053,6 @@ class SimpleTaskPool(BaseTaskPool):
def stop_all(self) -> List[int]:
"""Cancels all running tasks and returns their IDs."""
return self.stop(self.num_running)


AnyTaskPoolT = Union[TaskPool, SimpleTaskPool]

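A brief usage sketch for `SimpleTaskPool` as documented above; the `worker` coroutine is invented, and the constructor signature (a coroutine function plus its fixed positional arguments) is an assumption based on the surrounding docstrings:

import asyncio
from asyncio_taskpool import SimpleTaskPool

async def worker(label: str) -> None:
    # hypothetical long-running job
    while True:
        await asyncio.sleep(1)
        print("still working:", label)

async def main() -> None:
    pool = SimpleTaskPool(worker, args=("demo",))  # assumed constructor signature
    started = await pool.start(5)         # five tasks, all running worker("demo")
    print("started task IDs:", started)
    await asyncio.sleep(3)
    pool.stop(2)                          # cancels the two most recently started tasks (LIFO)
    pool.stop_all()                       # cancels whatever is still running
    ...  # let callbacks run and shut the pool down (not shown here)

asyncio.run(main())
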
@ -15,7 +15,7 @@ You should have received a copy of the GNU Lesser General Public License along w
If not, see <https://www.gnu.org/licenses/>."""

__doc__ = """
This module contains the definition of an `asyncio.Queue` subclass.
Definition of an :code:`asyncio.Queue` subclass with some small additions.
"""


@ -23,12 +23,20 @@ from asyncio.queues import Queue as _Queue
from typing import Any


__all__ = ['Queue']


class Queue(_Queue):
"""This just adds a little syntactic sugar to the `asyncio.Queue`."""
"""
Adds a little syntactic sugar to the :code:`asyncio.Queue`.

Allows being used as an async context manager awaiting `get` upon entering the context and calling
:meth:`item_processed` upon exiting it.
"""

def item_processed(self) -> None:
"""
Does exactly the same as `task_done()`.
Does exactly the same as :meth:`asyncio.Queue.task_done`.

This method exists because `task_done` is an atrocious name for the method. It communicates the wrong thing,
invites confusion, and immensely reduces readability (in the context of this library). And readability counts.
@ -39,7 +47,7 @@ class Queue(_Queue):
"""
Implements an asynchronous context manager for the queue.

Upon entering `get()` is awaited and subsequently whatever came out of the queue is returned.
Upon entering :meth:`get` is awaited and subsequently whatever came out of the queue is returned.
It allows writing code this way:
>>> queue = Queue()
>>> ...
@ -52,7 +60,7 @@ class Queue(_Queue):
"""
Implements an asynchronous context manager for the queue.

Upon exiting `item_processed()` is called. This is why this context manager may not always be what you want,
Upon exiting :meth:`item_processed` is called. This is why this context manager may not always be what you want,
but in some situations it makes the code much cleaner.
"""
self.item_processed()
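Taken together, the two dunder methods amount to the following self-contained re-creation (a sketch of the behaviour described above, not the module's literal source):

import asyncio

class Queue(asyncio.Queue):
    """Minimal re-creation of the context-manager sugar described above."""

    def item_processed(self) -> None:
        self.task_done()  # identical semantics, friendlier name

    async def __aenter__(self):
        return await self.get()      # entering the context yields the next queue item

    async def __aexit__(self, exc_type, exc, tb) -> None:
        self.item_processed()        # leaving the context marks that item as done

async def main() -> None:
    queue = Queue()
    await queue.put("job")
    async with queue as item:        # roughly: item = await queue.get()
        print("handling", item)
    await queue.join()               # returns immediately, since the single item is done

asyncio.run(main())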