Merge pull request #150 from tiqi-group/feat/task_decorator

Feat: Replace implicit async function tasks with task decorator
This commit is contained in:
Mose Müller 2024-09-16 15:51:52 +02:00 committed by GitHub
commit 9180bb1d9e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
33 changed files with 577 additions and 536 deletions

View File

@ -13,6 +13,12 @@
::: pydase.components ::: pydase.components
handler: python handler: python
::: pydase.task
handler: python
options:
inherited_members: false
show_submodules: true
::: pydase.utils.serialization.serializer ::: pydase.utils.serialization.serializer
handler: python handler: python

View File

@ -1,20 +1,18 @@
# Understanding Tasks # Understanding Tasks
In `pydase`, a task is defined as an asynchronous function without arguments contained in a class that inherits from `pydase.DataService`. These tasks usually contain a while loop and are designed to carry out periodic functions. In `pydase`, a task is defined as an asynchronous function without arguments that is decorated with the `@task` decorator and contained in a class that inherits from `pydase.DataService`. These tasks usually contain a while loop and are designed to carry out periodic functions. For example, a task might be used to periodically read sensor data, update a database, or perform any other recurring job.
For example, a task might be used to periodically read sensor data, update a database, or perform any other recurring job. One core feature of `pydase` is its ability to automatically generate start and stop functions for these tasks. This allows you to control task execution via both the frontend and python clients, giving you flexible and powerful control over your service's operation. `pydase` allows you to control task execution via both the frontend and Python clients and can automatically start tasks upon initialization of the service. By using the `@task` decorator with the `autostart=True` argument in your service class, `pydase` will automatically start these tasks when the server is started. Here's an example:
Another powerful feature of `pydase` is its ability to automatically start tasks upon initialization of the service. By specifying the tasks and their arguments in the `_autostart_tasks` dictionary in your service class's `__init__` method, `pydase` will automatically start these tasks when the server is started. Here's an example:
```python ```python
import pydase import pydase
from pydase.task.decorator import task
class SensorService(pydase.DataService): class SensorService(pydase.DataService):
def __init__(self): def __init__(self):
super().__init__() super().__init__()
self.readout_frequency = 1.0 self.readout_frequency = 1.0
self._autostart_tasks["read_sensor_data"] = ()
def _process_data(self, data: ...) -> None: def _process_data(self, data: ...) -> None:
... ...
@ -22,6 +20,7 @@ class SensorService(pydase.DataService):
def _read_from_sensor(self) -> Any: def _read_from_sensor(self) -> Any:
... ...
@task(autostart=True)
async def read_sensor_data(self): async def read_sensor_data(self):
while True: while True:
data = self._read_from_sensor() data = self._read_from_sensor()
@ -34,6 +33,6 @@ if __name__ == "__main__":
pydase.Server(service=service).run() pydase.Server(service=service).run()
``` ```
In this example, `read_sensor_data` is a task that continuously reads data from a sensor. By adding it to the `_autostart_tasks` dictionary, it will automatically start running when `pydase.Server(service).run()` is executed. In this example, `read_sensor_data` is a task that continuously reads data from a sensor. By decorating it with `@task(autostart=True)`, it will automatically start running when `pydase.Server(service).run()` is executed.
As with all tasks, `pydase` will generate `start_read_sensor_data` and `stop_read_sensor_data` methods, which can be called to manually start and stop the data reading task. The readout frequency can be updated using the `readout_frequency` attribute.
The `@task` decorator replaces the function with a task object that has `start()` and `stop()` methods. This means you can control the task execution directly using these methods. For instance, you can manually start or stop the task by calling `service.read_sensor_data.start()` and `service.read_sensor_data.stop()`, respectively.

View File

@ -4,7 +4,6 @@ import { NumberComponent, NumberObject } from "./NumberComponent";
import { SliderComponent } from "./SliderComponent"; import { SliderComponent } from "./SliderComponent";
import { EnumComponent } from "./EnumComponent"; import { EnumComponent } from "./EnumComponent";
import { MethodComponent } from "./MethodComponent"; import { MethodComponent } from "./MethodComponent";
import { AsyncMethodComponent } from "./AsyncMethodComponent";
import { StringComponent } from "./StringComponent"; import { StringComponent } from "./StringComponent";
import { ListComponent } from "./ListComponent"; import { ListComponent } from "./ListComponent";
import { DataServiceComponent, DataServiceJSON } from "./DataServiceComponent"; import { DataServiceComponent, DataServiceJSON } from "./DataServiceComponent";
@ -17,6 +16,7 @@ import { updateValue } from "../socket";
import { DictComponent } from "./DictComponent"; import { DictComponent } from "./DictComponent";
import { parseFullAccessPath } from "../utils/stateUtils"; import { parseFullAccessPath } from "../utils/stateUtils";
import { SerializedEnum, SerializedObject } from "../types/SerializedObject"; import { SerializedEnum, SerializedObject } from "../types/SerializedObject";
import { TaskComponent, TaskStatus } from "./TaskComponent";
interface GenericComponentProps { interface GenericComponentProps {
attribute: SerializedObject; attribute: SerializedObject;
@ -144,7 +144,6 @@ export const GenericComponent = React.memo(
/> />
); );
} else if (attribute.type === "method") { } else if (attribute.type === "method") {
if (!attribute.async) {
return ( return (
<MethodComponent <MethodComponent
fullAccessPath={fullAccessPath} fullAccessPath={fullAccessPath}
@ -155,19 +154,6 @@ export const GenericComponent = React.memo(
render={attribute.frontend_render} render={attribute.frontend_render}
/> />
); );
} else {
return (
<AsyncMethodComponent
fullAccessPath={fullAccessPath}
docString={attribute.doc}
value={attribute.value as "RUNNING" | null}
addNotification={addNotification}
displayName={displayName}
id={id}
render={attribute.frontend_render}
/>
);
}
} else if (attribute.type === "str") { } else if (attribute.type === "str") {
return ( return (
<StringComponent <StringComponent
@ -182,6 +168,17 @@ export const GenericComponent = React.memo(
id={id} id={id}
/> />
); );
} else if (attribute.type == "Task") {
return (
<TaskComponent
fullAccessPath={fullAccessPath}
docString={attribute.doc}
status={attribute.value["status"].value as TaskStatus}
addNotification={addNotification}
displayName={displayName}
id={id}
/>
);
} else if (attribute.type === "DataService") { } else if (attribute.type === "DataService") {
return ( return (
<DataServiceComponent <DataServiceComponent

View File

@ -5,67 +5,51 @@ import { DocStringComponent } from "./DocStringComponent";
import { LevelName } from "./NotificationsComponent"; import { LevelName } from "./NotificationsComponent";
import useRenderCount from "../hooks/useRenderCount"; import useRenderCount from "../hooks/useRenderCount";
interface AsyncMethodProps { export type TaskStatus = "RUNNING" | "NOT_RUNNING";
interface TaskProps {
fullAccessPath: string; fullAccessPath: string;
value: "RUNNING" | null;
docString: string | null; docString: string | null;
hideOutput?: boolean; status: TaskStatus;
addNotification: (message: string, levelname?: LevelName) => void; addNotification: (message: string, levelname?: LevelName) => void;
displayName: string; displayName: string;
id: string; id: string;
render: boolean;
} }
export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => { export const TaskComponent = React.memo((props: TaskProps) => {
const { const { fullAccessPath, docString, status, addNotification, displayName, id } = props;
fullAccessPath,
docString,
value: runningTask,
addNotification,
displayName,
id,
} = props;
// Conditional rendering based on the 'render' prop.
if (!props.render) {
return null;
}
const renderCount = useRenderCount(); const renderCount = useRenderCount();
const formRef = useRef(null); const formRef = useRef(null);
const [spinning, setSpinning] = useState(false); const [spinning, setSpinning] = useState(false);
const name = fullAccessPath.split(".").at(-1)!;
const parentPath = fullAccessPath.slice(0, -(name.length + 1));
useEffect(() => { useEffect(() => {
let message: string; let message: string;
if (runningTask === null) { if (status === "RUNNING") {
message = `${fullAccessPath} task was stopped.`;
} else {
message = `${fullAccessPath} was started.`; message = `${fullAccessPath} was started.`;
} else {
message = `${fullAccessPath} was stopped.`;
} }
addNotification(message); addNotification(message);
setSpinning(false); setSpinning(false);
}, [props.value]); }, [status]);
const execute = async (event: React.FormEvent) => { const execute = async (event: React.FormEvent) => {
event.preventDefault(); event.preventDefault();
let method_name: string;
if (runningTask !== undefined && runningTask !== null) { const method_name = status == "RUNNING" ? "stop" : "start";
method_name = `stop_${name}`;
} else {
method_name = `start_${name}`;
}
const accessPath = [parentPath, method_name].filter((element) => element).join("."); const accessPath = [fullAccessPath, method_name]
.filter((element) => element)
.join(".");
setSpinning(true); setSpinning(true);
runMethod(accessPath); runMethod(accessPath);
}; };
return ( return (
<div className="component asyncMethodComponent" id={id}> <div className="component taskComponent" id={id}>
{process.env.NODE_ENV === "development" && <div>Render count: {renderCount}</div>} {process.env.NODE_ENV === "development" && <div>Render count: {renderCount}</div>}
<Form onSubmit={execute} ref={formRef}> <Form onSubmit={execute} ref={formRef}>
<InputGroup> <InputGroup>
@ -76,7 +60,7 @@ export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
<Button id={`button-${id}`} type="submit"> <Button id={`button-${id}`} type="submit">
{spinning ? ( {spinning ? (
<Spinner size="sm" role="status" aria-hidden="true" /> <Spinner size="sm" role="status" aria-hidden="true" />
) : runningTask === "RUNNING" ? ( ) : status === "RUNNING" ? (
"Stop " "Stop "
) : ( ) : (
"Start " "Start "
@ -88,4 +72,4 @@ export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
); );
}); });
AsyncMethodComponent.displayName = "AsyncMethodComponent"; TaskComponent.displayName = "TaskComponent";

View File

@ -77,7 +77,12 @@ type SerializedException = SerializedObjectBase & {
type: "Exception"; type: "Exception";
}; };
type DataServiceTypes = "DataService" | "Image" | "NumberSlider" | "DeviceConnection"; type DataServiceTypes =
| "DataService"
| "Image"
| "NumberSlider"
| "DeviceConnection"
| "Task";
type SerializedDataService = SerializedObjectBase & { type SerializedDataService = SerializedObjectBase & {
name: string; name: string;

View File

@ -1,6 +1,6 @@
[tool.poetry] [tool.poetry]
name = "pydase" name = "pydase"
version = "0.9.1" version = "0.10.0"
description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases." description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases."
authors = ["Mose Mueller <mosmuell@ethz.ch>"] authors = ["Mose Mueller <mosmuell@ethz.ch>"]
readme = "README.md" readme = "README.md"

View File

@ -8,6 +8,7 @@ import socketio # type: ignore
import pydase.components import pydase.components
from pydase.client.proxy_loader import ProxyClassMixin, ProxyLoader from pydase.client.proxy_loader import ProxyClassMixin, ProxyLoader
from pydase.utils.helpers import current_event_loop_exists
from pydase.utils.serialization.deserializer import loads from pydase.utils.serialization.deserializer import loads
from pydase.utils.serialization.types import SerializedDataService, SerializedObject from pydase.utils.serialization.types import SerializedDataService, SerializedObject
@ -74,6 +75,7 @@ class ProxyClass(ProxyClassMixin, pydase.components.DeviceConnection):
self, sio_client: socketio.AsyncClient, loop: asyncio.AbstractEventLoop self, sio_client: socketio.AsyncClient, loop: asyncio.AbstractEventLoop
) -> None: ) -> None:
super().__init__() super().__init__()
pydase.components.DeviceConnection.__init__(self)
self._initialise(sio_client=sio_client, loop=loop) self._initialise(sio_client=sio_client, loop=loop)
@ -107,7 +109,11 @@ class Client:
): ):
self._url = url self._url = url
self._sio = socketio.AsyncClient() self._sio = socketio.AsyncClient()
if not current_event_loop_exists():
self._loop = asyncio.new_event_loop() self._loop = asyncio.new_event_loop()
asyncio.set_event_loop(self._loop)
else:
self._loop = asyncio.get_event_loop()
self.proxy = ProxyClass(sio_client=self._sio, loop=self._loop) self.proxy = ProxyClass(sio_client=self._sio, loop=self._loop)
"""A proxy object representing the remote service, facilitating interaction as """A proxy object representing the remote service, facilitating interaction as
if it were local.""" if it were local."""

View File

@ -351,7 +351,7 @@ class ProxyLoader:
) -> Any: ) -> Any:
# Custom types like Components or DataService classes # Custom types like Components or DataService classes
component_class = cast( component_class = cast(
type, Deserializer.get_component_class(serialized_object["type"]) type, Deserializer.get_service_base_class(serialized_object["type"])
) )
class_bases = ( class_bases = (
ProxyClassMixin, ProxyClassMixin,

View File

@ -1,6 +1,7 @@
import asyncio import asyncio
import pydase.data_service import pydase.data_service
import pydase.task.decorator
class DeviceConnection(pydase.data_service.DataService): class DeviceConnection(pydase.data_service.DataService):
@ -52,7 +53,6 @@ class DeviceConnection(pydase.data_service.DataService):
def __init__(self) -> None: def __init__(self) -> None:
super().__init__() super().__init__()
self._connected = False self._connected = False
self._autostart_tasks["_handle_connection"] = () # type: ignore
self._reconnection_wait_time = 10.0 self._reconnection_wait_time = 10.0
def connect(self) -> None: def connect(self) -> None:
@ -70,6 +70,7 @@ class DeviceConnection(pydase.data_service.DataService):
""" """
return self._connected return self._connected
@pydase.task.decorator.task(autostart=True)
async def _handle_connection(self) -> None: async def _handle_connection(self) -> None:
"""Automatically tries reconnecting to the device if it is not connected. """Automatically tries reconnecting to the device if it is not connected.
This method leverages the `connect` method and the `connected` property to This method leverages the `connect` method and the `connected` property to

View File

@ -1,15 +1,7 @@
from __future__ import annotations from __future__ import annotations
from typing import TYPE_CHECKING, Any
from pydase.observer_pattern.observable.observable import Observable from pydase.observer_pattern.observable.observable import Observable
if TYPE_CHECKING:
from pydase.data_service.data_service import DataService
from pydase.data_service.task_manager import TaskManager
class AbstractDataService(Observable): class AbstractDataService(Observable):
__root__: DataService pass
_task_manager: TaskManager
_autostart_tasks: dict[str, tuple[Any]]

View File

@ -5,7 +5,6 @@ from typing import Any
import pydase.units as u import pydase.units as u
from pydase.data_service.abstract_data_service import AbstractDataService from pydase.data_service.abstract_data_service import AbstractDataService
from pydase.data_service.task_manager import TaskManager
from pydase.observer_pattern.observable.observable import ( from pydase.observer_pattern.observable.observable import (
Observable, Observable,
) )
@ -24,11 +23,6 @@ logger = logging.getLogger(__name__)
class DataService(AbstractDataService): class DataService(AbstractDataService):
def __init__(self) -> None: def __init__(self) -> None:
super().__init__() super().__init__()
self._task_manager = TaskManager(self)
if not hasattr(self, "_autostart_tasks"):
self._autostart_tasks = {}
self.__check_instance_classes() self.__check_instance_classes()
def __setattr__(self, __name: str, __value: Any) -> None: def __setattr__(self, __name: str, __value: Any) -> None:

View File

@ -53,7 +53,7 @@ class DataServiceObserver(PropertyObserver):
cached_value = cached_value_dict.get("value") cached_value = cached_value_dict.get("value")
if ( if (
all(part[0] != "_" for part in full_access_path.split(".")) all(part[0] != "_" for part in full_access_path.split("."))
and cached_value != value and cached_value != dump(value)["value"]
): ):
logger.debug("'%s' changed to '%s'", full_access_path, value) logger.debug("'%s' changed to '%s'", full_access_path, value)

View File

@ -1,225 +0,0 @@
from __future__ import annotations
import asyncio
import inspect
import logging
from enum import Enum
from typing import TYPE_CHECKING, Any
from pydase.data_service.abstract_data_service import AbstractDataService
from pydase.utils.helpers import (
function_has_arguments,
get_class_and_instance_attributes,
is_property_attribute,
)
if TYPE_CHECKING:
from collections.abc import Callable
from .data_service import DataService
logger = logging.getLogger(__name__)
class TaskStatus(Enum):
    # Status value emitted via `_notify_changed` while a task is executing; a
    # stopped task is signalled with `None` instead of an enum member.
    RUNNING = "running"
class TaskManager:
    """
    The TaskManager class is a utility designed to manage asynchronous tasks. It
    provides functionality for starting, stopping, and tracking these tasks. The class
    is primarily used by the DataService class to manage its tasks.

    A task in TaskManager is any asynchronous function. To add a task, you simply need
    to define an async function within your class that extends TaskManager. For
    example:

    ```python
    class MyService(DataService):
        async def my_task(self):
            # Your task implementation here
            pass
    ```

    With the above definition, TaskManager automatically creates `start_my_task` and
    `stop_my_task` methods that can be used to control the task.

    TaskManager also supports auto-starting tasks. If there are tasks that should start
    running as soon as an instance of your class is created, you can define them in
    `self._autostart_tasks` in your class constructor (__init__ method). Here's how:

    ```python
    class MyService(DataService):
        def __init__(self):
            self._autostart_tasks = {
                "my_task": (*args)  # Replace with actual arguments
            }
            self.wait_time = 1
            super().__init__()

        async def my_task(self, *args):
            while True:
                # Your task implementation here
                await asyncio.sleep(self.wait_time)
    ```

    In the above example, `my_task` will start running as soon as
    `_start_autostart_tasks` is called which is done when the DataService instance is
    passed to the `pydase.Server` class.

    The responsibilities of the TaskManager class are:

    - Track all running tasks: Keeps track of all the tasks that are currently running.
      This allows for monitoring of task statuses and for making sure tasks do not
      overlap.
    - Provide the ability to start and stop tasks: Automatically creates methods to
      start and stop each task.
    - Emit notifications when the status of a task changes: Has a built-in mechanism
      for emitting notifications when a task starts or stops. This is used to update
      the user interfaces, but can also be used to write logs, etc.
    """

    def __init__(self, service: DataService) -> None:
        # The service whose argument-less coroutine methods get start_/stop_ wrappers.
        self.service = service

        self.tasks: dict[str, asyncio.Task[None]] = {}
        """A dictionary to keep track of running tasks. The keys are the names of the
        tasks and the values are TaskDict instances which include the task itself and
        its kwargs.
        """

        self._set_start_and_stop_for_async_methods()

    @property
    def _loop(self) -> asyncio.AbstractEventLoop:
        # Resolved lazily so a TaskManager can be constructed before any event loop
        # exists; raises RuntimeError if no loop is running when accessed.
        return asyncio.get_running_loop()

    def _set_start_and_stop_for_async_methods(self) -> None:
        # Scan the service for coroutine functions without arguments and attach
        # generated `start_<name>` / `stop_<name>` control methods for each one.
        for name in dir(self.service):
            # circumvents calling properties
            if is_property_attribute(self.service, name):
                continue

            method = getattr(self.service, name)
            if inspect.iscoroutinefunction(method):
                if function_has_arguments(method):
                    # Coroutines taking arguments cannot be managed as tasks; tell the
                    # user why this method was skipped.
                    logger.info(
                        "Async function %a is defined with at least one argument. If "
                        "you want to use it as a task, remove the argument(s) from the "
                        "function definition.",
                        method.__name__,
                    )
                    continue

                # create start and stop methods for each coroutine
                setattr(
                    self.service, f"start_{name}", self._make_start_task(name, method)
                )
                setattr(self.service, f"stop_{name}", self._make_stop_task(name))

    def _initiate_task_startup(self) -> None:
        # Start every task listed in the service's `_autostart_tasks` mapping through
        # its generated `start_<name>` method.
        if self.service._autostart_tasks is not None:
            for service_name, args in self.service._autostart_tasks.items():
                start_method = getattr(self.service, f"start_{service_name}", None)
                if start_method is not None and callable(start_method):
                    start_method(*args)
                else:
                    logger.warning(
                        "No start method found for service '%s'", service_name
                    )

    def start_autostart_tasks(self) -> None:
        # Start this service's autostart tasks, then recurse into nested
        # AbstractDataService attributes (including those held inside lists).
        self._initiate_task_startup()
        attrs = get_class_and_instance_attributes(self.service)

        for attr_value in attrs.values():
            if isinstance(attr_value, AbstractDataService):
                attr_value._task_manager.start_autostart_tasks()
            elif isinstance(attr_value, list):
                for item in attr_value:
                    if isinstance(item, AbstractDataService):
                        item._task_manager.start_autostart_tasks()

    def _make_stop_task(self, name: str) -> Callable[..., Any]:
        """
        Factory function to create a 'stop_task' function for a running task.
        The generated function cancels the associated asyncio task using 'name' for
        identification, ensuring proper cleanup. Avoids closure and late binding
        issues.

        Args:
            name (str): The name of the coroutine task, used for its identification.
        """

        def stop_task() -> None:
            # cancel the task; call_soon_threadsafe allows this to be invoked from a
            # thread other than the one running the event loop
            task = self.tasks.get(name, None)
            if task is not None:
                self._loop.call_soon_threadsafe(task.cancel)

        return stop_task

    def _make_start_task(
        self, name: str, method: Callable[..., Any]
    ) -> Callable[..., Any]:
        """
        Factory function to create a 'start_task' function for a coroutine.
        The generated function starts the coroutine as an asyncio task, handling
        registration and monitoring.
        It uses 'name' and 'method' to avoid the closure and late binding issue.

        Args:
            name (str): The name of the coroutine, used for task management.
            method (callable): The coroutine to be turned into an asyncio task.
        """

        def start_task() -> None:
            def task_done_callback(task: asyncio.Task[None], name: str) -> None:
                """Handles tasks that have finished.
                Removes a task from the tasks dictionary, calls the defined
                callbacks, and logs and re-raises exceptions."""
                # removing the finished task from the tasks dictionary
                self.tasks.pop(name, None)

                # emit the notification that the task was stopped
                self.service._notify_changed(name, None)

                exception = task.exception()
                if exception is not None:
                    # Handle the exception, or you can re-raise it.
                    logger.error(
                        "Task '%s' encountered an exception: %s: %s",
                        name,
                        type(exception).__name__,
                        exception,
                    )
                    raise exception

            async def task() -> None:
                # Wrap the user coroutine so cancellation is logged instead of
                # propagating out of the asyncio task.
                try:
                    await method()
                except asyncio.CancelledError:
                    logger.info("Task '%s' was cancelled", name)

            if not self.tasks.get(name):
                # creating the task and adding the task_done_callback which checks
                # if an exception has occured during the task execution
                task_object = self._loop.create_task(task())
                task_object.add_done_callback(
                    lambda task: task_done_callback(task, name)
                )

                # Store the task and its arguments in the '__tasks' dictionary. The
                # key is the name of the method, and the value is a dictionary
                # containing the task object and the updated keyword arguments.
                self.tasks[name] = task_object

                # emit the notification that the task was started
                self.service._notify_changed(name, TaskStatus.RUNNING)
            else:
                logger.error("Task '%s' is already running!", name)

        return start_task

View File

@ -6,7 +6,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="theme-color" content="#000000" /> <meta name="theme-color" content="#000000" />
<meta name="description" content="Web site displaying a pydase UI." /> <meta name="description" content="Web site displaying a pydase UI." />
<script type="module" crossorigin src="/assets/index-D7tStNHJ.js"></script> <script type="module" crossorigin src="/assets/index-DI9re3au.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-D2aktF3W.css"> <link rel="stylesheet" crossorigin href="/assets/index-D2aktF3W.css">
</head> </head>

View File

@ -6,7 +6,7 @@ from pydase.observer_pattern.observable.decorators import (
has_validate_set_decorator, has_validate_set_decorator,
) )
from pydase.observer_pattern.observable.observable_object import ObservableObject from pydase.observer_pattern.observable.observable_object import ObservableObject
from pydase.utils.helpers import is_property_attribute from pydase.utils.helpers import is_descriptor, is_property_attribute
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -24,6 +24,11 @@ class Observable(ObservableObject):
for name, value in class_attrs.items(): for name, value in class_attrs.items():
if isinstance(value, property) or callable(value): if isinstance(value, property) or callable(value):
continue continue
if is_descriptor(value):
# Descriptors have to be stored as a class variable in another class to
# work properly. So don't make it an instance attribute.
self._initialise_new_objects(name, value)
continue
self.__dict__[name] = self._initialise_new_objects(name, value) self.__dict__[name] = self._initialise_new_objects(name, value)
def __setattr__(self, name: str, value: Any) -> None: def __setattr__(self, name: str, value: Any) -> None:

View File

@ -60,7 +60,7 @@ class PropertyObserver(Observer):
def _process_nested_observables_properties( def _process_nested_observables_properties(
self, obj: Observable, deps: dict[str, Any], prefix: str self, obj: Observable, deps: dict[str, Any], prefix: str
) -> None: ) -> None:
for k, value in vars(obj).items(): for k, value in {**vars(type(obj)), **vars(obj)}.items():
prefix = ( prefix = (
f"{prefix}." if prefix != "" and not prefix.endswith(".") else prefix f"{prefix}." if prefix != "" and not prefix.endswith(".") else prefix
) )

View File

@ -13,6 +13,8 @@ from pydase.config import ServiceConfig
from pydase.data_service.data_service_observer import DataServiceObserver from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager from pydase.data_service.state_manager import StateManager
from pydase.server.web_server import WebServer from pydase.server.web_server import WebServer
from pydase.task.autostart import autostart_service_tasks
from pydase.utils.helpers import current_event_loop_exists
HANDLED_SIGNALS = ( HANDLED_SIGNALS = (
signal.SIGINT, # Unix signal 2. Sent by Ctrl+C. signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
@ -156,13 +158,18 @@ class Server:
self._web_port = web_port self._web_port = web_port
self._enable_web = enable_web self._enable_web = enable_web
self._kwargs = kwargs self._kwargs = kwargs
self._loop: asyncio.AbstractEventLoop
self._additional_servers = additional_servers self._additional_servers = additional_servers
self.should_exit = False self.should_exit = False
self.servers: dict[str, asyncio.Future[Any]] = {} self.servers: dict[str, asyncio.Future[Any]] = {}
self._state_manager = StateManager(self._service, filename) self._state_manager = StateManager(self._service, filename)
self._observer = DataServiceObserver(self._state_manager) self._observer = DataServiceObserver(self._state_manager)
self._state_manager.load_state() self._state_manager.load_state()
autostart_service_tasks(self._service)
if not current_event_loop_exists():
self._loop = asyncio.new_event_loop()
asyncio.set_event_loop(self._loop)
else:
self._loop = asyncio.get_event_loop()
def run(self) -> None: def run(self) -> None:
""" """
@ -170,7 +177,7 @@ class Server:
This method should be called to start the server after it's been instantiated. This method should be called to start the server after it's been instantiated.
""" """
asyncio.run(self.serve()) self._loop.run_until_complete(self.serve())
async def serve(self) -> None: async def serve(self) -> None:
process_id = os.getpid() process_id = os.getpid()
@ -186,10 +193,8 @@ class Server:
logger.info("Finished server process [%s]", process_id) logger.info("Finished server process [%s]", process_id)
async def startup(self) -> None: async def startup(self) -> None:
self._loop = asyncio.get_running_loop()
self._loop.set_exception_handler(self.custom_exception_handler) self._loop.set_exception_handler(self.custom_exception_handler)
self.install_signal_handlers() self.install_signal_handlers()
self._service._task_manager.start_autostart_tasks()
for server in self._additional_servers: for server in self._additional_servers:
addin_server = server["server"]( addin_server = server["server"](

View File

View File

@ -0,0 +1,44 @@
from typing import Any
import pydase.data_service.data_service
import pydase.task.task
from pydase.task.task_status import TaskStatus
from pydase.utils.helpers import is_property_attribute
def autostart_service_tasks(
    service: pydase.data_service.data_service.DataService,
) -> None:
    """Start every task of *service* that was declared with `autostart=True`.

    Walks the attributes of the given service, starts each not-yet-running
    autostart [`Task`][pydase.task.task.Task], and descends into nested
    [`DataService`][pydase.DataService] instances (also those held in lists and
    dicts) to start their autostart tasks as well.
    """

    for attr_name in dir(service):
        # skip properties so their getters are not evaluated here
        if is_property_attribute(service, attr_name):
            continue

        attribute = getattr(service, attr_name)
        should_start = (
            isinstance(attribute, pydase.task.task.Task)
            and attribute.autostart
            and attribute.status == TaskStatus.NOT_RUNNING
        )
        if should_start:
            attribute.start()
        else:
            autostart_nested_service_tasks(attribute)
def autostart_nested_service_tasks(
    service: pydase.data_service.data_service.DataService | list[Any] | dict[Any, Any],
) -> None:
    """Recurse into *service* and start the autostart tasks of nested services.

    Accepts a single service, a list of services, or a dict whose values are
    services; any other value is ignored.
    """

    if isinstance(service, pydase.DataService):
        autostart_service_tasks(service)
        return
    if isinstance(service, list):
        for element in service:
            autostart_service_tasks(element)
        return
    if isinstance(service, dict):
        for element in service.values():
            autostart_service_tasks(element)

View File

@ -0,0 +1,75 @@
import logging
from collections.abc import Callable, Coroutine
from typing import Any, TypeVar
from pydase.task.task import Task
logger = logging.getLogger(__name__)
R = TypeVar("R")
def task(
    *, autostart: bool = False
) -> Callable[
    [
        Callable[[Any], Coroutine[None, None, R]]
        | Callable[[], Coroutine[None, None, R]]
    ],
    Task[R],
]:
    """Mark an argument-less coroutine function of a
    [`DataService`][pydase.DataService] class as a task.

    The decorated function is replaced by a [`Task`][pydase.task.task.Task]
    instance that wraps it. The returned object exposes `start()` and `stop()`
    methods, so execution of the wrapped coroutine can be controlled at runtime.
    Tasks typically implement periodic or recurring work such as polling sensor
    data or updating databases.

    Args:
        autostart:
            If set to True, the task will automatically start when the service is
            initialized. Defaults to False.

    Returns:
        A decorator that converts an asynchronous function into a
        [`Task`][pydase.task.task.Task] object.

    Example:
        ```python
        import asyncio

        import pydase
        from pydase.task.decorator import task


        class MyService(pydase.DataService):
            @task(autostart=True)
            async def my_task(self) -> None:
                while True:
                    # Perform some periodic work
                    await asyncio.sleep(1)


        if __name__ == "__main__":
            service = MyService()
            pydase.Server(service=service).run()
        ```

        Here `my_task` starts automatically when the service comes up because
        `autostart=True` is set; it can also be controlled manually via
        `service.my_task.start()` and `service.my_task.stop()`.
    """

    def wrap(
        coroutine_function: Callable[[Any], Coroutine[None, None, R]]
        | Callable[[], Coroutine[None, None, R]],
    ) -> Task[R]:
        # Bind the coroutine function into a Task carrying the autostart flag.
        return Task(coroutine_function, autostart=autostart)

    return wrap

190
src/pydase/task/task.py Normal file
View File

@ -0,0 +1,190 @@
import asyncio
import inspect
import logging
import sys
from collections.abc import Callable, Coroutine
from typing import (
Any,
Generic,
TypeVar,
)
from typing_extensions import TypeIs
from pydase.task.task_status import TaskStatus
if sys.version_info < (3, 11):
from typing_extensions import Self
else:
from typing import Self
import pydase.data_service.data_service
from pydase.utils.helpers import current_event_loop_exists
# NOTE: do NOT call logging.basicConfig() here. Configuring the root logger
# (at DEBUG level, no less) as an import side effect of a library module
# overrides the logging setup of every application that imports pydase.
# Library code should only create its own module-level logger.
logger = logging.getLogger(__name__)

# Return type of the coroutine wrapped by a Task.
R = TypeVar("R")
def is_bound_method(
    method: "Callable[[], Coroutine[None, None, R | None]] | Callable[[Any], Coroutine[None, None, R | None]]",
) -> "TypeIs[Callable[[], Coroutine[None, None, R | None]]]":
    """Return True when *method* is bound to an instance (i.e. has ``__self__``)."""
    # inspect.ismethod is True only for bound instance/class methods,
    # not for plain functions.
    return inspect.ismethod(method)
class Task(pydase.data_service.data_service.DataService, Generic[R]):
    """A managed asynchronous task within the `pydase` framework.

    `Task` wraps a coroutine function and controls its lifecycle through
    `start()` and `stop()`. It is what a function decorated with
    [`@task`][pydase.task.decorator.task] is replaced by, and is typically used
    for periodic or recurring background jobs in a
    [`DataService`][pydase.DataService] (reading sensor data, updating
    databases, ...).

    Args:
        func:
            The coroutine function wrapped by this task. It must take no
            arguments (besides an optional ``self``).
        autostart:
            When True, the task is started automatically when the service is
            initialized. Defaults to False.

    Example:
        ```python
        import asyncio

        import pydase
        from pydase.task.decorator import task


        class MyService(pydase.DataService):
            @task(autostart=True)
            async def my_task(self) -> None:
                while True:
                    # Perform some periodic work
                    await asyncio.sleep(1)


        if __name__ == "__main__":
            service = MyService()
            pydase.Server(service=service).run()
        ```

        Here `my_task` starts automatically because `autostart=True`; it can
        also be controlled manually through `service.my_task.start()` and
        `service.my_task.stop()`.
    """

    def __init__(
        self,
        func: Callable[[Any], Coroutine[None, None, R | None]]
        | Callable[[], Coroutine[None, None, R | None]],
        *,
        autostart: bool = False,
    ) -> None:
        super().__init__()
        self._autostart = autostart
        self._func_name = func.__name__
        self._func = func
        # If the function is already bound (decorating a bound method), bind it
        # right away; otherwise binding is deferred to __get__.
        self._bound_func: Callable[[], Coroutine[None, None, R | None]] | None = (
            func if is_bound_method(func) else None
        )
        self._set_up = False
        self._task: asyncio.Task[R | None] | None = None
        self._status = TaskStatus.NOT_RUNNING
        self._result: R | None = None

    @property
    def autostart(self) -> bool:
        """Whether the task starts automatically when the
        [`Server`][pydase.Server] starts."""
        return self._autostart

    @property
    def status(self) -> TaskStatus:
        """Current status of the task."""
        return self._status

    def start(self) -> None:
        """Start the wrapped coroutine as an asyncio task.

        Does nothing if the task is already running or if no event loop has
        been attached yet.
        """
        if self._task:
            return

        def _on_done(task: asyncio.Task[R | None]) -> None:
            """Reset the task state, store the result, and log exceptions.

            Re-raising here hands the exception to the event loop's exception
            handler, since done-callbacks run outside any awaiting coroutine.
            """
            self._task = None
            self._status = TaskStatus.NOT_RUNNING
            exception = task.exception()
            if exception is not None:
                logger.error(
                    "Task '%s' encountered an exception: %s: %s",
                    self._func_name,
                    type(exception).__name__,
                    exception,
                )
                raise exception
            self._result = task.result()

        async def _run() -> R | None:
            # Only run if the function has been bound to a service instance.
            if inspect.iscoroutinefunction(self._bound_func):
                logger.info("Starting task %r", self._func_name)
                self._status = TaskStatus.RUNNING
                coro: Coroutine[None, None, R] = self._bound_func()
                try:
                    return await coro
                except asyncio.CancelledError:
                    # Cancellation via stop() is the normal way to end a task.
                    logger.info("Task '%s' was cancelled", self._func_name)
                    return None
            logger.warning(
                "Cannot start task %r. Function has not been bound yet",
                self._func_name,
            )
            return None

        logger.info("Creating task %r", self._func_name)
        self._task = self._loop.create_task(_run())
        self._task.add_done_callback(_on_done)

    def stop(self) -> None:
        """Cancel the running asyncio task, if any."""
        if self._task:
            self._task.cancel()

    def __get__(self, instance: Any, owner: Any) -> Self:
        """Descriptor hook binding the wrapped function to *instance*.

        Because `__init__` runs at decoration time (before any service instance
        exists), the remaining setup — picking/creating the event loop and
        binding the function — is deferred until the task is first accessed on
        an instance.
        """
        if instance and not self._set_up:
            if not current_event_loop_exists():
                self._loop = asyncio.new_event_loop()
                asyncio.set_event_loop(self._loop)
            else:
                self._loop = asyncio.get_event_loop()
            self._bound_func = self._func.__get__(instance, owner)
            self._set_up = True
        return self

View File

@ -0,0 +1,8 @@
import enum
class TaskStatus(enum.Enum):
    """Possible statuses of a [`Task`][pydase.task.task.Task]."""

    RUNNING = "running"  # the wrapped coroutine is currently executing
    NOT_RUNNING = "not_running"  # the task has not started or has finished

View File

@ -114,8 +114,6 @@ def get_class_and_instance_attributes(obj: object) -> dict[str, Any]:
If an attribute exists at both the instance and class level,the value from the If an attribute exists at both the instance and class level,the value from the
instance attribute takes precedence. instance attribute takes precedence.
The __root__ object is removed as this will lead to endless recursion in the for
loops.
""" """
return dict(chain(type(obj).__dict__.items(), obj.__dict__.items())) return dict(chain(type(obj).__dict__.items(), obj.__dict__.items()))
@ -162,6 +160,12 @@ def get_object_attr_from_path(target_obj: Any, path: str) -> Any:
return get_object_by_path_parts(target_obj, path_parts) return get_object_by_path_parts(target_obj, path_parts)
def get_task_class() -> type:
    """Return the pydase `Task` class.

    The import happens inside the function — presumably to avoid a circular
    import between the helpers and task modules.
    """
    from pydase.task.task import Task

    return Task
def get_component_classes() -> list[type]: def get_component_classes() -> list[type]:
""" """
Returns references to the component classes in a list. Returns references to the component classes in a list.
@ -196,3 +200,15 @@ def function_has_arguments(func: Callable[..., Any]) -> bool:
# Check if there are any parameters left which would indicate additional arguments. # Check if there are any parameters left which would indicate additional arguments.
return len(parameters) > 0 return len(parameters) > 0
def is_descriptor(obj: object) -> bool:
    """Return True if *obj* implements any part of the descriptor protocol."""
    descriptor_methods = ("__get__", "__set__", "__delete__")
    return any(hasattr(obj, name) for name in descriptor_methods)
def current_event_loop_exists() -> bool:
    """Return True if the current event loop policy already has a loop set.

    NOTE(review): this reaches into the CPython-private ``_local._loop``
    attribute of the event loop policy and may break on other interpreters or
    future Python versions — confirm before upgrading.
    """
    import asyncio

    policy = asyncio.get_event_loop_policy()
    return policy._local._loop is not None  # type: ignore

View File

@ -6,7 +6,9 @@ from typing import TYPE_CHECKING, Any, NoReturn, cast
import pydase import pydase
import pydase.components import pydase.components
import pydase.units as u import pydase.units as u
from pydase.utils.helpers import get_component_classes from pydase.utils.helpers import (
get_component_classes,
)
from pydase.utils.serialization.types import ( from pydase.utils.serialization.types import (
SerializedDatetime, SerializedDatetime,
SerializedException, SerializedException,
@ -49,9 +51,9 @@ class Deserializer:
return handler(serialized_object) return handler(serialized_object)
# Custom types like Components or DataService classes # Custom types like Components or DataService classes
component_class = cls.get_component_class(serialized_object["type"]) service_base_class = cls.get_service_base_class(serialized_object["type"])
if component_class: if service_base_class:
return cls.deserialize_component_type(serialized_object, component_class) return cls.deserialize_data_service(serialized_object, service_base_class)
return None return None
@ -110,11 +112,11 @@ class Deserializer:
raise exception(serialized_object["value"]) raise exception(serialized_object["value"])
@staticmethod @staticmethod
def get_component_class(type_name: str | None) -> type | None: def get_service_base_class(type_name: str | None) -> type | None:
for component_class in get_component_classes(): for component_class in get_component_classes():
if type_name == component_class.__name__: if type_name == component_class.__name__:
return component_class return component_class
if type_name == "DataService": if type_name in ("DataService", "Task"):
import pydase import pydase
return pydase.DataService return pydase.DataService
@ -137,7 +139,7 @@ class Deserializer:
return property(get, set) return property(get, set)
@classmethod @classmethod
def deserialize_component_type( def deserialize_data_service(
cls, serialized_object: SerializedObject, base_class: type cls, serialized_object: SerializedObject, base_class: type
) -> Any: ) -> Any:
def create_proxy_class(serialized_object: SerializedObject) -> type: def create_proxy_class(serialized_object: SerializedObject) -> type:

View File

@ -9,12 +9,14 @@ from typing import TYPE_CHECKING, Any, Literal, cast
import pydase.units as u import pydase.units as u
from pydase.data_service.abstract_data_service import AbstractDataService from pydase.data_service.abstract_data_service import AbstractDataService
from pydase.data_service.task_manager import TaskStatus from pydase.task.task_status import TaskStatus
from pydase.utils.decorators import render_in_frontend from pydase.utils.decorators import render_in_frontend
from pydase.utils.helpers import ( from pydase.utils.helpers import (
get_attribute_doc, get_attribute_doc,
get_component_classes, get_component_classes,
get_data_service_class_reference, get_data_service_class_reference,
get_task_class,
is_property_attribute,
parse_full_access_path, parse_full_access_path,
parse_serialized_key, parse_serialized_key,
) )
@ -280,6 +282,10 @@ class Serializer:
if component_base_cls: if component_base_cls:
obj_type = component_base_cls.__name__ # type: ignore obj_type = component_base_cls.__name__ # type: ignore
elif isinstance(obj, get_task_class()):
# Check if obj is a pydase task
obj_type = "Task"
# Get the set of DataService class attributes # Get the set of DataService class attributes
data_service_attr_set = set(dir(get_data_service_class_reference())) data_service_attr_set = set(dir(get_data_service_class_reference()))
# Get the set of the object attributes # Get the set of the object attributes
@ -294,29 +300,15 @@ class Serializer:
if key.startswith("_"): if key.startswith("_"):
continue # Skip attributes that start with underscore continue # Skip attributes that start with underscore
# Skip keys that start with "start_" or "stop_" and end with an async
# method name
if key.startswith(("start_", "stop_")) and key.split("_", 1)[1] in {
name
for name, _ in inspect.getmembers(
obj, predicate=inspect.iscoroutinefunction
)
}:
continue
val = getattr(obj, key) val = getattr(obj, key)
path = f"{access_path}.{key}" if access_path else key path = f"{access_path}.{key}" if access_path else key
serialized_object = cls.serialize_object(val, access_path=path) serialized_object = cls.serialize_object(val, access_path=path)
# If there's a running task for this method
if serialized_object["type"] == "method" and key in obj._task_manager.tasks:
serialized_object["value"] = TaskStatus.RUNNING.name
value[key] = serialized_object value[key] = serialized_object
# If the DataService attribute is a property # If the DataService attribute is a property
if isinstance(getattr(obj.__class__, key, None), property): if is_property_attribute(obj, key):
prop: property = getattr(obj.__class__, key) prop: property = getattr(obj.__class__, key)
value[key]["readonly"] = prop.fset is None value[key]["readonly"] = prop.fset is None
value[key]["doc"] = get_attribute_doc(prop) # overwrite the doc value[key]["doc"] = get_attribute_doc(prop) # overwrite the doc

View File

@ -98,7 +98,9 @@ class SerializedException(SerializedObjectBase):
type: Literal["Exception"] type: Literal["Exception"]
DataServiceTypes = Literal["DataService", "Image", "NumberSlider", "DeviceConnection"] DataServiceTypes = Literal[
"DataService", "Image", "NumberSlider", "DeviceConnection", "Task"
]
class SerializedDataService(SerializedObjectBase): class SerializedDataService(SerializedObjectBase):

View File

@ -3,6 +3,7 @@ import asyncio
import pydase import pydase
import pydase.components.device_connection import pydase.components.device_connection
import pytest import pytest
from pydase.task.autostart import autostart_service_tasks
from pytest import LogCaptureFixture from pytest import LogCaptureFixture
@ -19,10 +20,9 @@ async def test_reconnection(caplog: LogCaptureFixture) -> None:
self._connected = True self._connected = True
service_instance = MyService() service_instance = MyService()
autostart_service_tasks(service_instance)
assert service_instance._connected is False assert service_instance._connected is False
service_instance._task_manager.start_autostart_tasks()
await asyncio.sleep(0.01) await asyncio.sleep(0.01)
assert service_instance._connected is True assert service_instance._connected is True

View File

@ -36,8 +36,7 @@ def test_unexpected_type_change_warning(caplog: LogCaptureFixture) -> None:
def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None: def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None:
class SubService(DataService): class SubService(DataService): ...
...
class SomeEnum(Enum): class SomeEnum(Enum):
HI = 0 HI = 0
@ -57,11 +56,9 @@ def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None:
def name(self) -> str: def name(self) -> str:
return self._name return self._name
def some_method(self) -> None: def some_method(self) -> None: ...
...
async def some_task(self) -> None: async def some_task(self) -> None: ...
...
ServiceClass() ServiceClass()
@ -129,17 +126,12 @@ def test_exposing_methods(caplog: LogCaptureFixture) -> None:
return "some method" return "some method"
class ClassWithTask(pydase.DataService): class ClassWithTask(pydase.DataService):
async def some_task(self, sleep_time: int) -> None: @frontend
pass def some_method(self) -> str:
return "some method"
ClassWithTask() ClassWithTask()
assert (
"Async function 'some_task' is defined with at least one argument. If you want "
"to use it as a task, remove the argument(s) from the function definition."
in caplog.text
)
def test_dynamically_added_attribute(caplog: LogCaptureFixture) -> None: def test_dynamically_added_attribute(caplog: LogCaptureFixture) -> None:
class MyService(DataService): class MyService(DataService):

View File

@ -1,7 +1,6 @@
import logging import logging
import pydase import pydase
import pytest
from pydase.data_service.data_service_observer import DataServiceObserver from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager from pydase.data_service.state_manager import StateManager
@ -33,35 +32,3 @@ def test_nested_attributes_cache_callback() -> None:
] ]
== "Ciao" == "Ciao"
) )
@pytest.mark.asyncio(scope="function")
async def test_task_status_update() -> None:
class ServiceClass(pydase.DataService):
name = "World"
async def my_method(self) -> None:
pass
service_instance = ServiceClass()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
assert (
state_manager.cache_manager.get_value_dict_from_cache("my_method")["type"]
== "method"
)
assert (
state_manager.cache_manager.get_value_dict_from_cache("my_method")["value"]
is None
)
service_instance.start_my_method() # type: ignore
assert (
state_manager.cache_manager.get_value_dict_from_cache("my_method")["type"]
== "method"
)
assert (
state_manager.cache_manager.get_value_dict_from_cache("my_method")["value"]
== "RUNNING"
)

View File

@ -1,135 +0,0 @@
import asyncio
import logging
import pydase
import pytest
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pytest import LogCaptureFixture
logger = logging.getLogger("pydase")
@pytest.mark.asyncio(scope="function")
async def test_autostart_task_callback(caplog: LogCaptureFixture) -> None:
class MyService(pydase.DataService):
def __init__(self) -> None:
super().__init__()
self._autostart_tasks = { # type: ignore
"my_task": (), # type: ignore
"my_other_task": (), # type: ignore
}
async def my_task(self) -> None:
logger.info("Triggered task.")
async def my_other_task(self) -> None:
logger.info("Triggered other task.")
# Your test code here
service_instance = MyService()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
service_instance._task_manager.start_autostart_tasks()
assert "'my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
assert "'my_other_task' changed to 'TaskStatus.RUNNING'" in caplog.text
@pytest.mark.asyncio(scope="function")
async def test_DataService_subclass_autostart_task_callback(
caplog: LogCaptureFixture,
) -> None:
class MySubService(pydase.DataService):
def __init__(self) -> None:
super().__init__()
self._autostart_tasks = { # type: ignore
"my_task": (),
"my_other_task": (),
}
async def my_task(self) -> None:
logger.info("Triggered task.")
async def my_other_task(self) -> None:
logger.info("Triggered other task.")
class MyService(pydase.DataService):
sub_service = MySubService()
service_instance = MyService()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
service_instance._task_manager.start_autostart_tasks()
assert "'sub_service.my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
assert "'sub_service.my_other_task' changed to 'TaskStatus.RUNNING'" in caplog.text
@pytest.mark.asyncio(scope="function")
async def test_DataService_subclass_list_autostart_task_callback(
caplog: LogCaptureFixture,
) -> None:
class MySubService(pydase.DataService):
def __init__(self) -> None:
super().__init__()
self._autostart_tasks = { # type: ignore
"my_task": (),
"my_other_task": (),
}
async def my_task(self) -> None:
logger.info("Triggered task.")
async def my_other_task(self) -> None:
logger.info("Triggered other task.")
class MyService(pydase.DataService):
sub_services_list = [MySubService() for i in range(2)]
service_instance = MyService()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
service_instance._task_manager.start_autostart_tasks()
assert (
"'sub_services_list[0].my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
)
assert (
"'sub_services_list[0].my_other_task' changed to 'TaskStatus.RUNNING'"
in caplog.text
)
assert (
"'sub_services_list[1].my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
)
assert (
"'sub_services_list[1].my_other_task' changed to 'TaskStatus.RUNNING'"
in caplog.text
)
@pytest.mark.asyncio(scope="function")
async def test_start_and_stop_task_methods(caplog: LogCaptureFixture) -> None:
class MyService(pydase.DataService):
def __init__(self) -> None:
super().__init__()
async def my_task(self) -> None:
while True:
logger.debug("Logging message")
await asyncio.sleep(0.1)
# Your test code here
service_instance = MyService()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
service_instance.start_my_task()
await asyncio.sleep(0.01)
assert "'my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
assert "Logging message" in caplog.text
caplog.clear()
service_instance.stop_my_task()
await asyncio.sleep(0.01)
assert "Task 'my_task' was cancelled" in caplog.text

122
tests/task/test_task.py Normal file
View File

@ -0,0 +1,122 @@
import asyncio
import logging
import pydase
import pytest
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.task.autostart import autostart_service_tasks
from pydase.task.decorator import task
from pydase.task.task_status import TaskStatus
from pytest import LogCaptureFixture
logger = logging.getLogger("pydase")
@pytest.mark.asyncio(scope="function")
async def test_start_and_stop_task(caplog: LogCaptureFixture) -> None:
    # A service with a manually controlled (no autostart) periodic task.
    class MyService(pydase.DataService):
        @task()
        async def my_task(self) -> None:
            while True:
                logger.debug("Logging message")
                await asyncio.sleep(0.01)

    service_instance = MyService()
    state_manager = StateManager(service_instance)
    DataServiceObserver(state_manager)

    # Starting the task should flip its status and run the loop body.
    service_instance.my_task.start()
    await asyncio.sleep(0.1)
    assert "'my_task.status' changed to 'TaskStatus.RUNNING'" in caplog.text
    assert "Logging message" in caplog.text
    caplog.clear()

    # Stopping cancels the underlying asyncio task.
    service_instance.my_task.stop()
    await asyncio.sleep(0.1)
    assert "Task 'my_task' was cancelled" in caplog.text
@pytest.mark.asyncio(scope="function")
async def test_autostart_task(caplog: LogCaptureFixture) -> None:
    # autostart=True tasks are launched by autostart_service_tasks().
    class MyService(pydase.DataService):
        @task(autostart=True)
        async def my_task(self) -> None:
            logger.info("Triggered task.")

    service_instance = MyService()
    state_manager = StateManager(service_instance)
    DataServiceObserver(state_manager)
    autostart_service_tasks(service_instance)

    await asyncio.sleep(0.1)

    assert "'my_task.status' changed to 'TaskStatus.RUNNING'" in caplog.text
@pytest.mark.asyncio(scope="function")
async def test_nested_list_autostart_task(
    caplog: LogCaptureFixture,
) -> None:
    # Autostart must recurse into services held in a list attribute.
    class MySubService(pydase.DataService):
        @task(autostart=True)
        async def my_task(self) -> None:
            logger.info("Triggered task.")

    class MyService(pydase.DataService):
        sub_services_list = [MySubService() for i in range(2)]

    service_instance = MyService()
    state_manager = StateManager(service_instance)
    DataServiceObserver(state_manager)
    autostart_service_tasks(service_instance)

    await asyncio.sleep(0.1)

    assert (
        "'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
        in caplog.text
    )
    assert (
        "'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
        in caplog.text
    )
@pytest.mark.asyncio(scope="function")
async def test_nested_dict_autostart_task(
    caplog: LogCaptureFixture,
) -> None:
    # Autostart must recurse into services held in a dict attribute.
    class MySubService(pydase.DataService):
        @task(autostart=True)
        async def my_task(self) -> None:
            logger.info("Triggered task.")
            while True:
                await asyncio.sleep(1)

    class MyService(pydase.DataService):
        sub_services_dict = {"first": MySubService(), "second": MySubService()}

    service_instance = MyService()
    state_manager = StateManager(service_instance)
    DataServiceObserver(state_manager)
    autostart_service_tasks(service_instance)

    await asyncio.sleep(0.1)

    assert (
        service_instance.sub_services_dict["first"].my_task.status == TaskStatus.RUNNING
    )
    assert (
        "'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
        in caplog.text
    )
    assert (
        "'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
        in caplog.text
    )

View File

@ -1,4 +1,3 @@
import asyncio
import enum import enum
from datetime import datetime from datetime import datetime
from enum import Enum from enum import Enum
@ -8,7 +7,7 @@ import pydase
import pydase.units as u import pydase.units as u
import pytest import pytest
from pydase.components.coloured_enum import ColouredEnum from pydase.components.coloured_enum import ColouredEnum
from pydase.data_service.task_manager import TaskStatus from pydase.task.task_status import TaskStatus
from pydase.utils.decorators import frontend from pydase.utils.decorators import frontend
from pydase.utils.serialization.serializer import ( from pydase.utils.serialization.serializer import (
SerializationPathError, SerializationPathError,
@ -214,11 +213,9 @@ async def test_method_serialization() -> None:
return "some method" return "some method"
async def some_task(self) -> None: async def some_task(self) -> None:
while True: pass
await asyncio.sleep(10)
instance = ClassWithMethod() instance = ClassWithMethod()
instance.start_some_task() # type: ignore
assert dump(instance)["value"] == { assert dump(instance)["value"] == {
"some_method": { "some_method": {
@ -234,7 +231,7 @@ async def test_method_serialization() -> None:
"some_task": { "some_task": {
"full_access_path": "some_task", "full_access_path": "some_task",
"type": "method", "type": "method",
"value": TaskStatus.RUNNING.name, "value": None,
"readonly": True, "readonly": True,
"doc": None, "doc": None,
"async": True, "async": True,