23 Commits

Author SHA1 Message Date
Mose Müller
5658514c8a Merge pull request #162 from tiqi-group/fix/normalize_full_access_path
Fix: normalize full access path (for dict keys)
2024-09-23 13:23:11 +02:00
Mose Müller
109ee7d5e1 updates version to v0.10.3 2024-09-23 13:16:50 +02:00
Mose Müller
f4fa02fe11 adds test ensuring dict keys can be encoded with both single and double quotes 2024-09-23 13:16:50 +02:00
Mose Müller
487ef504a8 normalizes full access path strings containing dict keys with double quotes
Full access paths containing string dictionary keys are sent with
double quotes from the frontend. These quotes have to be converted to
single quotes so that the comparison with the property dependency
dictionary works.
2024-09-23 13:16:49 +02:00
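
A minimal sketch of the fix described above, reusing the `normalize_full_access_path_string` helper added in this change-set (the example paths and dependency dict are illustrative and mirror the test added below):

```python
# Illustrative sketch; the helper body matches the one added in helpers.py.
def normalize_full_access_path_string(s: str) -> str:
    """Convert double quotes in an access path to single quotes."""
    return s.replace('"', "'")


# The property dependency dictionary stores paths with single-quoted dict keys:
property_deps = {"service_dict['one']._prop": ["service_dict['one'].prop"]}

# The frontend, however, sends dict keys with double quotes:
changed_attr_path = 'service_dict["one"]._prop'

assert changed_attr_path not in property_deps                      # raw lookup misses
normalized = normalize_full_access_path_string(changed_attr_path)
assert property_deps[normalized] == ["service_dict['one'].prop"]   # now it matches
```

This mirrors the lookup now done in `DataServiceObserver._notify_dependent_property_changes` (see the data_service_observer diff below).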
Mose Müller
c98e407ed7 Merge pull request #161 from tiqi-group/160-control-of-tasks-in-instances-derived-from-same-class-only-controls-task-of-first-instance
fix: controlling tasks of instances derived from same class
2024-09-23 12:54:38 +02:00
Mose Müller
6b6ce1d43f adds test checking for multiple instances of a class containing a task 2024-09-23 09:44:43 +02:00
Mose Müller
e491ac7458 observable does not have to initialise descriptor objects anymore
The task decorator no longer returns a Task object directly, but rather
a descriptor that returns the task. The task is initialised there, so
this no longer has to be done in the observable base class.
2024-09-23 09:27:15 +02:00
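
A stripped-down sketch of the descriptor mechanics referred to here (simplified names, not the actual pydase classes): the decorated method becomes a descriptor, and the task object is only created when the attribute is accessed on an instance, so the observable base class no longer needs to initialise it.

```python
# Simplified sketch of a lazily-initialising descriptor (not the real Task).
class LazyTaskDescriptor:
    def __init__(self, func):
        self._func = func
        self._instances: dict[object, object] = {}

    def __set_name__(self, owner, name):
        self._name = name

    def __get__(self, instance, owner):
        if instance is None:
            return self                      # class access yields the descriptor
        if instance not in self._instances:
            # Initialisation happens here, on first access per instance.
            self._instances[instance] = object()
        return self._instances[instance]


class Service:
    @LazyTaskDescriptor
    async def my_task(self):
        ...


a, b = Service(), Service()
assert isinstance(Service.my_task, LazyTaskDescriptor)  # descriptor on the class
assert a.my_task is a.my_task                            # cached per instance
assert a.my_task is not b.my_task                        # distinct between instances
```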
Mose Müller
e9d8cbafc2 adds PerInstanceTaskDescriptor class managing task objects for service class instances
Previously, defining a task on a DataService class created a task object
that replaced the decorated method as a class attribute. This caused
errors when using multiple instances of that class, as every instance
referred to the same task.
The new descriptor class manages the tasks per instance of the service
class.
2024-09-23 09:21:04 +02:00
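
For illustration, a hedged sketch of the problem this descriptor solves, using a simplified stand-in for the real `Task` class: a single task object stored as a class attribute is shared by every instance, which is the behaviour reported in issue #160.

```python
# Simplified stand-in for Task, only used to show the shared-state problem.
class FakeTask:
    def __init__(self) -> None:
        self.running = False

    def start(self) -> None:
        self.running = True


class OldStyleService:
    # Previously, decorating a method effectively placed ONE task object here,
    # as a class attribute shared by all instances.
    my_task = FakeTask()


a, b = OldStyleService(), OldStyleService()
a.my_task.start()

assert a.my_task is b.my_task        # both instances see the same object ...
assert b.my_task.running is True     # ... so "b"'s task appears started as well

# PerInstanceTaskDescriptor instead keeps one Task per service instance,
# so starting a.my_task leaves b.my_task untouched.
```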
Mose Müller
aa705592b2 removes code from Task meant to bind the passed function to the containing class instance
The task object now only receives bound methods, so there is no need to
keep the descriptor functionality.
2024-09-23 09:15:42 +02:00
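
For context, a bound method is what `func.__get__(instance, owner)` returns, which is what the new descriptor hands to `Task` (see the decorator diff below); a minimal sketch of that binding step:

```python
# Minimal sketch: Task now only ever receives already-bound methods, so it no
# longer needs its own descriptor machinery to bind the function later.
import asyncio


class Service:
    async def my_task(self) -> None:
        await asyncio.sleep(0)


svc = Service()

plain_func = Service.__dict__["my_task"]   # the undecorated function on the class
bound = plain_func.__get__(svc, Service)   # binding, as done in the new descriptor

assert bound.__self__ is svc               # bound methods carry their instance
asyncio.run(bound())                       # callable without passing `self`
```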
Mose Müller
008e1262bb updates PropertyObserver to support descriptors returning observables
If a class attribute is a descriptor, get its value before checking if
it is an Observable.
2024-09-23 08:57:00 +02:00
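
A small sketch of why the observer has to resolve descriptor values through `getattr` (simplified classes, not the pydase ones): the object stored in the class `__dict__` is the descriptor itself, while the observable it manages is only visible through attribute access.

```python
# Simplified sketch: vars(type(obj)) yields the descriptor, getattr resolves it.
class WrappingDescriptor:
    def __set_name__(self, owner, name):
        self._name = "_" + name

    def __get__(self, instance, owner):
        if instance is None:
            return self
        return getattr(instance, self._name)   # e.g. an Observable in pydase


class Service:
    attr = WrappingDescriptor()

    def __init__(self) -> None:
        self._attr = ["stand-in for an Observable"]


svc = Service()
raw = vars(type(svc))["attr"]        # the descriptor object itself
resolved = getattr(svc, "attr")      # the value the descriptor returns

assert isinstance(raw, WrappingDescriptor)
assert resolved == ["stand-in for an Observable"]
```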
Mose Müller
91a71ad004 updates is_descriptor to exclude false positives for methods, functions and builtins 2024-09-23 08:55:44 +02:00
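
A short illustration of the false positives mentioned here: plain functions (and properties) implement the descriptor protocol themselves, so a bare `__get__`/`__set__`/`__delete__` check misclassifies them. The `naive_is_descriptor` name below is only for illustration; the real helper is `is_descriptor` in `pydase.utils.helpers` (see its diff below).

```python
# Naive check for illustration only -- it flags objects that are not "real"
# descriptors in the sense pydase cares about.
def naive_is_descriptor(obj: object) -> bool:
    return any(hasattr(obj, m) for m in ("__get__", "__set__", "__delete__"))


def some_function() -> None: ...


assert naive_is_descriptor(some_function)              # functions define __get__
assert naive_is_descriptor(property(lambda self: 0))   # properties do as well

# The updated is_descriptor therefore filters out functions, methods, builtins
# and properties first, before checking for descriptor methods.
```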
Mose Müller
bbf479a440 Merge pull request #159 from tiqi-group/158-defining-task-without-autostart-fails
fix: defining task without autostart fails
2024-09-21 11:43:31 +02:00
Mose Müller
983d392ba8 properly handle Task objects in autostart method
Tasks that are not set to autostart or are already running were passed to
autostart_nested_service_tasks. This caused the recursion, as tasks have a
__self__ attribute pointing to the containing service.
2024-09-21 11:40:25 +02:00
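
A simplified illustration of the back-reference mentioned above (not the actual pydase traversal code): a bound task function carries a `__self__` attribute pointing back at its service, so a traversal that follows every attribute of whatever it visits can cycle service → task → `__self__` → service.

```python
# Simplified illustration of the service <-> task cycle (not pydase code).
import asyncio


class Service:
    async def my_task(self) -> None:
        await asyncio.sleep(1)


svc = Service()
bound_task_func = svc.my_task

# The bound function points back at the service that contains it, closing a
# cycle that naive recursion into "nested services" would follow forever.
assert bound_task_func.__self__ is svc
```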
Mose Müller
56dd9dd8aa adapts autostart to support nested lists in dicts and vice versa 2024-09-21 09:12:01 +02:00
Mose Müller
20028c379d test: updates task tests 2024-09-21 09:04:04 +02:00
Mose Müller
e48046795e updates version to v0.10.2 2024-09-21 08:37:34 +02:00
Mose Müller
1ac9e45c73 test: updates task test to catch recursion when defining without autostart 2024-09-21 08:36:47 +02:00
Mose Müller
488415436c fixes recursion when defining task without autostart 2024-09-21 08:32:54 +02:00
Mose Müller
d7c5c2cd6e updates to version v0.10.1 2024-09-17 16:52:39 +02:00
Mose Müller
5388fd0d2b Merge pull request #157 from tiqi-group/fix/handle_minus_sign_input
fix: correctly handling minus sign input in NumberComponent
2024-09-17 16:52:08 +02:00
Mose Müller
e74b5c773a npm run build 2024-09-17 16:51:00 +02:00
Mose Müller
bb6cd159f1 frontend: refactoring minus sign handling in NumberComponent 2024-09-17 16:48:40 +02:00
Mose Müller
4a09f02882 docs: updates Readme
Clarifies the usage of ports in both the server and clients.
2024-09-17 07:23:45 +02:00
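
A compact sketch consolidating the port usage that the README diff below clarifies (the hostname and port value are just the defaults used in the README):

```python
# Sketch of the server/client port pairing described in the README changes.
import pydase


class Device(pydase.DataService):
    ...


if __name__ == "__main__":
    service = Device()
    # web_port defaults to 8001; passing it explicitly makes the pairing obvious.
    pydase.Server(service=service, web_port=8001).run()

# From another process, clients and the REST API use that same web port, e.g.:
#   pydase.Client(url="ws://localhost:8001").proxy
#   GET http://localhost:8001/api/v1/get_value?access_path=<full_access_path>
```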
14 changed files with 408 additions and 103 deletions

View File

@@ -105,7 +105,7 @@ class Device(pydase.DataService):
if __name__ == "__main__":
service = Device()
pydase.Server(service=service).run()
pydase.Server(service=service, web_port=8001).run()
```
In the above example, we define a `Device` class that inherits from `pydase.DataService`.
@@ -122,10 +122,13 @@ import pydase
if __name__ == "__main__":
service = Device()
pydase.Server(service=service).run()
pydase.Server(service=service, web_port=8001).run()
```
This will start the server, making your `Device` service accessible on [http://localhost:8001](http://localhost:8001).
This will start the server, making your `Device` service accessible on
[http://localhost:8001](http://localhost:8001). The port number for the web server can
be customised in the server constructor or through environment variables and defaults
to `8001`.
### Accessing the Web Interface
@@ -144,7 +147,7 @@ import pydase
# Replace the hostname and port with the IP address and the port of the machine where
# the service is running, respectively
client_proxy = pydase.Client(url="ws://<ip_addr>:<service_port>").proxy
client_proxy = pydase.Client(url="ws://<ip_addr>:<web_port>").proxy
# client_proxy = pydase.Client(url="wss://your-domain.ch").proxy # if your service uses ssl-encryption
# After the connection, interact with the service attributes as if they were local
@@ -170,7 +173,7 @@ import json
import requests
response = requests.get(
"http://<hostname>:<port>/api/v1/get_value?access_path=<full_access_path>"
"http://<hostname>:<web_port>/api/v1/get_value?access_path=<full_access_path>"
)
serialized_value = json.loads(response.text)
```

View File

@@ -132,6 +132,8 @@ const handleNumericKey = (
selectionStart: number,
selectionEnd: number,
) => {
let newValue = value;
// Check if a number key or a decimal point key is pressed
if (key === "." && value.includes(".")) {
// Check if value already contains a decimal. If so, ignore input.
@@ -139,14 +141,34 @@ const handleNumericKey = (
return { value, selectionStart };
}
let newValue = value;
// Handle minus sign input
if (key === "-") {
if (selectionStart === 0 && selectionEnd > selectionStart) {
// Replace selection with minus if selection starts at 0
newValue = "-" + value.slice(selectionEnd);
selectionStart = 1;
} else if (selectionStart === 0 && !value.startsWith("-")) {
// Add minus at the beginning if it doesn't exist
newValue = "-" + value;
selectionStart = 1;
} else if (
(selectionStart === 0 || selectionStart === 1) &&
value.startsWith("-")
) {
// Remove minus if it exists
newValue = value.slice(1);
selectionStart = 0;
}
return { value: newValue, selectionStart };
}
// Add the new key at the cursor's position
if (selectionEnd > selectionStart) {
// If there is a selection, replace it with the key
newValue = value.slice(0, selectionStart) + key + value.slice(selectionEnd);
} else {
// otherwise, append the key after the selection start
// Otherwise, insert the key at the cursor position
newValue = value.slice(0, selectionStart) + key + value.slice(selectionStart);
}
@@ -201,17 +223,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
// Select everything when pressing Ctrl + a
inputTarget.setSelectionRange(0, value.length);
return;
} else if (key === "-") {
if (selectionStart === 0 && !value.startsWith("-")) {
newValue = "-" + value;
selectionStart++;
} else if (value.startsWith("-") && selectionStart === 1) {
newValue = value.substring(1); // remove minus sign
selectionStart--;
} else {
return; // Ignore "-" pressed in other positions
}
} else if (key >= "0" && key <= "9") {
} else if ((key >= "0" && key <= "9") || key === "-") {
// Check if a number key or a decimal point key is pressed
({ value: newValue, selectionStart } = handleNumericKey(
key,

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "pydase"
version = "0.10.0"
version = "0.10.3"
description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases."
authors = ["Mose Mueller <mosmuell@ethz.ch>"]
readme = "README.md"

View File

@@ -8,7 +8,10 @@ from pydase.observer_pattern.observable.observable_object import ObservableObjec
from pydase.observer_pattern.observer.property_observer import (
PropertyObserver,
)
from pydase.utils.helpers import get_object_attr_from_path
from pydase.utils.helpers import (
get_object_attr_from_path,
normalize_full_access_path_string,
)
from pydase.utils.serialization.serializer import (
SerializationPathError,
SerializedObject,
@@ -99,7 +102,8 @@ class DataServiceObserver(PropertyObserver):
)
def _notify_dependent_property_changes(self, changed_attr_path: str) -> None:
changed_props = self.property_deps_dict.get(changed_attr_path, [])
normalized_attr_path = normalize_full_access_path_string(changed_attr_path)
changed_props = self.property_deps_dict.get(normalized_attr_path, [])
for prop in changed_props:
# only notify about changing attribute if it is not currently being
# "changed" e.g. when calling the getter of a property within another

View File

@@ -6,7 +6,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="theme-color" content="#000000" />
<meta name="description" content="Web site displaying a pydase UI." />
<script type="module" crossorigin src="/assets/index-DI9re3au.js"></script>
<script type="module" crossorigin src="/assets/index-BjsjosWf.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-D2aktF3W.css">
</head>

View File

@@ -22,12 +22,9 @@ class Observable(ObservableObject):
- {"__annotations__"}
}
for name, value in class_attrs.items():
if isinstance(value, property) or callable(value):
continue
if is_descriptor(value):
# Descriptors have to be stored as a class variable in another class to
# work properly. So don't make it an instance attribute.
self._initialise_new_objects(name, value)
if isinstance(value, property) or callable(value) or is_descriptor(value):
# Properties, methods and descriptors have to be stored as class
# attributes to work properly. So don't make it an instance attribute.
continue
self.__dict__[name] = self._initialise_new_objects(name, value)

View File

@@ -5,6 +5,7 @@ from typing import Any
from pydase.observer_pattern.observable.observable import Observable
from pydase.observer_pattern.observer.observer import Observer
from pydase.utils.helpers import is_descriptor
logger = logging.getLogger(__name__)
@@ -61,17 +62,27 @@ class PropertyObserver(Observer):
self, obj: Observable, deps: dict[str, Any], prefix: str
) -> None:
for k, value in {**vars(type(obj)), **vars(obj)}.items():
actual_value = value
prefix = (
f"{prefix}." if prefix != "" and not prefix.endswith(".") else prefix
)
parent_path = f"{prefix}{k}"
if isinstance(value, Observable):
# Get value from descriptor
if not isinstance(value, property) and is_descriptor(value):
actual_value = getattr(obj, k)
if isinstance(actual_value, Observable):
new_prefix = f"{parent_path}."
deps.update(
self._get_properties_and_their_dependencies(value, new_prefix)
self._get_properties_and_their_dependencies(
actual_value, new_prefix
)
)
elif isinstance(value, list | dict):
self._process_collection_item_properties(value, deps, parent_path)
self._process_collection_item_properties(
actual_value, deps, parent_path
)
def _process_collection_item_properties(
self,

View File

@@ -17,16 +17,18 @@ def autostart_service_tasks(
"""
for attr in dir(service):
if is_property_attribute(service, attr): # prevent eval of property attrs
if is_property_attribute(service, attr) or attr in {
"_observers",
"__dict__",
}: # prevent eval of property attrs and recursion
continue
val = getattr(service, attr)
if (
isinstance(val, pydase.task.task.Task)
and val.autostart
and val.status == TaskStatus.NOT_RUNNING
):
val.start()
if isinstance(val, pydase.task.task.Task):
if val.autostart and val.status == TaskStatus.NOT_RUNNING:
val.start()
else:
continue
else:
autostart_nested_service_tasks(val)
@@ -38,7 +40,7 @@ def autostart_nested_service_tasks(
autostart_service_tasks(service)
elif isinstance(service, list):
for entry in service:
autostart_service_tasks(entry)
autostart_nested_service_tasks(entry)
elif isinstance(service, dict):
for entry in service.values():
autostart_service_tasks(entry)
autostart_nested_service_tasks(entry)

View File

@@ -1,7 +1,8 @@
import logging
from collections.abc import Callable, Coroutine
from typing import Any, TypeVar
from typing import Any, Generic, TypeVar, overload
from pydase.data_service.data_service import DataService
from pydase.task.task import Task
logger = logging.getLogger(__name__)
@@ -9,6 +10,69 @@ logger = logging.getLogger(__name__)
R = TypeVar("R")
class PerInstanceTaskDescriptor(Generic[R]):
"""
A descriptor class that provides a unique [`Task`][pydase.task.task.Task] object
for each instance of a [`DataService`][pydase.data_service.data_service.DataService]
class.
The `PerInstanceTaskDescriptor` is used to transform an asynchronous function into a
task that is managed independently for each instance of a `DataService` subclass.
This allows tasks to be initialized, started, and stopped on a per-instance basis,
providing better control over task execution within the service.
The `PerInstanceTaskDescriptor` is not intended to be used directly. Instead, it is
used internally by the `@task` decorator to manage task objects for each instance of
the service class.
"""
def __init__(
self,
func: Callable[[Any], Coroutine[None, None, R]]
| Callable[[], Coroutine[None, None, R]],
autostart: bool = False,
) -> None:
self.__func = func
self.__autostart = autostart
self.__task_instances: dict[object, Task[R]] = {}
def __set_name__(self, owner: type[DataService], name: str) -> None:
"""Stores the name of the task within the owning class. This method is called
automatically when the descriptor is assigned to a class attribute.
"""
self.__task_name = name
@overload
def __get__(
self, instance: None, owner: type[DataService]
) -> "PerInstanceTaskDescriptor[R]":
"""Returns the descriptor itself when accessed through the class."""
@overload
def __get__(self, instance: DataService, owner: type[DataService]) -> Task[R]:
"""Returns the `Task` object associated with the specific `DataService`
instance.
If no task exists for the instance, a new `Task` object is created and stored
in the `__task_instances` dictionary.
"""
def __get__(
self, instance: DataService | None, owner: type[DataService]
) -> "Task[R] | PerInstanceTaskDescriptor[R]":
if instance is None:
return self
# Create a new Task object for this instance, using the function's name.
if instance not in self.__task_instances:
self.__task_instances[instance] = instance._initialise_new_objects(
self.__task_name,
Task(self.__func.__get__(instance, owner), autostart=self.__autostart),
)
return self.__task_instances[instance]
def task(
*, autostart: bool = False
) -> Callable[
@@ -16,18 +80,22 @@ def task(
Callable[[Any], Coroutine[None, None, R]]
| Callable[[], Coroutine[None, None, R]]
],
Task[R],
PerInstanceTaskDescriptor[R],
]:
"""
A decorator to define a function as a task within a
A decorator to define an asynchronous function as a per-instance task within a
[`DataService`][pydase.DataService] class.
This decorator transforms an asynchronous function into a
[`Task`][pydase.task.task.Task] object. The `Task` object provides methods like
`start()` and `stop()` to control the execution of the task.
[`Task`][pydase.task.task.Task] object that is unique to each instance of the
`DataService` class. The resulting `Task` object provides methods like `start()`
and `stop()` to control the execution of the task, and manages the task's lifecycle
independently for each instance of the service.
Tasks are typically used to perform periodic or recurring jobs, such as reading
sensor data, updating databases, or other operations that need to be repeated over
time.
The decorator is particularly useful for defining tasks that need to run
periodically or perform asynchronous operations, such as polling data sources,
updating databases, or any recurring job that should be managed within the context
of a `DataService`.
Args:
@@ -36,8 +104,10 @@ def task(
initialized. Defaults to False.
Returns:
A decorator that converts an asynchronous function into a
[`Task`][pydase.task.task.Task] object.
A decorator that wraps an asynchronous function in a
[`PerInstanceTaskDescriptor`][pydase.task.decorator.PerInstanceTaskDescriptor]
object, which, when accessed, provides an instance-specific
[`Task`][pydase.task.task.Task] object.
Example:
```python
@@ -69,7 +139,7 @@ def task(
def decorator(
func: Callable[[Any], Coroutine[None, None, R]]
| Callable[[], Coroutine[None, None, R]],
) -> Task[R]:
return Task(func, autostart=autostart)
) -> PerInstanceTaskDescriptor[R]:
return PerInstanceTaskDescriptor(func, autostart=autostart)
return decorator

View File

@@ -4,19 +4,16 @@ import logging
import sys
from collections.abc import Callable, Coroutine
from typing import (
Any,
Generic,
TypeVar,
)
from typing_extensions import TypeIs
from pydase.task.task_status import TaskStatus
if sys.version_info < (3, 11):
from typing_extensions import Self
pass
else:
from typing import Self
pass
import pydase.data_service.data_service
from pydase.utils.helpers import current_event_loop_exists
@@ -27,17 +24,8 @@ logger = logging.getLogger(__name__)
R = TypeVar("R")
def is_bound_method(
method: Callable[[], Coroutine[None, None, R | None]]
| Callable[[Any], Coroutine[None, None, R | None]],
) -> TypeIs[Callable[[], Coroutine[None, None, R | None]]]:
"""Check if instance method is bound to an object."""
return inspect.ismethod(method)
class Task(pydase.data_service.data_service.DataService, Generic[R]):
"""
A class representing a task within the `pydase` framework.
"""A class representing a task within the `pydase` framework.
The `Task` class wraps an asynchronous function and provides methods to manage its
lifecycle, such as `start()` and `stop()`. It is typically used to perform periodic
@@ -85,25 +73,24 @@ class Task(pydase.data_service.data_service.DataService, Generic[R]):
def __init__(
self,
func: Callable[[Any], Coroutine[None, None, R | None]]
| Callable[[], Coroutine[None, None, R | None]],
func: Callable[[], Coroutine[None, None, R | None]],
*,
autostart: bool = False,
) -> None:
super().__init__()
self._autostart = autostart
self._func_name = func.__name__
self._bound_func: Callable[[], Coroutine[None, None, R | None]] | None = None
self._set_up = False
if is_bound_method(func):
self._func = func
self._bound_func = func
else:
self._func = func
self._func = func
self._task: asyncio.Task[R | None] | None = None
self._status = TaskStatus.NOT_RUNNING
self._result: R | None = None
if not current_event_loop_exists():
self._loop = asyncio.new_event_loop()
asyncio.set_event_loop(self._loop)
else:
self._loop = asyncio.get_event_loop()
@property
def autostart(self) -> bool:
"""Defines if the task should be started automatically when the
@@ -144,10 +131,10 @@ class Task(pydase.data_service.data_service.DataService, Generic[R]):
self._result = task.result()
async def run_task() -> R | None:
if inspect.iscoroutinefunction(self._bound_func):
if inspect.iscoroutinefunction(self._func):
logger.info("Starting task %r", self._func_name)
self._status = TaskStatus.RUNNING
res: Coroutine[None, None, R] = self._bound_func()
res: Coroutine[None, None, R | None] = self._func()
try:
return await res
except asyncio.CancelledError:
@@ -167,24 +154,3 @@ class Task(pydase.data_service.data_service.DataService, Generic[R]):
if self._task:
self._task.cancel()
def __get__(self, instance: Any, owner: Any) -> Self:
"""Descriptor method used to correctly set up the task.
This descriptor method is called by the class instance containing the task.
It binds the task function to that class instance.
Since the `__init__` function is called when a function is decorated with
[`@task`][pydase.task.decorator.task], some setup is delayed until this
descriptor function is called.
"""
if instance and not self._set_up:
if not current_event_loop_exists():
self._loop = asyncio.new_event_loop()
asyncio.set_event_loop(self._loop)
else:
self._loop = asyncio.get_event_loop()
self._bound_func = self._func.__get__(instance, owner)
self._set_up = True
return self

View File

@@ -204,6 +204,17 @@ def function_has_arguments(func: Callable[..., Any]) -> bool:
def is_descriptor(obj: object) -> bool:
"""Check if an object is a descriptor."""
# Exclude functions, methods, builtins and properties
if (
inspect.isfunction(obj)
or inspect.ismethod(obj)
or inspect.isbuiltin(obj)
or isinstance(obj, property)
):
return False
# Check if it has any descriptor methods
return any(hasattr(obj, method) for method in ("__get__", "__set__", "__delete__"))
@@ -212,3 +223,25 @@ def current_event_loop_exists() -> bool:
import asyncio
return asyncio.get_event_loop_policy()._local._loop is not None # type: ignore
def normalize_full_access_path_string(s: str) -> str:
"""Normalizes a string representing a full access path by converting double quotes
to single quotes.
This function is useful for ensuring consistency in strings that represent access
paths containing dictionary keys, by replacing all double quotes (`"`) with single
quotes (`'`).
Args:
s (str): The input string to be normalized.
Returns:
A new string with all double quotes replaced by single quotes.
Example:
>>> normalize_full_access_path_string('dictionary["first"].my_task')
"dictionary['first'].my_task"
"""
return s.replace('"', "'")

View File

@@ -5,7 +5,7 @@ import pydase
import pytest
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.utils.serialization.serializer import SerializationError
from pydase.utils.serialization.serializer import SerializationError, dump
logger = logging.getLogger()
@@ -146,3 +146,41 @@ def test_private_attribute_does_not_have_to_be_serializable() -> None:
service_instance.change_publ_attr()
service_instance.change_priv_attr()
def test_normalized_attr_path_in_dependent_property_changes(
caplog: pytest.LogCaptureFixture,
) -> None:
class SubService(pydase.DataService):
_prop = 10.0
@property
def prop(self) -> float:
return self._prop
class MyService(pydase.DataService):
def __init__(self) -> None:
super().__init__()
self.service_dict = {"one": SubService()}
service_instance = MyService()
state_manager = StateManager(service=service_instance)
observer = DataServiceObserver(state_manager=state_manager)
assert observer.property_deps_dict["service_dict['one']._prop"] == [
"service_dict['one'].prop"
]
# We can use dict key path encoded with double quotes
state_manager.set_service_attribute_value_by_path(
'service_dict["one"]._prop', dump(11.0)
)
assert service_instance.service_dict["one"].prop == 11.0
assert "'service_dict[\"one\"].prop' changed to '11.0'" in caplog.text
# We can use dict key path encoded with single quotes
state_manager.set_service_attribute_value_by_path(
"service_dict['one']._prop", dump(12.0)
)
assert service_instance.service_dict["one"].prop == 12.0
assert "'service_dict[\"one\"].prop' changed to '12.0'" in caplog.text

View File

@@ -18,23 +18,30 @@ async def test_start_and_stop_task(caplog: LogCaptureFixture) -> None:
class MyService(pydase.DataService):
@task()
async def my_task(self) -> None:
logger.info("Triggered task.")
while True:
logger.debug("Logging message")
await asyncio.sleep(0.01)
await asyncio.sleep(1)
# Your test code here
service_instance = MyService()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
autostart_service_tasks(service_instance)
await asyncio.sleep(0.1)
assert service_instance.my_task.status == TaskStatus.NOT_RUNNING
service_instance.my_task.start()
await asyncio.sleep(0.1)
assert service_instance.my_task.status == TaskStatus.RUNNING
assert "'my_task.status' changed to 'TaskStatus.RUNNING'" in caplog.text
assert "Logging message" in caplog.text
assert "Triggered task." in caplog.text
caplog.clear()
service_instance.my_task.stop()
await asyncio.sleep(0.1)
assert service_instance.my_task.status == TaskStatus.NOT_RUNNING
assert "Task 'my_task' was cancelled" in caplog.text
@@ -44,6 +51,8 @@ async def test_autostart_task(caplog: LogCaptureFixture) -> None:
@task(autostart=True)
async def my_task(self) -> None:
logger.info("Triggered task.")
while True:
await asyncio.sleep(1)
# Your test code here
service_instance = MyService()
@@ -53,6 +62,7 @@ async def test_autostart_task(caplog: LogCaptureFixture) -> None:
autostart_service_tasks(service_instance)
await asyncio.sleep(0.1)
assert service_instance.my_task.status == TaskStatus.RUNNING
assert "'my_task.status' changed to 'TaskStatus.RUNNING'" in caplog.text
@@ -65,6 +75,8 @@ async def test_nested_list_autostart_task(
@task(autostart=True)
async def my_task(self) -> None:
logger.info("Triggered task.")
while True:
await asyncio.sleep(1)
class MyService(pydase.DataService):
sub_services_list = [MySubService() for i in range(2)]
@@ -75,6 +87,8 @@ async def test_nested_list_autostart_task(
autostart_service_tasks(service_instance)
await asyncio.sleep(0.1)
assert service_instance.sub_services_list[0].my_task.status == TaskStatus.RUNNING
assert service_instance.sub_services_list[1].my_task.status == TaskStatus.RUNNING
assert (
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
@@ -111,6 +125,10 @@ async def test_nested_dict_autostart_task(
assert (
service_instance.sub_services_dict["first"].my_task.status == TaskStatus.RUNNING
)
assert (
service_instance.sub_services_dict["second"].my_task.status
== TaskStatus.RUNNING
)
assert (
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
@@ -120,3 +138,154 @@ async def test_nested_dict_autostart_task(
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
in caplog.text
)
@pytest.mark.asyncio(scope="function")
async def test_manual_start_with_multiple_service_instances(
caplog: LogCaptureFixture,
) -> None:
class MySubService(pydase.DataService):
@task()
async def my_task(self) -> None:
logger.info("Triggered task.")
while True:
await asyncio.sleep(1)
class MyService(pydase.DataService):
sub_services_list = [MySubService() for i in range(2)]
sub_services_dict = {"first": MySubService(), "second": MySubService()}
service_instance = MyService()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
autostart_service_tasks(service_instance)
await asyncio.sleep(0.1)
assert (
service_instance.sub_services_list[0].my_task.status == TaskStatus.NOT_RUNNING
)
assert (
service_instance.sub_services_list[1].my_task.status == TaskStatus.NOT_RUNNING
)
assert (
service_instance.sub_services_dict["first"].my_task.status
== TaskStatus.NOT_RUNNING
)
assert (
service_instance.sub_services_dict["second"].my_task.status
== TaskStatus.NOT_RUNNING
)
service_instance.sub_services_list[0].my_task.start()
await asyncio.sleep(0.01)
assert service_instance.sub_services_list[0].my_task.status == TaskStatus.RUNNING
assert (
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
in caplog.text
)
assert (
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
not in caplog.text
)
assert (
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
not in caplog.text
)
assert (
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
not in caplog.text
)
service_instance.sub_services_list[0].my_task.stop()
await asyncio.sleep(0.01)
assert "Task 'my_task' was cancelled" in caplog.text
caplog.clear()
service_instance.sub_services_list[1].my_task.start()
await asyncio.sleep(0.01)
assert service_instance.sub_services_list[1].my_task.status == TaskStatus.RUNNING
assert (
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
not in caplog.text
)
assert (
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
in caplog.text
)
assert (
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
not in caplog.text
)
assert (
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
not in caplog.text
)
service_instance.sub_services_list[1].my_task.stop()
await asyncio.sleep(0.01)
assert "Task 'my_task' was cancelled" in caplog.text
caplog.clear()
service_instance.sub_services_dict["first"].my_task.start()
await asyncio.sleep(0.01)
assert (
service_instance.sub_services_dict["first"].my_task.status == TaskStatus.RUNNING
)
assert (
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
not in caplog.text
)
assert (
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
not in caplog.text
)
assert (
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
in caplog.text
)
assert (
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
not in caplog.text
)
service_instance.sub_services_dict["first"].my_task.stop()
await asyncio.sleep(0.01)
assert "Task 'my_task' was cancelled" in caplog.text
caplog.clear()
service_instance.sub_services_dict["second"].my_task.start()
await asyncio.sleep(0.01)
assert (
service_instance.sub_services_dict["second"].my_task.status
== TaskStatus.RUNNING
)
assert (
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
not in caplog.text
)
assert (
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
not in caplog.text
)
assert (
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
not in caplog.text
)
assert (
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
in caplog.text
)
service_instance.sub_services_dict["second"].my_task.stop()
await asyncio.sleep(0.01)
assert "Task 'my_task' was cancelled" in caplog.text