Mirror of https://github.com/tiqi-group/pydase.git

Commit d0377be455: Merge pull request #63 from tiqi-group/52-add-a-cache-storing-the-state-of-the-service
("52 add a cache storing the state of the service")
README.md (14 changed lines)
@@ -380,7 +380,7 @@ Please ensure that the CSS file path is accessible from the server's running loc
 
 `pydase` allows you to easily persist the state of your service by saving it to a file. This is especially useful when you want to maintain the service's state across different runs.
 
-To save the state of your service, pass a `filename` keyword argument to the `__init__` method of the `DataService` base class. If the file specified by `filename` does not exist, the service will create this file and store its state in it when the service is shut down. If the file already exists, the service will load the state from this file, setting the values of its attributes to the values stored in the file.
+To save the state of your service, pass a `filename` keyword argument to the constructor of the `pydase.Server` class. If the file specified by `filename` does not exist, the state manager will create this file and store its state in it when the service is shut down. If the file already exists, the state manager will load the state from this file, setting the values of its attributes to the values stored in the file.
 
 Here's an example:
 
@@ -388,21 +388,15 @@ Here's an example:
 from pydase import DataService, Server
 
 class Device(DataService):
-    def __init__(self, filename: str) -> None:
-        # ... your init code ...
-
-        # Pass the filename argument to the parent class
-        super().__init__(filename=filename)
-
     # ... defining the Device class ...
 
 
 if __name__ == "__main__":
-    service = Device("device_state.json")
-    Server(service).run()
+    service = Device()
+    Server(service, filename="device_state.json").run()
 ```
 
-In this example, the state of the `Device` service will be saved to `device_state.json` when the service is shut down. If `device_state.json` exists when the service is started, the service will restore its state from this file.
+In this example, the state of the `Device` service will be saved to `device_state.json` when the service is shut down. If `device_state.json` exists when the server is started, the state manager will restore the state of the service from this file.
 
 Note: If the service class structure has changed since the last time its state was saved, only the attributes that have remained the same will be restored from the settings file.
 
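For orientation (not part of the diff): the file written on shutdown contains the serialized service state in the format produced by the serializer introduced later in this commit, where each attribute is stored as a small dictionary with `type`, `value`, `readonly` and `doc` keys. A minimal sketch of inspecting such a file, assuming the `device_state.json` from the example above and a hypothetical `voltage` attribute:

```python
import json

# Hedged sketch: read back the state file written by the service on shutdown.
with open("device_state.json") as f:
    state = json.load(f)

# Each entry follows the serializer format used in this PR, e.g. for a hypothetical
# attribute: {"voltage": {"type": "float", "value": 5.0, "readonly": False, "doc": None}}
for name, entry in state.items():
    print(name, entry["type"], entry["value"])
```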
@@ -5,7 +5,7 @@ from pathlib import Path
 from typing import TYPE_CHECKING, Optional
 from urllib.request import urlopen
 
-import PIL.Image
+import PIL.Image  # type: ignore
 
 from pydase.data_service.data_service import DataService
 
@@ -4,9 +4,9 @@ from abc import ABC
 from typing import TYPE_CHECKING, Any
 
 if TYPE_CHECKING:
-    from .callback_manager import CallbackManager
-    from .data_service import DataService
-    from .task_manager import TaskManager
+    from pydase.data_service.callback_manager import CallbackManager
+    from pydase.data_service.data_service import DataService
+    from pydase.data_service.task_manager import TaskManager
 
 
 class AbstractDataService(ABC):
@@ -1,10 +1,10 @@
-import json
 import logging
-import os
+import warnings
 from enum import Enum
-from typing import Any, Optional, cast, get_type_hints
+from pathlib import Path
+from typing import Any, Optional, get_type_hints
 
-import rpyc
+import rpyc  # type: ignore
 
 import pydase.units as u
 from pydase.data_service.abstract_data_service import AbstractDataService
@@ -12,15 +12,17 @@ from pydase.data_service.callback_manager import CallbackManager
 from pydase.data_service.task_manager import TaskManager
 from pydase.utils.helpers import (
     convert_arguments_to_hinted_types,
-    generate_paths_from_DataService_dict,
     get_class_and_instance_attributes,
-    get_nested_value_from_DataService_by_path_and_key,
     get_object_attr_from_path,
     is_property_attribute,
     parse_list_attr_and_index,
     update_value_if_changed,
 )
-from pydase.utils.serialization import Serializer
+from pydase.utils.serializer import (
+    Serializer,
+    generate_serialized_data_paths,
+    get_nested_dict_by_path,
+)
 from pydase.utils.warnings import (
     warn_if_instance_class_does_not_inherit_from_DataService,
 )
@@ -40,7 +42,7 @@ def process_callable_attribute(attr: Any, args: dict[str, Any]) -> Any:
 
 
 class DataService(rpyc.Service, AbstractDataService):
-    def __init__(self, filename: Optional[str] = None) -> None:
+    def __init__(self, **kwargs: Any) -> None:
         self._callback_manager: CallbackManager = CallbackManager(self)
         self._task_manager = TaskManager(self)
 
@@ -51,12 +53,19 @@ class DataService(rpyc.Service, AbstractDataService):
         """Keep track of the root object. This helps to filter the emission of
         notifications."""
 
-        self._filename: Optional[str] = filename
+        filename = kwargs.pop("filename", None)
+        if filename is not None:
+            warnings.warn(
+                "The 'filename' argument is deprecated and will be removed in a future version. "
+                "Please pass the 'filename' argument to `pydase.Server`.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            self._filename: str | Path = filename
 
         self._callback_manager.register_callbacks()
         self.__check_instance_classes()
         self._initialised = True
-        self._load_values_from_json()
 
     def __setattr__(self, __name: str, __value: Any) -> None:
         # converting attributes that are not properties
@@ -129,50 +138,44 @@
             # allow all other attributes
             setattr(self, name, value)
 
-    def _load_values_from_json(self) -> None:
-        if self._filename is not None:
-            # Check if the file specified by the filename exists
-            if os.path.exists(self._filename):
-                with open(self._filename, "r") as f:
-                    # Load JSON data from file and update class attributes with these
-                    # values
-                    self.load_DataService_from_JSON(cast(dict[str, Any], json.load(f)))
-
     def write_to_file(self) -> None:
         """
         Serialize the DataService instance and write it to a JSON file.
 
-        Args:
-            filename (str): The name of the file to write to.
+        This method is deprecated and will be removed in a future version.
+        Service persistence is handled by `pydase.Server` now, instead.
         """
-        if self._filename is not None:
-            with open(self._filename, "w") as f:
-                json.dump(self.serialize(), f, indent=4)
-        else:
-            logger.error(
-                f"Class {self.__class__.__name__} was not initialised with a filename. "
-                'Skipping "write_to_file"...'
-            )
+        warnings.warn(
+            "'write_to_file' is deprecated and will be removed in a future version. "
+            "Service persistence is handled by `pydase.Server` now, instead.",
+            DeprecationWarning,
+            stacklevel=2,
         )
 
+        if hasattr(self, "_state_manager"):
+            getattr(self, "_state_manager").save_state()
+
     def load_DataService_from_JSON(self, json_dict: dict[str, Any]) -> None:
+        warnings.warn(
+            "'load_DataService_from_JSON' is deprecated and will be removed in a "
+            "future version. "
+            "Service persistence is handled by `pydase.Server` now, instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
         # Traverse the serialized representation and set the attributes of the class
         serialized_class = self.serialize()
-        for path in generate_paths_from_DataService_dict(json_dict):
-            value = get_nested_value_from_DataService_by_path_and_key(
-                json_dict, path=path
-            )
-            value_type = get_nested_value_from_DataService_by_path_and_key(
-                json_dict, path=path, key="type"
-            )
-            class_value_type = get_nested_value_from_DataService_by_path_and_key(
-                serialized_class, path=path, key="type"
-            )
+        for path in generate_serialized_data_paths(json_dict):
+            nested_json_dict = get_nested_dict_by_path(json_dict, path)
+            value = nested_json_dict["value"]
+            value_type = nested_json_dict["type"]
+
+            nested_class_dict = get_nested_dict_by_path(serialized_class, path)
+            class_value_type = nested_class_dict.get("type", None)
             if class_value_type == value_type:
-                class_attr_is_read_only = (
-                    get_nested_value_from_DataService_by_path_and_key(
-                        serialized_class, path=path, key="readonly"
-                    )
-                )
+                class_attr_is_read_only = nested_class_dict["readonly"]
                 if class_attr_is_read_only:
                     logger.debug(
                         f'Attribute "{path}" is read-only. Ignoring value from JSON '
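To illustrate the deprecation path added above (a sketch, not part of the commit): passing `filename` to the `DataService` constructor still works, but it now only records the name and emits a `DeprecationWarning`; persistence itself is handled by `pydase.Server`.

```python
import warnings

from my_service import Device  # hypothetical module holding the README's Device class

# Hedged sketch: the old-style call still runs but warns.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    service = Device(filename="device_state.json")  # deprecated usage

assert any(issubclass(w.category, DeprecationWarning) for w in caught)

# Preferred replacement, as in the updated README:
# Server(service, filename="device_state.json").run()
```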
src/pydase/data_service/data_service_cache.py (new file, 36 lines)
@@ -0,0 +1,36 @@
import logging
from typing import TYPE_CHECKING, Any

from pydase.utils.serializer import set_nested_value_by_path

if TYPE_CHECKING:
    from pydase import DataService

logger = logging.getLogger(__name__)


class DataServiceCache:
    def __init__(self, service: "DataService") -> None:
        self._cache: dict[str, Any] = {}
        self.service = service
        self._initialize_cache()

    @property
    def cache(self) -> dict[str, Any]:
        return self._cache

    def _initialize_cache(self) -> None:
        """Initializes the cache and sets up the callback."""
        logger.debug("Initializing cache.")
        self._cache = self.service.serialize()
        self.service._callback_manager.add_notification_callback(self.update_cache)

    def update_cache(self, parent_path: str, name: str, value: Any) -> None:
        # Remove the part before the first "." in the parent_path
        parent_path = ".".join(parent_path.split(".")[1:])

        # Construct the full path
        full_path = f"{parent_path}.{name}" if parent_path else name

        set_nested_value_by_path(self._cache, full_path, value)
        logger.debug(f"Cache updated at path: {full_path}, with value: {value}")
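A small behavioural sketch of the new cache (not part of the commit; `MyService` is a hypothetical `DataService` subclass): the cache starts out as a full `serialize()` snapshot and is then patched in place whenever the callback manager reports an attribute change.

```python
import pydase
from pydase.data_service.data_service_cache import DataServiceCache


class MyService(pydase.DataService):  # hypothetical example service
    voltage = 1.0


service = MyService()
cache = DataServiceCache(service)

# Changing an attribute triggers a notification, which update_cache() uses to
# patch the cached entry via set_nested_value_by_path.
service.voltage = 2.5
print(cache.cache["voltage"]["value"])  # expected: 2.5
```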
src/pydase/data_service/state_manager.py (new file, 144 lines)
@@ -0,0 +1,144 @@
import json
import logging
import os
from pathlib import Path
from typing import TYPE_CHECKING, Any, Optional, cast

import pydase.units as u
from pydase.data_service.data_service_cache import DataServiceCache
from pydase.utils.serializer import (
    generate_serialized_data_paths,
    get_nested_dict_by_path,
)

if TYPE_CHECKING:
    from pydase import DataService

logger = logging.getLogger(__name__)


class StateManager:
    """
    Manages the state of a DataService instance, serving as both a cache and a
    persistence layer. It is designed to provide quick access to the latest known state
    for newly connecting web clients without the need for expensive property accesses
    that may involve complex calculations or I/O operations.

    The StateManager listens for state change notifications from the DataService's
    callback manager and updates its cache accordingly. This cache does not always
    reflect the most current complex property states but rather retains the value from
    the last known state, optimizing for performance and reducing the load on the
    system.

    While the StateManager ensures that the cached state is as up-to-date as possible,
    it does not autonomously update complex properties of the DataService. Such
    properties must be updated programmatically, for instance, by invoking specific
    tasks or methods that trigger the necessary operations to refresh their state.

    The cached state maintained by the StateManager is particularly useful for web
    clients that connect to the system and need immediate access to the current state of
    the DataService. By avoiding direct and potentially costly property accesses, the
    StateManager provides a snapshot of the DataService's state that is sufficiently
    accurate for initial rendering and interaction.

    Attributes:
        cache (dict[str, Any]):
            A dictionary cache of the DataService's state.
        filename (str):
            The file name used for storing the DataService's state.
        service (DataService):
            The DataService instance whose state is being managed.

    Note:
        The StateManager's cache updates are triggered by notifications and do not
        include autonomous updates of complex DataService properties, which must be
        managed programmatically. The cache serves the purpose of providing immediate
        state information to web clients, reflecting the state after the last property
        update.
    """

    def __init__(self, service: "DataService", filename: Optional[str | Path] = None):
        self.filename = getattr(service, "_filename", None)

        if filename is not None:
            if self.filename is not None:
                logger.warning(
                    f"Overwriting filename {self.filename!r} with {filename!r}."
                )
            self.filename = filename

        self.service = service
        self._data_service_cache = DataServiceCache(self.service)

    @property
    def cache(self) -> dict[str, Any]:
        """Returns the cached DataService state."""
        return self._data_service_cache.cache

    def save_state(self) -> None:
        """
        Saves the DataService's current state to a JSON file defined by `self.filename`.
        Logs an error if `self.filename` is not set.
        """

        if self.filename is not None:
            with open(self.filename, "w") as f:
                json.dump(self.cache, f, indent=4)
        else:
            logger.error(
                "State manager was not initialised with a filename. Skipping "
                "'save_state'..."
            )

    def load_state(self) -> None:
        """
        Loads the DataService's state from a JSON file defined by `self.filename`.
        Updates the service's attributes, respecting type and read-only constraints.
        """

        # Traverse the serialized representation and set the attributes of the class
        json_dict = self._get_state_dict_from_JSON_file()
        if json_dict == {}:
            logger.debug("Could not load the service state.")
            return

        serialized_class = self.cache
        for path in generate_serialized_data_paths(json_dict):
            nested_json_dict = get_nested_dict_by_path(json_dict, path)
            value = nested_json_dict["value"]
            value_type = nested_json_dict["type"]

            nested_class_dict = get_nested_dict_by_path(serialized_class, path)
            class_value_type = nested_class_dict.get("type", None)
            if class_value_type == value_type:
                class_attr_is_read_only = nested_class_dict["readonly"]
                if class_attr_is_read_only:
                    logger.debug(
                        f"Attribute {path!r} is read-only. Ignoring value from JSON "
                        "file..."
                    )
                    continue
                # Split the path into parts
                parts = path.split(".")
                attr_name = parts[-1]

                # Convert dictionary into Quantity
                if class_value_type == "Quantity":
                    value = u.convert_to_quantity(value)

                self.service.update_DataService_attribute(parts[:-1], attr_name, value)
            else:
                logger.info(
                    f"Attribute type of {path!r} changed from {value_type!r} to "
                    f"{class_value_type!r}. Ignoring value from JSON file..."
                )

    def _get_state_dict_from_JSON_file(self) -> dict[str, Any]:
        if self.filename is not None:
            # Check if the file specified by the filename exists
            if os.path.exists(self.filename):
                with open(self.filename, "r") as f:
                    # Load JSON data from file and update class attributes with these
                    # values
                    return cast(dict[str, Any], json.load(f))
        return {}
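For orientation, a short usage sketch (not part of the commit): `pydase.Server` constructs the `StateManager` itself in the server changes below, but the class can also be driven directly. `MyService` is a hypothetical `DataService` subclass.

```python
from pydase import DataService
from pydase.data_service.state_manager import StateManager


class MyService(DataService):  # hypothetical example service
    voltage = 1.0


service = MyService()
manager = StateManager(service, filename="service_state.json")

manager.load_state()  # restores matching, writable attributes from the file, if present
service.voltage = 2.0
manager.save_state()  # dumps the cached state (serializer format) back to the JSON file
```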
@ -5,18 +5,17 @@ import signal
|
|||||||
import threading
|
import threading
|
||||||
from concurrent.futures import ThreadPoolExecutor
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
from types import FrameType
|
from types import FrameType
|
||||||
from typing import Any, Optional, Protocol, TypedDict
|
from typing import Any, Optional, Protocol, TypedDict
|
||||||
|
|
||||||
import uvicorn
|
import uvicorn
|
||||||
from rpyc import (
|
from rpyc import ForkingServer, ThreadedServer # type: ignore
|
||||||
ForkingServer, # can be used for multiprocessing, e.g. a database interface server
|
|
||||||
)
|
|
||||||
from rpyc import ThreadedServer
|
|
||||||
from uvicorn.server import HANDLED_SIGNALS
|
from uvicorn.server import HANDLED_SIGNALS
|
||||||
|
|
||||||
import pydase.units as u
|
import pydase.units as u
|
||||||
from pydase import DataService
|
from pydase import DataService
|
||||||
|
from pydase.data_service.state_manager import StateManager
|
||||||
from pydase.version import __version__
|
from pydase.version import __version__
|
||||||
|
|
||||||
from .web_server import WebAPI
|
from .web_server import WebAPI
|
||||||
@ -47,13 +46,22 @@ class AdditionalServerProtocol(Protocol):
|
|||||||
The hostname or IP address at which the server will be hosted. This could be a
|
The hostname or IP address at which the server will be hosted. This could be a
|
||||||
local address (like '127.0.0.1' for localhost) or a public IP address.
|
local address (like '127.0.0.1' for localhost) or a public IP address.
|
||||||
|
|
||||||
|
state_manager: StateManager
|
||||||
|
The state manager managing the state cache and persistence of the exposed
|
||||||
|
service.
|
||||||
|
|
||||||
**kwargs: Any
|
**kwargs: Any
|
||||||
Any additional parameters required for initializing the server. These parameters
|
Any additional parameters required for initializing the server. These parameters
|
||||||
are specific to the server's implementation.
|
are specific to the server's implementation.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self, service: DataService, port: int, host: str, **kwargs: Any
|
self,
|
||||||
|
service: DataService,
|
||||||
|
port: int,
|
||||||
|
host: str,
|
||||||
|
state_manager: StateManager,
|
||||||
|
**kwargs: Any,
|
||||||
) -> None:
|
) -> None:
|
||||||
...
|
...
|
||||||
|
|
||||||
@ -98,9 +106,10 @@ class Server:
|
|||||||
Whether to enable the RPC server. Default is True.
|
Whether to enable the RPC server. Default is True.
|
||||||
enable_web: bool
|
enable_web: bool
|
||||||
Whether to enable the web server. Default is True.
|
Whether to enable the web server. Default is True.
|
||||||
|
filename: str | Path | None
|
||||||
|
Filename of the file managing the service state persistence. Defaults to None.
|
||||||
use_forking_server: bool
|
use_forking_server: bool
|
||||||
Whether to use ForkingServer for multiprocessing (e.g. for a database interface
|
Whether to use ForkingServer for multiprocessing. Default is False.
|
||||||
server). Default is False.
|
|
||||||
web_settings: dict[str, Any]
|
web_settings: dict[str, Any]
|
||||||
Additional settings for the web server. Default is {} (an empty dictionary).
|
Additional settings for the web server. Default is {} (an empty dictionary).
|
||||||
additional_servers : list[AdditionalServer]
|
additional_servers : list[AdditionalServer]
|
||||||
@ -120,9 +129,15 @@ class Server:
|
|||||||
|
|
||||||
>>> class MyCustomServer:
|
>>> class MyCustomServer:
|
||||||
... def __init__(
|
... def __init__(
|
||||||
... self, service: DataService, port: int, host: str, **kwargs: Any
|
... self,
|
||||||
|
... service: DataService,
|
||||||
|
... port: int,
|
||||||
|
... host: str,
|
||||||
|
... state_manager: StateManager,
|
||||||
|
... **kwargs: Any
|
||||||
... ):
|
... ):
|
||||||
... self.service = service
|
... self.service = service
|
||||||
|
... self.state_manager = state_manager
|
||||||
... self.port = port
|
... self.port = port
|
||||||
... self.host = host
|
... self.host = host
|
||||||
... # handle any additional arguments...
|
... # handle any additional arguments...
|
||||||
@ -157,6 +172,7 @@ class Server:
|
|||||||
web_port: int = 8001,
|
web_port: int = 8001,
|
||||||
enable_rpc: bool = True,
|
enable_rpc: bool = True,
|
||||||
enable_web: bool = True,
|
enable_web: bool = True,
|
||||||
|
filename: Optional[str | Path] = None,
|
||||||
use_forking_server: bool = False,
|
use_forking_server: bool = False,
|
||||||
web_settings: dict[str, Any] = {},
|
web_settings: dict[str, Any] = {},
|
||||||
additional_servers: list[AdditionalServer] = [],
|
additional_servers: list[AdditionalServer] = [],
|
||||||
@ -187,6 +203,10 @@ class Server:
|
|||||||
"additional_servers": [],
|
"additional_servers": [],
|
||||||
**kwargs,
|
**kwargs,
|
||||||
}
|
}
|
||||||
|
self._state_manager = StateManager(self._service, filename)
|
||||||
|
if getattr(self._service, "_filename", None) is not None:
|
||||||
|
self._service._state_manager = self._state_manager
|
||||||
|
self._state_manager.load_state()
|
||||||
|
|
||||||
def run(self) -> None:
|
def run(self) -> None:
|
||||||
"""
|
"""
|
||||||
@ -249,6 +269,7 @@ class Server:
|
|||||||
self._service,
|
self._service,
|
||||||
port=server["port"],
|
port=server["port"],
|
||||||
host=self._host,
|
host=self._host,
|
||||||
|
state_manager=self._state_manager,
|
||||||
info=self._info,
|
info=self._info,
|
||||||
**server["kwargs"],
|
**server["kwargs"],
|
||||||
)
|
)
|
||||||
@ -271,6 +292,7 @@ class Server:
|
|||||||
self._wapi: WebAPI = WebAPI(
|
self._wapi: WebAPI = WebAPI(
|
||||||
service=self._service,
|
service=self._service,
|
||||||
info=self._info,
|
info=self._info,
|
||||||
|
state_manager=self._state_manager,
|
||||||
**self._kwargs,
|
**self._kwargs,
|
||||||
)
|
)
|
||||||
web_server = uvicorn.Server(
|
web_server = uvicorn.Server(
|
||||||
@ -322,9 +344,9 @@ class Server:
|
|||||||
async def shutdown(self) -> None:
|
async def shutdown(self) -> None:
|
||||||
logger.info("Shutting down")
|
logger.info("Shutting down")
|
||||||
|
|
||||||
logger.info(f"Saving data to {self._service._filename}.")
|
logger.info(f"Saving data to {self._state_manager.filename}.")
|
||||||
if self._service._filename is not None:
|
if self._state_manager is not None:
|
||||||
self._service.write_to_file()
|
self._state_manager.save_state()
|
||||||
|
|
||||||
await self.__cancel_servers()
|
await self.__cancel_servers()
|
||||||
await self.__cancel_tasks()
|
await self.__cancel_tasks()
|
||||||
|
@ -2,13 +2,14 @@ import logging
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any, TypedDict
|
from typing import Any, TypedDict
|
||||||
|
|
||||||
import socketio
|
import socketio # type: ignore
|
||||||
from fastapi import FastAPI
|
from fastapi import FastAPI
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
from fastapi.responses import FileResponse
|
from fastapi.responses import FileResponse
|
||||||
from fastapi.staticfiles import StaticFiles
|
from fastapi.staticfiles import StaticFiles
|
||||||
|
|
||||||
from pydase import DataService
|
from pydase import DataService
|
||||||
|
from pydase.data_service.state_manager import StateManager
|
||||||
from pydase.version import __version__
|
from pydase.version import __version__
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@ -50,6 +51,7 @@ class WebAPI:
|
|||||||
def __init__( # noqa: CFQ002
|
def __init__( # noqa: CFQ002
|
||||||
self,
|
self,
|
||||||
service: DataService,
|
service: DataService,
|
||||||
|
state_manager: StateManager,
|
||||||
frontend: str | Path | None = None,
|
frontend: str | Path | None = None,
|
||||||
css: str | Path | None = None,
|
css: str | Path | None = None,
|
||||||
enable_CORS: bool = True,
|
enable_CORS: bool = True,
|
||||||
@ -58,6 +60,7 @@ class WebAPI:
|
|||||||
**kwargs: Any,
|
**kwargs: Any,
|
||||||
):
|
):
|
||||||
self.service = service
|
self.service = service
|
||||||
|
self.state_manager = state_manager
|
||||||
self.frontend = frontend
|
self.frontend = frontend
|
||||||
self.css = css
|
self.css = css
|
||||||
self.enable_CORS = enable_CORS
|
self.enable_CORS = enable_CORS
|
||||||
@ -114,13 +117,13 @@ class WebAPI:
|
|||||||
|
|
||||||
@app.get("/service-properties")
|
@app.get("/service-properties")
|
||||||
def service_properties() -> dict[str, Any]:
|
def service_properties() -> dict[str, Any]:
|
||||||
return self.service.serialize()
|
return self.state_manager.cache
|
||||||
|
|
||||||
# exposing custom.css file provided by user
|
# exposing custom.css file provided by user
|
||||||
if self.css is not None:
|
if self.css is not None:
|
||||||
|
|
||||||
@app.get("/custom.css")
|
@app.get("/custom.css")
|
||||||
async def styles():
|
async def styles() -> FileResponse:
|
||||||
return FileResponse(str(self.css))
|
return FileResponse(str(self.css))
|
||||||
|
|
||||||
app.mount(
|
app.mount(
|
||||||
|
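The effect of the change above is that `/service-properties` now serves the state manager's cache instead of re-serializing the service on every request. A hedged sketch of querying the endpoint, assuming a service running locally with the default `web_port=8001` of `pydase.Server`:

```python
import json
from urllib.request import urlopen

# Hedged sketch: fetch the cached state of a locally running pydase service.
with urlopen("http://localhost:8001/service-properties") as response:
    state = json.load(response)

print(list(state.keys()))  # top-level attribute names of the service
```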
@ -1,20 +1,19 @@
|
|||||||
|
import inspect
|
||||||
import logging
|
import logging
|
||||||
import re
|
|
||||||
from itertools import chain
|
from itertools import chain
|
||||||
from typing import Any, Optional, cast
|
from typing import Any, Optional
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
STANDARD_TYPES = (
|
|
||||||
"int",
|
def get_attribute_doc(attr: Any) -> Optional[str]:
|
||||||
"float",
|
"""This function takes an input attribute attr and returns its documentation
|
||||||
"bool",
|
string if it's different from the documentation of its type, otherwise,
|
||||||
"str",
|
it returns None.
|
||||||
"Enum",
|
"""
|
||||||
"NoneType",
|
attr_doc = inspect.getdoc(attr)
|
||||||
"Quantity",
|
attr_class_doc = inspect.getdoc(type(attr))
|
||||||
"ColouredEnum",
|
return attr_doc if attr_class_doc != attr_doc else None
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def get_class_and_instance_attributes(obj: object) -> dict[str, Any]:
|
def get_class_and_instance_attributes(obj: object) -> dict[str, Any]:
|
||||||
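A short illustration of the `get_attribute_doc` helper moved into `pydase.utils.helpers` above (a sketch, not part of the commit): it returns an attribute's docstring only when that docstring differs from the docstring of the attribute's type, so plain values yield `None`.

```python
from pydase.utils.helpers import get_attribute_doc


def add(a: float, b: int) -> float:
    """Returns the sum of the numbers a and b."""
    return a + b


print(get_attribute_doc(add))   # "Returns the sum of the numbers a and b."
print(get_attribute_doc(3.14))  # None: a float instance only carries float's class docstring
```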
@ -65,213 +64,6 @@ def get_object_attr_from_path(target_obj: Any, path: list[str]) -> Any:
|
|||||||
return target_obj
|
return target_obj
|
||||||
|
|
||||||
|
|
||||||
def generate_paths_from_DataService_dict(
|
|
||||||
data: dict, parent_path: str = ""
|
|
||||||
) -> list[str]:
|
|
||||||
"""
|
|
||||||
Recursively generate paths from a dictionary representing a DataService object.
|
|
||||||
|
|
||||||
This function traverses through a nested dictionary, which is typically obtained
|
|
||||||
from serializing a DataService object. The function generates a list where each
|
|
||||||
element is a string representing the path to each terminal value in the original
|
|
||||||
dictionary.
|
|
||||||
|
|
||||||
The paths are represented as strings, with dots ('.') denoting nesting levels and
|
|
||||||
square brackets ('[]') denoting list indices.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
data (dict): The input dictionary to generate paths from. This is typically
|
|
||||||
obtained from serializing a DataService object.
|
|
||||||
parent_path (str, optional): The current path up to the current level of
|
|
||||||
recursion. Defaults to ''.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
list[str]: A list with paths as elements.
|
|
||||||
|
|
||||||
Note:
|
|
||||||
The function ignores keys whose "type" is "method", as these represent methods
|
|
||||||
of the DataService object and not its state.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
-------
|
|
||||||
|
|
||||||
>>> {
|
|
||||||
... "attr1": {"type": "int", "value": 10},
|
|
||||||
... "attr2": {
|
|
||||||
... "type": "list",
|
|
||||||
... "value": [{"type": "int", "value": 1}, {"type": "int", "value": 2}],
|
|
||||||
... },
|
|
||||||
... "add": {
|
|
||||||
... "type": "method",
|
|
||||||
... "async": False,
|
|
||||||
... "parameters": {"a": "float", "b": "int"},
|
|
||||||
... "doc": "Returns the sum of the numbers a and b.",
|
|
||||||
... },
|
|
||||||
... }
|
|
||||||
>>> print(generate_paths_from_DataService_dict(nested_dict))
|
|
||||||
[attr1, attr2[0], attr2[1]]
|
|
||||||
"""
|
|
||||||
|
|
||||||
paths = []
|
|
||||||
for key, value in data.items():
|
|
||||||
if value["type"] == "method":
|
|
||||||
# ignoring methods
|
|
||||||
continue
|
|
||||||
new_path = f"{parent_path}.{key}" if parent_path else key
|
|
||||||
if isinstance(value["value"], dict) and value["type"] != "Quantity":
|
|
||||||
paths.extend(generate_paths_from_DataService_dict(value["value"], new_path)) # type: ignore
|
|
||||||
elif isinstance(value["value"], list):
|
|
||||||
for index, item in enumerate(value["value"]):
|
|
||||||
indexed_key_path = f"{new_path}[{index}]"
|
|
||||||
if isinstance(item["value"], dict):
|
|
||||||
paths.extend( # type: ignore
|
|
||||||
generate_paths_from_DataService_dict(
|
|
||||||
item["value"], indexed_key_path
|
|
||||||
)
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
paths.append(indexed_key_path) # type: ignore
|
|
||||||
else:
|
|
||||||
paths.append(new_path) # type: ignore
|
|
||||||
return paths
|
|
||||||
|
|
||||||
|
|
||||||
def extract_dict_or_list_entry(data: dict[str, Any], key: str) -> dict[str, Any] | None:
|
|
||||||
"""
|
|
||||||
Extract a nested dictionary or list entry based on the provided key.
|
|
||||||
|
|
||||||
Given a dictionary and a key, this function retrieves the corresponding nested
|
|
||||||
dictionary or list entry. If the key includes an index in the format "[<index>]",
|
|
||||||
the function assumes that the corresponding entry in the dictionary is a list, and
|
|
||||||
it will attempt to retrieve the indexed item from that list.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
data (dict): The input dictionary containing nested dictionaries or lists.
|
|
||||||
key (str): The key specifying the desired entry within the dictionary. The key
|
|
||||||
can be a regular dictionary key or can include an index in the format
|
|
||||||
"[<index>]" to retrieve an item from a nested list.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
dict | None: The nested dictionary or list item found for the given key. If the
|
|
||||||
key is invalid, or if the specified index is out of bounds for a list, it
|
|
||||||
returns None.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
>>> data = {
|
|
||||||
... "attr1": [
|
|
||||||
... {"type": "int", "value": 10}, {"type": "string", "value": "hello"}
|
|
||||||
... ],
|
|
||||||
... "attr2": {
|
|
||||||
... "type": "MyClass",
|
|
||||||
... "value": {"sub_attr": {"type": "float", "value": 20.5}}
|
|
||||||
... }
|
|
||||||
... }
|
|
||||||
|
|
||||||
>>> extract_dict_or_list_entry(data, "attr1[1]")
|
|
||||||
{"type": "string", "value": "hello"}
|
|
||||||
|
|
||||||
>>> extract_dict_or_list_entry(data, "attr2")
|
|
||||||
{"type": "MyClass", "value": {"sub_attr": {"type": "float", "value": 20.5}}}
|
|
||||||
"""
|
|
||||||
|
|
||||||
attr_name = key
|
|
||||||
index: Optional[int] = None
|
|
||||||
|
|
||||||
# Check if the key contains an index part like '[<index>]'
|
|
||||||
if "[" in key and key.endswith("]"):
|
|
||||||
attr_name, index_part = key.split("[", 1)
|
|
||||||
index_part = index_part.rstrip("]") # remove the closing bracket
|
|
||||||
|
|
||||||
# Convert the index part to an integer
|
|
||||||
if index_part.isdigit():
|
|
||||||
index = int(index_part)
|
|
||||||
else:
|
|
||||||
logger.error(f"Invalid index format in key: {key}")
|
|
||||||
|
|
||||||
current_data: dict[str, Any] | list[dict[str, Any]] | None = data.get(
|
|
||||||
attr_name, None
|
|
||||||
)
|
|
||||||
if not isinstance(current_data, dict):
|
|
||||||
# key does not exist in dictionary, e.g. when class does not have this
|
|
||||||
# attribute
|
|
||||||
return None
|
|
||||||
|
|
||||||
if isinstance(current_data["value"], list):
|
|
||||||
current_data = current_data["value"]
|
|
||||||
|
|
||||||
if index is not None and 0 <= index < len(current_data):
|
|
||||||
current_data = current_data[index]
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
# When the attribute is a class instance, the attributes are nested in the
|
|
||||||
# "value" key
|
|
||||||
if current_data["type"] not in STANDARD_TYPES:
|
|
||||||
current_data = cast(dict[str, Any], current_data.get("value", None)) # type: ignore
|
|
||||||
assert isinstance(current_data, dict)
|
|
||||||
|
|
||||||
return current_data
|
|
||||||
|
|
||||||
|
|
||||||
def get_nested_value_from_DataService_by_path_and_key(
|
|
||||||
data: dict[str, Any], path: str, key: str = "value"
|
|
||||||
) -> Any:
|
|
||||||
"""
|
|
||||||
Get the value associated with a specific key from a dictionary given a path.
|
|
||||||
|
|
||||||
This function traverses the dictionary according to the path provided and
|
|
||||||
returns the value associated with the specified key at that path. The path is
|
|
||||||
a string with dots connecting the levels and brackets indicating list indices.
|
|
||||||
|
|
||||||
The function can handle complex dictionaries where data is nested within different
|
|
||||||
types of objects. It checks the type of each object it encounters and correctly
|
|
||||||
descends into the object if it is not a standard type (i.e., int, float, bool, str,
|
|
||||||
Enum).
|
|
||||||
|
|
||||||
Args:
|
|
||||||
data (dict): The input dictionary to get the value from.
|
|
||||||
path (str): The path to the value in the dictionary.
|
|
||||||
key (str, optional): The key associated with the value to be returned.
|
|
||||||
Default is "value".
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Any: The value associated with the specified key at the given path in the
|
|
||||||
dictionary.
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
Let's consider the following dictionary:
|
|
||||||
|
|
||||||
>>> data = {
|
|
||||||
>>> "attr1": {"type": "int", "value": 10},
|
|
||||||
>>> "attr2": {
|
|
||||||
"type": "MyClass",
|
|
||||||
"value": {"attr3": {"type": "float", "value": 20.5}}
|
|
||||||
}
|
|
||||||
>>> }
|
|
||||||
|
|
||||||
The function can be used to get the value of 'attr1' as follows:
|
|
||||||
>>> get_nested_value_by_path_and_key(data, "attr1")
|
|
||||||
10
|
|
||||||
|
|
||||||
It can also be used to get the value of 'attr3', which is nested within 'attr2',
|
|
||||||
as follows:
|
|
||||||
>>> get_nested_value_by_path_and_key(data, "attr2.attr3", "type")
|
|
||||||
float
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Split the path into parts
|
|
||||||
parts: list[str] = re.split(r"\.", path) # Split by '.'
|
|
||||||
current_data: dict[str, Any] | None = data
|
|
||||||
|
|
||||||
for part in parts:
|
|
||||||
if current_data is None:
|
|
||||||
return
|
|
||||||
current_data = extract_dict_or_list_entry(current_data, part)
|
|
||||||
|
|
||||||
if isinstance(current_data, dict):
|
|
||||||
return current_data.get(key, None)
|
|
||||||
|
|
||||||
|
|
||||||
def convert_arguments_to_hinted_types(
|
def convert_arguments_to_hinted_types(
|
||||||
args: dict[str, Any], type_hints: dict[str, Any]
|
args: dict[str, Any], type_hints: dict[str, Any]
|
||||||
) -> dict[str, Any] | str:
|
) -> dict[str, Any] | str:
|
||||||
@ -356,38 +148,34 @@ def parse_list_attr_and_index(attr_string: str) -> tuple[str, Optional[int]]:
|
|||||||
"""
|
"""
|
||||||
Parses an attribute string and extracts a potential list attribute name and its
|
Parses an attribute string and extracts a potential list attribute name and its
|
||||||
index.
|
index.
|
||||||
|
Logs an error if the index is not a valid digit.
|
||||||
|
|
||||||
This function examines the provided attribute string. If the string contains square
|
Args:
|
||||||
brackets, it assumes that it's a list attribute and the string within brackets is
|
attr_string (str):
|
||||||
the index of an element. It then returns the attribute name and the index as an
|
The attribute string to parse. Can be a regular attribute name (e.g.,
|
||||||
integer. If no brackets are present, the function assumes it's a regular attribute
|
'attr_name') or a list attribute with an index (e.g., 'list_attr[2]').
|
||||||
and returns the attribute name and None as the index.
|
|
||||||
|
|
||||||
Parameters:
|
|
||||||
-----------
|
|
||||||
attr_string: str
|
|
||||||
The attribute string to parse. Can be a regular attribute name (e.g.
|
|
||||||
'attr_name') or a list attribute with an index (e.g. 'list_attr[2]').
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
--------
|
tuple[str, Optional[int]]:
|
||||||
tuple: (str, Optional[int])
|
A tuple containing the attribute name as a string and the index as an
|
||||||
A tuple containing the attribute name as a string and the index as an integer if
|
integer if present, otherwise None.
|
||||||
present, otherwise None.
|
|
||||||
|
|
||||||
Example:
|
Examples:
|
||||||
--------
|
>>> parse_attribute_and_index('list_attr[2]')
|
||||||
>>> parse_list_attr_and_index('list_attr[2]')
|
|
||||||
('list_attr', 2)
|
('list_attr', 2)
|
||||||
>>> parse_list_attr_and_index('attr_name')
|
>>> parse_attribute_and_index('attr_name')
|
||||||
('attr_name', None)
|
('attr_name', None)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
attr_name = attr_string
|
|
||||||
index = None
|
index = None
|
||||||
if "[" in attr_string and "]" in attr_string:
|
attr_name = attr_string
|
||||||
attr_name, idx = attr_string[:-1].split("[")
|
if "[" in attr_string and attr_string.endswith("]"):
|
||||||
index = int(idx)
|
attr_name, index_part = attr_string.split("[", 1)
|
||||||
|
index_part = index_part.rstrip("]")
|
||||||
|
if index_part.isdigit():
|
||||||
|
index = int(index_part)
|
||||||
|
else:
|
||||||
|
logger.error(f"Invalid index format in key: {attr_name}")
|
||||||
return attr_name, index
|
return attr_name, index
|
||||||
|
|
||||||
|
|
||||||
|
@ -1,217 +0,0 @@
|
|||||||
import inspect
|
|
||||||
import logging
|
|
||||||
from collections.abc import Callable
|
|
||||||
from enum import Enum
|
|
||||||
from typing import Any, Optional
|
|
||||||
|
|
||||||
import pydase.units as u
|
|
||||||
from pydase.data_service.abstract_data_service import AbstractDataService
|
|
||||||
from pydase.utils.helpers import get_component_class_names
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Serializer:
|
|
||||||
@staticmethod
|
|
||||||
def get_attribute_doc(attr: Any) -> Optional[str]:
|
|
||||||
"""This function takes an input attribute attr and returns its documentation
|
|
||||||
string if it's different from the documentation of its type, otherwise,
|
|
||||||
it returns None.
|
|
||||||
"""
|
|
||||||
attr_doc = inspect.getdoc(attr)
|
|
||||||
attr_class_doc = inspect.getdoc(type(attr))
|
|
||||||
return attr_doc if attr_class_doc != attr_doc else None
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def serialize_object(obj: Any) -> dict[str, Any]:
|
|
||||||
result: dict[str, Any] = {}
|
|
||||||
if isinstance(obj, AbstractDataService):
|
|
||||||
result = Serializer._serialize_DataService(obj)
|
|
||||||
|
|
||||||
elif isinstance(obj, list):
|
|
||||||
result = Serializer._serialize_list(obj)
|
|
||||||
|
|
||||||
elif isinstance(obj, dict):
|
|
||||||
result = Serializer._serialize_dict(obj)
|
|
||||||
|
|
||||||
# Special handling for u.Quantity
|
|
||||||
elif isinstance(obj, u.Quantity):
|
|
||||||
result = Serializer._serialize_Quantity(obj)
|
|
||||||
|
|
||||||
# Handling for Enums
|
|
||||||
elif isinstance(obj, Enum):
|
|
||||||
result = Serializer._serialize_enum(obj)
|
|
||||||
|
|
||||||
# Methods and coroutines
|
|
||||||
elif inspect.isfunction(obj) or inspect.ismethod(obj):
|
|
||||||
result = Serializer._serialize_method(obj)
|
|
||||||
|
|
||||||
else:
|
|
||||||
obj_type = type(obj).__name__
|
|
||||||
value = obj
|
|
||||||
readonly = False
|
|
||||||
doc = Serializer.get_attribute_doc(obj)
|
|
||||||
result = {
|
|
||||||
"type": obj_type,
|
|
||||||
"value": value,
|
|
||||||
"readonly": readonly,
|
|
||||||
"doc": doc,
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _serialize_enum(obj: Enum) -> dict[str, Any]:
|
|
||||||
value = obj.name
|
|
||||||
readonly = False
|
|
||||||
doc = Serializer.get_attribute_doc(obj)
|
|
||||||
if type(obj).__base__.__name__ == "ColouredEnum":
|
|
||||||
obj_type = "ColouredEnum"
|
|
||||||
else:
|
|
||||||
obj_type = "Enum"
|
|
||||||
|
|
||||||
return {
|
|
||||||
"type": obj_type,
|
|
||||||
"value": value,
|
|
||||||
"readonly": readonly,
|
|
||||||
"doc": doc,
|
|
||||||
"enum": {
|
|
||||||
name: member.value for name, member in obj.__class__.__members__.items()
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _serialize_Quantity(obj: u.Quantity) -> dict[str, Any]:
|
|
||||||
obj_type = "Quantity"
|
|
||||||
readonly = False
|
|
||||||
doc = Serializer.get_attribute_doc(obj)
|
|
||||||
value = {"magnitude": obj.m, "unit": str(obj.u)}
|
|
||||||
return {
|
|
||||||
"type": obj_type,
|
|
||||||
"value": value,
|
|
||||||
"readonly": readonly,
|
|
||||||
"doc": doc,
|
|
||||||
}
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _serialize_dict(obj: dict[str, Any]) -> dict[str, Any]:
|
|
||||||
obj_type = "dict"
|
|
||||||
readonly = False
|
|
||||||
doc = Serializer.get_attribute_doc(obj)
|
|
||||||
value = {key: Serializer.serialize_object(val) for key, val in obj.items()}
|
|
||||||
return {
|
|
||||||
"type": obj_type,
|
|
||||||
"value": value,
|
|
||||||
"readonly": readonly,
|
|
||||||
"doc": doc,
|
|
||||||
}
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _serialize_list(obj: list[Any]) -> dict[str, Any]:
|
|
||||||
obj_type = "list"
|
|
||||||
readonly = False
|
|
||||||
doc = Serializer.get_attribute_doc(obj)
|
|
||||||
value = [Serializer.serialize_object(o) for o in obj]
|
|
||||||
return {
|
|
||||||
"type": obj_type,
|
|
||||||
"value": value,
|
|
||||||
"readonly": readonly,
|
|
||||||
"doc": doc,
|
|
||||||
}
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _serialize_method(obj: Callable[..., Any]) -> dict[str, Any]:
|
|
||||||
obj_type = "method"
|
|
||||||
value = None
|
|
||||||
readonly = True
|
|
||||||
doc = Serializer.get_attribute_doc(obj)
|
|
||||||
|
|
||||||
# Store parameters and their anotations in a dictionary
|
|
||||||
sig = inspect.signature(obj)
|
|
||||||
parameters: dict[str, Optional[str]] = {}
|
|
||||||
|
|
||||||
for k, v in sig.parameters.items():
|
|
||||||
annotation = v.annotation
|
|
||||||
if annotation is not inspect._empty:
|
|
||||||
if isinstance(annotation, type):
|
|
||||||
# Handle regular types
|
|
||||||
parameters[k] = annotation.__name__
|
|
||||||
else:
|
|
||||||
# Union, string annotation, Literal types, ...
|
|
||||||
parameters[k] = str(annotation)
|
|
||||||
else:
|
|
||||||
parameters[k] = None
|
|
||||||
|
|
||||||
return {
|
|
||||||
"type": obj_type,
|
|
||||||
"value": value,
|
|
||||||
"readonly": readonly,
|
|
||||||
"doc": doc,
|
|
||||||
"async": inspect.iscoroutinefunction(obj),
|
|
||||||
"parameters": parameters,
|
|
||||||
}
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _serialize_DataService(obj: AbstractDataService) -> dict[str, Any]:
|
|
||||||
readonly = False
|
|
||||||
doc = Serializer.get_attribute_doc(obj)
|
|
||||||
obj_type = type(obj).__name__
|
|
||||||
if type(obj).__name__ not in get_component_class_names():
|
|
||||||
obj_type = "DataService"
|
|
||||||
|
|
||||||
# Get the dictionary of the base class
|
|
||||||
base_set = set(type(obj).__base__.__dict__)
|
|
||||||
# Get the dictionary of the derived class
|
|
||||||
derived_set = set(type(obj).__dict__)
|
|
||||||
# Get the difference between the two dictionaries
|
|
||||||
derived_only_set = derived_set - base_set
|
|
||||||
|
|
||||||
instance_dict = set(obj.__dict__)
|
|
||||||
# Merge the class and instance dictionaries
|
|
||||||
merged_set = derived_only_set | instance_dict
|
|
||||||
value = {}
|
|
||||||
|
|
||||||
# Iterate over attributes, properties, class attributes, and methods
|
|
||||||
for key in sorted(merged_set):
|
|
||||||
if key.startswith("_"):
|
|
||||||
continue # Skip attributes that start with underscore
|
|
||||||
|
|
||||||
# Skip keys that start with "start_" or "stop_" and end with an async
|
|
||||||
# method name
|
|
||||||
if (key.startswith("start_") or key.startswith("stop_")) and key.split(
|
|
||||||
"_", 1
|
|
||||||
)[1] in {
|
|
||||||
name
|
|
||||||
for name, _ in inspect.getmembers(
|
|
||||||
obj, predicate=inspect.iscoroutinefunction
|
|
||||||
)
|
|
||||||
}:
|
|
||||||
continue
|
|
||||||
|
|
||||||
val = getattr(obj, key)
|
|
||||||
|
|
||||||
value[key] = Serializer.serialize_object(val)
|
|
||||||
|
|
||||||
# If there's a running task for this method
|
|
||||||
if key in obj._task_manager.tasks:
|
|
||||||
task_info = obj._task_manager.tasks[key]
|
|
||||||
value[key]["value"] = task_info["kwargs"]
|
|
||||||
|
|
||||||
# If the DataService attribute is a property
|
|
||||||
if isinstance(getattr(obj.__class__, key, None), property):
|
|
||||||
prop: property = getattr(obj.__class__, key)
|
|
||||||
value[key]["readonly"] = prop.fset is None
|
|
||||||
value[key]["doc"] = Serializer.get_attribute_doc(
|
|
||||||
prop
|
|
||||||
) # overwrite the doc
|
|
||||||
|
|
||||||
return {
|
|
||||||
"type": obj_type,
|
|
||||||
"value": value,
|
|
||||||
"readonly": readonly,
|
|
||||||
"doc": doc,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def dump(obj: Any) -> dict[str, Any]:
|
|
||||||
return Serializer.serialize_object(obj)
|
|
src/pydase/utils/serializer.py (new file, 387 lines)
@@ -0,0 +1,387 @@
|
import inspect
|
||||||
|
import logging
|
||||||
|
from collections.abc import Callable
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
import pydase.units as u
|
||||||
|
from pydase.data_service.abstract_data_service import AbstractDataService
|
||||||
|
from pydase.utils.helpers import (
|
||||||
|
get_attribute_doc,
|
||||||
|
get_component_class_names,
|
||||||
|
parse_list_attr_and_index,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class SerializationPathError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class SerializationValueError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Serializer:
|
||||||
|
@staticmethod
|
||||||
|
def serialize_object(obj: Any) -> dict[str, Any]:
|
||||||
|
result: dict[str, Any] = {}
|
||||||
|
if isinstance(obj, AbstractDataService):
|
||||||
|
result = Serializer._serialize_DataService(obj)
|
||||||
|
|
||||||
|
elif isinstance(obj, list):
|
||||||
|
result = Serializer._serialize_list(obj)
|
||||||
|
|
||||||
|
elif isinstance(obj, dict):
|
||||||
|
result = Serializer._serialize_dict(obj)
|
||||||
|
|
||||||
|
# Special handling for u.Quantity
|
||||||
|
elif isinstance(obj, u.Quantity):
|
||||||
|
result = Serializer._serialize_Quantity(obj)
|
||||||
|
|
||||||
|
# Handling for Enums
|
||||||
|
elif isinstance(obj, Enum):
|
||||||
|
result = Serializer._serialize_enum(obj)
|
||||||
|
|
||||||
|
# Methods and coroutines
|
||||||
|
elif inspect.isfunction(obj) or inspect.ismethod(obj):
|
||||||
|
result = Serializer._serialize_method(obj)
|
||||||
|
|
||||||
|
else:
|
||||||
|
obj_type = type(obj).__name__
|
||||||
|
value = obj
|
||||||
|
readonly = False
|
||||||
|
doc = get_attribute_doc(obj)
|
||||||
|
result = {
|
||||||
|
"type": obj_type,
|
||||||
|
"value": value,
|
||||||
|
"readonly": readonly,
|
||||||
|
"doc": doc,
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _serialize_enum(obj: Enum) -> dict[str, Any]:
|
||||||
|
value = obj.name
|
||||||
|
readonly = False
|
||||||
|
doc = get_attribute_doc(obj)
|
||||||
|
if type(obj).__base__.__name__ == "ColouredEnum":
|
||||||
|
obj_type = "ColouredEnum"
|
||||||
|
else:
|
||||||
|
obj_type = "Enum"
|
||||||
|
|
||||||
|
return {
|
||||||
|
"type": obj_type,
|
||||||
|
"value": value,
|
||||||
|
"readonly": readonly,
|
||||||
|
"doc": doc,
|
||||||
|
"enum": {
|
||||||
|
name: member.value for name, member in obj.__class__.__members__.items()
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _serialize_Quantity(obj: u.Quantity) -> dict[str, Any]:
|
||||||
|
obj_type = "Quantity"
|
||||||
|
readonly = False
|
||||||
|
doc = get_attribute_doc(obj)
|
||||||
|
value = {"magnitude": obj.m, "unit": str(obj.u)}
|
||||||
|
return {
|
||||||
|
"type": obj_type,
|
||||||
|
"value": value,
|
||||||
|
"readonly": readonly,
|
||||||
|
"doc": doc,
|
||||||
|
}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _serialize_dict(obj: dict[str, Any]) -> dict[str, Any]:
|
||||||
|
obj_type = "dict"
|
||||||
|
readonly = False
|
||||||
|
doc = get_attribute_doc(obj)
|
||||||
|
value = {key: Serializer.serialize_object(val) for key, val in obj.items()}
|
||||||
|
return {
|
||||||
|
"type": obj_type,
|
||||||
|
"value": value,
|
||||||
|
"readonly": readonly,
|
||||||
|
"doc": doc,
|
||||||
|
}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _serialize_list(obj: list[Any]) -> dict[str, Any]:
|
||||||
|
obj_type = "list"
|
||||||
|
readonly = False
|
||||||
|
doc = get_attribute_doc(obj)
|
||||||
|
value = [Serializer.serialize_object(o) for o in obj]
|
||||||
|
return {
|
||||||
|
"type": obj_type,
|
||||||
|
"value": value,
|
||||||
|
"readonly": readonly,
|
||||||
|
"doc": doc,
|
||||||
|
}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _serialize_method(obj: Callable[..., Any]) -> dict[str, Any]:
|
||||||
|
obj_type = "method"
|
||||||
|
value = None
|
||||||
|
readonly = True
|
||||||
|
doc = get_attribute_doc(obj)
|
||||||
|
|
||||||
|
# Store parameters and their anotations in a dictionary
|
||||||
|
sig = inspect.signature(obj)
|
||||||
|
parameters: dict[str, Optional[str]] = {}
|
||||||
|
|
||||||
|
for k, v in sig.parameters.items():
|
||||||
|
annotation = v.annotation
|
||||||
|
if annotation is not inspect._empty:
|
||||||
|
if isinstance(annotation, type):
|
||||||
|
# Handle regular types
|
||||||
|
parameters[k] = annotation.__name__
|
||||||
|
else:
|
||||||
|
# Union, string annotation, Literal types, ...
|
||||||
|
parameters[k] = str(annotation)
|
||||||
|
else:
|
||||||
|
parameters[k] = None
|
||||||
|
|
||||||
|
return {
|
||||||
|
"type": obj_type,
|
||||||
|
"value": value,
|
||||||
|
"readonly": readonly,
|
||||||
|
"doc": doc,
|
||||||
|
"async": inspect.iscoroutinefunction(obj),
|
||||||
|
"parameters": parameters,
|
||||||
|
}
|
||||||
|
|
||||||
|
    @staticmethod
    def _serialize_DataService(obj: AbstractDataService) -> dict[str, Any]:
        readonly = False
        doc = get_attribute_doc(obj)
        obj_type = type(obj).__name__
        if type(obj).__name__ not in get_component_class_names():
            obj_type = "DataService"

        # Get the dictionary of the base class
        base_set = set(type(obj).__base__.__dict__)
        # Get the dictionary of the derived class
        derived_set = set(type(obj).__dict__)
        # Get the difference between the two dictionaries
        derived_only_set = derived_set - base_set

        instance_dict = set(obj.__dict__)
        # Merge the class and instance dictionaries
        merged_set = derived_only_set | instance_dict
        value = {}

        # Iterate over attributes, properties, class attributes, and methods
        for key in sorted(merged_set):
            if key.startswith("_"):
                continue  # Skip attributes that start with underscore

            # Skip keys that start with "start_" or "stop_" and end with an async
            # method name
            if (key.startswith("start_") or key.startswith("stop_")) and key.split(
                "_", 1
            )[1] in {
                name
                for name, _ in inspect.getmembers(
                    obj, predicate=inspect.iscoroutinefunction
                )
            }:
                continue

            val = getattr(obj, key)

            value[key] = Serializer.serialize_object(val)

            # If there's a running task for this method
            if key in obj._task_manager.tasks:
                task_info = obj._task_manager.tasks[key]
                value[key]["value"] = task_info["kwargs"]

            # If the DataService attribute is a property
            if isinstance(getattr(obj.__class__, key, None), property):
                prop: property = getattr(obj.__class__, key)
                value[key]["readonly"] = prop.fset is None
                value[key]["doc"] = get_attribute_doc(prop)  # overwrite the doc

        return {
            "type": obj_type,
            "value": value,
            "readonly": readonly,
            "doc": doc,
        }

def dump(obj: Any) -> dict[str, Any]:
    return Serializer.serialize_object(obj)

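A short, hedged usage sketch of `dump` on a plain value; the expected keys follow the serialization format shown above, with the concrete value chosen only for illustration:

```python
from pydase.utils.serializer import dump

# A primitive value serializes to the flat entry format used throughout this module.
serialized = dump(3.14)
assert serialized["type"] == "float"
assert serialized["value"] == 3.14
assert serialized["readonly"] is False
```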
def set_nested_value_by_path(
    serialization_dict: dict[str, Any], path: str, value: Any
) -> None:
    """
    Set a value in a nested dictionary structure, which conforms to the serialization
    format used by `pydase.utils.serializer.Serializer`, using a dot-notation path.

    Args:
        serialization_dict:
            The base dictionary representing data serialized with
            `pydase.utils.serializer.Serializer`.
        path:
            The dot-notation path (e.g., 'attr1.attr2[0].attr3') indicating where to
            set the value.
        value:
            The new value to set at the specified path.

    Note:
        - If the index equals the length of the list, the function will append the
          serialized representation of the 'value' to the list.
    """

    parent_path_parts, attr_name = path.split(".")[:-1], path.split(".")[-1]
    current_dict: dict[str, Any] = serialization_dict

    try:
        for path_part in parent_path_parts:
            current_dict = get_next_level_dict_by_key(
                current_dict, path_part, allow_append=False
            )
            current_dict = current_dict["value"]

        current_dict = get_next_level_dict_by_key(
            current_dict, attr_name, allow_append=True
        )
    except (SerializationPathError, SerializationValueError, KeyError) as e:
        logger.error(e)
        return

    # setting the new value
    serialized_value = dump(value)
    if "readonly" in current_dict:
        current_dict["value"] = serialized_value["value"]
        current_dict["type"] = serialized_value["type"]
    else:
        current_dict.update(serialized_value)

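A minimal sketch of how `set_nested_value_by_path` might be used against a serialized service, assuming `dump` from this module; the `Device` class and its attribute are illustrative only:

```python
import pydase
from pydase.utils.serializer import dump, set_nested_value_by_path


class Device(pydase.DataService):
    temperature = 20.0


# Work on the attribute mapping of the serialized service.
serialized = dump(Device())["value"]
set_nested_value_by_path(serialized, "temperature", 22.5)
assert serialized["temperature"]["value"] == 22.5
```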
def get_nested_dict_by_path(
    serialization_dict: dict[str, Any],
    path: str,
) -> dict[str, Any]:
    parent_path_parts, attr_name = path.split(".")[:-1], path.split(".")[-1]
    current_dict: dict[str, Any] = serialization_dict

    try:
        for path_part in parent_path_parts:
            current_dict = get_next_level_dict_by_key(
                current_dict, path_part, allow_append=False
            )
            current_dict = current_dict["value"]
        current_dict = get_next_level_dict_by_key(
            current_dict, attr_name, allow_append=False
        )
    except (SerializationPathError, SerializationValueError, KeyError) as e:
        logger.error(e)
        return {}

    return current_dict

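The read-side counterpart, again as a hedged sketch with illustrative names:

```python
import pydase
from pydase.utils.serializer import dump, get_nested_dict_by_path


class Sensor(pydase.DataService):
    name = "thermo"


serialized = dump(Sensor())["value"]

# Returns the serialized entry at the given dot-notation path.
entry = get_nested_dict_by_path(serialized, "name")
assert entry["type"] == "str"
assert entry["value"] == "thermo"
```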
def get_next_level_dict_by_key(
    serialization_dict: dict[str, Any],
    attr_name: str,
    allow_append: bool = False,
) -> dict[str, Any]:
    """
    Retrieve a nested dictionary entry or list item from a data structure serialized
    with `pydase.utils.serializer.Serializer`.

    Args:
        serialization_dict: The base dictionary representing serialized data.
        attr_name: The key name representing the attribute in the dictionary,
            e.g. 'list_attr[0]' or 'attr'
        allow_append: Flag to allow appending a new entry if `index` is out of range by
            one.

    Returns:
        The dictionary or list item corresponding to the attribute and index.

    Raises:
        SerializationPathError: If the path composed of `attr_name` and `index` is
            invalid or leads to an IndexError or KeyError.
        SerializationValueError: If the expected nested structure is not a dictionary.
    """
    # Check if the key contains an index part like 'attr_name[<index>]'
    attr_name, index = parse_list_attr_and_index(attr_name)

    try:
        if index is not None:
            serialization_dict = serialization_dict[attr_name]["value"][index]
        else:
            serialization_dict = serialization_dict[attr_name]
    except IndexError as e:
        if allow_append and index == len(serialization_dict[attr_name]["value"]):
            # Appending to list
            serialization_dict[attr_name]["value"].append({})
            serialization_dict = serialization_dict[attr_name]["value"][index]
        else:
            raise SerializationPathError(
                f"Error occurred trying to change '{attr_name}[{index}]': {e}"
            )
    except KeyError:
        raise SerializationPathError(
            f"Error occurred trying to access the key '{attr_name}': it is either "
            "not present in the current dictionary or its value does not contain "
            "a 'value' key."
        )

    if not isinstance(serialization_dict, dict):
        raise SerializationValueError(
            f"Expected a dictionary at '{attr_name}', but found type "
            f"'{type(serialization_dict).__name__}' instead."
        )

    return serialization_dict

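A hedged sketch of the list-index handling (`'attr_name[<index>]'`) described in the docstring; the surrounding dictionary is built with `dump` so the nesting matches this module's format, and the key name is made up:

```python
from pydase.utils.serializer import dump, get_next_level_dict_by_key

serialized = {"attr_list": dump([1.0, 2.0])}

# 'attr_list[1]' resolves to the serialized second list item.
item = get_next_level_dict_by_key(serialized, "attr_list[1]")
assert item["value"] == 2.0

# With allow_append=True, an index one past the end appends a placeholder entry.
appended = get_next_level_dict_by_key(serialized, "attr_list[2]", allow_append=True)
assert appended == {}
```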
def generate_serialized_data_paths(
    data: dict[str, Any], parent_path: str = ""
) -> list[str]:
    """
    Generate a list of access paths for all attributes in a dictionary representing
    data serialized with `pydase.utils.serializer.Serializer`, excluding those that are
    methods.

    Args:
        data: The dictionary representing serialized data, typically produced by
            `pydase.utils.serializer.Serializer`.
        parent_path: The base path to prepend to the keys in the `data` dictionary to
            form the access paths. Defaults to an empty string.

    Returns:
        A list of strings where each string is a dot-notation access path to an
        attribute in the serialized data.
    """

    paths = []
    for key, value in data.items():
        if value["type"] == "method":
            # ignoring methods
            continue
        new_path = f"{parent_path}.{key}" if parent_path else key
        if isinstance(value["value"], dict) and value["type"] != "Quantity":
            paths.extend(generate_serialized_data_paths(value["value"], new_path))  # type: ignore
        elif isinstance(value["value"], list):
            for index, item in enumerate(value["value"]):
                indexed_key_path = f"{new_path}[{index}]"
                if isinstance(item["value"], dict):
                    paths.extend(  # type: ignore
                        generate_serialized_data_paths(item["value"], indexed_key_path)
                    )
                else:
                    paths.append(indexed_key_path)  # type: ignore
        else:
            paths.append(new_path)  # type: ignore
    return paths
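A small sketch of path generation over a nested service, again using illustrative class names:

```python
import pydase
from pydase.utils.serializer import dump, generate_serialized_data_paths


class Channel(pydase.DataService):
    enabled = True


class Rack(pydase.DataService):
    channel = Channel()
    label = "A"


# Paths are generated over the attribute mapping of the serialized service.
paths = generate_serialized_data_paths(dump(Rack())["value"])
assert "label" in paths
assert "channel.enabled" in paths
```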
@@ -1,15 +0,0 @@
-from collections.abc import Generator
-from typing import Any
-
-from pydase import DataService
-from pydase.data_service.callback_manager import CallbackManager
-
-
-def emit(self: Any, parent_path: str, name: str, value: Any) -> None:
-    if isinstance(value, DataService):
-        value = value.serialize()
-
-    print(f"{parent_path}.{name} = {value}")
-
-
-CallbackManager.emit_notification = emit  # type: ignore
@@ -1,10 +1,10 @@
-from pytest import CaptureFixture, LogCaptureFixture
+from pytest import LogCaptureFixture

 from pydase.components.coloured_enum import ColouredEnum
 from pydase.data_service.data_service import DataService


-def test_ColouredEnum(capsys: CaptureFixture) -> None:
+def test_ColouredEnum(caplog: LogCaptureFixture) -> None:
     class MyStatus(ColouredEnum):
         RUNNING = "#00FF00"
         FAILING = "#FF0000"
@@ -25,15 +25,7 @@ def test_ColouredEnum(capsys: CaptureFixture) -> None:

     service.status = MyStatus.FAILING

-    captured = capsys.readouterr()
-
-    expected_output = sorted(
-        [
-            "ServiceClass.status = MyStatus.FAILING",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert actual_output == expected_output
+    assert "ServiceClass.status changed to MyStatus.FAILING" in caplog.text


 def test_warning(caplog: LogCaptureFixture) -> None:  # noqa
@@ -4,7 +4,7 @@ from pydase.components.number_slider import NumberSlider
 from pydase.data_service.data_service import DataService


-def test_NumberSlider(capsys: CaptureFixture) -> None:
+def test_NumberSlider(caplog: LogCaptureFixture) -> None:
     class ServiceClass(DataService):
         number_slider = NumberSlider(1, 0, 10, 1)
         int_number_slider = NumberSlider(1, 0, 10, 1, "int")
@@ -28,28 +28,13 @@ def test_NumberSlider(capsys: CaptureFixture) -> None:
     service.number_slider.value = 10.0
     service.int_number_slider.value = 10.1

-    captured = capsys.readouterr()
-
-    expected_output = sorted(
-        [
-            "ServiceClass.number_slider.value = 10.0",
-            "ServiceClass.int_number_slider.value = 10",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert actual_output == expected_output
+    assert "ServiceClass.number_slider.value changed to 10.0" in caplog.text
+    assert "ServiceClass.int_number_slider.value changed to 10" in caplog.text
+    caplog.clear()

     service.number_slider.min = 1.1

-    captured = capsys.readouterr()
-
-    expected_output = sorted(
-        [
-            "ServiceClass.number_slider.min = 1.1",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert actual_output == expected_output
+    assert "ServiceClass.number_slider.min changed to 1.1" in caplog.text


 def test_init_error(caplog: LogCaptureFixture) -> None:  # noqa
@@ -1,13 +1,13 @@
 import logging

-from pytest import CaptureFixture
+from pytest import LogCaptureFixture

 import pydase

 logger = logging.getLogger()


-def test_DataService_task_callback(capsys: CaptureFixture) -> None:
+def test_DataService_task_callback(caplog: LogCaptureFixture) -> None:
     class MyService(pydase.DataService):
         async def my_task(self) -> None:
             logger.info("Triggered task.")
@@ -19,18 +19,11 @@ def test_DataService_task_callback(capsys: CaptureFixture) -> None:
     service.start_my_task()  # type: ignore
     service.start_my_other_task()  # type: ignore

-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "MyService.my_task = {}",
-            "MyService.my_other_task = {}",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert expected_output == actual_output
+    assert "MyService.my_task changed to {}" in caplog.text
+    assert "MyService.my_other_task changed to {}" in caplog.text


-def test_DataServiceList_task_callback(capsys: CaptureFixture) -> None:
+def test_DataServiceList_task_callback(caplog: LogCaptureFixture) -> None:
     class MySubService(pydase.DataService):
         async def my_task(self) -> None:
             logger.info("Triggered task.")
@@ -45,12 +38,5 @@ def test_DataServiceList_task_callback(capsys: CaptureFixture) -> None:
     service.sub_services_list[0].start_my_task()  # type: ignore
     service.sub_services_list[1].start_my_other_task()  # type: ignore

-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "MyService.sub_services_list[0].my_task = {}",
-            "MyService.sub_services_list[1].my_other_task = {}",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert expected_output == actual_output
+    assert "MyService.sub_services_list[0].my_task changed to {}" in caplog.text
+    assert "MyService.sub_services_list[1].my_other_task changed to {}" in caplog.text
tests/data_service/test_data_service_cache.py (new file, 25 lines)
@@ -0,0 +1,25 @@
+import logging
+
+import pydase
+from pydase.data_service.data_service_cache import DataServiceCache
+from pydase.utils.serializer import get_nested_dict_by_path
+
+logger = logging.getLogger()
+
+
+def test_nested_attributes_cache_callback() -> None:
+    class SubClass(pydase.DataService):
+        name = "Hello"
+
+    class ServiceClass(pydase.DataService):
+        class_attr = SubClass()
+        name = "World"
+
+    test_service = ServiceClass()
+    cache = DataServiceCache(test_service)
+
+    test_service.name = "Peepz"
+    assert get_nested_dict_by_path(cache.cache, "name")["value"] == "Peepz"
+
+    test_service.class_attr.name = "Ciao"
+    assert get_nested_dict_by_path(cache.cache, "class_attr.name")["value"] == "Ciao"
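A hedged sketch of how the cached state can be combined with the serializer path helpers above; the service classes and attribute names here are illustrative only:

```python
import pydase
from pydase.data_service.data_service_cache import DataServiceCache
from pydase.utils.serializer import generate_serialized_data_paths


class SubService(pydase.DataService):
    voltage = 1.0


class MyService(pydase.DataService):
    sub = SubService()
    enabled = True


cache = DataServiceCache(MyService())

# The cache stores the service state in the same serialization format,
# so the path helpers from pydase.utils.serializer can be reused on it.
print(generate_serialized_data_paths(cache.cache))
# e.g. ['enabled', 'sub.voltage']
```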
tests/data_service/test_state_manager.py (new file, 142 lines)
@@ -0,0 +1,142 @@
+import json
+from pathlib import Path
+from typing import Any
+
+from pytest import LogCaptureFixture
+
+import pydase
+import pydase.units as u
+from pydase.data_service.state_manager import StateManager
+
+
+class Service(pydase.DataService):
+    def __init__(self, **kwargs: Any) -> None:
+        self.some_unit: u.Quantity = 1.2 * u.units.A
+        self.some_float = 1.0
+        self._name = "Service"
+        super().__init__(**kwargs)
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+
+CURRENT_STATE = {
+    "name": {
+        "type": "str",
+        "value": "Service",
+        "readonly": True,
+        "doc": None,
+    },
+    "some_float": {
+        "type": "float",
+        "value": 1.0,
+        "readonly": False,
+        "doc": None,
+    },
+    "some_unit": {
+        "type": "Quantity",
+        "value": {"magnitude": 1.2, "unit": "A"},
+        "readonly": False,
+        "doc": None,
+    },
+}
+
+LOAD_STATE = {
+    "name": {
+        "type": "str",
+        "value": "Service",
+        "readonly": True,
+        "doc": None,
+    },
+    "some_float": {
+        "type": "int",
+        "value": 1,
+        "readonly": False,
+        "doc": None,
+    },
+    "some_unit": {
+        "type": "Quantity",
+        "value": {"magnitude": 12.0, "unit": "A"},
+        "readonly": False,
+        "doc": None,
+    },
+}
+
+
+def test_save_state(tmp_path: Path):
+    # Create a StateManager instance with a temporary file
+    file = tmp_path / "test_state.json"
+    manager = StateManager(service=Service(), filename=str(file))
+
+    # Trigger the saving action
+    manager.save_state()
+
+    # Now check that the file was written correctly
+    assert file.read_text() == json.dumps(CURRENT_STATE, indent=4)
+
+
+def test_load_state(tmp_path: Path):
+    # Create a StateManager instance with a temporary file
+    file = tmp_path / "test_state.json"
+
+    # Write a temporary JSON file to read back
+    with open(file, "w") as f:
+        json.dump(LOAD_STATE, f, indent=4)
+
+    service = Service()
+    manager = StateManager(service=service, filename=str(file))
+    manager.load_state()
+    assert service.some_unit == u.Quantity(12, "A")
+
+
+def test_filename_warning(tmp_path: Path, caplog: LogCaptureFixture):
+    file = tmp_path / "test_state.json"
+
+    service = Service(filename=str(file))
+    StateManager(service=service, filename=str(file))
+    assert f"Overwriting filename {str(file)!r} with {str(file)!r}." in caplog.text
+
+
+def test_filename_error(caplog: LogCaptureFixture):
+    service = Service()
+    manager = StateManager(service=service)
+
+    manager.save_state()
+    assert (
+        "State manager was not initialised with a filename. Skipping 'save_state'..."
+        in caplog.text
+    )
+
+
+def test_readonly_attribute(tmp_path: Path, caplog: LogCaptureFixture):
+    # Create a StateManager instance with a temporary file
+    file = tmp_path / "test_state.json"
+
+    # Write a temporary JSON file to read back
+    with open(file, "w") as f:
+        json.dump(LOAD_STATE, f, indent=4)
+
+    service = Service()
+    manager = StateManager(service=service, filename=str(file))
+    manager.load_state()
+    assert (
+        "Attribute 'name' is read-only. Ignoring value from JSON file..." in caplog.text
+    )
+
+
+def test_changed_type(tmp_path: Path, caplog: LogCaptureFixture):
+    # Create a StateManager instance with a temporary file
+    file = tmp_path / "test_state.json"
+
+    # Write a temporary JSON file to read back
+    with open(file, "w") as f:
+        json.dump(LOAD_STATE, f, indent=4)
+
+    service = Service()
+    manager = StateManager(service=service, filename=str(file))
+    manager.load_state()
+    assert (
+        "Attribute type of 'some_float' changed from 'int' to "
+        "'float'. Ignoring value from JSON file..."
+    ) in caplog.text
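As a hedged sketch of the behaviour these tests exercise, used directly rather than through pytest (the file name and attribute are illustrative):

```python
import pydase
from pydase.data_service.state_manager import StateManager


class Device(pydase.DataService):
    some_float = 1.0


service = Device()
manager = StateManager(service=service, filename="state.json")

# Persist the current serialized state to disk ...
manager.save_state()

# ... and restore it later. Per the tests above, read-only attributes and
# attributes whose type changed since the file was written are skipped with
# a log message instead of being overwritten.
manager.load_state()
```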
@@ -1,13 +1,13 @@
 import logging

-from pytest import CaptureFixture
+from pytest import LogCaptureFixture

 import pydase

 logger = logging.getLogger()


-def test_autostart_task_callback(capsys: CaptureFixture) -> None:
+def test_autostart_task_callback(caplog: LogCaptureFixture) -> None:
     class MyService(pydase.DataService):
         def __init__(self) -> None:
             self._autostart_tasks = {  # type: ignore
@@ -25,18 +25,13 @@ def test_autostart_task_callback(capsys: CaptureFixture) -> None:
     service = MyService()
     service._task_manager.start_autostart_tasks()

-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "MyService.my_task = {}",
-            "MyService.my_other_task = {}",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert expected_output == actual_output
+    assert "MyService.my_task changed to {}" in caplog.text
+    assert "MyService.my_other_task changed to {}" in caplog.text


-def test_DataService_subclass_autostart_task_callback(capsys: CaptureFixture) -> None:
+def test_DataService_subclass_autostart_task_callback(
+    caplog: LogCaptureFixture,
+) -> None:
     class MySubService(pydase.DataService):
         def __init__(self) -> None:
             self._autostart_tasks = {  # type: ignore
@@ -57,19 +52,12 @@ def test_DataService_subclass_autostart_task_callback(capsys: CaptureFixture) ->
     service = MyService()
     service._task_manager.start_autostart_tasks()

-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "MyService.sub_service.my_task = {}",
-            "MyService.sub_service.my_other_task = {}",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert expected_output == actual_output
+    assert "MyService.sub_service.my_task changed to {}" in caplog.text
+    assert "MyService.sub_service.my_other_task changed to {}" in caplog.text


 def test_DataServiceList_subclass_autostart_task_callback(
-    capsys: CaptureFixture,
+    caplog: LogCaptureFixture,
 ) -> None:
     class MySubService(pydase.DataService):
         def __init__(self) -> None:
@@ -91,14 +79,7 @@ def test_DataServiceList_subclass_autostart_task_callback(
     service = MyService()
     service._task_manager.start_autostart_tasks()

-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "MyService.sub_services_list[0].my_task = {}",
-            "MyService.sub_services_list[0].my_other_task = {}",
-            "MyService.sub_services_list[1].my_task = {}",
-            "MyService.sub_services_list[1].my_other_task = {}",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert expected_output == actual_output
+    assert "MyService.sub_services_list[0].my_task changed to {}" in caplog.text
+    assert "MyService.sub_services_list[0].my_other_task changed to {}" in caplog.text
+    assert "MyService.sub_services_list[1].my_task changed to {}" in caplog.text
+    assert "MyService.sub_services_list[1].my_other_task changed to {}" in caplog.text
@@ -1,46 +1,42 @@
-from pytest import CaptureFixture
+from typing import Any
+
+from pytest import LogCaptureFixture

 from pydase import DataService


-def test_class_list_attribute(capsys: CaptureFixture) -> None:
+def test_class_list_attribute(caplog: LogCaptureFixture) -> None:
     class ServiceClass(DataService):
         attr = [0, 1]

     service_instance = ServiceClass()

     service_instance.attr[0] = 1337
-    captured = capsys.readouterr()
-    assert captured.out == "ServiceClass.attr[0] = 1337\n"
+    assert "ServiceClass.attr[0] changed to 1337" in caplog.text
+    caplog.clear()


-def test_instance_list_attribute(capsys: CaptureFixture) -> None:
+def test_instance_list_attribute(caplog: LogCaptureFixture) -> None:
     class SubClass(DataService):
         name = "SubClass"

     class ServiceClass(DataService):
         def __init__(self) -> None:
-            self.attr = [0, SubClass()]
+            self.attr: list[Any] = [0, SubClass()]
             super().__init__()

     service_instance = ServiceClass()

-    _ = capsys.readouterr()
-
     service_instance.attr[0] = "Hello"
-    captured = capsys.readouterr()
-    assert captured.out == "ServiceClass.attr[0] = Hello\n"
+    assert "ServiceClass.attr[0] changed to Hello" in caplog.text
+    caplog.clear()

     service_instance.attr[1] = SubClass()
-    captured = capsys.readouterr()
-    assert (
-        captured.out.strip()
-        == "ServiceClass.attr[1] = {'name': {'type': 'str', 'value': 'SubClass',"
-        " 'readonly': False, 'doc': None}}"
-    )
+    assert f"ServiceClass.attr[1] changed to {service_instance.attr[1]}" in caplog.text
+    caplog.clear()


-def test_reused_instance_list_attribute(capsys: CaptureFixture) -> None:
+def test_reused_instance_list_attribute(caplog: LogCaptureFixture) -> None:
     some_list = [0, 1, 2]

     class ServiceClass(DataService):
@@ -53,21 +49,14 @@ def test_reused_instance_list_attribute(capsys: CaptureFixture) -> None:
     service_instance = ServiceClass()

     service_instance.attr[0] = 20
-    captured = capsys.readouterr()

     assert service_instance.attr == service_instance.attr_2
     assert service_instance.attr != service_instance.attr_3
-    expected_output = sorted(
-        [
-            "ServiceClass.attr[0] = 20",
-            "ServiceClass.attr_2[0] = 20",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr[0] changed to 20" in caplog.text
+    assert "ServiceClass.attr_2[0] changed to 20" in caplog.text


-def test_nested_reused_instance_list_attribute(capsys: CaptureFixture) -> None:
+def test_nested_reused_instance_list_attribute(caplog: LogCaptureFixture) -> None:
     some_list = [0, 1, 2]

     class SubClass(DataService):
@@ -85,23 +74,16 @@ def test_nested_reused_instance_list_attribute(capsys: CaptureFixture) -> None:

     service_instance = ServiceClass()

-    _ = capsys.readouterr()
     service_instance.attr[0] = 20
-    captured = capsys.readouterr()

     assert service_instance.attr == service_instance.subclass.attr_list
-    expected_output = sorted(
-        [
-            "ServiceClass.subclass.attr_list_2[0] = 20",
-            "ServiceClass.subclass.attr_list[0] = 20",
-            "ServiceClass.attr[0] = 20",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr[0] changed to 20" in caplog.text
+    assert "ServiceClass.subclass.attr_list[0] changed to 20" in caplog.text
+    assert "ServiceClass.subclass.attr_list_2[0] changed to 20" in caplog.text


-def test_protected_list_attribute(capsys: CaptureFixture) -> None:
+def test_protected_list_attribute(caplog: LogCaptureFixture) -> None:
     """Changing protected lists should not emit notifications for the lists themselves, but
     still for all properties depending on them.
     """
@@ -116,12 +98,4 @@ def test_protected_list_attribute(capsys: CaptureFixture) -> None:
     service_instance = ServiceClass()

     service_instance._attr[0] = 1337
-    captured = capsys.readouterr()
-
-    expected_output = sorted(
-        [
-            "ServiceClass.list_dependend_property = 1337",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert actual_output == expected_output
+    assert "ServiceClass.list_dependend_property changed to 1337" in caplog.text
@@ -1,9 +1,9 @@
-from pytest import CaptureFixture
+from pytest import LogCaptureFixture

 from pydase import DataService


-def test_class_attributes(capsys: CaptureFixture) -> None:
+def test_class_attributes(caplog: LogCaptureFixture) -> None:
     class SubClass(DataService):
         name = "Hello"

@@ -11,14 +11,12 @@ def test_class_attributes(capsys: CaptureFixture) -> None:
         attr_1 = SubClass()

     service_instance = ServiceClass()
-    _ = capsys.readouterr()
     service_instance.attr_1.name = "Hi"

-    captured = capsys.readouterr()
-    assert captured.out.strip() == "ServiceClass.attr_1.name = Hi"
+    assert "ServiceClass.attr_1.name changed to Hi" in caplog.text


-def test_instance_attributes(capsys: CaptureFixture) -> None:
+def test_instance_attributes(caplog: LogCaptureFixture) -> None:
     class SubClass(DataService):
         name = "Hello"

@@ -28,25 +26,22 @@ def test_instance_attributes(capsys: CaptureFixture) -> None:
             super().__init__()

     service_instance = ServiceClass()
-    _ = capsys.readouterr()
     service_instance.attr_1.name = "Hi"

-    captured = capsys.readouterr()
-    assert captured.out.strip() == "ServiceClass.attr_1.name = Hi"
+    assert "ServiceClass.attr_1.name changed to Hi" in caplog.text


-def test_class_attribute(capsys: CaptureFixture) -> None:
+def test_class_attribute(caplog: LogCaptureFixture) -> None:
     class ServiceClass(DataService):
         attr = 0

     service_instance = ServiceClass()

     service_instance.attr = 1
-    captured = capsys.readouterr()
-    assert captured.out == "ServiceClass.attr = 1\n"
+    assert "ServiceClass.attr changed to 1" in caplog.text


-def test_instance_attribute(capsys: CaptureFixture) -> None:
+def test_instance_attribute(caplog: LogCaptureFixture) -> None:
     class ServiceClass(DataService):
         def __init__(self) -> None:
             self.attr = "Hello World"
@@ -55,11 +50,10 @@ def test_instance_attribute(capsys: CaptureFixture) -> None:
     service_instance = ServiceClass()

     service_instance.attr = "Hello"
-    captured = capsys.readouterr()
-    assert captured.out == "ServiceClass.attr = Hello\n"
+    assert "ServiceClass.attr changed to Hello" in caplog.text


-def test_reused_instance_attributes(capsys: CaptureFixture) -> None:
+def test_reused_instance_attributes(caplog: LogCaptureFixture) -> None:
     class SubClass(DataService):
         name = "Hello"

@@ -72,22 +66,14 @@ def test_reused_instance_attributes(capsys: CaptureFixture) -> None:
             super().__init__()

     service_instance = ServiceClass()
-    _ = capsys.readouterr()
     service_instance.attr_1.name = "Hi"

-    captured = capsys.readouterr()
     assert service_instance.attr_1 == service_instance.attr_2
-    expected_output = sorted(
-        [
-            "ServiceClass.attr_1.name = Hi",
-            "ServiceClass.attr_2.name = Hi",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr_1.name changed to Hi" in caplog.text
+    assert "ServiceClass.attr_2.name changed to Hi" in caplog.text


-def test_reused_attributes_mixed(capsys: CaptureFixture) -> None:
+def test_reused_attributes_mixed(caplog: LogCaptureFixture) -> None:
     class SubClass(DataService):
         pass

@@ -101,22 +87,14 @@ def test_reused_attributes_mixed(capsys: CaptureFixture) -> None:
             super().__init__()

     service_instance = ServiceClass()
-    _ = capsys.readouterr()
     service_instance.attr_1.name = "Hi"

-    captured = capsys.readouterr()
     assert service_instance.attr_1 == service_instance.attr_2
-    expected_output = sorted(
-        [
-            "ServiceClass.attr_1.name = Hi",
-            "ServiceClass.attr_2.name = Hi",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr_1.name changed to Hi" in caplog.text
+    assert "ServiceClass.attr_2.name changed to Hi" in caplog.text


-def test_nested_class_attributes(capsys: CaptureFixture) -> None:
+def test_nested_class_attributes(caplog: LogCaptureFixture) -> None:
     class SubSubSubClass(DataService):
         name = "Hello"

@@ -133,26 +111,18 @@ def test_nested_class_attributes(capsys: CaptureFixture) -> None:
         attr = SubClass()

     service_instance = ServiceClass()
-    _ = capsys.readouterr()
     service_instance.attr.attr.attr.name = "Hi"
     service_instance.attr.attr.name = "Hou"
     service_instance.attr.name = "foo"
     service_instance.name = "bar"

-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr.attr.attr.name = Hi",
-            "ServiceClass.attr.attr.name = Hou",
-            "ServiceClass.attr.name = foo",
-            "ServiceClass.name = bar",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr.attr.attr.name changed to Hi" in caplog.text
+    assert "ServiceClass.attr.attr.name changed to Hou" in caplog.text
+    assert "ServiceClass.attr.name changed to foo" in caplog.text
+    assert "ServiceClass.name changed to bar" in caplog.text


-def test_nested_instance_attributes(capsys: CaptureFixture) -> None:
+def test_nested_instance_attributes(caplog: LogCaptureFixture) -> None:
     class SubSubSubClass(DataService):
         name = "Hello"

@@ -175,26 +145,18 @@ def test_nested_instance_attributes(capsys: CaptureFixture) -> None:
             super().__init__()

     service_instance = ServiceClass()
-    _ = capsys.readouterr()
     service_instance.attr.attr.attr.name = "Hi"
     service_instance.attr.attr.name = "Hou"
     service_instance.attr.name = "foo"
     service_instance.name = "bar"

-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr.attr.attr.name = Hi",
-            "ServiceClass.attr.attr.name = Hou",
-            "ServiceClass.attr.name = foo",
-            "ServiceClass.name = bar",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr.attr.attr.name changed to Hi" in caplog.text
+    assert "ServiceClass.attr.attr.name changed to Hou" in caplog.text
+    assert "ServiceClass.attr.name changed to foo" in caplog.text
+    assert "ServiceClass.name changed to bar" in caplog.text


-def test_advanced_nested_class_attributes(capsys: CaptureFixture) -> None:
+def test_advanced_nested_class_attributes(caplog: LogCaptureFixture) -> None:
     class SubSubSubClass(DataService):
         name = "Hello"

@@ -209,32 +171,17 @@ def test_advanced_nested_class_attributes(capsys: CaptureFixture) -> None:
         subattr = SubSubClass()

     service_instance = ServiceClass()
-    _ = capsys.readouterr()
     service_instance.attr.attr.attr.name = "Hi"

-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr.attr.attr.name = Hi",
-            "ServiceClass.subattr.attr.name = Hi",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr.attr.attr.name changed to Hi" in caplog.text
+    assert "ServiceClass.subattr.attr.name changed to Hi" in caplog.text

     service_instance.subattr.attr.name = "Ho"

-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr.attr.attr.name = Ho",
-            "ServiceClass.subattr.attr.name = Ho",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr.attr.attr.name changed to Ho" in caplog.text
+    assert "ServiceClass.subattr.attr.name changed to Ho" in caplog.text


-def test_advanced_nested_instance_attributes(capsys: CaptureFixture) -> None:
+def test_advanced_nested_instance_attributes(caplog: LogCaptureFixture) -> None:
     class SubSubSubClass(DataService):
         name = "Hello"

@@ -257,32 +204,19 @@ def test_advanced_nested_instance_attributes(capsys: CaptureFixture) -> None:
             super().__init__()

     service_instance = ServiceClass()
-    _ = capsys.readouterr()
     service_instance.attr.attr.attr.name = "Hi"
+    assert "ServiceClass.attr.attr.attr.name changed to Hi" in caplog.text
+    assert "ServiceClass.subattr.attr.name changed to Hi" in caplog.text
+    caplog.clear()

-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr.attr.attr.name = Hi",
-            "ServiceClass.subattr.attr.name = Hi",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
     service_instance.subattr.attr.name = "Ho"
+    assert "ServiceClass.attr.attr.attr.name changed to Ho" in caplog.text
+    assert "ServiceClass.subattr.attr.name changed to Ho" in caplog.text
+    caplog.clear()

-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr.attr.attr.name = Ho",
-            "ServiceClass.subattr.attr.name = Ho",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output


-def test_advanced_nested_attributes_mixed(capsys: CaptureFixture) -> None:
+def test_advanced_nested_attributes_mixed(caplog: LogCaptureFixture) -> None:
     class SubSubClass(DataService):
         name = "Hello"

@@ -310,44 +244,28 @@ def test_advanced_nested_attributes_mixed(capsys: CaptureFixture) -> None:
     # instances of SubSubClass are unequal
     assert service_instance.attr.attr_1 != service_instance.class_attr.class_attr

-    _ = capsys.readouterr()
-
     service_instance.class_attr.class_attr.name = "Ho"
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.class_attr.class_attr.name = Ho",
-            "ServiceClass.attr.class_attr.name = Ho",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.class_attr.class_attr.name changed to Ho" in caplog.text
+    assert "ServiceClass.attr.class_attr.name changed to Ho" in caplog.text
+    caplog.clear()

     service_instance.class_attr.attr_1.name = "Ho"
-    captured = capsys.readouterr()
-    expected_output = sorted(["ServiceClass.class_attr.attr_1.name = Ho"])
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.class_attr.attr_1.name changed to Ho" in caplog.text
+    assert "ServiceClass.attr.attr_1.name changed to Ho" not in caplog.text
+    caplog.clear()

     service_instance.attr.class_attr.name = "Ho"
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr.class_attr.name = Ho",
-            "ServiceClass.class_attr.class_attr.name = Ho",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.class_attr.class_attr.name changed to Ho" in caplog.text
+    assert "ServiceClass.attr.class_attr.name changed to Ho" in caplog.text
+    caplog.clear()

     service_instance.attr.attr_1.name = "Ho"
-    captured = capsys.readouterr()
-    expected_output = sorted(["ServiceClass.attr.attr_1.name = Ho"])
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr.attr_1.name changed to Ho" in caplog.text
+    assert "ServiceClass.class_attr.attr_1.name changed to Ho" not in caplog.text
+    caplog.clear()


-def test_class_list_attributes(capsys: CaptureFixture) -> None:
+def test_class_list_attributes(caplog: LogCaptureFixture) -> None:
     class SubClass(DataService):
         name = "Hello"

@@ -359,59 +277,36 @@ def test_class_list_attributes(capsys: CaptureFixture) -> None:
         attr = subclass_instance

     service_instance = ServiceClass()
-    _ = capsys.readouterr()

     assert service_instance.attr_list[0] != service_instance.attr_list[1]

     service_instance.attr_list[0].name = "Ho"
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr_list[0].name = Ho",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr_list[0].name changed to Ho" in caplog.text
+    assert "ServiceClass.attr_list[1].name changed to Ho" not in caplog.text
+    caplog.clear()

     service_instance.attr_list[1].name = "Ho"
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr_list[1].name = Ho",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr_list[0].name changed to Ho" not in caplog.text
+    assert "ServiceClass.attr_list[1].name changed to Ho" in caplog.text
+    caplog.clear()

     assert service_instance.attr_list_2[0] == service_instance.attr
     assert service_instance.attr_list_2[0] == service_instance.attr_list_2[1]

     service_instance.attr_list_2[0].name = "Ho"
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr_list_2[0].name = Ho",
-            "ServiceClass.attr_list_2[1].name = Ho",
-            "ServiceClass.attr.name = Ho",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr_list_2[0].name changed to Ho" in caplog.text
+    assert "ServiceClass.attr_list_2[1].name changed to Ho" in caplog.text
+    assert "ServiceClass.attr.name changed to Ho" in caplog.text
+    caplog.clear()

     service_instance.attr_list_2[1].name = "Ho"
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr_list_2[0].name = Ho",
-            "ServiceClass.attr_list_2[1].name = Ho",
-            "ServiceClass.attr.name = Ho",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr_list_2[0].name changed to Ho" in caplog.text
+    assert "ServiceClass.attr_list_2[1].name changed to Ho" in caplog.text
+    assert "ServiceClass.attr.name changed to Ho" in caplog.text
+    caplog.clear()


-def test_nested_class_list_attributes(capsys: CaptureFixture) -> None:
+def test_nested_class_list_attributes(caplog: LogCaptureFixture) -> None:
     class SubSubClass(DataService):
         name = "Hello"

@ -425,34 +320,21 @@ def test_nested_class_list_attributes(capsys: CaptureFixture) -> None:
|
|||||||
subattr = subsubclass_instance
|
subattr = subsubclass_instance
|
||||||
|
|
||||||
service_instance = ServiceClass()
|
service_instance = ServiceClass()
|
||||||
_ = capsys.readouterr()
|
|
||||||
|
|
||||||
assert service_instance.attr[0].attr_list[0] == service_instance.subattr
|
assert service_instance.attr[0].attr_list[0] == service_instance.subattr
|
||||||
|
|
||||||
service_instance.attr[0].attr_list[0].name = "Ho"
|
service_instance.attr[0].attr_list[0].name = "Ho"
|
||||||
captured = capsys.readouterr()
|
assert "ServiceClass.attr[0].attr_list[0].name changed to Ho" in caplog.text
|
||||||
expected_output = sorted(
|
assert "ServiceClass.subattr.name changed to Ho" in caplog.text
|
||||||
[
|
caplog.clear()
|
||||||
"ServiceClass.attr[0].attr_list[0].name = Ho",
|
|
||||||
"ServiceClass.subattr.name = Ho",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
actual_output = sorted(captured.out.strip().split("\n"))
|
|
||||||
assert actual_output == expected_output
|
|
||||||
|
|
||||||
service_instance.subattr.name = "Ho"
|
service_instance.subattr.name = "Ho"
|
||||||
captured = capsys.readouterr()
|
assert "ServiceClass.attr[0].attr_list[0].name changed to Ho" in caplog.text
|
||||||
expected_output = sorted(
|
assert "ServiceClass.subattr.name changed to Ho" in caplog.text
|
||||||
[
|
caplog.clear()
|
||||||
"ServiceClass.attr[0].attr_list[0].name = Ho",
|
|
||||||
"ServiceClass.subattr.name = Ho",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
actual_output = sorted(captured.out.strip().split("\n"))
|
|
||||||
assert actual_output == expected_output
|
|
||||||
|
|
||||||
|
|
||||||
def test_instance_list_attributes(capsys: CaptureFixture) -> None:
|
def test_instance_list_attributes(caplog: LogCaptureFixture) -> None:
|
||||||
class SubClass(DataService):
|
class SubClass(DataService):
|
||||||
name = "Hello"
|
name = "Hello"
|
||||||
|
|
||||||
@ -466,63 +348,42 @@ def test_instance_list_attributes(capsys: CaptureFixture) -> None:
|
|||||||
super().__init__()
|
super().__init__()
|
||||||
|
|
||||||
service_instance = ServiceClass()
|
service_instance = ServiceClass()
|
||||||
_ = capsys.readouterr()
|
|
||||||
|
|
||||||
assert service_instance.attr_list[0] != service_instance.attr_list[1]
|
assert service_instance.attr_list[0] != service_instance.attr_list[1]
|
||||||
|
|
||||||
service_instance.attr_list[0].name = "Ho"
|
service_instance.attr_list[0].name = "Ho"
|
||||||
captured = capsys.readouterr()
|
assert "ServiceClass.attr_list[0].name changed to Ho" in caplog.text
|
||||||
expected_output = sorted(["ServiceClass.attr_list[0].name = Ho"])
|
assert "ServiceClass.attr_list[1].name changed to Ho" not in caplog.text
|
||||||
actual_output = sorted(captured.out.strip().split("\n"))
|
caplog.clear()
|
||||||
assert actual_output == expected_output
|
|
||||||
|
|
||||||
service_instance.attr_list[1].name = "Ho"
|
service_instance.attr_list[1].name = "Ho"
|
||||||
captured = capsys.readouterr()
|
assert "ServiceClass.attr_list[0].name changed to Ho" not in caplog.text
|
||||||
expected_output = sorted(["ServiceClass.attr_list[1].name = Ho"])
|
assert "ServiceClass.attr_list[1].name changed to Ho" in caplog.text
|
||||||
actual_output = sorted(captured.out.strip().split("\n"))
|
caplog.clear()
|
||||||
assert actual_output == expected_output
|
|
||||||
|
|
||||||
assert service_instance.attr_list_2[0] == service_instance.attr
|
assert service_instance.attr_list_2[0] == service_instance.attr
|
||||||
assert service_instance.attr_list_2[0] == service_instance.attr_list_2[1]
|
assert service_instance.attr_list_2[0] == service_instance.attr_list_2[1]
|
||||||
|
|
||||||
service_instance.attr_list_2[0].name = "Ho"
|
service_instance.attr_list_2[0].name = "Ho"
|
||||||
captured = capsys.readouterr()
|
assert "ServiceClass.attr.name changed to Ho" in caplog.text
|
||||||
expected_output = sorted(
|
assert "ServiceClass.attr_list_2[0].name changed to Ho" in caplog.text
|
||||||
[
|
assert "ServiceClass.attr_list_2[1].name changed to Ho" in caplog.text
|
||||||
"ServiceClass.attr.name = Ho",
|
caplog.clear()
|
||||||
"ServiceClass.attr_list_2[0].name = Ho",
|
|
||||||
"ServiceClass.attr_list_2[1].name = Ho",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
actual_output = sorted(captured.out.strip().split("\n"))
|
|
||||||
assert actual_output == expected_output
|
|
||||||
|
|
||||||
service_instance.attr_list_2[1].name = "Ho"
|
service_instance.attr_list_2[1].name = "Ho"
|
||||||
captured = capsys.readouterr()
|
assert "ServiceClass.attr.name changed to Ho" in caplog.text
|
||||||
expected_output = sorted(
|
assert "ServiceClass.attr_list_2[0].name changed to Ho" in caplog.text
|
||||||
[
|
assert "ServiceClass.attr_list_2[1].name changed to Ho" in caplog.text
|
||||||
"ServiceClass.attr.name = Ho",
|
caplog.clear()
|
||||||
"ServiceClass.attr_list_2[0].name = Ho",
|
|
||||||
"ServiceClass.attr_list_2[1].name = Ho",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
actual_output = sorted(captured.out.strip().split("\n"))
|
|
||||||
assert actual_output == expected_output
|
|
||||||
|
|
||||||
service_instance.attr.name = "Ho"
|
service_instance.attr.name = "Ho"
|
||||||
captured = capsys.readouterr()
|
assert "ServiceClass.attr.name changed to Ho" in caplog.text
|
||||||
expected_output = sorted(
|
assert "ServiceClass.attr_list_2[0].name changed to Ho" in caplog.text
|
||||||
[
|
assert "ServiceClass.attr_list_2[1].name changed to Ho" in caplog.text
|
||||||
"ServiceClass.attr.name = Ho",
|
caplog.clear()
|
||||||
"ServiceClass.attr_list_2[0].name = Ho",
|
|
||||||
"ServiceClass.attr_list_2[1].name = Ho",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
actual_output = sorted(captured.out.strip().split("\n"))
|
|
||||||
assert actual_output == expected_output
|
|
||||||
|
|
||||||
|
|
||||||
def test_nested_instance_list_attributes(capsys: CaptureFixture) -> None:
|
def test_nested_instance_list_attributes(caplog: LogCaptureFixture) -> None:
|
||||||
class SubSubClass(DataService):
|
class SubSubClass(DataService):
|
||||||
name = "Hello"
|
name = "Hello"
|
||||||
|
|
@@ -541,28 +402,15 @@ def test_nested_instance_list_attributes(capsys: CaptureFixture) -> None:
             super().__init__()
 
     service_instance = ServiceClass()
-    _ = capsys.readouterr()
 
     assert service_instance.attr[0].attr_list[0] == service_instance.class_attr
 
     service_instance.attr[0].attr_list[0].name = "Ho"
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr[0].attr_list[0].name = Ho",
-            "ServiceClass.class_attr.name = Ho",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr[0].attr_list[0].name changed to Ho" in caplog.text
+    assert "ServiceClass.class_attr.name changed to Ho" in caplog.text
+    caplog.clear()
 
     service_instance.class_attr.name = "Ho"
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.attr[0].attr_list[0].name = Ho",
-            "ServiceClass.class_attr.name = Ho",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.attr[0].attr_list[0].name changed to Ho" in caplog.text
+    assert "ServiceClass.class_attr.name changed to Ho" in caplog.text
+    caplog.clear()
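
The hunks above and below all apply the same conversion: instead of reading printed change notifications from stdout with `capsys`, the tests now assert against pytest's `caplog` fixture, which suggests the change notifications are now emitted through `logging` rather than `print`. A minimal, self-contained sketch of that pytest pattern follows; the logger and the `notify` helper are illustrative stand-ins, not pydase code.

```python
import logging

from pytest import LogCaptureFixture

logger = logging.getLogger(__name__)


def notify(name: str, value: object) -> None:
    # stand-in for the change notification a service would emit on assignment
    logger.info("%s changed to %s", name, value)


def test_notification(caplog: LogCaptureFixture) -> None:
    caplog.set_level(logging.INFO)  # make sure INFO-level records are captured
    notify("ServiceClass.attr.name", "Ho")
    assert "ServiceClass.attr.name changed to Ho" in caplog.text
    caplog.clear()  # start the next block of assertions from an empty capture
```
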
@@ -1,9 +1,9 @@
-from pytest import CaptureFixture
+from pytest import LogCaptureFixture
 
 from pydase import DataService
 
 
-def test_properties(capsys: CaptureFixture) -> None:
+def test_properties(caplog: LogCaptureFixture) -> None:
     class ServiceClass(DataService):
         _voltage = 10.0
         _current = 1.0
@@ -31,30 +31,17 @@ def test_properties(capsys: CaptureFixture) -> None:
     test_service = ServiceClass()
     test_service.voltage = 1
 
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.power = 1.0",
-            "ServiceClass.voltage = 1.0",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.power changed to 1.0" in caplog.text
+    assert "ServiceClass.voltage changed to 1.0" in caplog.text
+    caplog.clear()
 
     test_service.current = 12.0
 
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.power = 12.0",
-            "ServiceClass.current = 12.0",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.power changed to 12.0" in caplog.text
+    assert "ServiceClass.current changed to 12.0" in caplog.text
 
 
-def test_nested_properties(capsys: CaptureFixture) -> None:
+def test_nested_properties(caplog: LogCaptureFixture) -> None:
     class SubSubClass(DataService):
         name = "Hello"
 
@@ -77,45 +64,31 @@ def test_nested_properties(capsys: CaptureFixture) -> None:
     test_service = ServiceClass()
     test_service.name = "Peepz"
 
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.name = Peepz",
-            "ServiceClass.sub_name = Hello Peepz",
-            "ServiceClass.subsub_name = Hello Peepz",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.name changed to Peepz" in caplog.text
+    assert "ServiceClass.sub_name changed to Hello Peepz" in caplog.text
+    assert "ServiceClass.subsub_name changed to Hello Peepz" in caplog.text
+    caplog.clear()
 
     test_service.class_attr.name = "Hi"
 
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.sub_name = Hi Peepz",
-            "ServiceClass.subsub_name = Hello Peepz",  # registers subclass changes
-            "ServiceClass.class_attr.name = Hi",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.sub_name changed to Hi Peepz" in caplog.text
+    assert (
+        "ServiceClass.subsub_name changed to Hello Peepz" in caplog.text
+    )  # registers subclass changes
+    assert "ServiceClass.class_attr.name changed to Hi" in caplog.text
+    caplog.clear()
 
     test_service.class_attr.class_attr.name = "Ciao"
 
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.sub_name = Hi Peepz",  # registers subclass changes
-            "ServiceClass.subsub_name = Ciao Peepz",
-            "ServiceClass.class_attr.class_attr.name = Ciao",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert (
+        "ServiceClass.sub_name changed to Hi Peepz" in caplog.text
+    )  # registers subclass changes
+    assert "ServiceClass.subsub_name changed to Ciao Peepz" in caplog.text
+    assert "ServiceClass.class_attr.class_attr.name changed to Ciao" in caplog.text
+    caplog.clear()
 
 
-def test_simple_list_properties(capsys: CaptureFixture) -> None:
+def test_simple_list_properties(caplog: LogCaptureFixture) -> None:
     class ServiceClass(DataService):
         list = ["Hello", "Ciao"]
         name = "World"
@@ -127,30 +100,17 @@ def test_simple_list_properties(capsys: CaptureFixture) -> None:
     test_service = ServiceClass()
     test_service.name = "Peepz"
 
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.name = Peepz",
-            "ServiceClass.total_name = Hello Peepz",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.name changed to Peepz" in caplog.text
+    assert "ServiceClass.total_name changed to Hello Peepz" in caplog.text
+    caplog.clear()
 
     test_service.list[0] = "Hi"
 
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.total_name = Hi Peepz",
-            "ServiceClass.list[0] = Hi",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.total_name changed to Hi Peepz" in caplog.text
+    assert "ServiceClass.list[0] changed to Hi" in caplog.text
 
 
-def test_class_list_properties(capsys: CaptureFixture) -> None:
+def test_class_list_properties(caplog: LogCaptureFixture) -> None:
     class SubClass(DataService):
         name = "Hello"
 
@@ -165,30 +125,17 @@ def test_class_list_properties(capsys: CaptureFixture) -> None:
     test_service = ServiceClass()
     test_service.name = "Peepz"
 
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.name = Peepz",
-            "ServiceClass.total_name = Hello Peepz",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.name changed to Peepz" in caplog.text
+    assert "ServiceClass.total_name changed to Hello Peepz" in caplog.text
+    caplog.clear()
 
     test_service.list[0].name = "Hi"
 
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        [
-            "ServiceClass.total_name = Hi Peepz",
-            "ServiceClass.list[0].name = Hi",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))
-    assert actual_output == expected_output
+    assert "ServiceClass.total_name changed to Hi Peepz" in caplog.text
+    assert "ServiceClass.list[0].name changed to Hi" in caplog.text
 
 
-def test_subclass_properties(capsys: CaptureFixture) -> None:
+def test_subclass_properties(caplog: LogCaptureFixture) -> None:
     class SubClass(DataService):
         name = "Hello"
         _voltage = 10.0
@@ -224,21 +171,15 @@ def test_subclass_properties(capsys: CaptureFixture) -> None:
     test_service = ServiceClass()
     test_service.class_attr.voltage = 10.0
 
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        {
-            "ServiceClass.class_attr.voltage = 10.0",
-            "ServiceClass.class_attr.power = 10.0",
-            "ServiceClass.voltage = 10.0",
-        }
-    )
     # using a set here as "ServiceClass.voltage = 10.0" is emitted twice. Once for
     # changing voltage, and once for changing power.
-    actual_output = sorted(set(captured.out.strip().split("\n")))
-    assert actual_output == expected_output
+    assert "ServiceClass.class_attr.voltage changed to 10.0" in caplog.text
+    assert "ServiceClass.class_attr.power changed to 10.0" in caplog.text
+    assert "ServiceClass.voltage changed to 10.0" in caplog.text
+    caplog.clear()
 
 
-def test_subclass_properties_2(capsys: CaptureFixture) -> None:
+def test_subclass_properties_2(caplog: LogCaptureFixture) -> None:
     class SubClass(DataService):
         name = "Hello"
         _voltage = 10.0
@@ -274,24 +215,17 @@ def test_subclass_properties_2(capsys: CaptureFixture) -> None:
     test_service = ServiceClass()
     test_service.class_attr[1].current = 10.0
 
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        {
-            "ServiceClass.class_attr[1].current = 10.0",
-            "ServiceClass.class_attr[1].power = 100.0",
-            "ServiceClass.voltage = 10.0",
-        }
-    )
     # using a set here as "ServiceClass.voltage = 10.0" is emitted twice. Once for
     # changing current, and once for changing power. Note that the voltage property is
    # only dependent on class_attr[0] but still emits an update notification. This is
     # because every time any item in the list `test_service.class_attr` is changed,
     # a notification will be emitted.
-    actual_output = sorted(set(captured.out.strip().split("\n")))
-    assert actual_output == expected_output
+    assert "ServiceClass.class_attr[1].current changed to 10.0" in caplog.text
+    assert "ServiceClass.class_attr[1].power changed to 100.0" in caplog.text
+    assert "ServiceClass.voltage changed to 10.0" in caplog.text
 
 
-def test_subsubclass_properties(capsys: CaptureFixture) -> None:
+def test_subsubclass_properties(caplog: LogCaptureFixture) -> None:
     class SubSubClass(DataService):
         _voltage = 10.0
 
@@ -321,21 +255,18 @@ def test_subsubclass_properties(capsys: CaptureFixture) -> None:
     test_service = ServiceClass()
 
     test_service.class_attr[1].class_attr.voltage = 100.0
-    captured = capsys.readouterr()
-    expected_output = sorted(
-        {
-            "ServiceClass.class_attr[0].class_attr.voltage = 100.0",
-            "ServiceClass.class_attr[1].class_attr.voltage = 100.0",
-            "ServiceClass.class_attr[0].power = 50.0",
-            "ServiceClass.class_attr[1].power = 50.0",
-            "ServiceClass.power = 50.0",
-        }
+    assert (
+        "ServiceClass.class_attr[0].class_attr.voltage changed to 100.0" in caplog.text
     )
-    actual_output = sorted(set(captured.out.strip().split("\n")))
-    assert actual_output == expected_output
+    assert (
+        "ServiceClass.class_attr[1].class_attr.voltage changed to 100.0" in caplog.text
+    )
+    assert "ServiceClass.class_attr[0].power changed to 50.0" in caplog.text
+    assert "ServiceClass.class_attr[1].power changed to 50.0" in caplog.text
+    assert "ServiceClass.power changed to 50.0" in caplog.text
 
 
-def test_subsubclass_instance_properties(capsys: CaptureFixture) -> None:
+def test_subsubclass_instance_properties(caplog: LogCaptureFixture) -> None:
     class SubSubClass(DataService):
         def __init__(self) -> None:
             self._voltage = 10.0
@@ -369,16 +300,9 @@ def test_subsubclass_instance_properties(capsys: CaptureFixture) -> None:
     test_service = ServiceClass()
 
     test_service.class_attr[1].attr[0].voltage = 100.0
-    captured = capsys.readouterr()
     # again, changing an item in a list will trigger the callbacks. This is why a
     # notification for `ServiceClass.power` is emitted although it did not change its
     # value
-    expected_output = sorted(
-        {
-            "ServiceClass.class_attr[1].attr[0].voltage = 100.0",
-            "ServiceClass.class_attr[1].power = 50.0",
-            "ServiceClass.power = 5.0",
-        }
-    )
-    actual_output = sorted(set(captured.out.strip().split("\n")))
-    assert actual_output == expected_output
+    assert "ServiceClass.class_attr[1].attr[0].voltage changed to 100.0" in caplog.text
+    assert "ServiceClass.class_attr[1].power changed to 50.0" in caplog.text
+    assert "ServiceClass.power changed to 5.0" in caplog.text
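
The property tests above rely on the behaviour described in their inline comments: whenever an attribute changes, every property computed from it is re-evaluated and announced as well, and changing any item of a list re-triggers that list's callbacks. The sketch below is illustrative only and is not pydase's implementation; it merely shows why a single assignment can produce several "changed to" messages.

```python
import logging

logger = logging.getLogger(__name__)


class Device:
    """Toy example: `power` depends on `voltage`, so both get announced."""

    def __init__(self) -> None:
        self._voltage = 10.0
        self._current = 1.0

    @property
    def power(self) -> float:
        return self._voltage * self._current

    @property
    def voltage(self) -> float:
        return self._voltage

    @voltage.setter
    def voltage(self, value: float) -> None:
        self._voltage = value
        # announce the attribute itself and every property derived from it
        logger.info("Device.voltage changed to %s", value)
        logger.info("Device.power changed to %s", self.power)
```
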
@@ -1,12 +1,12 @@
 from typing import Any
 
-from pytest import CaptureFixture
+from pytest import LogCaptureFixture
 
 import pydase.units as u
 from pydase.data_service.data_service import DataService
 
 
-def test_DataService_setattr(capsys: CaptureFixture) -> None:
+def test_DataService_setattr(caplog: LogCaptureFixture) -> None:
     class ServiceClass(DataService):
         voltage = 1.0 * u.units.V
         _current: u.Quantity = 1.0 * u.units.mA
@@ -28,31 +28,17 @@ def test_DataService_setattr(capsys: CaptureFixture) -> None:
 
     assert service.voltage == 10.0 * u.units.V  # type: ignore
     assert service.current == 1.5 * u.units.mA
-    captured = capsys.readouterr()
 
-    expected_output = sorted(
-        [
-            "ServiceClass.voltage = 10.0 V",
-            "ServiceClass.current = 1.5 mA",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert actual_output == expected_output
+    assert "ServiceClass.voltage changed to 10.0 V" in caplog.text
+    assert "ServiceClass.current changed to 1.5 mA" in caplog.text
 
     service.voltage = 12.0 * u.units.V  # type: ignore
     service.current = 1.51 * u.units.A
     assert service.voltage == 12.0 * u.units.V  # type: ignore
     assert service.current == 1.51 * u.units.A
-    captured = capsys.readouterr()
 
-    expected_output = sorted(
-        [
-            "ServiceClass.voltage = 12.0 V",
-            "ServiceClass.current = 1.51 A",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert actual_output == expected_output
+    assert "ServiceClass.voltage changed to 12.0 V" in caplog.text
+    assert "ServiceClass.current changed to 1.51 A" in caplog.text
 
 
 def test_convert_to_quantity() -> None:
@@ -62,7 +48,7 @@ def test_convert_to_quantity() -> None:
     assert u.convert_to_quantity(1.0 * u.units.mV) == 1.0 * u.units.mV
 
 
-def test_update_DataService_attribute(capsys: CaptureFixture) -> None:
+def test_update_DataService_attribute(caplog: LogCaptureFixture) -> None:
     class ServiceClass(DataService):
         voltage = 1.0 * u.units.V
         _current: u.Quantity = 1.0 * u.units.mA
@@ -80,39 +66,18 @@ def test_update_DataService_attribute(capsys: CaptureFixture) -> None:
     service.update_DataService_attribute(
         path_list=[], attr_name="voltage", value=1.0 * u.units.mV
     )
-    captured = capsys.readouterr()
 
-    expected_output = sorted(
-        [
-            "ServiceClass.voltage = 1.0 mV",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert actual_output == expected_output
+    assert "ServiceClass.voltage changed to 1.0 mV" in caplog.text
 
     service.update_DataService_attribute(path_list=[], attr_name="voltage", value=2)
-    captured = capsys.readouterr()
 
-    expected_output = sorted(
-        [
-            "ServiceClass.voltage = 2.0 mV",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert actual_output == expected_output
+    assert "ServiceClass.voltage changed to 2.0 mV" in caplog.text
 
     service.update_DataService_attribute(
         path_list=[], attr_name="voltage", value={"magnitude": 123, "unit": "kV"}
     )
-    captured = capsys.readouterr()
 
-    expected_output = sorted(
-        [
-            "ServiceClass.voltage = 123.0 kV",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert actual_output == expected_output
+    assert "ServiceClass.voltage changed to 123.0 kV" in caplog.text
 
 
 def test_autoconvert_offset_to_baseunit() -> None:
@@ -126,7 +91,7 @@ def test_autoconvert_offset_to_baseunit() -> None:
     assert False, f"Offset unit raises exception {exc}"
 
 
-def test_loading_from_json(capsys: CaptureFixture) -> None:
+def test_loading_from_json(caplog: LogCaptureFixture) -> None:
     """This function tests if the quantity read from the json description is actually
     passed as a quantity to the property setter."""
     JSON_DICT = {
@@ -156,12 +121,4 @@ def test_loading_from_json(capsys: CaptureFixture) -> None:
 
     service.load_DataService_from_JSON(JSON_DICT)
 
-    captured = capsys.readouterr()
-
-    expected_output = sorted(
-        [
-            "ServiceClass.some_unit = 10.0 A",
-        ]
-    )
-    actual_output = sorted(captured.out.strip().split("\n"))  # type: ignore
-    assert actual_output == expected_output
+    assert "ServiceClass.some_unit changed to 10.0 A" in caplog.text
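
`test_update_DataService_attribute` above feeds `update_DataService_attribute` three kinds of values for a unit-carrying attribute: a quantity, a bare number (which keeps the current unit, hence `2` becoming `2.0 mV`), and a `{"magnitude": ..., "unit": ...}` mapping. How such a mapping can be turned into a quantity is sketched below with plain `pint`; this is an illustration, not the helper pydase itself uses.

```python
import pint

ureg = pint.UnitRegistry()


def quantity_from_dict(value: dict) -> "pint.Quantity":
    """e.g. {"magnitude": 123, "unit": "kV"} -> quantity of 123.0 kilovolt"""
    return float(value["magnitude"]) * ureg(value["unit"])


print(quantity_from_dict({"magnitude": 123, "unit": "kV"}))  # 123.0 kilovolt
```
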
@@ -1,70 +1,6 @@
 import pytest
 
-from pydase.utils.helpers import (
-    extract_dict_or_list_entry,
-    get_nested_value_from_DataService_by_path_and_key,
-    is_property_attribute,
-)
-
-# Sample data for the tests
-data_sample = {
-    "attr1": {"type": "bool", "value": False, "readonly": False, "doc": None},
-    "class_attr": {
-        "type": "MyClass",
-        "value": {"sub_attr": {"type": "float", "value": 20.5}},
-    },
-    "list_attr": {
-        "type": "list",
-        "value": [
-            {"type": "int", "value": 0, "readonly": False, "doc": None},
-            {"type": "float", "value": 1.0, "readonly": False, "doc": None},
-        ],
-        "readonly": False,
-    },
-}
-
-
-# Tests for extract_dict_or_list_entry
-def test_extract_dict_with_valid_list_index() -> None:
-    result = extract_dict_or_list_entry(data_sample, "list_attr[1]")
-    assert result == {"type": "float", "value": 1.0, "readonly": False, "doc": None}
-
-
-def test_extract_dict_without_list_index() -> None:
-    result = extract_dict_or_list_entry(data_sample, "attr1")
-    assert result == {"type": "bool", "value": False, "readonly": False, "doc": None}
-
-
-def test_extract_dict_with_invalid_key() -> None:
-    result = extract_dict_or_list_entry(data_sample, "attr_not_exist")
-    assert result is None
-
-
-def test_extract_dict_with_invalid_list_index() -> None:
-    result = extract_dict_or_list_entry(data_sample, "list_attr[5]")
-    assert result is None
-
-
-# Tests for get_nested_value_from_DataService_by_path_and_key
-def test_get_nested_value_with_default_key() -> None:
-    result = get_nested_value_from_DataService_by_path_and_key(
-        data_sample, "list_attr[0]"
-    )
-    assert result == 0
-
-
-def test_get_nested_value_with_custom_key() -> None:
-    result = get_nested_value_from_DataService_by_path_and_key(
-        data_sample, "class_attr.sub_attr", "type"
-    )
-    assert result == "float"
-
-
-def test_get_nested_value_with_invalid_path() -> None:
-    result = get_nested_value_from_DataService_by_path_and_key(
-        data_sample, "class_attr.nonexistent_attr"
-    )
-    assert result is None
+from pydase.utils.helpers import is_property_attribute
 
 
 @pytest.mark.parametrize(
@@ -6,7 +6,13 @@ import pytest
 import pydase
 import pydase.units as u
 from pydase.components.coloured_enum import ColouredEnum
-from pydase.utils.serialization import dump
+from pydase.utils.serializer import (
+    SerializationPathError,
+    dump,
+    get_nested_dict_by_path,
+    get_next_level_dict_by_key,
+    set_nested_value_by_path,
+)
 
 
 @pytest.mark.parametrize(
@@ -286,3 +292,126 @@ def test_dict_serialization() -> None:
             "int_key": {"doc": None, "readonly": False, "type": "int", "value": 1},
         },
     }
+
+
+@pytest.fixture
+def setup_dict():
+    class MySubclass(pydase.DataService):
+        attr3 = 1.0
+        list_attr = [1.0, 1]
+
+    class ServiceClass(pydase.DataService):
+        attr1 = 1.0
+        attr2 = MySubclass()
+        attr_list = [0, 1, MySubclass()]
+
+    return ServiceClass().serialize()
+
+
+def test_update_attribute(setup_dict):
+    set_nested_value_by_path(setup_dict, "attr1", 15)
+    assert setup_dict["attr1"]["value"] == 15
+
+
+def test_update_nested_attribute(setup_dict):
+    set_nested_value_by_path(setup_dict, "attr2.attr3", 25.0)
+    assert setup_dict["attr2"]["value"]["attr3"]["value"] == 25.0
+
+
+def test_update_list_entry(setup_dict):
+    set_nested_value_by_path(setup_dict, "attr_list[1]", 20)
+    assert setup_dict["attr_list"]["value"][1]["value"] == 20
+
+
+def test_update_list_append(setup_dict):
+    set_nested_value_by_path(setup_dict, "attr_list[3]", 20)
+    assert setup_dict["attr_list"]["value"][3]["value"] == 20
+
+
+def test_update_invalid_list_index(setup_dict, caplog: pytest.LogCaptureFixture):
+    set_nested_value_by_path(setup_dict, "attr_list[10]", 30)
+    assert (
+        "Error occured trying to change 'attr_list[10]': list index "
+        "out of range" in caplog.text
+    )
+
+
+def test_update_invalid_path(setup_dict, caplog: pytest.LogCaptureFixture):
+    set_nested_value_by_path(setup_dict, "invalid_path", 30)
+    assert (
+        "Error occured trying to access the key 'invalid_path': it is either "
+        "not present in the current dictionary or its value does not contain "
+        "a 'value' key." in caplog.text
+    )
+
+
+def test_update_list_inside_class(setup_dict):
+    set_nested_value_by_path(setup_dict, "attr2.list_attr[1]", 40)
+    assert setup_dict["attr2"]["value"]["list_attr"]["value"][1]["value"] == 40
+
+
+def test_update_class_attribute_inside_list(setup_dict):
+    set_nested_value_by_path(setup_dict, "attr_list[2].attr3", 50)
+    assert setup_dict["attr_list"]["value"][2]["value"]["attr3"]["value"] == 50
+
+
+def test_get_next_level_attribute_nested_dict(setup_dict):
+    nested_dict = get_next_level_dict_by_key(setup_dict, "attr1")
+    assert nested_dict == setup_dict["attr1"]
+
+
+def test_get_next_level_list_entry_nested_dict(setup_dict):
+    nested_dict = get_next_level_dict_by_key(setup_dict, "attr_list[0]")
+    assert nested_dict == setup_dict["attr_list"]["value"][0]
+
+
+def test_get_next_level_invalid_path_nested_dict(setup_dict):
+    with pytest.raises(SerializationPathError):
+        get_next_level_dict_by_key(setup_dict, "invalid_path")
+
+
+def test_get_next_level_invalid_list_index(setup_dict):
+    with pytest.raises(SerializationPathError):
+        get_next_level_dict_by_key(setup_dict, "attr_list[10]")
+
+
+def test_get_attribute(setup_dict):
+    nested_dict = get_nested_dict_by_path(setup_dict, "attr1")
+    assert nested_dict["value"] == 1.0
+
+
+def test_get_nested_attribute(setup_dict):
+    nested_dict = get_nested_dict_by_path(setup_dict, "attr2.attr3")
+    assert nested_dict["value"] == 1.0
+
+
+def test_get_list_entry(setup_dict):
+    nested_dict = get_nested_dict_by_path(setup_dict, "attr_list[1]")
+    assert nested_dict["value"] == 1
+
+
+def test_get_list_inside_class(setup_dict):
+    nested_dict = get_nested_dict_by_path(setup_dict, "attr2.list_attr[1]")
+    assert nested_dict["value"] == 1.0
+
+
+def test_get_class_attribute_inside_list(setup_dict):
+    nested_dict = get_nested_dict_by_path(setup_dict, "attr_list[2].attr3")
+    assert nested_dict["value"] == 1.0
+
+
+def test_get_invalid_list_index(setup_dict, caplog: pytest.LogCaptureFixture):
+    get_nested_dict_by_path(setup_dict, "attr_list[10]")
+    assert (
+        "Error occured trying to change 'attr_list[10]': list index "
+        "out of range" in caplog.text
+    )
+
+
+def test_get_invalid_path(setup_dict, caplog: pytest.LogCaptureFixture):
+    get_nested_dict_by_path(setup_dict, "invalid_path")
+    assert (
+        "Error occured trying to access the key 'invalid_path': it is either "
+        "not present in the current dictionary or its value does not contain "
+        "a 'value' key." in caplog.text
+    )
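
The tests added above document the intended workflow of the new `pydase.utils.serializer` helpers: serialize a `DataService`, then read or update entries of the resulting nested dictionary by a dotted/indexed path. A compact usage sketch based on the calls exercised above follows; the `Motor`/`Rack` classes are made-up examples, and the assertions assume the same behaviour the new tests check.

```python
import pydase
from pydase.utils.serializer import (
    get_nested_dict_by_path,
    set_nested_value_by_path,
)


class Motor(pydase.DataService):
    position = 0.0


class Rack(pydase.DataService):
    motors = [Motor(), Motor()]


serialized = Rack().serialize()

# read the serialized entry of a nested attribute ...
assert get_nested_dict_by_path(serialized, "motors[0].position")["value"] == 0.0

# ... and update it in place via its path
set_nested_value_by_path(serialized, "motors[1].position", 2.5)
assert get_nested_dict_by_path(serialized, "motors[1].position")["value"] == 2.5
```
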