Merge pull request #74 from tiqi-group/cleanup/ruff_linting

Cleanup: switching to the ruff linter and formatter
Authored by Mose Müller on 2023-11-28 15:23:53 +01:00; committed by GitHub
commit abafd1a2b2
25 changed files with 598 additions and 839 deletions


@ -1,8 +0,0 @@
[flake8]
ignore = E501,W503,FS003,F403,F405,E203
include = src
max-line-length = 88
max-doc-length = 88
max-complexity = 7
max-expression-complexity = 7
use_class_attributes_order_strict_mode=True


@ -20,6 +20,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: chartboost/ruff-action@v1
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
@ -29,13 +30,9 @@ jobs:
python -m pip install --upgrade pip
python -m pip install poetry
poetry install
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
poetry run flake8 src/pydase --count --show-source --statistics
- name: Test with pytest
run: |
poetry run pytest
- name: Test with pyright
run: |
poetry run pyright src/pydase

.gitignore

@ -128,6 +128,9 @@ venv.bak/
.dmypy.json
dmypy.json
# ruff
.ruff_cache/
# Pyre type checker
.pyre/

poetry.lock

File diff suppressed because it is too large.


@ -25,17 +25,10 @@ pytest = "^7.4.0"
pytest-cov = "^4.1.0"
mypy = "^1.4.1"
black = "^23.1.0"
isort = "^5.12.0"
flake8 = "^5.0.4"
flake8-use-fstring = "^1.4"
flake8-functions = "^0.0.7"
flake8-comprehensions = "^3.11.1"
flake8-pep585 = "^0.1.7"
flake8-pep604 = "^0.1.0"
flake8-eradicate = "^1.4.0"
matplotlib = "^3.7.2"
pyright = "^1.1.323"
pytest-mock = "^3.11.1"
ruff = "^0.1.5"
[tool.poetry.group.docs.dependencies]
@ -48,6 +41,37 @@ pymdown-extensions = "^10.1"
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.ruff]
target-version = "py310" # Always generate Python 3.10-compatible code
line-length = 88
select = ["ALL"]
ignore = [
"ANN101", # typing self
"ANN401", # disallow Any typing
"B904", # Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... from None` to distinguish them from errors in exception handling
"COM812", # Missing trailing comma rule; defer to Black for formatting
"E203", # whitespace-before-punctuation
"UP007", # Use `X | Y` for type annotations
"S310", # suspicious-url-open-usage
"A", # flake8-builtins
"ARG", # flake8-unused-arguments
"BLE", # flake8-blind-except
"D", # pydocstyle
"EM", # flake8 error messages
"FBT", # Boolean trap detection
"PTH", # flake8-use-pathlib
"SLF", # flake8-self
"TD", # flake8-todos
"TRY", # Exception Handling AntiPatterns in Python
]
extend-exclude = [
"docs", "frontend", "tests"
]
[tool.ruff.lint.mccabe]
max-complexity = 7
[tool.pyright]
include = ["src/pydase"]
exclude = ["**/node_modules", "**/__pycache__", "docs", "frontend", "tests"]


@ -57,5 +57,3 @@ class ColouredEnum(Enum):
my_service.status = MyStatus.FAILED
```
"""
pass


@ -5,7 +5,7 @@ from pathlib import Path
from typing import TYPE_CHECKING, Optional
from urllib.request import urlopen
import PIL.Image # type: ignore
import PIL.Image # type: ignore[import-untyped]
from pydase.data_service.data_service import DataService
@ -33,17 +33,17 @@ class Image(DataService):
def load_from_path(self, path: Path | str) -> None:
with PIL.Image.open(path) as image:
self._load_from_PIL(image)
self._load_from_pil(image)
def load_from_matplotlib_figure(self, fig: "Figure", format_: str = "png") -> None:
buffer = io.BytesIO()
fig.savefig(buffer, format=format_) # type: ignore
fig.savefig(buffer, format=format_) # type: ignore[reportUnknownMemberType]
value_ = base64.b64encode(buffer.getvalue())
self._load_from_base64(value_, format_)
def load_from_url(self, url: str) -> None:
image = PIL.Image.open(urlopen(url))
self._load_from_PIL(image)
self._load_from_pil(image)
def load_from_base64(self, value_: bytes, format_: Optional[str] = None) -> None:
if format_ is None:
@ -60,7 +60,7 @@ class Image(DataService):
self._value = value
self._format = format_
def _load_from_PIL(self, image: PIL.Image.Image) -> None:
def _load_from_pil(self, image: PIL.Image.Image) -> None:
if image.format is not None:
format_ = image.format
buffer = io.BytesIO()
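The bare `# type: ignore` comments above gain explicit error codes; with `select = ["ALL"]`, ruff's PGH003 flags blanket ignores, and scoping the ignore keeps unrelated checker errors on the same line visible. A minimal illustration:

```python
# A blanket ignore hides *every* checker error on this line:
x: int = "oops"  # type: ignore

# Scoping it to one code (PGH003) keeps other mistakes visible:
y: int = "oops"  # type: ignore[assignment]
```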


@ -13,15 +13,15 @@ class NumberSlider(DataService):
Parameters:
-----------
value (float | int, optional):
value (float, optional):
The initial value of the slider. Defaults to 0.
min (float, optional):
The minimum value of the slider. Defaults to 0.
max (float, optional):
The maximum value of the slider. Defaults to 100.
step_size (float | int, optional):
step_size (float, optional):
The increment/decrement step size of the slider. Defaults to 1.0.
type (Literal["int"] | Literal["float"], optional):
type (Literal["int", "float"], optional):
The type of the slider value. Determines if the value is an integer or float.
Defaults to "float".
@ -38,23 +38,23 @@ class NumberSlider(DataService):
```
"""
def __init__(
def __init__( # noqa: PLR0913
self,
value: float | int = 0,
min: float = 0.0,
max: float = 100.0,
step_size: float | int = 1.0,
type: Literal["int"] | Literal["float"] = "float",
value: float = 0,
min_: float = 0.0,
max_: float = 100.0,
step_size: float = 1.0,
type_: Literal["int", "float"] = "float",
) -> None:
if type not in {"float", "int"}:
logger.error(f"Unknown type '{type}'. Using 'float'.")
type = "float"
if type_ not in {"float", "int"}:
logger.error("Unknown type '%s'. Using 'float'.", type_)
type_ = "float"
self._type = type
self._type = type_
self.step_size = step_size
self.value = value
self.min = min
self.max = max
self.min = min_
self.max = max_
super().__init__()
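The renamed parameters (`min_`, `max_`, `type_`) no longer shadow the `min`, `max`, and `type` builtins, but keyword call sites must be updated accordingly. A hypothetical usage sketch with the new signature (values are illustrative):

```python
from pydase.components.number_slider import NumberSlider

# Keyword names follow the renamed parameters.
slider = NumberSlider(value=5, min_=0.0, max_=10.0, step_size=0.5, type_="int")
```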


@ -3,7 +3,7 @@ from typing import Literal
from confz import BaseConfig, EnvSource
class OperationMode(BaseConfig): # type: ignore
environment: Literal["development"] | Literal["production"] = "development"
class OperationMode(BaseConfig): # type: ignore[misc]
environment: Literal["development", "production"] = "development"
CONFIG_SOURCES = EnvSource(allow=["ENVIRONMENT"])
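`Literal["development"] | Literal["production"]` and `Literal["development", "production"]` are equivalent per the typing spec; the multi-value form is simply more compact:

```python
from typing import Literal, get_args

Mode = Literal["development", "production"]
# The union of single-value Literals collapses to one multi-value Literal.
assert get_args(Mode) == ("development", "production")
```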


@ -2,8 +2,7 @@ from __future__ import annotations
import inspect
import logging
from collections.abc import Callable
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING, Any, ClassVar
from pydase.data_service.abstract_data_service import AbstractDataService
from pydase.utils.helpers import get_class_and_instance_attributes
@ -11,13 +10,15 @@ from pydase.utils.helpers import get_class_and_instance_attributes
from .data_service_list import DataServiceList
if TYPE_CHECKING:
from collections.abc import Callable
from .data_service import DataService
logger = logging.getLogger(__name__)
class CallbackManager:
_notification_callbacks: list[Callable[[str, str, Any], Any]] = []
_notification_callbacks: ClassVar[list[Callable[[str, str, Any], Any]]] = []
"""
A list of callback functions that are executed when a change occurs in the
DataService instance. These functions are intended to handle or respond to these
@ -38,7 +39,7 @@ class CallbackManager:
This implementation follows the observer pattern, with the DataService instance as
the "subject" and the callback functions as the "observers".
"""
_list_mapping: dict[int, DataServiceList] = {}
_list_mapping: ClassVar[dict[int, DataServiceList]] = {}
"""
A dictionary mapping the id of the original lists to the corresponding
DataServiceList instances.
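The new `ClassVar` annotations mark these mutable containers as deliberately shared class-level state (ruff's RUF012 flags mutable class attributes without it). A minimal sketch of the distinction, with illustrative names:

```python
from typing import Any, ClassVar


class Registry:
    # Shared by every instance; ClassVar documents (and type-checks) that intent.
    _global_hooks: ClassVar[list[Any]] = []

    def __init__(self) -> None:
        # Per-instance state belongs on the instance, not in the class body.
        self._local_hooks: list[Any] = []
```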
@ -53,7 +54,7 @@ class CallbackManager:
self.service = service
def _register_list_change_callbacks( # noqa: C901
self, obj: "AbstractDataService", parent_path: str
self, obj: AbstractDataService, parent_path: str
) -> None:
"""
This method ensures that notifications are emitted whenever a public list
@ -135,8 +136,8 @@ class CallbackManager:
new_path = f"{parent_path}.{attr_name}[{i}]"
self._register_list_change_callbacks(item, new_path)
def _register_DataService_instance_callbacks(
self, obj: "AbstractDataService", parent_path: str
def _register_data_service_instance_callbacks(
self, obj: AbstractDataService, parent_path: str
) -> None:
"""
This function is a key part of the observer pattern implemented by the
@ -208,7 +209,7 @@ class CallbackManager:
)
def _register_service_callbacks(
self, nested_attr: "AbstractDataService", parent_path: str, attr_name: str
self, nested_attr: AbstractDataService, parent_path: str, attr_name: str
) -> None:
"""Handles registration of callbacks for DataService attributes"""
@ -217,11 +218,11 @@ class CallbackManager:
nested_attr.__dict__["__root__"] = self.service.__root__
new_path = f"{parent_path}.{attr_name}"
self._register_DataService_instance_callbacks(nested_attr, new_path)
self._register_data_service_instance_callbacks(nested_attr, new_path)
def __register_recursive_parameter_callback(
self,
obj: "AbstractDataService | DataServiceList",
obj: AbstractDataService | DataServiceList,
callback: Callable[[str | int, Any], None],
) -> None:
"""
@ -248,14 +249,14 @@ class CallbackManager:
item._callback_manager.callbacks.add(callback)
for attr_name in set(dir(item)) - set(dir(object)) - {"__root__"}:
attr_value = getattr(item, attr_name)
if isinstance(attr_value, (AbstractDataService, DataServiceList)):
if isinstance(attr_value, AbstractDataService | DataServiceList):
self.__register_recursive_parameter_callback(
attr_value, callback
)
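Passing a PEP 604 union to `isinstance` is equivalent to passing a tuple on Python 3.10+ (matching the `target-version = "py310"` set above); ruff's UP038 prefers the union form:

```python
value: object = 3.14
# Both forms accept the same runtime types on Python 3.10+.
assert isinstance(value, (int, float)) == isinstance(value, int | float)
```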
def _register_property_callbacks( # noqa: C901
self,
obj: "AbstractDataService",
obj: AbstractDataService,
parent_path: str,
) -> None:
"""
@ -284,8 +285,8 @@ class CallbackManager:
item, parent_path=f"{parent_path}.{attr_name}[{i}]"
)
if isinstance(attr_value, property):
dependencies = attr_value.fget.__code__.co_names # type: ignore
source_code_string = inspect.getsource(attr_value.fget) # type: ignore
dependencies = attr_value.fget.__code__.co_names # type: ignore[union-attr]
source_code_string = inspect.getsource(attr_value.fget) # type: ignore[arg-type]
for dependency in dependencies:
# check if the dependencies are attributes of obj
@ -304,11 +305,13 @@ class CallbackManager:
dependency_value = getattr(obj, dependency)
if isinstance(
dependency_value, (DataServiceList, AbstractDataService)
dependency_value, DataServiceList | AbstractDataService
):
def list_or_data_service_callback(
name: Any, value: Any, dependent_attr: str = attr_name
name: Any,
value: Any,
dependent_attr: str = attr_name,
) -> None:
"""Emits a notification through the service's callback
manager.
@ -345,8 +348,8 @@ class CallbackManager:
# Add to callbacks
obj._callback_manager.callbacks.add(callback)
def _register_start_stop_task_callbacks( # noqa
self, obj: "AbstractDataService", parent_path: str
def _register_start_stop_task_callbacks( # noqa: C901
self, obj: AbstractDataService, parent_path: str
) -> None:
"""
This function registers callbacks for start and stop methods of async functions.
@ -400,7 +403,7 @@ class CallbackManager:
self._register_list_change_callbacks(
self.service, f"{self.service.__class__.__name__}"
)
self._register_DataService_instance_callbacks(
self._register_data_service_instance_callbacks(
self.service, f"{self.service.__class__.__name__}"
)
self._register_property_callbacks(
@ -411,12 +414,12 @@ class CallbackManager:
)
def emit_notification(self, parent_path: str, name: str, value: Any) -> None:
logger.debug(f"{parent_path}.{name} changed to {value}!")
logger.debug("%s.%s changed to %s!", parent_path, name, value)
for callback in self._notification_callbacks:
try:
callback(parent_path, name, value)
except Exception as e:
except Exception as e: # noqa: PERF203
logger.error(e)
def add_notification_callback(


@ -1,10 +1,9 @@
import logging
import warnings
from enum import Enum
from pathlib import Path
from typing import Any, Optional, get_type_hints
from typing import TYPE_CHECKING, Any, Optional, get_type_hints
import rpyc # type: ignore
import rpyc # type: ignore[import-untyped]
import pydase.units as u
from pydase.data_service.abstract_data_service import AbstractDataService
@ -24,9 +23,12 @@ from pydase.utils.serializer import (
get_nested_dict_by_path,
)
from pydase.utils.warnings import (
warn_if_instance_class_does_not_inherit_from_DataService,
warn_if_instance_class_does_not_inherit_from_data_service,
)
if TYPE_CHECKING:
from pathlib import Path
logger = logging.getLogger(__name__)
@ -56,8 +58,8 @@ class DataService(rpyc.Service, AbstractDataService):
filename = kwargs.pop("filename", None)
if filename is not None:
warnings.warn(
"The 'filename' argument is deprecated and will be removed in a future version. "
"Please pass the 'filename' argument to `pydase.Server`.",
"The 'filename' argument is deprecated and will be removed in a future "
"version. Please pass the 'filename' argument to `pydase.Server`.",
DeprecationWarning,
stacklevel=2,
)
@ -80,14 +82,15 @@ class DataService(rpyc.Service, AbstractDataService):
super().__setattr__(__name, __value)
if self.__dict__.get("_initialised") and not __name == "_initialised":
if self.__dict__.get("_initialised") and __name != "_initialised":
for callback in self._callback_manager.callbacks:
callback(__name, __value)
elif __name.startswith(f"_{self.__class__.__name__}__"):
logger.warning(
f"Warning: You should not set private but rather protected attributes! "
f"Use {__name.replace(f'_{self.__class__.__name__}__', '_')} instead "
f"of {__name.replace(f'_{self.__class__.__name__}__', '__')}."
"Warning: You should not set private but rather protected attributes! "
"Use %s instead of %s.",
__name.replace(f"_{self.__class__.__name__}__", "_"),
__name.replace(f"_{self.__class__.__name__}__", "__"),
)
def __check_instance_classes(self) -> None:
@ -95,9 +98,9 @@ class DataService(rpyc.Service, AbstractDataService):
# every class defined by the user should inherit from DataService if it is
# assigned to a public attribute
if not attr_name.startswith("_"):
warn_if_instance_class_does_not_inherit_from_DataService(attr_value)
warn_if_instance_class_does_not_inherit_from_data_service(attr_value)
def __set_attribute_based_on_type( # noqa:CFQ002
def __set_attribute_based_on_type( # noqa: PLR0913
self,
target_obj: Any,
attr_name: str,
@ -154,9 +157,11 @@ class DataService(rpyc.Service, AbstractDataService):
)
if hasattr(self, "_state_manager"):
getattr(self, "_state_manager").save_state()
self._state_manager.save_state() # type: ignore[reportGeneralTypeIssue]
def load_DataService_from_JSON(self, json_dict: dict[str, Any]) -> None:
def load_DataService_from_JSON( # noqa: N802
self, json_dict: dict[str, Any]
) -> None:
warnings.warn(
"'load_DataService_from_JSON' is deprecated and will be removed in a "
"future version. "
@ -178,8 +183,9 @@ class DataService(rpyc.Service, AbstractDataService):
class_attr_is_read_only = nested_class_dict["readonly"]
if class_attr_is_read_only:
logger.debug(
f'Attribute "{path}" is read-only. Ignoring value from JSON '
"file..."
"Attribute '%s' is read-only. Ignoring value from JSON "
"file...",
path,
)
continue
# Split the path into parts
@ -193,11 +199,14 @@ class DataService(rpyc.Service, AbstractDataService):
self.update_DataService_attribute(parts[:-1], attr_name, value)
else:
logger.info(
f'Attribute type of "{path}" changed from "{value_type}" to '
f'"{class_value_type}". Ignoring value from JSON file...'
"Attribute type of '%s' changed from '%s' to "
"'%s'. Ignoring value from JSON file...",
path,
value_type,
class_value_type,
)
def serialize(self) -> dict[str, dict[str, Any]]: # noqa
def serialize(self) -> dict[str, dict[str, Any]]:
"""
Serializes the instance into a dictionary, preserving the structure of the
instance.
@ -216,7 +225,7 @@ class DataService(rpyc.Service, AbstractDataService):
"""
return Serializer.serialize_object(self)["value"]
def update_DataService_attribute(
def update_DataService_attribute( # noqa: N802
self,
path_list: list[str],
attr_name: str,


@ -3,7 +3,7 @@ from typing import Any
import pydase.units as u
from pydase.utils.warnings import (
warn_if_instance_class_does_not_inherit_from_DataService,
warn_if_instance_class_does_not_inherit_from_data_service,
)
@ -36,14 +36,14 @@ class DataServiceList(list):
self._callbacks = callback_list
for item in args[0]:
warn_if_instance_class_does_not_inherit_from_DataService(item)
warn_if_instance_class_does_not_inherit_from_data_service(item)
# prevent gc to delete the passed list by keeping a reference
self._original_list = args[0]
super().__init__(*args, **kwargs) # type: ignore
super().__init__(*args, **kwargs) # type: ignore[reportUnknownMemberType]
def __setitem__(self, key: int, value: Any) -> None: # type: ignore
def __setitem__(self, key: int, value: Any) -> None: # type: ignore[override]
current_value = self.__getitem__(key)
# parse ints into floats if current value is a float
@ -52,7 +52,7 @@ class DataServiceList(list):
if isinstance(current_value, u.Quantity):
value = u.convert_to_quantity(value, str(current_value.u))
super().__setitem__(key, value) # type: ignore
super().__setitem__(key, value) # type: ignore[reportUnknownMemberType]
for callback in self._callbacks:
callback(key, value)


@ -41,7 +41,7 @@ def load_state(func: Callable[..., Any]) -> Callable[..., Any]:
... self._name = value
"""
func._load_state = True # type: ignore
func._load_state = True # type: ignore[attr-defined]
return func
@ -51,7 +51,7 @@ def has_load_state_decorator(prop: property) -> bool:
"""
try:
return getattr(prop.fset, "_load_state")
return prop.fset._load_state # type: ignore[union-attr]
except AttributeError:
return False
@ -96,13 +96,15 @@ class StateManager:
update.
"""
def __init__(self, service: "DataService", filename: Optional[str | Path] = None):
def __init__(
self, service: "DataService", filename: Optional[str | Path] = None
) -> None:
self.filename = getattr(service, "_filename", None)
if filename is not None:
if self.filename is not None:
logger.warning(
f"Overwriting filename {self.filename!r} with {filename!r}."
"Overwriting filename '%s' with '%s'.", self.filename, filename
)
self.filename = filename
@ -136,7 +138,7 @@ class StateManager:
"""
# Traverse the serialized representation and set the attributes of the class
json_dict = self._get_state_dict_from_JSON_file()
json_dict = self._get_state_dict_from_json_file()
if json_dict == {}:
logger.debug("Could not load the service state.")
return
@ -155,18 +157,19 @@ class StateManager:
self.set_service_attribute_value_by_path(path, value)
else:
logger.info(
f"Attribute type of {path!r} changed from {value_type!r} to "
f"{class_attr_value_type!r}. Ignoring value from JSON file..."
"Attribute type of '%s' changed from '%s' to "
"'%s'. Ignoring value from JSON file...",
path,
value_type,
class_attr_value_type,
)
def _get_state_dict_from_JSON_file(self) -> dict[str, Any]:
if self.filename is not None:
# Check if the file specified by the filename exists
if os.path.exists(self.filename):
with open(self.filename, "r") as f:
# Load JSON data from file and update class attributes with these
# values
return cast(dict[str, Any], json.load(f))
def _get_state_dict_from_json_file(self) -> dict[str, Any]:
if self.filename is not None and os.path.exists(self.filename):
with open(self.filename) as f:
# Load JSON data from file and update class attributes with these
# values
return cast(dict[str, Any], json.load(f))
return {}
def set_service_attribute_value_by_path(
@ -192,7 +195,7 @@ class StateManager:
# This will also filter out methods as they are 'read-only'
if current_value_dict["readonly"]:
logger.debug(f"Attribute {path!r} is read-only. Ignoring new value...")
logger.debug("Attribute '%s' is read-only. Ignoring new value...", path)
return
converted_value = self.__convert_value_if_needed(value, current_value_dict)
@ -201,7 +204,7 @@ class StateManager:
if self.__attr_value_has_changed(converted_value, current_value_dict["value"]):
self.__update_attribute_by_path(path, converted_value)
else:
logger.debug(f"Value of attribute {path!r} has not changed...")
logger.debug("Value of attribute '%s' has not changed...", path)
def __attr_value_has_changed(self, value_object: Any, current_value: Any) -> bool:
"""Check if the serialized value of `value_object` differs from `current_value`.
@ -262,8 +265,9 @@ class StateManager:
has_decorator = has_load_state_decorator(prop)
if not has_decorator:
logger.debug(
f"Property {attr_name!r} has no '@load_state' decorator. "
"Ignoring value from JSON file..."
"Property '%s' has no '@load_state' decorator. "
"Ignoring value from JSON file...",
attr_name,
)
return has_decorator
return True
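Throughout the commit, f-string log messages become %-style format strings with arguments (ruff's G004 flags f-strings in logging calls): interpolation is deferred until a handler actually emits the record. A minimal before/after sketch:

```python
import logging

logger = logging.getLogger(__name__)
path = "subservice.name"

# Eager: the f-string is rendered even when DEBUG is disabled.
logger.debug(f"Value of attribute {path!r} has not changed...")

# Lazy: logging interpolates only if the record is actually handled.
logger.debug("Value of attribute '%s' has not changed...", path)
```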


@ -3,7 +3,6 @@ from __future__ import annotations
import asyncio
import inspect
import logging
from collections.abc import Callable
from functools import wraps
from typing import TYPE_CHECKING, Any, TypedDict
@ -12,6 +11,8 @@ from pydase.data_service.data_service_list import DataServiceList
from pydase.utils.helpers import get_class_and_instance_attributes
if TYPE_CHECKING:
from collections.abc import Callable
from .data_service import DataService
logger = logging.getLogger(__name__)
@ -94,7 +95,7 @@ class TaskManager:
self._set_start_and_stop_for_async_methods()
def _set_start_and_stop_for_async_methods(self) -> None: # noqa: C901
def _set_start_and_stop_for_async_methods(self) -> None:
# inspect the methods of the class
for name, method in inspect.getmembers(
self.service, predicate=inspect.iscoroutinefunction
@ -111,18 +112,18 @@ class TaskManager:
start_method(*args)
else:
logger.warning(
f"No start method found for service '{service_name}'"
"No start method found for service '%s'", service_name
)
def start_autostart_tasks(self) -> None:
self._initiate_task_startup()
attrs = get_class_and_instance_attributes(self.service)
for _, attr_value in attrs.items():
for attr_value in attrs.values():
if isinstance(attr_value, AbstractDataService):
attr_value._task_manager.start_autostart_tasks()
elif isinstance(attr_value, DataServiceList):
for i, item in enumerate(attr_value):
for item in attr_value:
if isinstance(item, AbstractDataService):
item._task_manager.start_autostart_tasks()
@ -145,7 +146,7 @@ class TaskManager:
return stop_task
def _make_start_task( # noqa
def _make_start_task( # noqa: C901
self, name: str, method: Callable[..., Any]
) -> Callable[..., Any]:
"""
@ -161,7 +162,7 @@ class TaskManager:
"""
@wraps(method)
def start_task(*args: Any, **kwargs: Any) -> None:
def start_task(*args: Any, **kwargs: Any) -> None: # noqa: C901
def task_done_callback(task: asyncio.Task[None], name: str) -> None:
"""Handles tasks that have finished.
@ -179,8 +180,10 @@ class TaskManager:
if exception is not None:
# Handle the exception, or you can re-raise it.
logger.error(
f"Task '{name}' encountered an exception: "
f"{type(exception).__name__}: {exception}"
"Task '%s' encountered an exception: %s: %s",
name,
type(exception).__name__,
exception,
)
raise exception
@ -188,7 +191,7 @@ class TaskManager:
try:
await method(*args, **kwargs)
except asyncio.CancelledError:
logger.info(f"Task {name} was cancelled")
logger.info("Task '%s' was cancelled", name)
if not self.tasks.get(name):
# Get the signature of the coroutine method to start
@ -207,7 +210,7 @@ class TaskManager:
# with the 'kwargs' dictionary. If a parameter is specified in both
# 'args_padded' and 'kwargs', the value from 'kwargs' is used.
kwargs_updated = {
**dict(zip(parameter_names, args_padded)),
**dict(zip(parameter_names, args_padded, strict=True)),
**kwargs,
}
@ -230,6 +233,6 @@ class TaskManager:
for callback in self.task_status_change_callbacks:
callback(name, kwargs_updated)
else:
logger.error(f"Task `{name}` is already running!")
logger.error("Task '%s' is already running!", name)
return start_task
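`zip(..., strict=True)` (Python 3.10+, enforced by ruff's B905) raises instead of silently truncating when the iterables differ in length; since the args are padded to match above, `strict=True` only guards against regressions. An illustration of the mismatch case:

```python
parameter_names = ["a", "b"]
args_padded = [1, 2, 3]

try:
    dict(zip(parameter_names, args_padded, strict=True))
except ValueError as err:
    # strict=True surfaces the mismatch instead of dropping the extra value.
    print(err)
```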


@ -10,13 +10,12 @@ from types import FrameType
from typing import Any, Optional, Protocol, TypedDict
import uvicorn
from rpyc import ForkingServer, ThreadedServer # type: ignore
from rpyc import ForkingServer, ThreadedServer # type: ignore[import-untyped]
from uvicorn.server import HANDLED_SIGNALS
from pydase import DataService
from pydase.data_service.state_manager import StateManager
from pydase.utils.serializer import dump, get_nested_dict_by_path
from pydase.version import __version__
from .web_server import WebAPI
@ -110,8 +109,6 @@ class Server:
Filename of the file managing the service state persistence. Defaults to None.
use_forking_server: bool
Whether to use ForkingServer for multiprocessing. Default is False.
web_settings: dict[str, Any]
Additional settings for the web server. Default is {} (an empty dictionary).
additional_servers : list[AdditionalServer]
A list of additional servers to run alongside the main server. Each entry in the
list should be a dictionary with the following structure:
@ -164,27 +161,27 @@ class Server:
Additional keyword arguments.
"""
def __init__( # noqa: CFQ002
def __init__( # noqa: PLR0913
self,
service: DataService,
host: str = "0.0.0.0",
host: str = "127.0.0.1",
rpc_port: int = 18871,
web_port: int = 8001,
enable_rpc: bool = True,
enable_web: bool = True,
filename: Optional[str | Path] = None,
use_forking_server: bool = False,
web_settings: dict[str, Any] = {},
additional_servers: list[AdditionalServer] = [],
additional_servers: list[AdditionalServer] | None = None,
**kwargs: Any,
) -> None:
if additional_servers is None:
additional_servers = []
self._service = service
self._host = host
self._rpc_port = rpc_port
self._web_port = web_port
self._enable_rpc = enable_rpc
self._enable_web = enable_web
self._web_settings = web_settings
self._kwargs = kwargs
self._loop: asyncio.AbstractEventLoop
self._rpc_server_type = ForkingServer if use_forking_server else ThreadedServer
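Replacing the `additional_servers: list[AdditionalServer] = []` default with a `None` sentinel fixes the shared-mutable-default pitfall (ruff's B006): a default list is created once at function definition and reused across all calls. A minimal sketch:

```python
def append_bad(item: int, acc: list[int] = []) -> list[int]:  # noqa: B006
    acc.append(item)  # the same list object is reused on every call
    return acc


def append_good(item: int, acc: list[int] | None = None) -> list[int]:
    if acc is None:
        acc = []  # fresh list per call
    acc.append(item)
    return acc


assert append_bad(1) == [1]
assert append_bad(2) == [1, 2]  # state leaked from the previous call
assert append_good(2) == [2]
```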
@ -192,17 +189,6 @@ class Server:
self.should_exit = False
self.servers: dict[str, asyncio.Future[Any]] = {}
self.executor: ThreadPoolExecutor | None = None
self._info: dict[str, Any] = {
"name": self._service.get_service_name(),
"version": __version__,
"rpc_port": self._rpc_port,
"web_port": self._web_port,
"enable_rpc": self._enable_rpc,
"enable_web": self._enable_web,
"web_settings": self._web_settings,
"additional_servers": [],
**kwargs,
}
self._state_manager = StateManager(self._service, filename)
if getattr(self._service, "_filename", None) is not None:
self._service._state_manager = self._state_manager
@ -234,7 +220,7 @@ class Server:
async def serve(self) -> None:
process_id = os.getpid()
logger.info(f"Started server process [{process_id}]")
logger.info("Started server process [%s]", process_id)
await self.startup()
if self.should_exit:
@ -242,7 +228,7 @@ class Server:
await self.main_loop()
await self.shutdown()
logger.info(f"Finished server process [{process_id}]")
logger.info("Finished server process [%s]", process_id)
async def startup(self) -> None: # noqa: C901
self._loop = asyncio.get_running_loop()
@ -270,28 +256,18 @@ class Server:
port=server["port"],
host=self._host,
state_manager=self._state_manager,
info=self._info,
**server["kwargs"],
)
server_name = (
addin_server.__module__ + "." + addin_server.__class__.__name__
)
self._info["additional_servers"].append(
{
"name": server_name,
"port": server["port"],
"host": self._host,
**server["kwargs"],
}
)
future_or_task = self._loop.create_task(addin_server.serve())
self.servers[server_name] = future_or_task
if self._enable_web:
self._wapi: WebAPI = WebAPI(
service=self._service,
info=self._info,
state_manager=self._state_manager,
**self._kwargs,
)
@ -302,10 +278,6 @@ class Server:
)
def sio_callback(parent_path: str, name: str, value: Any) -> None:
# TODO: an error happens when an attribute is set to a list
# > File "/usr/lib64/python3.11/json/encoder.py", line 180, in default
# > raise TypeError(f'Object of type {o.__class__.__name__} '
# > TypeError: Object of type list is not JSON serializable
full_access_path = ".".join([*parent_path.split(".")[1:], name])
cached_value_dict = deepcopy(
get_nested_dict_by_path(self._state_manager.cache, full_access_path)
@ -319,7 +291,7 @@ class Server:
async def notify() -> None:
try:
await self._wapi.sio.emit( # type: ignore
await self._wapi.sio.emit( # type: ignore[reportUnknownMemberType]
"notify",
{
"data": {
@ -330,7 +302,7 @@ class Server:
},
)
except Exception as e:
logger.warning(f"Failed to send notification: {e}")
logger.warning("Failed to send notification: %s", e)
self._loop.create_task(notify())
@ -338,7 +310,7 @@ class Server:
# overwrite uvicorn's signal handlers, otherwise it will bogart SIGINT and
# SIGTERM, which makes it impossible to escape out of
web_server.install_signal_handlers = lambda: None # type: ignore
web_server.install_signal_handlers = lambda: None # type: ignore[method-assign]
future_or_task = self._loop.create_task(web_server.serve())
self.servers["web"] = future_or_task
@ -349,7 +321,7 @@ class Server:
async def shutdown(self) -> None:
logger.info("Shutting down")
logger.info(f"Saving data to {self._state_manager.filename}.")
logger.info("Saving data to %s.", self._state_manager.filename)
if self._state_manager is not None:
self._state_manager.save_state()
@ -366,9 +338,9 @@ class Server:
try:
await task
except asyncio.CancelledError:
logger.debug(f"Cancelled {server_name} server.")
logger.debug("Cancelled '%s' server.", server_name)
except Exception as e:
logger.warning(f"Unexpected exception: {e}.")
logger.warning("Unexpected exception: %s", e)
async def __cancel_tasks(self) -> None:
for task in asyncio.all_tasks(self._loop):
@ -376,9 +348,9 @@ class Server:
try:
await task
except asyncio.CancelledError:
logger.debug(f"Cancelled task {task.get_coro()}.")
logger.debug("Cancelled task '%s'.", task.get_coro())
except Exception as e:
logger.warning(f"Unexpected exception: {e}.")
logger.exception("Unexpected exception: %s", e)
def install_signal_handlers(self) -> None:
if threading.current_thread() is not threading.main_thread():
@ -390,11 +362,13 @@ class Server:
def handle_exit(self, sig: int = 0, frame: Optional[FrameType] = None) -> None:
if self.should_exit and sig == signal.SIGINT:
logger.warning(f"Received signal {sig}, forcing exit...")
logger.warning("Received signal '%s', forcing exit...", sig)
os._exit(1)
else:
self.should_exit = True
logger.warning(f"Received signal {sig}, exiting... (CTRL+C to force quit)")
logger.warning(
"Received signal '%s', exiting... (CTRL+C to force quit)", sig
)
def custom_exception_handler(
self, loop: asyncio.AbstractEventLoop, context: dict[str, Any]
@ -411,7 +385,7 @@ class Server:
async def emit_exception() -> None:
try:
await self._wapi.sio.emit( # type: ignore
await self._wapi.sio.emit( # type: ignore[reportUnknownMemberType]
"exception",
{
"data": {
@ -421,7 +395,7 @@ class Server:
},
)
except Exception as e:
logger.warning(f"Failed to send notification: {e}")
logger.exception("Failed to send notification: %s", e)
loop.create_task(emit_exception())
else:


@ -2,7 +2,7 @@ import logging
from pathlib import Path
from typing import Any, TypedDict
import socketio # type: ignore
import socketio # type: ignore[import-untyped]
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse
@ -70,23 +70,21 @@ class WebAPI:
__sio_app: socketio.ASGIApp
__fastapi_app: FastAPI
def __init__( # noqa: CFQ002
def __init__( # noqa: PLR0913
self,
service: DataService,
state_manager: StateManager,
frontend: str | Path | None = None,
css: str | Path | None = None,
enable_CORS: bool = True,
info: dict[str, Any] = {},
enable_cors: bool = True,
*args: Any,
**kwargs: Any,
):
) -> None:
self.service = service
self.state_manager = state_manager
self.frontend = frontend
self.css = css
self.enable_CORS = enable_CORS
self.info = info
self.enable_cors = enable_cors
self.args = args
self.kwargs = kwargs
@ -100,14 +98,14 @@ class WebAPI:
def setup_socketio(self) -> None:
# the socketio ASGI app, to notify clients when params update
if self.enable_CORS:
if self.enable_cors:
sio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*")
else:
sio = socketio.AsyncServer(async_mode="asgi")
@sio.event # type: ignore
@sio.event # type: ignore[reportUnknownMemberType]
def set_attribute(sid: str, data: UpdateDict) -> Any:
logger.debug(f"Received frontend update: {data}")
logger.debug("Received frontend update: %s", data)
path_list = [*data["parent_path"].split("."), data["name"]]
path_list.remove("DataService") # always at the start, does not do anything
path = ".".join(path_list)
@ -115,9 +113,9 @@ class WebAPI:
path=path, value=data["value"]
)
@sio.event # type: ignore
@sio.event # type: ignore[reportUnknownMemberType]
def run_method(sid: str, data: RunMethodDict) -> Any:
logger.debug(f"Running method: {data}")
logger.debug("Running method: %s", data)
path_list = [*data["parent_path"].split("."), data["name"]]
path_list.remove("DataService") # always at the start, does not do anything
method = get_object_attr_from_path_list(self.service, path_list)
@ -126,10 +124,10 @@ class WebAPI:
self.__sio = sio
self.__sio_app = socketio.ASGIApp(self.__sio)
def setup_fastapi_app(self) -> None: # noqa
def setup_fastapi_app(self) -> None:
app = FastAPI()
if self.enable_CORS:
if self.enable_cors:
app.add_middleware(
CORSMiddleware,
allow_credentials=True,
@ -147,10 +145,6 @@ class WebAPI:
def name() -> str:
return self.service.get_service_name()
@app.get("/info")
def info() -> dict[str, Any]:
return self.info
@app.get("/service-properties")
def service_properties() -> dict[str, Any]:
return self.state_manager.cache


@ -15,7 +15,7 @@ class QuantityDict(TypedDict):
def convert_to_quantity(
value: QuantityDict | float | int | Quantity, unit: str = ""
value: QuantityDict | float | Quantity, unit: str = ""
) -> Quantity:
"""
Convert a given value into a pint.Quantity object with the specified unit.
@ -53,4 +53,4 @@ def convert_to_quantity(
quantity = float(value["magnitude"]) * Unit(value["unit"])
else:
quantity = value
return quantity # type: ignore
return quantity # type: ignore[reportUnknownMemberType]


@ -54,12 +54,12 @@ def get_object_attr_from_path_list(target_obj: Any, path: list[str]) -> Any:
index_str = index_str.replace("]", "")
index = int(index_str)
target_obj = getattr(target_obj, attr)[index]
except ValueError:
except ValueError: # noqa: PERF203
# No index, so just get the attribute
target_obj = getattr(target_obj, part)
except AttributeError:
# The attribute doesn't exist
logger.debug(f"Attribute {part} does not exist in the object.")
logger.debug("Attribute % does not exist in the object.", part)
return None
return target_obj
@ -141,7 +141,7 @@ def update_value_if_changed(
if getattr(target, attr_name_or_index) != new_value:
setattr(target, attr_name_or_index, new_value)
else:
logger.error(f"Incompatible arguments: {target}, {attr_name_or_index}.")
logger.error("Incompatible arguments: %s, %s.", target, attr_name_or_index)
def parse_list_attr_and_index(attr_string: str) -> tuple[str, Optional[int]]:
@ -175,7 +175,7 @@ def parse_list_attr_and_index(attr_string: str) -> tuple[str, Optional[int]]:
if index_part.isdigit():
index = int(index_part)
else:
logger.error(f"Invalid index format in key: {attr_name}")
logger.error("Invalid index format in key: %s", attr_name)
return attr_name, index


@ -4,7 +4,7 @@ import sys
from copy import copy
from typing import Optional
import socketio
import socketio # type: ignore[import-untyped]
import uvicorn.logging
from uvicorn.config import LOGGING_CONFIG
@ -20,7 +20,7 @@ class DefaultFormatter(uvicorn.logging.ColourizedFormatter):
for formatting the output, instead of the plain text message.
"""
def formatMessage(self, record: logging.LogRecord) -> str:
def formatMessage(self, record: logging.LogRecord) -> str: # noqa: N802
recordcopy = copy(record)
levelname = recordcopy.levelname
seperator = " " * (8 - len(recordcopy.levelname))
@ -33,7 +33,7 @@ class DefaultFormatter(uvicorn.logging.ColourizedFormatter):
return logging.Formatter.formatMessage(self, recordcopy)
def should_use_colors(self) -> bool:
return sys.stderr.isatty() # pragma: no cover
return sys.stderr.isatty()
class SocketIOHandler(logging.Handler):
@ -74,7 +74,7 @@ def setup_logging(level: Optional[str | int] = None) -> None:
with an option to override the level. By default, in a development environment, the
log level is set to DEBUG, whereas in other environments, it is set to INFO.
Parameters:
Args:
level (Optional[str | int]):
A specific log level to set for the application. If None, the log level is
determined based on the application's operation mode. Accepts standard log
@ -123,7 +123,10 @@ def setup_logging(level: Optional[str | int] = None) -> None:
# add formatter to ch
ch.setFormatter(
DefaultFormatter(
fmt="%(asctime)s.%(msecs)03d | %(levelprefix)s | %(name)s:%(funcName)s:%(lineno)d - %(message)s",
fmt=(
"%(asctime)s.%(msecs)03d | %(levelprefix)s | "
"%(name)s:%(funcName)s:%(lineno)d - %(message)s"
),
datefmt="%Y-%m-%d %H:%M:%S",
)
)
@ -140,7 +143,8 @@ def setup_logging(level: Optional[str | int] = None) -> None:
"fmt"
] = "%(asctime)s.%(msecs)03d | %(levelprefix)s %(message)s"
LOGGING_CONFIG["formatters"]["default"]["datefmt"] = "%Y-%m-%d %H:%M:%S"
LOGGING_CONFIG["formatters"]["access"][
"fmt"
] = '%(asctime)s.%(msecs)03d | %(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s'
LOGGING_CONFIG["formatters"]["access"]["fmt"] = (
"%(asctime)s.%(msecs)03d | %(levelprefix)s %(client_addr)s "
'- "%(request_line)s" %(status_code)s'
)
LOGGING_CONFIG["formatters"]["access"]["datefmt"] = "%Y-%m-%d %H:%M:%S"


@ -28,7 +28,7 @@ class Serializer:
def serialize_object(obj: Any) -> dict[str, Any]:
result: dict[str, Any] = {}
if isinstance(obj, AbstractDataService):
result = Serializer._serialize_DataService(obj)
result = Serializer._serialize_data_service(obj)
elif isinstance(obj, list):
result = Serializer._serialize_list(obj)
@ -38,7 +38,7 @@ class Serializer:
# Special handling for u.Quantity
elif isinstance(obj, u.Quantity):
result = Serializer._serialize_Quantity(obj)
result = Serializer._serialize_quantity(obj)
# Handling for Enums
elif isinstance(obj, Enum):
@ -83,7 +83,7 @@ class Serializer:
}
@staticmethod
def _serialize_Quantity(obj: u.Quantity) -> dict[str, Any]:
def _serialize_quantity(obj: u.Quantity) -> dict[str, Any]:
obj_type = "Quantity"
readonly = False
doc = get_attribute_doc(obj)
@ -154,7 +154,7 @@ class Serializer:
}
@staticmethod
def _serialize_DataService(obj: AbstractDataService) -> dict[str, Any]:
def _serialize_data_service(obj: AbstractDataService) -> dict[str, Any]:
readonly = False
doc = get_attribute_doc(obj)
obj_type = type(obj).__name__
@ -180,9 +180,7 @@ class Serializer:
# Skip keys that start with "start_" or "stop_" and end with an async
# method name
if (key.startswith("start_") or key.startswith("stop_")) and key.split(
"_", 1
)[1] in {
if key.startswith(("start_", "stop_")) and key.split("_", 1)[1] in {
name
for name, _ in inspect.getmembers(
obj, predicate=inspect.iscoroutinefunction
@ -293,6 +291,7 @@ def get_nested_dict_by_path(
def get_next_level_dict_by_key(
serialization_dict: dict[str, Any],
attr_name: str,
*,
allow_append: bool = False,
) -> dict[str, Any]:
"""
@ -366,23 +365,23 @@ def generate_serialized_data_paths(
attribute in the serialized data.
"""
paths = []
paths: list[str] = []
for key, value in data.items():
if value["type"] == "method":
# ignoring methods
continue
new_path = f"{parent_path}.{key}" if parent_path else key
if isinstance(value["value"], dict) and value["type"] != "Quantity":
paths.extend(generate_serialized_data_paths(value["value"], new_path)) # type: ignore
paths.extend(generate_serialized_data_paths(value["value"], new_path))
elif isinstance(value["value"], list):
for index, item in enumerate(value["value"]):
indexed_key_path = f"{new_path}[{index}]"
if isinstance(item["value"], dict):
paths.extend( # type: ignore
paths.extend(
generate_serialized_data_paths(item["value"], indexed_key_path)
)
else:
paths.append(indexed_key_path) # type: ignore
paths.append(indexed_key_path)
else:
paths.append(new_path) # type: ignore
paths.append(new_path)
return paths
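The bare `*` added to `get_next_level_dict_by_key` makes `allow_append` keyword-only, so the boolean flag must be spelled out at call sites (the "boolean trap" that the globally ignored FBT rules target). A hypothetical helper mirroring that style:

```python
def lookup(data: dict[str, int], key: str, *, allow_append: bool = False) -> int:
    if key not in data and allow_append:
        data[key] = 0
    return data[key]


d: dict[str, int] = {}
print(lookup(d, "x", allow_append=True))  # ok: the flag is named
# lookup(d, "x", True) would raise TypeError: too many positional arguments
```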


@ -5,7 +5,7 @@ from pydase.utils.helpers import get_component_class_names
logger = logging.getLogger(__name__)
def warn_if_instance_class_does_not_inherit_from_DataService(__value: object) -> None:
def warn_if_instance_class_does_not_inherit_from_data_service(__value: object) -> None:
base_class_name = __value.__class__.__base__.__name__
module_name = __value.__class__.__module__
@ -18,9 +18,10 @@ def warn_if_instance_class_does_not_inherit_from_DataService(__value: object) ->
"_abc",
]
and base_class_name
not in ["DataService", "list", "Enum"] + get_component_class_names()
not in ["DataService", "list", "Enum", *get_component_class_names()]
and type(__value).__name__ not in ["CallbackManager", "TaskManager", "Quantity"]
):
logger.warning(
f"Warning: Class {type(__value).__name__} does not inherit from DataService."
"Warning: Class '%s' does not inherit from DataService.",
type(__value).__name__,
)


@ -1,4 +1,4 @@
from importlib.metadata import distribution
__version__ = distribution("pydase").version
__major__, __minor__, __patch__ = [int(v) for v in __version__.split(".")]
__major__, __minor__, __patch__ = (int(v) for v in __version__.split("."))
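Unpacking a generator expression instead of a list comprehension skips the intermediate list (ruff's UP027); for a fixed three-part version string the result is identical:

```python
version = "1.2.3"  # illustrative value
major, minor, patch = (int(v) for v in version.split("."))
assert (major, minor, patch) == (1, 2, 3)
```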


@ -1,4 +1,4 @@
from pytest import CaptureFixture, LogCaptureFixture
from pytest import LogCaptureFixture
from pydase.components.number_slider import NumberSlider
from pydase.data_service.data_service import DataService
@ -38,6 +38,6 @@ def test_NumberSlider(caplog: LogCaptureFixture) -> None:
def test_init_error(caplog: LogCaptureFixture) -> None: # noqa
number_slider = NumberSlider(type="str") # type: ignore # noqa
number_slider = NumberSlider(type_="str") # type: ignore # noqa
assert "Unknown type 'str'. Using 'float'" in caplog.text


@ -162,7 +162,7 @@ def test_load_state(tmp_path: Path, caplog: LogCaptureFixture):
"Ignoring value from JSON file..."
) in caplog.text
assert (
"Attribute type of 'removed_attr' changed from 'str' to None. "
"Attribute type of 'removed_attr' changed from 'str' to 'None'. "
"Ignoring value from JSON file..." in caplog.text
)
assert "Value of attribute 'subservice.name' has not changed..." in caplog.text


@ -15,7 +15,7 @@ def test_setattr_warnings(caplog: LogCaptureFixture) -> None: # noqa
ServiceClass()
assert "Warning: Class SubClass does not inherit from DataService." in caplog.text
assert "Warning: Class 'SubClass' does not inherit from DataService." in caplog.text
def test_private_attribute_warning(caplog: LogCaptureFixture) -> None: # noqa