mirror of
https://github.com/tiqi-group/pydase.git
synced 2025-12-18 12:11:20 +01:00
Compare commits
306 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
49984b7c2e | ||
|
|
39270561b9 | ||
|
|
8ac2c39908 | ||
|
|
0694a3d1ee | ||
|
|
c15ad54e2d | ||
|
|
71721b1286 | ||
|
|
74ceb7f05c | ||
|
|
06d11fff49 | ||
|
|
6d23151d32 | ||
|
|
0faf347376 | ||
|
|
a5fddf7e45 | ||
|
|
83c763bd20 | ||
|
|
9778541ee4 | ||
|
|
8e641c1b84 | ||
|
|
f6bf229c8c | ||
|
|
5a76d76d2b | ||
|
|
3169531a24 | ||
|
|
4bd0092fbf | ||
|
|
569e343e89 | ||
|
|
f2b2ef8dcd | ||
|
|
f70ac05df6 | ||
|
|
e3367efda1 | ||
|
|
3d2de7109b | ||
|
|
534ff4c149 | ||
|
|
0e47f6c4d3 | ||
|
|
b4ef8201f3 | ||
|
|
a97a55712e | ||
|
|
e8a0a7c000 | ||
|
|
6f0d43aa5a | ||
|
|
0e210b8ba6 | ||
|
|
329e0acd81 | ||
|
|
f97cd7eb4e | ||
|
|
3c168243bb | ||
|
|
0944a404dc | ||
|
|
a9c6070ca3 | ||
|
|
75ee71cbf8 | ||
|
|
1e55a4d914 | ||
|
|
aab2b4ee77 | ||
|
|
52d571e551 | ||
|
|
bb415af460 | ||
|
|
c3c1669cf9 | ||
|
|
5378396958 | ||
|
|
b66e964155 | ||
|
|
4fc25c6752 | ||
|
|
44cd9597cb | ||
|
|
e48a7067ec | ||
|
|
8919f6106a | ||
|
|
89b5a9cc9e | ||
|
|
0aa1595da4 | ||
|
|
8f8b3e3bcf | ||
|
|
43e6adcb2e | ||
|
|
3992f491c9 | ||
|
|
df571a8260 | ||
|
|
53713794d6 | ||
|
|
06e642972f | ||
|
|
a7ec7c1536 | ||
|
|
c891642bda | ||
|
|
cc105106ee | ||
|
|
7c7bb193e4 | ||
|
|
92e79579ff | ||
|
|
5d2d34bea3 | ||
|
|
3497962fca | ||
|
|
114a1c6fdc | ||
|
|
1d2ac57ba7 | ||
|
|
99dea381a3 | ||
|
|
e6e5ac84b4 | ||
|
|
246148f513 | ||
|
|
eb0c819037 | ||
|
|
f5d8775141 | ||
|
|
1ec034a62e | ||
|
|
93f0627534 | ||
|
|
ad2ae704e9 | ||
|
|
de5340d6fd | ||
|
|
b80a3ec6a1 | ||
|
|
f3853ef836 | ||
|
|
56ae9086b5 | ||
|
|
5a2371353a | ||
|
|
09a55f50bd | ||
|
|
abafd1a2b2 | ||
|
|
145ff89072 | ||
|
|
ba5b4e7be4 | ||
|
|
8ee49469d6 | ||
|
|
6997c4a842 | ||
|
|
598449e893 | ||
|
|
4802f19720 | ||
|
|
a04bd14e50 | ||
|
|
c60730f21b | ||
|
|
d5cd97ea57 | ||
|
|
0136885207 | ||
|
|
c04e048e21 | ||
|
|
9e9d3f17bc | ||
|
|
e576f6eb80 | ||
|
|
e57fe10c9e | ||
|
|
f27f513bf8 | ||
|
|
de4e4ed178 | ||
|
|
cb2687a4b9 | ||
|
|
ab794d780b | ||
|
|
617eed4d96 | ||
|
|
d517bd0489 | ||
|
|
d0869b707b | ||
|
|
eab99df9d1 | ||
|
|
9d36f99404 | ||
|
|
7b7ef0eb97 | ||
|
|
92f14c6788 | ||
|
|
4746470aee | ||
|
|
f5627e6a2f | ||
|
|
a769f4eb3b | ||
|
|
3970d5a17b | ||
|
|
a89db46d5e | ||
|
|
f67591c7ac | ||
|
|
fdcaa1c1ed | ||
|
|
613b1dd6a4 | ||
|
|
914997cc6b | ||
|
|
667bb949cc | ||
|
|
acaac6f0a6 | ||
|
|
e9df89765d | ||
|
|
123edb9e86 | ||
|
|
69328d6f68 | ||
|
|
0cd3a7e8a8 | ||
|
|
abd77e053d | ||
|
|
ebb8b4be8b | ||
|
|
a83e0c6c7f | ||
|
|
64dc09faf7 | ||
|
|
e2fb9ebae5 | ||
|
|
4a43bda5e2 | ||
|
|
f693fa9ba2 | ||
|
|
9820bda4b5 | ||
|
|
f5116607b9 | ||
|
|
0ea997384c | ||
|
|
28410a97f5 | ||
|
|
f6eef7085e | ||
|
|
a76035f443 | ||
|
|
2ab4d1c00a | ||
|
|
a9d577820f | ||
|
|
f5e6dca16a | ||
|
|
4a45d0d438 | ||
|
|
3cc6399f60 | ||
|
|
dc1c7e80f4 | ||
|
|
95b5907a8d | ||
|
|
675fe86e7e | ||
|
|
60c2cca8f5 | ||
|
|
e4fb1c66a1 | ||
|
|
1af4f98a48 | ||
|
|
eddf3dd2fc | ||
|
|
c2a22d4456 | ||
|
|
aa9f1ba35a | ||
|
|
2208e5f66e | ||
|
|
96f1ee16b7 | ||
|
|
4f7c6ccde4 | ||
|
|
856f5d0c79 | ||
|
|
b60995d218 | ||
|
|
380f98edb5 | ||
|
|
30e4ebb670 | ||
|
|
bdf5512bcc | ||
|
|
a323ce169e | ||
|
|
d18be54284 | ||
|
|
a750644c20 | ||
|
|
45ede860d9 | ||
|
|
a060836304 | ||
|
|
963e449adb | ||
|
|
1776fc8623 | ||
|
|
aed0dd9493 | ||
|
|
784d49d90c | ||
|
|
8dd05ac5e3 | ||
|
|
27bb73a2da | ||
|
|
6b643210d7 | ||
|
|
24f1574168 | ||
|
|
b594a91a18 | ||
|
|
e708d6f1c3 | ||
|
|
6c2c5d4ad1 | ||
|
|
d0377be455 | ||
|
|
5e136c2784 | ||
|
|
0a94b32011 | ||
|
|
14b5219915 | ||
|
|
7c573cdc10 | ||
|
|
393b025648 | ||
|
|
03fee3f88c | ||
|
|
59c7d7bb6f | ||
|
|
dc70f3cfcf | ||
|
|
cdd657f895 | ||
|
|
c9b5547831 | ||
|
|
615bf294e1 | ||
|
|
b6953251b9 | ||
|
|
3440a632ad | ||
|
|
4ef4bab36e | ||
|
|
567617f4e6 | ||
|
|
76545b88de | ||
|
|
f38df58842 | ||
|
|
d057710b60 | ||
|
|
f071bda35f | ||
|
|
2b304cba03 | ||
|
|
f88493d97c | ||
|
|
53ce51991f | ||
|
|
0385e5732e | ||
|
|
20a64099a4 | ||
|
|
16b284da45 | ||
|
|
2833284239 | ||
|
|
8d9160d660 | ||
|
|
c196c82c52 | ||
|
|
d66a3ad015 | ||
|
|
08512e945b | ||
|
|
e4796102be | ||
|
|
2fd4d94dbb | ||
|
|
78c055acf0 | ||
|
|
75a69204b5 | ||
|
|
f852dea9e5 | ||
|
|
49070a7f38 | ||
|
|
fc7092f14c | ||
|
|
b0254daa17 | ||
|
|
b08a976d2a | ||
|
|
fccd5a7c36 | ||
|
|
d643923fd3 | ||
|
|
3132680c50 | ||
|
|
f47a5524b3 | ||
|
|
b32bdabfca | ||
|
|
c5beee5d50 | ||
|
|
55ce32e105 | ||
|
|
621bed94af | ||
|
|
a837e1bce8 | ||
|
|
6ab11394fa | ||
|
|
51c4e2f971 | ||
|
|
c5a2b38914 | ||
|
|
d45b835ea2 | ||
|
|
d2c0b6968e | ||
|
|
728fe958cb | ||
|
|
69c5e0397b | ||
|
|
7f402b45e7 | ||
|
|
c4056d3ca8 | ||
|
|
c13166dddb | ||
|
|
47d64243c3 | ||
|
|
f01ef057bf | ||
|
|
6804cdf3b1 | ||
|
|
2b57df5aac | ||
|
|
2eb0eb84cf | ||
|
|
f8495dc949 | ||
|
|
9ac6e2c56a | ||
|
|
8ae0b7818b | ||
|
|
61c6585ac6 | ||
|
|
b6c956fab8 | ||
|
|
743531c434 | ||
|
|
3ecb6384ad | ||
|
|
1d2325171b | ||
|
|
b149c1b411 | ||
|
|
7e5861ec22 | ||
|
|
5b4c74f1c2 | ||
|
|
7dcec88c9a | ||
|
|
3d42366ada | ||
|
|
eb46a088ee | ||
|
|
69cd86b601 | ||
|
|
81f2281002 | ||
|
|
f7f64bbe92 | ||
|
|
0504a50a08 | ||
|
|
8564df5adc | ||
|
|
a24eb928a8 | ||
|
|
2713dad423 | ||
|
|
6ea4cf3eb7 | ||
|
|
9054f05f30 | ||
|
|
b790b6a6ca | ||
|
|
22f832054e | ||
|
|
2e9ced4e5e | ||
|
|
b654c7d176 | ||
|
|
b5b2fb8c35 | ||
|
|
1bc2bb3605 | ||
|
|
0a77cc1f36 | ||
|
|
d334ec5284 | ||
|
|
d3a74a734a | ||
|
|
43e0c72018 | ||
|
|
27b430333a | ||
|
|
e25acb7e59 | ||
|
|
89f281bd3b | ||
|
|
829e73e2e7 | ||
|
|
04b9976a3b | ||
|
|
785ed92b45 | ||
|
|
6e14837e15 | ||
|
|
5ad15c1cae | ||
|
|
c1f0b7b74d | ||
|
|
5badd86d5a | ||
|
|
b5953f13f7 | ||
|
|
a3c2672458 | ||
|
|
7a78713388 | ||
|
|
8a8375735a | ||
|
|
e61b2a4969 | ||
|
|
453076da86 | ||
|
|
886b086180 | ||
|
|
7b04298ead | ||
|
|
c6a96ba6c0 | ||
|
|
5d7a7c6bdb | ||
|
|
1241d7a128 | ||
|
|
cdd60190a7 | ||
|
|
d144b6c42b | ||
|
|
4abea8785c | ||
|
|
dbc975bd85 | ||
|
|
b04ad0c6a3 | ||
|
|
48e8b7dbaf | ||
|
|
aa85f6453f | ||
|
|
343354e0ee | ||
|
|
b38bb05c69 | ||
|
|
a0dab630f9 | ||
|
|
a9db7848f7 | ||
|
|
a8b14180ad | ||
|
|
26a366842a | ||
|
|
b0e7de2d2c | ||
|
|
bbcba8b39f | ||
|
|
34e46e05ee | ||
|
|
93c2f5ab70 | ||
|
|
106ffbfc40 |
@@ -2,5 +2,3 @@
|
||||
exclude_lines =
|
||||
pragma: no cover
|
||||
if TYPE_CHECKING:
|
||||
omit =
|
||||
src/pydase/utils/logging.py
|
||||
8
.flake8
8
.flake8
@@ -1,8 +0,0 @@
|
||||
[flake8]
|
||||
ignore = E501,W503,FS003,F403,F405,E203
|
||||
include = src
|
||||
max-line-length = 88
|
||||
max-doc-length = 88
|
||||
max-complexity = 7
|
||||
max-expression-complexity = 5.5
|
||||
use_class_attributes_order_strict_mode=True
|
||||
25
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
25
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: ''
|
||||
labels: 'bug'
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
## Describe the bug
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
## To Reproduce
|
||||
Provide steps to reproduce the behaviour, including a minimal code snippet (if applicable):
|
||||
```python
|
||||
# Minimal code snippet that reproduces the error
|
||||
```
|
||||
## Expected behaviour
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
## Screenshot/Video
|
||||
If applicable, add visual content that helps explain your problem.
|
||||
|
||||
## Additional context
|
||||
Add any other context about the problem here.
|
||||
14
.github/workflows/python-package.yml
vendored
14
.github/workflows/python-package.yml
vendored
@@ -20,6 +20,9 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: chartboost/ruff-action@v1
|
||||
with:
|
||||
src: "./src"
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
@@ -28,14 +31,13 @@ jobs:
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
python -m pip install poetry
|
||||
poetry install
|
||||
- name: Lint with flake8
|
||||
run: |
|
||||
# stop the build if there are Python syntax errors or undefined names
|
||||
poetry run flake8 src/pydase --count --show-source --statistics
|
||||
poetry install --with dev
|
||||
- name: Test with pytest
|
||||
run: |
|
||||
poetry run pytest
|
||||
- name: Test with pyright
|
||||
run: |
|
||||
poetry run pyright src/pydase
|
||||
poetry run pyright
|
||||
- name: Test with mypy
|
||||
run: |
|
||||
poetry run mypy src
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -128,6 +128,9 @@ venv.bak/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# ruff
|
||||
.ruff_cache/
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
|
||||
7
.vscode/extensions.json
vendored
Normal file
7
.vscode/extensions.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"charliermarsh.ruff",
|
||||
"ms-python.python",
|
||||
"ms-python.vscode-pylance"
|
||||
]
|
||||
}
|
||||
9
.vscode/launch.json
vendored
9
.vscode/launch.json
vendored
@@ -1,7 +1,4 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
@@ -19,7 +16,7 @@
|
||||
"type": "python",
|
||||
"request": "launch",
|
||||
"module": "bar",
|
||||
"justMyCode": true,
|
||||
"justMyCode": false,
|
||||
"env": {
|
||||
"ENVIRONMENT": "development"
|
||||
}
|
||||
@@ -29,7 +26,7 @@
|
||||
"request": "launch",
|
||||
"name": "react: firefox",
|
||||
"url": "http://localhost:3000",
|
||||
"webRoot": "${workspaceFolder}/frontend",
|
||||
"webRoot": "${workspaceFolder}/frontend"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
29
.vscode/settings.json
vendored
29
.vscode/settings.json
vendored
@@ -1,25 +1,15 @@
|
||||
{
|
||||
"autoDocstring.docstringFormat": "google",
|
||||
"autoDocstring.startOnNewLine": true,
|
||||
"autoDocstring.generateDocstringOnEnter": true,
|
||||
"editor.formatOnSave": true,
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.fixAll.eslint": true
|
||||
},
|
||||
"editor.rulers": [
|
||||
88
|
||||
],
|
||||
"python.defaultInterpreterPath": ".venv/bin/python",
|
||||
"python.formatting.provider": "black",
|
||||
"python.linting.lintOnSave": true,
|
||||
"python.linting.enabled": true,
|
||||
"python.linting.flake8Enabled": true,
|
||||
"python.linting.mypyEnabled": true,
|
||||
"[python]": {
|
||||
"editor.defaultFormatter": "charliermarsh.ruff",
|
||||
"editor.rulers": [
|
||||
88
|
||||
],
|
||||
"editor.tabSize": 4,
|
||||
"editor.detectIndentation": false,
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": true
|
||||
"source.organizeImports": true,
|
||||
"source.fixAll": true
|
||||
}
|
||||
},
|
||||
"[yaml]": {
|
||||
@@ -29,10 +19,9 @@
|
||||
"[typescript][javascript][vue][typescriptreact]": {
|
||||
"editor.tabSize": 2,
|
||||
"editor.defaultFormatter": "rvest.vs-code-prettier-eslint",
|
||||
"editor.formatOnPaste": false, // required
|
||||
"editor.formatOnType": false, // required
|
||||
"editor.formatOnSave": true, // optional
|
||||
"editor.formatOnSaveMode": "file", // required to format on save
|
||||
"editor.formatOnPaste": false,
|
||||
"editor.formatOnType": false,
|
||||
"editor.formatOnSaveMode": "file",
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.fixAll.eslint": true
|
||||
}
|
||||
|
||||
273
README.md
273
README.md
@@ -17,8 +17,13 @@
|
||||
- [Method Components](#method-components)
|
||||
- [DataService Instances (Nested Classes)](#dataservice-instances-nested-classes)
|
||||
- [Custom Components (`pydase.components`)](#custom-components-pydasecomponents)
|
||||
- [`Image`](#image)
|
||||
- [`NumberSlider`](#numberslider)
|
||||
- [`ColouredEnum`](#colouredenum)
|
||||
- [Extending with New Components](#extending-with-new-components)
|
||||
- [Customizing Web Interface Style](#customizing-web-interface-style)
|
||||
- [Understanding Service Persistence](#understanding-service-persistence)
|
||||
- [Controlling Property State Loading with `@load_state`](#controlling-property-state-loading-with-load_state)
|
||||
- [Understanding Tasks in pydase](#understanding-tasks-in-pydase)
|
||||
- [Understanding Units in pydase](#understanding-units-in-pydase)
|
||||
- [Changing the Log Level](#changing-the-log-level)
|
||||
@@ -29,18 +34,21 @@
|
||||
## Features
|
||||
|
||||
<!-- no toc -->
|
||||
* [Simple data service definition through class-based interface](#defining-a-dataService)
|
||||
* [Integrated web interface for interactive access and control of your data service](#accessing-the-web-interface)
|
||||
* [Support for `rpyc` connections, allowing for programmatic control and interaction with your service](#connecting-to-the-service-using-rpyc)
|
||||
* [Component system bridging Python backend with frontend visual representation](#understanding-the-component-system)
|
||||
* [Saving and restoring the service state for service persistence](#understanding-service-persistence)
|
||||
* [Automated task management with built-in start/stop controls and optional autostart](#understanding-tasks-in-pydase)
|
||||
* [Support for units](#understanding-units-in-pydase)
|
||||
- [Simple data service definition through class-based interface](#defining-a-dataService)
|
||||
- [Integrated web interface for interactive access and control of your data service](#accessing-the-web-interface)
|
||||
- [Support for `rpyc` connections, allowing for programmatic control and interaction with your service](#connecting-to-the-service-using-rpyc)
|
||||
- [Component system bridging Python backend with frontend visual representation](#understanding-the-component-system)
|
||||
- [Customizable styling for the web interface through user-defined CSS](#customizing-web-interface-style)
|
||||
- [Saving and restoring the service state for service persistence](#understanding-service-persistence)
|
||||
- [Automated task management with built-in start/stop controls and optional autostart](#understanding-tasks-in-pydase)
|
||||
- [Support for units](#understanding-units-in-pydase)
|
||||
<!-- * Event-based callback functionality for real-time updates
|
||||
* Support for additional servers for specific use-cases -->
|
||||
- Support for additional servers for specific use-cases -->
|
||||
|
||||
## Installation
|
||||
|
||||
<!--installation-start-->
|
||||
|
||||
Install pydase using [`poetry`](https://python-poetry.org/):
|
||||
|
||||
```bash
|
||||
@@ -52,10 +60,13 @@ or `pip`:
|
||||
```bash
|
||||
pip install pydase
|
||||
```
|
||||
|
||||
<!--installation-end-->
|
||||
|
||||
## Usage
|
||||
|
||||
<!--usage-start-->
|
||||
|
||||
Using `pydase` involves three main steps: defining a `DataService` subclass, running the server, and then connecting to the service either programmatically using `rpyc` or through the web interface.
|
||||
|
||||
### Defining a DataService
|
||||
@@ -129,7 +140,7 @@ if __name__ == "__main__":
|
||||
Server(service).run()
|
||||
```
|
||||
|
||||
This will start the server, making your Device service accessible via RPC and a web server at http://localhost:8001.
|
||||
This will start the server, making your Device service accessible via RPC and a web server at [http://localhost:8001](http://localhost:8001).
|
||||
|
||||
### Accessing the Web Interface
|
||||
|
||||
@@ -156,14 +167,19 @@ print(client.voltage) # prints 5.0
|
||||
```
|
||||
|
||||
In this example, replace `<ip_addr>` with the IP address of the machine where the service is running. After establishing a connection, you can interact with the service attributes as if they were local attributes.
|
||||
|
||||
<!--usage-end-->
|
||||
|
||||
## Understanding the Component System
|
||||
|
||||
<!-- Component User Guide Start -->
|
||||
|
||||
In `pydase`, components are fundamental building blocks that bridge the Python backend logic with frontend visual representation and interactions. This system can be understood based on the following categories:
|
||||
|
||||
### Built-in Type and Enum Components
|
||||
|
||||
`pydase` automatically maps standard Python data types to their corresponding frontend components:
|
||||
|
||||
- `str`: Translated into a `StringComponent` on the frontend.
|
||||
- `int` and `float`: Manifested as the `NumberComponent`.
|
||||
- `bool`: Rendered as a `ButtonComponent`.
|
||||
@@ -173,6 +189,7 @@ In `pydase`, components are fundamental building blocks that bridge the Python b
|
||||
### Method Components
|
||||
|
||||
Methods within the `DataService` class have frontend representations:
|
||||
|
||||
- Regular Methods: These are rendered as a `MethodComponent` in the frontend, allowing users to execute the method via an "execute" button.
|
||||
- Asynchronous Methods: These are manifested as the `AsyncMethodComponent` with "start"/"stop" buttons to manage the execution of [tasks](#understanding-tasks-in-pydase).
|
||||
|
||||
@@ -218,77 +235,153 @@ if __name__ == "__main__":
|
||||
|
||||

|
||||
|
||||
**Note** that defining classes within `DataService` classes is not supported (see [this issue](https://github.com/tiqi-group/pydase/issues/16)).
|
||||
**Note** that defining classes within `DataService` classes is not supported (see [this issue](https://github.com/tiqi-group/pydase/issues/16)).
|
||||
|
||||
### Custom Components (`pydase.components`)
|
||||
The custom components in `pydase` have two main parts:
|
||||
|
||||
The custom components in `pydase` have two main parts:
|
||||
|
||||
- A **Python Component Class** in the backend, implementing the logic needed to set, update, and manage the component's state and data.
|
||||
- A **Frontend React Component** that renders and manages user interaction in the browser.
|
||||
|
||||
Below are the components available in the `pydase.components` module, accompanied by their Python usage:
|
||||
|
||||
- `Image`: This component allows users to display and update images within the application.
|
||||
#### `Image`
|
||||
|
||||
```python
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
This component provides a versatile interface for displaying images within the application. Users can update and manage images from various sources, including local paths, URLs, and even matplotlib figures.
|
||||
|
||||
import pydase
|
||||
from pydase.components.image import Image
|
||||
The component offers methods to load images seamlessly, ensuring that visual content is easily integrated and displayed within the data service.
|
||||
|
||||
```python
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
|
||||
import pydase
|
||||
from pydase.components.image import Image
|
||||
|
||||
|
||||
class MyDataService(pydase.DataService):
|
||||
my_image = Image()
|
||||
class MyDataService(pydase.DataService):
|
||||
my_image = Image()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
service = MyDataService()
|
||||
# loading from local path
|
||||
service.my_image.load_from_path("/your/image/path/")
|
||||
if __name__ == "__main__":
|
||||
service = MyDataService()
|
||||
# loading from local path
|
||||
service.my_image.load_from_path("/your/image/path/")
|
||||
|
||||
# loading from a URL
|
||||
service.my_image.load_from_url("https://cataas.com/cat")
|
||||
# loading from a URL
|
||||
service.my_image.load_from_url("https://cataas.com/cat")
|
||||
|
||||
# loading a matplotlib figure
|
||||
fig = plt.figure()
|
||||
x = np.linspace(0, 2 * np.pi)
|
||||
plt.plot(x, np.sin(x))
|
||||
plt.grid()
|
||||
service.my_image.load_from_matplotlib_figure(fig)
|
||||
# loading a matplotlib figure
|
||||
fig = plt.figure()
|
||||
x = np.linspace(0, 2 * np.pi)
|
||||
plt.plot(x, np.sin(x))
|
||||
plt.grid()
|
||||
service.my_image.load_from_matplotlib_figure(fig)
|
||||
|
||||
pydase.Server(service).run()
|
||||
```
|
||||
pydase.Server(service).run()
|
||||
```
|
||||
|
||||

|
||||

|
||||
|
||||
- `NumberSlider`: An interactive slider component to adjust numerical values, including floats and integers, on the frontend while synchronizing the data with the backend in real-time.
|
||||
#### `NumberSlider`
|
||||
|
||||
```python
|
||||
import pydase
|
||||
from pydase.components import NumberSlider
|
||||
This component provides an interactive slider interface for adjusting numerical values on the frontend. It supports both floats and integers. The values adjusted on the frontend are synchronized with the backend in real-time, ensuring consistent data representation.
|
||||
|
||||
The slider can be customized with initial values, minimum and maximum limits, and step sizes to fit various use cases.
|
||||
|
||||
```python
|
||||
import pydase
|
||||
from pydase.components import NumberSlider
|
||||
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
slider = NumberSlider(value=3.5, min=0, max=10, step_size=0.1)
|
||||
class MyService(pydase.DataService):
|
||||
slider = NumberSlider(value=3.5, min=0, max=10, step_size=0.1, type="float")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
service = MyService()
|
||||
pydase.Server(service).run()
|
||||
```
|
||||
if __name__ == "__main__":
|
||||
service = MyService()
|
||||
pydase.Server(service).run()
|
||||
```
|
||||
|
||||

|
||||

|
||||
|
||||
#### `ColouredEnum`
|
||||
|
||||
This component provides a way to visually represent different states or categories in a data service using colour-coded options. It behaves similarly to a standard `Enum`, but the values encode colours in a format understood by CSS. The colours can be defined using various methods like Hexadecimal, RGB, HSL, and more.
|
||||
|
||||
If the property associated with the `ColouredEnum` has a setter function, the keys of the enum will be rendered as a dropdown menu, allowing users to interact and select different options. Without a setter function, the selected key will simply be displayed as a coloured box with text inside, serving as a visual indicator.
|
||||
|
||||
```python
|
||||
import pydase
|
||||
import pydase.components as pyc
|
||||
|
||||
|
||||
class MyStatus(pyc.ColouredEnum):
|
||||
PENDING = "#FFA500" # Hexadecimal colour (Orange)
|
||||
RUNNING = "#0000FF80" # Hexadecimal colour with transparency (Blue)
|
||||
PAUSED = "rgb(169, 169, 169)" # RGB colour (Dark Gray)
|
||||
RETRYING = "rgba(255, 255, 0, 0.3)" # RGB colour with transparency (Yellow)
|
||||
COMPLETED = "hsl(120, 100%, 50%)" # HSL colour (Green)
|
||||
FAILED = "hsla(0, 100%, 50%, 0.7)" # HSL colour with transparency (Red)
|
||||
CANCELLED = "SlateGray" # Cross-browser colour name (Slate Gray)
|
||||
|
||||
|
||||
class StatusTest(pydase.DataService):
|
||||
_status = MyStatus.RUNNING
|
||||
|
||||
@property
|
||||
def status(self) -> MyStatus:
|
||||
return self._status
|
||||
|
||||
@status.setter
|
||||
def status(self, value: MyStatus) -> None:
|
||||
# do something ...
|
||||
self._status = value
|
||||
|
||||
# Modifying or accessing the status value:
|
||||
my_service = StatusExample()
|
||||
my_service.status = MyStatus.FAILED
|
||||
```
|
||||
|
||||

|
||||
|
||||
#### Extending with New Components
|
||||
|
||||
Users can also extend the library by creating custom components. This involves defining the behavior on the Python backend and the visual representation on the frontend. For those looking to introduce new components, the [guide on adding components](https://pydase.readthedocs.io/en/latest/dev-guide/Adding_Components/) provides detailed steps on achieving this.
|
||||
|
||||
<!-- Component User Guide End -->
|
||||
|
||||
## Customizing Web Interface Style
|
||||
|
||||
`pydase` allows you to enhance the user experience by customizing the web interface's appearance. You can apply your own styles globally across the web interface by passing a custom CSS file to the server during initialization.
|
||||
|
||||
Here's how you can use this feature:
|
||||
|
||||
1. Prepare your custom CSS file with the desired styles.
|
||||
|
||||
2. When initializing your server, use the `css` parameter of the `Server` class to specify the path to your custom CSS file.
|
||||
|
||||
```python
|
||||
from pydase import Server, DataService
|
||||
|
||||
class Device(DataService):
|
||||
# ... your service definition ...
|
||||
|
||||
if __name__ == "__main__":
|
||||
service = MyService()
|
||||
server = Server(service, css="path/to/your/custom.css").run()
|
||||
```
|
||||
|
||||
This will apply the styles defined in `custom.css` to the web interface, allowing you to maintain branding consistency or improve visual accessibility.
|
||||
|
||||
Please ensure that the CSS file path is accessible from the server's running location. Relative or absolute paths can be used depending on your setup.
|
||||
|
||||
## Understanding Service Persistence
|
||||
|
||||
`pydase` allows you to easily persist the state of your service by saving it to a file. This is especially useful when you want to maintain the service's state across different runs.
|
||||
`pydase` allows you to easily persist the state of your service by saving it to a file. This is especially useful when you want to maintain the service's state across different runs.
|
||||
|
||||
To save the state of your service, pass a `filename` keyword argument to the `__init__` method of the `DataService` base class. If the file specified by `filename` does not exist, the service will create this file and store its state in it when the service is shut down. If the file already exists, the service will load the state from this file, setting the values of its attributes to the values stored in the file.
|
||||
To save the state of your service, pass a `filename` keyword argument to the constructor of the `pydase.Server` class. If the file specified by `filename` does not exist, the state manager will create this file and store its state in it when the service is shut down. If the file already exists, the state manager will load the state from this file, setting the values of its attributes to the values stored in the file.
|
||||
|
||||
Here's an example:
|
||||
|
||||
@@ -296,23 +389,42 @@ Here's an example:
|
||||
from pydase import DataService, Server
|
||||
|
||||
class Device(DataService):
|
||||
def __init__(self, filename: str) -> None:
|
||||
# ... your init code ...
|
||||
|
||||
# Pass the filename argument to the parent class
|
||||
super().__init__(filename=filename)
|
||||
|
||||
# ... defining the Device class ...
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
service = Device("device_state.json")
|
||||
Server(service).run()
|
||||
service = Device()
|
||||
Server(service, filename="device_state.json").run()
|
||||
```
|
||||
|
||||
In this example, the state of the `Device` service will be saved to `device_state.json` when the service is shut down. If `device_state.json` exists when the service is started, the service will restore its state from this file.
|
||||
In this example, the state of the `Device` service will be saved to `device_state.json` when the service is shut down. If `device_state.json` exists when the server is started, the state manager will restore the state of the service from this file.
|
||||
|
||||
Note: If the service class structure has changed since the last time its state was saved, only the attributes that have remained the same will be restored from the settings file.
|
||||
### Controlling Property State Loading with `@load_state`
|
||||
|
||||
By default, the state manager only restores values for public attributes of your service. If you have properties that you want to control the loading for, you can use the `@load_state` decorator on your property setters. This indicates to the state manager that the value of the property should be loaded from the state file.
|
||||
|
||||
Here is how you can apply the `@load_state` decorator:
|
||||
|
||||
```python
|
||||
from pydase import DataService
|
||||
from pydase.data_service.state_manager import load_state
|
||||
|
||||
class Device(DataService):
|
||||
_name = "Default Device Name"
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@name.setter
|
||||
@load_state
|
||||
def name(self, value: str) -> None:
|
||||
self._name = value
|
||||
```
|
||||
|
||||
With the `@load_state` decorator applied to the `name` property setter, the state manager will load and apply the `name` property's value from the file storing the state upon server startup, assuming it exists.
|
||||
|
||||
Note: If the service class structure has changed since the last time its state was saved, only the attributes and properties decorated with `@load_state` that have remained the same will be restored from the settings file.
|
||||
|
||||
## Understanding Tasks in pydase
|
||||
|
||||
@@ -419,27 +531,46 @@ if __name__ == "__main__":
|
||||
|
||||
For more information about what you can do with the units, please consult the documentation of [`pint`](https://pint.readthedocs.io/en/stable/).
|
||||
|
||||
## Changing the Log Level
|
||||
## Logging in pydase
|
||||
|
||||
You can change the log level of loguru by either
|
||||
The `pydase` library organizes its loggers on a per-module basis, mirroring the Python package hierarchy. This structured approach allows for granular control over logging levels and behaviour across different parts of the library.
|
||||
|
||||
1. (RECOMMENDED) setting the `ENVIRONMENT` environment variable to "production" or "development"
|
||||
### Changing the Log Level
|
||||
|
||||
```bash
|
||||
ENVIRONMENT="production" python -m <module_using_pydase>
|
||||
```
|
||||
|
||||
The production environment will only log messages above "INFO", the development environment (default) logs everything above "DEBUG".
|
||||
You have two primary ways to adjust the log levels in `pydase`:
|
||||
|
||||
2. calling the `pydase.utils.logging.setup_logging` function with the desired log level
|
||||
1. directly targeting `pydase` loggers
|
||||
|
||||
```python
|
||||
# <your_script.py>
|
||||
You can set the log level for any `pydase` logger directly in your code. This method is useful for fine-tuning logging levels for specific modules within `pydase`. For instance, if you want to change the log level of the main `pydase` logger or target a submodule like `pydase.data_service`, you can do so as follows:
|
||||
|
||||
```python
|
||||
# <your_script.py>
|
||||
import logging
|
||||
|
||||
# Set the log level for the main pydase logger
|
||||
logging.getLogger("pydase").setLevel(logging.INFO)
|
||||
|
||||
# Optionally, target a specific submodule logger
|
||||
# logging.getLogger("pydase.data_service").setLevel(logging.DEBUG)
|
||||
|
||||
# Your logger for the current script
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.info("My info message.")
|
||||
```
|
||||
|
||||
This approach allows for specific control over different parts of the `pydase` library, depending on your logging needs.
|
||||
|
||||
from pydase.utils.logging import setup_logging
|
||||
2. using the `ENVIRONMENT` environment variable
|
||||
|
||||
setup_logging("INFO")
|
||||
```
|
||||
For a more global setting that affects the entire `pydase` library, you can utilize the `ENVIRONMENT` environment variable. Setting this variable to "production" will configure all `pydase` loggers to only log messages of level "INFO" and above, filtering out more verbose logging. This is particularly useful for production environments where excessive logging can be overwhelming or unnecessary.
|
||||
|
||||
```bash
|
||||
ENVIRONMENT="production" python -m <module_using_pydase>
|
||||
```
|
||||
|
||||
In the absence of this setting, the default behavior is to log everything of level "DEBUG" and above, suitable for development environments where more detailed logs are beneficial.
|
||||
|
||||
**Note**: It is recommended to avoid calling the `pydase.utils.logging.setup_logging` function directly, as this may result in duplicated logging messages.
|
||||
|
||||
## Documentation
|
||||
|
||||
|
||||
@@ -107,19 +107,22 @@ Write the React component code, following the structure and patterns used in exi
|
||||
For example, for the `Image` component, a template could look like this:
|
||||
|
||||
```tsx
|
||||
import { emit_update } from '../socket'; // use this when your component should update values in the backend
|
||||
import { setAttribute, runMethod } from '../socket'; // use this when your component should sets values of attributes
|
||||
// or runs a method, respectively
|
||||
import { DocStringComponent } from './DocStringComponent';
|
||||
import React, { useEffect, useRef, useState } from 'react';
|
||||
import { Card, Collapse, Image } from 'react-bootstrap';
|
||||
import { DocStringComponent } from './DocStringComponent';
|
||||
import { ChevronDown, ChevronRight } from 'react-bootstrap-icons';
|
||||
import { getIdFromFullAccessPath } from '../utils/stringUtils';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
interface ImageComponentProps {
|
||||
name: string;
|
||||
parentPath: string;
|
||||
readOnly: boolean;
|
||||
docString: string;
|
||||
addNotification: (string) => void;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
// Define your component specific props here
|
||||
value: string;
|
||||
format: string;
|
||||
@@ -130,6 +133,8 @@ export const ImageComponent = React.memo((props: ImageComponentProps) => {
|
||||
|
||||
const renderCount = useRef(0);
|
||||
const [open, setOpen] = useState(true); // add this if you want to expand/collapse your component
|
||||
const fullAccessPath = parentPath.concat('.' + name);
|
||||
const id = getIdFromFullAccessPath(fullAccessPath);
|
||||
|
||||
useEffect(() => {
|
||||
renderCount.current++;
|
||||
@@ -143,7 +148,7 @@ export const ImageComponent = React.memo((props: ImageComponentProps) => {
|
||||
// Your component logic here
|
||||
|
||||
return (
|
||||
<div className={'imageComponent'} id={parentPath.concat('.' + name)}>
|
||||
<div className={'imageComponent'} id={id}>
|
||||
{/* Add the Card and Collapse components here if you want to be able to expand and
|
||||
collapse your component. */}
|
||||
<Card>
|
||||
@@ -170,52 +175,56 @@ export const ImageComponent = React.memo((props: ImageComponentProps) => {
|
||||
|
||||
### Step 3: Emitting Updates to the Backend
|
||||
|
||||
Often, React components in the frontend will need to send updates to the backend, especially when user interactions result in a change of state or data. In `pydase`, we use `socketio` to seamlessly communicate these changes. Here's a detailed guide on how to emit update events from your frontend component:
|
||||
React components in the frontend often need to send updates to the backend, particularly when user interactions modify the component's state or data. In `pydase`, we use `socketio` for smooth communication of these changes. To handle updates, we primarily use two events: `setAttribute` for updating attributes, and `runMethod` for executing backend methods. Below is a detailed guide on how to emit these events from your frontend component:
|
||||
|
||||
1. **Setting Up Emission**: Ensure you've imported the required functions and methods for emission. The main function we'll use for this is `emit_update` from the `socket` module:
|
||||
1. **Setup for emitting events**:
|
||||
First, ensure you've imported the necessary functions from the `socket` module for both updating attributes and executing methods:
|
||||
|
||||
```tsx
|
||||
import { emit_update } from '../socket';
|
||||
import { setAttribute, runMethod } from '../socket';
|
||||
```
|
||||
|
||||
2. **Understanding the Emission Parameters**:
|
||||
|
||||
When emitting an update, we send three main pieces of data:
|
||||
2. **Event Parameters**:
|
||||
|
||||
- `parentPath`: This is the access path for the parent object of the attribute to be updated. This forms the basis to create the full access path for the attribute. For instance, for the attribute access path `attr1.list_attr[0].attr2`, `attr1.list_attr[0]` would be the `parentPath`.
|
||||
- When using **`setAttribute`**, we send three main pieces of data:
|
||||
- `name`: The name of the attribute within the `DataService` instance to update.
|
||||
- `parentPath`: The access path for the parent object of the attribute to be updated.
|
||||
- `value`: The new value for the attribute, which must match the backend attribute type.
|
||||
- For **`runMethod`**, the parameters are slightly different:
|
||||
- `name`: The name of the method to be executed in the backend.
|
||||
- `parentPath`: Similar to `setAttribute`, it's the access path to the object containing the method.
|
||||
- `kwargs`: A dictionary of keyword arguments that the method requires.
|
||||
|
||||
- `name`: This represents the name of the attribute to be updated within the `DataService` instance. If the attribute is part of a nested structure, this would be the name of the attribute in the last nested object. So, for `attr1.list_attr[0].attr2`, `attr2` would be the name.
|
||||
3. **Implementation**:
|
||||
|
||||
- `value`: This is the new value intended for the attribute. Ensure that the type of this value matches the type of the attribute in the backend.
|
||||
For illustation, take the `ButtonComponent`. When the button state changes, we want to send this update to the backend:
|
||||
|
||||
3. **Implementing the Emission**:
|
||||
```tsx
|
||||
import { setAttribute } from '../socket';
|
||||
// ... (other imports)
|
||||
|
||||
export const ButtonComponent = React.memo((props: ButtonComponentProps) => {
|
||||
// ...
|
||||
const { name, parentPath, value } = props;
|
||||
|
||||
To illustrate the emission process, let's consider the `ButtonComponent`. When the button state changes, we want to send this update to the backend:
|
||||
const setChecked = (checked: boolean) => {
|
||||
setAttribute(name, parentPath, checked);
|
||||
};
|
||||
|
||||
```tsx
|
||||
// ... (other imports)
|
||||
|
||||
export const ButtonComponent = React.memo((props: ButtonComponentProps) => {
|
||||
// ...
|
||||
const { name, parentPath, value } = props;
|
||||
return (
|
||||
<ToggleButton
|
||||
checked={value}
|
||||
value={parentPath}
|
||||
// ... other props
|
||||
onChange={(e) => setChecked(e.currentTarget.checked)}>
|
||||
<p>{name}</p>
|
||||
</ToggleButton>
|
||||
);
|
||||
});
|
||||
```
|
||||
|
||||
const setChecked = (checked: boolean) => {
|
||||
emit_update(name, parentPath, checked);
|
||||
};
|
||||
In this example, whenever the button's checked state changes (`onChange` event), we invoke the `setChecked` method, which in turn emits the new state to the backend using `setAttribute`.
|
||||
|
||||
return (
|
||||
<ToggleButton
|
||||
checked={value}
|
||||
value={parentPath}
|
||||
// ... other props
|
||||
onChange={(e) => setChecked(e.currentTarget.checked)}>
|
||||
<p>{name}</p>
|
||||
</ToggleButton>
|
||||
);
|
||||
});
|
||||
```
|
||||
|
||||
In this example, whenever the button's checked state changes (`onChange` event), we invoke the `setChecked` method, which in turn emits the new state to the backend using `emit_update`.
|
||||
|
||||
### Step 4: Add the New Component to the GenericComponent
|
||||
|
||||
@@ -291,9 +300,10 @@ useEffect(() => {
|
||||
```
|
||||
|
||||
However, you might want to use the `addNotification` at different places. For an example, see the [MethodComponent](../../frontend/src/components/MethodComponent.tsx).
|
||||
**Note**: you can specify the notification level by passing a string of type `LevelName` (one of 'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'). The default value is 'DEBUG'.
|
||||
|
||||
### Step 6: Write Tests for the Component (TODO)
|
||||
|
||||
Test the frontend component to ensure that it renders correctly and interacts seamlessly
|
||||
with the backend. Consider writing unit tests using a testing library like Jest or React
|
||||
Testing Library, and manually test the component in the browser.
|
||||
Testing Library, and manually test the component in the browser.
|
||||
|
||||
27
docs/dev-guide/Observer_Pattern_Implementation.md
Normal file
27
docs/dev-guide/Observer_Pattern_Implementation.md
Normal file
@@ -0,0 +1,27 @@
|
||||
# Observer Pattern Implementation in Pydase
|
||||
|
||||
## Overview
|
||||
|
||||
The Observer Pattern is a fundamental design pattern in the `pydase` package, serving as the central communication mechanism for state updates to clients connected to a service.
|
||||
|
||||
## How it Works
|
||||
|
||||
### The Observable Class
|
||||
|
||||
The `Observable` class is at the core of the pattern. It maintains a list of observers and is responsible for notifying them about state changes. It does so by overriding the following methods:
|
||||
|
||||
- `__setattr__`: This function emits a notification before and after a new value is set. These two notifications are important to track which attributes are being set to avoid endless recursion (e.g. when accessing a property within another property). Moreover, when setting an attribute to another observable, the former class will add itself as an observer to the latter class, ensuring that nested classes are properly observed.
|
||||
- `__getattribute__`: This function notifies the observers when a property getter is called, allowing for monitoring state changes in remote devices, as opposed to local instance attributes.
|
||||
|
||||
### Custom Collection Classes
|
||||
|
||||
To handle collections (like lists and dictionaries), the `Observable` class converts them into custom collection classes `_ObservableList` and `_ObservableDict` that notify observers of any changes in their state. For this, they have to override the methods changing the state, e.g., `__setitem__` or `append` for lists.
|
||||
|
||||
### The Observer Class
|
||||
|
||||
The `Observer` is the final element in the chain of observers. The notifications of attribute changes it receives include the full access path (in dot-notation) and the new value. It implements logic to handle state changes, like caching, error logging for type changes, etc. This can be extended by custom notification callbacks (implemented using `add_notification_callback` in `DataServiceObserver`). This enables the user to perform specific actions in response to changes. In `pydase`, the web server adds an additional notification callback that emits the websocket events (`sio_callback`).
|
||||
|
||||
Furthermore, the `DataServiceObserver` implements logic to reload the values of properties when an attribute change occurs that a property depends on.
|
||||
|
||||
- **Dynamic Inspection**: The observer dynamically inspects the observable object (recursively) to create a mapping of properties and their dependencies. This mapping is constructed based on the class or instance attributes used within the source code of the property getters.
|
||||
- **Dependency Management**: When a change in an attribute occurs, `DataServiceObserver` updates any properties that depend on this attribute. This ensures that the overall state remains consistent and up-to-date, especially in complex scenarios where properties depend on other instance attribute or properties.
|
||||
BIN
docs/images/ColouredEnum_component.png
Normal file
BIN
docs/images/ColouredEnum_component.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 18 KiB |
6
docs/user-guide/Components.md
Normal file
6
docs/user-guide/Components.md
Normal file
@@ -0,0 +1,6 @@
|
||||
# Components Guide
|
||||
{%
|
||||
include-markdown "../../README.md"
|
||||
start="<!-- Component User Guide Start -->"
|
||||
end="<!-- Component User Guide End -->"
|
||||
%}
|
||||
@@ -7,12 +7,10 @@
|
||||
],
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/eslint-recommended",
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
"prettier"
|
||||
],
|
||||
"rules": {
|
||||
"no-console": 1, // Means warning
|
||||
"prettier/prettier": 2 // Means error }
|
||||
"prettier/prettier": "error"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
7006
frontend/package-lock.json
generated
7006
frontend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -5,6 +5,7 @@
|
||||
"dependencies": {
|
||||
"@emotion/react": "^11.11.1",
|
||||
"@emotion/styled": "^11.11.0",
|
||||
"@fsouza/prettierd": "^0.25.1",
|
||||
"@mui/material": "^5.14.1",
|
||||
"@testing-library/jest-dom": "^5.16.5",
|
||||
"@testing-library/react": "^13.4.0",
|
||||
@@ -46,9 +47,12 @@
|
||||
"@types/node": "^20.0.0",
|
||||
"@types/react": "^18.0.0",
|
||||
"@types/react-dom": "^18.0.0",
|
||||
"eslint-config-prettier": "^8.8.0",
|
||||
"eslint-plugin-prettier": "^5.0.0",
|
||||
"prettier": "^3.0.0",
|
||||
"@babel/plugin-proposal-private-property-in-object": "7.21.11"
|
||||
"@typescript-eslint/eslint-plugin": "^6.11.0",
|
||||
"@typescript-eslint/parser": "^6.9.0",
|
||||
"eslint": "^8.52.0",
|
||||
"eslint-config-prettier": "^9.0.0",
|
||||
"eslint-plugin-prettier": "^5.0.1",
|
||||
"prettier": "^3.0.3",
|
||||
"typescript": "^4.9.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
body {
|
||||
min-width: 576px;
|
||||
max-width: 1200px;
|
||||
max-width: 2000px;
|
||||
}
|
||||
input.instantUpdate {
|
||||
background-color: rgba(255, 0, 0, 0.1);
|
||||
@@ -12,14 +12,31 @@ input.instantUpdate {
|
||||
}
|
||||
.navbarOffset {
|
||||
padding-top: 60px !important;
|
||||
right: 20;
|
||||
}
|
||||
/* .toastContainer {
|
||||
position: fixed;
|
||||
} */
|
||||
.notificationToast {
|
||||
.toastContainer {
|
||||
position: fixed !important;
|
||||
padding: 5px;
|
||||
}
|
||||
.debugToast, .infoToast {
|
||||
background-color: rgba(114, 214, 253, 0.5) !important;
|
||||
}
|
||||
.exceptionToast {
|
||||
.warningToast {
|
||||
background-color: rgba(255, 181, 44, 0.603) !important;
|
||||
}
|
||||
.errorToast, .criticalToast {
|
||||
background-color: rgba(216, 41, 18, 0.678) !important;
|
||||
}
|
||||
}
|
||||
.buttonComponent {
|
||||
float: left !important;
|
||||
margin-right: 10px !important;
|
||||
}
|
||||
.stringComponent {
|
||||
float: left !important;
|
||||
margin-right: 10px !important;
|
||||
}
|
||||
.numberComponent {
|
||||
float: left !important;
|
||||
margin-right: 10px !important;
|
||||
width: 270px !important;
|
||||
}
|
||||
|
||||
|
||||
@@ -6,117 +6,48 @@ import {
|
||||
DataServiceJSON
|
||||
} from './components/DataServiceComponent';
|
||||
import './App.css';
|
||||
import { Notifications } from './components/NotificationsComponent';
|
||||
import {
|
||||
Notifications,
|
||||
Notification,
|
||||
LevelName
|
||||
} from './components/NotificationsComponent';
|
||||
import { ConnectionToast } from './components/ConnectionToast';
|
||||
import { SerializedValue, setNestedValueByPath, State } from './utils/stateUtils';
|
||||
|
||||
type ValueType = boolean | string | number | object;
|
||||
|
||||
type State = DataServiceJSON | null;
|
||||
type Action =
|
||||
| { type: 'SET_DATA'; data: DataServiceJSON }
|
||||
| { type: 'UPDATE_ATTRIBUTE'; parentPath: string; name: string; value: ValueType };
|
||||
type UpdateMessage = {
|
||||
data: { parent_path: string; name: string; value: object };
|
||||
};
|
||||
type ExceptionMessage = {
|
||||
data: { exception: string; type: string };
|
||||
};
|
||||
|
||||
/**
|
||||
* A function to update a specific property in a deeply nested object.
|
||||
* The property to be updated is specified by a path array.
|
||||
*
|
||||
* Each path element can be a regular object key or an array index of the
|
||||
* form "attribute[index]", where "attribute" is the key of the array in
|
||||
* the object and "index" is the index of the element in the array.
|
||||
*
|
||||
* For array indices, the element at the specified index in the array is
|
||||
* updated.
|
||||
*
|
||||
* If the property to be updated is an object or an array, it is updated
|
||||
* recursively.
|
||||
*
|
||||
* @param {Array<string>} path - An array where each element is a key in the object,
|
||||
* forming a path to the property to be updated.
|
||||
* @param {object} obj - The object to be updated.
|
||||
* @param {object} value - The new value for the property specified by the path.
|
||||
* @return {object} - A new object with the specified property updated.
|
||||
*/
|
||||
function updateNestedObject(path: Array<string>, obj: object, value: ValueType) {
|
||||
// Base case: If the path is empty, return the new value.
|
||||
// This means we've reached the nested property to be updated.
|
||||
if (path.length === 0) {
|
||||
return value;
|
||||
}
|
||||
|
||||
// Recursive case: If the path is not empty, split it into the first key and the rest
|
||||
// of the path.
|
||||
const [first, ...rest] = path;
|
||||
|
||||
// Check if 'first' is an array index.
|
||||
const indexMatch = first.match(/^(\w+)\[(\d+)\]$/);
|
||||
|
||||
// If 'first' is an array index of the form "attribute[index]", then update the
|
||||
// element at the specified index in the array. Otherwise, update the property
|
||||
// specified by 'first' in the object.
|
||||
if (indexMatch) {
|
||||
const attribute = indexMatch[1];
|
||||
const index = parseInt(indexMatch[2]);
|
||||
|
||||
if (Array.isArray(obj[attribute]?.value)) {
|
||||
return {
|
||||
...obj,
|
||||
[attribute]: {
|
||||
...obj[attribute],
|
||||
value: obj[attribute].value.map((item, i) =>
|
||||
i === index
|
||||
? {
|
||||
...item,
|
||||
value: updateNestedObject(rest, item.value || {}, value)
|
||||
}
|
||||
: item
|
||||
)
|
||||
}
|
||||
};
|
||||
} else {
|
||||
throw new Error(
|
||||
`Expected ${attribute}.value to be an array, but received ${typeof obj[
|
||||
attribute
|
||||
]?.value}`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
...obj,
|
||||
[first]: {
|
||||
...obj[first],
|
||||
value: updateNestedObject(rest, obj[first]?.value || {}, value)
|
||||
}
|
||||
| { type: 'SET_DATA'; data: State }
|
||||
| {
|
||||
type: 'UPDATE_ATTRIBUTE';
|
||||
fullAccessPath: string;
|
||||
newValue: SerializedValue;
|
||||
};
|
||||
}
|
||||
}
|
||||
type UpdateMessage = {
|
||||
data: { full_access_path: string; value: SerializedValue };
|
||||
};
|
||||
type LogMessage = {
|
||||
levelname: LevelName;
|
||||
message: string;
|
||||
};
|
||||
|
||||
const reducer = (state: State, action: Action): State => {
|
||||
switch (action.type) {
|
||||
case 'SET_DATA':
|
||||
return action.data;
|
||||
case 'UPDATE_ATTRIBUTE': {
|
||||
const path = action.parentPath.split('.').slice(1).concat(action.name);
|
||||
|
||||
return updateNestedObject(path, state, action.value);
|
||||
return setNestedValueByPath(state, action.fullAccessPath, action.newValue);
|
||||
}
|
||||
default:
|
||||
throw new Error();
|
||||
}
|
||||
};
|
||||
|
||||
const App = () => {
|
||||
const [state, dispatch] = useReducer(reducer, null);
|
||||
const stateRef = useRef(state); // Declare a reference to hold the current state
|
||||
const [isInstantUpdate, setIsInstantUpdate] = useState(false);
|
||||
const [showSettings, setShowSettings] = useState(false);
|
||||
const [showNotification, setShowNotification] = useState(true);
|
||||
const [notifications, setNotifications] = useState([]);
|
||||
const [exceptions, setExceptions] = useState([]);
|
||||
const [showNotification, setShowNotification] = useState(false);
|
||||
const [notifications, setNotifications] = useState<Notification[]>([]);
|
||||
const [connectionStatus, setConnectionStatus] = useState('connecting');
|
||||
|
||||
// Keep the state reference up to date
|
||||
useEffect(() => {
|
||||
@@ -124,81 +55,93 @@ const App = () => {
|
||||
}, [state]);
|
||||
|
||||
useEffect(() => {
|
||||
// Fetch data from the API when the component mounts
|
||||
fetch(`http://${hostname}:${port}/service-properties`)
|
||||
.then((response) => response.json())
|
||||
.then((data: DataServiceJSON) => dispatch({ type: 'SET_DATA', data }));
|
||||
// Allow the user to add a custom css file
|
||||
fetch(`http://${hostname}:${port}/custom.css`)
|
||||
.then((response) => {
|
||||
if (response.ok) {
|
||||
// If the file exists, create a link element for the custom CSS
|
||||
const link = document.createElement('link');
|
||||
link.href = `http://${hostname}:${port}/custom.css`;
|
||||
link.type = 'text/css';
|
||||
link.rel = 'stylesheet';
|
||||
document.head.appendChild(link);
|
||||
}
|
||||
})
|
||||
.catch(console.error); // Handle the error appropriately
|
||||
|
||||
socket.on('connect', () => {
|
||||
// Fetch data from the API when the client connects
|
||||
fetch(`http://${hostname}:${port}/service-properties`)
|
||||
.then((response) => response.json())
|
||||
.then((data: State) => dispatch({ type: 'SET_DATA', data }));
|
||||
setConnectionStatus('connected');
|
||||
});
|
||||
socket.on('disconnect', () => {
|
||||
setConnectionStatus('disconnected');
|
||||
setTimeout(() => {
|
||||
// Only set "reconnecting" is the state is still "disconnected"
|
||||
// E.g. when the client has already reconnected
|
||||
setConnectionStatus((currentState) =>
|
||||
currentState === 'disconnected' ? 'reconnecting' : currentState
|
||||
);
|
||||
}, 2000);
|
||||
});
|
||||
|
||||
socket.on('notify', onNotify);
|
||||
socket.on('exception', onException);
|
||||
socket.on('log', onLogMessage);
|
||||
|
||||
return () => {
|
||||
socket.off('notify', onNotify);
|
||||
socket.off('exception', onException);
|
||||
socket.off('log', onLogMessage);
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Adding useCallback to prevent notify to change causing a re-render of all
|
||||
// components
|
||||
const addNotification = useCallback((text: string) => {
|
||||
// Getting the current time in the required format
|
||||
const timeString = new Date().toISOString().substring(11, 19);
|
||||
// Adding an id to the notification to provide a way of removing it
|
||||
const id = Math.random();
|
||||
const addNotification = useCallback(
|
||||
(message: string, levelname: LevelName = 'DEBUG') => {
|
||||
// Getting the current time in the required format
|
||||
const timeStamp = new Date().toISOString().substring(11, 19);
|
||||
// Adding an id to the notification to provide a way of removing it
|
||||
const id = Math.random();
|
||||
|
||||
// Custom logic for notifications
|
||||
setNotifications((prevNotifications) => [
|
||||
{ id, text, time: timeString },
|
||||
...prevNotifications
|
||||
]);
|
||||
}, []);
|
||||
// Custom logic for notifications
|
||||
setNotifications((prevNotifications) => [
|
||||
{ levelname, id, message, timeStamp },
|
||||
...prevNotifications
|
||||
]);
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
const notifyException = (text: string) => {
|
||||
// Getting the current time in the required format
|
||||
const timeString = new Date().toISOString().substring(11, 19);
|
||||
// Adding an id to the notification to provide a way of removing it
|
||||
const id = Math.random();
|
||||
|
||||
// Custom logic for notifications
|
||||
setExceptions((prevNotifications) => [
|
||||
{ id, text, time: timeString },
|
||||
...prevNotifications
|
||||
]);
|
||||
};
|
||||
const removeNotificationById = (id: number) => {
|
||||
setNotifications((prevNotifications) =>
|
||||
prevNotifications.filter((n) => n.id !== id)
|
||||
);
|
||||
};
|
||||
|
||||
const removeExceptionById = (id: number) => {
|
||||
setExceptions((prevNotifications) => prevNotifications.filter((n) => n.id !== id));
|
||||
};
|
||||
|
||||
const handleCloseSettings = () => setShowSettings(false);
|
||||
const handleShowSettings = () => setShowSettings(true);
|
||||
|
||||
function onNotify(value: UpdateMessage) {
|
||||
// Extracting data from the notification
|
||||
const { parent_path: parentPath, name, value: newValue } = value.data;
|
||||
const { full_access_path: fullAccessPath, value: newValue } = value.data;
|
||||
|
||||
// Dispatching the update to the reducer
|
||||
dispatch({
|
||||
type: 'UPDATE_ATTRIBUTE',
|
||||
parentPath,
|
||||
name,
|
||||
value: newValue
|
||||
fullAccessPath,
|
||||
newValue
|
||||
});
|
||||
}
|
||||
|
||||
function onException(value: ExceptionMessage) {
|
||||
const newException = `${value.data.type}: ${value.data.exception}.`;
|
||||
notifyException(newException);
|
||||
function onLogMessage(value: LogMessage) {
|
||||
addNotification(value.message, value.levelname);
|
||||
}
|
||||
|
||||
// While the data is loading
|
||||
if (!state) {
|
||||
return <p>Loading...</p>;
|
||||
return <ConnectionToast connectionStatus={connectionStatus} />;
|
||||
}
|
||||
return (
|
||||
<>
|
||||
@@ -212,9 +155,7 @@ const App = () => {
|
||||
<Notifications
|
||||
showNotification={showNotification}
|
||||
notifications={notifications}
|
||||
exceptions={exceptions}
|
||||
removeNotificationById={removeNotificationById}
|
||||
removeExceptionById={removeExceptionById}
|
||||
/>
|
||||
|
||||
<Offcanvas
|
||||
@@ -244,11 +185,13 @@ const App = () => {
|
||||
|
||||
<div className="App navbarOffset">
|
||||
<DataServiceComponent
|
||||
name={''}
|
||||
props={state as DataServiceJSON}
|
||||
isInstantUpdate={isInstantUpdate}
|
||||
addNotification={addNotification}
|
||||
/>
|
||||
</div>
|
||||
<ConnectionToast connectionStatus={connectionStatus} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import React, { useEffect, useRef } from 'react';
|
||||
import { emit_update } from '../socket';
|
||||
import { runMethod } from '../socket';
|
||||
import { InputGroup, Form, Button } from 'react-bootstrap';
|
||||
import { DocStringComponent } from './DocStringComponent';
|
||||
import { getIdFromFullAccessPath } from '../utils/stringUtils';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
interface AsyncMethodProps {
|
||||
name: string;
|
||||
@@ -10,13 +12,14 @@ interface AsyncMethodProps {
|
||||
value: Record<string, string>;
|
||||
docString?: string;
|
||||
hideOutput?: boolean;
|
||||
addNotification: (string) => void;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
}
|
||||
|
||||
export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
|
||||
const { name, parentPath, docString, value: runningTask, addNotification } = props;
|
||||
const renderCount = useRef(0);
|
||||
const formRef = useRef(null);
|
||||
const id = getIdFromFullAccessPath(parentPath.concat('.' + name));
|
||||
|
||||
useEffect(() => {
|
||||
renderCount.current++;
|
||||
@@ -54,18 +57,18 @@ export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
|
||||
const execute = async (event: React.FormEvent) => {
|
||||
event.preventDefault();
|
||||
let method_name: string;
|
||||
const args = {};
|
||||
const kwargs: Record<string, unknown> = {};
|
||||
|
||||
if (runningTask !== undefined && runningTask !== null) {
|
||||
method_name = `stop_${name}`;
|
||||
} else {
|
||||
Object.keys(props.parameters).forEach(
|
||||
(name) => (args[name] = event.target[name].value)
|
||||
(name) => (kwargs[name] = event.target[name].value)
|
||||
);
|
||||
method_name = `start_${name}`;
|
||||
}
|
||||
|
||||
emit_update(method_name, parentPath, { args: args });
|
||||
runMethod(method_name, parentPath, kwargs);
|
||||
};
|
||||
|
||||
const args = Object.entries(props.parameters).map(([name, type], index) => {
|
||||
@@ -87,11 +90,9 @@ export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
|
||||
});
|
||||
|
||||
return (
|
||||
<div
|
||||
className="align-items-center asyncMethodComponent"
|
||||
id={parentPath.concat('.' + name)}>
|
||||
<div className="align-items-center asyncMethodComponent" id={id}>
|
||||
{process.env.NODE_ENV === 'development' && (
|
||||
<p>Render count: {renderCount.current}</p>
|
||||
<div>Render count: {renderCount.current}</div>
|
||||
)}
|
||||
<h5>
|
||||
Function: {name}
|
||||
@@ -99,11 +100,7 @@ export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
|
||||
</h5>
|
||||
<Form onSubmit={execute} ref={formRef}>
|
||||
{args}
|
||||
<Button
|
||||
id={`button-${parentPath}.${name}`}
|
||||
name={name}
|
||||
value={parentPath}
|
||||
type="submit">
|
||||
<Button id={`button-${id}`} name={name} value={parentPath} type="submit">
|
||||
{runningTask ? 'Stop' : 'Start'}
|
||||
</Button>
|
||||
</Form>
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import React, { useEffect, useRef } from 'react';
|
||||
import { ToggleButton } from 'react-bootstrap';
|
||||
import { emit_update } from '../socket';
|
||||
import { setAttribute } from '../socket';
|
||||
import { DocStringComponent } from './DocStringComponent';
|
||||
import { getIdFromFullAccessPath } from '../utils/stringUtils';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
interface ButtonComponentProps {
|
||||
name: string;
|
||||
@@ -10,13 +12,14 @@ interface ButtonComponentProps {
|
||||
readOnly: boolean;
|
||||
docString: string;
|
||||
mapping?: [string, string]; // Enforce a tuple of two strings
|
||||
addNotification: (string) => void;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
}
|
||||
|
||||
export const ButtonComponent = React.memo((props: ButtonComponentProps) => {
|
||||
const { name, parentPath, value, readOnly, docString, mapping, addNotification } =
|
||||
props;
|
||||
const buttonName = mapping ? (value ? mapping[0] : mapping[1]) : name;
|
||||
const id = getIdFromFullAccessPath(parentPath.concat('.' + name));
|
||||
|
||||
const renderCount = useRef(0);
|
||||
|
||||
@@ -29,25 +32,25 @@ export const ButtonComponent = React.memo((props: ButtonComponentProps) => {
|
||||
}, [props.value]);
|
||||
|
||||
const setChecked = (checked: boolean) => {
|
||||
emit_update(name, parentPath, checked);
|
||||
setAttribute(name, parentPath, checked);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={'buttonComponent'} id={parentPath.concat('.' + name)}>
|
||||
<div className={'buttonComponent'} id={id}>
|
||||
{process.env.NODE_ENV === 'development' && (
|
||||
<p>Render count: {renderCount.current}</p>
|
||||
<div>Render count: {renderCount.current}</div>
|
||||
)}
|
||||
|
||||
<DocStringComponent docString={docString} />
|
||||
<ToggleButton
|
||||
id={`toggle-check-${parentPath}.${name}`}
|
||||
id={`toggle-check-${id}`}
|
||||
type="checkbox"
|
||||
variant={value ? 'success' : 'secondary'}
|
||||
checked={value}
|
||||
value={parentPath}
|
||||
disabled={readOnly}
|
||||
onChange={(e) => setChecked(e.currentTarget.checked)}>
|
||||
<p>{buttonName}</p>
|
||||
{buttonName}
|
||||
</ToggleButton>
|
||||
</div>
|
||||
);
|
||||
|
||||
77
frontend/src/components/ColouredEnumComponent.tsx
Normal file
77
frontend/src/components/ColouredEnumComponent.tsx
Normal file
@@ -0,0 +1,77 @@
|
||||
import React, { useEffect, useRef } from 'react';
|
||||
import { InputGroup, Form, Row, Col } from 'react-bootstrap';
|
||||
import { setAttribute } from '../socket';
|
||||
import { DocStringComponent } from './DocStringComponent';
|
||||
import { getIdFromFullAccessPath } from '../utils/stringUtils';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
interface ColouredEnumComponentProps {
|
||||
name: string;
|
||||
parentPath: string;
|
||||
value: string;
|
||||
docString?: string;
|
||||
readOnly: boolean;
|
||||
enumDict: Record<string, string>;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
}
|
||||
|
||||
export const ColouredEnumComponent = React.memo((props: ColouredEnumComponentProps) => {
|
||||
const {
|
||||
name,
|
||||
parentPath: parentPath,
|
||||
value,
|
||||
docString,
|
||||
enumDict,
|
||||
readOnly,
|
||||
addNotification
|
||||
} = props;
|
||||
const renderCount = useRef(0);
|
||||
const id = getIdFromFullAccessPath(parentPath.concat('.' + name));
|
||||
|
||||
useEffect(() => {
|
||||
renderCount.current++;
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
addNotification(`${parentPath}.${name} changed to ${value}.`);
|
||||
}, [props.value]);
|
||||
|
||||
const handleValueChange = (newValue: string) => {
|
||||
setAttribute(name, parentPath, newValue);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={'enumComponent'} id={id}>
|
||||
{process.env.NODE_ENV === 'development' && (
|
||||
<div>Render count: {renderCount.current}</div>
|
||||
)}
|
||||
<DocStringComponent docString={docString} />
|
||||
<Row>
|
||||
<Col className="d-flex align-items-center">
|
||||
<InputGroup.Text>{name}</InputGroup.Text>
|
||||
{readOnly ? (
|
||||
// Display the Form.Control when readOnly is true
|
||||
<Form.Control
|
||||
value={value}
|
||||
disabled={true}
|
||||
style={{ backgroundColor: enumDict[value] }}
|
||||
/>
|
||||
) : (
|
||||
// Display the Form.Select when readOnly is false
|
||||
<Form.Select
|
||||
aria-label="coloured-enum-select"
|
||||
value={value}
|
||||
style={{ backgroundColor: enumDict[value] }}
|
||||
onChange={(event) => handleValueChange(event.target.value)}>
|
||||
{Object.entries(enumDict).map(([key]) => (
|
||||
<option key={key} value={key}>
|
||||
{key}
|
||||
</option>
|
||||
))}
|
||||
</Form.Select>
|
||||
)}
|
||||
</Col>
|
||||
</Row>
|
||||
</div>
|
||||
);
|
||||
});
|
||||
86
frontend/src/components/ConnectionToast.tsx
Normal file
86
frontend/src/components/ConnectionToast.tsx
Normal file
@@ -0,0 +1,86 @@
|
||||
import React, { useEffect, useState } from 'react';
|
||||
import { Toast, Button, ToastContainer } from 'react-bootstrap';
|
||||
|
||||
type ConnectionToastProps = {
|
||||
connectionStatus: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* ConnectionToast Component
|
||||
*
|
||||
* Displays a toast notification that reflects the current connection status.
|
||||
*
|
||||
* Props:
|
||||
* - connectionStatus (string): The current status of the connection which can be
|
||||
* 'connecting', 'connected', 'disconnected', or 'reconnecting'. The component uses this
|
||||
* status to determine the message, background color (`bg`), and auto-hide delay of the toast.
|
||||
*
|
||||
* The toast is designed to automatically appear based on changes to the `connectionStatus` prop
|
||||
* and provides a close button to manually dismiss the toast. It uses `react-bootstrap`'s Toast
|
||||
* component to show the connection status in a stylized format, and Bootstrap's utility classes
|
||||
* for alignment and spacing.
|
||||
*/
|
||||
export const ConnectionToast = React.memo(
|
||||
({ connectionStatus }: ConnectionToastProps) => {
|
||||
const [show, setShow] = useState(true);
|
||||
|
||||
useEffect(() => {
|
||||
setShow(true);
|
||||
}, [connectionStatus]);
|
||||
|
||||
const handleClose = () => setShow(false);
|
||||
|
||||
const getToastContent = (): {
|
||||
message: string;
|
||||
bg: string; // bootstrap uses `bg` prop for background color
|
||||
delay: number | undefined;
|
||||
} => {
|
||||
switch (connectionStatus) {
|
||||
case 'connecting':
|
||||
return {
|
||||
message: 'Connecting...',
|
||||
bg: 'info',
|
||||
delay: undefined
|
||||
};
|
||||
case 'connected':
|
||||
return { message: 'Connected', bg: 'success', delay: 1000 };
|
||||
case 'disconnected':
|
||||
return {
|
||||
message: 'Disconnected',
|
||||
bg: 'danger',
|
||||
delay: undefined
|
||||
};
|
||||
case 'reconnecting':
|
||||
return {
|
||||
message: 'Reconnecting...',
|
||||
bg: 'info',
|
||||
delay: undefined
|
||||
};
|
||||
default:
|
||||
return {
|
||||
message: '',
|
||||
bg: 'info',
|
||||
delay: undefined
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const { message, bg, delay } = getToastContent();
|
||||
|
||||
return (
|
||||
<ToastContainer position="bottom-center" className="toastContainer">
|
||||
<Toast
|
||||
show={show}
|
||||
onClose={handleClose}
|
||||
delay={delay}
|
||||
autohide={delay !== undefined}
|
||||
bg={bg}>
|
||||
<Toast.Body className="d-flex justify-content-between">
|
||||
{message}
|
||||
<Button variant="close" size="sm" onClick={handleClose} />
|
||||
</Toast.Body>
|
||||
</Toast>
|
||||
</ToastContainer>
|
||||
);
|
||||
}
|
||||
);
|
||||
@@ -3,33 +3,42 @@ import React from 'react';
|
||||
import { Card, Collapse } from 'react-bootstrap';
|
||||
import { ChevronDown, ChevronRight } from 'react-bootstrap-icons';
|
||||
import { Attribute, GenericComponent } from './GenericComponent';
|
||||
import { getIdFromFullAccessPath } from '../utils/stringUtils';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
type DataServiceProps = {
|
||||
name: string;
|
||||
props: DataServiceJSON;
|
||||
parentPath?: string;
|
||||
isInstantUpdate: boolean;
|
||||
addNotification: (string) => void;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
};
|
||||
|
||||
export type DataServiceJSON = Record<string, Attribute>;
|
||||
|
||||
export const DataServiceComponent = React.memo(
|
||||
({
|
||||
name,
|
||||
props,
|
||||
parentPath = 'DataService',
|
||||
isInstantUpdate,
|
||||
addNotification
|
||||
}: DataServiceProps) => {
|
||||
const [open, setOpen] = useState(true);
|
||||
let fullAccessPath = parentPath;
|
||||
if (name) {
|
||||
fullAccessPath = parentPath.concat('.' + name);
|
||||
}
|
||||
const id = getIdFromFullAccessPath(fullAccessPath);
|
||||
|
||||
return (
|
||||
<div className="dataServiceComponent">
|
||||
<div className="dataServiceComponent" id={id}>
|
||||
<Card className="mb-3">
|
||||
<Card.Header
|
||||
onClick={() => setOpen(!open)}
|
||||
style={{ cursor: 'pointer' }} // Change cursor style on hover
|
||||
>
|
||||
{parentPath} {open ? <ChevronDown /> : <ChevronRight />}
|
||||
{fullAccessPath} {open ? <ChevronDown /> : <ChevronRight />}
|
||||
</Card.Header>
|
||||
<Collapse in={open}>
|
||||
<Card.Body>
|
||||
@@ -39,7 +48,7 @@ export const DataServiceComponent = React.memo(
|
||||
key={key}
|
||||
attribute={value}
|
||||
name={key}
|
||||
parentPath={parentPath}
|
||||
parentPath={fullAccessPath}
|
||||
isInstantUpdate={isInstantUpdate}
|
||||
addNotification={addNotification}
|
||||
/>
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import React, { useEffect, useRef } from 'react';
|
||||
import { InputGroup, Form, Row, Col } from 'react-bootstrap';
|
||||
import { emit_update } from '../socket';
|
||||
import { setAttribute } from '../socket';
|
||||
import { DocStringComponent } from './DocStringComponent';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
interface EnumComponentProps {
|
||||
name: string;
|
||||
@@ -9,7 +10,7 @@ interface EnumComponentProps {
|
||||
value: string;
|
||||
docString?: string;
|
||||
enumDict: Record<string, string>;
|
||||
addNotification: (string) => void;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
}
|
||||
|
||||
export const EnumComponent = React.memo((props: EnumComponentProps) => {
|
||||
@@ -33,13 +34,13 @@ export const EnumComponent = React.memo((props: EnumComponentProps) => {
|
||||
}, [props.value]);
|
||||
|
||||
const handleValueChange = (newValue: string) => {
|
||||
emit_update(name, parentPath, newValue);
|
||||
setAttribute(name, parentPath, newValue);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={'enumComponent'} id={parentPath.concat('.' + name)}>
|
||||
{process.env.NODE_ENV === 'development' && (
|
||||
<p>Render count: {renderCount.current}</p>
|
||||
<div>Render count: {renderCount.current}</div>
|
||||
)}
|
||||
<DocStringComponent docString={docString} />
|
||||
<Row>
|
||||
|
||||
@@ -9,6 +9,8 @@ import { StringComponent } from './StringComponent';
|
||||
import { ListComponent } from './ListComponent';
|
||||
import { DataServiceComponent, DataServiceJSON } from './DataServiceComponent';
|
||||
import { ImageComponent } from './ImageComponent';
|
||||
import { ColouredEnumComponent } from './ColouredEnumComponent';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
type AttributeType =
|
||||
| 'str'
|
||||
@@ -21,7 +23,8 @@ type AttributeType =
|
||||
| 'DataService'
|
||||
| 'Enum'
|
||||
| 'NumberSlider'
|
||||
| 'Image';
|
||||
| 'Image'
|
||||
| 'ColouredEnum';
|
||||
|
||||
type ValueType = boolean | string | number | object;
|
||||
export interface Attribute {
|
||||
@@ -38,7 +41,7 @@ type GenericComponentProps = {
|
||||
name: string;
|
||||
parentPath: string;
|
||||
isInstantUpdate: boolean;
|
||||
addNotification: (string) => void;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
};
|
||||
|
||||
export const GenericComponent = React.memo(
|
||||
@@ -151,8 +154,9 @@ export const GenericComponent = React.memo(
|
||||
} else if (attribute.type === 'DataService') {
|
||||
return (
|
||||
<DataServiceComponent
|
||||
name={name}
|
||||
props={attribute.value as DataServiceJSON}
|
||||
parentPath={parentPath.concat('.', name)}
|
||||
parentPath={parentPath}
|
||||
isInstantUpdate={isInstantUpdate}
|
||||
addNotification={addNotification}
|
||||
/>
|
||||
@@ -181,6 +185,19 @@ export const GenericComponent = React.memo(
|
||||
addNotification={addNotification}
|
||||
/>
|
||||
);
|
||||
} else if (attribute.type === 'ColouredEnum') {
|
||||
console.log(attribute);
|
||||
return (
|
||||
<ColouredEnumComponent
|
||||
name={name}
|
||||
parentPath={parentPath}
|
||||
docString={attribute.doc}
|
||||
value={String(attribute.value)}
|
||||
readOnly={attribute.readonly}
|
||||
enumDict={attribute.enum}
|
||||
addNotification={addNotification}
|
||||
/>
|
||||
);
|
||||
} else {
|
||||
return <div key={name}>{name}</div>;
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@ import React, { useEffect, useRef, useState } from 'react';
|
||||
import { Card, Collapse, Image } from 'react-bootstrap';
|
||||
import { DocStringComponent } from './DocStringComponent';
|
||||
import { ChevronDown, ChevronRight } from 'react-bootstrap-icons';
|
||||
import { getIdFromFullAccessPath } from '../utils/stringUtils';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
interface ImageComponentProps {
|
||||
name: string;
|
||||
@@ -10,7 +12,7 @@ interface ImageComponentProps {
|
||||
readOnly: boolean;
|
||||
docString: string;
|
||||
format: string;
|
||||
addNotification: (string) => void;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
}
|
||||
|
||||
export const ImageComponent = React.memo((props: ImageComponentProps) => {
|
||||
@@ -18,6 +20,7 @@ export const ImageComponent = React.memo((props: ImageComponentProps) => {
|
||||
|
||||
const renderCount = useRef(0);
|
||||
const [open, setOpen] = useState(true);
|
||||
const id = getIdFromFullAccessPath(parentPath.concat('.' + name));
|
||||
|
||||
useEffect(() => {
|
||||
renderCount.current++;
|
||||
@@ -28,7 +31,10 @@ export const ImageComponent = React.memo((props: ImageComponentProps) => {
|
||||
}, [props.value]);
|
||||
|
||||
return (
|
||||
<div className={'imageComponent'} id={parentPath.concat('.' + name)}>
|
||||
<div className={'imageComponent'} id={id}>
|
||||
{process.env.NODE_ENV === 'development' && (
|
||||
<div>Render count: {renderCount.current}</div>
|
||||
)}
|
||||
<Card>
|
||||
<Card.Header
|
||||
onClick={() => setOpen(!open)}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import React, { useEffect, useRef } from 'react';
|
||||
import { DocStringComponent } from './DocStringComponent';
|
||||
import { Attribute, GenericComponent } from './GenericComponent';
|
||||
import { getIdFromFullAccessPath } from '../utils/stringUtils';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
interface ListComponentProps {
|
||||
name: string;
|
||||
@@ -8,7 +10,7 @@ interface ListComponentProps {
|
||||
value: Attribute[];
|
||||
docString: string;
|
||||
isInstantUpdate: boolean;
|
||||
addNotification: (string) => void;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
}
|
||||
|
||||
export const ListComponent = React.memo((props: ListComponentProps) => {
|
||||
@@ -16,15 +18,16 @@ export const ListComponent = React.memo((props: ListComponentProps) => {
|
||||
props;
|
||||
|
||||
const renderCount = useRef(0);
|
||||
const id = getIdFromFullAccessPath(parentPath.concat('.' + name));
|
||||
|
||||
useEffect(() => {
|
||||
renderCount.current++;
|
||||
}, [props]);
|
||||
|
||||
return (
|
||||
<div className={'listComponent'} id={parentPath.concat(name)}>
|
||||
<div className={'listComponent'} id={id}>
|
||||
{process.env.NODE_ENV === 'development' && (
|
||||
<p>Render count: {renderCount.current}</p>
|
||||
<div>Render count: {renderCount.current}</div>
|
||||
)}
|
||||
<DocStringComponent docString={docString} />
|
||||
{value.map((item, index) => {
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import { emit_update } from '../socket';
|
||||
import { runMethod } from '../socket';
|
||||
import { Button, InputGroup, Form, Collapse } from 'react-bootstrap';
|
||||
import { DocStringComponent } from './DocStringComponent';
|
||||
import { getIdFromFullAccessPath } from '../utils/stringUtils';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
interface MethodProps {
|
||||
name: string;
|
||||
@@ -9,7 +11,7 @@ interface MethodProps {
|
||||
parameters: Record<string, string>;
|
||||
docString?: string;
|
||||
hideOutput?: boolean;
|
||||
addNotification: (string) => void;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
}
|
||||
|
||||
export const MethodComponent = React.memo((props: MethodProps) => {
|
||||
@@ -19,6 +21,7 @@ export const MethodComponent = React.memo((props: MethodProps) => {
|
||||
const [hideOutput, setHideOutput] = useState(false);
|
||||
// Add a new state variable to hold the list of function calls
|
||||
const [functionCalls, setFunctionCalls] = useState([]);
|
||||
const id = getIdFromFullAccessPath(parentPath.concat('.' + name));
|
||||
|
||||
useEffect(() => {
|
||||
renderCount.current++;
|
||||
@@ -44,18 +47,21 @@ export const MethodComponent = React.memo((props: MethodProps) => {
|
||||
const execute = async (event: React.FormEvent) => {
|
||||
event.preventDefault();
|
||||
|
||||
const args = {};
|
||||
const kwargs = {};
|
||||
Object.keys(props.parameters).forEach(
|
||||
(name) => (args[name] = event.target[name].value)
|
||||
(name) => (kwargs[name] = event.target[name].value)
|
||||
);
|
||||
emit_update(name, parentPath, { args: args }, (ack) => {
|
||||
runMethod(name, parentPath, kwargs, (ack) => {
|
||||
// Update the functionCalls state with the new call if we get an acknowledge msg
|
||||
if (ack !== undefined) {
|
||||
setFunctionCalls((prevCalls) => [...prevCalls, { name, args, result: ack }]);
|
||||
setFunctionCalls((prevCalls) => [
|
||||
...prevCalls,
|
||||
{ name, args: kwargs, result: ack }
|
||||
]);
|
||||
}
|
||||
});
|
||||
|
||||
triggerNotification(args);
|
||||
triggerNotification(kwargs);
|
||||
};
|
||||
|
||||
const args = Object.entries(props.parameters).map(([name, type], index) => {
|
||||
@@ -69,11 +75,9 @@ export const MethodComponent = React.memo((props: MethodProps) => {
|
||||
});
|
||||
|
||||
return (
|
||||
<div
|
||||
className="align-items-center methodComponent"
|
||||
id={parentPath.concat('.' + name)}>
|
||||
<div className="align-items-center methodComponent" id={id}>
|
||||
{process.env.NODE_ENV === 'development' && (
|
||||
<p>Render count: {renderCount.current}</p>
|
||||
<div>Render count: {renderCount.current}</div>
|
||||
)}
|
||||
<h5 onClick={() => setHideOutput(!hideOutput)} style={{ cursor: 'pointer' }}>
|
||||
Function: {name}
|
||||
@@ -81,11 +85,9 @@ export const MethodComponent = React.memo((props: MethodProps) => {
|
||||
</h5>
|
||||
<Form onSubmit={execute}>
|
||||
{args}
|
||||
<div>
|
||||
<Button variant="primary" type="submit">
|
||||
Execute
|
||||
</Button>
|
||||
</div>
|
||||
<Button variant="primary" type="submit">
|
||||
Execute
|
||||
</Button>
|
||||
</Form>
|
||||
|
||||
<Collapse in={!hideOutput}>
|
||||
|
||||
@@ -1,73 +1,71 @@
|
||||
import React from 'react';
|
||||
import { ToastContainer, Toast } from 'react-bootstrap';
|
||||
|
||||
export type LevelName = 'CRITICAL' | 'ERROR' | 'WARNING' | 'INFO' | 'DEBUG';
|
||||
export type Notification = {
|
||||
id: number;
|
||||
time: string;
|
||||
text: string;
|
||||
timeStamp: string;
|
||||
message: string;
|
||||
levelname: LevelName;
|
||||
};
|
||||
|
||||
type NotificationProps = {
|
||||
showNotification: boolean;
|
||||
notifications: Notification[];
|
||||
exceptions: Notification[];
|
||||
removeNotificationById: (id: number) => void;
|
||||
removeExceptionById: (id: number) => void;
|
||||
};
|
||||
|
||||
export const Notifications = React.memo((props: NotificationProps) => {
|
||||
const {
|
||||
showNotification,
|
||||
notifications,
|
||||
exceptions,
|
||||
removeExceptionById,
|
||||
removeNotificationById
|
||||
} = props;
|
||||
const { showNotification, notifications, removeNotificationById } = props;
|
||||
|
||||
return (
|
||||
<ToastContainer
|
||||
className="navbarOffset toastContainer"
|
||||
position="top-end"
|
||||
style={{ position: 'fixed' }}>
|
||||
{showNotification &&
|
||||
notifications.map((notification) => (
|
||||
<ToastContainer className="navbarOffset toastContainer" position="top-end">
|
||||
{notifications.map((notification) => {
|
||||
// Determine if the toast should be shown
|
||||
const shouldShow =
|
||||
notification.levelname === 'ERROR' ||
|
||||
notification.levelname === 'CRITICAL' ||
|
||||
(showNotification &&
|
||||
['WARNING', 'INFO', 'DEBUG'].includes(notification.levelname));
|
||||
|
||||
if (!shouldShow) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<Toast
|
||||
className="notificationToast"
|
||||
className={notification.levelname.toLowerCase() + 'Toast'}
|
||||
key={notification.id}
|
||||
onClose={() => removeNotificationById(notification.id)}
|
||||
onClick={() => {
|
||||
removeNotificationById(notification.id);
|
||||
}}
|
||||
onClick={() => removeNotificationById(notification.id)}
|
||||
onMouseLeave={() => {
|
||||
removeNotificationById(notification.id);
|
||||
if (notification.levelname !== 'ERROR') {
|
||||
removeNotificationById(notification.id);
|
||||
}
|
||||
}}
|
||||
show={true}
|
||||
autohide={true}
|
||||
delay={2000}>
|
||||
<Toast.Header closeButton={false} className="notificationToast text-right">
|
||||
<strong className="me-auto">Notification</strong>
|
||||
<small>{notification.time}</small>
|
||||
autohide={
|
||||
notification.levelname === 'WARNING' ||
|
||||
notification.levelname === 'INFO' ||
|
||||
notification.levelname === 'DEBUG'
|
||||
}
|
||||
delay={
|
||||
notification.levelname === 'WARNING' ||
|
||||
notification.levelname === 'INFO' ||
|
||||
notification.levelname === 'DEBUG'
|
||||
? 2000
|
||||
: undefined
|
||||
}>
|
||||
<Toast.Header
|
||||
closeButton={false}
|
||||
className={notification.levelname.toLowerCase() + 'Toast text-right'}>
|
||||
<strong className="me-auto">{notification.levelname}</strong>
|
||||
<small>{notification.timeStamp}</small>
|
||||
</Toast.Header>
|
||||
<Toast.Body>{notification.text}</Toast.Body>
|
||||
<Toast.Body>{notification.message}</Toast.Body>
|
||||
</Toast>
|
||||
))}
|
||||
{exceptions.map((exception) => (
|
||||
<Toast
|
||||
className="exceptionToast"
|
||||
key={exception.id}
|
||||
onClose={() => removeExceptionById(exception.id)}
|
||||
onClick={() => {
|
||||
removeExceptionById(exception.id);
|
||||
}}
|
||||
show={true}
|
||||
autohide={false}>
|
||||
<Toast.Header closeButton className="exceptionToast text-right">
|
||||
<strong className="me-auto">Exception</strong>
|
||||
<small>{exception.time}</small>
|
||||
</Toast.Header>
|
||||
<Toast.Body>{exception.text}</Toast.Body>
|
||||
</Toast>
|
||||
))}
|
||||
);
|
||||
})}
|
||||
</ToastContainer>
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import React, { useEffect, useRef, useState } from 'react';
|
||||
import { Form, InputGroup } from 'react-bootstrap';
|
||||
import { emit_update } from '../socket';
|
||||
import { setAttribute } from '../socket';
|
||||
import { DocStringComponent } from './DocStringComponent';
|
||||
import '../App.css';
|
||||
import { getIdFromFullAccessPath } from '../utils/stringUtils';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
// TODO: add button functionality
|
||||
|
||||
@@ -22,7 +24,7 @@ interface NumberComponentProps {
|
||||
value: number,
|
||||
callback?: (ack: unknown) => void
|
||||
) => void;
|
||||
addNotification: (string) => void;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
}
|
||||
|
||||
// TODO: highlight the digit that is being changed by setting both selectionStart and
|
||||
@@ -30,8 +32,8 @@ interface NumberComponentProps {
|
||||
const handleArrowKey = (
|
||||
key: string,
|
||||
value: string,
|
||||
selectionStart: number,
|
||||
selectionEnd: number
|
||||
selectionStart: number
|
||||
// selectionEnd: number
|
||||
) => {
|
||||
// Split the input value into the integer part and decimal part
|
||||
const parts = value.split('.');
|
||||
@@ -121,23 +123,25 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
||||
|
||||
// Whether to show the name infront of the component (false if used with a slider)
|
||||
const showName = props.showName !== undefined ? props.showName : true;
|
||||
// If emitUpdate is passed, use this instead of the emit_update from the socket
|
||||
// If emitUpdate is passed, use this instead of the setAttribute from the socket
|
||||
// Also used when used with a slider
|
||||
const emitUpdate =
|
||||
props.customEmitUpdate !== undefined ? props.customEmitUpdate : emit_update;
|
||||
props.customEmitUpdate !== undefined ? props.customEmitUpdate : setAttribute;
|
||||
|
||||
const renderCount = useRef(0);
|
||||
// Create a state for the cursor position
|
||||
const [cursorPosition, setCursorPosition] = useState(null);
|
||||
// Create a state for the input string
|
||||
const [inputString, setInputString] = useState(props.value.toString());
|
||||
const fullAccessPath = parentPath.concat('.' + name);
|
||||
const id = getIdFromFullAccessPath(fullAccessPath);
|
||||
|
||||
useEffect(() => {
|
||||
renderCount.current++;
|
||||
|
||||
// Set the cursor position after the component re-renders
|
||||
const inputElement = document.getElementsByName(
|
||||
parentPath.concat(name)
|
||||
fullAccessPath
|
||||
)[0] as HTMLInputElement;
|
||||
if (inputElement && cursorPosition !== null) {
|
||||
inputElement.setSelectionRange(cursorPosition, cursorPosition);
|
||||
@@ -214,6 +218,16 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
||||
// Select everything when pressing Ctrl + a
|
||||
target.setSelectionRange(0, target.value.length);
|
||||
return;
|
||||
} else if (key === '-') {
|
||||
if (selectionStart === 0 && !value.startsWith('-')) {
|
||||
newValue = '-' + value;
|
||||
selectionStart++;
|
||||
} else if (value.startsWith('-') && selectionStart === 1) {
|
||||
newValue = value.substring(1); // remove minus sign
|
||||
selectionStart--;
|
||||
} else {
|
||||
return; // Ignore "-" pressed in other positions
|
||||
}
|
||||
} else if (!isNaN(key) && key !== ' ') {
|
||||
// Check if a number key or a decimal point key is pressed
|
||||
({ value: newValue, selectionStart } = handleNumericKey(
|
||||
@@ -233,8 +247,8 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
||||
({ value: newValue, selectionStart } = handleArrowKey(
|
||||
key,
|
||||
value,
|
||||
selectionStart,
|
||||
selectionEnd
|
||||
selectionStart
|
||||
// selectionEnd
|
||||
));
|
||||
} else if (key === 'Backspace') {
|
||||
({ value: newValue, selectionStart } = handleBackspaceKey(
|
||||
@@ -275,9 +289,9 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="numberComponent" id={parentPath.concat('.' + name)}>
|
||||
{process.env.NODE_ENV === 'development' && showName && (
|
||||
<p>Render count: {renderCount.current}</p>
|
||||
<div className="numberComponent" id={id}>
|
||||
{process.env.NODE_ENV === 'development' && (
|
||||
<div>Render count: {renderCount.current}</div>
|
||||
)}
|
||||
<DocStringComponent docString={docString} />
|
||||
<div className="d-flex">
|
||||
@@ -287,7 +301,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
||||
type="text"
|
||||
value={inputString}
|
||||
disabled={readOnly}
|
||||
name={parentPath.concat(name)}
|
||||
name={fullAccessPath}
|
||||
onKeyDown={handleKeyDown}
|
||||
onBlur={handleBlur}
|
||||
className={isInstantUpdate && !readOnly ? 'instantUpdate' : ''}
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import React, { useEffect, useRef, useState } from 'react';
|
||||
import { InputGroup, Form, Row, Col, Collapse, ToggleButton } from 'react-bootstrap';
|
||||
import { emit_update } from '../socket';
|
||||
import { setAttribute } from '../socket';
|
||||
import { DocStringComponent } from './DocStringComponent';
|
||||
import { Slider } from '@mui/material';
|
||||
import { NumberComponent } from './NumberComponent';
|
||||
import { getIdFromFullAccessPath } from '../utils/stringUtils';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
interface SliderComponentProps {
|
||||
name: string;
|
||||
@@ -15,17 +17,12 @@ interface SliderComponentProps {
|
||||
docString: string;
|
||||
stepSize: number;
|
||||
isInstantUpdate: boolean;
|
||||
addNotification: (string) => void;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
}
|
||||
|
||||
export const SliderComponent = React.memo((props: SliderComponentProps) => {
|
||||
const renderCount = useRef(0);
|
||||
const [open, setOpen] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
renderCount.current++;
|
||||
});
|
||||
|
||||
const {
|
||||
name,
|
||||
parentPath,
|
||||
@@ -38,6 +35,12 @@ export const SliderComponent = React.memo((props: SliderComponentProps) => {
|
||||
isInstantUpdate,
|
||||
addNotification
|
||||
} = props;
|
||||
const fullAccessPath = parentPath.concat('.' + name);
|
||||
const id = getIdFromFullAccessPath(fullAccessPath);
|
||||
|
||||
useEffect(() => {
|
||||
renderCount.current++;
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
addNotification(`${parentPath}.${name} changed to ${value}.`);
|
||||
@@ -64,7 +67,7 @@ export const SliderComponent = React.memo((props: SliderComponentProps) => {
|
||||
max: number = props.max,
|
||||
stepSize: number = props.stepSize
|
||||
) => {
|
||||
emit_update(
|
||||
setAttribute(
|
||||
name,
|
||||
parentPath,
|
||||
{
|
||||
@@ -102,9 +105,9 @@ export const SliderComponent = React.memo((props: SliderComponentProps) => {
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="sliderComponent" id={parentPath.concat('.' + name)}>
|
||||
<div className="sliderComponent" id={id}>
|
||||
{process.env.NODE_ENV === 'development' && (
|
||||
<p>Render count: {renderCount.current}</p>
|
||||
<div>Render count: {renderCount.current}</div>
|
||||
)}
|
||||
|
||||
<DocStringComponent docString={docString} />
|
||||
@@ -145,6 +148,7 @@ export const SliderComponent = React.memo((props: SliderComponentProps) => {
|
||||
</Col>
|
||||
<Col xs="auto">
|
||||
<ToggleButton
|
||||
id={`button-${id}`}
|
||||
onClick={() => setOpen(!open)}
|
||||
type="checkbox"
|
||||
checked={open}
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import React, { useEffect, useRef, useState } from 'react';
|
||||
import { Form, InputGroup } from 'react-bootstrap';
|
||||
import { emit_update } from '../socket';
|
||||
import { setAttribute } from '../socket';
|
||||
import { DocStringComponent } from './DocStringComponent';
|
||||
import '../App.css';
|
||||
import { getIdFromFullAccessPath } from '../utils/stringUtils';
|
||||
import { LevelName } from './NotificationsComponent';
|
||||
|
||||
// TODO: add button functionality
|
||||
|
||||
@@ -13,7 +15,7 @@ interface StringComponentProps {
|
||||
readOnly: boolean;
|
||||
docString: string;
|
||||
isInstantUpdate: boolean;
|
||||
addNotification: (string) => void;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
}
|
||||
|
||||
export const StringComponent = React.memo((props: StringComponentProps) => {
|
||||
@@ -22,6 +24,8 @@ export const StringComponent = React.memo((props: StringComponentProps) => {
|
||||
|
||||
const renderCount = useRef(0);
|
||||
const [inputString, setInputString] = useState(props.value);
|
||||
const fullAccessPath = parentPath.concat('.' + name);
|
||||
const id = getIdFromFullAccessPath(fullAccessPath);
|
||||
|
||||
useEffect(() => {
|
||||
renderCount.current++;
|
||||
@@ -38,26 +42,26 @@ export const StringComponent = React.memo((props: StringComponentProps) => {
|
||||
const handleChange = (event) => {
|
||||
setInputString(event.target.value);
|
||||
if (isInstantUpdate) {
|
||||
emit_update(name, parentPath, event.target.value);
|
||||
setAttribute(name, parentPath, event.target.value);
|
||||
}
|
||||
};
|
||||
|
||||
const handleKeyDown = (event) => {
|
||||
if (event.key === 'Enter' && !isInstantUpdate) {
|
||||
emit_update(name, parentPath, inputString);
|
||||
setAttribute(name, parentPath, inputString);
|
||||
}
|
||||
};
|
||||
|
||||
const handleBlur = () => {
|
||||
if (!isInstantUpdate) {
|
||||
emit_update(name, parentPath, inputString);
|
||||
setAttribute(name, parentPath, inputString);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={'stringComponent'} id={parentPath.concat(name)}>
|
||||
<div className={'stringComponent'} id={id}>
|
||||
{process.env.NODE_ENV === 'development' && (
|
||||
<p>Render count: {renderCount.current}</p>
|
||||
<div>Render count: {renderCount.current}</div>
|
||||
)}
|
||||
<DocStringComponent docString={docString} />
|
||||
<InputGroup>
|
||||
|
||||
@@ -9,15 +9,28 @@ console.debug('Websocket: ', URL);
|
||||
|
||||
export const socket = io(URL, { path: '/ws/socket.io', transports: ['websocket'] });
|
||||
|
||||
export const emit_update = (
|
||||
export const setAttribute = (
|
||||
name: string,
|
||||
parentPath: string,
|
||||
value: unknown,
|
||||
callback?: (ack: unknown) => void
|
||||
) => {
|
||||
if (callback) {
|
||||
socket.emit('frontend_update', { name, parent_path: parentPath, value }, callback);
|
||||
socket.emit('set_attribute', { name, parent_path: parentPath, value }, callback);
|
||||
} else {
|
||||
socket.emit('frontend_update', { name, parent_path: parentPath, value });
|
||||
socket.emit('set_attribute', { name, parent_path: parentPath, value });
|
||||
}
|
||||
};
|
||||
|
||||
export const runMethod = (
|
||||
name: string,
|
||||
parentPath: string,
|
||||
kwargs: Record<string, unknown>,
|
||||
callback?: (ack: unknown) => void
|
||||
) => {
|
||||
if (callback) {
|
||||
socket.emit('run_method', { name, parent_path: parentPath, kwargs }, callback);
|
||||
} else {
|
||||
socket.emit('run_method', { name, parent_path: parentPath, kwargs });
|
||||
}
|
||||
};
|
||||
|
||||
108
frontend/src/utils/stateUtils.ts
Normal file
108
frontend/src/utils/stateUtils.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
export interface SerializedValue {
|
||||
type: string;
|
||||
value: Record<string, unknown> | Array<Record<string, unknown>>;
|
||||
readonly: boolean;
|
||||
doc: string | null;
|
||||
async?: boolean;
|
||||
parameters?: unknown;
|
||||
}
|
||||
export type State = Record<string, SerializedValue> | null;
|
||||
|
||||
export function setNestedValueByPath(
|
||||
serializationDict: Record<string, SerializedValue>,
|
||||
path: string,
|
||||
serializedValue: SerializedValue
|
||||
): Record<string, SerializedValue> {
|
||||
const parentPathParts = path.split('.').slice(0, -1);
|
||||
const attrName = path.split('.').pop();
|
||||
|
||||
if (!attrName) {
|
||||
throw new Error('Invalid path');
|
||||
}
|
||||
|
||||
let currentSerializedValue: SerializedValue;
|
||||
const newSerializationDict: Record<string, SerializedValue> = JSON.parse(
|
||||
JSON.stringify(serializationDict)
|
||||
);
|
||||
|
||||
let currentDict = newSerializationDict;
|
||||
|
||||
try {
|
||||
for (const pathPart of parentPathParts) {
|
||||
currentSerializedValue = getNextLevelDictByKey(currentDict, pathPart, false);
|
||||
// @ts-expect-error The value will be of type SerializedValue as we are still
|
||||
// looping through the parent parts
|
||||
currentDict = currentSerializedValue['value'];
|
||||
}
|
||||
|
||||
currentSerializedValue = getNextLevelDictByKey(currentDict, attrName, true);
|
||||
|
||||
Object.assign(currentSerializedValue, serializedValue);
|
||||
return newSerializationDict;
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return currentDict;
|
||||
}
|
||||
}
|
||||
|
||||
function getNextLevelDictByKey(
|
||||
serializationDict: Record<string, SerializedValue>,
|
||||
attrName: string,
|
||||
allowAppend: boolean = false
|
||||
): SerializedValue {
|
||||
const [key, index] = parseListAttrAndIndex(attrName);
|
||||
let currentDict: SerializedValue;
|
||||
|
||||
try {
|
||||
if (index !== null) {
|
||||
if (!serializationDict[key] || !Array.isArray(serializationDict[key]['value'])) {
|
||||
throw new Error(`Expected an array at '${key}', but found something else.`);
|
||||
}
|
||||
|
||||
if (index < serializationDict[key]['value'].length) {
|
||||
currentDict = serializationDict[key]['value'][index];
|
||||
} else if (allowAppend && index === serializationDict[key]['value'].length) {
|
||||
// Appending to list
|
||||
// @ts-expect-error When the index is not null, I expect an array
|
||||
serializationDict[key]['value'].push({});
|
||||
currentDict = serializationDict[key]['value'][index];
|
||||
} else {
|
||||
throw new Error(`Index out of range for '${key}[${index}]'.`);
|
||||
}
|
||||
} else {
|
||||
if (!serializationDict[key]) {
|
||||
throw new Error(`Key '${key}' not found.`);
|
||||
}
|
||||
currentDict = serializationDict[key];
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(`Error occurred trying to access '${attrName}': ${error}`);
|
||||
}
|
||||
|
||||
if (typeof currentDict !== 'object' || currentDict === null) {
|
||||
throw new Error(
|
||||
`Expected a dictionary at '${attrName}', but found type '${typeof currentDict}' instead.`
|
||||
);
|
||||
}
|
||||
|
||||
return currentDict;
|
||||
}
|
||||
|
||||
function parseListAttrAndIndex(attrString: string): [string, number | null] {
|
||||
let index: number | null = null;
|
||||
let attrName = attrString;
|
||||
|
||||
if (attrString.includes('[') && attrString.endsWith(']')) {
|
||||
const parts = attrString.split('[');
|
||||
attrName = parts[0];
|
||||
const indexPart = parts[1].slice(0, -1); // Removes the closing ']'
|
||||
|
||||
if (!isNaN(parseInt(indexPart))) {
|
||||
index = parseInt(indexPart);
|
||||
} else {
|
||||
console.error(`Invalid index format in key: ${attrString}`);
|
||||
}
|
||||
}
|
||||
|
||||
return [attrName, index];
|
||||
}
|
||||
12
frontend/src/utils/stringUtils.ts
Normal file
12
frontend/src/utils/stringUtils.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
export function getIdFromFullAccessPath(fullAccessPath: string) {
|
||||
// Replace '].' with a single dash
|
||||
let id = fullAccessPath.replace(/\]\./g, '-');
|
||||
|
||||
// Replace any character that is not a word character or underscore with a dash
|
||||
id = id.replace(/[^\w_]+/g, '-');
|
||||
|
||||
// Remove any trailing dashes
|
||||
id = id.replace(/-+$/, '');
|
||||
|
||||
return id;
|
||||
}
|
||||
@@ -4,10 +4,13 @@ edit_uri: blob/docs/docs/
|
||||
nav:
|
||||
- Home: index.md
|
||||
- Getting Started: getting-started.md
|
||||
- User Guide:
|
||||
- Components Guide: user-guide/Components.md
|
||||
- Developer Guide:
|
||||
- Developer Guide: dev-guide/README.md
|
||||
- API Reference: dev-guide/api.md
|
||||
- Adding Components: dev-guide/Adding_Components.md
|
||||
- Observer Pattern Implementation: dev-guide/Observer_Pattern_Implementation.md # <-- New section
|
||||
- About:
|
||||
- Release Notes: about/release-notes.md
|
||||
- Contributing: about/contributing.md
|
||||
@@ -22,7 +25,6 @@ markdown_extensions:
|
||||
- smarty
|
||||
- toc:
|
||||
permalink: true
|
||||
baselevel: 4
|
||||
- pymdownx.highlight:
|
||||
anchor_linenums: true
|
||||
- pymdownx.snippets
|
||||
@@ -38,5 +40,3 @@ plugins:
|
||||
|
||||
watch:
|
||||
- src/pydase
|
||||
|
||||
|
||||
1508
poetry.lock
generated
1508
poetry.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,2 +0,0 @@
|
||||
[virtualenvs]
|
||||
in-project = true
|
||||
@@ -1,6 +1,6 @@
|
||||
[tool.poetry]
|
||||
name = "pydase"
|
||||
version = "0.1.2"
|
||||
version = "0.4.0"
|
||||
description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases."
|
||||
authors = ["Mose Mueller <mosmuell@ethz.ch>"]
|
||||
readme = "README.md"
|
||||
@@ -10,7 +10,6 @@ packages = [{ include = "pydase", from = "src" }]
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.10"
|
||||
rpyc = "^5.3.1"
|
||||
loguru = "^0.7.0"
|
||||
fastapi = "^0.100.0"
|
||||
uvicorn = "^0.22.0"
|
||||
toml = "^0.10.2"
|
||||
@@ -20,23 +19,21 @@ confz = "^2.0.0"
|
||||
pint = "^0.22"
|
||||
pillow = "^10.0.0"
|
||||
|
||||
[tool.poetry.group.dev]
|
||||
optional = true
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
types-toml = "^0.10.8.6"
|
||||
pytest = "^7.4.0"
|
||||
pytest-cov = "^4.1.0"
|
||||
mypy = "^1.4.1"
|
||||
black = "^23.1.0"
|
||||
isort = "^5.12.0"
|
||||
flake8 = "^5.0.4"
|
||||
flake8-use-fstring = "^1.4"
|
||||
flake8-functions = "^0.0.7"
|
||||
flake8-comprehensions = "^3.11.1"
|
||||
flake8-pep585 = "^0.1.7"
|
||||
flake8-pep604 = "^0.1.0"
|
||||
flake8-eradicate = "^1.4.0"
|
||||
matplotlib = "^3.7.2"
|
||||
pyright = "^1.1.323"
|
||||
pytest-mock = "^3.11.1"
|
||||
ruff = "^0.1.5"
|
||||
|
||||
[tool.poetry.group.docs]
|
||||
optional = true
|
||||
|
||||
[tool.poetry.group.docs.dependencies]
|
||||
mkdocs = "^1.5.2"
|
||||
@@ -48,38 +45,59 @@ pymdown-extensions = "^10.1"
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
||||
[tool.ruff]
|
||||
target-version = "py310" # Always generate Python 3.10-compatible code
|
||||
select = [
|
||||
"ASYNC", # flake8-async
|
||||
"C4", # flake8-comprehensions
|
||||
"C901", # mccabe complex-structure
|
||||
"E", # pycodestyle errors
|
||||
"ERA", # eradicate
|
||||
"F", # pyflakes
|
||||
"FLY", # flynt
|
||||
"G", # flake8-logging-format
|
||||
"I", # isort
|
||||
"ICN", # flake8-import-conventions
|
||||
"INP", # flake8-no-pep420
|
||||
"ISC", # flake8-implicit-str-concat
|
||||
"N", # pep8-naming
|
||||
"NPY", # NumPy-specific rules
|
||||
"PERF", # perflint
|
||||
"PIE", # flake8-pie
|
||||
"PL", # pylint
|
||||
"PYI", # flake8-pyi
|
||||
"Q", # flake8-quotes
|
||||
"RET", # flake8-return
|
||||
"RUF", # Ruff-specific rules
|
||||
"SIM", # flake8-simplify
|
||||
"TID", # flake8-tidy-imports
|
||||
"TCH", # flake8-type-checking
|
||||
"UP", # pyupgrade
|
||||
"YTT", # flake8-2020
|
||||
"W", # pycodestyle warnings
|
||||
]
|
||||
ignore = [
|
||||
"E203", # whitespace-before-punctuation
|
||||
"W292", # missing-newline-at-end-of-file
|
||||
"PERF203", # try-except-in-loop
|
||||
]
|
||||
extend-exclude = [
|
||||
"docs", "frontend"
|
||||
]
|
||||
|
||||
[tool.ruff.lint.mccabe]
|
||||
max-complexity = 7
|
||||
|
||||
|
||||
[tool.pyright]
|
||||
include = ["src/pydase", "tests"]
|
||||
exclude = ["**/node_modules", "**/__pycache__", "docs", "frontend"]
|
||||
venvPath = "."
|
||||
venv = ".venv"
|
||||
include = ["src/pydase"]
|
||||
typeCheckingMode = "basic"
|
||||
reportUnknownMemberType = true
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
exclude = '''
|
||||
/(
|
||||
\.git
|
||||
| \.mypy_cache
|
||||
| \.tox
|
||||
| venv
|
||||
| \.venv
|
||||
| _build
|
||||
| buck-out
|
||||
| build
|
||||
| dist
|
||||
)/
|
||||
'''
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
|
||||
[tool.mypy]
|
||||
mypy_path = "src/"
|
||||
show_error_codes = true
|
||||
disallow_untyped_defs = true
|
||||
disallow_untyped_calls = true
|
||||
disallow_incomplete_defs = true
|
||||
disallow_any_generics = true
|
||||
check_untyped_defs = true
|
||||
ignore_missing_imports = false
|
||||
|
||||
@@ -27,10 +27,12 @@ print(my_service.voltage.value) # Output: 5
|
||||
```
|
||||
"""
|
||||
|
||||
from pydase.components.coloured_enum import ColouredEnum
|
||||
from pydase.components.image import Image
|
||||
from pydase.components.number_slider import NumberSlider
|
||||
|
||||
__all__ = [
|
||||
"NumberSlider",
|
||||
"Image",
|
||||
"ColouredEnum",
|
||||
]
|
||||
|
||||
59
src/pydase/components/coloured_enum.py
Normal file
59
src/pydase/components/coloured_enum.py
Normal file
@@ -0,0 +1,59 @@
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class ColouredEnum(Enum):
|
||||
"""
|
||||
Represents a UI element that can display colour-coded text based on its value.
|
||||
|
||||
This class extends the standard Enum but requires its values to be valid CSS
|
||||
colour codes. Supported colour formats include:
|
||||
- Hexadecimal colours
|
||||
- Hexadecimal colours with transparency
|
||||
- RGB colours
|
||||
- RGBA colours
|
||||
- HSL colours
|
||||
- HSLA colours
|
||||
- Predefined/Cross-browser colour names
|
||||
Refer to the this website for more details on colour formats:
|
||||
(https://www.w3schools.com/cssref/css_colours_legal.php)
|
||||
|
||||
The behavior of this component in the UI depends on how it's defined in the data
|
||||
service:
|
||||
- As property with a setter or as attribute: Renders as a dropdown menu,
|
||||
allowing users to select and change its value from the frontend.
|
||||
- As property without a setter: Displays as a coloured box with the key of the
|
||||
`ColouredEnum` as text inside, serving as a visual indicator without user
|
||||
interaction.
|
||||
|
||||
Example:
|
||||
--------
|
||||
```python
|
||||
import pydase.components as pyc
|
||||
import pydase
|
||||
|
||||
class MyStatus(pyc.ColouredEnum):
|
||||
PENDING = "#FFA500" # Orange
|
||||
RUNNING = "#0000FF80" # Transparent Blue
|
||||
PAUSED = "rgb(169, 169, 169)" # Dark Gray
|
||||
RETRYING = "rgba(255, 255, 0, 0.3)" # Transparent Yellow
|
||||
COMPLETED = "hsl(120, 100%, 50%)" # Green
|
||||
FAILED = "hsla(0, 100%, 50%, 0.7)" # Transparent Red
|
||||
CANCELLED = "SlateGray" # Slate Gray
|
||||
|
||||
class StatusExample(pydase.DataService):
|
||||
_status = MyStatus.RUNNING
|
||||
|
||||
@property
|
||||
def status(self) -> MyStatus:
|
||||
return self._status
|
||||
|
||||
@status.setter
|
||||
def status(self, value: MyStatus) -> None:
|
||||
# Custom logic here...
|
||||
self._status = value
|
||||
|
||||
# Example usage:
|
||||
my_service = StatusExample()
|
||||
my_service.status = MyStatus.FAILED
|
||||
```
|
||||
"""
|
||||
@@ -1,25 +1,27 @@
|
||||
import base64
|
||||
import io
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
from typing import TYPE_CHECKING
|
||||
from urllib.request import urlopen
|
||||
|
||||
import PIL.Image
|
||||
from loguru import logger
|
||||
import PIL.Image # type: ignore[import-untyped]
|
||||
|
||||
from pydase.data_service.data_service import DataService
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from matplotlib.figure import Figure
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Image(DataService):
|
||||
def __init__(
|
||||
self,
|
||||
) -> None:
|
||||
super().__init__()
|
||||
self._value: str = ""
|
||||
self._format: str = ""
|
||||
super().__init__()
|
||||
|
||||
@property
|
||||
def value(self) -> str:
|
||||
@@ -31,19 +33,19 @@ class Image(DataService):
|
||||
|
||||
def load_from_path(self, path: Path | str) -> None:
|
||||
with PIL.Image.open(path) as image:
|
||||
self._load_from_PIL(image)
|
||||
self._load_from_pil(image)
|
||||
|
||||
def load_from_matplotlib_figure(self, fig: "Figure", format_: str = "png") -> None:
|
||||
buffer = io.BytesIO()
|
||||
fig.savefig(buffer, format=format_) # type: ignore
|
||||
fig.savefig(buffer, format=format_)
|
||||
value_ = base64.b64encode(buffer.getvalue())
|
||||
self._load_from_base64(value_, format_)
|
||||
|
||||
def load_from_url(self, url: str) -> None:
|
||||
image = PIL.Image.open(urlopen(url))
|
||||
self._load_from_PIL(image)
|
||||
self._load_from_pil(image)
|
||||
|
||||
def load_from_base64(self, value_: bytes, format_: Optional[str] = None) -> None:
|
||||
def load_from_base64(self, value_: bytes, format_: str | None = None) -> None:
|
||||
if format_ is None:
|
||||
format_ = self._get_image_format_from_bytes(value_)
|
||||
if format_ is None:
|
||||
@@ -54,11 +56,11 @@ class Image(DataService):
|
||||
self._load_from_base64(value_, format_)
|
||||
|
||||
def _load_from_base64(self, value_: bytes, format_: str) -> None:
|
||||
value = value_.decode("utf-8") if isinstance(value_, bytes) else value_
|
||||
value = value_.decode("utf-8")
|
||||
self._value = value
|
||||
self._format = format_
|
||||
|
||||
def _load_from_PIL(self, image: PIL.Image.Image) -> None:
|
||||
def _load_from_pil(self, image: PIL.Image.Image) -> None:
|
||||
if image.format is not None:
|
||||
format_ = image.format
|
||||
buffer = io.BytesIO()
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import logging
|
||||
from typing import Any, Literal
|
||||
|
||||
from loguru import logger
|
||||
|
||||
from pydase.data_service.data_service import DataService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class NumberSlider(DataService):
|
||||
"""
|
||||
@@ -12,15 +13,15 @@ class NumberSlider(DataService):
|
||||
|
||||
Parameters:
|
||||
-----------
|
||||
value (float | int, optional):
|
||||
value (float, optional):
|
||||
The initial value of the slider. Defaults to 0.
|
||||
min (float, optional):
|
||||
The minimum value of the slider. Defaults to 0.
|
||||
max (float, optional):
|
||||
The maximum value of the slider. Defaults to 100.
|
||||
step_size (float | int, optional):
|
||||
step_size (float, optional):
|
||||
The increment/decrement step size of the slider. Defaults to 1.0.
|
||||
type (Literal["int"] | Literal["float"], optional):
|
||||
type (Literal["int", "float"], optional):
|
||||
The type of the slider value. Determines if the value is an integer or float.
|
||||
Defaults to "float".
|
||||
|
||||
@@ -37,25 +38,24 @@ class NumberSlider(DataService):
|
||||
```
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
def __init__( # noqa: PLR0913
|
||||
self,
|
||||
value: float | int = 0,
|
||||
min: float = 0.0,
|
||||
max: float = 100.0,
|
||||
step_size: float | int = 1.0,
|
||||
type: Literal["int"] | Literal["float"] = "float",
|
||||
value: float = 0,
|
||||
min_: float = 0.0,
|
||||
max_: float = 100.0,
|
||||
step_size: float = 1.0,
|
||||
type_: Literal["int", "float"] = "float",
|
||||
) -> None:
|
||||
if type not in {"float", "int"}:
|
||||
logger.error(f"Unknown type '{type}'. Using 'float'.")
|
||||
type = "float"
|
||||
super().__init__()
|
||||
if type_ not in {"float", "int"}:
|
||||
logger.error("Unknown type '%s'. Using 'float'.", type_)
|
||||
type_ = "float"
|
||||
|
||||
self._type = type
|
||||
self._type = type_
|
||||
self.step_size = step_size
|
||||
self.value = value
|
||||
self.min = min
|
||||
self.max = max
|
||||
|
||||
super().__init__()
|
||||
self.min = min_
|
||||
self.max = max_
|
||||
|
||||
def __setattr__(self, name: str, value: Any) -> None:
|
||||
if name in ["value", "step_size"]:
|
||||
|
||||
@@ -3,7 +3,7 @@ from typing import Literal
|
||||
from confz import BaseConfig, EnvSource
|
||||
|
||||
|
||||
class OperationMode(BaseConfig): # type: ignore
|
||||
environment: Literal["development"] | Literal["production"] = "development"
|
||||
class OperationMode(BaseConfig): # type: ignore[misc]
|
||||
environment: Literal["development", "production"] = "development"
|
||||
|
||||
CONFIG_SOURCES = EnvSource(allow=["ENVIRONMENT"])
|
||||
|
||||
@@ -1,16 +1,15 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import ABC
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from pydase.observer_pattern.observable.observable import Observable
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .callback_manager import CallbackManager
|
||||
from .data_service import DataService
|
||||
from .task_manager import TaskManager
|
||||
from pydase.data_service.data_service import DataService
|
||||
from pydase.data_service.task_manager import TaskManager
|
||||
|
||||
|
||||
class AbstractDataService(ABC):
|
||||
class AbstractDataService(Observable):
|
||||
__root__: DataService
|
||||
_task_manager: TaskManager
|
||||
_callback_manager: CallbackManager
|
||||
_autostart_tasks: dict[str, tuple[Any]]
|
||||
|
||||
@@ -1,400 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import inspect
|
||||
from collections.abc import Callable
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from loguru import logger
|
||||
|
||||
from pydase.data_service.abstract_data_service import AbstractDataService
|
||||
from pydase.utils.helpers import get_class_and_instance_attributes
|
||||
|
||||
from .data_service_list import DataServiceList
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .data_service import DataService
|
||||
|
||||
|
||||
class CallbackManager:
|
||||
_notification_callbacks: list[Callable[[str, str, Any], Any]] = []
|
||||
"""
|
||||
A list of callback functions that are executed when a change occurs in the
|
||||
DataService instance. These functions are intended to handle or respond to these
|
||||
changes in some way, such as emitting a socket.io message to the frontend.
|
||||
|
||||
Each function in this list should be a callable that accepts three parameters:
|
||||
|
||||
- parent_path (str): The path to the parent of the attribute that was changed.
|
||||
- name (str): The name of the attribute that was changed.
|
||||
- value (Any): The new value of the attribute.
|
||||
|
||||
A callback function can be added to this list using the add_notification_callback
|
||||
method. Whenever a change in the DataService instance occurs (or in its nested
|
||||
DataService or DataServiceList instances), the emit_notification method is invoked,
|
||||
which in turn calls all the callback functions in _notification_callbacks with the
|
||||
appropriate arguments.
|
||||
|
||||
This implementation follows the observer pattern, with the DataService instance as
|
||||
the "subject" and the callback functions as the "observers".
|
||||
"""
|
||||
_list_mapping: dict[int, DataServiceList] = {}
|
||||
"""
|
||||
A dictionary mapping the id of the original lists to the corresponding
|
||||
DataServiceList instances.
|
||||
This is used to ensure that all references to the same list within the DataService
|
||||
object point to the same DataServiceList, so that any modifications to that list can
|
||||
be tracked consistently. The keys of the dictionary are the ids of the original
|
||||
lists, and the values are the DataServiceList instances that wrap these lists.
|
||||
"""
|
||||
|
||||
def __init__(self, service: DataService) -> None:
|
||||
self.callbacks: set[Callable[[str, Any], None]] = set()
|
||||
self.service = service
|
||||
|
||||
def _register_list_change_callbacks( # noqa: C901
|
||||
self, obj: "AbstractDataService", parent_path: str
|
||||
) -> None:
|
||||
"""
|
||||
This method ensures that notifications are emitted whenever a list attribute of
|
||||
a DataService instance changes. These notifications pertain solely to the list
|
||||
item changes, not to changes in attributes of objects within the list.
|
||||
|
||||
The method works by converting all list attributes (both at the class and
|
||||
instance levels) into DataServiceList objects. Each DataServiceList is then
|
||||
assigned a callback that is triggered whenever an item in the list is updated.
|
||||
The callback emits a notification, but only if the DataService instance was the
|
||||
root instance when the callback was registered.
|
||||
|
||||
This method operates recursively, processing the input object and all nested
|
||||
attributes that are instances of DataService. While navigating the structure,
|
||||
it constructs a path for each attribute that traces back to the root. This path
|
||||
is included in any emitted notifications to facilitate identification of the
|
||||
source of a change.
|
||||
|
||||
Parameters:
|
||||
-----------
|
||||
obj: DataService
|
||||
The target object to be processed. All list attributes (and those of its
|
||||
nested DataService attributes) will be converted into DataServiceList
|
||||
objects.
|
||||
parent_path: str
|
||||
The access path for the parent object. Used to construct the full access
|
||||
path for the notifications.
|
||||
"""
|
||||
|
||||
# Convert all list attributes (both class and instance) to DataServiceList
|
||||
attrs = get_class_and_instance_attributes(obj)
|
||||
|
||||
for attr_name, attr_value in attrs.items():
|
||||
if isinstance(attr_value, AbstractDataService):
|
||||
new_path = f"{parent_path}.{attr_name}"
|
||||
self._register_list_change_callbacks(attr_value, new_path)
|
||||
elif isinstance(attr_value, list):
|
||||
# Create callback for current attr_name
|
||||
# Default arguments solve the late binding problem by capturing the
|
||||
# value at the time the lambda is defined, not when it is called. This
|
||||
# prevents attr_name from being overwritten in the next loop iteration.
|
||||
callback = (
|
||||
lambda index, value, attr_name=attr_name: self.service._callback_manager.emit_notification(
|
||||
parent_path=parent_path,
|
||||
name=f"{attr_name}[{index}]",
|
||||
value=value,
|
||||
)
|
||||
if self.service == self.service.__root__
|
||||
else None
|
||||
)
|
||||
|
||||
# Check if attr_value is already a DataServiceList or in the mapping
|
||||
if isinstance(attr_value, DataServiceList):
|
||||
attr_value.add_callback(callback)
|
||||
continue
|
||||
if id(attr_value) in self._list_mapping:
|
||||
notifying_list = self._list_mapping[id(attr_value)]
|
||||
notifying_list.add_callback(callback)
|
||||
else:
|
||||
notifying_list = DataServiceList(attr_value, callback=[callback])
|
||||
self._list_mapping[id(attr_value)] = notifying_list
|
||||
|
||||
setattr(obj, attr_name, notifying_list)
|
||||
|
||||
# recursively add callbacks to list attributes of DataService instances
|
||||
for i, item in enumerate(attr_value):
|
||||
if isinstance(item, AbstractDataService):
|
||||
new_path = f"{parent_path}.{attr_name}[{i}]"
|
||||
self._register_list_change_callbacks(item, new_path)
|
||||
|
||||
def _register_DataService_instance_callbacks(
|
||||
self, obj: "AbstractDataService", parent_path: str
|
||||
) -> None:
|
||||
"""
|
||||
This function is a key part of the observer pattern implemented by the
|
||||
DataService class.
|
||||
Its purpose is to allow the system to automatically send out notifications
|
||||
whenever an attribute of a DataService instance is updated, which is especially
|
||||
useful when the DataService instance is part of a nested structure.
|
||||
|
||||
It works by recursively registering callbacks for a given DataService instance
|
||||
and all of its nested attributes. Each callback is responsible for emitting a
|
||||
notification when the attribute it is attached to is modified.
|
||||
|
||||
This function ensures that only the root DataService instance (the one directly
|
||||
exposed to the user or another system via rpyc) emits notifications.
|
||||
|
||||
Each notification contains a 'parent_path' that traces the attribute's location
|
||||
within the nested DataService structure, starting from the root. This makes it
|
||||
easier for observers to determine exactly where a change has occurred.
|
||||
|
||||
Parameters:
|
||||
-----------
|
||||
obj: DataService
|
||||
The target object on which callbacks are to be registered.
|
||||
parent_path: str
|
||||
The access path for the parent object. This is used to construct the full
|
||||
access path for the notifications.
|
||||
"""
|
||||
|
||||
# Create and register a callback for the object
|
||||
# only emit the notification when the call was registered by the root object
|
||||
callback: Callable[[str, Any], None] = (
|
||||
lambda name, value: obj._callback_manager.emit_notification(
|
||||
parent_path=parent_path, name=name, value=value
|
||||
)
|
||||
if self.service == obj.__root__
|
||||
and not name.startswith("_") # we are only interested in public attributes
|
||||
and not isinstance(
|
||||
getattr(type(obj), name, None), property
|
||||
) # exlude proerty notifications -> those are handled in separate callbacks
|
||||
else None
|
||||
)
|
||||
|
||||
obj._callback_manager.callbacks.add(callback)
|
||||
|
||||
# Recursively register callbacks for all nested attributes of the object
|
||||
attrs = get_class_and_instance_attributes(obj)
|
||||
|
||||
for nested_attr_name, nested_attr in attrs.items():
|
||||
if isinstance(nested_attr, DataServiceList):
|
||||
self._register_list_callbacks(
|
||||
nested_attr, parent_path, nested_attr_name
|
||||
)
|
||||
elif isinstance(nested_attr, AbstractDataService):
|
||||
self._register_service_callbacks(
|
||||
nested_attr, parent_path, nested_attr_name
|
||||
)
|
||||
|
||||
def _register_list_callbacks(
|
||||
self, nested_attr: list[Any], parent_path: str, attr_name: str
|
||||
) -> None:
|
||||
"""Handles registration of callbacks for list attributes"""
|
||||
for i, list_item in enumerate(nested_attr):
|
||||
if isinstance(list_item, AbstractDataService):
|
||||
self._register_service_callbacks(
|
||||
list_item, parent_path, f"{attr_name}[{i}]"
|
||||
)
|
||||
|
||||
def _register_service_callbacks(
|
||||
self, nested_attr: "AbstractDataService", parent_path: str, attr_name: str
|
||||
) -> None:
|
||||
"""Handles registration of callbacks for DataService attributes"""
|
||||
|
||||
# as the DataService is an attribute of self, change the root object
|
||||
# use the dictionary to not trigger callbacks on initialised objects
|
||||
nested_attr.__dict__["__root__"] = self.service.__root__
|
||||
|
||||
new_path = f"{parent_path}.{attr_name}"
|
||||
self._register_DataService_instance_callbacks(nested_attr, new_path)
|
||||
|
||||
def __register_recursive_parameter_callback(
|
||||
self,
|
||||
obj: "AbstractDataService | DataServiceList",
|
||||
callback: Callable[[str | int, Any], None],
|
||||
) -> None:
|
||||
"""
|
||||
Register callback to a DataService or DataServiceList instance and its nested
|
||||
instances.
|
||||
|
||||
For a DataService, this method traverses its attributes and recursively adds the
|
||||
callback for nested DataService or DataServiceList instances. For a
|
||||
DataServiceList,
|
||||
the callback is also triggered when an item gets reassigned.
|
||||
"""
|
||||
|
||||
if isinstance(obj, DataServiceList):
|
||||
# emits callback when item in list gets reassigned
|
||||
obj.add_callback(callback=callback)
|
||||
obj_list: DataServiceList | list[AbstractDataService] = obj
|
||||
else:
|
||||
obj_list = [obj]
|
||||
|
||||
# this enables notifications when a class instance was changed (-> item is
|
||||
# changed, not reassigned)
|
||||
for item in obj_list:
|
||||
if isinstance(item, AbstractDataService):
|
||||
item._callback_manager.callbacks.add(callback)
|
||||
for attr_name in set(dir(item)) - set(dir(object)) - {"__root__"}:
|
||||
attr_value = getattr(item, attr_name)
|
||||
if isinstance(attr_value, (AbstractDataService, DataServiceList)):
|
||||
self.__register_recursive_parameter_callback(
|
||||
attr_value, callback
|
||||
)
|
||||
|
||||
def _register_property_callbacks( # noqa: C901
|
||||
self,
|
||||
obj: "AbstractDataService",
|
||||
parent_path: str,
|
||||
) -> None:
|
||||
"""
|
||||
Register callbacks to notify when properties or their dependencies change.
|
||||
|
||||
This method cycles through all attributes (both class and instance level) of the
|
||||
input `obj`. For each attribute that is a property, it identifies dependencies
|
||||
used in the getter method and creates a callback for each one.
|
||||
|
||||
The method is recursive for attributes that are of type DataService or
|
||||
DataServiceList. It attaches the callback directly to DataServiceList items or
|
||||
propagates it through nested DataService instances.
|
||||
"""
|
||||
|
||||
attrs = get_class_and_instance_attributes(obj)
|
||||
|
||||
for attr_name, attr_value in attrs.items():
|
||||
if isinstance(attr_value, AbstractDataService):
|
||||
self._register_property_callbacks(
|
||||
attr_value, parent_path=f"{parent_path}.{attr_name}"
|
||||
)
|
||||
elif isinstance(attr_value, DataServiceList):
|
||||
for i, item in enumerate(attr_value):
|
||||
if isinstance(item, AbstractDataService):
|
||||
self._register_property_callbacks(
|
||||
item, parent_path=f"{parent_path}.{attr_name}[{i}]"
|
||||
)
|
||||
if isinstance(attr_value, property):
|
||||
dependencies = attr_value.fget.__code__.co_names # type: ignore
|
||||
source_code_string = inspect.getsource(attr_value.fget) # type: ignore
|
||||
|
||||
for dependency in dependencies:
|
||||
# check if the dependencies are attributes of obj
|
||||
# This doesn't have to be the case like, for example, here:
|
||||
# >>> @property
|
||||
# >>> def power(self) -> float:
|
||||
# >>> return self.class_attr.voltage * self.current
|
||||
#
|
||||
# The dependencies for this property are:
|
||||
# > ('class_attr', 'voltage', 'current')
|
||||
if f"self.{dependency}" not in source_code_string:
|
||||
continue
|
||||
|
||||
# use `obj` instead of `type(obj)` to get DataServiceList
|
||||
# instead of list
|
||||
dependency_value = getattr(obj, dependency)
|
||||
|
||||
if isinstance(
|
||||
dependency_value, (DataServiceList, AbstractDataService)
|
||||
):
|
||||
callback = (
|
||||
lambda name, value, dependent_attr=attr_name: obj._callback_manager.emit_notification(
|
||||
parent_path=parent_path,
|
||||
name=dependent_attr,
|
||||
value=getattr(obj, dependent_attr),
|
||||
)
|
||||
if self.service == obj.__root__
|
||||
else None
|
||||
)
|
||||
|
||||
self.__register_recursive_parameter_callback(
|
||||
dependency_value,
|
||||
callback=callback,
|
||||
)
|
||||
else:
|
||||
callback = (
|
||||
lambda name, _, dep_attr=attr_name, dep=dependency: obj._callback_manager.emit_notification( # type: ignore
|
||||
parent_path=parent_path,
|
||||
name=dep_attr,
|
||||
value=getattr(obj, dep_attr),
|
||||
)
|
||||
if name == dep and self.service == obj.__root__
|
||||
else None
|
||||
)
|
||||
# Add to callbacks
|
||||
obj._callback_manager.callbacks.add(callback)
|
||||
|
||||
def _register_start_stop_task_callbacks(
|
||||
self, obj: "AbstractDataService", parent_path: str
|
||||
) -> None:
|
||||
"""
|
||||
This function registers callbacks for start and stop methods of async functions.
|
||||
These callbacks are stored in the '_task_status_change_callbacks' attribute and
|
||||
are called when the status of a task changes.
|
||||
|
||||
Parameters:
|
||||
-----------
|
||||
obj: AbstractDataService
|
||||
The target object on which callbacks are to be registered.
|
||||
parent_path: str
|
||||
The access path for the parent object. This is used to construct the full
|
||||
access path for the notifications.
|
||||
"""
|
||||
|
||||
# Create and register a callback for the object
|
||||
# only emit the notification when the call was registered by the root object
|
||||
callback: Callable[[str, dict[str, Any] | None], None] = (
|
||||
lambda name, status: obj._callback_manager.emit_notification(
|
||||
parent_path=parent_path, name=name, value=status
|
||||
)
|
||||
if self.service == obj.__root__
|
||||
and not name.startswith("_") # we are only interested in public attributes
|
||||
else None
|
||||
)
|
||||
|
||||
obj._task_manager.task_status_change_callbacks.append(callback)
|
||||
|
||||
# Recursively register callbacks for all nested attributes of the object
|
||||
attrs: dict[str, Any] = get_class_and_instance_attributes(obj)
|
||||
|
||||
for nested_attr_name, nested_attr in attrs.items():
|
||||
if isinstance(nested_attr, AbstractDataService):
|
||||
self._register_start_stop_task_callbacks(
|
||||
nested_attr, parent_path=f"{parent_path}.{nested_attr_name}"
|
||||
)
|
||||
|
||||
def register_callbacks(self) -> None:
|
||||
self._register_list_change_callbacks(
|
||||
self.service, f"{self.service.__class__.__name__}"
|
||||
)
|
||||
self._register_DataService_instance_callbacks(
|
||||
self.service, f"{self.service.__class__.__name__}"
|
||||
)
|
||||
self._register_property_callbacks(
|
||||
self.service, f"{self.service.__class__.__name__}"
|
||||
)
|
||||
self._register_start_stop_task_callbacks(
|
||||
self.service, f"{self.service.__class__.__name__}"
|
||||
)
|
||||
|
||||
def emit_notification(self, parent_path: str, name: str, value: Any) -> None:
|
||||
logger.debug(f"{parent_path}.{name} changed to {value}!")
|
||||
|
||||
for callback in self._notification_callbacks:
|
||||
try:
|
||||
callback(parent_path, name, value)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
def add_notification_callback(
|
||||
self, callback: Callable[[str, str, Any], None]
|
||||
) -> None:
|
||||
"""
|
||||
Adds a new notification callback function to the list of callbacks.
|
||||
|
||||
This function is intended to be used for registering a function that will be
|
||||
called whenever a the value of an attribute changes.
|
||||
|
||||
Args:
|
||||
callback (Callable[[str, str, Any], None]): The callback function to
|
||||
register.
|
||||
It should accept three parameters:
|
||||
- parent_path (str): The parent path of the parameter.
|
||||
- name (str): The name of the changed parameter.
|
||||
- value (Any): The value of the parameter.
|
||||
"""
|
||||
self._notification_callbacks.append(callback)
|
||||
@@ -1,32 +1,36 @@
|
||||
import asyncio
|
||||
import inspect
|
||||
import json
|
||||
import os
|
||||
import logging
|
||||
import warnings
|
||||
from enum import Enum
|
||||
from typing import Any, Optional, cast, get_type_hints
|
||||
from typing import TYPE_CHECKING, Any, get_type_hints
|
||||
|
||||
import rpyc
|
||||
from loguru import logger
|
||||
import rpyc # type: ignore[import-untyped]
|
||||
|
||||
import pydase.units as u
|
||||
from pydase.data_service.abstract_data_service import AbstractDataService
|
||||
from pydase.data_service.callback_manager import CallbackManager
|
||||
from pydase.data_service.task_manager import TaskManager
|
||||
from pydase.observer_pattern.observable.observable import (
|
||||
Observable,
|
||||
)
|
||||
from pydase.utils.helpers import (
|
||||
convert_arguments_to_hinted_types,
|
||||
generate_paths_from_DataService_dict,
|
||||
get_class_and_instance_attributes,
|
||||
get_component_class_names,
|
||||
get_nested_value_from_DataService_by_path_and_key,
|
||||
get_object_attr_from_path,
|
||||
get_object_attr_from_path_list,
|
||||
is_property_attribute,
|
||||
parse_list_attr_and_index,
|
||||
update_value_if_changed,
|
||||
)
|
||||
from pydase.utils.warnings import (
|
||||
warn_if_instance_class_does_not_inherit_from_DataService,
|
||||
from pydase.utils.serializer import (
|
||||
Serializer,
|
||||
generate_serialized_data_paths,
|
||||
get_nested_dict_by_path,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pathlib import Path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def process_callable_attribute(attr: Any, args: dict[str, Any]) -> Any:
|
||||
converted_args_or_error_msg = convert_arguments_to_hinted_types(
|
||||
@@ -40,61 +44,94 @@ def process_callable_attribute(attr: Any, args: dict[str, Any]) -> Any:
|
||||
|
||||
|
||||
class DataService(rpyc.Service, AbstractDataService):
|
||||
def __init__(self, filename: Optional[str] = None) -> None:
|
||||
self._callback_manager: CallbackManager = CallbackManager(self)
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
super().__init__()
|
||||
self._task_manager = TaskManager(self)
|
||||
|
||||
if not hasattr(self, "_autostart_tasks"):
|
||||
self._autostart_tasks = {}
|
||||
|
||||
self.__root__: "DataService" = self
|
||||
"""Keep track of the root object. This helps to filter the emission of
|
||||
notifications. This overwrite the TaksManager's __root__ attribute."""
|
||||
filename = kwargs.pop("filename", None)
|
||||
if filename is not None:
|
||||
warnings.warn(
|
||||
"The 'filename' argument is deprecated and will be removed in a future "
|
||||
"version. Please pass the 'filename' argument to `pydase.Server`.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
self._filename: str | Path = filename
|
||||
|
||||
self._filename: Optional[str] = filename
|
||||
|
||||
self._callback_manager.register_callbacks()
|
||||
self.__check_instance_classes()
|
||||
self._initialised = True
|
||||
self._load_values_from_json()
|
||||
|
||||
def __setattr__(self, __name: str, __value: Any) -> None:
|
||||
# converting attributes that are not properties
|
||||
if not isinstance(getattr(type(self), __name, None), property):
|
||||
current_value = getattr(self, __name, None)
|
||||
# parse ints into floats if current value is a float
|
||||
if isinstance(current_value, float) and isinstance(__value, int):
|
||||
__value = float(__value)
|
||||
# Check and warn for unexpected type changes in attributes
|
||||
self._warn_on_type_change(__name, __value)
|
||||
|
||||
if isinstance(current_value, u.Quantity):
|
||||
__value = u.convert_to_quantity(__value, str(current_value.u))
|
||||
# every class defined by the user should inherit from DataService if it is
|
||||
# assigned to a public attribute
|
||||
if not __name.startswith("_") and not inspect.isfunction(__value):
|
||||
self.__warn_if_not_observable(__value)
|
||||
|
||||
# Set the attribute
|
||||
super().__setattr__(__name, __value)
|
||||
|
||||
if self.__dict__.get("_initialised") and not __name == "_initialised":
|
||||
for callback in self._callback_manager.callbacks:
|
||||
callback(__name, __value)
|
||||
elif __name.startswith(f"_{self.__class__.__name__}__"):
|
||||
def _warn_on_type_change(self, attr_name: str, new_value: Any) -> None:
|
||||
if is_property_attribute(self, attr_name):
|
||||
return
|
||||
|
||||
current_value = getattr(self, attr_name, None)
|
||||
if self._is_unexpected_type_change(current_value, new_value):
|
||||
logger.warning(
|
||||
f"Warning: You should not set private but rather protected attributes! "
|
||||
f"Use {__name.replace(f'_{self.__class__.__name__}__', '_')} instead "
|
||||
f"of {__name.replace(f'_{self.__class__.__name__}__', '__')}."
|
||||
"Type of '%s' changed from '%s' to '%s'. This may have unwanted "
|
||||
"side effects! Consider setting it to '%s' directly.",
|
||||
attr_name,
|
||||
type(current_value).__name__,
|
||||
type(new_value).__name__,
|
||||
type(current_value).__name__,
|
||||
)
|
||||
|
||||
def _is_unexpected_type_change(self, current_value: Any, new_value: Any) -> bool:
|
||||
return (
|
||||
isinstance(current_value, float)
|
||||
and not isinstance(new_value, float)
|
||||
or (
|
||||
isinstance(current_value, u.Quantity)
|
||||
and not isinstance(new_value, u.Quantity)
|
||||
)
|
||||
)
|
||||
|
||||
def __warn_if_not_observable(self, __value: Any) -> None:
|
||||
value_class = __value if inspect.isclass(__value) else __value.__class__
|
||||
|
||||
if not issubclass(
|
||||
value_class,
|
||||
(int | float | bool | str | list | Enum | u.Quantity | Observable),
|
||||
):
|
||||
logger.warning(
|
||||
"Class '%s' does not inherit from DataService. This may lead to"
|
||||
" unexpected behaviour!",
|
||||
value_class.__name__,
|
||||
)
|
||||
|
||||
def __check_instance_classes(self) -> None:
|
||||
for attr_name, attr_value in get_class_and_instance_attributes(self).items():
|
||||
# every class defined by the user should inherit from DataService if it is
|
||||
# assigned to a public attribute
|
||||
if not attr_name.startswith("_"):
|
||||
warn_if_instance_class_does_not_inherit_from_DataService(attr_value)
|
||||
if (
|
||||
not attr_name.startswith("_")
|
||||
and not inspect.isfunction(attr_value)
|
||||
and not isinstance(attr_value, property)
|
||||
):
|
||||
self.__warn_if_not_observable(attr_value)
|
||||
|
||||
def __set_attribute_based_on_type( # noqa:CFQ002
|
||||
def __set_attribute_based_on_type( # noqa: PLR0913
|
||||
self,
|
||||
target_obj: Any,
|
||||
attr_name: str,
|
||||
attr: Any,
|
||||
value: Any,
|
||||
index: Optional[int],
|
||||
index: int | None,
|
||||
path_list: list[str],
|
||||
) -> None:
|
||||
if isinstance(attr, Enum):
|
||||
@@ -129,68 +166,72 @@ class DataService(rpyc.Service, AbstractDataService):
|
||||
# allow all other attributes
|
||||
setattr(self, name, value)
|
||||
|
||||
def _load_values_from_json(self) -> None:
|
||||
if self._filename is not None:
|
||||
# Check if the file specified by the filename exists
|
||||
if os.path.exists(self._filename):
|
||||
with open(self._filename, "r") as f:
|
||||
# Load JSON data from file and update class attributes with these
|
||||
# values
|
||||
self.load_DataService_from_JSON(cast(dict[str, Any], json.load(f)))
|
||||
|
||||
def write_to_file(self) -> None:
|
||||
"""
|
||||
Serialize the DataService instance and write it to a JSON file.
|
||||
|
||||
Args:
|
||||
filename (str): The name of the file to write to.
|
||||
This method is deprecated and will be removed in a future version.
|
||||
Service persistence is handled by `pydase.Server` now, instead.
|
||||
"""
|
||||
if self._filename is not None:
|
||||
with open(self._filename, "w") as f:
|
||||
json.dump(self.serialize(), f, indent=4)
|
||||
else:
|
||||
logger.error(
|
||||
f"Class {self.__class__.__name__} was not initialised with a filename. "
|
||||
'Skipping "write_to_file"...'
|
||||
)
|
||||
|
||||
def load_DataService_from_JSON(self, json_dict: dict[str, Any]) -> None:
|
||||
warnings.warn(
|
||||
"'write_to_file' is deprecated and will be removed in a future version. "
|
||||
"Service persistence is handled by `pydase.Server` now, instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
if hasattr(self, "_state_manager"):
|
||||
self._state_manager.save_state()
|
||||
|
||||
def load_DataService_from_JSON( # noqa: N802
|
||||
self, json_dict: dict[str, Any]
|
||||
) -> None:
|
||||
warnings.warn(
|
||||
"'load_DataService_from_JSON' is deprecated and will be removed in a "
|
||||
"future version. "
|
||||
"Service persistence is handled by `pydase.Server` now, instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
# Traverse the serialized representation and set the attributes of the class
|
||||
serialized_class = self.serialize()
|
||||
for path in generate_paths_from_DataService_dict(json_dict):
|
||||
value = get_nested_value_from_DataService_by_path_and_key(
|
||||
json_dict, path=path
|
||||
)
|
||||
value_type = get_nested_value_from_DataService_by_path_and_key(
|
||||
json_dict, path=path, key="type"
|
||||
)
|
||||
class_value_type = get_nested_value_from_DataService_by_path_and_key(
|
||||
serialized_class, path=path, key="type"
|
||||
)
|
||||
for path in generate_serialized_data_paths(json_dict):
|
||||
nested_json_dict = get_nested_dict_by_path(json_dict, path)
|
||||
value = nested_json_dict["value"]
|
||||
value_type = nested_json_dict["type"]
|
||||
|
||||
nested_class_dict = get_nested_dict_by_path(serialized_class, path)
|
||||
class_value_type = nested_class_dict.get("type", None)
|
||||
if class_value_type == value_type:
|
||||
class_attr_is_read_only = (
|
||||
get_nested_value_from_DataService_by_path_and_key(
|
||||
serialized_class, path=path, key="readonly"
|
||||
)
|
||||
)
|
||||
class_attr_is_read_only = nested_class_dict["readonly"]
|
||||
if class_attr_is_read_only:
|
||||
logger.debug(
|
||||
f'Attribute "{path}" is read-only. Ignoring value from JSON '
|
||||
"file..."
|
||||
"Attribute '%s' is read-only. Ignoring value from JSON "
|
||||
"file...",
|
||||
path,
|
||||
)
|
||||
continue
|
||||
# Split the path into parts
|
||||
parts = path.split(".")
|
||||
attr_name = parts[-1]
|
||||
|
||||
# Convert dictionary into Quantity
|
||||
if class_value_type == "Quantity":
|
||||
value = u.convert_to_quantity(value)
|
||||
|
||||
self.update_DataService_attribute(parts[:-1], attr_name, value)
|
||||
else:
|
||||
logger.info(
|
||||
f'Attribute type of "{path}" changed from "{value_type}" to '
|
||||
f'"{class_value_type}". Ignoring value from JSON file...'
|
||||
"Attribute type of '%s' changed from '%s' to "
|
||||
"'%s'. Ignoring value from JSON file...",
|
||||
path,
|
||||
value_type,
|
||||
class_value_type,
|
||||
)
|
||||
|
||||
def serialize(self) -> dict[str, dict[str, Any]]: # noqa
|
||||
def serialize(self) -> dict[str, dict[str, Any]]:
|
||||
"""
|
||||
Serializes the instance into a dictionary, preserving the structure of the
|
||||
instance.
|
||||
@@ -207,147 +248,27 @@ class DataService(rpyc.Service, AbstractDataService):
|
||||
Returns:
|
||||
dict: The serialized instance.
|
||||
"""
|
||||
result: dict[str, dict[str, Any]] = {}
|
||||
return Serializer.serialize_object(self)["value"]
|
||||
|
||||
# Get the dictionary of the base class
|
||||
base_set = set(type(super()).__dict__)
|
||||
# Get the dictionary of the derived class
|
||||
derived_set = set(type(self).__dict__)
|
||||
# Get the difference between the two dictionaries
|
||||
derived_only_set = derived_set - base_set
|
||||
|
||||
instance_dict = set(self.__dict__)
|
||||
# Merge the class and instance dictionaries
|
||||
merged_set = derived_only_set | instance_dict
|
||||
|
||||
def get_attribute_doc(attr: Any) -> Optional[str]:
|
||||
"""This function takes an input attribute attr and returns its documentation
|
||||
string if it's different from the documentation of its type, otherwise,
|
||||
it returns None.
|
||||
"""
|
||||
attr_doc = inspect.getdoc(attr)
|
||||
attr_class_doc = inspect.getdoc(type(attr))
|
||||
if attr_class_doc != attr_doc:
|
||||
return attr_doc
|
||||
else:
|
||||
return None
|
||||
|
||||
# Iterate over attributes, properties, class attributes, and methods
|
||||
for key in sorted(merged_set):
|
||||
if key.startswith("_"):
|
||||
continue # Skip attributes that start with underscore
|
||||
|
||||
# Skip keys that start with "start_" or "stop_" and end with an async method
|
||||
# name
|
||||
if (key.startswith("start_") or key.startswith("stop_")) and key.split(
|
||||
"_", 1
|
||||
)[1] in {
|
||||
name
|
||||
for name, _ in inspect.getmembers(
|
||||
self, predicate=inspect.iscoroutinefunction
|
||||
)
|
||||
}:
|
||||
continue
|
||||
|
||||
# Get the value of the current attribute or method
|
||||
value = getattr(self, key)
|
||||
|
||||
if isinstance(value, DataService):
|
||||
result[key] = {
|
||||
"type": type(value).__name__
|
||||
if type(value).__name__ in get_component_class_names()
|
||||
else "DataService",
|
||||
"value": value.serialize(),
|
||||
"readonly": False,
|
||||
"doc": get_attribute_doc(value),
|
||||
}
|
||||
elif isinstance(value, list):
|
||||
result[key] = {
|
||||
"type": "list",
|
||||
"value": [
|
||||
{
|
||||
"type": type(item).__name__
|
||||
if not isinstance(item, DataService)
|
||||
or type(item).__name__ in get_component_class_names()
|
||||
else "DataService",
|
||||
"value": item.serialize()
|
||||
if isinstance(item, DataService)
|
||||
else item,
|
||||
"readonly": False,
|
||||
"doc": get_attribute_doc(value),
|
||||
}
|
||||
for item in value
|
||||
],
|
||||
"readonly": False,
|
||||
}
|
||||
elif inspect.isfunction(value) or inspect.ismethod(value):
|
||||
sig = inspect.signature(value)
|
||||
|
||||
# Store parameters and their anotations in a dictionary
|
||||
parameters: dict[str, Optional[str]] = {}
|
||||
for k, v in sig.parameters.items():
|
||||
annotation = v.annotation
|
||||
if annotation is not inspect._empty:
|
||||
if isinstance(annotation, type):
|
||||
# Handle regular types
|
||||
parameters[k] = annotation.__name__
|
||||
else:
|
||||
parameters[k] = str(annotation)
|
||||
else:
|
||||
parameters[k] = None
|
||||
running_task_info = None
|
||||
if (
|
||||
key in self._task_manager.tasks
|
||||
): # If there's a running task for this method
|
||||
task_info = self._task_manager.tasks[key]
|
||||
running_task_info = task_info["kwargs"]
|
||||
|
||||
result[key] = {
|
||||
"type": "method",
|
||||
"async": asyncio.iscoroutinefunction(value),
|
||||
"parameters": parameters,
|
||||
"doc": get_attribute_doc(value),
|
||||
"readonly": True,
|
||||
"value": running_task_info,
|
||||
}
|
||||
elif isinstance(value, Enum):
|
||||
result[key] = {
|
||||
"type": "Enum",
|
||||
"value": value.name,
|
||||
"enum": {
|
||||
name: member.value
|
||||
for name, member in value.__class__.__members__.items()
|
||||
},
|
||||
"readonly": False,
|
||||
"doc": get_attribute_doc(value),
|
||||
}
|
||||
else:
|
||||
result[key] = {
|
||||
"type": type(value).__name__,
|
||||
"value": value
|
||||
if not isinstance(value, u.Quantity)
|
||||
else {"magnitude": value.m, "unit": str(value.u)},
|
||||
"readonly": False,
|
||||
"doc": get_attribute_doc(value),
|
||||
}
|
||||
|
||||
if isinstance(getattr(self.__class__, key, None), property):
|
||||
prop: property = getattr(self.__class__, key)
|
||||
result[key]["readonly"] = prop.fset is None
|
||||
result[key]["doc"] = get_attribute_doc(prop)
|
||||
|
||||
return result
|
||||
|
||||
def update_DataService_attribute(
|
||||
def update_DataService_attribute( # noqa: N802
|
||||
self,
|
||||
path_list: list[str],
|
||||
attr_name: str,
|
||||
value: Any,
|
||||
) -> None:
|
||||
warnings.warn(
|
||||
"'update_DataService_attribute' is deprecated and will be removed in a "
|
||||
"future version. "
|
||||
"Service state management is handled by `pydase.data_service.state_manager`"
|
||||
"now, instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
# If attr_name corresponds to a list entry, extract the attr_name and the index
|
||||
attr_name, index = parse_list_attr_and_index(attr_name)
|
||||
# Traverse the object according to the path parts
|
||||
target_obj = get_object_attr_from_path(self, path_list)
|
||||
target_obj = get_object_attr_from_path_list(self, path_list)
|
||||
|
||||
# If the attribute is a property, change it using the setter without getting the
|
||||
# property value (would otherwise be bad for expensive getter methods)
|
||||
@@ -355,7 +276,7 @@ class DataService(rpyc.Service, AbstractDataService):
|
||||
setattr(target_obj, attr_name, value)
|
||||
return
|
||||
|
||||
attr = get_object_attr_from_path(target_obj, [attr_name])
|
||||
attr = get_object_attr_from_path_list(target_obj, [attr_name])
|
||||
if attr is None:
|
||||
return
|
||||
|
||||
|
||||
39
src/pydase/data_service/data_service_cache.py
Normal file
39
src/pydase/data_service/data_service_cache.py
Normal file
@@ -0,0 +1,39 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from pydase.utils.serializer import (
|
||||
SerializationPathError,
|
||||
SerializationValueError,
|
||||
get_nested_dict_by_path,
|
||||
set_nested_value_by_path,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pydase import DataService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DataServiceCache:
|
||||
def __init__(self, service: "DataService") -> None:
|
||||
self._cache: dict[str, Any] = {}
|
||||
self.service = service
|
||||
self._initialize_cache()
|
||||
|
||||
@property
|
||||
def cache(self) -> dict[str, Any]:
|
||||
return self._cache
|
||||
|
||||
def _initialize_cache(self) -> None:
|
||||
"""Initializes the cache and sets up the callback."""
|
||||
logger.debug("Initializing cache.")
|
||||
self._cache = self.service.serialize()
|
||||
|
||||
def update_cache(self, full_access_path: str, value: Any) -> None:
|
||||
set_nested_value_by_path(self._cache, full_access_path, value)
|
||||
|
||||
def get_value_dict_from_cache(self, full_access_path: str) -> dict[str, Any]:
|
||||
try:
|
||||
return get_nested_dict_by_path(self._cache, full_access_path)
|
||||
except (SerializationPathError, SerializationValueError, KeyError):
|
||||
return {}
|
||||
@@ -1,63 +0,0 @@
|
||||
from collections.abc import Callable
|
||||
from typing import Any
|
||||
|
||||
from pydase.utils.warnings import (
|
||||
warn_if_instance_class_does_not_inherit_from_DataService,
|
||||
)
|
||||
|
||||
|
||||
class DataServiceList(list):
|
||||
"""
|
||||
DataServiceList is a list with additional functionality to trigger callbacks
|
||||
whenever an item is set. This can be used to track changes in the list items.
|
||||
|
||||
The class takes the same arguments as the list superclass during initialization,
|
||||
with an additional optional 'callback' argument that is a list of functions.
|
||||
These callbacks are stored and executed whenever an item in the DataServiceList
|
||||
is set via the __setitem__ method. The callbacks receive the index of the changed
|
||||
item and its new value as arguments.
|
||||
|
||||
The original list that is passed during initialization is kept as a private
|
||||
attribute to prevent it from being garbage collected.
|
||||
|
||||
Additional callbacks can be added after initialization using the `add_callback`
|
||||
method.
|
||||
|
||||
Attributes:
|
||||
callbacks (list):
|
||||
List of callback functions to be executed on item set.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*args: list[Any],
|
||||
callback: list[Callable[[int, Any], None]] | None = None,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
self.callbacks: list[Callable[[int, Any], None]] = []
|
||||
if isinstance(callback, list):
|
||||
self.callbacks = callback
|
||||
|
||||
for item in args[0]:
|
||||
warn_if_instance_class_does_not_inherit_from_DataService(item)
|
||||
|
||||
# prevent gc to delete the passed list by keeping a reference
|
||||
self._original_list = args[0]
|
||||
|
||||
super().__init__(*args, **kwargs) # type: ignore
|
||||
|
||||
def __setitem__(self, key: int, value: Any) -> None: # type: ignore
|
||||
super().__setitem__(key, value) # type: ignore
|
||||
|
||||
for callback in self.callbacks:
|
||||
callback(key, value)
|
||||
|
||||
def add_callback(self, callback: Callable[[int, Any], None]) -> None:
|
||||
"""
|
||||
Add a new callback function to be executed on item set.
|
||||
|
||||
Args:
|
||||
callback (Callable[[int, Any], None]): Callback function that takes two
|
||||
arguments - index of the changed item and its new value.
|
||||
"""
|
||||
self.callbacks.append(callback)
|
||||
107
src/pydase/data_service/data_service_observer.py
Normal file
107
src/pydase/data_service/data_service_observer.py
Normal file
@@ -0,0 +1,107 @@
|
||||
import logging
|
||||
from collections.abc import Callable
|
||||
from copy import deepcopy
|
||||
from typing import Any
|
||||
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pydase.observer_pattern.observable.observable_object import ObservableObject
|
||||
from pydase.observer_pattern.observer.property_observer import (
|
||||
PropertyObserver,
|
||||
)
|
||||
from pydase.utils.helpers import get_object_attr_from_path_list
|
||||
from pydase.utils.serializer import dump
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DataServiceObserver(PropertyObserver):
|
||||
def __init__(self, state_manager: StateManager) -> None:
|
||||
self.state_manager = state_manager
|
||||
self._notification_callbacks: list[
|
||||
Callable[[str, Any, dict[str, Any]], None]
|
||||
] = []
|
||||
super().__init__(state_manager.service)
|
||||
|
||||
def on_change(self, full_access_path: str, value: Any) -> None:
|
||||
cached_value_dict = deepcopy(
|
||||
self.state_manager._data_service_cache.get_value_dict_from_cache(
|
||||
full_access_path
|
||||
)
|
||||
)
|
||||
|
||||
cached_value = cached_value_dict.get("value")
|
||||
if cached_value != dump(value)["value"] and all(
|
||||
part[0] != "_" for part in full_access_path.split(".")
|
||||
):
|
||||
logger.debug("'%s' changed to '%s'", full_access_path, value)
|
||||
|
||||
self._update_cache_value(full_access_path, value, cached_value_dict)
|
||||
|
||||
for callback in self._notification_callbacks:
|
||||
callback(full_access_path, value, cached_value_dict)
|
||||
|
||||
if isinstance(value, ObservableObject):
|
||||
self._update_property_deps_dict()
|
||||
|
||||
self._notify_dependent_property_changes(full_access_path)
|
||||
|
||||
def _update_cache_value(
|
||||
self, full_access_path: str, value: Any, cached_value_dict: dict[str, Any]
|
||||
) -> None:
|
||||
value_dict = dump(value)
|
||||
if cached_value_dict != {}:
|
||||
if (
|
||||
cached_value_dict["type"] != "method"
|
||||
and cached_value_dict["type"] != value_dict["type"]
|
||||
):
|
||||
logger.warning(
|
||||
"Type of '%s' changed from '%s' to '%s'. This could have unwanted "
|
||||
"side effects! Consider setting it to '%s' directly.",
|
||||
full_access_path,
|
||||
cached_value_dict["type"],
|
||||
value_dict["type"],
|
||||
cached_value_dict["type"],
|
||||
)
|
||||
self.state_manager._data_service_cache.update_cache(
|
||||
full_access_path,
|
||||
value,
|
||||
)
|
||||
|
||||
def _notify_dependent_property_changes(self, changed_attr_path: str) -> None:
|
||||
changed_props = self.property_deps_dict.get(changed_attr_path, [])
|
||||
for prop in changed_props:
|
||||
# only notify about changing attribute if it is not currently being
|
||||
# "changed" e.g. when calling the getter of a property within another
|
||||
# property
|
||||
if prop not in self.changing_attributes:
|
||||
self._notify_changed(
|
||||
prop,
|
||||
get_object_attr_from_path_list(self.observable, prop.split(".")),
|
||||
)
|
||||
|
||||
def add_notification_callback(
|
||||
self, callback: Callable[[str, Any, dict[str, Any]], None]
|
||||
) -> None:
|
||||
"""
|
||||
Registers a callback function to be invoked upon attribute changes in the
|
||||
observed object.
|
||||
|
||||
This method allows for the addition of custom callback functions that will be
|
||||
executed whenever there is a change in the value of an observed attribute. The
|
||||
callback function is called with detailed information about the change, enabling
|
||||
external logic to respond to specific state changes within the observable
|
||||
object.
|
||||
|
||||
Args:
|
||||
callback (Callable[[str, Any, dict[str, Any]]): The callback function to be
|
||||
registered. The function should have the following signature:
|
||||
- full_access_path (str): The full dot-notation access path of the
|
||||
changed attribute. This path indicates the location of the changed
|
||||
attribute within the observable object's structure.
|
||||
- value (Any): The new value of the changed attribute.
|
||||
- cached_value_dict (dict[str, Any]): A dictionary representing the
|
||||
cached state of the attribute prior to the change. This can be useful
|
||||
for understanding the nature of the change and for historical
|
||||
comparison.
|
||||
"""
|
||||
self._notification_callbacks.append(callback)
|
||||
273
src/pydase/data_service/state_manager.py
Normal file
273
src/pydase/data_service/state_manager.py
Normal file
@@ -0,0 +1,273 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from collections.abc import Callable
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
import pydase.units as u
|
||||
from pydase.data_service.data_service_cache import DataServiceCache
|
||||
from pydase.utils.helpers import (
|
||||
get_object_attr_from_path_list,
|
||||
parse_list_attr_and_index,
|
||||
)
|
||||
from pydase.utils.serializer import (
|
||||
dump,
|
||||
generate_serialized_data_paths,
|
||||
get_nested_dict_by_path,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pydase import DataService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def load_state(func: Callable[..., Any]) -> Callable[..., Any]:
|
||||
"""This function should be used as a decorator on property setters to indicate that
|
||||
the value should be loaded from the JSON file.
|
||||
|
||||
Example:
|
||||
>>> class Service(pydase.DataService):
|
||||
... _name = "Service"
|
||||
...
|
||||
... @property
|
||||
... def name(self) -> str:
|
||||
... return self._name
|
||||
...
|
||||
... @name.setter
|
||||
... @load_state
|
||||
... def name(self, value: str) -> None:
|
||||
... self._name = value
|
||||
"""
|
||||
|
||||
func._load_state = True # type: ignore[attr-defined]
|
||||
return func
|
||||
|
||||
|
||||
def has_load_state_decorator(prop: property) -> bool:
|
||||
"""Determines if the property's setter method is decorated with the `@load_state`
|
||||
decorator.
|
||||
"""
|
||||
|
||||
try:
|
||||
return prop.fset._load_state # type: ignore[union-attr]
|
||||
except AttributeError:
|
||||
return False
|
||||
|
||||
|
||||
class StateManager:
|
||||
"""
|
||||
Manages the state of a DataService instance, serving as both a cache and a
|
||||
persistence layer. It is designed to provide quick access to the latest known state
|
||||
for newly connecting web clients without the need for expensive property accesses
|
||||
that may involve complex calculations or I/O operations.
|
||||
|
||||
The StateManager listens for state change notifications from the DataService's
|
||||
callback manager and updates its cache accordingly. This cache does not always
|
||||
reflect the most current complex property states but rather retains the value from
|
||||
the last known state, optimizing for performance and reducing the load on the
|
||||
system.
|
||||
|
||||
While the StateManager ensures that the cached state is as up-to-date as possible,
|
||||
it does not autonomously update complex properties of the DataService. Such
|
||||
properties must be updated programmatically, for instance, by invoking specific
|
||||
tasks or methods that trigger the necessary operations to refresh their state.
|
||||
|
||||
The cached state maintained by the StateManager is particularly useful for web
|
||||
clients that connect to the system and need immediate access to the current state of
|
||||
the DataService. By avoiding direct and potentially costly property accesses, the
|
||||
StateManager provides a snapshot of the DataService's state that is sufficiently
|
||||
accurate for initial rendering and interaction.
|
||||
|
||||
Attributes:
|
||||
cache (dict[str, Any]):
|
||||
A dictionary cache of the DataService's state.
|
||||
filename (str):
|
||||
The file name used for storing the DataService's state.
|
||||
service (DataService):
|
||||
The DataService instance whose state is being managed.
|
||||
|
||||
Note:
|
||||
The StateManager's cache updates are triggered by notifications and do not
|
||||
include autonomous updates of complex DataService properties, which must be
|
||||
managed programmatically. The cache serves the purpose of providing immediate
|
||||
state information to web clients, reflecting the state after the last property
|
||||
update.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, service: "DataService", filename: str | Path | None = None
|
||||
) -> None:
|
||||
self.filename = getattr(service, "_filename", None)
|
||||
|
||||
if filename is not None:
|
||||
if self.filename is not None:
|
||||
logger.warning(
|
||||
"Overwriting filename '%s' with '%s'.", self.filename, filename
|
||||
)
|
||||
self.filename = filename
|
||||
|
||||
self.service = service
|
||||
self._data_service_cache = DataServiceCache(self.service)
|
||||
|
||||
@property
|
||||
def cache(self) -> dict[str, Any]:
|
||||
"""Returns the cached DataService state."""
|
||||
return self._data_service_cache.cache
|
||||
|
||||
def save_state(self) -> None:
|
||||
"""
|
||||
Saves the DataService's current state to a JSON file defined by `self.filename`.
|
||||
Logs an error if `self.filename` is not set.
|
||||
"""
|
||||
|
||||
if self.filename is not None:
|
||||
with open(self.filename, "w") as f:
|
||||
json.dump(self.cache, f, indent=4)
|
||||
else:
|
||||
logger.info(
|
||||
"State manager was not initialised with a filename. Skipping "
|
||||
"'save_state'..."
|
||||
)
|
||||
|
||||
def load_state(self) -> None:
|
||||
"""
|
||||
Loads the DataService's state from a JSON file defined by `self.filename`.
|
||||
Updates the service's attributes, respecting type and read-only constraints.
|
||||
"""
|
||||
|
||||
# Traverse the serialized representation and set the attributes of the class
|
||||
json_dict = self._get_state_dict_from_json_file()
|
||||
if json_dict == {}:
|
||||
logger.debug("Could not load the service state.")
|
||||
return
|
||||
|
||||
for path in generate_serialized_data_paths(json_dict):
|
||||
nested_json_dict = get_nested_dict_by_path(json_dict, path)
|
||||
nested_class_dict = self._data_service_cache.get_value_dict_from_cache(path)
|
||||
|
||||
value, value_type = nested_json_dict["value"], nested_json_dict["type"]
|
||||
class_attr_value_type = nested_class_dict.get("type", None)
|
||||
|
||||
if class_attr_value_type == value_type:
|
||||
if self.__is_loadable_state_attribute(path):
|
||||
self.set_service_attribute_value_by_path(path, value)
|
||||
else:
|
||||
logger.info(
|
||||
"Attribute type of '%s' changed from '%s' to "
|
||||
"'%s'. Ignoring value from JSON file...",
|
||||
path,
|
||||
value_type,
|
||||
class_attr_value_type,
|
||||
)
|
||||
|
||||
def _get_state_dict_from_json_file(self) -> dict[str, Any]:
|
||||
if self.filename is not None and os.path.exists(self.filename):
|
||||
with open(self.filename) as f:
|
||||
# Load JSON data from file and update class attributes with these
|
||||
# values
|
||||
return cast(dict[str, Any], json.load(f))
|
||||
return {}
|
||||
|
||||
def set_service_attribute_value_by_path(
|
||||
self,
|
||||
path: str,
|
||||
value: Any,
|
||||
) -> None:
|
||||
"""
|
||||
Sets the value of an attribute in the service managed by the `StateManager`
|
||||
given its path as a dot-separated string.
|
||||
|
||||
This method updates the attribute specified by 'path' with 'value' only if the
|
||||
attribute is not read-only and the new value differs from the current one.
|
||||
It also handles type-specific conversions for the new value before setting it.
|
||||
|
||||
Args:
|
||||
path: A dot-separated string indicating the hierarchical path to the
|
||||
attribute.
|
||||
value: The new value to set for the attribute.
|
||||
"""
|
||||
|
||||
current_value_dict = get_nested_dict_by_path(self.cache, path)
|
||||
|
||||
# This will also filter out methods as they are 'read-only'
|
||||
if current_value_dict["readonly"]:
|
||||
logger.debug("Attribute '%s' is read-only. Ignoring new value...", path)
|
||||
return
|
||||
|
||||
converted_value = self.__convert_value_if_needed(value, current_value_dict)
|
||||
|
||||
# only set value when it has changed
|
||||
if self.__attr_value_has_changed(converted_value, current_value_dict["value"]):
|
||||
self.__update_attribute_by_path(path, converted_value)
|
||||
else:
|
||||
logger.debug("Value of attribute '%s' has not changed...", path)
|
||||
|
||||
def __attr_value_has_changed(self, value_object: Any, current_value: Any) -> bool:
|
||||
"""Check if the serialized value of `value_object` differs from `current_value`.
|
||||
|
||||
The method serializes `value_object` to compare it, which is mainly
|
||||
necessary for handling Quantity objects.
|
||||
"""
|
||||
|
||||
return dump(value_object)["value"] != current_value
|
||||
|
||||
def __convert_value_if_needed(
|
||||
self, value: Any, current_value_dict: dict[str, Any]
|
||||
) -> Any:
|
||||
if current_value_dict["type"] == "Quantity":
|
||||
return u.convert_to_quantity(value, current_value_dict["value"]["unit"])
|
||||
if current_value_dict["type"] == "float" and not isinstance(value, float):
|
||||
return float(value)
|
||||
return value
|
||||
|
||||
def __update_attribute_by_path(self, path: str, value: Any) -> None:
|
||||
parent_path_list, attr_name = path.split(".")[:-1], path.split(".")[-1]
|
||||
|
||||
# If attr_name corresponds to a list entry, extract the attr_name and the
|
||||
# index
|
||||
attr_name, index = parse_list_attr_and_index(attr_name)
|
||||
|
||||
# Update path to reflect the attribute without list indices
|
||||
path = ".".join([*parent_path_list, attr_name])
|
||||
|
||||
attr_cache_type = get_nested_dict_by_path(self.cache, path)["type"]
|
||||
|
||||
# Traverse the object according to the path parts
|
||||
target_obj = get_object_attr_from_path_list(self.service, parent_path_list)
|
||||
|
||||
if attr_cache_type in ("ColouredEnum", "Enum"):
|
||||
enum_attr = get_object_attr_from_path_list(target_obj, [attr_name])
|
||||
setattr(target_obj, attr_name, enum_attr.__class__[value])
|
||||
elif attr_cache_type == "list":
|
||||
list_obj = get_object_attr_from_path_list(target_obj, [attr_name])
|
||||
list_obj[index] = value
|
||||
else:
|
||||
setattr(target_obj, attr_name, value)
|
||||
|
||||
def __is_loadable_state_attribute(self, property_path: str) -> bool:
|
||||
"""Checks if an attribute defined by a dot-separated path should be loaded from
|
||||
storage.
|
||||
|
||||
For properties, it verifies the presence of the '@load_state' decorator. Regular
|
||||
attributes default to being loadable.
|
||||
"""
|
||||
|
||||
parent_object = get_object_attr_from_path_list(
|
||||
self.service, property_path.split(".")[:-1]
|
||||
)
|
||||
attr_name = property_path.split(".")[-1]
|
||||
|
||||
prop = getattr(type(parent_object), attr_name, None)
|
||||
|
||||
if isinstance(prop, property):
|
||||
has_decorator = has_load_state_decorator(prop)
|
||||
if not has_decorator:
|
||||
logger.debug(
|
||||
"Property '%s' has no '@load_state' decorator. "
|
||||
"Ignoring value from JSON file...",
|
||||
attr_name,
|
||||
)
|
||||
return has_decorator
|
||||
return True
|
||||
@@ -2,15 +2,20 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import inspect
|
||||
from collections.abc import Callable
|
||||
import logging
|
||||
from functools import wraps
|
||||
from typing import TYPE_CHECKING, Any, TypedDict
|
||||
|
||||
from loguru import logger
|
||||
from pydase.data_service.abstract_data_service import AbstractDataService
|
||||
from pydase.utils.helpers import get_class_and_instance_attributes
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
from .data_service import DataService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TaskDict(TypedDict):
|
||||
task: asyncio.Task[None]
|
||||
@@ -81,105 +86,18 @@ class TaskManager:
|
||||
its kwargs.
|
||||
"""
|
||||
|
||||
self.task_status_change_callbacks: list[
|
||||
Callable[[str, dict[str, Any] | None], Any]
|
||||
] = []
|
||||
"""A list of callback functions to be invoked when the status of a task (start
|
||||
or stop) changes."""
|
||||
|
||||
self._set_start_and_stop_for_async_methods()
|
||||
|
||||
def _set_start_and_stop_for_async_methods(self) -> None: # noqa: C901
|
||||
def _set_start_and_stop_for_async_methods(self) -> None:
|
||||
# inspect the methods of the class
|
||||
for name, method in inspect.getmembers(
|
||||
self.service, predicate=inspect.iscoroutinefunction
|
||||
):
|
||||
|
||||
@wraps(method)
|
||||
def start_task(*args: Any, **kwargs: Any) -> None:
|
||||
def task_done_callback(task: asyncio.Task, name: str) -> None:
|
||||
"""Handles tasks that have finished.
|
||||
|
||||
Removes a task from the tasks dictionary, calls the defined
|
||||
callbacks, and logs and re-raises exceptions."""
|
||||
|
||||
# removing the finished task from the tasks i
|
||||
self.tasks.pop(name, None)
|
||||
|
||||
# emit the notification that the task was stopped
|
||||
for callback in self.task_status_change_callbacks:
|
||||
callback(name, None)
|
||||
|
||||
exception = task.exception()
|
||||
if exception is not None:
|
||||
# Handle the exception, or you can re-raise it.
|
||||
logger.error(
|
||||
f"Task '{name}' encountered an exception: "
|
||||
f"{type(exception).__name__}: {exception}"
|
||||
)
|
||||
raise exception
|
||||
|
||||
async def task(*args: Any, **kwargs: Any) -> None:
|
||||
try:
|
||||
await method(*args, **kwargs)
|
||||
except asyncio.CancelledError:
|
||||
print(f"Task {name} was cancelled")
|
||||
|
||||
if not self.tasks.get(name):
|
||||
# Get the signature of the coroutine method to start
|
||||
sig = inspect.signature(method)
|
||||
|
||||
# Create a list of the parameter names from the method signature.
|
||||
parameter_names = list(sig.parameters.keys())
|
||||
|
||||
# Extend the list of positional arguments with None values to match
|
||||
# the length of the parameter names list. This is done to ensure
|
||||
# that zip can pair each parameter name with a corresponding value.
|
||||
args_padded = list(args) + [None] * (
|
||||
len(parameter_names) - len(args)
|
||||
)
|
||||
|
||||
# Create a dictionary of keyword arguments by pairing the parameter
|
||||
# names with the values in 'args_padded'. Then merge this dictionary
|
||||
# with the 'kwargs' dictionary. If a parameter is specified in both
|
||||
# 'args_padded' and 'kwargs', the value from 'kwargs' is used.
|
||||
kwargs_updated = {
|
||||
**dict(zip(parameter_names, args_padded)),
|
||||
**kwargs,
|
||||
}
|
||||
|
||||
# creating the task and adding the task_done_callback which checks
|
||||
# if an exception has occured during the task execution
|
||||
task_object = self._loop.create_task(task(*args, **kwargs))
|
||||
task_object.add_done_callback(
|
||||
lambda task: task_done_callback(task, name)
|
||||
)
|
||||
|
||||
# Store the task and its arguments in the '__tasks' dictionary. The
|
||||
# key is the name of the method, and the value is a dictionary
|
||||
# containing the task object and the updated keyword arguments.
|
||||
self.tasks[name] = {
|
||||
"task": task_object,
|
||||
"kwargs": kwargs_updated,
|
||||
}
|
||||
|
||||
# emit the notification that the task was started
|
||||
for callback in self.task_status_change_callbacks:
|
||||
callback(name, kwargs_updated)
|
||||
else:
|
||||
logger.error(f"Task `{name}` is already running!")
|
||||
|
||||
def stop_task() -> None:
|
||||
# cancel the task
|
||||
task = self.tasks.get(name, None)
|
||||
if task is not None:
|
||||
self._loop.call_soon_threadsafe(task["task"].cancel)
|
||||
|
||||
# create start and stop methods for each coroutine
|
||||
setattr(self.service, f"start_{name}", start_task)
|
||||
setattr(self.service, f"stop_{name}", stop_task)
|
||||
setattr(self.service, f"start_{name}", self._make_start_task(name, method))
|
||||
setattr(self.service, f"stop_{name}", self._make_stop_task(name))
|
||||
|
||||
def start_autostart_tasks(self) -> None:
|
||||
def _initiate_task_startup(self) -> None:
|
||||
if self.service._autostart_tasks is not None:
|
||||
for service_name, args in self.service._autostart_tasks.items():
|
||||
start_method = getattr(self.service, f"start_{service_name}", None)
|
||||
@@ -187,5 +105,125 @@ class TaskManager:
|
||||
start_method(*args)
|
||||
else:
|
||||
logger.warning(
|
||||
f"No start method found for service '{service_name}'"
|
||||
"No start method found for service '%s'", service_name
|
||||
)
|
||||
|
||||
def start_autostart_tasks(self) -> None:
|
||||
self._initiate_task_startup()
|
||||
attrs = get_class_and_instance_attributes(self.service)
|
||||
|
||||
for attr_value in attrs.values():
|
||||
if isinstance(attr_value, AbstractDataService):
|
||||
attr_value._task_manager.start_autostart_tasks()
|
||||
elif isinstance(attr_value, list):
|
||||
for item in attr_value:
|
||||
if isinstance(item, AbstractDataService):
|
||||
item._task_manager.start_autostart_tasks()
|
||||
|
||||
def _make_stop_task(self, name: str) -> Callable[..., Any]:
|
||||
"""
|
||||
Factory function to create a 'stop_task' function for a running task.
|
||||
|
||||
The generated function cancels the associated asyncio task using 'name' for
|
||||
identification, ensuring proper cleanup. Avoids closure and late binding issues.
|
||||
|
||||
Args:
|
||||
name (str): The name of the coroutine task, used for its identification.
|
||||
"""
|
||||
|
||||
def stop_task() -> None:
|
||||
# cancel the task
|
||||
task = self.tasks.get(name, None)
|
||||
if task is not None:
|
||||
self._loop.call_soon_threadsafe(task["task"].cancel)
|
||||
|
||||
return stop_task
|
||||
|
||||
def _make_start_task(
|
||||
self, name: str, method: Callable[..., Any]
|
||||
) -> Callable[..., Any]:
|
||||
"""
|
||||
Factory function to create a 'start_task' function for a coroutine.
|
||||
|
||||
The generated function starts the coroutine as an asyncio task, handling
|
||||
registration and monitoring.
|
||||
It uses 'name' and 'method' to avoid the closure and late binding issue.
|
||||
|
||||
Args:
|
||||
name (str): The name of the coroutine, used for task management.
|
||||
method (callable): The coroutine to be turned into an asyncio task.
|
||||
"""
|
||||
|
||||
@wraps(method)
|
||||
def start_task(*args: Any, **kwargs: Any) -> None:
|
||||
def task_done_callback(task: asyncio.Task[None], name: str) -> None:
|
||||
"""Handles tasks that have finished.
|
||||
|
||||
Removes a task from the tasks dictionary, calls the defined
|
||||
callbacks, and logs and re-raises exceptions."""
|
||||
|
||||
# removing the finished task from the tasks i
|
||||
self.tasks.pop(name, None)
|
||||
|
||||
# emit the notification that the task was stopped
|
||||
self.service._notify_changed(name, None)
|
||||
|
||||
exception = task.exception()
|
||||
if exception is not None:
|
||||
# Handle the exception, or you can re-raise it.
|
||||
logger.error(
|
||||
"Task '%s' encountered an exception: %s: %s",
|
||||
name,
|
||||
type(exception).__name__,
|
||||
exception,
|
||||
)
|
||||
raise exception
|
||||
|
||||
async def task(*args: Any, **kwargs: Any) -> None:
|
||||
try:
|
||||
await method(*args, **kwargs)
|
||||
except asyncio.CancelledError:
|
||||
logger.info("Task '%s' was cancelled", name)
|
||||
|
||||
if not self.tasks.get(name):
|
||||
# Get the signature of the coroutine method to start
|
||||
sig = inspect.signature(method)
|
||||
|
||||
# Create a list of the parameter names from the method signature.
|
||||
parameter_names = list(sig.parameters.keys())
|
||||
|
||||
# Extend the list of positional arguments with None values to match
|
||||
# the length of the parameter names list. This is done to ensure
|
||||
# that zip can pair each parameter name with a corresponding value.
|
||||
args_padded = list(args) + [None] * (len(parameter_names) - len(args))
|
||||
|
||||
# Create a dictionary of keyword arguments by pairing the parameter
|
||||
# names with the values in 'args_padded'. Then merge this dictionary
|
||||
# with the 'kwargs' dictionary. If a parameter is specified in both
|
||||
# 'args_padded' and 'kwargs', the value from 'kwargs' is used.
|
||||
kwargs_updated = {
|
||||
**dict(zip(parameter_names, args_padded, strict=True)),
|
||||
**kwargs,
|
||||
}
|
||||
|
||||
# creating the task and adding the task_done_callback which checks
|
||||
# if an exception has occured during the task execution
|
||||
task_object = self._loop.create_task(task(*args, **kwargs))
|
||||
task_object.add_done_callback(
|
||||
lambda task: task_done_callback(task, name)
|
||||
)
|
||||
|
||||
# Store the task and its arguments in the '__tasks' dictionary. The
|
||||
# key is the name of the method, and the value is a dictionary
|
||||
# containing the task object and the updated keyword arguments.
|
||||
self.tasks[name] = {
|
||||
"task": task_object,
|
||||
"kwargs": kwargs_updated,
|
||||
}
|
||||
|
||||
# emit the notification that the task was started
|
||||
self.service._notify_changed(name, kwargs_updated)
|
||||
else:
|
||||
logger.error("Task '%s' is already running!", name)
|
||||
|
||||
return start_task
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
{
|
||||
"files": {
|
||||
"main.css": "/static/css/main.398bc7f8.css",
|
||||
"main.js": "/static/js/main.c348625e.js",
|
||||
"main.css": "/static/css/main.2d8458eb.css",
|
||||
"main.js": "/static/js/main.7f907b0f.js",
|
||||
"index.html": "/index.html",
|
||||
"main.398bc7f8.css.map": "/static/css/main.398bc7f8.css.map",
|
||||
"main.c348625e.js.map": "/static/js/main.c348625e.js.map"
|
||||
"main.2d8458eb.css.map": "/static/css/main.2d8458eb.css.map",
|
||||
"main.7f907b0f.js.map": "/static/js/main.7f907b0f.js.map"
|
||||
},
|
||||
"entrypoints": [
|
||||
"static/css/main.398bc7f8.css",
|
||||
"static/js/main.c348625e.js"
|
||||
"static/css/main.2d8458eb.css",
|
||||
"static/js/main.7f907b0f.js"
|
||||
]
|
||||
}
|
||||
@@ -1 +1 @@
|
||||
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="Web site displaying a pydase UI."/><link rel="apple-touch-icon" href="/logo192.png"/><link rel="manifest" href="/manifest.json"/><title>pydase App</title><script defer="defer" src="/static/js/main.c348625e.js"></script><link href="/static/css/main.398bc7f8.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
|
||||
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="Web site displaying a pydase UI."/><link rel="apple-touch-icon" href="/logo192.png"/><link rel="manifest" href="/manifest.json"/><title>pydase App</title><script defer="defer" src="/static/js/main.7f907b0f.js"></script><link href="/static/css/main.2d8458eb.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
|
||||
6
src/pydase/frontend/static/css/main.2d8458eb.css
Normal file
6
src/pydase/frontend/static/css/main.2d8458eb.css
Normal file
File diff suppressed because one or more lines are too long
1
src/pydase/frontend/static/css/main.2d8458eb.css.map
Normal file
1
src/pydase/frontend/static/css/main.2d8458eb.css.map
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
3
src/pydase/frontend/static/js/main.7f907b0f.js
Normal file
3
src/pydase/frontend/static/js/main.7f907b0f.js
Normal file
File diff suppressed because one or more lines are too long
@@ -4,8 +4,6 @@
|
||||
http://jedwatson.github.io/classnames
|
||||
*/
|
||||
|
||||
/*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/facebook/regenerator/blob/main/LICENSE */
|
||||
|
||||
/**
|
||||
* @license React
|
||||
* react-dom.production.min.js
|
||||
@@ -45,11 +43,3 @@
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @mui/styled-engine v5.13.2
|
||||
*
|
||||
* @license MIT
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
1
src/pydase/frontend/static/js/main.7f907b0f.js.map
Normal file
1
src/pydase/frontend/static/js/main.7f907b0f.js.map
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
3
src/pydase/observer_pattern/observable/__init__.py
Normal file
3
src/pydase/observer_pattern/observable/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from pydase.observer_pattern.observable.observable import Observable
|
||||
|
||||
__all__ = ["Observable"]
|
||||
71
src/pydase/observer_pattern/observable/observable.py
Normal file
71
src/pydase/observer_pattern/observable/observable.py
Normal file
@@ -0,0 +1,71 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pydase.observer_pattern.observable.observable_object import ObservableObject
|
||||
from pydase.utils.helpers import is_property_attribute
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Observable(ObservableObject):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
class_attrs = {
|
||||
k: type(self).__dict__[k]
|
||||
for k in set(type(self).__dict__)
|
||||
- set(Observable.__dict__)
|
||||
- set(self.__dict__)
|
||||
}
|
||||
for name, value in class_attrs.items():
|
||||
if isinstance(value, property) or callable(value):
|
||||
continue
|
||||
self.__dict__[name] = self._initialise_new_objects(name, value)
|
||||
|
||||
def __setattr__(self, name: str, value: Any) -> None:
|
||||
if not hasattr(self, "_observers") and name != "_observers":
|
||||
logger.warning(
|
||||
"Ensure that super().__init__() is called at the start of the '%s' "
|
||||
"constructor! Failing to do so may lead to unexpected behavior.",
|
||||
type(self).__name__,
|
||||
)
|
||||
self._observers = {}
|
||||
|
||||
value = self._handle_observable_setattr(name, value)
|
||||
|
||||
super().__setattr__(name, value)
|
||||
|
||||
self._notify_changed(name, value)
|
||||
|
||||
def __getattribute__(self, name: str) -> Any:
|
||||
if is_property_attribute(self, name):
|
||||
self._notify_change_start(name)
|
||||
|
||||
value = super().__getattribute__(name)
|
||||
|
||||
if is_property_attribute(self, name):
|
||||
self._notify_changed(name, value)
|
||||
|
||||
return value
|
||||
|
||||
def _handle_observable_setattr(self, name: str, value: Any) -> Any:
|
||||
if name == "_observers":
|
||||
return value
|
||||
|
||||
self._remove_observer_if_observable(name)
|
||||
value = self._initialise_new_objects(name, value)
|
||||
self._notify_change_start(name)
|
||||
return value
|
||||
|
||||
def _remove_observer_if_observable(self, name: str) -> None:
|
||||
if not is_property_attribute(self, name):
|
||||
current_value = getattr(self, name, None)
|
||||
|
||||
if isinstance(current_value, ObservableObject):
|
||||
current_value._remove_observer(self, name)
|
||||
|
||||
def _construct_extended_attr_path(
|
||||
self, observer_attr_name: str, instance_attr_name: str
|
||||
) -> str:
|
||||
if observer_attr_name != "":
|
||||
return f"{observer_attr_name}.{instance_attr_name}"
|
||||
return instance_attr_name
|
||||
263
src/pydase/observer_pattern/observable/observable_object.py
Normal file
263
src/pydase/observer_pattern/observable/observable_object.py
Normal file
@@ -0,0 +1,263 @@
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import Iterable
|
||||
from typing import TYPE_CHECKING, Any, ClassVar, SupportsIndex
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pydase.observer_pattern.observer.observer import Observer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ObservableObject(ABC):
|
||||
_list_mapping: ClassVar[dict[int, "_ObservableList"]] = {}
|
||||
_dict_mapping: ClassVar[dict[int, "_ObservableDict"]] = {}
|
||||
|
||||
def __init__(self) -> None:
|
||||
if not hasattr(self, "_observers"):
|
||||
self._observers: dict[str, list["ObservableObject | Observer"]] = {}
|
||||
|
||||
def add_observer(
|
||||
self, observer: "ObservableObject | Observer", attr_name: str = ""
|
||||
) -> None:
|
||||
if attr_name not in self._observers:
|
||||
self._observers[attr_name] = []
|
||||
if observer not in self._observers[attr_name]:
|
||||
self._observers[attr_name].append(observer)
|
||||
|
||||
def _remove_observer(self, observer: "ObservableObject", attribute: str) -> None:
|
||||
if attribute in self._observers:
|
||||
self._observers[attribute].remove(observer)
|
||||
|
||||
@abstractmethod
|
||||
def _remove_observer_if_observable(self, name: str) -> None:
|
||||
"""Removes the current object as an observer from an observable attribute.
|
||||
|
||||
This method is called before an attribute of the observable object is
|
||||
changed. If the current value of the attribute is an instance of
|
||||
`ObservableObject`, this method removes the current object from its list
|
||||
of observers. This is a crucial step to avoid unwanted notifications from
|
||||
the old value of the attribute.
|
||||
"""
|
||||
|
||||
def _notify_changed(self, changed_attribute: str, value: Any) -> None:
|
||||
"""Notifies all observers about changes to an attribute.
|
||||
|
||||
This method iterates through all observers registered for the object and
|
||||
invokes their notification method. It is called whenever an attribute of
|
||||
the observable object is changed.
|
||||
|
||||
Args:
|
||||
changed_attribute (str): The name of the changed attribute.
|
||||
value (Any): The value that the attribute was set to.
|
||||
"""
|
||||
for attr_name, observer_list in self._observers.items():
|
||||
for observer in observer_list:
|
||||
extendend_attr_path = self._construct_extended_attr_path(
|
||||
attr_name, changed_attribute
|
||||
)
|
||||
observer._notify_changed(extendend_attr_path, value)
|
||||
|
||||
def _notify_change_start(self, changing_attribute: str) -> None:
|
||||
"""Notify observers that an attribute or item change process has started.
|
||||
|
||||
This method is called at the start of the process of modifying an attribute in
|
||||
the observed `Observable` object. It registers the attribute as currently
|
||||
undergoing a change. This registration helps in managing and tracking changes as
|
||||
they occur, especially in scenarios where the order of changes or their state
|
||||
during the transition is significant.
|
||||
|
||||
Args:
|
||||
changing_attribute (str): The name of the attribute that is starting to
|
||||
change. This is typically the full access path of the attribute in the
|
||||
`Observable`.
|
||||
value (Any): The value that the attribute is being set to.
|
||||
"""
|
||||
|
||||
for attr_name, observer_list in self._observers.items():
|
||||
for observer in observer_list:
|
||||
extended_attr_path = self._construct_extended_attr_path(
|
||||
attr_name, changing_attribute
|
||||
)
|
||||
observer._notify_change_start(extended_attr_path)
|
||||
|
||||
def _initialise_new_objects(self, attr_name_or_key: Any, value: Any) -> Any:
|
||||
new_value = value
|
||||
if isinstance(value, list):
|
||||
if id(value) in self._list_mapping:
|
||||
# If the list `value` was already referenced somewhere else
|
||||
new_value = self._list_mapping[id(value)]
|
||||
else:
|
||||
# convert the builtin list into a ObservableList
|
||||
new_value = _ObservableList(original_list=value)
|
||||
self._list_mapping[id(value)] = new_value
|
||||
elif isinstance(value, dict):
|
||||
if id(value) in self._dict_mapping:
|
||||
# If the list `value` was already referenced somewhere else
|
||||
new_value = self._dict_mapping[id(value)]
|
||||
else:
|
||||
# convert the builtin list into a ObservableList
|
||||
new_value = _ObservableDict(original_dict=value)
|
||||
self._dict_mapping[id(value)] = new_value
|
||||
if isinstance(new_value, ObservableObject):
|
||||
new_value.add_observer(self, str(attr_name_or_key))
|
||||
return new_value
|
||||
|
||||
@abstractmethod
|
||||
def _construct_extended_attr_path(
|
||||
self, observer_attr_name: str, instance_attr_name: str
|
||||
) -> str:
|
||||
"""
|
||||
Constructs the extended attribute path for notification purposes, which is used
|
||||
in the observer pattern to specify the full path of an observed attribute.
|
||||
|
||||
This abstract method is implemented by the classes inheriting from
|
||||
`ObservableObject`.
|
||||
|
||||
Args:
|
||||
observer_attr_name (str): The name of the attribute in the observer that
|
||||
holds a reference to the instance. Equals `""` if observer itself is of type
|
||||
`Observer`.
|
||||
instance_attr_name (str): The name of the attribute within the instance that
|
||||
has changed.
|
||||
|
||||
Returns:
|
||||
str: The constructed extended attribute path.
|
||||
"""
|
||||
|
||||
|
||||
class _ObservableList(ObservableObject, list[Any]):
|
||||
def __init__(
|
||||
self,
|
||||
original_list: list[Any],
|
||||
) -> None:
|
||||
self._original_list = original_list
|
||||
ObservableObject.__init__(self)
|
||||
list.__init__(self, self._original_list)
|
||||
for i, item in enumerate(self._original_list):
|
||||
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
||||
|
||||
def __setitem__(self, key: int, value: Any) -> None: # type: ignore[override]
|
||||
if hasattr(self, "_observers"):
|
||||
self._remove_observer_if_observable(f"[{key}]")
|
||||
value = self._initialise_new_objects(f"[{key}]", value)
|
||||
self._notify_change_start(f"[{key}]")
|
||||
|
||||
super().__setitem__(key, value)
|
||||
|
||||
self._notify_changed(f"[{key}]", value)
|
||||
|
||||
def append(self, __object: Any) -> None:
|
||||
self._initialise_new_objects(f"[{len(self)}]", __object)
|
||||
super().append(__object)
|
||||
self._notify_changed("", self)
|
||||
|
||||
def clear(self) -> None:
|
||||
self._remove_self_from_observables()
|
||||
|
||||
super().clear()
|
||||
|
||||
self._notify_changed("", self)
|
||||
|
||||
def extend(self, __iterable: Iterable[Any]) -> None:
|
||||
self._remove_self_from_observables()
|
||||
|
||||
try:
|
||||
super().extend(__iterable)
|
||||
finally:
|
||||
for i, item in enumerate(self):
|
||||
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
||||
|
||||
self._notify_changed("", self)
|
||||
|
||||
def insert(self, __index: SupportsIndex, __object: Any) -> None:
|
||||
self._remove_self_from_observables()
|
||||
|
||||
try:
|
||||
super().insert(__index, __object)
|
||||
finally:
|
||||
for i, item in enumerate(self):
|
||||
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
||||
|
||||
self._notify_changed("", self)
|
||||
|
||||
def pop(self, __index: SupportsIndex = -1) -> Any:
|
||||
self._remove_self_from_observables()
|
||||
|
||||
try:
|
||||
popped_item = super().pop(__index)
|
||||
finally:
|
||||
for i, item in enumerate(self):
|
||||
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
||||
|
||||
self._notify_changed("", self)
|
||||
return popped_item
|
||||
|
||||
def remove(self, __value: Any) -> None:
|
||||
self._remove_self_from_observables()
|
||||
|
||||
try:
|
||||
super().remove(__value)
|
||||
finally:
|
||||
for i, item in enumerate(self):
|
||||
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
||||
|
||||
self._notify_changed("", self)
|
||||
|
||||
def _remove_self_from_observables(self) -> None:
|
||||
for i in range(len(self)):
|
||||
self._remove_observer_if_observable(f"[{i}]")
|
||||
|
||||
def _remove_observer_if_observable(self, name: str) -> None:
|
||||
key = int(name[1:-1])
|
||||
current_value = self.__getitem__(key)
|
||||
|
||||
if isinstance(current_value, ObservableObject):
|
||||
current_value._remove_observer(self, name)
|
||||
|
||||
def _construct_extended_attr_path(
|
||||
self, observer_attr_name: str, instance_attr_name: str
|
||||
) -> str:
|
||||
if observer_attr_name != "":
|
||||
return f"{observer_attr_name}{instance_attr_name}"
|
||||
return instance_attr_name
|
||||
|
||||
|
||||
class _ObservableDict(dict[str, Any], ObservableObject):
|
||||
def __init__(
|
||||
self,
|
||||
original_dict: dict[str, Any],
|
||||
) -> None:
|
||||
self._original_dict = original_dict
|
||||
ObservableObject.__init__(self)
|
||||
dict.__init__(self)
|
||||
for key, value in self._original_dict.items():
|
||||
super().__setitem__(key, self._initialise_new_objects(f"['{key}']", value))
|
||||
|
||||
def __setitem__(self, key: str, value: Any) -> None:
|
||||
if not isinstance(key, str):
|
||||
logger.warning("Converting non-string dictionary key %s to string.", key)
|
||||
key = str(key)
|
||||
|
||||
if hasattr(self, "_observers"):
|
||||
self._remove_observer_if_observable(f"['{key}']")
|
||||
value = self._initialise_new_objects(key, value)
|
||||
self._notify_change_start(f"['{key}']")
|
||||
|
||||
super().__setitem__(key, value)
|
||||
|
||||
self._notify_changed(f"['{key}']", value)
|
||||
|
||||
def _remove_observer_if_observable(self, name: str) -> None:
|
||||
key = name[2:-2]
|
||||
current_value = self.get(key, None)
|
||||
|
||||
if isinstance(current_value, ObservableObject):
|
||||
current_value._remove_observer(self, name)
|
||||
|
||||
def _construct_extended_attr_path(
|
||||
self, observer_attr_name: str, instance_attr_name: str
|
||||
) -> str:
|
||||
if observer_attr_name != "":
|
||||
return f"{observer_attr_name}{instance_attr_name}"
|
||||
return instance_attr_name
|
||||
7
src/pydase/observer_pattern/observer/__init__.py
Normal file
7
src/pydase/observer_pattern/observer/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from pydase.observer_pattern.observer.observer import Observer
|
||||
from pydase.observer_pattern.observer.property_observer import PropertyObserver
|
||||
|
||||
__all__ = [
|
||||
"Observer",
|
||||
"PropertyObserver",
|
||||
]
|
||||
31
src/pydase/observer_pattern/observer/observer.py
Normal file
31
src/pydase/observer_pattern/observer/observer.py
Normal file
@@ -0,0 +1,31 @@
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any
|
||||
|
||||
from pydase.observer_pattern.observable import Observable
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Observer(ABC):
|
||||
def __init__(self, observable: Observable) -> None:
|
||||
self.observable = observable
|
||||
self.observable.add_observer(self)
|
||||
self.changing_attributes: list[str] = []
|
||||
|
||||
def _notify_changed(self, changed_attribute: str, value: Any) -> None:
|
||||
if changed_attribute in self.changing_attributes:
|
||||
self.changing_attributes.remove(changed_attribute)
|
||||
|
||||
self.on_change(full_access_path=changed_attribute, value=value)
|
||||
|
||||
def _notify_change_start(self, changing_attribute: str) -> None:
|
||||
self.changing_attributes.append(changing_attribute)
|
||||
self.on_change_start(changing_attribute)
|
||||
|
||||
@abstractmethod
|
||||
def on_change(self, full_access_path: str, value: Any) -> None:
|
||||
...
|
||||
|
||||
def on_change_start(self, full_access_path: str) -> None:
|
||||
return
|
||||
95
src/pydase/observer_pattern/observer/property_observer.py
Normal file
95
src/pydase/observer_pattern/observer/property_observer.py
Normal file
@@ -0,0 +1,95 @@
|
||||
import inspect
|
||||
import logging
|
||||
import re
|
||||
from typing import Any
|
||||
|
||||
from pydase.observer_pattern.observable.observable import Observable
|
||||
from pydase.observer_pattern.observer.observer import Observer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def reverse_dict(original_dict: dict[str, list[str]]) -> dict[str, list[str]]:
|
||||
reversed_dict: dict[str, list[str]] = {
|
||||
value: [] for values in original_dict.values() for value in values
|
||||
}
|
||||
for key, values in original_dict.items():
|
||||
for value in values:
|
||||
reversed_dict[value].append(key)
|
||||
return reversed_dict
|
||||
|
||||
|
||||
def get_property_dependencies(prop: property, prefix: str = "") -> list[str]:
|
||||
source_code_string = inspect.getsource(prop.fget) # type: ignore[arg-type]
|
||||
pattern = r"self\.([^\s\{\}]+)"
|
||||
matches = re.findall(pattern, source_code_string)
|
||||
return [prefix + match for match in matches if "(" not in match]
|
||||
|
||||
|
||||
class PropertyObserver(Observer):
|
||||
def __init__(self, observable: Observable) -> None:
|
||||
super().__init__(observable)
|
||||
self._update_property_deps_dict()
|
||||
|
||||
def _update_property_deps_dict(self) -> None:
|
||||
self.property_deps_dict = reverse_dict(
|
||||
self._get_properties_and_their_dependencies(self.observable)
|
||||
)
|
||||
|
||||
def _get_properties_and_their_dependencies(
|
||||
self, obj: Observable, prefix: str = ""
|
||||
) -> dict[str, list[str]]:
|
||||
deps: dict[str, Any] = {}
|
||||
|
||||
self._process_observable_properties(obj, deps, prefix)
|
||||
self._process_nested_observables_properties(obj, deps, prefix)
|
||||
|
||||
return deps
|
||||
|
||||
def _process_observable_properties(
|
||||
self, obj: Observable, deps: dict[str, Any], prefix: str
|
||||
) -> None:
|
||||
for k, value in vars(type(obj)).items():
|
||||
prefix = (
|
||||
f"{prefix}." if prefix != "" and not prefix.endswith(".") else prefix
|
||||
)
|
||||
key = f"{prefix}{k}"
|
||||
if isinstance(value, property):
|
||||
deps[key] = get_property_dependencies(value, prefix)
|
||||
|
||||
def _process_nested_observables_properties(
|
||||
self, obj: Observable, deps: dict[str, Any], prefix: str
|
||||
) -> None:
|
||||
for k, value in vars(obj).items():
|
||||
prefix = (
|
||||
f"{prefix}." if prefix != "" and not prefix.endswith(".") else prefix
|
||||
)
|
||||
parent_path = f"{prefix}{k}"
|
||||
if isinstance(value, Observable):
|
||||
new_prefix = f"{parent_path}."
|
||||
deps.update(
|
||||
self._get_properties_and_their_dependencies(value, new_prefix)
|
||||
)
|
||||
elif isinstance(value, list | dict):
|
||||
self._process_collection_item_properties(value, deps, parent_path)
|
||||
|
||||
def _process_collection_item_properties(
|
||||
self,
|
||||
collection: list[Any] | dict[str, Any],
|
||||
deps: dict[str, Any],
|
||||
parent_path: str,
|
||||
) -> None:
|
||||
if isinstance(collection, list):
|
||||
for i, item in enumerate(collection):
|
||||
if isinstance(item, Observable):
|
||||
new_prefix = f"{parent_path}[{i}]"
|
||||
deps.update(
|
||||
self._get_properties_and_their_dependencies(item, new_prefix)
|
||||
)
|
||||
elif isinstance(collection, dict):
|
||||
for key, val in collection.items():
|
||||
if isinstance(val, Observable):
|
||||
new_prefix = f"{parent_path}['{key}']"
|
||||
deps.update(
|
||||
self._get_properties_and_their_dependencies(val, new_prefix)
|
||||
)
|
||||
@@ -1,26 +1,26 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import signal
|
||||
import threading
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from types import FrameType
|
||||
from typing import Any, Optional, Protocol, TypedDict
|
||||
from typing import Any, Protocol, TypedDict
|
||||
|
||||
import uvicorn
|
||||
from loguru import logger
|
||||
from rpyc import (
|
||||
ForkingServer, # can be used for multiprocessing, e.g. a database interface server
|
||||
)
|
||||
from rpyc import ThreadedServer
|
||||
from rpyc import ForkingServer, ThreadedServer # type: ignore[import-untyped]
|
||||
from uvicorn.server import HANDLED_SIGNALS
|
||||
|
||||
import pydase.units as u
|
||||
from pydase import DataService
|
||||
from pydase.version import __version__
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pydase.utils.serializer import dump
|
||||
|
||||
from .web_server import WebAPI
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AdditionalServerProtocol(Protocol):
|
||||
"""
|
||||
@@ -45,13 +45,22 @@ class AdditionalServerProtocol(Protocol):
|
||||
The hostname or IP address at which the server will be hosted. This could be a
|
||||
local address (like '127.0.0.1' for localhost) or a public IP address.
|
||||
|
||||
state_manager: StateManager
|
||||
The state manager managing the state cache and persistence of the exposed
|
||||
service.
|
||||
|
||||
**kwargs: Any
|
||||
Any additional parameters required for initializing the server. These parameters
|
||||
are specific to the server's implementation.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, service: DataService, port: int, host: str, **kwargs: Any
|
||||
self,
|
||||
service: DataService,
|
||||
port: int,
|
||||
host: str,
|
||||
state_manager: StateManager,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
...
|
||||
|
||||
@@ -59,7 +68,6 @@ class AdditionalServerProtocol(Protocol):
|
||||
"""Starts the server. This method should be implemented as an asynchronous
|
||||
method, which means that it should be able to run concurrently with other tasks.
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
class AdditionalServer(TypedDict):
|
||||
@@ -96,11 +104,10 @@ class Server:
|
||||
Whether to enable the RPC server. Default is True.
|
||||
enable_web: bool
|
||||
Whether to enable the web server. Default is True.
|
||||
filename: str | Path | None
|
||||
Filename of the file managing the service state persistence. Defaults to None.
|
||||
use_forking_server: bool
|
||||
Whether to use ForkingServer for multiprocessing (e.g. for a database interface
|
||||
server). Default is False.
|
||||
web_settings: dict[str, Any]
|
||||
Additional settings for the web server. Default is {} (an empty dictionary).
|
||||
Whether to use ForkingServer for multiprocessing. Default is False.
|
||||
additional_servers : list[AdditionalServer]
|
||||
A list of additional servers to run alongside the main server. Each entry in the
|
||||
list should be a dictionary with the following structure:
|
||||
@@ -118,9 +125,15 @@ class Server:
|
||||
|
||||
>>> class MyCustomServer:
|
||||
... def __init__(
|
||||
... self, service: DataService, port: int, host: str, **kwargs: Any
|
||||
... self,
|
||||
... service: DataService,
|
||||
... port: int,
|
||||
... host: str,
|
||||
... state_manager: StateManager,
|
||||
... **kwargs: Any
|
||||
... ):
|
||||
... self.service = service
|
||||
... self.state_manager = state_manager
|
||||
... self.port = port
|
||||
... self.host = host
|
||||
... # handle any additional arguments...
|
||||
@@ -147,7 +160,7 @@ class Server:
|
||||
Additional keyword arguments.
|
||||
"""
|
||||
|
||||
def __init__( # noqa: CFQ002
|
||||
def __init__( # noqa: PLR0913
|
||||
self,
|
||||
service: DataService,
|
||||
host: str = "0.0.0.0",
|
||||
@@ -155,18 +168,19 @@ class Server:
|
||||
web_port: int = 8001,
|
||||
enable_rpc: bool = True,
|
||||
enable_web: bool = True,
|
||||
filename: str | Path | None = None,
|
||||
use_forking_server: bool = False,
|
||||
web_settings: dict[str, Any] = {},
|
||||
additional_servers: list[AdditionalServer] = [],
|
||||
additional_servers: list[AdditionalServer] | None = None,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
if additional_servers is None:
|
||||
additional_servers = []
|
||||
self._service = service
|
||||
self._host = host
|
||||
self._rpc_port = rpc_port
|
||||
self._web_port = web_port
|
||||
self._enable_rpc = enable_rpc
|
||||
self._enable_web = enable_web
|
||||
self._web_settings = web_settings
|
||||
self._kwargs = kwargs
|
||||
self._loop: asyncio.AbstractEventLoop
|
||||
self._rpc_server_type = ForkingServer if use_forking_server else ThreadedServer
|
||||
@@ -174,17 +188,11 @@ class Server:
|
||||
self.should_exit = False
|
||||
self.servers: dict[str, asyncio.Future[Any]] = {}
|
||||
self.executor: ThreadPoolExecutor | None = None
|
||||
self._info: dict[str, Any] = {
|
||||
"name": self._service.get_service_name(),
|
||||
"version": __version__,
|
||||
"rpc_port": self._rpc_port,
|
||||
"web_port": self._web_port,
|
||||
"enable_rpc": self._enable_rpc,
|
||||
"enable_web": self._enable_web,
|
||||
"web_settings": self._web_settings,
|
||||
"additional_servers": [],
|
||||
**kwargs,
|
||||
}
|
||||
self._state_manager = StateManager(self._service, filename)
|
||||
if getattr(self._service, "_filename", None) is not None:
|
||||
self._service._state_manager = self._state_manager
|
||||
self._state_manager.load_state()
|
||||
self._observer = DataServiceObserver(self._state_manager)
|
||||
|
||||
def run(self) -> None:
|
||||
"""
|
||||
@@ -212,7 +220,7 @@ class Server:
|
||||
async def serve(self) -> None:
|
||||
process_id = os.getpid()
|
||||
|
||||
logger.info(f"Started server process [{process_id}]")
|
||||
logger.info("Started server process [%s]", process_id)
|
||||
|
||||
await self.startup()
|
||||
if self.should_exit:
|
||||
@@ -220,7 +228,7 @@ class Server:
|
||||
await self.main_loop()
|
||||
await self.shutdown()
|
||||
|
||||
logger.info(f"Finished server process [{process_id}]")
|
||||
logger.info("Finished server process [%s]", process_id)
|
||||
|
||||
async def startup(self) -> None: # noqa: C901
|
||||
self._loop = asyncio.get_running_loop()
|
||||
@@ -247,28 +255,20 @@ class Server:
|
||||
self._service,
|
||||
port=server["port"],
|
||||
host=self._host,
|
||||
info=self._info,
|
||||
state_manager=self._state_manager,
|
||||
**server["kwargs"],
|
||||
)
|
||||
|
||||
server_name = (
|
||||
addin_server.__module__ + "." + addin_server.__class__.__name__
|
||||
)
|
||||
self._info["additional_servers"].append(
|
||||
{
|
||||
"name": server_name,
|
||||
"port": server["port"],
|
||||
"host": self._host,
|
||||
**server["kwargs"],
|
||||
}
|
||||
)
|
||||
|
||||
future_or_task = self._loop.create_task(addin_server.serve())
|
||||
self.servers[server_name] = future_or_task
|
||||
if self._enable_web:
|
||||
self._wapi: WebAPI = WebAPI(
|
||||
self._wapi = WebAPI(
|
||||
service=self._service,
|
||||
info=self._info,
|
||||
state_manager=self._state_manager,
|
||||
**self._kwargs,
|
||||
)
|
||||
web_server = uvicorn.Server(
|
||||
@@ -277,39 +277,37 @@ class Server:
|
||||
)
|
||||
)
|
||||
|
||||
def sio_callback(parent_path: str, name: str, value: Any) -> None:
|
||||
# TODO: an error happens when an attribute is set to a list
|
||||
# > File "/usr/lib64/python3.11/json/encoder.py", line 180, in default
|
||||
# > raise TypeError(f'Object of type {o.__class__.__name__} '
|
||||
# > TypeError: Object of type list is not JSON serializable
|
||||
notify_value = value
|
||||
if isinstance(value, Enum):
|
||||
notify_value = value.name
|
||||
if isinstance(value, u.Quantity):
|
||||
notify_value = {"magnitude": value.m, "unit": str(value.u)}
|
||||
def sio_callback(
|
||||
full_access_path: str, value: Any, cached_value_dict: dict[str, Any]
|
||||
) -> None:
|
||||
if cached_value_dict != {}:
|
||||
serialized_value = dump(value)
|
||||
if cached_value_dict["type"] != "method":
|
||||
cached_value_dict["type"] = serialized_value["type"]
|
||||
|
||||
async def notify() -> None:
|
||||
try:
|
||||
await self._wapi.sio.emit( # type: ignore
|
||||
"notify",
|
||||
{
|
||||
"data": {
|
||||
"parent_path": parent_path,
|
||||
"name": name,
|
||||
"value": notify_value,
|
||||
}
|
||||
},
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to send notification: {e}")
|
||||
cached_value_dict["value"] = serialized_value["value"]
|
||||
|
||||
self._loop.create_task(notify())
|
||||
async def notify() -> None:
|
||||
try:
|
||||
await self._wapi.sio.emit(
|
||||
"notify",
|
||||
{
|
||||
"data": {
|
||||
"full_access_path": full_access_path,
|
||||
"value": cached_value_dict,
|
||||
}
|
||||
},
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning("Failed to send notification: %s", e)
|
||||
|
||||
self._service._callback_manager.add_notification_callback(sio_callback)
|
||||
self._loop.create_task(notify())
|
||||
|
||||
self._observer.add_notification_callback(sio_callback)
|
||||
|
||||
# overwrite uvicorn's signal handlers, otherwise it will bogart SIGINT and
|
||||
# SIGTERM, which makes it impossible to escape out of
|
||||
web_server.install_signal_handlers = lambda: None # type: ignore
|
||||
web_server.install_signal_handlers = lambda: None # type: ignore[method-assign]
|
||||
future_or_task = self._loop.create_task(web_server.serve())
|
||||
self.servers["web"] = future_or_task
|
||||
|
||||
@@ -320,9 +318,9 @@ class Server:
|
||||
async def shutdown(self) -> None:
|
||||
logger.info("Shutting down")
|
||||
|
||||
logger.info(f"Saving data to {self._service._filename}.")
|
||||
if self._service._filename is not None:
|
||||
self._service.write_to_file()
|
||||
logger.info("Saving data to %s.", self._state_manager.filename)
|
||||
if self._state_manager is not None:
|
||||
self._state_manager.save_state()
|
||||
|
||||
await self.__cancel_servers()
|
||||
await self.__cancel_tasks()
|
||||
@@ -337,9 +335,9 @@ class Server:
|
||||
try:
|
||||
await task
|
||||
except asyncio.CancelledError:
|
||||
logger.debug(f"Cancelled {server_name} server.")
|
||||
logger.debug("Cancelled '%s' server.", server_name)
|
||||
except Exception as e:
|
||||
logger.warning(f"Unexpected exception: {e}.")
|
||||
logger.warning("Unexpected exception: %s", e)
|
||||
|
||||
async def __cancel_tasks(self) -> None:
|
||||
for task in asyncio.all_tasks(self._loop):
|
||||
@@ -347,29 +345,27 @@ class Server:
|
||||
try:
|
||||
await task
|
||||
except asyncio.CancelledError:
|
||||
logger.debug(f"Cancelled task {task.get_coro()}.")
|
||||
logger.debug("Cancelled task '%s'.", task.get_coro())
|
||||
except Exception as e:
|
||||
logger.warning(f"Unexpected exception: {e}.")
|
||||
logger.exception("Unexpected exception: %s", e)
|
||||
|
||||
def install_signal_handlers(self) -> None:
|
||||
if threading.current_thread() is not threading.main_thread():
|
||||
# Signals can only be listened to from the main thread.
|
||||
return
|
||||
|
||||
try:
|
||||
for sig in HANDLED_SIGNALS:
|
||||
self._loop.add_signal_handler(sig, self.handle_exit, sig, None)
|
||||
except NotImplementedError:
|
||||
# Windows
|
||||
for sig in HANDLED_SIGNALS:
|
||||
signal.signal(sig, self.handle_exit)
|
||||
for sig in HANDLED_SIGNALS:
|
||||
signal.signal(sig, self.handle_exit)
|
||||
|
||||
def handle_exit(self, sig: int = 0, frame: Optional[FrameType] = None) -> None:
|
||||
logger.info("Handling exit")
|
||||
def handle_exit(self, sig: int = 0, frame: FrameType | None = None) -> None:
|
||||
if self.should_exit and sig == signal.SIGINT:
|
||||
self.force_exit = True
|
||||
logger.warning("Received signal '%s', forcing exit...", sig)
|
||||
os._exit(1)
|
||||
else:
|
||||
self.should_exit = True
|
||||
logger.warning(
|
||||
"Received signal '%s', exiting... (CTRL+C to force quit)", sig
|
||||
)
|
||||
|
||||
def custom_exception_handler(
|
||||
self, loop: asyncio.AbstractEventLoop, context: dict[str, Any]
|
||||
@@ -386,7 +382,7 @@ class Server:
|
||||
|
||||
async def emit_exception() -> None:
|
||||
try:
|
||||
await self._wapi.sio.emit( # type: ignore
|
||||
await self._wapi.sio.emit(
|
||||
"exception",
|
||||
{
|
||||
"data": {
|
||||
@@ -396,7 +392,7 @@ class Server:
|
||||
},
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to send notification: {e}")
|
||||
logger.exception("Failed to send notification: %s", e)
|
||||
|
||||
loop.create_task(emit_exception())
|
||||
else:
|
||||
|
||||
@@ -1,15 +1,22 @@
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any, TypedDict
|
||||
|
||||
import socketio
|
||||
import socketio # type: ignore[import-untyped]
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.responses import FileResponse
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from loguru import logger
|
||||
|
||||
from pydase import DataService
|
||||
from pydase.data_service.data_service import process_callable_attribute
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pydase.utils.helpers import get_object_attr_from_path_list
|
||||
from pydase.utils.logging import SocketIOHandler
|
||||
from pydase.version import __version__
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class UpdateDict(TypedDict):
|
||||
"""
|
||||
@@ -40,54 +47,87 @@ class UpdateDict(TypedDict):
|
||||
value: Any
|
||||
|
||||
|
||||
class RunMethodDict(TypedDict):
|
||||
"""
|
||||
A TypedDict subclass representing a dictionary used for running methods from the
|
||||
exposed DataService.
|
||||
|
||||
Attributes:
|
||||
name (str): The name of the method to be run.
|
||||
parent_path (str): The access path for the parent object of the method to be
|
||||
run. This is used to construct the full access path for the method. For
|
||||
example, for an method with access path 'attr1.list_attr[0].method_name',
|
||||
'attr1.list_attr[0]' would be the parent_path.
|
||||
kwargs (dict[str, Any]): The arguments passed to the method.
|
||||
"""
|
||||
|
||||
name: str
|
||||
parent_path: str
|
||||
kwargs: dict[str, Any]
|
||||
|
||||
|
||||
class WebAPI:
|
||||
__sio_app: socketio.ASGIApp
|
||||
__fastapi_app: FastAPI
|
||||
|
||||
def __init__( # noqa: CFQ002
|
||||
def __init__( # noqa: PLR0913
|
||||
self,
|
||||
service: DataService,
|
||||
state_manager: StateManager,
|
||||
frontend: str | Path | None = None,
|
||||
css: str | Path | None = None,
|
||||
enable_CORS: bool = True,
|
||||
info: dict[str, Any] = {},
|
||||
enable_cors: bool = True,
|
||||
*args: Any,
|
||||
**kwargs: Any,
|
||||
):
|
||||
) -> None:
|
||||
self.service = service
|
||||
self.state_manager = state_manager
|
||||
self.frontend = frontend
|
||||
self.css = css
|
||||
self.enable_CORS = enable_CORS
|
||||
self.info = info
|
||||
self.enable_cors = enable_cors
|
||||
self.args = args
|
||||
self.kwargs = kwargs
|
||||
|
||||
self.setup_socketio()
|
||||
self.setup_fastapi_app()
|
||||
self.setup_logging_handler()
|
||||
|
||||
def setup_logging_handler(self) -> None:
|
||||
logger = logging.getLogger()
|
||||
logger.addHandler(SocketIOHandler(self.__sio))
|
||||
|
||||
def setup_socketio(self) -> None:
|
||||
# the socketio ASGI app, to notify clients when params update
|
||||
if self.enable_CORS:
|
||||
if self.enable_cors:
|
||||
sio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*")
|
||||
else:
|
||||
sio = socketio.AsyncServer(async_mode="asgi")
|
||||
|
||||
@sio.event # type: ignore
|
||||
def frontend_update(sid: str, data: UpdateDict) -> Any:
|
||||
logger.debug(f"Received frontend update: {data}")
|
||||
path_list, attr_name = data["parent_path"].split("."), data["name"]
|
||||
@sio.event
|
||||
def set_attribute(sid: str, data: UpdateDict) -> Any:
|
||||
logger.debug("Received frontend update: %s", data)
|
||||
path_list = [*data["parent_path"].split("."), data["name"]]
|
||||
path_list.remove("DataService") # always at the start, does not do anything
|
||||
return self.service.update_DataService_attribute(
|
||||
path_list=path_list, attr_name=attr_name, value=data["value"]
|
||||
path = ".".join(path_list)
|
||||
return self.state_manager.set_service_attribute_value_by_path(
|
||||
path=path, value=data["value"]
|
||||
)
|
||||
|
||||
@sio.event
|
||||
def run_method(sid: str, data: RunMethodDict) -> Any:
|
||||
logger.debug("Running method: %s", data)
|
||||
path_list = [*data["parent_path"].split("."), data["name"]]
|
||||
path_list.remove("DataService") # always at the start, does not do anything
|
||||
method = get_object_attr_from_path_list(self.service, path_list)
|
||||
return process_callable_attribute(method, data["kwargs"])
|
||||
|
||||
self.__sio = sio
|
||||
self.__sio_app = socketio.ASGIApp(self.__sio)
|
||||
|
||||
def setup_fastapi_app(self) -> None: # noqa: CFQ004
|
||||
def setup_fastapi_app(self) -> None:
|
||||
app = FastAPI()
|
||||
|
||||
if self.enable_CORS:
|
||||
if self.enable_cors:
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_credentials=True,
|
||||
@@ -97,7 +137,6 @@ class WebAPI:
|
||||
)
|
||||
app.mount("/ws", self.__sio_app)
|
||||
|
||||
# @app.get("/version", include_in_schema=False)
|
||||
@app.get("/version")
|
||||
def version() -> str:
|
||||
return __version__
|
||||
@@ -106,13 +145,16 @@ class WebAPI:
|
||||
def name() -> str:
|
||||
return self.service.get_service_name()
|
||||
|
||||
@app.get("/info")
|
||||
def info() -> dict[str, Any]:
|
||||
return self.info
|
||||
|
||||
@app.get("/service-properties")
|
||||
def service_properties() -> dict[str, Any]:
|
||||
return self.service.serialize()
|
||||
return self.state_manager.cache
|
||||
|
||||
# exposing custom.css file provided by user
|
||||
if self.css is not None:
|
||||
|
||||
@app.get("/custom.css")
|
||||
async def styles() -> FileResponse:
|
||||
return FileResponse(str(self.css))
|
||||
|
||||
app.mount(
|
||||
"/",
|
||||
@@ -124,14 +166,6 @@ class WebAPI:
|
||||
|
||||
self.__fastapi_app = app
|
||||
|
||||
def add_endpoint(self, name: str) -> None:
|
||||
# your endpoint creation code
|
||||
pass
|
||||
|
||||
def get_custom_openapi(self) -> None:
|
||||
# your custom openapi generation code
|
||||
pass
|
||||
|
||||
@property
|
||||
def sio(self) -> socketio.AsyncServer:
|
||||
return self.__sio
|
||||
|
||||
@@ -2,7 +2,7 @@ from typing import TypedDict
|
||||
|
||||
import pint
|
||||
|
||||
units: pint.UnitRegistry = pint.UnitRegistry()
|
||||
units: pint.UnitRegistry = pint.UnitRegistry(autoconvert_offset_to_baseunit=True)
|
||||
units.default_format = "~P" # pretty and short format
|
||||
|
||||
Quantity = pint.Quantity
|
||||
@@ -15,7 +15,7 @@ class QuantityDict(TypedDict):
|
||||
|
||||
|
||||
def convert_to_quantity(
|
||||
value: QuantityDict | float | int | Quantity, unit: str = ""
|
||||
value: QuantityDict | float | Quantity, unit: str = ""
|
||||
) -> Quantity:
|
||||
"""
|
||||
Convert a given value into a pint.Quantity object with the specified unit.
|
||||
@@ -53,4 +53,4 @@ def convert_to_quantity(
|
||||
quantity = float(value["magnitude"]) * Unit(value["unit"])
|
||||
else:
|
||||
quantity = value
|
||||
return quantity # type: ignore
|
||||
return quantity
|
||||
|
||||
@@ -1,10 +1,19 @@
|
||||
import re
|
||||
import inspect
|
||||
import logging
|
||||
from itertools import chain
|
||||
from typing import Any, Optional, cast
|
||||
from typing import Any
|
||||
|
||||
from loguru import logger
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
STANDARD_TYPES = ("int", "float", "bool", "str", "Enum", "NoneType", "Quantity")
|
||||
|
||||
def get_attribute_doc(attr: Any) -> str | None:
|
||||
"""This function takes an input attribute attr and returns its documentation
|
||||
string if it's different from the documentation of its type, otherwise,
|
||||
it returns None.
|
||||
"""
|
||||
attr_doc = inspect.getdoc(attr)
|
||||
attr_class_doc = inspect.getdoc(type(attr))
|
||||
return attr_doc if attr_class_doc != attr_doc else None
|
||||
|
||||
|
||||
def get_class_and_instance_attributes(obj: object) -> dict[str, Any]:
|
||||
@@ -17,12 +26,10 @@ def get_class_and_instance_attributes(obj: object) -> dict[str, Any]:
|
||||
loops.
|
||||
"""
|
||||
|
||||
attrs = dict(chain(type(obj).__dict__.items(), obj.__dict__.items()))
|
||||
attrs.pop("__root__")
|
||||
return attrs
|
||||
return dict(chain(type(obj).__dict__.items(), obj.__dict__.items()))
|
||||
|
||||
|
||||
def get_object_attr_from_path(target_obj: Any, path: list[str]) -> Any:
|
||||
def get_object_attr_from_path_list(target_obj: Any, path: list[str]) -> Any:
|
||||
"""
|
||||
Traverse the object tree according to the given path.
|
||||
|
||||
@@ -50,218 +57,11 @@ def get_object_attr_from_path(target_obj: Any, path: list[str]) -> Any:
|
||||
target_obj = getattr(target_obj, part)
|
||||
except AttributeError:
|
||||
# The attribute doesn't exist
|
||||
logger.debug(f"Attribute {part} does not exist in the object.")
|
||||
logger.debug("Attribute % does not exist in the object.", part)
|
||||
return None
|
||||
return target_obj
|
||||
|
||||
|
||||
def generate_paths_from_DataService_dict(
|
||||
data: dict, parent_path: str = ""
|
||||
) -> list[str]:
|
||||
"""
|
||||
Recursively generate paths from a dictionary representing a DataService object.
|
||||
|
||||
This function traverses through a nested dictionary, which is typically obtained
|
||||
from serializing a DataService object. The function generates a list where each
|
||||
element is a string representing the path to each terminal value in the original
|
||||
dictionary.
|
||||
|
||||
The paths are represented as strings, with dots ('.') denoting nesting levels and
|
||||
square brackets ('[]') denoting list indices.
|
||||
|
||||
Args:
|
||||
data (dict): The input dictionary to generate paths from. This is typically
|
||||
obtained from serializing a DataService object.
|
||||
parent_path (str, optional): The current path up to the current level of
|
||||
recursion. Defaults to ''.
|
||||
|
||||
Returns:
|
||||
list[str]: A list with paths as elements.
|
||||
|
||||
Note:
|
||||
The function ignores keys whose "type" is "method", as these represent methods
|
||||
of the DataService object and not its state.
|
||||
|
||||
Example:
|
||||
-------
|
||||
|
||||
>>> {
|
||||
... "attr1": {"type": "int", "value": 10},
|
||||
... "attr2": {
|
||||
... "type": "list",
|
||||
... "value": [{"type": "int", "value": 1}, {"type": "int", "value": 2}],
|
||||
... },
|
||||
... "add": {
|
||||
... "type": "method",
|
||||
... "async": False,
|
||||
... "parameters": {"a": "float", "b": "int"},
|
||||
... "doc": "Returns the sum of the numbers a and b.",
|
||||
... },
|
||||
... }
|
||||
>>> print(generate_paths_from_DataService_dict(nested_dict))
|
||||
[attr1, attr2[0], attr2[1]]
|
||||
"""
|
||||
|
||||
paths = []
|
||||
for key, value in data.items():
|
||||
if value["type"] == "method":
|
||||
# ignoring methods
|
||||
continue
|
||||
new_path = f"{parent_path}.{key}" if parent_path else key
|
||||
if isinstance(value["value"], dict) and value["type"] != "Quantity":
|
||||
paths.extend(generate_paths_from_DataService_dict(value["value"], new_path)) # type: ignore
|
||||
elif isinstance(value["value"], list):
|
||||
for index, item in enumerate(value["value"]):
|
||||
indexed_key_path = f"{new_path}[{index}]"
|
||||
if isinstance(item["value"], dict):
|
||||
paths.extend( # type: ignore
|
||||
generate_paths_from_DataService_dict(
|
||||
item["value"], indexed_key_path
|
||||
)
|
||||
)
|
||||
else:
|
||||
paths.append(indexed_key_path) # type: ignore
|
||||
else:
|
||||
paths.append(new_path) # type: ignore
|
||||
return paths
|
||||
|
||||
|
||||
def extract_dict_or_list_entry(data: dict[str, Any], key: str) -> dict[str, Any] | None:
|
||||
"""
|
||||
Extract a nested dictionary or list entry based on the provided key.
|
||||
|
||||
Given a dictionary and a key, this function retrieves the corresponding nested
|
||||
dictionary or list entry. If the key includes an index in the format "[<index>]",
|
||||
the function assumes that the corresponding entry in the dictionary is a list, and
|
||||
it will attempt to retrieve the indexed item from that list.
|
||||
|
||||
Args:
|
||||
data (dict): The input dictionary containing nested dictionaries or lists.
|
||||
key (str): The key specifying the desired entry within the dictionary. The key
|
||||
can be a regular dictionary key or can include an index in the format
|
||||
"[<index>]" to retrieve an item from a nested list.
|
||||
|
||||
Returns:
|
||||
dict | None: The nested dictionary or list item found for the given key. If the
|
||||
key is invalid, or if the specified index is out of bounds for a list, it
|
||||
returns None.
|
||||
|
||||
Example:
|
||||
>>> data = {
|
||||
... "attr1": [
|
||||
... {"type": "int", "value": 10}, {"type": "string", "value": "hello"}
|
||||
... ],
|
||||
... "attr2": {
|
||||
... "type": "MyClass",
|
||||
... "value": {"sub_attr": {"type": "float", "value": 20.5}}
|
||||
... }
|
||||
... }
|
||||
|
||||
>>> extract_dict_or_list_entry(data, "attr1[1]")
|
||||
{"type": "string", "value": "hello"}
|
||||
|
||||
>>> extract_dict_or_list_entry(data, "attr2")
|
||||
{"type": "MyClass", "value": {"sub_attr": {"type": "float", "value": 20.5}}}
|
||||
"""
|
||||
|
||||
attr_name = key
|
||||
index: Optional[int] = None
|
||||
|
||||
# Check if the key contains an index part like '[<index>]'
|
||||
if "[" in key and key.endswith("]"):
|
||||
attr_name, index_part = key.split("[", 1)
|
||||
index_part = index_part.rstrip("]") # remove the closing bracket
|
||||
|
||||
# Convert the index part to an integer
|
||||
if index_part.isdigit():
|
||||
index = int(index_part)
|
||||
else:
|
||||
logger.error(f"Invalid index format in key: {key}")
|
||||
|
||||
current_data: dict[str, Any] | list[dict[str, Any]] | None = data.get(
|
||||
attr_name, None
|
||||
)
|
||||
if not isinstance(current_data, dict):
|
||||
# key does not exist in dictionary, e.g. when class does not have this
|
||||
# attribute
|
||||
return None
|
||||
|
||||
if isinstance(current_data["value"], list):
|
||||
current_data = current_data["value"]
|
||||
|
||||
if index is not None and 0 <= index < len(current_data):
|
||||
current_data = current_data[index]
|
||||
else:
|
||||
return None
|
||||
|
||||
# When the attribute is a class instance, the attributes are nested in the
|
||||
# "value" key
|
||||
if current_data["type"] not in STANDARD_TYPES:
|
||||
current_data = cast(dict[str, Any], current_data.get("value", None)) # type: ignore
|
||||
assert isinstance(current_data, dict)
|
||||
|
||||
return current_data
|
||||
|
||||
|
||||
def get_nested_value_from_DataService_by_path_and_key(
    data: dict[str, Any], path: str, key: str = "value"
) -> Any:
    """
    Get the value associated with a specific key from a dictionary given a path.

    The path is a string with dots connecting the levels and brackets indicating
    list indices. The function descends the serialized structure one path
    component at a time, unwrapping nested objects that are not standard types
    (int, float, bool, str, Enum) along the way.

    Args:
        data (dict): The input dictionary to get the value from.
        path (str): The path to the value in the dictionary.
        key (str, optional): The key associated with the value to be returned.
            Default is "value".

    Returns:
        Any: The value associated with the specified key at the given path in
        the dictionary, or None if the path cannot be resolved.

    Examples:
        >>> data = {
        >>>     "attr1": {"type": "int", "value": 10},
        >>>     "attr2": {
                    "type": "MyClass",
                    "value": {"attr3": {"type": "float", "value": 20.5}}
                }
        >>> }
        >>> get_nested_value_from_DataService_by_path_and_key(data, "attr1")
        10
        >>> get_nested_value_from_DataService_by_path_and_key(data, "attr2.attr3", "type")
        float
    """

    current_data: dict[str, Any] | None = data

    # Walk the dot-separated path one component at a time.
    for part in path.split("."):
        if current_data is None:
            # A previous component failed to resolve; nothing more to do.
            return None
        current_data = extract_dict_or_list_entry(current_data, part)

    if isinstance(current_data, dict):
        return current_data.get(key, None)
    return None
|
||||
|
||||
|
||||
def convert_arguments_to_hinted_types(
|
||||
args: dict[str, Any], type_hints: dict[str, Any]
|
||||
) -> dict[str, Any] | str:
|
||||
@@ -339,45 +139,41 @@ def update_value_if_changed(
|
||||
if getattr(target, attr_name_or_index) != new_value:
|
||||
setattr(target, attr_name_or_index, new_value)
|
||||
else:
|
||||
logger.error(f"Incompatible arguments: {target}, {attr_name_or_index}.")
|
||||
logger.error("Incompatible arguments: %s, %s.", target, attr_name_or_index)
|
||||
|
||||
|
||||
def parse_list_attr_and_index(attr_string: str) -> tuple[str, Optional[int]]:
|
||||
def parse_list_attr_and_index(attr_string: str) -> tuple[str, int | None]:
|
||||
"""
|
||||
Parses an attribute string and extracts a potential list attribute name and its
|
||||
index.
|
||||
Logs an error if the index is not a valid digit.
|
||||
|
||||
This function examines the provided attribute string. If the string contains square
|
||||
brackets, it assumes that it's a list attribute and the string within brackets is
|
||||
the index of an element. It then returns the attribute name and the index as an
|
||||
integer. If no brackets are present, the function assumes it's a regular attribute
|
||||
and returns the attribute name and None as the index.
|
||||
|
||||
Parameters:
|
||||
-----------
|
||||
attr_string: str
|
||||
The attribute string to parse. Can be a regular attribute name (e.g.
|
||||
'attr_name') or a list attribute with an index (e.g. 'list_attr[2]').
|
||||
Args:
|
||||
attr_string (str):
|
||||
The attribute string to parse. Can be a regular attribute name (e.g.,
|
||||
'attr_name') or a list attribute with an index (e.g., 'list_attr[2]').
|
||||
|
||||
Returns:
|
||||
--------
|
||||
tuple: (str, Optional[int])
|
||||
A tuple containing the attribute name as a string and the index as an integer if
|
||||
present, otherwise None.
|
||||
tuple[str, Optional[int]]:
|
||||
A tuple containing the attribute name as a string and the index as an
|
||||
integer if present, otherwise None.
|
||||
|
||||
Example:
|
||||
--------
|
||||
>>> parse_list_attr_and_index('list_attr[2]')
|
||||
('list_attr', 2)
|
||||
>>> parse_list_attr_and_index('attr_name')
|
||||
('attr_name', None)
|
||||
Examples:
|
||||
>>> parse_attribute_and_index('list_attr[2]')
|
||||
('list_attr', 2)
|
||||
>>> parse_attribute_and_index('attr_name')
|
||||
('attr_name', None)
|
||||
"""
|
||||
|
||||
attr_name = attr_string
|
||||
index = None
|
||||
if "[" in attr_string and "]" in attr_string:
|
||||
attr_name, idx = attr_string[:-1].split("[")
|
||||
index = int(idx)
|
||||
attr_name = attr_string
|
||||
if "[" in attr_string and attr_string.endswith("]"):
|
||||
attr_name, index_part = attr_string.split("[", 1)
|
||||
index_part = index_part.rstrip("]")
|
||||
if index_part.isdigit():
|
||||
index = int(index_part)
|
||||
else:
|
||||
logger.error("Invalid index format in key: %s", attr_name)
|
||||
return attr_name, index
|
||||
|
||||
|
||||
|
||||
@@ -1,82 +1,149 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import sys
|
||||
from types import FrameType
|
||||
from typing import Optional
|
||||
from copy import copy
|
||||
|
||||
import loguru
|
||||
import rpyc
|
||||
import socketio # type: ignore[import-untyped]
|
||||
import uvicorn.logging
|
||||
from uvicorn.config import LOGGING_CONFIG
|
||||
|
||||
import pydase.config
|
||||
|
||||
ALLOWED_LOG_LEVELS = ["DEBUG", "INFO", "ERROR"]
|
||||
|
||||
class DefaultFormatter(uvicorn.logging.ColourizedFormatter):
    """
    A custom log formatter class that:

    * Outputs the LOG_LEVEL with an appropriate color.
    * If a log call includes an `extras={"color_message": ...}` it will be used
      for formatting the output, instead of the plain text message.
    """

    def formatMessage(self, record: logging.LogRecord) -> str:  # noqa: N802
        record_copy = copy(record)
        level_name = record_copy.levelname
        # Pad the level name so all prefixes line up to 8 characters.
        padding = " " * (8 - len(record_copy.levelname))
        if self.use_colors:
            level_name = self.color_level_name(level_name, record_copy.levelno)
            if "color_message" in record_copy.__dict__:
                record_copy.msg = record_copy.__dict__["color_message"]
                record_copy.__dict__["message"] = record_copy.getMessage()
        record_copy.__dict__["levelprefix"] = level_name + padding
        return logging.Formatter.formatMessage(self, record_copy)

    def should_use_colors(self) -> bool:
        # Colourize only when stderr is an interactive terminal.
        return sys.stderr.isatty()
|
||||
|
||||
|
||||
# from: https://github.com/Delgan/loguru section
|
||||
# "Entirely compatible with standard logging"
|
||||
class SocketIOHandler(logging.Handler):
    """
    Custom logging handler that emits ERROR and CRITICAL log records to a Socket.IO
    server, allowing for real-time logging in applications that use Socket.IO for
    communication.
    """

    def __init__(self, sio: socketio.AsyncServer) -> None:
        # Only records at ERROR level or above are handled.
        super().__init__(logging.ERROR)
        self._sio = sio

    def format(self, record: logging.LogRecord) -> str:
        """Render a record as ``name:func:lineno - message``."""
        return f"{record.name}:{record.funcName}:{record.lineno} - {record.message}"

    def emit(self, record: logging.LogRecord) -> None:
        """Schedule an asynchronous 'log' event carrying the formatted record."""
        # Ignore "asyncio.CancelledError" raised by uvicorn
        if record.name == "uvicorn.error" and "CancelledError" in record.msg:
            return
        log_entry = self.format(record)

        # NOTE(review): scheduling on the running event loop; assumes emit() is
        # called from a thread that has one — confirm against the server setup.
        loop = asyncio.get_event_loop()
        loop.create_task(
            self._sio.emit(
                "log",
                {
                    "levelname": record.levelname,
                    "message": log_entry,
                },
            )
        )
|
||||
|
||||
|
||||
def setup_logging(level: str | int | None = None) -> None:
    """
    Configures the logging settings for the application.

    This function sets up logging with specific formatting and colorization of log
    messages. The log level is determined based on the application's operation mode,
    with an option to override the level. By default, in a development environment, the
    log level is set to DEBUG, whereas in other environments, it is set to INFO.

    Args:
        level (str | int | None):
            A specific log level to set for the application. If None, the log level is
            determined based on the application's operation mode. Accepts standard log
            level names ('DEBUG', 'INFO', etc.) and corresponding numerical values.

    Raises:
        ValueError: If `level` is a string that is not a known log level name, or
            is neither a string nor an integer.

    Example:

    ```python
    >>> import logging
    >>> setup_logging(logging.DEBUG)
    >>> setup_logging("INFO")
    ```
    """

    logger = logging.getLogger()

    if pydase.config.OperationMode().environment == "development":
        log_level = logging.DEBUG
    else:
        log_level = logging.INFO

    # If a level is specified, check whether it's a string or an integer.
    if level is not None:
        if isinstance(level, str):
            # Convert known log level strings directly to their corresponding logging
            # module constants.
            level_name = level.upper()  # Ensure level names are uppercase
            if hasattr(logging, level_name):
                log_level = getattr(logging, level_name)
            else:
                raise ValueError(
                    f"Invalid log level: {level}. Must be one of 'DEBUG', 'INFO', "
                    "'WARNING', 'ERROR', etc."
                )
        elif isinstance(level, int):
            log_level = level  # Directly use integer levels
        else:
            raise ValueError("Log level must be a string or an integer.")

    # Set the logger's level.
    logger.setLevel(log_level)

    # create console handler and set level to debug
    ch = logging.StreamHandler()

    # add formatter to ch
    ch.setFormatter(
        DefaultFormatter(
            fmt=(
                "%(asctime)s.%(msecs)03d | %(levelprefix)s | "
                "%(name)s:%(funcName)s:%(lineno)d - %(message)s"
            ),
            datefmt="%Y-%m-%d %H:%M:%S",
        )
    )

    # add ch to logger
    logger.addHandler(ch)

    logger.debug("Configuring service logging.")
    # Quieten chatty third-party loggers.
    logging.getLogger("asyncio").setLevel(logging.INFO)
    logging.getLogger("urllib3").setLevel(logging.INFO)
|
||||
|
||||
378
src/pydase/utils/serializer.py
Normal file
378
src/pydase/utils/serializer.py
Normal file
@@ -0,0 +1,378 @@
|
||||
import inspect
|
||||
import logging
|
||||
from collections.abc import Callable
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
import pydase.units as u
|
||||
from pydase.data_service.abstract_data_service import AbstractDataService
|
||||
from pydase.utils.helpers import (
|
||||
get_attribute_doc,
|
||||
get_component_class_names,
|
||||
parse_list_attr_and_index,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SerializationPathError(Exception):
    """Raised when a dot-notation path cannot be resolved within serialized data."""


class SerializationValueError(Exception):
    """Raised when a resolved serialization entry is not the expected dictionary."""
|
||||
|
||||
|
||||
class Serializer:
    """Serializes Python objects into the pydase dictionary representation.

    Every serialized entry carries at least the keys ``type``, ``value``,
    ``readonly`` and ``doc``; some types add extra keys (``enum``,
    ``parameters``, ``async``).
    """

    @staticmethod
    def serialize_object(obj: Any) -> dict[str, Any]:
        """Dispatch on the runtime type of ``obj`` and return its serialization."""
        if isinstance(obj, AbstractDataService):
            return Serializer._serialize_data_service(obj)

        if isinstance(obj, list):
            return Serializer._serialize_list(obj)

        if isinstance(obj, dict):
            return Serializer._serialize_dict(obj)

        # Special handling for u.Quantity
        if isinstance(obj, u.Quantity):
            return Serializer._serialize_quantity(obj)

        # Handling for Enums
        if isinstance(obj, Enum):
            return Serializer._serialize_enum(obj)

        # Methods and coroutines
        if inspect.isfunction(obj) or inspect.ismethod(obj):
            return Serializer._serialize_method(obj)

        # Plain values fall through to a generic representation.
        return {
            "type": type(obj).__name__,
            "value": obj,
            "readonly": False,
            "doc": get_attribute_doc(obj),
        }

    @staticmethod
    def _serialize_enum(obj: Enum) -> dict[str, Any]:
        # ColouredEnum subclasses get their own type tag so the frontend can
        # render them differently.
        if type(obj).__base__.__name__ == "ColouredEnum":
            obj_type = "ColouredEnum"
        else:
            obj_type = "Enum"

        return {
            "type": obj_type,
            "value": obj.name,
            "readonly": False,
            "doc": get_attribute_doc(obj),
            "enum": {
                name: member.value for name, member in obj.__class__.__members__.items()
            },
        }

    @staticmethod
    def _serialize_quantity(obj: u.Quantity) -> dict[str, Any]:
        return {
            "type": "Quantity",
            "value": {"magnitude": obj.m, "unit": str(obj.u)},
            "readonly": False,
            "doc": get_attribute_doc(obj),
        }

    @staticmethod
    def _serialize_dict(obj: dict[str, Any]) -> dict[str, Any]:
        return {
            "type": "dict",
            "value": {key: Serializer.serialize_object(val) for key, val in obj.items()},
            "readonly": False,
            "doc": get_attribute_doc(obj),
        }

    @staticmethod
    def _serialize_list(obj: list[Any]) -> dict[str, Any]:
        return {
            "type": "list",
            "value": [Serializer.serialize_object(element) for element in obj],
            "readonly": False,
            "doc": get_attribute_doc(obj),
        }

    @staticmethod
    def _serialize_method(obj: Callable[..., Any]) -> dict[str, Any]:
        # Store parameters and their annotations in a dictionary
        parameters: dict[str, str | None] = {}
        for name, param in inspect.signature(obj).parameters.items():
            annotation = param.annotation
            if annotation is inspect._empty:
                parameters[name] = None
            elif isinstance(annotation, type):
                # Handle regular types
                parameters[name] = annotation.__name__
            else:
                # Union, string annotation, Literal types, ...
                parameters[name] = str(annotation)

        return {
            "type": "method",
            "value": None,
            "readonly": True,
            "doc": get_attribute_doc(obj),
            "async": inspect.iscoroutinefunction(obj),
            "parameters": parameters,
        }

    @staticmethod
    def _serialize_data_service(obj: AbstractDataService) -> dict[str, Any]:
        obj_type = type(obj).__name__
        # Anything that is not a registered component is exposed as a plain
        # DataService.
        if obj_type not in get_component_class_names():
            obj_type = "DataService"

        # Attributes defined on the derived class only (not inherited from the
        # base class), merged with everything set on the instance itself.
        class_only_keys = set(type(obj).__dict__) - set(type(obj).__base__.__dict__)
        merged_keys = class_only_keys | set(obj.__dict__)

        value: dict[str, Any] = {}

        # Iterate over attributes, properties, class attributes, and methods
        for key in sorted(merged_keys):
            if key.startswith("_"):
                continue  # Skip attributes that start with underscore

            # Skip keys that start with "start_" or "stop_" and end with an async
            # method name
            if key.startswith(("start_", "stop_")) and key.split("_", 1)[1] in {
                name
                for name, _ in inspect.getmembers(
                    obj, predicate=inspect.iscoroutinefunction
                )
            }:
                continue

            value[key] = Serializer.serialize_object(getattr(obj, key))

            # If there's a running task for this method
            if key in obj._task_manager.tasks:
                value[key]["value"] = obj._task_manager.tasks[key]["kwargs"]

            # If the DataService attribute is a property
            class_attr = getattr(obj.__class__, key, None)
            if isinstance(class_attr, property):
                value[key]["readonly"] = class_attr.fset is None
                value[key]["doc"] = get_attribute_doc(class_attr)  # overwrite the doc

        return {
            "type": obj_type,
            "value": value,
            "readonly": False,
            "doc": get_attribute_doc(obj),
        }
|
||||
|
||||
|
||||
def dump(obj: Any) -> dict[str, Any]:
    """Serialize ``obj`` into the pydase dictionary format via :class:`Serializer`."""
    return Serializer.serialize_object(obj)
|
||||
|
||||
|
||||
def set_nested_value_by_path(
    serialization_dict: dict[str, Any], path: str, value: Any
) -> None:
    """
    Set a value in a nested dictionary structure, which conforms to the serialization
    format used by `pydase.utils.serializer.Serializer`, using a dot-notation path.

    Args:
        serialization_dict:
            The base dictionary representing data serialized with
            `pydase.utils.serializer.Serializer`.
        path:
            The dot-notation path (e.g., 'attr1.attr2[0].attr3') indicating where to
            set the value.
        value:
            The new value to set at the specified path.

    Note:
        - If the index equals the length of the list, the function will append the
          serialized representation of the 'value' to the list.
    """

    *parent_parts, attr_name = path.split(".")
    current_dict: dict[str, Any] = serialization_dict

    try:
        # Descend to the parent of the target entry.
        for part in parent_parts:
            current_dict = get_next_level_dict_by_key(
                current_dict, part, allow_append=False
            )["value"]

        # The final component may append one new list element.
        current_dict = get_next_level_dict_by_key(
            current_dict, attr_name, allow_append=True
        )
    except (SerializationPathError, SerializationValueError, KeyError) as e:
        logger.error(e)
        return

    # setting the new value
    serialized_value = dump(value)
    if "readonly" in current_dict:
        # Methods keep their "method" type; everything else adopts the new type.
        if current_dict["type"] != "method":
            current_dict["type"] = serialized_value["type"]
        current_dict["value"] = serialized_value["value"]
    else:
        current_dict.update(serialized_value)
|
||||
|
||||
|
||||
def get_nested_dict_by_path(
    serialization_dict: dict[str, Any],
    path: str,
) -> dict[str, Any]:
    """Return the serialized entry addressed by the dot-notation ``path``.

    Raises:
        SerializationPathError: If any component of the path cannot be resolved.
        SerializationValueError: If a resolved entry is not a dictionary.
    """
    *parent_parts, attr_name = path.split(".")
    current_dict: dict[str, Any] = serialization_dict

    for part in parent_parts:
        current_dict = get_next_level_dict_by_key(
            current_dict, part, allow_append=False
        )["value"]
    return get_next_level_dict_by_key(current_dict, attr_name, allow_append=False)
|
||||
|
||||
|
||||
def get_next_level_dict_by_key(
    serialization_dict: dict[str, Any],
    attr_name: str,
    *,
    allow_append: bool = False,
) -> dict[str, Any]:
    """
    Retrieve a nested dictionary entry or list item from a data structure serialized
    with `pydase.utils.serializer.Serializer`.

    Args:
        serialization_dict: The base dictionary representing serialized data.
        attr_name: The key name representing the attribute in the dictionary,
            e.g. 'list_attr[0]' or 'attr'
        allow_append: Flag to allow appending a new entry if `index` is out of range by
            one.

    Returns:
        The dictionary or list item corresponding to the attribute and index.

    Raises:
        SerializationPathError: If the path composed of `attr_name` and `index` is
            invalid or leads to an IndexError or KeyError.
        SerializationValueError: If the expected nested structure is not a dictionary.
    """
    # Check if the key contains an index part like 'attr_name[<index>]'
    attr_name, index = parse_list_attr_and_index(attr_name)

    try:
        if index is not None:
            serialization_dict = serialization_dict[attr_name]["value"][index]
        else:
            serialization_dict = serialization_dict[attr_name]
    except IndexError as e:
        if allow_append and index == len(serialization_dict[attr_name]["value"]):
            # Appending to list
            serialization_dict[attr_name]["value"].append({})
            serialization_dict = serialization_dict[attr_name]["value"][index]
        else:
            # Chain the original IndexError so its traceback is preserved.
            raise SerializationPathError(
                f"Error occured trying to change '{attr_name}[{index}]': {e}"
            ) from e
    except KeyError as e:
        # Chain the original KeyError so its traceback is preserved.
        raise SerializationPathError(
            f"Error occured trying to access the key '{attr_name}': it is either "
            "not present in the current dictionary or its value does not contain "
            "a 'value' key."
        ) from e

    if not isinstance(serialization_dict, dict):
        raise SerializationValueError(
            f"Expected a dictionary at '{attr_name}', but found type "
            f"'{type(serialization_dict).__name__}' instead."
        )

    return serialization_dict
|
||||
|
||||
|
||||
def generate_serialized_data_paths(
    data: dict[str, Any], parent_path: str = ""
) -> list[str]:
    """
    Generate a list of access paths for all attributes in a dictionary representing
    data serialized with `pydase.utils.serializer.Serializer`, excluding those that are
    methods.

    Args:
        data: The dictionary representing serialized data, typically produced by
            `pydase.utils.serializer.Serializer`.
        parent_path: The base path to prepend to the keys in the `data` dictionary to
            form the access paths. Defaults to an empty string.

    Returns:
        A list of strings where each string is a dot-notation access path to an
        attribute in the serialized data.
    """

    paths: list[str] = []
    for key, entry in data.items():
        if entry["type"] == "method":
            # ignoring methods
            continue

        access_path = f"{parent_path}.{key}" if parent_path else key
        entry_value = entry["value"]

        if isinstance(entry_value, dict) and entry["type"] != "Quantity":
            # Nested service/dict: recurse into its attributes.
            paths.extend(generate_serialized_data_paths(entry_value, access_path))
        elif isinstance(entry_value, list):
            # Lists get an indexed path per element.
            for index, item in enumerate(entry_value):
                indexed_path = f"{access_path}[{index}]"
                if isinstance(item["value"], dict):
                    paths.extend(
                        generate_serialized_data_paths(item["value"], indexed_path)
                    )
                else:
                    paths.append(indexed_path)
        else:
            paths.append(access_path)
    return paths
|
||||
@@ -1,21 +0,0 @@
|
||||
from loguru import logger
|
||||
|
||||
|
||||
def warn_if_instance_class_does_not_inherit_from_DataService(__value: object) -> None:
    """Log a warning when ``__value``'s class does not derive from DataService.

    Builtins, a few infrastructure modules, and known helper types are exempt.
    """
    base_class_name = __value.__class__.__base__.__name__
    module_name = __value.__class__.__module__

    exempt_modules = [
        "builtins",
        "__builtin__",
        "asyncio.unix_events",
        "_abc",
    ]
    exempt_base_classes = ["DataService", "list", "Enum"]
    exempt_type_names = ["CallbackManager", "TaskManager", "Quantity"]

    if (
        module_name not in exempt_modules
        and base_class_name not in exempt_base_classes
        and type(__value).__name__ not in exempt_type_names
    ):
        logger.warning(
            f"Warning: Class {type(__value).__name__} does not inherit from DataService."
        )
|
||||
@@ -1,4 +1,4 @@
|
||||
from importlib.metadata import distribution

# Version of the installed pydase distribution, split into numeric components.
__version__ = distribution("pydase").version
__major__, __minor__, __patch__ = (int(v) for v in __version__.split("."))
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from loguru import logger
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
from pydase import DataService
|
||||
from pydase.data_service.callback_manager import CallbackManager
|
||||
|
||||
|
||||
@pytest.fixture
def caplog(caplog: LogCaptureFixture) -> Generator[LogCaptureFixture, Any, None]:
    """Route loguru output into pytest's ``caplog`` handler for the test."""
    handler_id = logger.add(caplog.handler, format="{message}")
    try:
        yield caplog
    finally:
        # Remove the handler even if the test raised, so it does not leak
        # into subsequent tests.
        logger.remove(handler_id)
|
||||
|
||||
|
||||
def emit(self: Any, parent_path: str, name: str, value: Any) -> None:
    """Print a notification instead of emitting it through the server."""
    payload = value.serialize() if isinstance(value, DataService) else value
    print(f"{parent_path}.{name} = {payload}")


# Patch the notification hook so tests observe plain printed output.
CallbackManager.emit_notification = emit  # type: ignore
|
||||
|
||||
47
tests/components/test_coloured_enum.py
Normal file
47
tests/components/test_coloured_enum.py
Normal file
@@ -0,0 +1,47 @@
|
||||
from pydase.components.coloured_enum import ColouredEnum
|
||||
from pydase.data_service.data_service import DataService
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
|
||||
def test_ColouredEnum(caplog: LogCaptureFixture) -> None:
    """Setting a ColouredEnum property emits a change notification."""

    class MyStatus(ColouredEnum):
        RUNNING = "#00FF00"
        FAILING = "#FF0000"

    class ServiceClass(DataService):
        _status = MyStatus.RUNNING

        @property
        def status(self) -> MyStatus:
            return self._status

        @status.setter
        def status(self, value: MyStatus) -> None:
            # do something ...
            self._status = value

    service = ServiceClass()
    manager = StateManager(service)
    DataServiceObserver(manager)

    service.status = MyStatus.FAILING

    assert "'status' changed to 'MyStatus.FAILING'" in caplog.text
|
||||
|
||||
|
||||
def test_warning(caplog: LogCaptureFixture) -> None:
    """A ColouredEnum class attribute must not trigger the inheritance warning."""

    class MyStatus(ColouredEnum):
        RUNNING = "#00FF00"
        FAILING = "#FF0000"

    class ServiceClass(DataService):
        status = MyStatus.RUNNING

    ServiceClass()

    warning_text = (
        "Class 'MyStatus' does not inherit from DataService. This may lead to "
        "unexpected behaviour!"
    )
    assert warning_text not in caplog.text
|
||||
@@ -1,60 +1,46 @@
|
||||
from pytest import CaptureFixture, LogCaptureFixture
|
||||
|
||||
from pydase.components.number_slider import NumberSlider
|
||||
from pydase.data_service.data_service import DataService
|
||||
|
||||
from .. import caplog # noqa
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
|
||||
def test_NumberSlider(caplog: LogCaptureFixture) -> None:
    """NumberSlider exposes float/int values and notifies on changes."""

    class ServiceClass(DataService):
        number_slider = NumberSlider(1, 0, 10, 1)
        int_number_slider = NumberSlider(1, 0, 10, 1, "int")

    service_instance = ServiceClass()
    state_manager = StateManager(service_instance)
    DataServiceObserver(state_manager)

    assert service_instance.number_slider.value == 1
    assert isinstance(service_instance.number_slider.value, float)
    assert service_instance.number_slider.min == 0
    assert isinstance(service_instance.number_slider.min, float)
    assert service_instance.number_slider.max == 10
    assert isinstance(service_instance.number_slider.max, float)
    assert service_instance.number_slider.step_size == 1
    assert isinstance(service_instance.number_slider.step_size, float)

    assert service_instance.int_number_slider.value == 1
    assert isinstance(service_instance.int_number_slider.value, int)
    assert service_instance.int_number_slider.step_size == 1
    assert isinstance(service_instance.int_number_slider.step_size, int)

    service_instance.number_slider.value = 10.0
    service_instance.int_number_slider.value = 10.1

    # The int slider truncates 10.1 to 10.
    assert "'number_slider.value' changed to '10.0'" in caplog.text
    assert "'int_number_slider.value' changed to '10'" in caplog.text
    caplog.clear()

    service_instance.number_slider.min = 1.1

    assert "'number_slider.min' changed to '1.1'" in caplog.text
|
||||
|
||||
|
||||
def test_init_error(caplog: LogCaptureFixture) -> None: # noqa
|
||||
number_slider = NumberSlider(type="str") # type: ignore # noqa
|
||||
def test_init_error(caplog: LogCaptureFixture) -> None:
|
||||
number_slider = NumberSlider(type_="str") # type: ignore # noqa
|
||||
|
||||
assert "Unknown type 'str'. Using 'float'" in caplog.text
|
||||
|
||||
@@ -1,64 +1,116 @@
|
||||
from enum import Enum
|
||||
|
||||
import pydase
|
||||
import pydase.units as u
|
||||
from pydase import DataService
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
|
||||
def test_enum_serialize() -> None:
|
||||
class EnumClass(Enum):
|
||||
FOO = "foo"
|
||||
BAR = "bar"
|
||||
def test_unexpected_type_change_warning(caplog: LogCaptureFixture) -> None:
|
||||
class ServiceClass(DataService):
|
||||
attr_1 = 1.0
|
||||
current = 1.0 * u.units.A
|
||||
|
||||
class EnumAttribute(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
self.some_enum = EnumClass.FOO
|
||||
super().__init__()
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
service_instance.attr_1 = 2
|
||||
|
||||
class EnumPropertyWithoutSetter(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
self._some_enum = EnumClass.FOO
|
||||
super().__init__()
|
||||
assert "'attr_1' changed to '2'" in caplog.text
|
||||
assert (
|
||||
"Type of 'attr_1' changed from 'float' to 'int'. This may have unwanted "
|
||||
"side effects! Consider setting it to 'float' directly." in caplog.text
|
||||
)
|
||||
|
||||
service_instance.current = 2
|
||||
assert "'current' changed to '2'" in caplog.text
|
||||
assert (
|
||||
"Type of 'current' changed from 'Quantity' to 'int'. This may have unwanted "
|
||||
"side effects! Consider setting it to 'Quantity' directly." in caplog.text
|
||||
)
|
||||
|
||||
|
||||
def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None:
|
||||
class SubService(DataService):
|
||||
...
|
||||
|
||||
class SomeEnum(Enum):
|
||||
HI = 0
|
||||
|
||||
class ServiceClass(DataService):
|
||||
sub_service = SubService()
|
||||
some_int = 1
|
||||
some_float = 1.0
|
||||
some_bool = True
|
||||
some_quantity = 1.0 * u.units.A
|
||||
some_list = [1, 2]
|
||||
some_string = "Hello"
|
||||
some_enum = SomeEnum.HI
|
||||
_name = "Service"
|
||||
|
||||
@property
|
||||
def some_enum(self) -> EnumClass:
|
||||
return self._some_enum
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
class EnumPropertyWithSetter(pydase.DataService):
|
||||
def some_method(self) -> None:
|
||||
...
|
||||
|
||||
async def some_task(self) -> None:
|
||||
...
|
||||
|
||||
ServiceClass()
|
||||
|
||||
# neither of the attributes, methods or properties cause a warning log
|
||||
assert "WARNING" not in caplog.text
|
||||
|
||||
|
||||
def test_class_attr_inheritance_warning(caplog: LogCaptureFixture) -> None:
|
||||
class SubClass:
|
||||
name = "Hello"
|
||||
|
||||
class ServiceClass(DataService):
|
||||
attr_1 = SubClass()
|
||||
|
||||
ServiceClass()
|
||||
|
||||
assert (
|
||||
"Class 'SubClass' does not inherit from DataService. This may lead to "
|
||||
"unexpected behaviour!"
|
||||
) in caplog.text
|
||||
|
||||
|
||||
def test_instance_attr_inheritance_warning(caplog: LogCaptureFixture) -> None:
|
||||
class SubClass:
|
||||
name = "Hello"
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self._some_enum = EnumClass.FOO
|
||||
super().__init__()
|
||||
self.attr_1 = SubClass()
|
||||
|
||||
@property
|
||||
def some_enum(self) -> EnumClass:
|
||||
return self._some_enum
|
||||
ServiceClass()
|
||||
|
||||
@some_enum.setter
|
||||
def some_enum(self, value: EnumClass) -> None:
|
||||
self._some_enum = value
|
||||
assert (
|
||||
"Class 'SubClass' does not inherit from DataService. This may lead to "
|
||||
"unexpected behaviour!"
|
||||
) in caplog.text
|
||||
|
||||
assert EnumAttribute().serialize() == {
|
||||
"some_enum": {
|
||||
"type": "Enum",
|
||||
"value": "FOO",
|
||||
"enum": {"FOO": "foo", "BAR": "bar"},
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
}
|
||||
}
|
||||
assert EnumPropertyWithoutSetter().serialize() == {
|
||||
"some_enum": {
|
||||
"type": "Enum",
|
||||
"value": "FOO",
|
||||
"enum": {"FOO": "foo", "BAR": "bar"},
|
||||
"readonly": True,
|
||||
"doc": None,
|
||||
}
|
||||
}
|
||||
assert EnumPropertyWithSetter().serialize() == {
|
||||
"some_enum": {
|
||||
"type": "Enum",
|
||||
"value": "FOO",
|
||||
"enum": {"FOO": "foo", "BAR": "bar"},
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
}
|
||||
}
|
||||
|
||||
def test_protected_and_private_attribute_warning(caplog: LogCaptureFixture) -> None:
|
||||
class SubClass:
|
||||
name = "Hello"
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._subclass = SubClass()
|
||||
self.__other_subclass = SubClass()
|
||||
|
||||
ServiceClass()
|
||||
|
||||
# Protected and private attributes are not checked
|
||||
assert (
|
||||
"Class 'SubClass' does not inherit from DataService. This may lead to "
|
||||
"unexpected behaviour!"
|
||||
) not in caplog.text
|
||||
|
||||
69
tests/data_service/test_data_service_cache.py
Normal file
69
tests/data_service/test_data_service_cache.py
Normal file
@@ -0,0 +1,69 @@
|
||||
import logging
|
||||
|
||||
import pydase
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
||||
def test_nested_attributes_cache_callback() -> None:
|
||||
class SubClass(pydase.DataService):
|
||||
name = "Hello"
|
||||
|
||||
class ServiceClass(pydase.DataService):
|
||||
class_attr = SubClass()
|
||||
name = "World"
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
service_instance.name = "Peepz"
|
||||
assert (
|
||||
state_manager._data_service_cache.get_value_dict_from_cache("name")["value"]
|
||||
== "Peepz"
|
||||
)
|
||||
|
||||
service_instance.class_attr.name = "Ciao"
|
||||
assert (
|
||||
state_manager._data_service_cache.get_value_dict_from_cache("class_attr.name")[
|
||||
"value"
|
||||
]
|
||||
== "Ciao"
|
||||
)
|
||||
|
||||
|
||||
def test_task_status_update() -> None:
|
||||
class ServiceClass(pydase.DataService):
|
||||
name = "World"
|
||||
|
||||
async def my_method(self) -> None:
|
||||
pass
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
assert (
|
||||
state_manager._data_service_cache.get_value_dict_from_cache("my_method")["type"]
|
||||
== "method"
|
||||
)
|
||||
assert (
|
||||
state_manager._data_service_cache.get_value_dict_from_cache("my_method")[
|
||||
"value"
|
||||
]
|
||||
is None
|
||||
)
|
||||
|
||||
service_instance.start_my_method() # type: ignore
|
||||
assert (
|
||||
state_manager._data_service_cache.get_value_dict_from_cache("my_method")["type"]
|
||||
== "method"
|
||||
)
|
||||
assert (
|
||||
state_manager._data_service_cache.get_value_dict_from_cache("my_method")[
|
||||
"value"
|
||||
]
|
||||
== {}
|
||||
)
|
||||
96
tests/data_service/test_data_service_observer.py
Normal file
96
tests/data_service/test_data_service_observer.py
Normal file
@@ -0,0 +1,96 @@
|
||||
import logging
|
||||
|
||||
import pydase
|
||||
import pytest
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
||||
def test_static_property_dependencies() -> None:
|
||||
class SubClass(pydase.DataService):
|
||||
_name = "SubClass"
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@name.setter
|
||||
def name(self, value: str) -> None:
|
||||
self._name = value
|
||||
|
||||
class ServiceClass(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.list_attr = [SubClass()]
|
||||
self._name = "ServiceClass"
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@name.setter
|
||||
def name(self, value: str) -> None:
|
||||
self._name = value
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
observer = DataServiceObserver(state_manager)
|
||||
logger.debug(observer.property_deps_dict)
|
||||
assert observer.property_deps_dict == {
|
||||
"list_attr[0]._name": ["list_attr[0].name"],
|
||||
"_name": ["name"],
|
||||
}
|
||||
|
||||
|
||||
def test_dynamic_list_property_dependencies() -> None:
|
||||
class SubClass(pydase.DataService):
|
||||
_name = "SubClass"
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@name.setter
|
||||
def name(self, value: str) -> None:
|
||||
self._name = value
|
||||
|
||||
class ServiceClass(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.list_attr = [SubClass()]
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
observer = DataServiceObserver(state_manager)
|
||||
|
||||
assert observer.property_deps_dict == {
|
||||
"list_attr[0]._name": ["list_attr[0].name"],
|
||||
}
|
||||
|
||||
service_instance.list_attr.append(SubClass())
|
||||
|
||||
assert observer.property_deps_dict == {
|
||||
"list_attr[0]._name": ["list_attr[0].name"],
|
||||
"list_attr[1]._name": ["list_attr[1].name"],
|
||||
}
|
||||
|
||||
|
||||
def test_protected_or_private_change_logs(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class OtherService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._name = "Hi"
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.subclass = OtherService()
|
||||
|
||||
service = MyService()
|
||||
state_manager = StateManager(service)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
service.subclass._name = "Hello"
|
||||
assert "'subclass._name' changed to 'Hello'" not in caplog.text
|
||||
282
tests/data_service/test_state_manager.py
Normal file
282
tests/data_service/test_state_manager.py
Normal file
@@ -0,0 +1,282 @@
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import pydase
|
||||
import pydase.units as u
|
||||
import pytest
|
||||
from pydase.components.coloured_enum import ColouredEnum
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import (
|
||||
StateManager,
|
||||
has_load_state_decorator,
|
||||
load_state,
|
||||
)
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
|
||||
class SubService(pydase.DataService):
|
||||
name = "SubService"
|
||||
|
||||
|
||||
class State(ColouredEnum):
|
||||
RUNNING = "#0000FF80"
|
||||
COMPLETED = "hsl(120, 100%, 50%)"
|
||||
FAILED = "hsla(0, 100%, 50%, 0.7)"
|
||||
|
||||
|
||||
class Service(pydase.DataService):
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
super().__init__(**kwargs)
|
||||
self.subservice = SubService()
|
||||
self.some_unit: u.Quantity = 1.2 * u.units.A
|
||||
self.some_float = 1.0
|
||||
self.list_attr = [1.0, 2.0]
|
||||
self._property_attr = 1337.0
|
||||
self._name = "Service"
|
||||
self.state = State.RUNNING
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def property_attr(self) -> float:
|
||||
return self._property_attr
|
||||
|
||||
@property_attr.setter
|
||||
def property_attr(self, value: float) -> None:
|
||||
self._property_attr = value
|
||||
|
||||
|
||||
CURRENT_STATE = Service().serialize()
|
||||
|
||||
LOAD_STATE = {
|
||||
"list_attr": {
|
||||
"type": "list",
|
||||
"value": [
|
||||
{"type": "float", "value": 1.4, "readonly": False, "doc": None},
|
||||
{"type": "float", "value": 2.0, "readonly": False, "doc": None},
|
||||
],
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
},
|
||||
"name": {
|
||||
"type": "str",
|
||||
"value": "Another name",
|
||||
"readonly": True,
|
||||
"doc": None,
|
||||
},
|
||||
"some_float": {
|
||||
"type": "int",
|
||||
"value": 10,
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
},
|
||||
"property_attr": {
|
||||
"type": "float",
|
||||
"value": 1337.1,
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
},
|
||||
"some_unit": {
|
||||
"type": "Quantity",
|
||||
"value": {"magnitude": 12.0, "unit": "A"},
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
},
|
||||
"state": {
|
||||
"type": "ColouredEnum",
|
||||
"value": "FAILED",
|
||||
"readonly": True,
|
||||
"doc": None,
|
||||
"enum": {
|
||||
"RUNNING": "#0000FF80",
|
||||
"COMPLETED": "hsl(120, 100%, 50%)",
|
||||
"FAILED": "hsla(0, 100%, 50%, 0.7)",
|
||||
},
|
||||
},
|
||||
"subservice": {
|
||||
"type": "DataService",
|
||||
"value": {
|
||||
"name": {
|
||||
"type": "str",
|
||||
"value": "SubService",
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
}
|
||||
},
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
},
|
||||
"removed_attr": {
|
||||
"type": "str",
|
||||
"value": "removed",
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def test_save_state(tmp_path: Path) -> None:
|
||||
# Create a StateManager instance with a temporary file
|
||||
file = tmp_path / "test_state.json"
|
||||
manager = StateManager(service=Service(), filename=str(file))
|
||||
|
||||
# Trigger the saving action
|
||||
manager.save_state()
|
||||
|
||||
# Now check that the file was written correctly
|
||||
assert file.read_text() == json.dumps(CURRENT_STATE, indent=4)
|
||||
|
||||
|
||||
def test_load_state(tmp_path: Path, caplog: LogCaptureFixture) -> None:
|
||||
# Create a StateManager instance with a temporary file
|
||||
file = tmp_path / "test_state.json"
|
||||
|
||||
# Write a temporary JSON file to read back
|
||||
with open(file, "w") as f:
|
||||
json.dump(LOAD_STATE, f, indent=4)
|
||||
|
||||
service = Service()
|
||||
state_manager = StateManager(service=service, filename=str(file))
|
||||
DataServiceObserver(state_manager)
|
||||
state_manager.load_state()
|
||||
|
||||
assert service.some_unit == u.Quantity(12, "A") # has changed
|
||||
assert service.list_attr[0] == 1.4 # has changed
|
||||
assert service.list_attr[1] == 2.0 # has not changed
|
||||
assert (
|
||||
service.property_attr == 1337
|
||||
) # has not changed as property has not @load_state decorator
|
||||
assert service.state == State.FAILED # has changed
|
||||
assert service.name == "Service" # has not changed as readonly
|
||||
assert service.some_float == 1.0 # has not changed due to different type
|
||||
assert service.subservice.name == "SubService" # didn't change
|
||||
|
||||
assert "'some_unit' changed to '12.0 A'" in caplog.text
|
||||
assert (
|
||||
"Property 'name' has no '@load_state' decorator. "
|
||||
"Ignoring value from JSON file..." in caplog.text
|
||||
)
|
||||
assert (
|
||||
"Attribute type of 'some_float' changed from 'int' to 'float'. "
|
||||
"Ignoring value from JSON file..."
|
||||
) in caplog.text
|
||||
assert (
|
||||
"Attribute type of 'removed_attr' changed from 'str' to 'None'. "
|
||||
"Ignoring value from JSON file..." in caplog.text
|
||||
)
|
||||
assert "Value of attribute 'subservice.name' has not changed..." in caplog.text
|
||||
|
||||
|
||||
def test_filename_warning(tmp_path: Path, caplog: LogCaptureFixture) -> None:
|
||||
file = tmp_path / "test_state.json"
|
||||
|
||||
with pytest.warns(DeprecationWarning):
|
||||
service = Service(filename=str(file))
|
||||
StateManager(service=service, filename=str(file))
|
||||
|
||||
assert f"Overwriting filename {str(file)!r} with {str(file)!r}." in caplog.text
|
||||
|
||||
|
||||
def test_filename_error(caplog: LogCaptureFixture) -> None:
|
||||
service = Service()
|
||||
manager = StateManager(service=service)
|
||||
|
||||
manager.save_state()
|
||||
assert (
|
||||
"State manager was not initialised with a filename. Skipping 'save_state'..."
|
||||
in caplog.text
|
||||
)
|
||||
|
||||
|
||||
def test_readonly_attribute(tmp_path: Path, caplog: LogCaptureFixture) -> None:
|
||||
# Create a StateManager instance with a temporary file
|
||||
file = tmp_path / "test_state.json"
|
||||
|
||||
# Write a temporary JSON file to read back
|
||||
with open(file, "w") as f:
|
||||
json.dump(LOAD_STATE, f, indent=4)
|
||||
|
||||
service = Service()
|
||||
manager = StateManager(service=service, filename=str(file))
|
||||
manager.load_state()
|
||||
assert service.name == "Service"
|
||||
assert (
|
||||
"Property 'name' has no '@load_state' decorator. "
|
||||
"Ignoring value from JSON file..." in caplog.text
|
||||
)
|
||||
|
||||
|
||||
def test_changed_type(tmp_path: Path, caplog: LogCaptureFixture) -> None:
|
||||
# Create a StateManager instance with a temporary file
|
||||
file = tmp_path / "test_state.json"
|
||||
|
||||
# Write a temporary JSON file to read back
|
||||
with open(file, "w") as f:
|
||||
json.dump(LOAD_STATE, f, indent=4)
|
||||
|
||||
service = Service()
|
||||
manager = StateManager(service=service, filename=str(file))
|
||||
manager.load_state()
|
||||
assert (
|
||||
"Attribute type of 'some_float' changed from 'int' to "
|
||||
"'float'. Ignoring value from JSON file..."
|
||||
) in caplog.text
|
||||
|
||||
|
||||
def test_property_load_state(tmp_path: Path) -> None:
|
||||
# Create a StateManager instance with a temporary file
|
||||
file = tmp_path / "test_state.json"
|
||||
|
||||
LOAD_STATE = {
|
||||
"name": {
|
||||
"type": "str",
|
||||
"value": "Some other name",
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
},
|
||||
"not_loadable_attr": {
|
||||
"type": "str",
|
||||
"value": "But I AM loadable!?",
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
},
|
||||
}
|
||||
|
||||
# Write a temporary JSON file to read back
|
||||
with open(file, "w") as f:
|
||||
json.dump(LOAD_STATE, f, indent=4)
|
||||
|
||||
class Service(pydase.DataService):
|
||||
_name = "Service"
|
||||
_not_loadable_attr = "Not loadable"
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@name.setter
|
||||
@load_state
|
||||
def name(self, value: str) -> None:
|
||||
self._name = value
|
||||
|
||||
@property
|
||||
def not_loadable_attr(self) -> str:
|
||||
return self._not_loadable_attr
|
||||
|
||||
@not_loadable_attr.setter
|
||||
def not_loadable_attr(self, value: str) -> None:
|
||||
self._not_loadable_attr = value
|
||||
|
||||
@property
|
||||
def property_without_setter(self) -> None:
|
||||
return
|
||||
|
||||
service_instance = Service()
|
||||
StateManager(service_instance, filename=file).load_state()
|
||||
|
||||
assert service_instance.name == "Some other name"
|
||||
assert service_instance.not_loadable_attr == "Not loadable"
|
||||
assert not has_load_state_decorator(type(service_instance).property_without_setter)
|
||||
92
tests/data_service/test_task_manager.py
Normal file
92
tests/data_service/test_task_manager.py
Normal file
@@ -0,0 +1,92 @@
|
||||
import logging
|
||||
|
||||
import pydase
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
||||
def test_autostart_task_callback(caplog: LogCaptureFixture) -> None:
|
||||
class MyService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._autostart_tasks = { # type: ignore
|
||||
"my_task": (),
|
||||
"my_other_task": (),
|
||||
}
|
||||
|
||||
async def my_task(self) -> None:
|
||||
logger.info("Triggered task.")
|
||||
|
||||
async def my_other_task(self) -> None:
|
||||
logger.info("Triggered other task.")
|
||||
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
service_instance._task_manager.start_autostart_tasks()
|
||||
|
||||
assert "'my_task' changed to '{}'" in caplog.text
|
||||
assert "'my_other_task' changed to '{}'" in caplog.text
|
||||
|
||||
|
||||
def test_DataService_subclass_autostart_task_callback(
|
||||
caplog: LogCaptureFixture,
|
||||
) -> None:
|
||||
class MySubService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._autostart_tasks = { # type: ignore
|
||||
"my_task": (),
|
||||
"my_other_task": (),
|
||||
}
|
||||
|
||||
async def my_task(self) -> None:
|
||||
logger.info("Triggered task.")
|
||||
|
||||
async def my_other_task(self) -> None:
|
||||
logger.info("Triggered other task.")
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
sub_service = MySubService()
|
||||
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
service_instance._task_manager.start_autostart_tasks()
|
||||
|
||||
assert "'sub_service.my_task' changed to '{}'" in caplog.text
|
||||
assert "'sub_service.my_other_task' changed to '{}'" in caplog.text
|
||||
|
||||
|
||||
def test_DataService_subclass_list_autostart_task_callback(
|
||||
caplog: LogCaptureFixture,
|
||||
) -> None:
|
||||
class MySubService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._autostart_tasks = { # type: ignore
|
||||
"my_task": (),
|
||||
"my_other_task": (),
|
||||
}
|
||||
|
||||
async def my_task(self) -> None:
|
||||
logger.info("Triggered task.")
|
||||
|
||||
async def my_other_task(self) -> None:
|
||||
logger.info("Triggered other task.")
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
sub_services_list = [MySubService() for i in range(2)]
|
||||
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
service_instance._task_manager.start_autostart_tasks()
|
||||
|
||||
assert "'sub_services_list[0].my_task' changed to '{}'" in caplog.text
|
||||
assert "'sub_services_list[0].my_other_task' changed to '{}'" in caplog.text
|
||||
assert "'sub_services_list[1].my_task' changed to '{}'" in caplog.text
|
||||
assert "'sub_services_list[1].my_other_task' changed to '{}'" in caplog.text
|
||||
173
tests/observer_pattern/observable/test_observable.py
Normal file
173
tests/observer_pattern/observable/test_observable.py
Normal file
@@ -0,0 +1,173 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from pydase.observer_pattern.observable import Observable
|
||||
from pydase.observer_pattern.observer import Observer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MyObserver(Observer):
|
||||
def on_change(self, full_access_path: str, value: Any) -> None:
|
||||
logger.info("'%s' changed to '%s'", full_access_path, value)
|
||||
|
||||
|
||||
def test_constructor_error_message(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
self.attr = 1
|
||||
super().__init__()
|
||||
|
||||
MyObservable()
|
||||
|
||||
assert (
|
||||
"Ensure that super().__init__() is called at the start of the 'MyObservable' "
|
||||
"constructor! Failing to do so may lead to unexpected behavior." in caplog.text
|
||||
)
|
||||
|
||||
|
||||
def test_simple_class_attribute(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class MyObservable(Observable):
|
||||
int_attribute = 10
|
||||
|
||||
instance = MyObservable()
|
||||
observer = MyObserver(instance)
|
||||
instance.int_attribute = 12
|
||||
|
||||
assert "'int_attribute' changed to '12'" in caplog.text
|
||||
|
||||
|
||||
def test_simple_instance_attribute(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.int_attribute = 10
|
||||
|
||||
instance = MyObservable()
|
||||
observer = MyObserver(instance)
|
||||
instance.int_attribute = 12
|
||||
|
||||
assert "'int_attribute' changed to '12'" in caplog.text
|
||||
|
||||
|
||||
def test_nested_class_attribute(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class MySubclass(Observable):
|
||||
name = "My Subclass"
|
||||
|
||||
class MyObservable(Observable):
|
||||
subclass = MySubclass()
|
||||
|
||||
instance = MyObservable()
|
||||
observer = MyObserver(instance)
|
||||
instance.subclass.name = "Other name"
|
||||
|
||||
assert "'subclass.name' changed to 'Other name'" in caplog.text
|
||||
|
||||
|
||||
def test_nested_instance_attribute(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class MySubclass(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.name = "Subclass name"
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.subclass = MySubclass()
|
||||
|
||||
instance = MyObservable()
|
||||
observer = MyObserver(instance)
|
||||
instance.subclass.name = "Other name"
|
||||
|
||||
assert "'subclass.name' changed to 'Other name'" in caplog.text
|
||||
|
||||
|
||||
def test_removed_observer_on_class_attr(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class NestedObservable(Observable):
|
||||
name = "Hello"
|
||||
|
||||
nested_instance = NestedObservable()
|
||||
|
||||
class MyObservable(Observable):
|
||||
nested_attr = nested_instance
|
||||
changed_attr = nested_instance
|
||||
|
||||
instance = MyObservable()
|
||||
observer = MyObserver(instance)
|
||||
instance.changed_attr = "Ciao"
|
||||
|
||||
assert "'changed_attr' changed to 'Ciao'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
instance.nested_attr.name = "Hi"
|
||||
|
||||
assert "'nested_attr.name' changed to 'Hi'" in caplog.text
|
||||
assert "'changed_attr.name' changed to 'Hi'" not in caplog.text
|
||||
|
||||
|
||||
def test_removed_observer_on_instance_attr(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class NestedObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.name = "Hello"
|
||||
|
||||
nested_instance = NestedObservable()
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.nested_attr = nested_instance
|
||||
self.changed_attr = nested_instance
|
||||
|
||||
instance = MyObservable()
|
||||
observer = MyObserver(instance)
|
||||
instance.changed_attr = "Ciao"
|
||||
|
||||
assert "'changed_attr' changed to 'Ciao'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
instance.nested_attr.name = "Hi"
|
||||
|
||||
assert "'nested_attr.name' changed to 'Hi'" in caplog.text
|
||||
assert "'changed_attr.name' changed to 'Hi'" not in caplog.text
|
||||
|
||||
|
||||
def test_property_getter(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._name = "Hello"
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""The name property."""
|
||||
return self._name
|
||||
|
||||
instance = MyObservable()
|
||||
observer = MyObserver(instance)
|
||||
_ = instance.name
|
||||
|
||||
assert "'name' changed to 'Hello'" in caplog.text
|
||||
|
||||
|
||||
def test_property_setter(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._name = "Hello"
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@name.setter
|
||||
def name(self, value: str) -> None:
|
||||
self._name = value
|
||||
|
||||
instance = MyObservable()
|
||||
observer = MyObserver(instance)
|
||||
instance.name = "Ciao"
|
||||
|
||||
assert "'name' changed to 'Hello'" not in caplog.text
|
||||
assert "'name' changed to 'Ciao'" in caplog.text
|
||||
474
tests/observer_pattern/observable/test_observable_object.py
Normal file
474
tests/observer_pattern/observable/test_observable_object.py
Normal file
@@ -0,0 +1,474 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from pydase.observer_pattern.observable import Observable
|
||||
from pydase.observer_pattern.observer import Observer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MyObserver(Observer):
|
||||
def on_change(self, full_access_path: str, value: Any) -> None:
|
||||
logger.info("'%s' changed to '%s'", full_access_path, value)
|
||||
|
||||
|
||||
def test_simple_instance_list_attribute(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.list_attr = [1, 2]
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.list_attr[0] = 12
|
||||
|
||||
assert "'list_attr[0]' changed to '12'" in caplog.text
|
||||
|
||||
|
||||
def test_instance_object_list_attribute(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class NestedObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.name = "Hello"
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.list_attr = [NestedObservable()]
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.list_attr[0].name = "Ciao"
|
||||
|
||||
assert "'list_attr[0].name' changed to 'Ciao'" in caplog.text
|
||||
|
||||
|
||||
def test_simple_class_list_attribute(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class MyObservable(Observable):
|
||||
list_attr = [1, 2]
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.list_attr[0] = 12
|
||||
|
||||
assert "'list_attr[0]' changed to '12'" in caplog.text
|
||||
|
||||
|
||||
def test_class_object_list_attribute(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class NestedObservable(Observable):
|
||||
name = "Hello"
|
||||
|
||||
class MyObservable(Observable):
|
||||
list_attr = [NestedObservable()]
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.list_attr[0].name = "Ciao"
|
||||
|
||||
assert "'list_attr[0].name' changed to 'Ciao'" in caplog.text
|
||||
|
||||
|
||||
def test_simple_instance_dict_attribute(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.dict_attr = {"first": "Hello"}
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.dict_attr["first"] = "Ciao"
|
||||
instance.dict_attr["second"] = "World"
|
||||
|
||||
assert "'dict_attr['first']' changed to 'Ciao'" in caplog.text
|
||||
assert "'dict_attr['second']' changed to 'World'" in caplog.text
|
||||
|
||||
|
||||
def test_simple_class_dict_attribute(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class MyObservable(Observable):
|
||||
dict_attr = {"first": "Hello"}
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.dict_attr["first"] = "Ciao"
|
||||
instance.dict_attr["second"] = "World"
|
||||
|
||||
assert "'dict_attr['first']' changed to 'Ciao'" in caplog.text
|
||||
assert "'dict_attr['second']' changed to 'World'" in caplog.text
|
||||
|
||||
|
||||
def test_instance_dict_attribute(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class NestedObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.name = "Hello"
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.dict_attr = {"first": NestedObservable()}
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.dict_attr["first"].name = "Ciao"
|
||||
|
||||
assert "'dict_attr['first'].name' changed to 'Ciao'" in caplog.text
|
||||
|
||||
|
||||
def test_class_dict_attribute(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class NestedObservable(Observable):
|
||||
name = "Hello"
|
||||
|
||||
class MyObservable(Observable):
|
||||
dict_attr = {"first": NestedObservable()}
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.dict_attr["first"].name = "Ciao"
|
||||
|
||||
assert "'dict_attr['first'].name' changed to 'Ciao'" in caplog.text
|
||||
|
||||
|
||||
def test_removed_observer_on_class_list_attr(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class NestedObservable(Observable):
|
||||
name = "Hello"
|
||||
|
||||
nested_instance = NestedObservable()
|
||||
|
||||
class MyObservable(Observable):
|
||||
nested_attr = nested_instance
|
||||
changed_list_attr = [nested_instance]
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.changed_list_attr[0] = "Ciao"
|
||||
|
||||
assert "'changed_list_attr[0]' changed to 'Ciao'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
instance.nested_attr.name = "Hi"
|
||||
|
||||
assert "'nested_attr.name' changed to 'Hi'" in caplog.text
|
||||
assert "'changed_list_attr[0].name' changed to 'Hi'" not in caplog.text
|
||||
|
||||
|
||||
def test_removed_observer_on_instance_dict_attr(
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
class NestedObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.name = "Hello"
|
||||
|
||||
nested_instance = NestedObservable()
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.nested_attr = nested_instance
|
||||
self.changed_dict_attr = {"nested": nested_instance}
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.changed_dict_attr["nested"] = "Ciao"
|
||||
|
||||
assert "'changed_dict_attr['nested']' changed to 'Ciao'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
instance.nested_attr.name = "Hi"
|
||||
|
||||
assert "'nested_attr.name' changed to 'Hi'" in caplog.text
|
||||
assert "'changed_dict_attr['nested'].name' changed to 'Hi'" not in caplog.text
|
||||
|
||||
|
||||
def test_removed_observer_on_instance_list_attr(
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
class NestedObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.name = "Hello"
|
||||
|
||||
nested_instance = NestedObservable()
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.nested_attr = nested_instance
|
||||
self.changed_list_attr = [nested_instance]
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.changed_list_attr[0] = "Ciao"
|
||||
|
||||
assert "'changed_list_attr[0]' changed to 'Ciao'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
instance.nested_attr.name = "Hi"
|
||||
|
||||
assert "'nested_attr.name' changed to 'Hi'" in caplog.text
|
||||
assert "'changed_list_attr[0].name' changed to 'Hi'" not in caplog.text
|
||||
|
||||
|
||||
def test_removed_observer_on_class_dict_attr(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class NestedObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.name = "Hello"
|
||||
|
||||
nested_instance = NestedObservable()
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.nested_attr = nested_instance
|
||||
self.changed_dict_attr = {"nested": nested_instance}
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.changed_dict_attr["nested"] = "Ciao"
|
||||
|
||||
assert "'changed_dict_attr['nested']' changed to 'Ciao'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
instance.nested_attr.name = "Hi"
|
||||
|
||||
assert "'nested_attr.name' changed to 'Hi'" in caplog.text
|
||||
assert "'changed_dict_attr['nested'].name' changed to 'Hi'" not in caplog.text
|
||||
|
||||
|
||||
def test_nested_dict_instances(caplog: pytest.LogCaptureFixture) -> None:
|
||||
dict_instance = {"first": "Hello", "second": "World"}
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.nested_dict_attr = {"nested": dict_instance}
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.nested_dict_attr["nested"]["first"] = "Ciao"
|
||||
|
||||
assert "'nested_dict_attr['nested']['first']' changed to 'Ciao'" in caplog.text
|
||||
|
||||
|
||||
def test_dict_in_list_instance(caplog: pytest.LogCaptureFixture) -> None:
|
||||
dict_instance = {"first": "Hello", "second": "World"}
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.dict_in_list = [dict_instance]
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.dict_in_list[0]["first"] = "Ciao"
|
||||
|
||||
assert "'dict_in_list[0]['first']' changed to 'Ciao'" in caplog.text
|
||||
|
||||
|
||||
def test_list_in_dict_instance(caplog: pytest.LogCaptureFixture) -> None:
|
||||
list_instance: list[Any] = [1, 2, 3]
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.list_in_dict = {"some_list": list_instance}
|
||||
|
||||
instance = MyObservable()
|
||||
MyObserver(instance)
|
||||
instance.list_in_dict["some_list"][0] = "Ciao"
|
||||
|
||||
assert "'list_in_dict['some_list'][0]' changed to 'Ciao'" in caplog.text
|
||||
|
||||
|
||||
def test_list_append(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class OtherObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.greeting = "Other Observable"
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.my_list = []
|
||||
|
||||
observable_instance = MyObservable()
|
||||
MyObserver(observable_instance)
|
||||
|
||||
observable_instance.my_list.append(OtherObservable())
|
||||
assert f"'my_list' changed to '{observable_instance.my_list}'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
observable_instance.my_list.append(OtherObservable())
|
||||
assert f"'my_list' changed to '{observable_instance.my_list}'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
observable_instance.my_list[0].greeting = "Hi"
|
||||
observable_instance.my_list[1].greeting = "Hello"
|
||||
|
||||
assert observable_instance.my_list[0].greeting == "Hi"
|
||||
assert observable_instance.my_list[1].greeting == "Hello"
|
||||
assert "'my_list[0].greeting' changed to 'Hi'" in caplog.text
|
||||
assert "'my_list[1].greeting' changed to 'Hello'" in caplog.text
|
||||
|
||||
|
||||
def test_list_pop(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class OtherObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.greeting = "Hello there!"
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.my_list = [OtherObservable() for _ in range(2)]
|
||||
|
||||
observable_instance = MyObservable()
|
||||
MyObserver(observable_instance)
|
||||
|
||||
popped_instance = observable_instance.my_list.pop(0)
|
||||
|
||||
assert len(observable_instance.my_list) == 1
|
||||
assert f"'my_list' changed to '{observable_instance.my_list}'" in caplog.text
|
||||
|
||||
# checks if observer is removed
|
||||
popped_instance.greeting = "Ciao"
|
||||
assert "'my_list[0].greeting' changed to 'Ciao'" not in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
# checks if observer keys have been updated (index 1 moved to 0)
|
||||
observable_instance.my_list[0].greeting = "Hi"
|
||||
assert "'my_list[0].greeting' changed to 'Hi'" in caplog.text
|
||||
|
||||
|
||||
def test_list_clear(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class OtherObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.greeting = "Hello there!"
|
||||
|
||||
other_observable_instance = OtherObservable()
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.my_list = [other_observable_instance]
|
||||
|
||||
observable_instance = MyObservable()
|
||||
MyObserver(observable_instance)
|
||||
|
||||
other_observable_instance.greeting = "Hello"
|
||||
assert "'my_list[0].greeting' changed to 'Hello'" in caplog.text
|
||||
|
||||
observable_instance.my_list.clear()
|
||||
|
||||
assert len(observable_instance.my_list) == 0
|
||||
assert "'my_list' changed to '[]'" in caplog.text
|
||||
|
||||
# checks if observer has been removed
|
||||
other_observable_instance.greeting = "Hi"
|
||||
assert "'my_list[0].greeting' changed to 'Hi'" not in caplog.text
|
||||
|
||||
|
||||
def test_list_extend(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class OtherObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.greeting = "Hello there!"
|
||||
|
||||
other_observable_instance = OtherObservable()
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.my_list = []
|
||||
|
||||
observable_instance = MyObservable()
|
||||
MyObserver(observable_instance)
|
||||
|
||||
other_observable_instance.greeting = "Hello"
|
||||
assert "'my_list[0].greeting' changed to 'Hello'" not in caplog.text
|
||||
|
||||
observable_instance.my_list.extend([other_observable_instance, OtherObservable()])
|
||||
|
||||
assert len(observable_instance.my_list) == 2
|
||||
assert f"'my_list' changed to '{observable_instance.my_list}'" in caplog.text
|
||||
|
||||
# checks if observer has been removed
|
||||
other_observable_instance.greeting = "Hi"
|
||||
assert "'my_list[0].greeting' changed to 'Hi'" in caplog.text
|
||||
observable_instance.my_list[1].greeting = "Ciao"
|
||||
assert "'my_list[1].greeting' changed to 'Ciao'" in caplog.text
|
||||
|
||||
|
||||
def test_list_insert(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class OtherObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.greeting = "Hello there!"
|
||||
|
||||
other_observable_instance_1 = OtherObservable()
|
||||
other_observable_instance_2 = OtherObservable()
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.my_list = [other_observable_instance_1, OtherObservable()]
|
||||
|
||||
observable_instance = MyObservable()
|
||||
MyObserver(observable_instance)
|
||||
|
||||
other_observable_instance_1.greeting = "Hello"
|
||||
assert "'my_list[0].greeting' changed to 'Hello'" in caplog.text
|
||||
|
||||
observable_instance.my_list.insert(0, other_observable_instance_2)
|
||||
|
||||
assert len(observable_instance.my_list) == 3
|
||||
assert f"'my_list' changed to '{observable_instance.my_list}'" in caplog.text
|
||||
|
||||
# checks if observer keys have been updated
|
||||
other_observable_instance_2.greeting = "Hey"
|
||||
other_observable_instance_1.greeting = "Hi"
|
||||
observable_instance.my_list[2].greeting = "Ciao"
|
||||
|
||||
assert "'my_list[0].greeting' changed to 'Hey'" in caplog.text
|
||||
assert "'my_list[1].greeting' changed to 'Hi'" in caplog.text
|
||||
assert "'my_list[2].greeting' changed to 'Ciao'" in caplog.text
|
||||
|
||||
|
||||
def test_list_remove(caplog: pytest.LogCaptureFixture) -> None:
|
||||
class OtherObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.greeting = "Hello there!"
|
||||
|
||||
other_observable_instance_1 = OtherObservable()
|
||||
other_observable_instance_2 = OtherObservable()
|
||||
|
||||
class MyObservable(Observable):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.my_list = [other_observable_instance_1, other_observable_instance_2]
|
||||
|
||||
observable_instance = MyObservable()
|
||||
MyObserver(observable_instance)
|
||||
|
||||
other_observable_instance_1.greeting = "Hello"
|
||||
other_observable_instance_2.greeting = "Hi"
|
||||
caplog.clear()
|
||||
|
||||
observable_instance.my_list.remove(other_observable_instance_1)
|
||||
|
||||
assert len(observable_instance.my_list) == 1
|
||||
assert f"'my_list' changed to '{observable_instance.my_list}'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
# checks if observer has been removed
|
||||
other_observable_instance_1.greeting = "Hi"
|
||||
assert "'my_list[0].greeting' changed to 'Hi'" not in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
# checks if observer key was updated correctly (was index 1)
|
||||
other_observable_instance_2.greeting = "Ciao"
|
||||
assert "'my_list[0].greeting' changed to 'Ciao'" in caplog.text
|
||||
25
tests/observer_pattern/observer/test_observer.py
Normal file
25
tests/observer_pattern/observer/test_observer.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from pydase.observer_pattern.observable import Observable
|
||||
from pydase.observer_pattern.observer import Observer
|
||||
|
||||
|
||||
def test_abstract_method_error() -> None:
|
||||
class MyObserver(Observer):
|
||||
pass
|
||||
|
||||
class MyObservable(Observable):
|
||||
pass
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
MyObserver(MyObservable())
|
||||
|
||||
|
||||
def test_constructor_error() -> None:
|
||||
class MyObserver(Observer):
|
||||
def on_change(self, full_access_path: str, value: Any) -> None:
|
||||
pass
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
MyObserver()
|
||||
35
tests/server/test_server.py
Normal file
35
tests/server/test_server.py
Normal file
@@ -0,0 +1,35 @@
|
||||
import signal
|
||||
|
||||
from pytest_mock import MockerFixture
|
||||
|
||||
import pydase
|
||||
|
||||
|
||||
def test_signal_handling(mocker: MockerFixture):
|
||||
# Mock os._exit and signal.signal
|
||||
mock_exit = mocker.patch("os._exit")
|
||||
mock_signal = mocker.patch("signal.signal")
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
pass
|
||||
|
||||
# Instantiate your server object
|
||||
server = pydase.Server(MyService())
|
||||
|
||||
# Call the method to install signal handlers
|
||||
server.install_signal_handlers()
|
||||
|
||||
# Check if the signal handlers were registered correctly
|
||||
assert mock_signal.call_args_list == [
|
||||
mocker.call(signal.SIGINT, server.handle_exit),
|
||||
mocker.call(signal.SIGTERM, server.handle_exit),
|
||||
]
|
||||
|
||||
# Simulate receiving a SIGINT signal for the first time
|
||||
server.handle_exit(signal.SIGINT, None)
|
||||
assert server.should_exit # assuming should_exit is public
|
||||
mock_exit.assert_not_called()
|
||||
|
||||
# Simulate receiving a SIGINT signal for the second time
|
||||
server.handle_exit(signal.SIGINT, None)
|
||||
mock_exit.assert_called_once_with(1)
|
||||
@@ -1,101 +0,0 @@
|
||||
from pytest import CaptureFixture
|
||||
|
||||
from pydase import DataService
|
||||
|
||||
|
||||
def test_class_list_attribute(capsys: CaptureFixture) -> None:
|
||||
class ServiceClass(DataService):
|
||||
attr = [0, 1]
|
||||
|
||||
service_instance = ServiceClass()
|
||||
|
||||
service_instance.attr[0] = 1337
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == "ServiceClass.attr[0] = 1337\n"
|
||||
|
||||
|
||||
def test_instance_list_attribute(capsys: CaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "SubClass"
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr = [0, SubClass()]
|
||||
super().__init__()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
|
||||
_ = capsys.readouterr()
|
||||
|
||||
service_instance.attr[0] = "Hello"
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == "ServiceClass.attr[0] = Hello\n"
|
||||
|
||||
service_instance.attr[1] = SubClass()
|
||||
captured = capsys.readouterr()
|
||||
assert (
|
||||
captured.out.strip()
|
||||
== "ServiceClass.attr[1] = {'name': {'type': 'str', 'value': 'SubClass',"
|
||||
" 'readonly': False, 'doc': None}}"
|
||||
)
|
||||
|
||||
|
||||
def test_reused_instance_list_attribute(capsys: CaptureFixture) -> None:
|
||||
some_list = [0, 1, 2]
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr = some_list
|
||||
self.attr_2 = some_list
|
||||
self.attr_3 = [0, 1, 2]
|
||||
super().__init__()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
|
||||
service_instance.attr[0] = 20
|
||||
captured = capsys.readouterr()
|
||||
|
||||
assert service_instance.attr == service_instance.attr_2
|
||||
assert service_instance.attr != service_instance.attr_3
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr[0] = 20",
|
||||
"ServiceClass.attr_2[0] = 20",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
|
||||
def test_nested_reused_instance_list_attribute(capsys: CaptureFixture) -> None:
|
||||
some_list = [0, 1, 2]
|
||||
|
||||
class SubClass(DataService):
|
||||
attr_list = some_list
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.attr_list_2 = some_list
|
||||
super().__init__()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr = some_list
|
||||
self.subclass = SubClass()
|
||||
super().__init__()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
|
||||
_ = capsys.readouterr()
|
||||
service_instance.attr[0] = 20
|
||||
captured = capsys.readouterr()
|
||||
|
||||
assert service_instance.attr == service_instance.subclass.attr_list
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.subclass.attr_list_2[0] = 20",
|
||||
"ServiceClass.subclass.attr_list[0] = 20",
|
||||
"ServiceClass.attr[0] = 20",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
@@ -1,568 +0,0 @@
|
||||
from pytest import CaptureFixture
|
||||
|
||||
from pydase import DataService
|
||||
|
||||
|
||||
def test_class_attributes(capsys: CaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class ServiceClass(DataService):
|
||||
attr_1 = SubClass()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
_ = capsys.readouterr()
|
||||
service_instance.attr_1.name = "Hi"
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out.strip() == "ServiceClass.attr_1.name = Hi"
|
||||
|
||||
|
||||
def test_instance_attributes(capsys: CaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr_1 = SubClass()
|
||||
super().__init__()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
_ = capsys.readouterr()
|
||||
service_instance.attr_1.name = "Hi"
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out.strip() == "ServiceClass.attr_1.name = Hi"
|
||||
|
||||
|
||||
def test_class_attribute(capsys: CaptureFixture) -> None:
|
||||
class ServiceClass(DataService):
|
||||
attr = 0
|
||||
|
||||
service_instance = ServiceClass()
|
||||
|
||||
service_instance.attr = 1
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == "ServiceClass.attr = 1\n"
|
||||
|
||||
|
||||
def test_instance_attribute(capsys: CaptureFixture) -> None:
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr = "Hello World"
|
||||
super().__init__()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
|
||||
service_instance.attr = "Hello"
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == "ServiceClass.attr = Hello\n"
|
||||
|
||||
|
||||
def test_reused_instance_attributes(capsys: CaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
subclass_instance = SubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr_1 = subclass_instance
|
||||
self.attr_2 = subclass_instance
|
||||
super().__init__()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
_ = capsys.readouterr()
|
||||
service_instance.attr_1.name = "Hi"
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert service_instance.attr_1 == service_instance.attr_2
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr_1.name = Hi",
|
||||
"ServiceClass.attr_2.name = Hi",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
|
||||
def test_reused_attributes_mixed(capsys: CaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
pass
|
||||
|
||||
subclass_instance = SubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
attr_1 = subclass_instance
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.attr_2 = subclass_instance
|
||||
super().__init__()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
_ = capsys.readouterr()
|
||||
service_instance.attr_1.name = "Hi"
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert service_instance.attr_1 == service_instance.attr_2
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr_1.name = Hi",
|
||||
"ServiceClass.attr_2.name = Hi",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
|
||||
def test_nested_class_attributes(capsys: CaptureFixture) -> None:
|
||||
class SubSubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class SubSubClass(DataService):
|
||||
name = "Hello"
|
||||
attr = SubSubSubClass()
|
||||
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
attr = SubSubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
name = "Hello"
|
||||
attr = SubClass()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
_ = capsys.readouterr()
|
||||
service_instance.attr.attr.attr.name = "Hi"
|
||||
service_instance.attr.attr.name = "Hou"
|
||||
service_instance.attr.name = "foo"
|
||||
service_instance.name = "bar"
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr.attr.attr.name = Hi",
|
||||
"ServiceClass.attr.attr.name = Hou",
|
||||
"ServiceClass.attr.name = foo",
|
||||
"ServiceClass.name = bar",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
|
||||
def test_nested_instance_attributes(capsys: CaptureFixture) -> None:
|
||||
class SubSubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class SubSubClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr = SubSubSubClass()
|
||||
self.name = "Hello"
|
||||
super().__init__()
|
||||
|
||||
class SubClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr = SubSubClass()
|
||||
self.name = "Hello"
|
||||
super().__init__()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr = SubClass()
|
||||
self.name = "Hello"
|
||||
super().__init__()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
_ = capsys.readouterr()
|
||||
service_instance.attr.attr.attr.name = "Hi"
|
||||
service_instance.attr.attr.name = "Hou"
|
||||
service_instance.attr.name = "foo"
|
||||
service_instance.name = "bar"
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr.attr.attr.name = Hi",
|
||||
"ServiceClass.attr.attr.name = Hou",
|
||||
"ServiceClass.attr.name = foo",
|
||||
"ServiceClass.name = bar",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
|
||||
def test_advanced_nested_class_attributes(capsys: CaptureFixture) -> None:
|
||||
class SubSubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class SubSubClass(DataService):
|
||||
attr = SubSubSubClass()
|
||||
|
||||
class SubClass(DataService):
|
||||
attr = SubSubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
attr = SubClass()
|
||||
subattr = SubSubClass()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
_ = capsys.readouterr()
|
||||
service_instance.attr.attr.attr.name = "Hi"
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr.attr.attr.name = Hi",
|
||||
"ServiceClass.subattr.attr.name = Hi",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
service_instance.subattr.attr.name = "Ho"
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr.attr.attr.name = Ho",
|
||||
"ServiceClass.subattr.attr.name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
|
||||
def test_advanced_nested_instance_attributes(capsys: CaptureFixture) -> None:
|
||||
class SubSubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class SubSubClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr = SubSubSubClass()
|
||||
super().__init__()
|
||||
|
||||
subsubclass_instance = SubSubClass()
|
||||
|
||||
class SubClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr = subsubclass_instance
|
||||
super().__init__()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr = SubClass()
|
||||
self.subattr = subsubclass_instance
|
||||
super().__init__()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
_ = capsys.readouterr()
|
||||
service_instance.attr.attr.attr.name = "Hi"
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr.attr.attr.name = Hi",
|
||||
"ServiceClass.subattr.attr.name = Hi",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
service_instance.subattr.attr.name = "Ho"
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr.attr.attr.name = Ho",
|
||||
"ServiceClass.subattr.attr.name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
|
||||
def test_advanced_nested_attributes_mixed(capsys: CaptureFixture) -> None:
|
||||
class SubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class SubClass(DataService):
|
||||
class_attr = SubSubClass()
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.attr_1 = SubSubClass()
|
||||
super().__init__()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
class_attr = SubClass()
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.attr = SubClass()
|
||||
super().__init__()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
# Subclass.attr is the same for all instances
|
||||
assert service_instance.attr.class_attr == service_instance.class_attr.class_attr
|
||||
|
||||
# attr_1 is different for all instances of SubClass
|
||||
assert service_instance.attr.attr_1 != service_instance.class_attr.attr_1
|
||||
|
||||
# instances of SubSubClass are unequal
|
||||
assert service_instance.attr.attr_1 != service_instance.class_attr.class_attr
|
||||
|
||||
_ = capsys.readouterr()
|
||||
|
||||
service_instance.class_attr.class_attr.name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.class_attr.class_attr.name = Ho",
|
||||
"ServiceClass.attr.class_attr.name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
service_instance.class_attr.attr_1.name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(["ServiceClass.class_attr.attr_1.name = Ho"])
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
service_instance.attr.class_attr.name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr.class_attr.name = Ho",
|
||||
"ServiceClass.class_attr.class_attr.name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
service_instance.attr.attr_1.name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(["ServiceClass.attr.attr_1.name = Ho"])
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
|
||||
def test_class_list_attributes(capsys: CaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
subclass_instance = SubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
attr_list = [SubClass() for _ in range(2)]
|
||||
attr_list_2 = [subclass_instance, subclass_instance]
|
||||
attr = subclass_instance
|
||||
|
||||
service_instance = ServiceClass()
|
||||
_ = capsys.readouterr()
|
||||
|
||||
assert service_instance.attr_list[0] != service_instance.attr_list[1]
|
||||
|
||||
service_instance.attr_list[0].name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr_list[0].name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
service_instance.attr_list[1].name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr_list[1].name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
assert service_instance.attr_list_2[0] == service_instance.attr
|
||||
assert service_instance.attr_list_2[0] == service_instance.attr_list_2[1]
|
||||
|
||||
service_instance.attr_list_2[0].name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr_list_2[0].name = Ho",
|
||||
"ServiceClass.attr_list_2[1].name = Ho",
|
||||
"ServiceClass.attr.name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
service_instance.attr_list_2[1].name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr_list_2[0].name = Ho",
|
||||
"ServiceClass.attr_list_2[1].name = Ho",
|
||||
"ServiceClass.attr.name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
|
||||
def test_nested_class_list_attributes(capsys: CaptureFixture) -> None:
|
||||
class SubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
subsubclass_instance = SubSubClass()
|
||||
|
||||
class SubClass(DataService):
|
||||
attr_list = [subsubclass_instance]
|
||||
|
||||
class ServiceClass(DataService):
|
||||
attr = [SubClass()]
|
||||
subattr = subsubclass_instance
|
||||
|
||||
service_instance = ServiceClass()
|
||||
_ = capsys.readouterr()
|
||||
|
||||
assert service_instance.attr[0].attr_list[0] == service_instance.subattr
|
||||
|
||||
service_instance.attr[0].attr_list[0].name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr[0].attr_list[0].name = Ho",
|
||||
"ServiceClass.subattr.name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
service_instance.subattr.name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr[0].attr_list[0].name = Ho",
|
||||
"ServiceClass.subattr.name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
|
||||
def test_instance_list_attributes(capsys: CaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
subclass_instance = SubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr_list = [SubClass() for _ in range(2)]
|
||||
self.attr_list_2 = [subclass_instance, subclass_instance]
|
||||
self.attr = subclass_instance
|
||||
super().__init__()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
_ = capsys.readouterr()
|
||||
|
||||
assert service_instance.attr_list[0] != service_instance.attr_list[1]
|
||||
|
||||
service_instance.attr_list[0].name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(["ServiceClass.attr_list[0].name = Ho"])
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
service_instance.attr_list[1].name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(["ServiceClass.attr_list[1].name = Ho"])
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
assert service_instance.attr_list_2[0] == service_instance.attr
|
||||
assert service_instance.attr_list_2[0] == service_instance.attr_list_2[1]
|
||||
|
||||
service_instance.attr_list_2[0].name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr.name = Ho",
|
||||
"ServiceClass.attr_list_2[0].name = Ho",
|
||||
"ServiceClass.attr_list_2[1].name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
service_instance.attr_list_2[1].name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr.name = Ho",
|
||||
"ServiceClass.attr_list_2[0].name = Ho",
|
||||
"ServiceClass.attr_list_2[1].name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
service_instance.attr.name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr.name = Ho",
|
||||
"ServiceClass.attr_list_2[0].name = Ho",
|
||||
"ServiceClass.attr_list_2[1].name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
|
||||
def test_nested_instance_list_attributes(capsys: CaptureFixture) -> None:
|
||||
class SubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
subsubclass_instance = SubSubClass()
|
||||
|
||||
class SubClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self.attr_list = [subsubclass_instance]
|
||||
super().__init__()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
class_attr = subsubclass_instance
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.attr = [SubClass()]
|
||||
super().__init__()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
_ = capsys.readouterr()
|
||||
|
||||
assert service_instance.attr[0].attr_list[0] == service_instance.class_attr
|
||||
|
||||
service_instance.attr[0].attr_list[0].name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr[0].attr_list[0].name = Ho",
|
||||
"ServiceClass.class_attr.name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
service_instance.class_attr.name = "Ho"
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.attr[0].attr_list[0].name = Ho",
|
||||
"ServiceClass.class_attr.name = Ho",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
455
tests/test_data_service_attribute_callbacks.py
Normal file
455
tests/test_data_service_attribute_callbacks.py
Normal file
@@ -0,0 +1,455 @@
|
||||
from pydase import DataService
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
|
||||
def test_class_attributes(caplog: LogCaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class ServiceClass(DataService):
|
||||
attr_1 = SubClass()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
service_instance.attr_1.name = "Hi"
|
||||
|
||||
assert "'attr_1.name' changed to 'Hi'" in caplog.text
|
||||
|
||||
|
||||
def test_instance_attributes(caplog: LogCaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr_1 = SubClass()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
service_instance.attr_1.name = "Hi"
|
||||
|
||||
assert "'attr_1.name' changed to 'Hi'" in caplog.text
|
||||
|
||||
|
||||
def test_class_attribute(caplog: LogCaptureFixture) -> None:
|
||||
class ServiceClass(DataService):
|
||||
attr = 0
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
service_instance.attr = 1
|
||||
assert "'attr' changed to '1'" in caplog.text
|
||||
|
||||
|
||||
def test_instance_attribute(caplog: LogCaptureFixture) -> None:
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr = "Hello World"
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
service_instance.attr = "Hello"
|
||||
assert "'attr' changed to 'Hello'" in caplog.text
|
||||
|
||||
|
||||
def test_reused_instance_attributes(caplog: LogCaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
subclass_instance = SubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr_1 = subclass_instance
|
||||
self.attr_2 = subclass_instance
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
service_instance.attr_1.name = "Hi"
|
||||
|
||||
assert service_instance.attr_1 == service_instance.attr_2
|
||||
assert "'attr_1.name' changed to 'Hi'" in caplog.text
|
||||
assert "'attr_2.name' changed to 'Hi'" in caplog.text
|
||||
|
||||
|
||||
def test_reused_attributes_mixed(caplog: LogCaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
pass
|
||||
|
||||
subclass_instance = SubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
attr_1 = subclass_instance
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr_2 = subclass_instance
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
service_instance.attr_1.name = "Hi"
|
||||
|
||||
assert service_instance.attr_1 == service_instance.attr_2
|
||||
assert "'attr_1.name' changed to 'Hi'" in caplog.text
|
||||
assert "'attr_2.name' changed to 'Hi'" in caplog.text
|
||||
|
||||
|
||||
def test_nested_class_attributes(caplog: LogCaptureFixture) -> None:
|
||||
class SubSubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class SubSubClass(DataService):
|
||||
name = "Hello"
|
||||
attr = SubSubSubClass()
|
||||
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
attr = SubSubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
name = "Hello"
|
||||
attr = SubClass()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
service_instance.attr.attr.attr.name = "Hi"
|
||||
service_instance.attr.attr.name = "Hou"
|
||||
service_instance.attr.name = "foo"
|
||||
service_instance.name = "bar"
|
||||
|
||||
assert "'attr.attr.attr.name' changed to 'Hi'" in caplog.text
|
||||
assert "'attr.attr.name' changed to 'Hou'" in caplog.text
|
||||
assert "'attr.name' changed to 'foo'" in caplog.text
|
||||
assert "'name' changed to 'bar'" in caplog.text
|
||||
|
||||
|
||||
def test_nested_instance_attributes(caplog: LogCaptureFixture) -> None:
|
||||
class SubSubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class SubSubClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr = SubSubSubClass()
|
||||
self.name = "Hello"
|
||||
|
||||
class SubClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr = SubSubClass()
|
||||
self.name = "Hello"
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr = SubClass()
|
||||
self.name = "Hello"
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
service_instance.attr.attr.attr.name = "Hi"
|
||||
service_instance.attr.attr.name = "Hou"
|
||||
service_instance.attr.name = "foo"
|
||||
service_instance.name = "bar"
|
||||
|
||||
assert "'attr.attr.attr.name' changed to 'Hi'" in caplog.text
|
||||
assert "'attr.attr.name' changed to 'Hou'" in caplog.text
|
||||
assert "'attr.name' changed to 'foo'" in caplog.text
|
||||
assert "'name' changed to 'bar'" in caplog.text
|
||||
|
||||
|
||||
def test_advanced_nested_class_attributes(caplog: LogCaptureFixture) -> None:
|
||||
class SubSubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class SubSubClass(DataService):
|
||||
attr = SubSubSubClass()
|
||||
|
||||
class SubClass(DataService):
|
||||
attr = SubSubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
attr = SubClass()
|
||||
subattr = SubSubClass()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
service_instance.attr.attr.attr.name = "Hi"
|
||||
|
||||
assert "'attr.attr.attr.name' changed to 'Hi'" in caplog.text
|
||||
assert "'subattr.attr.name' changed to 'Hi'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.subattr.attr.name = "Ho"
|
||||
|
||||
assert "'attr.attr.attr.name' changed to 'Ho'" in caplog.text
|
||||
assert "'subattr.attr.name' changed to 'Ho'" in caplog.text
|
||||
|
||||
|
||||
def test_advanced_nested_instance_attributes(caplog: LogCaptureFixture) -> None:
|
||||
class SubSubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class SubSubClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr = SubSubSubClass()
|
||||
|
||||
subsubclass_instance = SubSubClass()
|
||||
|
||||
class SubClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr = subsubclass_instance
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr = SubClass()
|
||||
self.subattr = subsubclass_instance
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
service_instance.attr.attr.attr.name = "Hi"
|
||||
assert "'attr.attr.attr.name' changed to 'Hi'" in caplog.text
|
||||
assert "'subattr.attr.name' changed to 'Hi'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.subattr.attr.name = "Ho"
|
||||
assert "'attr.attr.attr.name' changed to 'Ho'" in caplog.text
|
||||
assert "'subattr.attr.name' changed to 'Ho'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
|
||||
def test_advanced_nested_attributes_mixed(caplog: LogCaptureFixture) -> None:
|
||||
class SubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
class SubClass(DataService):
|
||||
class_attr = SubSubClass()
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr_1 = SubSubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
class_attr = SubClass()
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr = SubClass()
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
# Subclass.attr is the same for all instances
|
||||
assert service_instance.attr.class_attr == service_instance.class_attr.class_attr
|
||||
|
||||
# attr_1 is different for all instances of SubClass
|
||||
assert service_instance.attr.attr_1 != service_instance.class_attr.attr_1
|
||||
|
||||
# instances of SubSubClass are unequal
|
||||
assert service_instance.attr.attr_1 != service_instance.class_attr.class_attr
|
||||
|
||||
service_instance.class_attr.class_attr.name = "Ho"
|
||||
assert "'class_attr.class_attr.name' changed to 'Ho'" in caplog.text
|
||||
assert "'attr.class_attr.name' changed to 'Ho'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.class_attr.attr_1.name = "Ho"
|
||||
assert "'class_attr.attr_1.name' changed to 'Ho'" in caplog.text
|
||||
assert "'attr.attr_1.name' changed to 'Ho'" not in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.attr.class_attr.name = "Hello"
|
||||
assert "'class_attr.class_attr.name' changed to 'Hello'" in caplog.text
|
||||
assert "'attr.class_attr.name' changed to 'Hello'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.attr.attr_1.name = "Ho"
|
||||
assert "'attr.attr_1.name' changed to 'Ho'" in caplog.text
|
||||
assert "'class_attr.attr_1.name' changed to 'Ho'" not in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
|
||||
def test_class_list_attributes(caplog: LogCaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
subclass_instance = SubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
attr_list = [SubClass() for _ in range(2)]
|
||||
attr_list_2 = [subclass_instance, subclass_instance]
|
||||
attr = subclass_instance
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
assert service_instance.attr_list[0] != service_instance.attr_list[1]
|
||||
|
||||
service_instance.attr_list[0].name = "Ho"
|
||||
assert "'attr_list[0].name' changed to 'Ho'" in caplog.text
|
||||
assert "'attr_list[1].name' changed to 'Ho'" not in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.attr_list[1].name = "Ho"
|
||||
assert "'attr_list[0].name' changed to 'Ho'" not in caplog.text
|
||||
assert "'attr_list[1].name' changed to 'Ho'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
assert service_instance.attr_list_2[0] == service_instance.attr
|
||||
assert service_instance.attr_list_2[0] == service_instance.attr_list_2[1]
|
||||
|
||||
service_instance.attr_list_2[0].name = "Ciao"
|
||||
assert "'attr_list_2[0].name' changed to 'Ciao'" in caplog.text
|
||||
assert "'attr_list_2[1].name' changed to 'Ciao'" in caplog.text
|
||||
assert "'attr.name' changed to 'Ciao'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.attr_list_2[1].name = "Bye"
|
||||
assert "'attr_list_2[0].name' changed to 'Bye'" in caplog.text
|
||||
assert "'attr_list_2[1].name' changed to 'Bye'" in caplog.text
|
||||
assert "'attr.name' changed to 'Bye'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
|
||||
def test_nested_class_list_attributes(caplog: LogCaptureFixture) -> None:
|
||||
class SubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
subsubclass_instance = SubSubClass()
|
||||
|
||||
class SubClass(DataService):
|
||||
attr_list = [subsubclass_instance]
|
||||
|
||||
class ServiceClass(DataService):
|
||||
attr = [SubClass()]
|
||||
subattr = subsubclass_instance
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
assert service_instance.attr[0].attr_list[0] == service_instance.subattr
|
||||
|
||||
service_instance.attr[0].attr_list[0].name = "Ho"
|
||||
assert "'attr[0].attr_list[0].name' changed to 'Ho'" in caplog.text
|
||||
assert "'subattr.name' changed to 'Ho'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.subattr.name = "Hi"
|
||||
assert "'attr[0].attr_list[0].name' changed to 'Hi'" in caplog.text
|
||||
assert "'subattr.name' changed to 'Hi'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
|
||||
def test_instance_list_attributes(caplog: LogCaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
subclass_instance = SubClass()
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr_list = [SubClass() for _ in range(2)]
|
||||
self.attr_list_2 = [subclass_instance, subclass_instance]
|
||||
self.attr = subclass_instance
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
assert service_instance.attr_list[0] != service_instance.attr_list[1]
|
||||
|
||||
service_instance.attr_list[0].name = "Ho"
|
||||
assert "'attr_list[0].name' changed to 'Ho'" in caplog.text
|
||||
assert "'attr_list[1].name' changed to 'Ho'" not in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.attr_list[1].name = "Hi"
|
||||
assert "'attr_list[0].name' changed to 'Hi'" not in caplog.text
|
||||
assert "'attr_list[1].name' changed to 'Hi'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
assert service_instance.attr_list_2[0] == service_instance.attr
|
||||
assert service_instance.attr_list_2[0] == service_instance.attr_list_2[1]
|
||||
|
||||
service_instance.attr_list_2[0].name = "Ciao"
|
||||
assert "'attr.name' changed to 'Ciao'" in caplog.text
|
||||
assert "'attr_list_2[0].name' changed to 'Ciao'" in caplog.text
|
||||
assert "'attr_list_2[1].name' changed to 'Ciao'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.attr_list_2[1].name = "Bye"
|
||||
assert "'attr.name' changed to 'Bye'" in caplog.text
|
||||
assert "'attr_list_2[0].name' changed to 'Bye'" in caplog.text
|
||||
assert "'attr_list_2[1].name' changed to 'Bye'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.attr.name = "Ho"
|
||||
assert "'attr.name' changed to 'Ho'" in caplog.text
|
||||
assert "'attr_list_2[0].name' changed to 'Ho'" in caplog.text
|
||||
assert "'attr_list_2[1].name' changed to 'Ho'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
|
||||
def test_nested_instance_list_attributes(caplog: LogCaptureFixture) -> None:
|
||||
class SubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
subsubclass_instance = SubSubClass()
|
||||
|
||||
class SubClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr_list = [subsubclass_instance]
|
||||
|
||||
class ServiceClass(DataService):
|
||||
class_attr = subsubclass_instance
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr = [SubClass()]
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
assert service_instance.attr[0].attr_list[0] == service_instance.class_attr
|
||||
|
||||
service_instance.attr[0].attr_list[0].name = "Ho"
|
||||
assert "'attr[0].attr_list[0].name' changed to 'Ho'" in caplog.text
|
||||
assert "'class_attr.name' changed to 'Ho'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.class_attr.name = "Hi"
|
||||
assert "'attr[0].attr_list[0].name' changed to 'Hi'" in caplog.text
|
||||
assert "'class_attr.name' changed to 'Hi'" in caplog.text
|
||||
caplog.clear()
|
||||
@@ -1,9 +1,10 @@
|
||||
from pytest import CaptureFixture
|
||||
|
||||
from pydase import DataService
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
|
||||
def test_properties(capsys: CaptureFixture) -> None:
|
||||
def test_properties(caplog: LogCaptureFixture) -> None:
|
||||
class ServiceClass(DataService):
|
||||
_voltage = 10.0
|
||||
_current = 1.0
|
||||
@@ -28,33 +29,23 @@ def test_properties(capsys: CaptureFixture) -> None:
|
||||
def current(self, value: float) -> None:
|
||||
self._current = value
|
||||
|
||||
test_service = ServiceClass()
|
||||
test_service.voltage = 1
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.power = 1.0",
|
||||
"ServiceClass.voltage = 1.0",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
service_instance.voltage = 1.0
|
||||
|
||||
test_service.current = 12.0
|
||||
assert "'power' changed to '1.0'" in caplog.text
|
||||
assert "'voltage' changed to '1.0'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.power = 12.0",
|
||||
"ServiceClass.current = 12.0",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
service_instance.current = 12.0
|
||||
|
||||
assert "'power' changed to '12.0'" in caplog.text
|
||||
assert "'current' changed to '12.0'" in caplog.text
|
||||
|
||||
|
||||
def test_nested_properties(capsys: CaptureFixture) -> None:
|
||||
def test_nested_properties(caplog: LogCaptureFixture) -> None:
|
||||
class SubSubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
@@ -74,48 +65,36 @@ def test_nested_properties(capsys: CaptureFixture) -> None:
|
||||
def sub_name(self) -> str:
|
||||
return f"{self.class_attr.name} {self.name}"
|
||||
|
||||
test_service = ServiceClass()
|
||||
test_service.name = "Peepz"
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.name = Peepz",
|
||||
"ServiceClass.sub_name = Hello Peepz",
|
||||
"ServiceClass.subsub_name = Hello Peepz",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
service_instance.name = "Peepz"
|
||||
|
||||
test_service.class_attr.name = "Hi"
|
||||
assert "'name' changed to 'Peepz'" in caplog.text
|
||||
assert "'sub_name' changed to 'Hello Peepz'" in caplog.text
|
||||
assert "'subsub_name' changed to 'Hello Peepz'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.sub_name = Hi Peepz",
|
||||
"ServiceClass.subsub_name = Hello Peepz", # registers subclass changes
|
||||
"ServiceClass.class_attr.name = Hi",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
service_instance.class_attr.name = "Hi"
|
||||
assert service_instance.subsub_name == "Hello Peepz"
|
||||
|
||||
test_service.class_attr.class_attr.name = "Ciao"
|
||||
assert "'sub_name' changed to 'Hi Peepz'" in caplog.text
|
||||
assert "'subsub_name' " not in caplog.text # subsub_name does not depend on change
|
||||
assert "'class_attr.name' changed to 'Hi'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.sub_name = Hi Peepz", # registers subclass changes
|
||||
"ServiceClass.subsub_name = Ciao Peepz",
|
||||
"ServiceClass.class_attr.class_attr.name = Ciao",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
service_instance.class_attr.class_attr.name = "Ciao"
|
||||
|
||||
assert (
|
||||
"'sub_name' changed to" not in caplog.text
|
||||
) # sub_name does not depend on change
|
||||
assert "'subsub_name' changed to 'Ciao Peepz'" in caplog.text
|
||||
assert "'class_attr.class_attr.name' changed to 'Ciao'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
|
||||
def test_simple_list_properties(capsys: CaptureFixture) -> None:
|
||||
def test_simple_list_properties(caplog: LogCaptureFixture) -> None:
|
||||
class ServiceClass(DataService):
|
||||
list = ["Hello", "Ciao"]
|
||||
name = "World"
|
||||
@@ -124,33 +103,23 @@ def test_simple_list_properties(capsys: CaptureFixture) -> None:
|
||||
def total_name(self) -> str:
|
||||
return f"{self.list[0]} {self.name}"
|
||||
|
||||
test_service = ServiceClass()
|
||||
test_service.name = "Peepz"
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.name = Peepz",
|
||||
"ServiceClass.total_name = Hello Peepz",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
service_instance.name = "Peepz"
|
||||
|
||||
test_service.list[0] = "Hi"
|
||||
assert "'name' changed to 'Peepz'" in caplog.text
|
||||
assert "'total_name' changed to 'Hello Peepz'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.total_name = Hi Peepz",
|
||||
"ServiceClass.list[0] = Hi",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
service_instance.list[0] = "Hi"
|
||||
|
||||
assert "'total_name' changed to 'Hi Peepz'" in caplog.text
|
||||
assert "'list[0]' changed to 'Hi'" in caplog.text
|
||||
|
||||
|
||||
def test_class_list_properties(capsys: CaptureFixture) -> None:
|
||||
def test_class_list_properties(caplog: LogCaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
|
||||
@@ -162,79 +131,26 @@ def test_class_list_properties(capsys: CaptureFixture) -> None:
|
||||
def total_name(self) -> str:
|
||||
return f"{self.list[0].name} {self.name}"
|
||||
|
||||
test_service = ServiceClass()
|
||||
test_service.name = "Peepz"
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.name = Peepz",
|
||||
"ServiceClass.total_name = Hello Peepz",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
service_instance.name = "Peepz"
|
||||
|
||||
test_service.list[0].name = "Hi"
|
||||
assert "'name' changed to 'Peepz'" in caplog.text
|
||||
assert "'total_name' changed to 'Hello Peepz'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.total_name = Hi Peepz",
|
||||
"ServiceClass.list[0].name = Hi",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
service_instance.list[0].name = "Hi"
|
||||
|
||||
assert "'total_name' changed to 'Hi Peepz'" in caplog.text
|
||||
assert "'list[0].name' changed to 'Hi'" in caplog.text
|
||||
|
||||
|
||||
def test_subclass_properties(capsys: CaptureFixture) -> None:
|
||||
def test_subclass_properties(caplog: LogCaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
_voltage = 10.0
|
||||
_current = 1.0
|
||||
|
||||
@property
|
||||
def power(self) -> float:
|
||||
return self._voltage * self.current
|
||||
|
||||
@property
|
||||
def voltage(self) -> float:
|
||||
return self._voltage
|
||||
|
||||
@voltage.setter
|
||||
def voltage(self, value: float) -> None:
|
||||
self._voltage = value
|
||||
|
||||
@property
|
||||
def current(self) -> float:
|
||||
return self._current
|
||||
|
||||
@current.setter
|
||||
def current(self, value: float) -> None:
|
||||
self._current = value
|
||||
|
||||
class ServiceClass(DataService):
|
||||
class_attr = SubClass()
|
||||
|
||||
test_service = ServiceClass()
|
||||
test_service.class_attr.voltage = 10.0
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.class_attr.voltage = 10.0",
|
||||
"ServiceClass.class_attr.power = 10.0",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n"))
|
||||
assert actual_output == expected_output
|
||||
|
||||
|
||||
def test_subclass_properties(capsys: CaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
_voltage = 10.0
|
||||
_voltage = 11.0
|
||||
_current = 1.0
|
||||
|
||||
@property
|
||||
@@ -264,24 +180,19 @@ def test_subclass_properties(capsys: CaptureFixture) -> None:
|
||||
def voltage(self) -> float:
|
||||
return self.class_attr.voltage
|
||||
|
||||
test_service = ServiceClass()
|
||||
test_service.class_attr.voltage = 10.0
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
{
|
||||
"ServiceClass.class_attr.voltage = 10.0",
|
||||
"ServiceClass.class_attr.power = 10.0",
|
||||
"ServiceClass.voltage = 10.0",
|
||||
}
|
||||
)
|
||||
# using a set here as "ServiceClass.voltage = 10.0" is emitted twice. Once for
|
||||
# changing voltage, and once for changing power.
|
||||
actual_output = sorted(set(captured.out.strip().split("\n")))
|
||||
assert actual_output == expected_output
|
||||
service_instance.class_attr.voltage = 10.0
|
||||
|
||||
assert "'class_attr.voltage' changed to '10.0'" in caplog.text
|
||||
assert "'class_attr.power' changed to '10.0'" in caplog.text
|
||||
assert "'voltage' changed to '10.0'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
|
||||
def test_subclass_properties_2(capsys: CaptureFixture) -> None:
|
||||
def test_subclass_properties_2(caplog: LogCaptureFixture) -> None:
|
||||
class SubClass(DataService):
|
||||
name = "Hello"
|
||||
_voltage = 10.0
|
||||
@@ -314,27 +225,23 @@ def test_subclass_properties_2(capsys: CaptureFixture) -> None:
|
||||
def voltage(self) -> float:
|
||||
return self.class_attr[0].voltage
|
||||
|
||||
test_service = ServiceClass()
|
||||
test_service.class_attr[1].current = 10.0
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
{
|
||||
"ServiceClass.class_attr[1].current = 10.0",
|
||||
"ServiceClass.class_attr[1].power = 100.0",
|
||||
"ServiceClass.voltage = 10.0",
|
||||
}
|
||||
)
|
||||
# using a set here as "ServiceClass.voltage = 10.0" is emitted twice. Once for
|
||||
# changing current, and once for changing power. Note that the voltage property is
|
||||
# only dependent on class_attr[0] but still emits an update notification. This is
|
||||
# because every time any item in the list `test_service.class_attr` is changed,
|
||||
# a notification will be emitted.
|
||||
actual_output = sorted(set(captured.out.strip().split("\n")))
|
||||
assert actual_output == expected_output
|
||||
service_instance.class_attr[0].current = 10.0
|
||||
|
||||
assert "'class_attr[0].current' changed to '10.0'" in caplog.text
|
||||
assert "'class_attr[0].power' changed to '100.0'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.class_attr[0].voltage = 11.0
|
||||
assert "'class_attr[0].voltage' changed to '11.0'" in caplog.text
|
||||
assert "'class_attr[0].power' changed to '110.0'" in caplog.text
|
||||
assert "'voltage' changed to '11.0'" in caplog.text
|
||||
|
||||
|
||||
def test_subsubclass_properties(capsys: CaptureFixture) -> None:
|
||||
def test_subsubclass_properties(caplog: LogCaptureFixture) -> None:
|
||||
class SubSubClass(DataService):
|
||||
_voltage = 10.0
|
||||
|
||||
@@ -361,28 +268,23 @@ def test_subsubclass_properties(capsys: CaptureFixture) -> None:
|
||||
def power(self) -> float:
|
||||
return self.class_attr[0].power
|
||||
|
||||
test_service = ServiceClass()
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
test_service.class_attr[1].class_attr.voltage = 100.0
|
||||
captured = capsys.readouterr()
|
||||
expected_output = sorted(
|
||||
{
|
||||
"ServiceClass.class_attr[0].class_attr.voltage = 100.0",
|
||||
"ServiceClass.class_attr[1].class_attr.voltage = 100.0",
|
||||
"ServiceClass.class_attr[0].power = 50.0",
|
||||
"ServiceClass.class_attr[1].power = 50.0",
|
||||
"ServiceClass.power = 50.0",
|
||||
}
|
||||
)
|
||||
actual_output = sorted(set(captured.out.strip().split("\n")))
|
||||
assert actual_output == expected_output
|
||||
service_instance.class_attr[1].class_attr.voltage = 100.0
|
||||
assert "'class_attr[0].class_attr.voltage' changed to '100.0'" in caplog.text
|
||||
assert "'class_attr[1].class_attr.voltage' changed to '100.0'" in caplog.text
|
||||
assert "'class_attr[0].power' changed to '50.0'" in caplog.text
|
||||
assert "'class_attr[1].power' changed to '50.0'" in caplog.text
|
||||
assert "'power' changed to '50.0'" in caplog.text
|
||||
|
||||
|
||||
def test_subsubclass_instance_properties(capsys: CaptureFixture) -> None:
|
||||
def test_subsubclass_instance_properties(caplog: LogCaptureFixture) -> None:
|
||||
class SubSubClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
self._voltage = 10.0
|
||||
super().__init__()
|
||||
self._voltage = 10.0
|
||||
|
||||
@property
|
||||
def voltage(self) -> float:
|
||||
@@ -394,9 +296,9 @@ def test_subsubclass_instance_properties(capsys: CaptureFixture) -> None:
|
||||
|
||||
class SubClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.attr = [SubSubClass()]
|
||||
self.current = 0.5
|
||||
super().__init__()
|
||||
|
||||
@property
|
||||
def power(self) -> float:
|
||||
@@ -409,19 +311,11 @@ def test_subsubclass_instance_properties(capsys: CaptureFixture) -> None:
|
||||
def power(self) -> float:
|
||||
return self.class_attr[0].power
|
||||
|
||||
test_service = ServiceClass()
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
test_service.class_attr[1].attr[0].voltage = 100.0
|
||||
captured = capsys.readouterr()
|
||||
# again, changing an item in a list will trigger the callbacks. This is why a
|
||||
# notification for `ServiceClass.power` is emitted although it did not change its
|
||||
# value
|
||||
expected_output = sorted(
|
||||
{
|
||||
"ServiceClass.class_attr[1].attr[0].voltage = 100.0",
|
||||
"ServiceClass.class_attr[1].power = 50.0",
|
||||
"ServiceClass.power = 5.0",
|
||||
}
|
||||
)
|
||||
actual_output = sorted(set(captured.out.strip().split("\n")))
|
||||
assert actual_output == expected_output
|
||||
service_instance.class_attr[0].attr[0].voltage = 100.0
|
||||
assert "'class_attr[0].attr[0].voltage' changed to '100.0'" in caplog.text
|
||||
assert "'class_attr[0].power' changed to '50.0'" in caplog.text
|
||||
assert "'power' changed to '50.0'" in caplog.text
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
from typing import Any
|
||||
|
||||
from pytest import CaptureFixture
|
||||
|
||||
import pydase.units as u
|
||||
from pydase.data_service.data_service import DataService
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
|
||||
def test_DataService_setattr(capsys: CaptureFixture) -> None:
|
||||
def test_DataService_setattr(caplog: LogCaptureFixture) -> None:
|
||||
class ServiceClass(DataService):
|
||||
voltage = 1.0 * u.units.V
|
||||
_current: u.Quantity = 1.0 * u.units.mA
|
||||
@@ -19,40 +20,28 @@ def test_DataService_setattr(capsys: CaptureFixture) -> None:
|
||||
def current(self, value: Any) -> None:
|
||||
self._current = value
|
||||
|
||||
service = ServiceClass()
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
# You can just set floats to the Quantity objects. The DataService __setattr__ will
|
||||
# automatically convert this
|
||||
service.voltage = 10.0 # type: ignore
|
||||
service.current = 1.5
|
||||
service_instance.voltage = 10.0 * u.units.V
|
||||
service_instance.current = 1.5 * u.units.mA
|
||||
|
||||
assert service.voltage == 10.0 * u.units.V # type: ignore
|
||||
assert service.current == 1.5 * u.units.mA
|
||||
captured = capsys.readouterr()
|
||||
assert "'voltage' changed to '10.0 V'" in caplog.text
|
||||
assert "'current' changed to '1.5 mA'" in caplog.text
|
||||
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.voltage = 10.0 V",
|
||||
"ServiceClass.current = 1.5 mA",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n")) # type: ignore
|
||||
assert actual_output == expected_output
|
||||
assert service_instance.voltage == 10.0 * u.units.V
|
||||
assert service_instance.current == 1.5 * u.units.mA
|
||||
caplog.clear()
|
||||
|
||||
service.voltage = 12.0 * u.units.V # type: ignore
|
||||
service.current = 1.51 * u.units.A
|
||||
assert service.voltage == 12.0 * u.units.V # type: ignore
|
||||
assert service.current == 1.51 * u.units.A
|
||||
captured = capsys.readouterr()
|
||||
service_instance.voltage = 12.0 * u.units.V
|
||||
service_instance.current = 1.51 * u.units.A
|
||||
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.voltage = 12.0 V",
|
||||
"ServiceClass.current = 1.51 A",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n")) # type: ignore
|
||||
assert actual_output == expected_output
|
||||
assert "'voltage' changed to '12.0 V'" in caplog.text
|
||||
assert "'current' changed to '1.51 A'" in caplog.text
|
||||
|
||||
assert service_instance.voltage == 12.0 * u.units.V
|
||||
assert service_instance.current == 1.51 * u.units.A
|
||||
|
||||
|
||||
def test_convert_to_quantity() -> None:
|
||||
@@ -62,7 +51,7 @@ def test_convert_to_quantity() -> None:
|
||||
assert u.convert_to_quantity(1.0 * u.units.mV) == 1.0 * u.units.mV
|
||||
|
||||
|
||||
def test_update_DataService_attribute(capsys: CaptureFixture) -> None:
|
||||
def test_set_service_attribute_value_by_path(caplog: LogCaptureFixture) -> None:
|
||||
class ServiceClass(DataService):
|
||||
voltage = 1.0 * u.units.V
|
||||
_current: u.Quantity = 1.0 * u.units.mA
|
||||
@@ -75,41 +64,68 @@ def test_update_DataService_attribute(capsys: CaptureFixture) -> None:
|
||||
def current(self, value: Any) -> None:
|
||||
self._current = value
|
||||
|
||||
service = ServiceClass()
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
service.update_DataService_attribute(
|
||||
path_list=[], attr_name="voltage", value=1.0 * u.units.mV
|
||||
state_manager.set_service_attribute_value_by_path(
|
||||
path="voltage", value=1.0 * u.units.mV
|
||||
)
|
||||
captured = capsys.readouterr()
|
||||
assert "'voltage' changed to '1.0 mV'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.voltage = 1.0 mV",
|
||||
]
|
||||
state_manager.set_service_attribute_value_by_path(path="voltage", value=2)
|
||||
|
||||
assert "'voltage' changed to '2.0 mV'" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
state_manager.set_service_attribute_value_by_path(
|
||||
path="voltage", value={"magnitude": 123, "unit": "kV"}
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n")) # type: ignore
|
||||
assert actual_output == expected_output
|
||||
assert "'voltage' changed to '123.0 kV'" in caplog.text
|
||||
|
||||
service.update_DataService_attribute(path_list=[], attr_name="voltage", value=2)
|
||||
captured = capsys.readouterr()
|
||||
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.voltage = 2.0 mV",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n")) # type: ignore
|
||||
assert actual_output == expected_output
|
||||
def test_autoconvert_offset_to_baseunit() -> None:
|
||||
import pint
|
||||
|
||||
service.update_DataService_attribute(
|
||||
path_list=[], attr_name="voltage", value={"magnitude": 123, "unit": "kV"}
|
||||
)
|
||||
captured = capsys.readouterr()
|
||||
assert u.units.autoconvert_offset_to_baseunit is True
|
||||
|
||||
expected_output = sorted(
|
||||
[
|
||||
"ServiceClass.voltage = 123.0 kV",
|
||||
]
|
||||
)
|
||||
actual_output = sorted(captured.out.strip().split("\n")) # type: ignore
|
||||
assert actual_output == expected_output
|
||||
try:
|
||||
quantity = 10 * u.units.degC
|
||||
except pint.errors.OffsetUnitCalculusError as exc:
|
||||
assert False, f"Offset unit raises exception {exc}"
|
||||
|
||||
|
||||
def test_loading_from_json(caplog: LogCaptureFixture) -> None:
|
||||
"""This function tests if the quantity read from the json description is actually
|
||||
passed as a quantity to the property setter."""
|
||||
JSON_DICT = {
|
||||
"some_unit": {
|
||||
"type": "Quantity",
|
||||
"value": {"magnitude": 10.0, "unit": "A"},
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
}
|
||||
}
|
||||
|
||||
class ServiceClass(DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._unit: u.Quantity = 1 * u.units.A
|
||||
|
||||
@property
|
||||
def some_unit(self) -> u.Quantity:
|
||||
return self._unit
|
||||
|
||||
@some_unit.setter
|
||||
def some_unit(self, value: u.Quantity) -> None:
|
||||
assert isinstance(value, u.Quantity)
|
||||
self._unit = value
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
service_instance.load_DataService_from_JSON(JSON_DICT)
|
||||
|
||||
assert "'some_unit' changed to '10.0 A'" in caplog.text
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import toml
|
||||
|
||||
import pydase.version
|
||||
import toml
|
||||
|
||||
|
||||
def test_project_version() -> None:
|
||||
|
||||
@@ -1,70 +1,6 @@
|
||||
import pytest
|
||||
|
||||
from pydase.utils.helpers import (
|
||||
extract_dict_or_list_entry,
|
||||
get_nested_value_from_DataService_by_path_and_key,
|
||||
is_property_attribute,
|
||||
)
|
||||
|
||||
# Sample data for the tests
|
||||
data_sample = {
|
||||
"attr1": {"type": "bool", "value": False, "readonly": False, "doc": None},
|
||||
"class_attr": {
|
||||
"type": "MyClass",
|
||||
"value": {"sub_attr": {"type": "float", "value": 20.5}},
|
||||
},
|
||||
"list_attr": {
|
||||
"type": "list",
|
||||
"value": [
|
||||
{"type": "int", "value": 0, "readonly": False, "doc": None},
|
||||
{"type": "float", "value": 1.0, "readonly": False, "doc": None},
|
||||
],
|
||||
"readonly": False,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
# Tests for extract_dict_or_list_entry
|
||||
def test_extract_dict_with_valid_list_index() -> None:
|
||||
result = extract_dict_or_list_entry(data_sample, "list_attr[1]")
|
||||
assert result == {"type": "float", "value": 1.0, "readonly": False, "doc": None}
|
||||
|
||||
|
||||
def test_extract_dict_without_list_index() -> None:
|
||||
result = extract_dict_or_list_entry(data_sample, "attr1")
|
||||
assert result == {"type": "bool", "value": False, "readonly": False, "doc": None}
|
||||
|
||||
|
||||
def test_extract_dict_with_invalid_key() -> None:
|
||||
result = extract_dict_or_list_entry(data_sample, "attr_not_exist")
|
||||
assert result is None
|
||||
|
||||
|
||||
def test_extract_dict_with_invalid_list_index() -> None:
|
||||
result = extract_dict_or_list_entry(data_sample, "list_attr[5]")
|
||||
assert result is None
|
||||
|
||||
|
||||
# Tests for get_nested_value_from_DataService_by_path_and_key
|
||||
def test_get_nested_value_with_default_key() -> None:
|
||||
result = get_nested_value_from_DataService_by_path_and_key(
|
||||
data_sample, "list_attr[0]"
|
||||
)
|
||||
assert result == 0
|
||||
|
||||
|
||||
def test_get_nested_value_with_custom_key() -> None:
|
||||
result = get_nested_value_from_DataService_by_path_and_key(
|
||||
data_sample, "class_attr.sub_attr", "type"
|
||||
)
|
||||
assert result == "float"
|
||||
|
||||
|
||||
def test_get_nested_value_with_invalid_path() -> None:
|
||||
result = get_nested_value_from_DataService_by_path_and_key(
|
||||
data_sample, "class_attr.nonexistent_attr"
|
||||
)
|
||||
assert result is None
|
||||
from pydase.utils.helpers import is_property_attribute
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
|
||||
71
tests/utils/test_logging.py
Normal file
71
tests/utils/test_logging.py
Normal file
@@ -0,0 +1,71 @@
|
||||
import logging
|
||||
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
from pydase.utils.logging import setup_logging
|
||||
|
||||
|
||||
def test_log_error(caplog: LogCaptureFixture):
|
||||
setup_logging("ERROR")
|
||||
logger = logging.getLogger()
|
||||
logger.debug("This is a debug message")
|
||||
logger.info("This is an info message")
|
||||
logger.warning("This is a warning message")
|
||||
logger.error("This is an error message")
|
||||
|
||||
# Check the log records as well as the level.
|
||||
assert "This is a debug message" not in caplog.text
|
||||
assert "This is an info message" not in caplog.text
|
||||
assert "This is a warning message" not in caplog.text
|
||||
assert "This is an error message" in caplog.text
|
||||
assert any(record.levelname == "ERROR" for record in caplog.records)
|
||||
|
||||
|
||||
def test_log_warning(caplog: LogCaptureFixture):
|
||||
setup_logging("WARNING")
|
||||
logger = logging.getLogger()
|
||||
logger.debug("This is a debug message")
|
||||
logger.info("This is an info message")
|
||||
logger.warning("This is a warning message")
|
||||
logger.error("This is an error message")
|
||||
|
||||
# Check the log records as well as the level.
|
||||
assert "This is a debug message" not in caplog.text
|
||||
assert "This is an info message" not in caplog.text
|
||||
assert "This is a warning message" in caplog.text
|
||||
assert "This is an error message" in caplog.text
|
||||
assert any(record.levelname == "ERROR" for record in caplog.records)
|
||||
|
||||
|
||||
def test_log_debug(caplog: LogCaptureFixture):
|
||||
setup_logging("DEBUG")
|
||||
logger = (
|
||||
logging.getLogger()
|
||||
) # Get the root logger or replace with the appropriate logger.
|
||||
logger.debug("This is a debug message")
|
||||
logger.info("This is an info message")
|
||||
logger.warning("This is a warning message")
|
||||
logger.error("This is an error message")
|
||||
|
||||
# Now, check that the message is in the log records.
|
||||
assert "This is a debug message" in caplog.text
|
||||
assert "This is an info message" in caplog.text
|
||||
assert "This is a warning message" in caplog.text
|
||||
assert "This is an error message" in caplog.text
|
||||
|
||||
|
||||
def test_log_info(caplog: LogCaptureFixture):
|
||||
setup_logging("INFO")
|
||||
logger = (
|
||||
logging.getLogger()
|
||||
) # Get the root logger or replace with the appropriate logger.
|
||||
logger.debug("This is a debug message")
|
||||
logger.info("This is an info message")
|
||||
logger.warning("This is a warning message")
|
||||
logger.error("This is an error message")
|
||||
|
||||
# Now, check that the message is in the log records.
|
||||
assert "This is a debug message" not in caplog.text
|
||||
assert "This is an info message" in caplog.text
|
||||
assert "This is a warning message" in caplog.text
|
||||
assert "This is an error message" in caplog.text
|
||||
410
tests/utils/test_serializer.py
Normal file
410
tests/utils/test_serializer.py
Normal file
@@ -0,0 +1,410 @@
|
||||
import asyncio
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
import pydase
|
||||
import pydase.units as u
|
||||
import pytest
|
||||
from pydase.components.coloured_enum import ColouredEnum
|
||||
from pydase.utils.serializer import (
|
||||
SerializationPathError,
|
||||
dump,
|
||||
get_nested_dict_by_path,
|
||||
get_next_level_dict_by_key,
|
||||
set_nested_value_by_path,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"test_input, expected",
|
||||
[
|
||||
(1, {"type": "int", "value": 1, "readonly": False, "doc": None}),
|
||||
(1.0, {"type": "float", "value": 1.0, "readonly": False, "doc": None}),
|
||||
(True, {"type": "bool", "value": True, "readonly": False, "doc": None}),
|
||||
(
|
||||
u.Quantity(10, "m"),
|
||||
{
|
||||
"type": "Quantity",
|
||||
"value": {"magnitude": 10, "unit": "meter"},
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
},
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_dump(test_input: Any, expected: dict[str, Any]) -> None:
|
||||
assert dump(test_input) == expected
|
||||
|
||||
|
||||
def test_enum_serialize() -> None:
|
||||
class EnumClass(Enum):
|
||||
FOO = "foo"
|
||||
BAR = "bar"
|
||||
|
||||
class EnumAttribute(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.some_enum = EnumClass.FOO
|
||||
|
||||
class EnumPropertyWithoutSetter(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._some_enum = EnumClass.FOO
|
||||
|
||||
@property
|
||||
def some_enum(self) -> EnumClass:
|
||||
return self._some_enum
|
||||
|
||||
class EnumPropertyWithSetter(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._some_enum = EnumClass.FOO
|
||||
|
||||
@property
|
||||
def some_enum(self) -> EnumClass:
|
||||
return self._some_enum
|
||||
|
||||
@some_enum.setter
|
||||
def some_enum(self, value: EnumClass) -> None:
|
||||
self._some_enum = value
|
||||
|
||||
assert dump(EnumAttribute())["value"] == {
|
||||
"some_enum": {
|
||||
"type": "Enum",
|
||||
"value": "FOO",
|
||||
"enum": {"FOO": "foo", "BAR": "bar"},
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
}
|
||||
}
|
||||
assert dump(EnumPropertyWithoutSetter())["value"] == {
|
||||
"some_enum": {
|
||||
"type": "Enum",
|
||||
"value": "FOO",
|
||||
"enum": {"FOO": "foo", "BAR": "bar"},
|
||||
"readonly": True,
|
||||
"doc": None,
|
||||
}
|
||||
}
|
||||
assert dump(EnumPropertyWithSetter())["value"] == {
|
||||
"some_enum": {
|
||||
"type": "Enum",
|
||||
"value": "FOO",
|
||||
"enum": {"FOO": "foo", "BAR": "bar"},
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_ColouredEnum_serialize() -> None:
|
||||
class Status(ColouredEnum):
|
||||
PENDING = "#FFA500"
|
||||
RUNNING = "#0000FF80"
|
||||
PAUSED = "rgb(169, 169, 169)"
|
||||
RETRYING = "rgba(255, 255, 0, 0.3)"
|
||||
COMPLETED = "hsl(120, 100%, 50%)"
|
||||
FAILED = "hsla(0, 100%, 50%, 0.7)"
|
||||
CANCELLED = "SlateGray"
|
||||
|
||||
assert dump(Status.FAILED) == {
|
||||
"type": "ColouredEnum",
|
||||
"value": "FAILED",
|
||||
"enum": {
|
||||
"CANCELLED": "SlateGray",
|
||||
"COMPLETED": "hsl(120, 100%, 50%)",
|
||||
"FAILED": "hsla(0, 100%, 50%, 0.7)",
|
||||
"PAUSED": "rgb(169, 169, 169)",
|
||||
"PENDING": "#FFA500",
|
||||
"RETRYING": "rgba(255, 255, 0, 0.3)",
|
||||
"RUNNING": "#0000FF80",
|
||||
},
|
||||
"readonly": False,
|
||||
"doc": None,
|
||||
}
|
||||
|
||||
|
||||
def test_method_serialization() -> None:
|
||||
class ClassWithMethod(pydase.DataService):
|
||||
def some_method(self) -> str:
|
||||
return "some method"
|
||||
|
||||
async def some_task(self, sleep_time: int) -> None:
|
||||
while True:
|
||||
await asyncio.sleep(sleep_time)
|
||||
|
||||
instance = ClassWithMethod()
|
||||
instance.start_some_task(10) # type: ignore
|
||||
|
||||
assert dump(instance)["value"] == {
|
||||
"some_method": {
|
||||
"async": False,
|
||||
"doc": None,
|
||||
"parameters": {},
|
||||
"readonly": True,
|
||||
"type": "method",
|
||||
"value": None,
|
||||
},
|
||||
"some_task": {
|
||||
"async": True,
|
||||
"doc": None,
|
||||
"parameters": {"sleep_time": "int"},
|
||||
"readonly": True,
|
||||
"type": "method",
|
||||
"value": {"sleep_time": 10},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def test_methods_with_type_hints() -> None:
|
||||
def method_without_type_hint(arg_without_type_hint) -> None:
|
||||
pass
|
||||
|
||||
def method_with_type_hint(some_argument: int) -> None:
|
||||
pass
|
||||
|
||||
def method_with_union_type_hint(some_argument: int | float) -> None:
|
||||
pass
|
||||
|
||||
assert dump(method_without_type_hint) == {
|
||||
"async": False,
|
||||
"doc": None,
|
||||
"parameters": {"arg_without_type_hint": None},
|
||||
"readonly": True,
|
||||
"type": "method",
|
||||
"value": None,
|
||||
}
|
||||
|
||||
assert dump(method_with_type_hint) == {
|
||||
"async": False,
|
||||
"doc": None,
|
||||
"parameters": {"some_argument": "int"},
|
||||
"readonly": True,
|
||||
"type": "method",
|
||||
"value": None,
|
||||
}
|
||||
|
||||
assert dump(method_with_union_type_hint) == {
|
||||
"async": False,
|
||||
"doc": None,
|
||||
"parameters": {"some_argument": "int | float"},
|
||||
"readonly": True,
|
||||
"type": "method",
|
||||
"value": None,
|
||||
}
|
||||
|
||||
|
||||
def test_list_serialization() -> None:
|
||||
class MySubclass(pydase.DataService):
|
||||
_name = "hi"
|
||||
bool_attr = True
|
||||
int_attr = 1
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
class ClassWithListAttribute(pydase.DataService):
|
||||
list_attr = [1, MySubclass()]
|
||||
|
||||
instance = ClassWithListAttribute()
|
||||
|
||||
assert dump(instance)["value"] == {
|
||||
"list_attr": {
|
||||
"doc": None,
|
||||
"readonly": False,
|
||||
"type": "list",
|
||||
"value": [
|
||||
{"doc": None, "readonly": False, "type": "int", "value": 1},
|
||||
{
|
||||
"doc": None,
|
||||
"readonly": False,
|
||||
"type": "DataService",
|
||||
"value": {
|
||||
"bool_attr": {
|
||||
"doc": None,
|
||||
"readonly": False,
|
||||
"type": "bool",
|
||||
"value": True,
|
||||
},
|
||||
"int_attr": {
|
||||
"doc": None,
|
||||
"readonly": False,
|
||||
"type": "int",
|
||||
"value": 1,
|
||||
},
|
||||
"name": {
|
||||
"doc": None,
|
||||
"readonly": True,
|
||||
"type": "str",
|
||||
"value": "hi",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_dict_serialization() -> None:
|
||||
class MyClass(pydase.DataService):
|
||||
name = "my class"
|
||||
|
||||
test_dict = {
|
||||
"int_key": 1,
|
||||
"float_key": 1.0,
|
||||
"bool_key": True,
|
||||
"Quantity_key": 1.0 * u.units.s,
|
||||
"DataService_key": MyClass(),
|
||||
}
|
||||
|
||||
assert dump(test_dict) == {
|
||||
"doc": None,
|
||||
"readonly": False,
|
||||
"type": "dict",
|
||||
"value": {
|
||||
"DataService_key": {
|
||||
"doc": None,
|
||||
"readonly": False,
|
||||
"type": "DataService",
|
||||
"value": {
|
||||
"name": {
|
||||
"doc": None,
|
||||
"readonly": False,
|
||||
"type": "str",
|
||||
"value": "my class",
|
||||
}
|
||||
},
|
||||
},
|
||||
"Quantity_key": {
|
||||
"doc": None,
|
||||
"readonly": False,
|
||||
"type": "Quantity",
|
||||
"value": {"magnitude": 1.0, "unit": "s"},
|
||||
},
|
||||
"bool_key": {"doc": None, "readonly": False, "type": "bool", "value": True},
|
||||
"float_key": {
|
||||
"doc": None,
|
||||
"readonly": False,
|
||||
"type": "float",
|
||||
"value": 1.0,
|
||||
},
|
||||
"int_key": {"doc": None, "readonly": False, "type": "int", "value": 1},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def setup_dict():
|
||||
class MySubclass(pydase.DataService):
|
||||
attr3 = 1.0
|
||||
list_attr = [1.0, 1]
|
||||
|
||||
class ServiceClass(pydase.DataService):
|
||||
attr1 = 1.0
|
||||
attr2 = MySubclass()
|
||||
attr_list = [0, 1, MySubclass()]
|
||||
|
||||
return ServiceClass().serialize()
|
||||
|
||||
|
||||
def test_update_attribute(setup_dict):
|
||||
set_nested_value_by_path(setup_dict, "attr1", 15)
|
||||
assert setup_dict["attr1"]["value"] == 15
|
||||
|
||||
|
||||
def test_update_nested_attribute(setup_dict):
|
||||
set_nested_value_by_path(setup_dict, "attr2.attr3", 25.0)
|
||||
assert setup_dict["attr2"]["value"]["attr3"]["value"] == 25.0
|
||||
|
||||
|
||||
def test_update_list_entry(setup_dict):
|
||||
set_nested_value_by_path(setup_dict, "attr_list[1]", 20)
|
||||
assert setup_dict["attr_list"]["value"][1]["value"] == 20
|
||||
|
||||
|
||||
def test_update_list_append(setup_dict):
|
||||
set_nested_value_by_path(setup_dict, "attr_list[3]", 20)
|
||||
assert setup_dict["attr_list"]["value"][3]["value"] == 20
|
||||
|
||||
|
||||
def test_update_invalid_list_index(setup_dict, caplog: pytest.LogCaptureFixture):
|
||||
set_nested_value_by_path(setup_dict, "attr_list[10]", 30)
|
||||
assert (
|
||||
"Error occured trying to change 'attr_list[10]': list index "
|
||||
"out of range" in caplog.text
|
||||
)
|
||||
|
||||
|
||||
def test_update_invalid_path(setup_dict, caplog: pytest.LogCaptureFixture):
|
||||
set_nested_value_by_path(setup_dict, "invalid_path", 30)
|
||||
assert (
|
||||
"Error occured trying to access the key 'invalid_path': it is either "
|
||||
"not present in the current dictionary or its value does not contain "
|
||||
"a 'value' key." in caplog.text
|
||||
)
|
||||
|
||||
|
||||
def test_update_list_inside_class(setup_dict):
|
||||
set_nested_value_by_path(setup_dict, "attr2.list_attr[1]", 40)
|
||||
assert setup_dict["attr2"]["value"]["list_attr"]["value"][1]["value"] == 40
|
||||
|
||||
|
||||
def test_update_class_attribute_inside_list(setup_dict):
|
||||
set_nested_value_by_path(setup_dict, "attr_list[2].attr3", 50)
|
||||
assert setup_dict["attr_list"]["value"][2]["value"]["attr3"]["value"] == 50
|
||||
|
||||
|
||||
def test_get_next_level_attribute_nested_dict(setup_dict):
|
||||
nested_dict = get_next_level_dict_by_key(setup_dict, "attr1")
|
||||
assert nested_dict == setup_dict["attr1"]
|
||||
|
||||
|
||||
def test_get_next_level_list_entry_nested_dict(setup_dict):
|
||||
nested_dict = get_next_level_dict_by_key(setup_dict, "attr_list[0]")
|
||||
assert nested_dict == setup_dict["attr_list"]["value"][0]
|
||||
|
||||
|
||||
def test_get_next_level_invalid_path_nested_dict(setup_dict):
|
||||
with pytest.raises(SerializationPathError):
|
||||
get_next_level_dict_by_key(setup_dict, "invalid_path")
|
||||
|
||||
|
||||
def test_get_next_level_invalid_list_index(setup_dict):
|
||||
with pytest.raises(SerializationPathError):
|
||||
get_next_level_dict_by_key(setup_dict, "attr_list[10]")
|
||||
|
||||
|
||||
def test_get_attribute(setup_dict):
|
||||
nested_dict = get_nested_dict_by_path(setup_dict, "attr1")
|
||||
assert nested_dict["value"] == 1.0
|
||||
|
||||
|
||||
def test_get_nested_attribute(setup_dict):
|
||||
nested_dict = get_nested_dict_by_path(setup_dict, "attr2.attr3")
|
||||
assert nested_dict["value"] == 1.0
|
||||
|
||||
|
||||
def test_get_list_entry(setup_dict):
|
||||
nested_dict = get_nested_dict_by_path(setup_dict, "attr_list[1]")
|
||||
assert nested_dict["value"] == 1
|
||||
|
||||
|
||||
def test_get_list_inside_class(setup_dict):
|
||||
nested_dict = get_nested_dict_by_path(setup_dict, "attr2.list_attr[1]")
|
||||
assert nested_dict["value"] == 1.0
|
||||
|
||||
|
||||
def test_get_class_attribute_inside_list(setup_dict):
|
||||
nested_dict = get_nested_dict_by_path(setup_dict, "attr_list[2].attr3")
|
||||
assert nested_dict["value"] == 1.0
|
||||
|
||||
|
||||
def test_get_invalid_list_index(setup_dict, caplog: pytest.LogCaptureFixture):
|
||||
with pytest.raises(SerializationPathError):
|
||||
get_nested_dict_by_path(setup_dict, "attr_list[10]")
|
||||
|
||||
|
||||
def test_get_invalid_path(setup_dict, caplog: pytest.LogCaptureFixture):
|
||||
with pytest.raises(SerializationPathError):
|
||||
get_nested_dict_by_path(setup_dict, "invalid_path")
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user