38 Commits

Author SHA1 Message Date
Mose Müller
75c5bc6877 updates to version v0.7.4 2024-03-19 08:28:13 +01:00
Mose Müller
a606194c48 Merge pull request #116 from tiqi-group/feat/customisable_frontend_src
feat: adds option for custom frontend_src directory
2024-03-14 16:45:04 +01:00
Mose Müller
5da7bdea78 updates Readme 2024-03-14 16:43:59 +01:00
Mose Müller
c6a52914c5 adds option for custom frontend_src directory 2024-03-14 16:30:57 +01:00
Mose Müller
ae68a89f48 Merge pull request #115 from tiqi-group/feat/add_custom_css_default_response
feat: add "custom.css" endpoint default Response
2024-03-12 14:41:09 +01:00
Mose Müller
386e69b048 custom.css endpoint defaults to empty Response now 2024-03-12 14:37:12 +01:00
Mose Müller
8310a51a74 Merge pull request #113 from tiqi-group/feat/frontend_display_toggle
Feat: frontend display toggle
2024-03-12 07:40:50 +01:00
Mose Müller
2a8cbf7a4a updates Readme 2024-03-12 07:35:45 +01:00
Mose Müller
857b81d213 updates tests 2024-03-11 15:37:56 +01:00
Mose Müller
25834534ad npm run build 2024-03-11 15:37:56 +01:00
Mose Müller
4a948f9155 adds "display" web settings support to frontend
Components with a "display" equals false in the web settings will not be displayed
in the frontend.
2024-03-11 15:37:56 +01:00
Mose Müller
df42f41f53 adds "display" key in web settings 2024-03-11 15:37:56 +01:00
Mose Müller
b8d421eb90 fix: readonly value is not overwritten anymore when changing attribute type 2024-03-11 15:37:26 +01:00
Mose Müller
877ab42905 fixes webserver (apparently FastAPI needs the correct type hints...) 2024-03-07 17:52:03 +01:00
Mose Müller
51ffd8be4d simplifies serializer logic 2024-03-06 18:56:15 +01:00
Mose Müller
a88a0c6133 Updates python dependencies 2024-03-06 18:28:11 +01:00
Mose Müller
390a375777 Merge pull request #111 from tiqi-group/refactor/updates_serialized_object_type_hints
updates type hints for serialized objects
2024-03-06 18:27:21 +01:00
Mose Müller
4aee899dbe updates type hints for serialized objects 2024-03-06 18:23:26 +01:00
Mose Müller
c7d452d7db adds tests for Image component 2024-03-05 16:32:20 +01:00
Mose Müller
b7926b730d updates version to v0.7.3 2024-03-05 16:32:07 +01:00
Mose Müller
0c175fc706 Merge pull request #109 from tiqi-group/fix/task_disappears_after_changing_state
Fix/task disappears after changing state
2024-03-05 16:08:55 +01:00
Mose Müller
7d21bca8b1 adds test for changing task state 2024-03-05 16:05:09 +01:00
Mose Müller
d1628ae8c9 fixes updating task state 2024-03-05 16:05:01 +01:00
Mose Müller
441658ebc1 Merge pull request #108 from tiqi-group/fix/cache_update_on_type_change
Fix/cache update on type change
2024-03-05 14:44:19 +01:00
Mose Müller
99c7ad0ec8 updates serializer tests 2024-03-05 14:28:53 +01:00
Mose Müller
24a01c0982 removes keys from cache entry if they are not part of the new value serialization 2024-03-05 14:17:05 +01:00
Mose Müller
b8a52c2e6a only update cache and execute notification callbacks if attribute is public and has changed 2024-03-05 13:56:02 +01:00
Mose Müller
7aacc21010 removes processing of value from sio_callback (cached value is up-to-date already) 2024-03-05 13:54:24 +01:00
Mose Müller
8787cb0509 get cached value before executing custom notification callbacks 2024-03-05 13:53:41 +01:00
Mose Müller
8971cebfcd adds todos 2024-03-05 13:24:54 +01:00
Mose Müller
f2cf0d9c1a fixes update of cache when the type has changed
When an attribute changed from, say, a quantity to an enum, the "enum" key of the serialization was not added to the
cache, and thus the frontend was not able to render the enum.
2024-03-05 13:23:26 +01:00
Mose Müller
36c863e845 Merge pull request #107 from tiqi-group/fix/update_frontend_before_setting_state
Fix/update frontend before setting state
2024-03-05 13:20:54 +01:00
Mose Müller
836c1e14df npm run build 2024-03-05 13:19:10 +01:00
Mose Müller
dba036c6b3 do not try to update state if it is not yet set
This happens when the backend pushes updates before the frontend has received and set the initial state while loading the page.
2024-03-05 13:19:02 +01:00
Mose Müller
8b1f1ef1b1 updates to version v0.7.2 2024-03-04 17:46:44 +01:00
Mose Müller
698db4881b Merge pull request #106 from tiqi-group/fix/enum_sio_callback
fixes sio callback when attribute changes to an enum which was not present before
2024-03-04 17:38:33 +01:00
Mose Müller
d709d43d75 ignores complexity of sio_server setup (will be changed anyway soon) 2024-03-04 17:36:09 +01:00
Mose Müller
691bf809cb fixes sio callback when attribute changes to an enum which was not present before 2024-03-04 17:32:45 +01:00
22 changed files with 477 additions and 146 deletions


@@ -18,6 +18,8 @@
- [DataService Instances (Nested Classes)](#dataservice-instances-nested-classes)
- [Custom Components (`pydase.components`)](#custom-components-pydasecomponents)
- [`DeviceConnection`](#deviceconnection)
- [Customizing Connection Logic](#customizing-connection-logic)
- [Reconnection Interval](#reconnection-interval)
- [`Image`](#image)
- [`NumberSlider`](#numberslider)
- [`ColouredEnum`](#colouredenum)
@@ -30,6 +32,7 @@
- [Customizing the Web Interface](#customizing-the-web-interface)
- [Enhancing the Web Interface Style with Custom CSS](#enhancing-the-web-interface-style-with-custom-css)
- [Tailoring Frontend Component Layout](#tailoring-frontend-component-layout)
- [Specifying a Custom Frontend Source](#specifying-a-custom-frontend-source)
- [Logging in pydase](#logging-in-pydase)
- [Changing the Log Level](#changing-the-log-level)
- [Documentation](#documentation)
@@ -831,10 +834,33 @@ Please ensure that the CSS file path is accessible from the server's running loc
`pydase` enables users to customize the frontend layout via the `web_settings.json` file. Each key in the file corresponds to the full access path of a public attribute, property, or method of the exposed service, written in dot-notation.
- **Custom Display Names**: Modify the `"displayName"` value in the file to change how each component appears in the frontend.
<!-- - **Adjustable Component Order**: The `"index"` values determine the order of components. Alter these values to rearrange the components as desired. -->
- **Control Component Visibility**: Utilize the `"display"` key-value pair to control whether a component is rendered in the frontend. Set the value to `true` to make the component visible or `false` to hide it.
<!-- - **Adjustable Component Order**: The `"displayOrder"` values determine the order of components. Alter these values to rearrange the components as desired. -->
The `web_settings.json` file will be stored in the directory specified by `SERVICE_CONFIG_DIR`. You can generate a `web_settings.json` file by setting the `GENERATE_WEB_SETTINGS` environment variable to `True`. For more information, see the [configuration section](#configuring-pydase-via-environment-variables).
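As a rough illustration (the `device` and `device.voltage` entries below are made up for this sketch and not part of `pydase`), the settings map one-to-one onto a Python dict:

```python
import json

# Hypothetical web settings for a service exposing a "device" with a
# "voltage" attribute: rename both entries and hide the nested attribute.
web_settings = {
    "device": {"displayName": "Device", "display": True},
    "device.voltage": {"displayName": "Voltage (V)", "display": False},
}

# The actual web_settings.json lives in the directory given by SERVICE_CONFIG_DIR.
with open("web_settings.json", "w") as f:
    json.dump(web_settings, f, indent=4)
```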
### Specifying a Custom Frontend Source
To further personalize your web interface, you can provide `pydase` with a custom frontend GUI. To do so, use the `frontend_src` keyword argument of `pydase.Server`:
```python
from pathlib import Path

import pydase


class MyService(pydase.DataService):
    # Service definition
    ...


if __name__ == "__main__":
    service = MyService()
    pydase.Server(
        service,
        frontend_src=Path("path/to/your/frontend/directory"),
    ).run()
```
## Logging in pydase
The `pydase` library organizes its loggers on a per-module basis, mirroring the Python package hierarchy. This structured approach allows for granular control over logging levels and behaviour across different parts of the library.
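As a minimal sketch of what this per-module structure enables (plain standard-library logging; the module name is taken from this changeset):

```python
import logging

# Keep pydase at INFO overall, but enable DEBUG output for the serializer
# module only; logger names mirror the package hierarchy.
logging.basicConfig(level=logging.INFO)
logging.getLogger("pydase").setLevel(logging.INFO)
logging.getLogger("pydase.utils.serializer").setLevel(logging.DEBUG)
```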


@@ -32,6 +32,9 @@ const reducer = (state: State, action: Action): State => {
case 'SET_DATA':
return action.data;
case 'UPDATE_ATTRIBUTE': {
if (state === null) {
return null;
}
return {
...state,
value: setNestedValueByPath(state.value, action.fullAccessPath, action.newValue)


@@ -4,5 +4,6 @@ export const WebSettingsContext = createContext<Record<string, WebSetting>>({});
export type WebSetting = {
displayName: string;
display: boolean;
index: number;
};


@@ -62,8 +62,13 @@ export const GenericComponent = React.memo(
const webSettings = useContext(WebSettingsContext);
let displayName = name;
if (webSettings[fullAccessPath] && webSettings[fullAccessPath].displayName) {
displayName = webSettings[fullAccessPath].displayName;
if (webSettings[fullAccessPath]) {
if (webSettings[fullAccessPath].display === false) {
return null;
}
if (webSettings[fullAccessPath].displayName) {
displayName = webSettings[fullAccessPath].displayName;
}
}
function changeCallback(

poetry.lock (generated)

@@ -684,17 +684,18 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp
[[package]]
name = "mkdocs-autorefs"
version = "0.5.0"
version = "1.0.1"
description = "Automatically link across pages in MkDocs."
optional = false
python-versions = ">=3.8"
files = [
{file = "mkdocs_autorefs-0.5.0-py3-none-any.whl", hash = "sha256:7930fcb8ac1249f10e683967aeaddc0af49d90702af111a5e390e8b20b3d97ff"},
{file = "mkdocs_autorefs-0.5.0.tar.gz", hash = "sha256:9a5054a94c08d28855cfab967ada10ed5be76e2bfad642302a610b252c3274c0"},
{file = "mkdocs_autorefs-1.0.1-py3-none-any.whl", hash = "sha256:aacdfae1ab197780fb7a2dac92ad8a3d8f7ca8049a9cbe56a4218cd52e8da570"},
{file = "mkdocs_autorefs-1.0.1.tar.gz", hash = "sha256:f684edf847eced40b570b57846b15f0bf57fb93ac2c510450775dcf16accb971"},
]
[package.dependencies]
Markdown = ">=3.3"
markupsafe = ">=2.0.1"
mkdocs = ">=1.1"
[[package]]
@@ -1035,13 +1036,13 @@ ssh = ["paramiko"]
[[package]]
name = "pydantic"
version = "2.6.2"
version = "2.6.3"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
files = [
{file = "pydantic-2.6.2-py3-none-any.whl", hash = "sha256:37a5432e54b12fecaa1049c5195f3d860a10e01bdfd24f1840ef14bd0d3aeab3"},
{file = "pydantic-2.6.2.tar.gz", hash = "sha256:a09be1c3d28f3abe37f8a78af58284b236a92ce520105ddc91a6d29ea1176ba7"},
{file = "pydantic-2.6.3-py3-none-any.whl", hash = "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a"},
{file = "pydantic-2.6.3.tar.gz", hash = "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f"},
]
[package.dependencies]
@@ -1145,13 +1146,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pymdown-extensions"
version = "10.7"
version = "10.7.1"
description = "Extension pack for Python Markdown."
optional = false
python-versions = ">=3.8"
files = [
{file = "pymdown_extensions-10.7-py3-none-any.whl", hash = "sha256:6ca215bc57bc12bf32b414887a68b810637d039124ed9b2e5bd3325cbb2c050c"},
{file = "pymdown_extensions-10.7.tar.gz", hash = "sha256:c0d64d5cf62566f59e6b2b690a4095c931107c250a8c8e1351c1de5f6b036deb"},
{file = "pymdown_extensions-10.7.1-py3-none-any.whl", hash = "sha256:f5cc7000d7ff0d1ce9395d216017fa4df3dde800afb1fb72d1c7d3fd35e710f4"},
{file = "pymdown_extensions-10.7.1.tar.gz", hash = "sha256:c70e146bdd83c744ffc766b4671999796aba18842b268510a329f7f64700d584"},
]
[package.dependencies]
@@ -1163,13 +1164,13 @@ extra = ["pygments (>=2.12)"]
[[package]]
name = "pyparsing"
version = "3.1.1"
version = "3.1.2"
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
optional = false
python-versions = ">=3.6.8"
files = [
{file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"},
{file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"},
{file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"},
{file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"},
]
[package.extras]
@@ -1177,13 +1178,13 @@ diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pyright"
version = "1.1.351"
version = "1.1.352"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
files = [
{file = "pyright-1.1.351-py3-none-any.whl", hash = "sha256:83b44b25396ae20661fc5f133c3fce30928ff1296d4f2e5ff0bca5fcf03eb89d"},
{file = "pyright-1.1.351.tar.gz", hash = "sha256:01124099714eebd7f6525d8cbfa350626b56dfaf771cfcd55c03e69f0f1efbbd"},
{file = "pyright-1.1.352-py3-none-any.whl", hash = "sha256:0040cf173c6a60704e553bfd129dfe54de59cc76d0b2b80f77cfab4f50701d64"},
{file = "pyright-1.1.352.tar.gz", hash = "sha256:a621c0dfbcf1291b3610641a07380fefaa1d0e182890a1b2a7f13b446e8109a9"},
]
[package.dependencies]
@@ -1270,13 +1271,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"]
[[package]]
name = "python-dateutil"
version = "2.8.2"
version = "2.9.0.post0"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
]
[package.dependencies]
@@ -1437,28 +1438,28 @@ plumbum = "*"
[[package]]
name = "ruff"
version = "0.1.15"
version = "0.2.2"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"},
{file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"},
{file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"},
{file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"},
{file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"},
{file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"},
{file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"},
{file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"},
{file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"},
{file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"},
{file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"},
{file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"},
{file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"},
{file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"},
{file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"},
{file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"},
{file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"},
{file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0a9efb032855ffb3c21f6405751d5e147b0c6b631e3ca3f6b20f917572b97eb6"},
{file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d450b7fbff85913f866a5384d8912710936e2b96da74541c82c1b458472ddb39"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecd46e3106850a5c26aee114e562c329f9a1fbe9e4821b008c4404f64ff9ce73"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e22676a5b875bd72acd3d11d5fa9075d3a5f53b877fe7b4793e4673499318ba"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1695700d1e25a99d28f7a1636d85bafcc5030bba9d0578c0781ba1790dbcf51c"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b0c232af3d0bd8f521806223723456ffebf8e323bd1e4e82b0befb20ba18388e"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f63d96494eeec2fc70d909393bcd76c69f35334cdbd9e20d089fb3f0640216ca"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a61ea0ff048e06de273b2e45bd72629f470f5da8f71daf09fe481278b175001"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1439c8f407e4f356470e54cdecdca1bd5439a0673792dbe34a2b0a551a2fe3"},
{file = "ruff-0.2.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:940de32dc8853eba0f67f7198b3e79bc6ba95c2edbfdfac2144c8235114d6726"},
{file = "ruff-0.2.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0c126da55c38dd917621552ab430213bdb3273bb10ddb67bc4b761989210eb6e"},
{file = "ruff-0.2.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3b65494f7e4bed2e74110dac1f0d17dc8e1f42faaa784e7c58a98e335ec83d7e"},
{file = "ruff-0.2.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1ec49be4fe6ddac0503833f3ed8930528e26d1e60ad35c2446da372d16651ce9"},
{file = "ruff-0.2.2-py3-none-win32.whl", hash = "sha256:d920499b576f6c68295bc04e7b17b6544d9d05f196bb3aac4358792ef6f34325"},
{file = "ruff-0.2.2-py3-none-win_amd64.whl", hash = "sha256:cc9a91ae137d687f43a44c900e5d95e9617cb37d4c989e462980ba27039d239d"},
{file = "ruff-0.2.2-py3-none-win_arm64.whl", hash = "sha256:c9d15fc41e6054bfc7200478720570078f0b41c9ae4f010bcc16bd6f4d1aacdd"},
{file = "ruff-0.2.2.tar.gz", hash = "sha256:e62ed7f36b3068a30ba39193a14274cd706bc486fad521276458022f7bccb31d"},
]
[[package]]
@@ -1654,4 +1655,4 @@ h11 = ">=0.9.0,<1"
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "e09fb1271e128382de19a4f3c15547f5171cf468b5078a6ee0d5529212802739"
content-hash = "a14d2f729ab70da439f59c5732eae1a069719ab4a261967a8d25fab3f933da4e"


@@ -1,6 +1,6 @@
[tool.poetry]
name = "pydase"
version = "0.7.1"
version = "0.7.4"
description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases."
authors = ["Mose Mueller <mosmuell@ethz.ch>"]
readme = "README.md"
@@ -29,7 +29,7 @@ mypy = "^1.4.1"
matplotlib = "^3.7.2"
pyright = "^1.1.323"
pytest-mock = "^3.11.1"
ruff = "^0.1.5"
ruff = "^0.2.0"
pytest-asyncio = "^0.23.2"
[tool.poetry.group.docs]


@@ -17,6 +17,7 @@ from pydase.utils.helpers import (
is_property_attribute,
)
from pydase.utils.serializer import (
SerializedObject,
Serializer,
)
@@ -125,7 +126,7 @@ class DataService(rpyc.Service, AbstractDataService):
# allow all other attributes
setattr(self, name, value)
def serialize(self) -> dict[str, dict[str, Any]]:
def serialize(self) -> SerializedObject:
"""
Serializes the instance into a dictionary, preserving the structure of the
instance.


@@ -1,9 +1,10 @@
import logging
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING, Any, cast
from pydase.utils.serializer import (
SerializationPathError,
SerializationValueError,
SerializedObject,
get_nested_dict_by_path,
set_nested_value_by_path,
)
@@ -16,12 +17,12 @@ logger = logging.getLogger(__name__)
class DataServiceCache:
def __init__(self, service: "DataService") -> None:
self._cache: dict[str, Any] = {}
self._cache: SerializedObject
self.service = service
self._initialize_cache()
@property
def cache(self) -> dict[str, Any]:
def cache(self) -> SerializedObject:
return self._cache
def _initialize_cache(self) -> None:
@@ -30,10 +31,22 @@ class DataServiceCache:
self._cache = self.service.serialize()
def update_cache(self, full_access_path: str, value: Any) -> None:
set_nested_value_by_path(self._cache["value"], full_access_path, value)
set_nested_value_by_path(
cast(dict[str, SerializedObject], self._cache["value"]),
full_access_path,
value,
)
def get_value_dict_from_cache(self, full_access_path: str) -> dict[str, Any]:
def get_value_dict_from_cache(self, full_access_path: str) -> SerializedObject:
try:
return get_nested_dict_by_path(self._cache["value"], full_access_path)
return get_nested_dict_by_path(
cast(dict[str, SerializedObject], self._cache["value"]),
full_access_path,
)
except (SerializationPathError, SerializationValueError, KeyError):
return {}
return {
"value": None,
"type": None,
"doc": None,
"readonly": False,
}


@@ -9,7 +9,7 @@ from pydase.observer_pattern.observer.property_observer import (
PropertyObserver,
)
from pydase.utils.helpers import get_object_attr_from_path_list
from pydase.utils.serializer import dump
from pydase.utils.serializer import SerializedObject, dump
logger = logging.getLogger(__name__)
@@ -18,7 +18,7 @@ class DataServiceObserver(PropertyObserver):
def __init__(self, state_manager: StateManager) -> None:
self.state_manager = state_manager
self._notification_callbacks: list[
Callable[[str, Any, dict[str, Any]], None]
Callable[[str, Any, SerializedObject], None]
] = []
super().__init__(state_manager.service)
@@ -42,10 +42,16 @@ class DataServiceObserver(PropertyObserver):
):
logger.debug("'%s' changed to '%s'", full_access_path, value)
self._update_cache_value(full_access_path, value, cached_value_dict)
self._update_cache_value(full_access_path, value, cached_value_dict)
for callback in self._notification_callbacks:
callback(full_access_path, value, cached_value_dict)
cached_value_dict = deepcopy(
self.state_manager._data_service_cache.get_value_dict_from_cache(
full_access_path
)
)
for callback in self._notification_callbacks:
callback(full_access_path, value, cached_value_dict)
if isinstance(value, ObservableObject):
self._update_property_deps_dict()
@@ -53,7 +59,10 @@ class DataServiceObserver(PropertyObserver):
self._notify_dependent_property_changes(full_access_path)
def _update_cache_value(
self, full_access_path: str, value: Any, cached_value_dict: dict[str, Any]
self,
full_access_path: str,
value: Any,
cached_value_dict: SerializedObject | dict[str, Any],
) -> None:
value_dict = dump(value)
if cached_value_dict != {}:
@@ -87,7 +96,7 @@ class DataServiceObserver(PropertyObserver):
)
def add_notification_callback(
self, callback: Callable[[str, Any, dict[str, Any]], None]
self, callback: Callable[[str, Any, SerializedObject], None]
) -> None:
"""
Registers a callback function to be invoked upon attribute changes in the

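For orientation, a minimal sketch of how the updated notification-callback signature is used, following the test setup in this changeset (the `voltage` attribute is illustrative, not part of `pydase`):

```python
from typing import Any

import pydase
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.utils.serializer import SerializedObject


class MyService(pydase.DataService):
    voltage = 1.0  # illustrative attribute


service = MyService()
observer = DataServiceObserver(StateManager(service))


def on_change(
    full_access_path: str, value: Any, cached_value_dict: SerializedObject
) -> None:
    # Invoked after the cache entry for full_access_path has been refreshed.
    print(f"{full_access_path} changed to {value!r}")


observer.add_notification_callback(on_change)
service.voltage = 2.0  # triggers on_change with the up-to-date cache entry
```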

@@ -13,6 +13,7 @@ from pydase.utils.helpers import (
parse_list_attr_and_index,
)
from pydase.utils.serializer import (
SerializedObject,
dump,
generate_serialized_data_paths,
get_nested_dict_by_path,
@@ -114,10 +115,17 @@ class StateManager:
self._data_service_cache = DataServiceCache(self.service)
@property
def cache(self) -> dict[str, Any]:
def cache(self) -> SerializedObject:
"""Returns the cached DataService state."""
return self._data_service_cache.cache
@property
def cache_value(self) -> dict[str, SerializedObject]:
"""Returns the "value" value of the DataService serialization."""
return cast(
dict[str, SerializedObject], self._data_service_cache.cache["value"]
)
def save_state(self) -> None:
"""
Saves the DataService's current state to a JSON file defined by `self.filename`.
@@ -126,7 +134,7 @@ class StateManager:
if self.filename is not None:
with open(self.filename, "w") as f:
json.dump(self.cache["value"], f, indent=4)
json.dump(self.cache_value, f, indent=4)
else:
logger.info(
"State manager was not initialised with a filename. Skipping "
@@ -191,7 +199,7 @@ class StateManager:
value: The new value to set for the attribute.
"""
current_value_dict = get_nested_dict_by_path(self.cache["value"], path)
current_value_dict = get_nested_dict_by_path(self.cache_value, path)
# This will also filter out methods as they are 'read-only'
if current_value_dict["readonly"]:
@@ -216,10 +224,12 @@ class StateManager:
return dump(value_object)["value"] != current_value
def __convert_value_if_needed(
self, value: Any, current_value_dict: dict[str, Any]
self, value: Any, current_value_dict: SerializedObject
) -> Any:
if current_value_dict["type"] == "Quantity":
return u.convert_to_quantity(value, current_value_dict["value"]["unit"])
return u.convert_to_quantity(
value, cast(dict[str, Any], current_value_dict["value"])["unit"]
)
if current_value_dict["type"] == "float" and not isinstance(value, float):
return float(value)
return value
@@ -234,7 +244,7 @@ class StateManager:
# Update path to reflect the attribute without list indices
path = ".".join([*parent_path_list, attr_name])
attr_cache_type = get_nested_dict_by_path(self.cache["value"], path)["type"]
attr_cache_type = get_nested_dict_by_path(self.cache_value, path)["type"]
# Traverse the object according to the path parts
target_obj = get_object_attr_from_path_list(self.service, parent_path_list)
@@ -273,7 +283,7 @@ class StateManager:
return has_decorator
cached_serialization_dict = get_nested_dict_by_path(
self.cache["value"], full_access_path
self.cache_value, full_access_path
)
if cached_serialization_dict["value"] == "method":

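For orientation, a minimal usage sketch of the new `cache_value` property (the attribute name is made up; construction follows the tests in this changeset):

```python
import pydase
from pydase.data_service.state_manager import StateManager


class MyService(pydase.DataService):
    attr1 = 1.0  # illustrative attribute


state_manager = StateManager(MyService())

# Full serialized service state: a SerializedObject with "value", "type",
# "doc" and "readonly" keys.
full_state = state_manager.cache

# Shortcut to the attribute-name -> serialized-entry mapping.
print(state_manager.cache_value["attr1"]["value"])  # -> 1.0
```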

@@ -1,13 +1,13 @@
{
"files": {
"main.css": "/static/css/main.7ef670d5.css",
"main.js": "/static/js/main.6d1d080e.js",
"main.js": "/static/js/main.97ef73ea.js",
"index.html": "/index.html",
"main.7ef670d5.css.map": "/static/css/main.7ef670d5.css.map",
"main.6d1d080e.js.map": "/static/js/main.6d1d080e.js.map"
"main.97ef73ea.js.map": "/static/js/main.97ef73ea.js.map"
},
"entrypoints": [
"static/css/main.7ef670d5.css",
"static/js/main.6d1d080e.js"
"static/js/main.97ef73ea.js"
]
}


@@ -1 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="Web site displaying a pydase UI."/><link rel="apple-touch-icon" href="/logo192.png"/><link rel="manifest" href="/manifest.json"/><title>pydase App</title><script defer="defer" src="/static/js/main.6d1d080e.js"></script><link href="/static/css/main.7ef670d5.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="Web site displaying a pydase UI."/><link rel="apple-touch-icon" href="/logo192.png"/><link rel="manifest" href="/manifest.json"/><title>pydase App</title><script defer="defer" src="/static/js/main.97ef73ea.js"></script><link href="/static/css/main.7ef670d5.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -9,7 +9,7 @@ from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.utils.helpers import get_object_attr_from_path_list
from pydase.utils.logging import SocketIOHandler
from pydase.utils.serializer import dump
from pydase.utils.serializer import SerializedObject
logger = logging.getLogger(__name__)
@@ -94,14 +94,9 @@ def setup_sio_server(
# Add notification callback to observer
def sio_callback(
full_access_path: str, value: Any, cached_value_dict: dict[str, Any]
full_access_path: str, value: Any, cached_value_dict: SerializedObject
) -> None:
if cached_value_dict != {}:
serialized_value = dump(value)
if cached_value_dict["type"] != "method":
cached_value_dict["type"] = serialized_value["type"]
cached_value_dict["value"] = serialized_value["value"]
async def notify() -> None:
try:


@@ -6,7 +6,7 @@ from typing import Any
import socketio # type: ignore[import-untyped]
import uvicorn
from fastapi import FastAPI
from fastapi import FastAPI, Response
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
@@ -70,7 +70,7 @@ class WebServer:
enable_cors: bool = True,
config_dir: Path = ServiceConfig().config_dir,
generate_web_settings: bool = WebServerConfig().generate_web_settings,
**kwargs: Any,
frontend_src: Path = Path(__file__).parent.parent.parent / "frontend",
) -> None:
self.observer = data_service_observer
self.state_manager = self.observer.state_manager
@@ -79,6 +79,7 @@ class WebServer:
self.host = host
self.css = css
self.enable_cors = enable_cors
self.frontend_src = frontend_src
self._service_config_dir = config_dir
self._generate_web_settings = generate_web_settings
self._loop: asyncio.AbstractEventLoop
@@ -126,11 +127,14 @@ class WebServer:
@property
def web_settings(self) -> dict[str, dict[str, Any]]:
current_web_settings = self._get_web_settings_from_file()
for path in generate_serialized_data_paths(self.state_manager.cache["value"]):
for path in generate_serialized_data_paths(self.state_manager.cache_value):
if path in current_web_settings:
continue
current_web_settings[path] = {"displayName": path.split(".")[-1]}
current_web_settings[path] = {
"displayName": path.split(".")[-1],
"display": True,
}
return current_web_settings
@@ -161,23 +165,24 @@ class WebServer:
@app.get("/service-properties")
def service_properties() -> dict[str, Any]:
return self.state_manager.cache
return self.state_manager.cache # type: ignore
@app.get("/web-settings")
def web_settings() -> dict[str, Any]:
return self.web_settings
# exposing custom.css file provided by user
if self.css is not None:
@app.get("/custom.css")
async def styles() -> FileResponse:
@app.get("/custom.css")
async def styles() -> Response:
if self.css is not None:
return FileResponse(str(self.css))
return Response(content="", media_type="text/css")
app.mount(
"/",
StaticFiles(
directory=Path(__file__).parent.parent.parent / "frontend",
directory=self.frontend_src,
html=True,
),
)


@@ -1,9 +1,15 @@
from __future__ import annotations
import inspect
import logging
import sys
from collections.abc import Callable
from enum import Enum
from typing import Any, TypedDict
from typing import TYPE_CHECKING, Any, TypedDict, cast
if sys.version_info < (3, 11):
from typing_extensions import NotRequired
else:
from typing import NotRequired
import pydase.units as u
from pydase.data_service.abstract_data_service import AbstractDataService
@@ -16,6 +22,9 @@ from pydase.utils.helpers import (
render_in_frontend,
)
if TYPE_CHECKING:
from collections.abc import Callable
logger = logging.getLogger(__name__)
@@ -27,10 +36,31 @@ class SerializationValueError(Exception):
pass
class SignatureDict(TypedDict):
parameters: dict[str, dict[str, Any]]
return_annotation: dict[str, Any]
SerializedObject = TypedDict(
"SerializedObject",
{
"name": NotRequired[str],
"value": "list[SerializedObject] | float | int | str | bool | dict[str, Any] | None", # noqa: E501
"type": str | None,
"doc": str | None,
"readonly": bool,
"enum": NotRequired[dict[str, Any]],
"async": NotRequired[bool],
"signature": NotRequired[SignatureDict],
"frontend_render": NotRequired[bool],
},
)
class Serializer:
@staticmethod
def serialize_object(obj: Any) -> dict[str, Any]:
result: dict[str, Any] = {}
def serialize_object(obj: Any) -> SerializedObject:
result: SerializedObject
if isinstance(obj, AbstractDataService):
result = Serializer._serialize_data_service(obj)
@@ -67,7 +97,7 @@ class Serializer:
return result
@staticmethod
def _serialize_enum(obj: Enum) -> dict[str, Any]:
def _serialize_enum(obj: Enum) -> SerializedObject:
import pydase.components.coloured_enum
value = obj.name
@@ -91,7 +121,7 @@ class Serializer:
}
@staticmethod
def _serialize_quantity(obj: u.Quantity) -> dict[str, Any]:
def _serialize_quantity(obj: u.Quantity) -> SerializedObject:
obj_type = "Quantity"
readonly = False
doc = get_attribute_doc(obj)
@@ -104,7 +134,7 @@ class Serializer:
}
@staticmethod
def _serialize_dict(obj: dict[str, Any]) -> dict[str, Any]:
def _serialize_dict(obj: dict[str, Any]) -> SerializedObject:
obj_type = "dict"
readonly = False
doc = get_attribute_doc(obj)
@@ -117,7 +147,7 @@ class Serializer:
}
@staticmethod
def _serialize_list(obj: list[Any]) -> dict[str, Any]:
def _serialize_list(obj: list[Any]) -> SerializedObject:
obj_type = "list"
readonly = False
doc = get_attribute_doc(obj)
@@ -130,7 +160,7 @@ class Serializer:
}
@staticmethod
def _serialize_method(obj: Callable[..., Any]) -> dict[str, Any]:
def _serialize_method(obj: Callable[..., Any]) -> SerializedObject:
obj_type = "method"
value = None
readonly = True
@@ -141,16 +171,12 @@ class Serializer:
sig = inspect.signature(obj)
sig.return_annotation
class SignatureDict(TypedDict):
parameters: dict[str, dict[str, Any]]
return_annotation: dict[str, Any]
signature: SignatureDict = {"parameters": {}, "return_annotation": {}}
for k, v in sig.parameters.items():
signature["parameters"][k] = {
"annotation": str(v.annotation),
"default": dump(v.default) if v.default != inspect._empty else {},
"default": {} if v.default == inspect._empty else dump(v.default),
}
return {
@@ -164,7 +190,7 @@ class Serializer:
}
@staticmethod
def _serialize_data_service(obj: AbstractDataService) -> dict[str, Any]:
def _serialize_data_service(obj: AbstractDataService) -> SerializedObject:
readonly = False
doc = get_attribute_doc(obj)
obj_type = "DataService"
@@ -184,7 +210,7 @@ class Serializer:
# Get the difference between the two sets
derived_only_attr_set = obj_attr_set - data_service_attr_set
value = {}
value: dict[str, SerializedObject] = {}
# Iterate over attributes, properties, class attributes, and methods
for key in sorted(derived_only_attr_set):
@@ -224,12 +250,12 @@ class Serializer:
}
def dump(obj: Any) -> dict[str, Any]:
def dump(obj: Any) -> SerializedObject:
return Serializer.serialize_object(obj)
def set_nested_value_by_path(
serialization_dict: dict[str, Any], path: str, value: Any
serialization_dict: dict[str, SerializedObject], path: str, value: Any
) -> None:
"""
Set a value in a nested dictionary structure, which conforms to the serialization
@@ -251,53 +277,67 @@ def set_nested_value_by_path(
"""
parent_path_parts, attr_name = path.split(".")[:-1], path.split(".")[-1]
current_dict: dict[str, Any] = serialization_dict
current_dict: dict[str, SerializedObject] = serialization_dict
try:
for path_part in parent_path_parts:
current_dict = get_next_level_dict_by_key(
next_level_serialized_object = get_next_level_dict_by_key(
current_dict, path_part, allow_append=False
)
current_dict = current_dict["value"]
current_dict = cast(
dict[str, SerializedObject], next_level_serialized_object["value"]
)
current_dict = get_next_level_dict_by_key(
next_level_serialized_object = get_next_level_dict_by_key(
current_dict, attr_name, allow_append=True
)
except (SerializationPathError, SerializationValueError, KeyError) as e:
logger.error(e)
return
# setting the new value
serialized_value = dump(value)
if "readonly" in current_dict:
if current_dict["type"] != "method":
current_dict["type"] = serialized_value["type"]
current_dict["value"] = serialized_value["value"]
if next_level_serialized_object["type"] == "method": # state change of task
next_level_serialized_object["value"] = (
value.name if isinstance(value, Enum) else None
)
else:
current_dict.update(serialized_value)
serialized_value = dump(value)
keys_to_keep = set(serialized_value.keys())
# TODO: you might also want to pop "doc" from serialized_value if
# it is overwriting the value of the current dict
serialized_value.pop("readonly") # type: ignore
next_level_serialized_object.update(serialized_value)
# removes keys that are not present in the serialized new value
for key in list(next_level_serialized_object.keys()):
if key not in keys_to_keep:
next_level_serialized_object.pop(key, None) # type: ignore
def get_nested_dict_by_path(
serialization_dict: dict[str, Any],
serialization_dict: dict[str, SerializedObject],
path: str,
) -> dict[str, Any]:
) -> SerializedObject:
parent_path_parts, attr_name = path.split(".")[:-1], path.split(".")[-1]
current_dict: dict[str, Any] = serialization_dict
current_dict: dict[str, SerializedObject] = serialization_dict
for path_part in parent_path_parts:
current_dict = get_next_level_dict_by_key(
next_level_serialized_object = get_next_level_dict_by_key(
current_dict, path_part, allow_append=False
)
current_dict = current_dict["value"]
current_dict = cast(
dict[str, SerializedObject], next_level_serialized_object["value"]
)
return get_next_level_dict_by_key(current_dict, attr_name, allow_append=False)
def get_next_level_dict_by_key(
serialization_dict: dict[str, Any],
serialization_dict: dict[str, SerializedObject],
attr_name: str,
*,
allow_append: bool = False,
) -> dict[str, Any]:
) -> SerializedObject:
"""
Retrieve a nested dictionary entry or list item from a data structure serialized
with `pydase.utils.serializer.Serializer`.
@@ -322,14 +362,30 @@ def get_next_level_dict_by_key(
try:
if index is not None:
serialization_dict = serialization_dict[attr_name]["value"][index]
next_level_serialized_object = cast(
list[SerializedObject], serialization_dict[attr_name]["value"]
)[index]
else:
serialization_dict = serialization_dict[attr_name]
next_level_serialized_object = serialization_dict[attr_name]
except IndexError as e:
if allow_append and index == len(serialization_dict[attr_name]["value"]):
if (
index is not None
and allow_append
and index
== len(cast(list[SerializedObject], serialization_dict[attr_name]["value"]))
):
# Appending to list
serialization_dict[attr_name]["value"].append({})
serialization_dict = serialization_dict[attr_name]["value"][index]
cast(list[SerializedObject], serialization_dict[attr_name]["value"]).append(
{
"value": None,
"type": None,
"doc": None,
"readonly": False,
}
)
next_level_serialized_object = cast(
list[SerializedObject], serialization_dict[attr_name]["value"]
)[index]
else:
raise SerializationPathError(
f"Error occured trying to change '{attr_name}[{index}]': {e}"
@@ -341,17 +397,17 @@ def get_next_level_dict_by_key(
"a 'value' key."
)
if not isinstance(serialization_dict, dict):
if not isinstance(next_level_serialized_object, dict):
raise SerializationValueError(
f"Expected a dictionary at '{attr_name}', but found type "
f"'{type(serialization_dict).__name__}' instead."
f"'{type(next_level_serialized_object).__name__}' instead."
)
return serialization_dict
return next_level_serialized_object
def generate_serialized_data_paths(
data: dict[str, dict[str, Any]], parent_path: str = ""
data: dict[str, Any], parent_path: str = ""
) -> list[str]:
"""
Generate a list of access paths for all attributes in a dictionary representing
@@ -391,7 +447,7 @@ def generate_serialized_data_paths(
return paths
def serialized_dict_is_nested_object(serialized_dict: dict[str, Any]) -> bool:
def serialized_dict_is_nested_object(serialized_dict: SerializedObject) -> bool:
return (
serialized_dict["type"] != "Quantity"
and isinstance(serialized_dict["value"], dict)

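For orientation, a small usage sketch of the path helpers changed above, mirroring the serializer tests further down (class and attribute names are illustrative):

```python
import pydase
from pydase.utils.serializer import (
    dump,
    get_nested_dict_by_path,
    set_nested_value_by_path,
)


class SubService(pydase.DataService):
    attr3 = 1.0


class MyService(pydase.DataService):
    attr1 = 1.0
    attr2 = SubService()


# dump(...) returns a SerializedObject; its "value" maps attribute names to
# their own serialized entries.
serialized = dump(MyService())["value"]

# Replace a nested entry; keys that are not part of the new value's
# serialization (e.g. "enum") are removed from the cached entry.
set_nested_value_by_path(serialized, "attr2.attr3", 25.0)
assert get_nested_dict_by_path(serialized, "attr2.attr3")["value"] == 25.0
```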

@@ -0,0 +1,141 @@
import logging
import pydase
import pydase.components
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.utils.serializer import dump
from pytest import LogCaptureFixture
logger = logging.getLogger(__name__)
def test_image_functions(caplog: LogCaptureFixture) -> None:
class MyService(pydase.DataService):
def __init__(self) -> None:
super().__init__()
self.my_image = pydase.components.Image()
service_instance = MyService()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
service_instance.my_image.load_from_url("https://cataas.com/cat")
caplog.clear()
def test_image_serialization() -> None:
class MyService(pydase.DataService):
def __init__(self) -> None:
super().__init__()
self.my_image = pydase.components.Image()
assert dump(MyService()) == {
"name": "MyService",
"type": "DataService",
"value": {
"my_image": {
"name": "Image",
"type": "Image",
"value": {
"format": {
"type": "str",
"value": "",
"readonly": True,
"doc": None,
},
"load_from_base64": {
"type": "method",
"value": None,
"readonly": True,
"doc": None,
"async": False,
"signature": {
"parameters": {
"value_": {
"annotation": "<class 'bytes'>",
"default": {},
},
"format_": {
"annotation": "str | None",
"default": {
"type": "NoneType",
"value": None,
"readonly": False,
"doc": None,
},
},
},
"return_annotation": {},
},
"frontend_render": False,
},
"load_from_matplotlib_figure": {
"type": "method",
"value": None,
"readonly": True,
"doc": None,
"async": False,
"signature": {
"parameters": {
"fig": {"annotation": "Figure", "default": {}},
"format_": {
"annotation": "<class 'str'>",
"default": {
"type": "str",
"value": "png",
"readonly": False,
"doc": None,
},
},
},
"return_annotation": {},
},
"frontend_render": False,
},
"load_from_path": {
"type": "method",
"value": None,
"readonly": True,
"doc": None,
"async": False,
"signature": {
"parameters": {
"path": {
"annotation": "pathlib.Path | str",
"default": {},
}
},
"return_annotation": {},
},
"frontend_render": False,
},
"load_from_url": {
"type": "method",
"value": None,
"readonly": True,
"doc": None,
"async": False,
"signature": {
"parameters": {
"url": {"annotation": "<class 'str'>", "default": {}}
},
"return_annotation": {},
},
"frontend_render": False,
},
"value": {
"type": "str",
"value": "",
"readonly": True,
"doc": None,
},
},
"readonly": False,
"doc": None,
}
},
"readonly": False,
"doc": None,
}


@@ -26,8 +26,8 @@ def test_web_settings() -> None:
observer = DataServiceObserver(state_manager)
with tempfile.TemporaryDirectory() as tmp:
web_settings = {
"attr_1": {"displayName": "Attribute"},
"attr_1.name": {"displayName": "Attribute name"},
"attr_1": {"displayName": "Attribute", "display": False},
"attr_1.name": {"displayName": "Attribute name", "display": True},
}
web_settings_file = Path(tmp) / "web_settings.json"
@@ -44,8 +44,11 @@ def test_web_settings() -> None:
new_web_settings = server.web_settings
# existing entries are not overwritten, new entries are appended
assert new_web_settings == {**web_settings, "added": {"displayName": "added"}}
assert new_web_settings == {
**web_settings,
"added": {"displayName": "added", "display": True},
}
assert json.loads(web_settings_file.read_text()) == {
**web_settings,
"added": {"displayName": "added"},
"added": {"displayName": "added", "display": True},
}


@@ -1,4 +1,5 @@
import asyncio
import enum
from enum import Enum
from typing import Any
@@ -10,6 +11,7 @@ from pydase.data_service.task_manager import TaskStatus
from pydase.utils.decorators import frontend
from pydase.utils.serializer import (
SerializationPathError,
SerializedObject,
dump,
get_nested_dict_by_path,
get_next_level_dict_by_key,
@@ -18,6 +20,13 @@ from pydase.utils.serializer import (
)
class MyEnum(enum.Enum):
"""MyEnum description"""
RUNNING = "running"
FINISHED = "finished"
@pytest.mark.parametrize(
"test_input, expected",
[
@@ -396,33 +405,86 @@ def setup_dict() -> dict[str, Any]:
class ServiceClass(pydase.DataService):
attr1 = 1.0
attr2 = MySubclass()
enum_attr = MyEnum.RUNNING
attr_list = [0, 1, MySubclass()]
def my_task(self) -> None:
pass
return ServiceClass().serialize()["value"]
def test_update_attribute(setup_dict) -> None:
def test_update_attribute(setup_dict: dict[str, Any]) -> None:
set_nested_value_by_path(setup_dict, "attr1", 15)
assert setup_dict["attr1"]["value"] == 15
def test_update_nested_attribute(setup_dict) -> None:
def test_update_nested_attribute(setup_dict: dict[str, Any]) -> None:
set_nested_value_by_path(setup_dict, "attr2.attr3", 25.0)
assert setup_dict["attr2"]["value"]["attr3"]["value"] == 25.0
def test_update_list_entry(setup_dict) -> None:
def test_update_float_attribute_to_enum(setup_dict: dict[str, Any]) -> None:
set_nested_value_by_path(setup_dict, "attr2.attr3", MyEnum.RUNNING)
assert setup_dict["attr2"]["value"]["attr3"] == {
"doc": "MyEnum description",
"enum": {"FINISHED": "finished", "RUNNING": "running"},
"readonly": False,
"type": "Enum",
"value": "RUNNING",
}
def test_update_enum_attribute_to_float(setup_dict: dict[str, Any]) -> None:
set_nested_value_by_path(setup_dict, "enum_attr", 1.01)
assert setup_dict["enum_attr"] == {
"doc": None,
"readonly": False,
"type": "float",
"value": 1.01,
}
def test_update_task_state(setup_dict: dict[str, Any]) -> None:
assert setup_dict["my_task"] == {
"async": False,
"doc": None,
"frontend_render": False,
"readonly": True,
"signature": {"parameters": {}, "return_annotation": {}},
"type": "method",
"value": None,
}
set_nested_value_by_path(setup_dict, "my_task", TaskStatus.RUNNING)
assert setup_dict["my_task"] == {
"async": False,
"doc": None,
"frontend_render": False,
"readonly": True,
"signature": {"parameters": {}, "return_annotation": {}},
"type": "method",
"value": "RUNNING",
}
def test_update_list_entry(setup_dict: dict[str, SerializedObject]) -> None:
set_nested_value_by_path(setup_dict, "attr_list[1]", 20)
assert setup_dict["attr_list"]["value"][1]["value"] == 20
def test_update_list_append(setup_dict) -> None:
set_nested_value_by_path(setup_dict, "attr_list[3]", 20)
assert setup_dict["attr_list"]["value"][3]["value"] == 20
def test_update_list_append(setup_dict: dict[str, SerializedObject]) -> None:
set_nested_value_by_path(setup_dict, "attr_list[3]", MyEnum.RUNNING)
assert setup_dict["attr_list"]["value"][3] == {
"doc": "MyEnum description",
"enum": {"FINISHED": "finished", "RUNNING": "running"},
"readonly": False,
"type": "Enum",
"value": "RUNNING",
}
def test_update_invalid_list_index(
setup_dict, caplog: pytest.LogCaptureFixture
setup_dict: dict[str, Any], caplog: pytest.LogCaptureFixture
) -> None:
set_nested_value_by_path(setup_dict, "attr_list[10]", 30)
assert (