534 Commits

Author SHA1 Message Date
Mose Müller
75c5bc6877 updates to version v0.7.4 2024-03-19 08:28:13 +01:00
Mose Müller
a606194c48 Merge pull request #116 from tiqi-group/feat/customisable_frontend_src
feat: adds option for custom frontend_src directory
2024-03-14 16:45:04 +01:00
Mose Müller
5da7bdea78 updates Readme 2024-03-14 16:43:59 +01:00
Mose Müller
c6a52914c5 adds option for custom frontend_src directory 2024-03-14 16:30:57 +01:00
Mose Müller
ae68a89f48 Merge pull request #115 from tiqi-group/feat/add_custom_css_default_response
feat: add "custom.css" endpoint default Response
2024-03-12 14:41:09 +01:00
Mose Müller
386e69b048 custom.css endpoint defaults to empty Response now 2024-03-12 14:37:12 +01:00
Mose Müller
8310a51a74 Merge pull request #113 from tiqi-group/feat/frontend_display_toggle
Feat: frontend display toggle
2024-03-12 07:40:50 +01:00
Mose Müller
2a8cbf7a4a updates Readme 2024-03-12 07:35:45 +01:00
Mose Müller
857b81d213 updates tests 2024-03-11 15:37:56 +01:00
Mose Müller
25834534ad npm run build 2024-03-11 15:37:56 +01:00
Mose Müller
4a948f9155 adds "display" web settings support to frontend
Components whose "display" key is set to false in the web settings will not be displayed
in the frontend.
2024-03-11 15:37:56 +01:00
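
For context, a minimal sketch of what such a web settings entry might look like (the file layout, the access-path key, and the key names are assumptions based on the commits above):

```python
# Hypothetical sketch of a generated web settings file; setting "display" to
# false for an access path hides that component in the frontend.
import json

web_settings = {
    "my_service.some_attribute": {
        "displayName": "Some attribute",
        "display": False,  # component is not rendered in the frontend
    }
}

with open("web_settings.json", "w") as file:
    json.dump(web_settings, file, indent=4)
```
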
Mose Müller
df42f41f53 adds "display" key in web settings 2024-03-11 15:37:56 +01:00
Mose Müller
b8d421eb90 fix: readonly value is not overwritten anymore when changing attribute type 2024-03-11 15:37:26 +01:00
Mose Müller
877ab42905 fixes webserver (apparently FastAPI needs the correct type hints...) 2024-03-07 17:52:03 +01:00
Mose Müller
51ffd8be4d simplifies serializer logic 2024-03-06 18:56:15 +01:00
Mose Müller
a88a0c6133 Updates python dependencies 2024-03-06 18:28:11 +01:00
Mose Müller
390a375777 Merge pull request #111 from tiqi-group/refactor/updates_serialized_object_type_hints
updates type hints for serialized objects
2024-03-06 18:27:21 +01:00
Mose Müller
4aee899dbe updates type hints for serialized objects 2024-03-06 18:23:26 +01:00
Mose Müller
c7d452d7db adds tests for Image component 2024-03-05 16:32:20 +01:00
Mose Müller
b7926b730d updates version to v0.7.3 2024-03-05 16:32:07 +01:00
Mose Müller
0c175fc706 Merge pull request #109 from tiqi-group/fix/task_disappears_after_changing_state
Fix/task disappears after changing state
2024-03-05 16:08:55 +01:00
Mose Müller
7d21bca8b1 adds test for changing task state 2024-03-05 16:05:09 +01:00
Mose Müller
d1628ae8c9 fixes updating task state 2024-03-05 16:05:01 +01:00
Mose Müller
441658ebc1 Merge pull request #108 from tiqi-group/fix/cache_update_on_type_change
Fix/cache update on type change
2024-03-05 14:44:19 +01:00
Mose Müller
99c7ad0ec8 updates serializer tests 2024-03-05 14:28:53 +01:00
Mose Müller
24a01c0982 removes keys from cache entry if they are not part of the new value serialization 2024-03-05 14:17:05 +01:00
Mose Müller
b8a52c2e6a only update cache and execute notification callbacks if attribute is public and has changed 2024-03-05 13:56:02 +01:00
Mose Müller
7aacc21010 removes processing of value from sio_callback (cached value is up-to-date already) 2024-03-05 13:54:24 +01:00
Mose Müller
8787cb0509 get cached value before executing custom notification callbacks 2024-03-05 13:53:41 +01:00
Mose Müller
8971cebfcd adds todos 2024-03-05 13:24:54 +01:00
Mose Müller
f2cf0d9c1a fixes update of cache when the type has changed
When an attribute changes from, say, a quantity to an enumeration, the enum key in the serialization was not added to the
cache, and thus the frontend was not able to render the enum.
2024-03-05 13:23:26 +01:00
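
A minimal sketch of the scenario described above, assuming pydase's units helper (`pydase.units`) and a plain `Enum` whose value replaces a quantity at runtime:

```python
import enum

import pydase
import pydase.units as u


class Mode(enum.Enum):
    IDLE = "idle"
    RUNNING = "running"


class MyService(pydase.DataService):
    def __init__(self) -> None:
        super().__init__()
        # starts out as a quantity ...
        self.state: u.Quantity | Mode = 1.0 * u.units.V

    def switch(self) -> None:
        # ... and becomes an enum; the cached serialization now needs the
        # "enum" key so the frontend can render it
        self.state = Mode.IDLE
```
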
Mose Müller
36c863e845 Merge pull request #107 from tiqi-group/fix/update_frontend_before_setting_state
Fix/update frontend before setting state
2024-03-05 13:20:54 +01:00
Mose Müller
836c1e14df npm run build 2024-03-05 13:19:10 +01:00
Mose Müller
dba036c6b3 do not try to update state if it is not yet set
This happens when the backend pushes updates before the frontend has received and set the initial state while loading the page.
2024-03-05 13:19:02 +01:00
Mose Müller
8b1f1ef1b1 updates to version v0.7.2 2024-03-04 17:46:44 +01:00
Mose Müller
698db4881b Merge pull request #106 from tiqi-group/fix/enum_sio_callback
fixes sio callback when attribute changes to an enum which was not present before
2024-03-04 17:38:33 +01:00
Mose Müller
d709d43d75 ignores complexity of sio_server setup (will be changed anyway soon) 2024-03-04 17:36:09 +01:00
Mose Müller
691bf809cb fixes sio callback when attribute changes to an enum which was not present before 2024-03-04 17:32:45 +01:00
Mose Müller
86ccdd77f1 updates to version v0.7.1 2024-03-04 11:52:06 +01:00
Mose Müller
f29fb87054 Merge pull request #105 from tiqi-group/fix/enum_rendering
Fix/enum rendering
2024-03-04 11:51:31 +01:00
Mose Müller
cf5bc1e4e6 npm run build 2024-03-04 11:48:22 +01:00
Mose Müller
af36ed6c43 changes rendering of enums 2024-03-04 11:48:01 +01:00
Mose Müller
853472be94 updates enumValue when backend value changes 2024-03-04 11:47:51 +01:00
Mose Müller
f97a138e65 updates version to v0.7.0 2024-02-28 11:37:07 +01:00
Mose Müller
e5d7f4709f Merge pull request #103 from tiqi-group/90-display-the-functions-its-names-differently-in-the-ui
feat: updates functions and how they are rendered
2024-02-28 11:28:04 +01:00
Mose Müller
416ae6f0b4 updates Adding_Components.md to account for new component structure 2024-02-28 11:15:37 +01:00
Mose Müller
8f0a9ad21a npm run build 2024-02-28 11:01:23 +01:00
Mose Müller
6ed6fe5be1 cleanup: changing some frontend components 2024-02-28 10:59:28 +01:00
Mose Müller
9c6323d38f updates Readme 2024-02-28 09:12:34 +01:00
Mose Müller
5c11202e08 removes print statement 2024-02-27 18:04:09 +01:00
Mose Müller
e551af68f9 adds image to Readme 2024-02-27 17:44:08 +01:00
Mose Müller
e213931cb7 npm run build 2024-02-27 17:41:55 +01:00
Mose Müller
fe29530eb6 updates Readme 2024-02-27 17:38:39 +01:00
Mose Müller
151467b36f fixes tests 2024-02-27 17:38:09 +01:00
Mose Müller
990add216c moves frontend decorator into decorators module 2024-02-27 17:35:35 +01:00
Mose Müller
a05b703bb8 adds tests for methods exposed by DataService 2024-02-27 16:38:08 +01:00
Mose Müller
9616c57c38 changes exception raised by @frontend decorator 2024-02-27 16:37:43 +01:00
Mose Müller
a7ce321506 updates / fixes method serialization tests 2024-02-27 16:32:47 +01:00
Mose Müller
a72a551f54 fixes tests for DataServiceCache and TaskManager 2024-02-27 16:19:11 +01:00
Mose Müller
26689d8578 updates AsyncMethodComponent to work with backend 2024-02-27 16:07:54 +01:00
Mose Müller
74fc5d9aab updates task serialization 2024-02-27 16:07:29 +01:00
Mose Müller
da8d07a8b2 frontend decorator uses helper function (function_has_arguments) now 2024-02-27 15:59:35 +01:00
Mose Müller
ca2182c19b tasks are not allowed to have arguments anymore 2024-02-27 15:59:35 +01:00
Mose Müller
b2f828ff6f adds function_has_arguments helper function 2024-02-27 15:30:47 +01:00
Mose Müller
affc63219f removes name from function signature parameter serialization 2024-02-27 14:35:09 +01:00
Mose Müller
a01cf273fe fixes render_in_frontend function 2024-02-27 12:58:43 +01:00
Mose Müller
acd0c80316 updated use of method components 2024-02-27 12:58:28 +01:00
Mose Müller
2337aa9d6d only methods without arguments can be rendered 2024-02-27 12:58:08 +01:00
Mose Müller
b6f6b3058e updates render_in_frontend method (takes async functions into account) 2024-02-27 11:32:18 +01:00
Mose Müller
d33e9f9dbf method serialization contains signature instead of parameter key-value pair 2024-02-27 11:30:00 +01:00
Mose Müller
53676131a6 replaces no_frontend decorator with "frontend" decorator 2024-02-27 11:28:42 +01:00
Mose Müller
7f407ae6e7 extracts method to get default value of function keyword argument 2024-02-27 09:20:22 +01:00
Mose Müller
3c2f425dee adds "no_frontend" decorator for omitting frontend rendering of a method
The method serialization now contains a "frontend_render" key with boolean value.
2024-02-27 08:25:11 +01:00
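
Taken together with the `frontend` decorator that replaces `no_frontend` a few entries above, method rendering becomes opt-in via the `frontend_render` flag. A rough sketch (the decorator's module path is an assumption):

```python
import pydase
from pydase.utils.decorators import frontend  # module path assumed


class MyService(pydase.DataService):
    @frontend
    def start_acquisition(self) -> None:
        """Serialized with frontend_render=True and rendered as a button."""

    def helper(self, factor: float) -> float:
        """Not decorated (and takes an argument), so it is not rendered."""
        return 2.0 * factor
```
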
Mose Müller
ccc53c395e adds "name" key-value pair to DataService serialization 2024-02-27 08:13:09 +01:00
Mose Müller
c672989768 Merge pull request #104 from tiqi-group/update/dependencies
Updates fastapi and uvicorn dependencies
2024-02-26 09:41:37 +01:00
Mose Müller
5ff279d5bd Updates fastapi and uvicorn dependencies 2024-02-26 09:37:24 +01:00
Mose Müller
883ec6d6ae updates MethodComponent
Keyword arguments have a default value now which is displayed in the frontend. The following types can be rendered now:
- numbers (ints, floats, quantities)
- enums (including coloured enums)

I still have to fix the `convert_argument_to_hinted_types` method to make Quantity and Enums work.
2024-02-21 16:30:47 +01:00
Mose Müller
22fd2d099d stores enum value within component - now usable within method form 2024-02-21 16:20:58 +01:00
Mose Müller
f8926ea823 prevents Enter key within StringComponent to submit form in MethodComponent 2024-02-21 16:09:28 +01:00
Mose Müller
ceed62c8f2 merges NumberInputField back into NumberComponent 2024-02-21 15:46:27 +01:00
Mose Müller
5313ef6e8c fixes StringComponent for use as method argument (adds name to control form) 2024-02-21 15:46:14 +01:00
Mose Müller
2d98ba51f4 moves displayName and id to GenericComponent and pass them as props 2024-02-21 15:45:37 +01:00
Mose Müller
2f2544b978 removes unnecessary props from button 2024-02-21 09:36:29 +01:00
Mose Müller
fffe679bf0 defines changeCallback function in GenericComponent and passes it to components (instead of setAttribute)
The components do not use the setAttribute method themselves anymore. This way, you can provide
the changeCallback function if you want and thus reuse the components.
2024-02-21 08:32:59 +01:00
Mose Müller
2bb02a5558 separating out NumberInputField from NumberComponent (to be used in MethodComponent) 2024-02-20 17:20:20 +01:00
Mose Müller
1c029e301b updates types 2024-02-20 16:39:06 +01:00
Mose Müller
f0384b817c updates method serialization 2024-02-20 14:49:35 +01:00
Mose Müller
8042f9b390 removes card header of root component 2024-02-20 14:49:35 +01:00
Mose Müller
838145a778 allows to use .env file to configure ServiceConfig 2024-02-20 12:54:04 +01:00
Mose Müller
7d753b2fc6 Merge pull request #102 from tiqi-group/fix/dynamic_list_entry_with_property
Fix: dynamic list entry with property
2024-02-20 12:53:08 +01:00
Mose Müller
72f6a8ddee ignores some ruff rule 2024-02-20 12:51:52 +01:00
Mose Müller
dfb6f966aa adds test for dynamic list entries with properties 2024-02-20 12:29:44 +01:00
Mose Müller
dc42bfaa9b removes changed_attribute path after on_change method 2024-02-20 12:29:30 +01:00
Mose Müller
c0ba23b0b2 appending to a list now also triggers _notify_change_start
This helps determine whether the list entries being added are "changing" themselves. Properties within
the added objects trigger property changes when they are serialized, so the observer has to be told
not to listen to them.
2024-02-20 12:28:34 +01:00
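
A rough sketch of the situation described above (class names are made up): appending an object that itself has a property to an observable list attribute; serializing the new entry evaluates its properties, and the observer must not treat those evaluations as real changes.

```python
import pydase


class Channel(pydase.DataService):
    def __init__(self) -> None:
        super().__init__()
        self._gain = 1.0

    @property
    def gain(self) -> float:
        # evaluated while the freshly appended entry is serialized
        return self._gain


class MyService(pydase.DataService):
    def __init__(self) -> None:
        super().__init__()
        self.channels: list[Channel] = []

    def add_channel(self) -> None:
        # append() now also triggers _notify_change_start for "channels"
        self.channels.append(Channel())
```
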
Mose Müller
bd7a46ddc1 changes are only registered if the containing object is not being changed as a whole 2024-02-20 12:26:43 +01:00
Mose Müller
5bea0892c7 Merge pull request #94 from tiqi-group/92-add-connection-component
feat: adds device connection component
2024-02-15 09:24:17 +01:00
Mose Müller
9631a7d467 adds device connection image 2024-02-15 09:23:14 +01:00
Mose Müller
1e8c7bd141 Merge pull request #101 from tiqi-group/fix/ruff_config_for_2.0
fixes pyproject.toml ruff configuration
2024-02-15 09:11:15 +01:00
Mose Müller
10dc1436d0 fixes pyproject.toml ruff configuration 2024-02-15 09:08:16 +01:00
Mose Müller
551b8f0158 updates ruff configuration 2024-02-15 09:01:53 +01:00
Mose Müller
25139b3d4d adds device connection test 2024-02-15 08:56:13 +01:00
Mose Müller
6b1227fcbb fixes mypy error 2024-02-15 08:43:08 +01:00
Mose Müller
fd3338f99f updates DeviceConnection Readme section 2024-02-15 08:33:39 +01:00
Mose Müller
c23d0372a5 updates DeviceConnection Readme section 2024-02-14 16:03:09 +01:00
Mose Müller
b646acc994 updates device connection component
DeviceConnection is not an ABC anymore. I have updated the docstring to highlight that the
user should mostly just override the "connect" method, but the "connected" property can also
be overridden if necessary. The user is not required to override either of them, though, and
can simply make use of the "connected" frontend property.
2024-02-14 15:50:47 +01:00
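
A minimal sketch of the described usage, assuming the component lives in `pydase.components` and stores its state in a `_connected` attribute:

```python
import pydase
from pydase.components import DeviceConnection  # import path assumed


class MyDeviceConnection(DeviceConnection):
    def connect(self) -> None:
        # (re-)establish the connection to the hardware here
        self._connected = True  # attribute name assumed


class MyService(pydase.DataService):
    def __init__(self) -> None:
        super().__init__()
        self.device = MyDeviceConnection()
```
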
Mose Müller
9b31362f5b moving device connection component out of module 2024-02-14 14:39:49 +01:00
Mose Müller
63edcffe7e adds DeviceConnection section to Readme 2024-02-01 13:33:22 +01:00
Mose Müller
8c5c6d0f6d npm run build 2024-02-01 13:33:22 +01:00
Mose Müller
71b84525dd updates DeviceConnection docstring 2024-02-01 13:33:22 +01:00
Mose Müller
e78dc2defb moves device_connection.py to device_connection module 2024-02-01 13:33:22 +01:00
Mose Müller
529d61c77d fixes DeviceConnection overlay message when directly exposed 2024-02-01 13:33:22 +01:00
Mose Müller
c7c88178d4 npm run build 2024-02-01 13:33:22 +01:00
Mose Müller
7f082b6f95 fixes border radius of DeviceComponent when directly exposed 2024-02-01 13:33:22 +01:00
Mose Müller
30138bcb45 renaming file containing DeviceConnection, updating component 2024-02-01 13:33:22 +01:00
Mose Müller
1318bbc8a8 update Readme (autostart code) 2024-02-01 13:33:22 +01:00
Mose Müller
ae9761bd11 adds docstring to DeviceConnection 2024-02-01 13:33:22 +01:00
Mose Müller
04d19a853f renaming available to connected 2024-02-01 13:33:22 +01:00
Mose Müller
fc28b83bc5 adds handle_connection autostart task to DeviceConnection 2024-02-01 13:33:22 +01:00
Mose Müller
f1384b25a1 updates DeviceConnection component 2024-02-01 13:33:22 +01:00
Mose Müller
7ef82e61e5 frontend styling 2024-02-01 13:33:22 +01:00
Mose Müller
6d9191fe18 npm run build 2024-02-01 13:33:22 +01:00
Mose Müller
4f71633c5e adds backend DeviceConnection component 2024-02-01 13:33:22 +01:00
Mose Müller
2c95a2496c adds frontend DeviceConnection component 2024-02-01 13:33:22 +01:00
Mose Müller
aca5aab1ef removes unused attribute 2024-02-01 13:25:53 +01:00
Mose Müller
4f1cc4787d Merge pull request #99 from tiqi-group/cleanup/removes_deprecated_code
Cleanup/removes deprecated code
2024-02-01 11:11:43 +01:00
Mose Müller
8efd67d9f3 fixes tests 2024-02-01 10:18:58 +01:00
Mose Müller
34fc0f8739 removes deprecated code 2024-02-01 10:18:49 +01:00
Mose Müller
e60880fd30 Merge pull request #98 from tiqi-group/refactor/passing_full_serialization_dict_to_frontend
Refactor: passing full serialization dict to frontend
2024-02-01 09:27:29 +01:00
Mose Müller
036b0c681a updates version to v0.6.0 (due to breaking changes) 2024-02-01 09:25:47 +01:00
Mose Müller
dd268a4f9b npm run build 2024-02-01 09:18:24 +01:00
Mose Müller
e8638f1f3a fixes tests 2024-02-01 08:45:40 +01:00
Mose Müller
7279fed2aa frontend can now display any serialization dict 2024-02-01 08:45:40 +01:00
Mose Müller
a2518671da DataService's serialize method now returns whole serialization dict (also passed to frontend) 2024-02-01 08:45:40 +01:00
Mose Müller
bcabd2dc48 Merge pull request #95 from tiqi-group/fix/service_configuration
Fix/service configuration
2024-01-29 15:26:27 +01:00
Mose Müller
7ac9c557c2 updates version to v0.5.2 2024-01-29 15:24:13 +01:00
Mose Müller
656529d1fb fixes service configuration (allow all environment variables) 2024-01-29 15:23:27 +01:00
Mose Müller
14601105a7 Merge pull request #93 from tiqi-group/45-placing-the-explanation-question-mark-next-to-the-variable-instead-of-above
feat: placing the explanation question mark next to the variable instead of above
2024-01-16 14:16:38 +01:00
Mose Müller
484b5131e9 fixing enum serialization for python 3.10 2024-01-16 14:13:36 +01:00
Mose Müller
616a5cea21 npm run build 2024-01-16 13:44:37 +01:00
Mose Müller
300bd6ca9a updates Enum serialization 2024-01-16 13:37:39 +01:00
Mose Müller
3e1517e905 updates dev-guide for adding components 2024-01-16 13:00:01 +01:00
Mose Müller
0ecaeac3fb replaces js interfaces with types 2024-01-16 12:57:35 +01:00
Mose Müller
0e9832e2f1 updates DocStringComponent placement 2024-01-16 12:55:18 +01:00
Mose Müller
0343abd0b0 Merge pull request #91 from tiqi-group/fix/load_from_file
Fix/load from file
2024-01-09 16:39:59 +01:00
Mose Müller
0c149b85b5 updates version to v0.5.1 2024-01-09 16:39:12 +01:00
Mose Müller
0e331e58ff adds tests for server to check if loading from file is working 2024-01-09 16:36:35 +01:00
Mose Müller
45135927e6 initialises observer before loading state from json file 2024-01-09 16:21:57 +01:00
Mose Müller
d3866010a8 updates version to v0.5.0 2024-01-09 10:01:48 +01:00
Mose Müller
3c0f019af8 Merge pull request #48 from tiqi-group/10-frontend-user-should-be-able-to-add-custom-display-names
Feat: adds web settings file containing display name configuration
2024-01-08 17:17:06 +01:00
Mose Müller
8aa7fd31f8 updates Adding_Components guide 2024-01-08 17:11:55 +01:00
Mose Müller
c9ff3db9e9 Updates Readme 2024-01-08 16:57:46 +01:00
Mose Müller
9e77bae5e7 renaming config option from generate_new_web_settings to generate_web_settings 2024-01-08 16:49:36 +01:00
Mose Müller
6a6d1b27aa updates sio_setup tests (removes mock objects) 2024-01-08 16:35:32 +01:00
Mose Müller
2d3e7d8c1b adds web_server tests 2024-01-08 16:32:36 +01:00
Mose Müller
c7b039beb7 replaces method with read-only property 2024-01-08 16:32:25 +01:00
Mose Müller
62e647c667 generate_new_web_settings will now append to existing config file (not overwrite entries) 2024-01-08 15:45:02 +01:00
Mose Müller
6382be5735 removes index from generated web settings file (move to other PR) 2024-01-08 15:27:46 +01:00
Mose Müller
ea158bf8de adds sio_setup tests 2024-01-08 15:11:03 +01:00
Mose Müller
63ad6d7b93 removes web_settings sio event 2024-01-08 15:10:40 +01:00
Mose Müller
b8e758e479 updates docstring 2024-01-08 15:09:23 +01:00
Mose Müller
a12a708385 updates Readme 2023-12-21 16:10:44 +01:00
Mose Müller
edb24f5439 Server uses ServiceConfig for web/rpc port default values, configurable through env variables 2023-12-21 15:48:28 +01:00
Mose Müller
2a2b7b800d updates ServiceConfig class 2023-12-21 15:48:28 +01:00
Mose Müller
b6b20c21e4 updates WebServer options to directly default to config class values 2023-12-21 15:25:57 +01:00
Mose Müller
53be794a3c renaming service configuration dir parameter 2023-12-21 13:36:08 +01:00
Mose Müller
a303ba7f0b adds pytest-asyncio to dev dependencies 2023-12-21 13:25:54 +01:00
Mose Müller
2461f85ef0 adds test for starting and stopping tasks 2023-12-21 13:24:54 +01:00
Mose Müller
ca41e12014 updates server to use asyncio.run 2023-12-21 13:13:45 +01:00
Mose Müller
f69723dd58 updates some tests to have a running event loop 2023-12-21 13:11:49 +01:00
Mose Müller
c733026522 fixes task manager loop 2023-12-21 13:11:17 +01:00
Mose Müller
316ce5c7e7 updates type hints 2023-12-21 11:33:00 +01:00
Mose Müller
43c3f746fa npm run build 2023-12-21 11:00:23 +01:00
Mose Müller
fea96c044c removes start_task wrapper 2023-12-21 11:00:19 +01:00
Mose Müller
6543bc6b39 rewrites web server to hot-reload the web settings from the settings file 2023-12-21 10:32:37 +01:00
Mose Müller
ef36c01407 updates serializer and state_manager to deal with serialized methods
I need to get the access path from methods when generating the
web_settings.json file. Thus, methods are not skipped anymore;
instead, the method that checks whether an attribute is loadable
makes the distinction.
2023-12-21 10:31:02 +01:00
Mose Müller
9d90fd2b81 displayName of components is now taken from WebSettingsContext 2023-12-21 10:30:21 +01:00
Mose Müller
9fc6d6f910 updates WebSettings.tsx 2023-12-21 10:04:55 +01:00
Mose Müller
805e270107 updates sio_setup to not expect DataService in the parent path 2023-12-21 10:04:55 +01:00
Mose Müller
8e3a1694ce updates frontend components to not have DataService in the fullAccessPath 2023-12-21 10:03:17 +01:00
Mose Müller
32a1d14a40 changes display_name to displayName in web settings 2023-12-21 07:48:03 +01:00
Mose Müller
8940a61d4e adds WebSettings context 2023-12-21 07:48:03 +01:00
Mose Müller
393bde3280 frontend: removes unused stateRef 2023-12-20 16:57:28 +01:00
Mose Müller
eb2da1c5dc adds index to web_settings 2023-12-20 16:52:28 +01:00
Mose Müller
e7b73a99da WebServer uses serializer method now to generate serialized data paths 2023-12-20 10:21:48 +01:00
Mose Müller
392831e0fd uses new serializer method to check if attribute is loadable 2023-12-20 10:16:01 +01:00
Mose Müller
32bda8d910 updates generate_serialized_data_paths method, adds tests 2023-12-20 10:15:25 +01:00
Mose Müller
e106cc4927 adds NumberSlider to state manager tests 2023-12-20 10:14:35 +01:00
Mose Müller
464478cda9 removes helper function to create config folder 2023-12-20 10:14:07 +01:00
Mose Müller
97c026afe0 adds function to initialise web settings (also creating settings if requested), creates web-settings fastapi endpoint 2023-12-19 16:38:46 +01:00
Mose Müller
2f5c415cd5 updates webserver docstring 2023-12-19 16:21:03 +01:00
Mose Müller
728eea09f6 adds configs to WebServer (can also be passed to constructor) 2023-12-19 16:16:13 +01:00
Mose Müller
e3eaf5ffe2 adds ServiceConfig and WebServerConfig 2023-12-19 16:11:32 +01:00
Mose Müller
1dc3b62060 removes usage of rpyc-specific method in WebServer 2023-12-19 14:57:06 +01:00
Mose Müller
8214faf5cb removes ForkingServer rpyc configuration 2023-12-19 13:07:25 +01:00
Mose Müller
232eb53249 renames file 2023-12-19 12:59:18 +01:00
Mose Müller
439f514ea5 fixes WebServer 2023-12-19 12:58:32 +01:00
Mose Müller
c7d63f5139 replaces SioServerWrapper with setup function 2023-12-19 12:58:32 +01:00
Mose Müller
f64b5c35ab renaming sio_server file 2023-12-19 12:56:03 +01:00
Mose Müller
bb4de988e9 updates Server docstring 2023-12-19 11:44:50 +01:00
Mose Müller
36a8e916f6 updates kwargs passed to servers 2023-12-19 11:44:36 +01:00
Mose Müller
1a00f37372 fixes exception emission to web clients 2023-12-19 11:43:29 +01:00
Mose Müller
6630173cec fixes mypy issue 2023-12-19 11:43:29 +01:00
Mose Müller
08a62b2119 updates WebServer docstring 2023-12-19 11:43:29 +01:00
Mose Müller
37ae34ecc0 makes WebServer functions protected 2023-12-19 11:42:39 +01:00
Mose Müller
8b78099178 updates AdditionalServerProtocol and WebServer
updates WebServer
2023-12-19 11:42:39 +01:00
Mose Müller
3186e04cc1 creates web_server module with WebServer complying with AdditionalServerProtocol 2023-12-19 10:59:24 +01:00
Mose Müller
055acbe591 using get_running_loop instead of soon-deprecated get_event_loop 2023-12-19 10:55:38 +01:00
Mose Müller
0d08c2ce0d removes unnecessary condition check 2023-12-19 10:55:07 +01:00
Mose Müller
68cc5b693e adds socketio event for web_settings 2023-12-18 12:04:33 +01:00
Mose Müller
4fcd5b4d44 adds helper function to create config folder 2023-12-18 12:04:31 +01:00
Mose Müller
9cbc639d0f updates vscode settings 2023-12-18 12:03:20 +01:00
Mose Müller
a48cce32e4 chore: formatting tests 2023-12-18 11:59:20 +01:00
Mose Müller
8c24f5dd67 updates version number 2023-12-13 11:29:28 +01:00
Mose Müller
1c4a878aa8 Merge pull request #86 from tiqi-group/9-add-units-support-for-numberslider
updates Readme explaining how to use units with number sliders
2023-12-13 11:25:58 +01:00
Mose Müller
31967d0d43 updates Readme explaining how to use units with number sliders 2023-12-13 11:23:44 +01:00
Mose Müller
b4edc31030 Merge pull request #84 from tiqi-group/75-numberslider-component-is-not-working
75 numberslider component is not working
2023-12-13 11:12:56 +01:00
Mose Müller
ff7c92547e updates Readme 2023-12-13 11:09:18 +01:00
Mose Müller
fab91f3221 updates number slider test file 2023-12-13 10:39:26 +01:00
Mose Müller
bd77995d96 npm run build 2023-12-13 10:36:00 +01:00
Mose Müller
729f375901 adds support for quantities in slider component (passing object instead of number) 2023-12-13 10:35:28 +01:00
Mose Müller
e643dd6f5c adds number object types to NumberComponent 2023-12-13 10:34:32 +01:00
Mose Müller
53f4cf6690 removes setters for min, max and step_size in NumberSlider, updates docstring 2023-12-13 09:30:21 +01:00
Mose Müller
c0c8591fc4 updates number slider component 2023-12-11 17:46:08 +01:00
Mose Müller
13fba6d3d6 npm run build 2023-12-11 17:30:12 +01:00
Mose Müller
dc4c9ff58f removes unused customEmitUpdate prop from NumberComponent 2023-12-11 17:30:12 +01:00
Mose Müller
83cd07feee updates SliderComponent to emit attribute updates (instead of full state dict) 2023-12-11 17:30:12 +01:00
Mose Müller
09f73a2b1d Merge pull request #83 from tiqi-group/feat/improve_data_service_serialization
fixed serialization of class deriving from class which derives from DataService
2023-12-11 17:28:11 +01:00
Mose Müller
88886e3fd6 fixed serialization of class deriving from class which derives from DataService 2023-12-11 17:25:03 +01:00
Mose Müller
49984b7c2e updates version to v0.4.0 2023-12-11 14:13:14 +01:00
Mose Müller
39270561b9 updates Readme with logging information 2023-12-11 14:06:02 +01:00
Mose Müller
8ac2c39908 fix: don't log private and protected attribute changes 2023-12-11 12:52:58 +01:00
Mose Müller
0694a3d1ee fix: removes inheritance warning for functions 2023-12-11 12:28:37 +01:00
Mose Müller
c15ad54e2d updates test 2023-12-11 11:55:55 +01:00
Mose Müller
71721b1286 fix: remove inheritance warning for lists 2023-12-11 11:53:55 +01:00
Mose Müller
74ceb7f05c fix image component warning 2023-12-11 11:51:24 +01:00
Mose Müller
06d11fff49 Merge pull request #80 from tiqi-group/feat/improve_inheritance_warning
Feat: improves DataService inheritance warning
2023-12-11 09:25:03 +01:00
Mose Müller
6d23151d32 updates tests 2023-12-11 09:22:38 +01:00
Mose Müller
0faf347376 moves inheritance warning into DataService, improves logic 2023-12-11 09:15:08 +01:00
Mose Müller
a5fddf7e45 Merge pull request #79 from tiqi-group/feat/improves_state_manager_debug_messages
fix: improves debug message for properties (load_state decorator)
2023-12-07 11:44:46 +01:00
Mose Müller
83c763bd20 improves debug message for properties (load_state decorator) 2023-12-07 11:39:56 +01:00
Mose Müller
9778541ee4 Merge pull request #77 from tiqi-group/69-add-support-for-adding-objects-to-a-list
69 add support for adding objects to a list
2023-12-06 18:04:49 +01:00
Mose Müller
8e641c1b84 implements clear, insert, remove, extend and pop for observable lists 2023-12-06 18:02:26 +01:00
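
A generic sketch of the idea (not pydase's actual `_ObservableList` implementation): every mutating method is overridden so that the owner is notified about the change.

```python
from typing import Any, Callable


class NotifyingList(list):
    """Toy observable list: mutating methods report back to a callback."""

    def __init__(self, *args: Any, callback: Callable[[str], None] = print) -> None:
        super().__init__(*args)
        self._callback = callback

    def append(self, item: Any) -> None:
        super().append(item)
        self._callback("append")

    def extend(self, items: Any) -> None:
        super().extend(items)
        self._callback("extend")

    def pop(self, index: int = -1) -> Any:
        value = super().pop(index)
        self._callback("pop")
        return value
```
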
Mose Müller
f6bf229c8c updates ruff config (and workflow) 2023-12-06 17:25:09 +01:00
Mose Müller
5a76d76d2b adds test for (dynamic / static) property dependencies 2023-12-06 09:17:43 +01:00
Mose Müller
3169531a24 updates property dependencies when changing to an observable object 2023-12-06 09:17:43 +01:00
Mose Müller
4bd0092fbf adds warnings for non-overridden observable-list methods 2023-12-06 09:17:43 +01:00
Mose Müller
569e343e89 overrides append in _ObservableList 2023-12-06 09:17:43 +01:00
Mose Müller
f2b2ef8dcd Merge pull request #78 from tiqi-group/feat/removes_private_attr_set_warning
Feat: removes warning if private attribute is set
2023-12-06 09:17:08 +01:00
Mose Müller
f70ac05df6 ruff does not check tests anymore 2023-12-06 09:16:02 +01:00
Mose Müller
e3367efda1 removes corresponding test 2023-12-06 09:12:00 +01:00
Mose Müller
3d2de7109b removes warning if setting private attributes (should work now) 2023-12-06 09:11:14 +01:00
Mose Müller
534ff4c149 updates pyproject toml (ruff config) 2023-12-06 09:07:19 +01:00
Mose Müller
0e47f6c4d3 Merge pull request #76 from tiqi-group/72-support-for-dynamic-attribute-handling-and-collection-management
72 support for dynamic attribute handling and collection management
2023-12-06 09:05:54 +01:00
Mose Müller
b4ef8201f3 adds tests for data service type change warnings 2023-12-06 09:04:08 +01:00
Mose Müller
a97a55712e adds warning message when super().__init__() is not called at the start of the constructor 2023-12-06 08:49:24 +01:00
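
The pattern this warning enforces, sketched for a pydase service:

```python
import pydase


class MyService(pydase.DataService):
    def __init__(self) -> None:
        # must be called first so that the attributes defined below are observed
        super().__init__()
        self.voltage = 1.0
        self.current = 0.5
```
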
Mose Müller
e8a0a7c000 adds Observer Pattern documentation 2023-12-05 16:17:12 +01:00
Mose Müller
6f0d43aa5a chore: formatting 2023-12-05 12:50:31 +01:00
Mose Müller
0e210b8ba6 renames test file 2023-12-05 12:50:02 +01:00
Mose Müller
329e0acd81 adds observer_pattern tests 2023-12-05 12:48:58 +01:00
Mose Müller
f97cd7eb4e adds observers to observer namespace 2023-12-05 12:48:46 +01:00
Mose Müller
3c168243bb removes unused type: ignore statements 2023-12-05 11:50:06 +01:00
Mose Müller
0944a404dc moves property-related stuff from DataServiceObserver to PropertyObserver 2023-12-05 11:48:13 +01:00
Mose Müller
a9c6070ca3 reduces complexity of DataServiceObserver functions 2023-12-05 11:35:58 +01:00
Mose Müller
75ee71cbf8 fixes warnings tests 2023-12-05 11:24:17 +01:00
Mose Müller
1e55a4d914 npm run build 2023-12-05 10:49:57 +01:00
Mose Müller
aab2b4ee77 updates frontend reducer to accept new sio_callback event data 2023-12-05 10:49:33 +01:00
Mose Müller
52d571e551 updates Server (adds Observer, updates sio_callback) 2023-12-05 10:49:00 +01:00
Mose Müller
bb415af460 creates deepcopy of cached dict instead of copy, removes warnings for methods 2023-12-05 10:48:30 +01:00
Mose Müller
c3c1669cf9 __convert_value_if_needed now also converts to float if needed 2023-12-05 10:20:12 +01:00
Mose Müller
5378396958 updates units tests 2023-12-05 10:14:01 +01:00
Mose Müller
b66e964155 adds warning to DataService when types change, types will not be converted anymore 2023-12-05 10:12:57 +01:00
Mose Müller
4fc25c6752 improves check for updated value in Observer 2023-12-05 10:12:18 +01:00
Mose Müller
44cd9597cb adds warnings if types change in cache 2023-12-05 10:12:00 +01:00
Mose Müller
e48a7067ec removes duplicate code from DataServiceObserver (already in Observer) 2023-12-05 10:11:12 +01:00
Mose Müller
8919f6106a adds add_notification_callback method to DataServiceObserver 2023-12-05 10:10:35 +01:00
Mose Müller
89b5a9cc9e updates tests 2023-12-04 17:23:42 +01:00
Mose Müller
0aa1595da4 updates data service observer 2023-12-04 17:23:39 +01:00
Mose Müller
8f8b3e3bcf updates __getattribute__ of Observable 2023-12-04 17:16:01 +01:00
Mose Müller
43e6adcb2e removes unnecessary "..." literal 2023-12-04 14:21:51 +01:00
Mose Müller
3992f491c9 updates data service observer's cache dict check 2023-12-04 13:36:16 +01:00
Mose Müller
df571a8260 uses cache method to retrieve value dict in state manager 2023-12-04 13:36:16 +01:00
Mose Müller
53713794d6 updates method to get value dict from cache 2023-12-04 13:36:16 +01:00
Mose Müller
06e642972f fixes task manager notifications 2023-12-04 13:36:16 +01:00
Mose Müller
a7ec7c1536 fixes number slider constructor 2023-12-04 13:36:16 +01:00
Mose Müller
c891642bda updates tests 2023-12-04 13:36:16 +01:00
Mose Müller
cc105106ee removes try catch from serializer function to not log error but rather raise exception 2023-12-04 13:36:16 +01:00
Mose Müller
7c7bb193e4 reusing util function 2023-12-04 13:36:16 +01:00
Mose Müller
92e79579ff chore: type hints 2023-12-04 13:36:16 +01:00
Mose Müller
5d2d34bea3 adds DataServiceObserver 2023-12-04 13:36:16 +01:00
Mose Müller
3497962fca updates data service cache (methods to set and get values) 2023-12-04 13:36:16 +01:00
Mose Müller
114a1c6fdc removes data service list and callback manager, make DataService an Observable 2023-12-04 13:36:16 +01:00
Mose Müller
1d2ac57ba7 updates observable list and dict types 2023-12-04 13:36:16 +01:00
Mose Müller
99dea381a3 adds first version of observer_pattern module 2023-12-04 13:36:16 +01:00
Mose Müller
e6e5ac84b4 resets default host to 0.0.0.0 2023-12-04 08:42:52 +01:00
Mose Müller
246148f513 updates vscode folder 2023-11-30 11:31:29 +01:00
Mose Müller
eb0c819037 removes reportUnknownParameterType (pyright), disallows any generics (mypy) 2023-11-30 09:49:29 +01:00
Mose Müller
f5d8775141 removes reportUnknownMemberType from pyright config 2023-11-30 09:20:58 +01:00
Mose Müller
1ec034a62e updates pyproject config (removes black and isort) 2023-11-30 09:12:51 +01:00
Mose Müller
93f0627534 removes Optional typing and unused comments 2023-11-30 09:01:39 +01:00
Mose Müller
ad2ae704e9 updates ruff config 2023-11-30 09:01:26 +01:00
Mose Müller
de5340d6fd updates python-package testing workflow 2023-11-29 15:51:13 +01:00
Mose Müller
b80a3ec6a1 updates pyright and mypy config 2023-11-29 15:50:36 +01:00
Mose Müller
f3853ef836 removes poetry.toml (user specific file, use your global config instead) 2023-11-29 15:35:42 +01:00
Mose Müller
56ae9086b5 poetry: makes dev and docs groups optional, removes venv and venvPath from pyright config 2023-11-29 15:34:49 +01:00
Mose Müller
5a2371353a replaces state manager error with info log when no filename was provided 2023-11-28 16:39:27 +01:00
Mose Müller
09a55f50bd Create bug_report.md issue template 2023-11-28 16:31:41 +01:00
Mose Müller
abafd1a2b2 Merge pull request #74 from tiqi-group/cleanup/ruff_linting
Cleanup: switching to ruff linter and formatter
2023-11-28 15:23:53 +01:00
Mose Müller
145ff89072 fix ruff errors 2023-11-28 15:21:27 +01:00
Mose Müller
ba5b4e7be4 updates github linting workflow (ruff instead of flake8) 2023-11-28 15:20:17 +01:00
Mose Müller
8ee49469d6 removes flake8 config 2023-11-28 15:18:12 +01:00
Mose Müller
6997c4a842 updates python dependencies 2023-11-28 15:17:59 +01:00
Mose Müller
598449e893 implement ruff recommendations 2023-11-28 15:17:23 +01:00
Mose Müller
4802f19720 removes unused web_settings kwarg from server 2023-11-28 15:17:13 +01:00
Mose Müller
a04bd14e50 fix number slider test 2023-11-28 14:58:53 +01:00
Mose Müller
c60730f21b removes unused "info" endpoint from web server 2023-11-28 14:57:45 +01:00
Mose Müller
d5cd97ea57 updates utils.logging 2023-11-28 14:53:51 +01:00
Mose Müller
0136885207 updates callback manager 2023-11-28 14:53:48 +01:00
Mose Müller
c04e048e21 updates NumberSlider (constructor kwargs) 2023-11-28 14:41:28 +01:00
Mose Müller
9e9d3f17bc implements ruff suggestions 2023-11-27 17:37:37 +01:00
Mose Müller
e576f6eb80 updates ruff config 2023-11-27 17:37:37 +01:00
Mose Müller
e57fe10c9e Removes unnecessary pass statement 2023-11-27 17:36:28 +01:00
Mose Müller
f27f513bf8 Updates gitignore 2023-11-27 17:16:15 +01:00
Mose Müller
de4e4ed178 update python deps 2023-11-27 17:16:15 +01:00
Mose Müller
cb2687a4b9 only import Callable when TYPE_CHECKING 2023-11-27 17:16:15 +01:00
Mose Müller
ab794d780b implements logging suggestions (no f-strings) 2023-11-27 17:16:15 +01:00
Mose Müller
617eed4d96 implements ruff suggestions 2023-11-27 17:16:15 +01:00
Mose Müller
d517bd0489 updates Adding_Components.md 2023-11-27 16:29:25 +01:00
Mose Müller
d0869b707b Merge pull request #73 from tiqi-group/feat/notify_frontend_about_logged_errors
Adds capability of notifying frontend about logged errors
2023-11-27 16:17:33 +01:00
Mose Müller
eab99df9d1 npm run build 2023-11-27 16:16:15 +01:00
Mose Müller
9d36f99404 adds CRITICAL log level 2023-11-27 16:15:53 +01:00
Mose Müller
7b7ef0eb97 npm run build 2023-11-27 16:09:39 +01:00
Mose Müller
92f14c6788 updates App.css 2023-11-27 16:09:12 +01:00
Mose Müller
4746470aee error toasts always show even when showNotifications is false 2023-11-27 16:08:49 +01:00
Mose Müller
f5627e6a2f frontend: error toast only goes away when clicked 2023-11-27 16:08:08 +01:00
Mose Müller
a769f4eb3b updates SocketIOHandler 2023-11-27 16:01:36 +01:00
Mose Müller
3970d5a17b removes unused import 2023-11-27 15:58:05 +01:00
Mose Müller
a89db46d5e updates VS Code settings.json 2023-11-27 15:43:36 +01:00
Mose Müller
f67591c7ac npm run build 2023-11-27 15:42:33 +01:00
Mose Müller
fdcaa1c1ed updates App.css 2023-11-27 15:41:40 +01:00
Mose Müller
613b1dd6a4 updates addNotification type hints in components 2023-11-27 15:41:30 +01:00
Mose Müller
914997cc6b updates App.tsx to use new NotificationComponent 2023-11-27 15:41:02 +01:00
Mose Müller
667bb949cc rewrites NotificationsComponent to handle various notification levels 2023-11-27 15:40:25 +01:00
Mose Müller
acaac6f0a6 initialises SocketIOHandler in web server 2023-11-27 15:39:00 +01:00
Mose Müller
e9df89765d adds SocketIOHandler emitting error messages via socketio.AsyncServer 2023-11-27 15:38:35 +01:00
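
A generic sketch of such a handler (not pydase's actual implementation; the event name and payload shape are made up): a `logging.Handler` that forwards error records to connected clients via `socketio.AsyncServer.emit`.

```python
import asyncio
import logging

import socketio


class SocketIOHandler(logging.Handler):
    """Forwards ERROR-level log records to web clients (sketch only)."""

    def __init__(self, sio: socketio.AsyncServer) -> None:
        super().__init__(logging.ERROR)
        self._sio = sio

    def emit(self, record: logging.LogRecord) -> None:
        # assumes emit() is called from within the running asyncio event loop
        asyncio.get_running_loop().create_task(
            self._sio.emit(
                "notify",  # event name is an assumption
                {"level": record.levelname, "message": record.getMessage()},
            )
        )
```
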
Mose Müller
123edb9e86 frontend: removes unused code from stateUtils 2023-11-27 15:37:58 +01:00
Mose Müller
69328d6f68 fix: sio_callback creates correct full_access_path now 2023-11-27 13:38:28 +01:00
Mose Müller
0cd3a7e8a8 Merge pull request #71 from tiqi-group/fix/update_task_status
Fix: update task status
2023-11-16 10:26:22 +01:00
Mose Müller
abd77e053d removes debug msg 2023-11-16 10:23:53 +01:00
Mose Müller
ebb8b4be8b adds cache test for task status update 2023-11-16 10:22:13 +01:00
Mose Müller
a83e0c6c7f only update type value in serialized dict if it's not a method 2023-11-16 09:42:41 +01:00
Mose Müller
64dc09faf7 Merge pull request #70 from tiqi-group/feat/emit_serialized_value_to_frontend
Feat: emit serialized object to frontend
2023-11-16 09:24:13 +01:00
Mose Müller
e2fb9ebae5 npm run build 2023-11-16 09:15:22 +01:00
Mose Müller
4a43bda5e2 frontend: updates reducer to process serialized values 2023-11-16 09:14:48 +01:00
Mose Müller
f693fa9ba2 frontend: adds stateUtils module 2023-11-16 09:14:01 +01:00
Mose Müller
9820bda4b5 webserver sio callback emits serialized value to frontend clients now 2023-11-16 09:13:37 +01:00
Mose Müller
f5116607b9 replaces lambda functions with functions in callback manager 2023-11-16 09:10:23 +01:00
Mose Müller
0ea997384c chore: type hints, mypy issues 2023-11-16 08:33:54 +01:00
Mose Müller
28410a97f5 updates DataServiceList (constructor and attributes) 2023-11-16 08:33:17 +01:00
Mose Müller
f6eef7085e updates frontend packages 2023-11-16 08:13:29 +01:00
Mose Müller
a76035f443 Merge pull request #68 from tiqi-group/fix/only_load_state_properties_can_be_updated
fix: only load state properties can be updated
2023-11-09 17:35:28 +01:00
Mose Müller
2ab4d1c00a updates to v0.3.1 2023-11-09 17:33:03 +01:00
Mose Müller
a9d577820f updates tests 2023-11-09 17:32:35 +01:00
Mose Müller
f5e6dca16a moves check for load_state decorator to load_state method in StateManager 2023-11-09 17:32:30 +01:00
Mose Müller
4a45d0d438 npm run build 2023-11-09 17:10:56 +01:00
Mose Müller
3cc6399f60 frontend: update style (fix button appearance) 2023-11-09 17:10:21 +01:00
Mose Müller
dc1c7e80f4 docs: updates Readme TOC 2023-11-09 16:05:31 +01:00
Mose Müller
95b5907a8d update to version v0.3.0 2023-11-09 15:56:00 +01:00
Mose Müller
675fe86e7e Merge pull request #67 from tiqi-group/46-setter-functions-being-called-at-startup-when-loading-json-file
46 setter functions being called at startup when loading json file
2023-11-09 15:54:05 +01:00
Mose Müller
60c2cca8f5 updates Readme.md 2023-11-09 15:52:25 +01:00
Mose Müller
e4fb1c66a1 updates StateManager tests 2023-11-09 15:52:25 +01:00
Mose Müller
1af4f98a48 updates has_load_state_decorator logic 2023-11-09 15:52:22 +01:00
Mose Müller
eddf3dd2fc adds docstring 2023-11-09 15:52:06 +01:00
Mose Müller
c2a22d4456 adds tests for load_state decorator 2023-11-09 15:29:47 +01:00
Mose Müller
aa9f1ba35a adds load_state decorator 2023-11-09 15:29:39 +01:00
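
A rough sketch of how a property setter might opt in to state loading with this decorator (the import path is an assumption):

```python
import pydase
from pydase.data_service.state_manager import load_state  # import path assumed


class MyService(pydase.DataService):
    def __init__(self) -> None:
        super().__init__()
        self._gain = 1.0

    @property
    def gain(self) -> float:
        return self._gain

    @gain.setter
    @load_state
    def gain(self, value: float) -> None:
        # only setters decorated with @load_state are restored from the file
        self._gain = value
```
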
Mose Müller
2208e5f66e npm run build 2023-11-09 15:16:53 +01:00
Mose Müller
96f1ee16b7 docs: updates Adding_Components.md 2023-11-09 14:36:45 +01:00
Mose Müller
4f7c6ccde4 docs: updates Adding_Components.md 2023-11-09 14:15:30 +01:00
Mose Müller
856f5d0c79 update flake8 and pyright configs 2023-11-09 14:10:58 +01:00
Mose Müller
b60995d218 removes unnecessary log msg 2023-11-09 14:10:27 +01:00
Mose Müller
380f98edb5 adds type hint 2023-11-09 14:10:14 +01:00
Mose Müller
30e4ebb670 Merge pull request #66 from tiqi-group/fix/executing_methods_through_frontend
Fix/executing methods through frontend
2023-11-09 14:04:48 +01:00
Mose Müller
bdf5512bcc adds run_method socketio event to web server 2023-11-09 14:03:20 +01:00
Mose Müller
a323ce169e renames frontend_update socketio event to set_attribute 2023-11-09 13:53:13 +01:00
Mose Müller
d18be54284 updates frontend components to use new methods from socket.ts 2023-11-09 13:52:23 +01:00
Mose Müller
a750644c20 updates socket.ts (renames and add method) 2023-11-09 13:52:00 +01:00
Mose Müller
45ede860d9 removes JSDoc types (already in typescript) 2023-11-09 13:51:26 +01:00
Mose Müller
a060836304 updates DataServiceList tests 2023-11-09 11:59:48 +01:00
Mose Müller
963e449adb moves DataServiceList test file 2023-11-09 11:50:08 +01:00
Mose Müller
1776fc8623 converts values (ints and quantities) when setting list entries 2023-11-09 11:49:42 +01:00
Mose Müller
aed0dd9493 updates StateManager tests 2023-11-09 11:37:07 +01:00
Mose Müller
784d49d90c refactors __update_attribute_by_path of StateManager 2023-11-09 11:36:45 +01:00
Mose Müller
8dd05ac5e3 renames helper function 2023-11-09 11:35:04 +01:00
Mose Müller
27bb73a2da adds docstring 2023-11-09 08:20:51 +01:00
Mose Müller
6b643210d7 adds tests for StateManager 2023-11-08 17:09:05 +01:00
Mose Müller
24f1574168 web server now uses StateManager method to update DataService attributes 2023-11-08 17:08:31 +01:00
Mose Müller
b594a91a18 refactors load_state method 2023-11-08 17:08:00 +01:00
Mose Müller
e708d6f1c3 adds logic of updating DataService attributes to StateManager 2023-11-08 17:07:37 +01:00
Mose Müller
6c2c5d4ad1 deprecates update_DataService_attribute function 2023-11-08 17:05:55 +01:00
Mose Müller
d0377be455 Merge pull request #63 from tiqi-group/52-add-a-cache-storing-the-state-of-the-service
52 add a cache storing the state of the service
2023-11-07 18:34:46 +01:00
Mose Müller
5e136c2784 renames test file 2023-11-07 18:26:13 +01:00
Mose Müller
0a94b32011 updates serializer tests 2023-11-07 18:25:57 +01:00
Mose Müller
14b5219915 refactoring serializer module methods 2023-11-07 18:23:24 +01:00
Mose Müller
7c573cdc10 updating docstring 2023-11-07 17:16:02 +01:00
Mose Müller
393b025648 renaming function, updating docstring 2023-11-07 17:15:54 +01:00
Mose Müller
03fee3f88c moves generate_paths_from_DataService_dict to serializer module 2023-11-07 17:06:35 +01:00
Mose Müller
59c7d7bb6f formatting 2023-11-07 17:03:36 +01:00
Mose Müller
dc70f3cfcf renames functions, adds docstrings 2023-11-07 16:59:59 +01:00
Mose Müller
cdd657f895 adds tests for update_serialization_dict method 2023-11-07 16:43:09 +01:00
Mose Müller
c9b5547831 refactoring serializer.py 2023-11-07 16:41:22 +01:00
Mose Müller
615bf294e1 moves get_attribute_doc to helpers 2023-11-07 16:14:41 +01:00
Mose Müller
b6953251b9 updating helper function 2023-11-06 18:27:41 +01:00
Mose Müller
3440a632ad moving set_nested_value_in_dict to Serializer, renaming module 2023-11-06 18:27:00 +01:00
Mose Müller
4ef4bab36e fixing mypy issues 2023-11-06 18:25:40 +01:00
Mose Müller
567617f4e6 docs: Updating Readme 2023-11-06 17:35:47 +01:00
Mose Müller
76545b88de removes _filename attribute from DataService (unless specified) 2023-11-06 17:30:22 +01:00
Mose Müller
f38df58842 updates logging message formatting 2023-11-06 17:22:42 +01:00
Mose Müller
d057710b60 adds StateManager tests 2023-11-06 17:22:26 +01:00
Mose Müller
f071bda35f adds data service cache tests 2023-11-06 15:08:36 +01:00
Mose Müller
2b304cba03 docs: updating docstrings 2023-11-06 15:08:15 +01:00
Mose Müller
f88493d97c fix: removes monkey patch of emit_notification, adapts affected tests 2023-11-06 13:46:08 +01:00
Mose Müller
53ce51991f initializes cache in the constructor of the DataServiceCache 2023-11-06 11:13:44 +01:00
Mose Müller
0385e5732e adds docstring 2023-11-06 10:49:41 +01:00
Mose Müller
20a64099a4 updates protocol for additional servers 2023-11-06 10:49:35 +01:00
Mose Müller
16b284da45 adds state manager to additional servers 2023-11-06 10:49:16 +01:00
Mose Müller
2833284239 chore: updating types, removes unused imports 2023-11-06 10:06:08 +01:00
Mose Müller
8d9160d660 adds docstring 2023-11-06 09:58:48 +01:00
Mose Müller
c196c82c52 refactor StateManager: adds cache property for direct access 2023-11-06 09:58:06 +01:00
Mose Müller
d66a3ad015 updates comments and docstrings 2023-11-06 09:54:50 +01:00
Mose Müller
08512e945b adds deprecation warnings to DataService 2023-11-06 09:54:33 +01:00
Mose Müller
e4796102be removes filename argument from DataService constructor 2023-11-06 09:54:23 +01:00
Mose Müller
2fd4d94dbb moves cache from StateManager to DataServiceCache 2023-11-06 09:53:09 +01:00
Mose Müller
78c055acf0 adds DataServiceCache class 2023-11-06 09:50:38 +01:00
Mose Müller
75a69204b5 moves state manager from DataService to Server 2023-11-06 09:32:25 +01:00
Mose Müller
f852dea9e5 Removes state manager from all service instances that have no filename set or are not exposed 2023-11-03 09:55:40 +01:00
Mose Müller
49070a7f38 removing unused imports
removing unused import
2023-11-03 09:55:40 +01:00
Mose Müller
fc7092f14c using StateManager in DataService 2023-11-03 09:55:40 +01:00
Mose Müller
b0254daa17 adds StateManager 2023-11-03 09:55:40 +01:00
Mose Müller
b08a976d2a removes tests from pyright includes, updates poetry.lock 2023-11-03 09:54:46 +01:00
Mose Müller
fccd5a7c36 Merge pull request #62 from tiqi-group/fix/connection_toast_timeout
Fix/connection toast timeout
2023-11-03 09:18:13 +01:00
Mose Müller
d643923fd3 fix: only update connection toast to reconnecting when still disconnected 2023-11-03 09:14:36 +01:00
Mose Müller
3132680c50 removing unnecessary console log commands 2023-11-03 09:14:36 +01:00
Mose Müller
f47a5524b3 Merge pull request #61 from tiqi-group/revert-60-fix/connection_toast_timeout
Revert "Fix/connection toast timeout"
2023-11-03 09:11:59 +01:00
Mose Müller
b32bdabfca Revert "Fix/connection toast timeout" 2023-11-03 09:11:40 +01:00
Mose Müller
c5beee5d50 Merge pull request #60 from tiqi-group/fix/connection_toast_timeout
Fix/connection toast timeout
2023-11-03 08:52:17 +01:00
Mose Müller
55ce32e105 fix: only update connection toast to reconnecting when still disconnected 2023-11-03 08:50:33 +01:00
Mose Müller
621bed94af removing unnecessary console log commands 2023-11-03 08:50:03 +01:00
Mose Müller
a837e1bce8 removing unused imports 2023-11-02 18:25:55 +01:00
Mose Müller
6ab11394fa using StateManager in DataService 2023-11-02 18:22:32 +01:00
Mose Müller
51c4e2f971 adds StateManager 2023-11-02 18:21:43 +01:00
Mose Müller
c5a2b38914 removing duplicate test 2023-11-02 17:50:43 +01:00
Mose Müller
d45b835ea2 docs: correcting docstring 2023-11-02 16:00:54 +01:00
Mose Müller
d2c0b6968e Merge pull request #58 from tiqi-group/37-update-task-status-in-frontend-when-restarting-the-service
Service data will be fetched as soon as the client connects to the websocket server
2023-11-02 15:47:30 +01:00
Mose Müller
728fe958cb npm run build 2023-11-02 15:43:18 +01:00
Mose Müller
69c5e0397b fetch data as soon as the client connects to the websocket server 2023-11-02 15:43:11 +01:00
Mose Müller
7f402b45e7 docs: adding docstring to ConnectionToast 2023-11-02 15:34:07 +01:00
Mose Müller
c4056d3ca8 chore: formatting, renaming 2023-11-02 15:31:46 +01:00
Mose Müller
c13166dddb Merge pull request #57 from tiqi-group/feat/adding_connection_toast
adds connection toast component to app
2023-11-02 15:26:47 +01:00
Mose Müller
47d64243c3 adds connection toast component to app 2023-11-02 15:23:31 +01:00
Mose Müller
f01ef057bf Merge pull request #56 from tiqi-group/cleanup/refactoring_serialization
Refactors DataService serialization
2023-11-02 14:36:12 +01:00
Mose Müller
6804cdf3b1 refactoring Serializer class 2023-11-02 14:33:16 +01:00
Mose Müller
2b57df5aac adds tests for serialization (and moves tests from test_data_service) 2023-11-02 14:11:08 +01:00
Mose Müller
2eb0eb84cf moves serialization into separate class in the utils module 2023-11-02 14:10:33 +01:00
Mose Müller
f8495dc949 Merge pull request #55 from tiqi-group/50-problem-with-negative-number
feat: pressing "-" at the start of a number component toggles the sign
2023-10-30 14:39:59 +01:00
Mose Müller
9ac6e2c56a npm run build 2023-10-30 14:37:25 +01:00
Mose Müller
8ae0b7818b feat (frontend): pressing "-" at the beginning of a number component will add a minus sign 2023-10-30 14:36:52 +01:00
Mose Müller
61c6585ac6 Merge pull request #54 from tiqi-group/fix/frontend-div-ids
Fix: frontend div ids adhere to html guidelines now
2023-10-30 14:21:55 +01:00
Mose Müller
b6c956fab8 docs: updating Adding_Components description 2023-10-30 14:17:30 +01:00
Mose Müller
743531c434 updating frontend packages and config 2023-10-30 14:15:53 +01:00
Mose Müller
3ecb6384ad npm run build 2023-10-30 14:15:38 +01:00
Mose Müller
1d2325171b fixing eslint errors 2023-10-30 14:14:32 +01:00
Mose Müller
b149c1b411 fix: component ids adhere to html guidelines now 2023-10-30 14:05:39 +01:00
Mose Müller
7e5861ec22 feat: adding utils module (string manipulation function) 2023-10-30 14:04:25 +01:00
Mose Müller
5b4c74f1c2 npm run build 2023-10-30 13:27:02 +01:00
Mose Müller
7dcec88c9a frontend: updating addNotification type hints 2023-10-30 13:26:25 +01:00
Mose Müller
3d42366ada Merge pull request #53 from tiqi-group/27-task-autostart-not-working-in-nested-classes
27 task autostart not working in nested classes
2023-10-25 16:50:55 +02:00
Mose Müller
eb46a088ee chore: refactoring method 2023-10-25 16:48:33 +02:00
Mose Müller
69cd86b601 feat: adds autostart_tasks test 2023-10-25 16:39:11 +02:00
Mose Müller
81f2281002 fix: autostart_tasks capability in sub-classes 2023-10-25 16:33:29 +02:00
Mose Müller
f7f64bbe92 adding callback_manager tests 2023-10-25 16:23:06 +02:00
Mose Müller
0504a50a08 fix: creates property functions to avoid closure and late binding issue
When there were multiple tasks, the callbacks all pointed to the task defined last.
2023-10-25 16:23:06 +02:00
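
The underlying Python pitfall, sketched generically: closures created in a loop capture the loop variable by reference, so without a default argument (or a factory function) every callback ends up pointing to the last value.

```python
callbacks = []
for task_name in ["task_a", "task_b"]:
    # BAD: every lambda would see the final value of task_name ("task_b")
    # callbacks.append(lambda: print(task_name))

    # GOOD: bind the current value at definition time
    callbacks.append(lambda task_name=task_name: print(task_name))

for callback in callbacks:
    callback()  # prints "task_a", then "task_b"
```
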
Mose Müller
8564df5adc fix: adds start_stop_task callbacks to lists 2023-10-25 16:23:06 +02:00
Mose Müller
a24eb928a8 Updating launch.json (nvim compatible) 2023-10-25 16:22:57 +02:00
Mose Müller
2713dad423 Merge pull request #51 from tiqi-group/49-wrong-types-for-non-standard-saved-variables
fix: loading of ColouredEnum and Quantity from settings file
2023-10-25 16:16:43 +02:00
Mose Müller
6ea4cf3eb7 adds test for loading units from json 2023-10-25 16:15:31 +02:00
Mose Müller
9054f05f30 fix: convert quantity dict to quantity when loading from json 2023-10-25 16:15:19 +02:00
Mose Müller
b790b6a6ca fix: adds ColouredEnum to STANDARD_TYPES 2023-10-25 10:47:15 +02:00
Mose Müller
22f832054e Updating logging message 2023-10-19 17:48:05 +02:00
Mose Müller
2e9ced4e5e Merge pull request #43 from tiqi-group/42-enhanced-signal-handling-for-asyncio-loop
Enhances signal handling, adds force exit capability
2023-10-19 11:14:06 +02:00
Mose Müller
b654c7d176 adds pytest-mock to python dependencies 2023-10-19 11:12:32 +02:00
Mose Müller
b5b2fb8c35 adding signal-handling test 2023-10-19 11:11:56 +02:00
Mose Müller
1bc2bb3605 Enhances signal handling, adds force exit capability 2023-10-19 10:59:00 +02:00
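
A generic sketch of the approach (not pydase's exact code): the first signal triggers a graceful shutdown, a second one forces the process to exit.

```python
import asyncio
import os
import signal


async def main() -> None:
    loop = asyncio.get_running_loop()
    shutdown_requested = False

    def handle_signal() -> None:
        nonlocal shutdown_requested
        if shutdown_requested:
            os._exit(1)  # second signal: force exit without cleanup
        shutdown_requested = True
        print("Shutting down gracefully (send the signal again to force exit)")
        for task in asyncio.all_tasks(loop):
            task.cancel()  # trigger graceful shutdown

    for sig in (signal.SIGINT, signal.SIGTERM):
        loop.add_signal_handler(sig, handle_signal)

    try:
        await asyncio.sleep(3600)  # stand-in for the server's main coroutine
    except asyncio.CancelledError:
        pass


if __name__ == "__main__":
    asyncio.run(main())
```
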
Mose Müller
0a77cc1f36 pytest coverage: do not omit logging.py anymore (after switch from loguru to logging) 2023-10-19 08:02:23 +02:00
Mose Müller
d334ec5284 Merge pull request #41 from tiqi-group/39-feat-add-customcss-option-to-pydaseserver
adds custom css option to pydase.Server
2023-10-17 17:04:30 +02:00
Mose Müller
d3a74a734a updating Readme 2023-10-17 13:13:13 +02:00
Mose Müller
43e0c72018 updating comments 2023-10-17 12:52:08 +02:00
Mose Müller
27b430333a ignoring flake8 error 2023-10-17 12:52:00 +02:00
Mose Müller
e25acb7e59 removing unused web server functions 2023-10-17 12:49:18 +02:00
Mose Müller
89f281bd3b Merge pull request #40 from tiqi-group/turn-of-frontend-notifications-by-default
turns off frontend notifications by default
2023-10-17 12:45:22 +02:00
Mose Müller
829e73e2e7 npm run build 2023-10-17 11:50:35 +02:00
Mose Müller
04b9976a3b turns off frontend notifications by default 2023-10-17 11:49:42 +02:00
Mose Müller
785ed92b45 adds link element to frontend header if the service exposes the /custom.css endpoint 2023-10-17 11:48:07 +02:00
Mose Müller
6e14837e15 adding custom.css endpoint to web server 2023-10-17 11:47:34 +02:00
Mose Müller
5ad15c1cae frontend: fix div ids 2023-10-17 11:45:50 +02:00
Mose Müller
c1f0b7b74d using logger instead of print statement 2023-10-16 17:29:00 +02:00
Mose Müller
5badd86d5a chore: formatting 2023-10-16 17:26:26 +02:00
Mose Müller
b5953f13f7 fix: updating uvicorn logger config 2023-10-16 17:24:51 +02:00
Mose Müller
a3c2672458 docs: updating readme 2023-10-16 17:17:39 +02:00
Mose Müller
7a78713388 Merge pull request #35 from tiqi-group/34-remove-loguru-dependency-and-use-std-logging
34 remove loguru dependency and use std logging
2023-10-16 17:16:33 +02:00
Mose Müller
8a8375735a extend simple logging example 2023-10-16 17:11:46 +02:00
Mose Müller
e61b2a4969 fix: pyright issue 2023-10-16 17:06:11 +02:00
Mose Müller
453076da86 removing loguru python dependency 2023-10-16 16:58:16 +02:00
Mose Müller
886b086180 docs: update Readme 2023-10-16 15:52:45 +02:00
Mose Müller
7b04298ead add logging tests 2023-10-16 15:52:09 +02:00
Mose Müller
c6a96ba6c0 update tests 2023-10-16 15:52:04 +02:00
Mose Müller
5d7a7c6bdb update logging module 2023-10-16 15:51:52 +02:00
Mose Müller
1241d7a128 using logging instead of loguru 2023-10-16 15:51:37 +02:00
Mose Müller
cdd60190a7 Merge pull request #33 from tiqi-group/32-configure-pint-to-autoconvert-offset-unit-to-base-unit
configures pint to autoconvert offset units to base units
2023-10-16 12:07:53 +02:00
Mose Müller
d144b6c42b configures pint to autoconvert offset units to base units 2023-10-16 12:05:07 +02:00
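
For illustration, this is the relevant pint option in isolation (generic pint usage, not pydase internals):

```python
import pint

# With autoconvert_offset_to_baseunit enabled, offset units such as degC are
# converted to their base unit (kelvin) when used in multiplication.
ureg = pint.UnitRegistry(autoconvert_offset_to_baseunit=True)

temperature = 25 * ureg.degC
print(temperature)  # -> 298.15 kelvin
```
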
Mose Müller
4abea8785c Merge pull request #30 from tiqi-group/29-protected-lists-crash-pydase
fix: removes notification for updating protected lists
2023-10-12 14:25:23 +02:00
Mose Müller
dbc975bd85 updating version 2023-10-12 14:24:46 +02:00
Mose Müller
b04ad0c6a3 fix: removes notification for updating protected lists 2023-10-12 14:12:48 +02:00
Mose Müller
48e8b7dbaf Update docs 2023-10-11 14:30:49 +02:00
Mose Müller
aa85f6453f update version to v0.2.0 2023-10-11 14:20:35 +02:00
Mose Müller
343354e0ee Update README.md 2023-10-11 14:17:40 +02:00
Mose Müller
b38bb05c69 Merge pull request #28 from tiqi-group/5-adding-status-component
5 adding coloured enum component
2023-10-11 14:15:34 +02:00
Mose Müller
a0dab630f9 Update README.md 2023-10-11 14:03:07 +02:00
Mose Müller
a9db7848f7 fix: pytest failed after moving from StrEnum to Enum 2023-10-11 14:03:07 +02:00
Mose Müller
a8b14180ad fix: using Enum instead of StrEnum (>=3.11 only) 2023-10-11 14:03:07 +02:00
Mose Müller
26a366842a frontend: npm run build 2023-10-11 14:03:07 +02:00
Mose Müller
b0e7de2d2c docs: updating Readme 2023-10-11 14:03:07 +02:00
Mose Müller
bbcba8b39f test: adding test for ColouredEnum component 2023-10-11 14:03:07 +02:00
Mose Müller
34e46e05ee feat: adding ColouredEnum component 2023-10-11 14:03:07 +02:00
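
A rough sketch of how such a component might be used (the enum members and colour values are made up; the import path is assumed):

```python
import pydase
from pydase.components import ColouredEnum  # import path assumed


class DeviceState(ColouredEnum):
    # the enum values are interpreted as colours in the frontend
    RUNNING = "green"
    STANDBY = "orange"
    FAULT = "red"


class MyService(pydase.DataService):
    def __init__(self) -> None:
        super().__init__()
        self.state = DeviceState.STANDBY
```
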
Mose Müller
93c2f5ab70 docs: updating mkdocs documentation
- adding user guide section
- removing "baselevel: 4"
2023-10-11 13:58:55 +02:00
Mose Müller
106ffbfc40 removing .python-version 2023-10-11 13:52:13 +02:00
121 changed files with 11505 additions and 9089 deletions


@@ -2,5 +2,3 @@
exclude_lines =
pragma: no cover
if TYPE_CHECKING:
omit =
src/pydase/utils/logging.py


@@ -1,8 +0,0 @@
[flake8]
ignore = E501,W503,FS003,F403,F405,E203
include = src
max-line-length = 88
max-doc-length = 88
max-complexity = 7
max-expression-complexity = 5.5
use_class_attributes_order_strict_mode=True

.github/ISSUE_TEMPLATE/bug_report.md (new file, 25 lines changed)

@@ -0,0 +1,25 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: 'bug'
assignees: ''
---
## Describe the bug
A clear and concise description of what the bug is.
## To Reproduce
Provide steps to reproduce the behaviour, including a minimal code snippet (if applicable):
```python
# Minimal code snippet that reproduces the error
```
## Expected behaviour
A clear and concise description of what you expected to happen.
## Screenshot/Video
If applicable, add visual content that helps explain your problem.
## Additional context
Add any other context about the problem here.


@@ -20,6 +20,9 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: chartboost/ruff-action@v1
with:
src: "./src"
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
@@ -28,14 +31,13 @@ jobs:
run: |
python -m pip install --upgrade pip
python -m pip install poetry
poetry install
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
poetry run flake8 src/pydase --count --show-source --statistics
poetry install --with dev
- name: Test with pytest
run: |
poetry run pytest
- name: Test with pyright
run: |
poetry run pyright src/pydase
poetry run pyright
- name: Test with mypy
run: |
poetry run mypy src

.gitignore (3 lines changed)

@@ -128,6 +128,9 @@ venv.bak/
.dmypy.json
dmypy.json
# ruff
.ruff_cache/
# Pyre type checker
.pyre/

.vscode/extensions.json (new file, 8 lines changed)

@@ -0,0 +1,8 @@
{
"recommendations": [
"charliermarsh.ruff",
"ms-python.python",
"ms-python.vscode-pylance",
"ms-python.mypy-type-checker"
]
}

.vscode/launch.json (9 lines changed)

@@ -1,7 +1,4 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
@@ -9,7 +6,7 @@
"type": "python",
"request": "launch",
"module": "foo",
"justMyCode": true,
"justMyCode": false,
"env": {
"ENVIRONMENT": "development"
}
@@ -19,7 +16,7 @@
"type": "python",
"request": "launch",
"module": "bar",
"justMyCode": true,
"justMyCode": false,
"env": {
"ENVIRONMENT": "development"
}
@@ -29,7 +26,7 @@
"request": "launch",
"name": "react: firefox",
"url": "http://localhost:3000",
"webRoot": "${workspaceFolder}/frontend",
"webRoot": "${workspaceFolder}/frontend"
}
]
}

27
.vscode/settings.json vendored
View File

@@ -1,25 +1,15 @@
{
"autoDocstring.docstringFormat": "google",
"autoDocstring.startOnNewLine": true,
"autoDocstring.generateDocstringOnEnter": true,
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll.eslint": true
},
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff",
"editor.rulers": [
88
],
"python.defaultInterpreterPath": ".venv/bin/python",
"python.formatting.provider": "black",
"python.linting.lintOnSave": true,
"python.linting.enabled": true,
"python.linting.flake8Enabled": true,
"python.linting.mypyEnabled": true,
"[python]": {
"editor.tabSize": 4,
"editor.detectIndentation": false,
"editor.codeActionsOnSave": {
"source.organizeImports": true
"source.organizeImports": "explicit",
"source.fixAll": "explicit"
}
},
"[yaml]": {
@@ -29,12 +19,11 @@
"[typescript][javascript][vue][typescriptreact]": {
"editor.tabSize": 2,
"editor.defaultFormatter": "rvest.vs-code-prettier-eslint",
"editor.formatOnPaste": false, // required
"editor.formatOnType": false, // required
"editor.formatOnSave": true, // optional
"editor.formatOnSaveMode": "file", // required to format on save
"editor.formatOnPaste": false,
"editor.formatOnType": false,
"editor.formatOnSaveMode": "file",
"editor.codeActionsOnSave": {
"source.fixAll.eslint": true
"source.fixAll.eslint": "explicit"
}
}
}

585
README.md
View File

@@ -17,10 +17,23 @@
- [Method Components](#method-components)
- [DataService Instances (Nested Classes)](#dataservice-instances-nested-classes)
- [Custom Components (`pydase.components`)](#custom-components-pydasecomponents)
- [`DeviceConnection`](#deviceconnection)
- [Customizing Connection Logic](#customizing-connection-logic)
- [Reconnection Interval](#reconnection-interval)
- [`Image`](#image)
- [`NumberSlider`](#numberslider)
- [`ColouredEnum`](#colouredenum)
- [Extending with New Components](#extending-with-new-components)
- [Understanding Service Persistence](#understanding-service-persistence)
- [Controlling Property State Loading with `@load_state`](#controlling-property-state-loading-with-load_state)
- [Understanding Tasks in pydase](#understanding-tasks-in-pydase)
- [Understanding Units in pydase](#understanding-units-in-pydase)
- [Configuring pydase via Environment Variables](#configuring-pydase-via-environment-variables)
- [Customizing the Web Interface](#customizing-the-web-interface)
- [Enhancing the Web Interface Style with Custom CSS](#enhancing-the-web-interface-style-with-custom-css)
- [Tailoring Frontend Component Layout](#tailoring-frontend-component-layout)
- [Specifying a Custom Frontend Source](#specifying-a-custom-frontend-source)
- [Logging in pydase](#logging-in-pydase)
- [Changing the Log Level](#changing-the-log-level)
- [Documentation](#documentation)
- [Contributing](#contributing)
@@ -29,19 +42,21 @@
## Features
<!-- no toc -->
* [Simple data service definition through class-based interface](#defining-a-dataService)
* [Integrated web interface for interactive access and control of your data service](#accessing-the-web-interface)
* [Support for `rpyc` connections, allowing for programmatic control and interaction with your service](#connecting-to-the-service-using-rpyc)
* [Component system bridging Python backend with frontend visual representation](#understanding-the-component-system)
* [Saving and restoring the service state for service persistence](#understanding-service-persistence)
* [Automated task management with built-in start/stop controls and optional autostart](#understanding-tasks-in-pydase)
* [Support for units](#understanding-units-in-pydase)
<!-- * Event-based callback functionality for real-time updates
* Support for additional servers for specific use-cases -->
- [Simple data service definition through class-based interface](#defining-a-dataService)
- [Integrated web interface for interactive access and control of your data service](#accessing-the-web-interface)
- [Support for `rpyc` connections, allowing for programmatic control and interaction with your service](#connecting-to-the-service-using-rpyc)
- [Component system bridging Python backend with frontend visual representation](#understanding-the-component-system)
- [Customizable styling for the web interface through user-defined CSS](#customizing-web-interface-style)
- [Saving and restoring the service state for service persistence](#understanding-service-persistence)
- [Automated task management with built-in start/stop controls and optional autostart](#understanding-tasks-in-pydase)
- [Support for units](#understanding-units-in-pydase)
<!-- Support for additional servers for specific use-cases -->
## Installation
<!--installation-start-->
Install pydase using [`poetry`](https://python-poetry.org/):
Install `pydase` using [`poetry`](https://python-poetry.org/):
```bash
poetry add pydase
@@ -52,10 +67,13 @@ or `pip`:
```bash
pip install pydase
```
<!--installation-end-->
## Usage
<!--usage-start-->
Using `pydase` involves three main steps: defining a `DataService` subclass, running the server, and then connecting to the service either programmatically using `rpyc` or through the web interface.
### Defining a DataService
@@ -66,6 +84,7 @@ Here's an example:
```python
from pydase import DataService, Server
from pydase.utils.decorators import frontend
class Device(DataService):
@@ -103,6 +122,7 @@ class Device(DataService):
# run code to set power state
self._power = value
@frontend
def reset(self) -> None:
self.current = 0.0
self.voltage = 0.0
@@ -129,7 +149,7 @@ if __name__ == "__main__":
Server(service).run()
```
This will start the server, making your Device service accessible via RPC and a web server at http://localhost:8001.
This will start the server, making your Device service accessible via RPC and a web server at [http://localhost:8001](http://localhost:8001).
### Accessing the Web Interface
@@ -156,14 +176,19 @@ print(client.voltage) # prints 5.0
```
In this example, replace `<ip_addr>` with the IP address of the machine where the service is running. After establishing a connection, you can interact with the service attributes as if they were local attributes.
<!--usage-end-->
## Understanding the Component System
<!-- Component User Guide Start -->
In `pydase`, components are fundamental building blocks that bridge the Python backend logic with frontend visual representation and interactions. This system can be understood based on the following categories:
### Built-in Type and Enum Components
`pydase` automatically maps standard Python data types to their corresponding frontend components:
- `str`: Translated into a `StringComponent` on the frontend.
- `int` and `float`: Manifested as the `NumberComponent`.
- `bool`: Rendered as a `ButtonComponent`.
@@ -171,10 +196,35 @@ In `pydase`, components are fundamental building blocks that bridge the Python b
- `enum.Enum`: Presented as an `EnumComponent`, facilitating dropdown selection.
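For illustration, here is a minimal sketch (attribute names and values are arbitrary and not taken from the `pydase` documentation) of a service whose attributes would be rendered with the components listed above:

```python
import enum

import pydase


class State(enum.Enum):
    IDLE = "idle"
    RUNNING = "running"


class MappingExample(pydase.DataService):
    def __init__(self) -> None:
        super().__init__()
        self.name = "Device 1"   # str        -> StringComponent
        self.voltage = 5.0       # float      -> NumberComponent
        self.enabled = False     # bool       -> ButtonComponent
        self.state = State.IDLE  # enum.Enum  -> EnumComponent
```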
### Method Components
Within the `DataService` class of `pydase`, only methods devoid of arguments can be represented in the frontend, classified into two distinct categories
Methods within the `DataService` class have frontend representations:
- Regular Methods: These are rendered as a `MethodComponent` in the frontend, allowing users to execute the method via an "execute" button.
- Asynchronous Methods: These are manifested as the `AsyncMethodComponent` with "start"/"stop" buttons to manage the execution of [tasks](#understanding-tasks-in-pydase).
1. [**Tasks**](#understanding-tasks-in-pydase): Argument-free asynchronous functions, identified within `pydase` as tasks, are inherently designed for frontend interaction. These tasks are automatically rendered with a start/stop button, allowing users to initiate or halt the task execution directly from the web interface.
2. **Synchronous Methods with `@frontend` Decorator**: Synchronous methods without arguments can also be presented in the frontend. For this, they have to be decorated with the `@frontend` decorator.
```python
import pydase
import pydase.components
import pydase.units as u
from pydase.utils.decorators import frontend
class MyService(pydase.DataService):
@frontend
def exposed_method(self) -> None:
...
async def my_task(self) -> None:
while True:
# ...
```
![Method Components](docs/images/method_components.png)
You can still define synchronous methods with arguments and call them using a Python client. However, decorating them with the `@frontend` decorator will raise a `FunctionDefinitionError`. Defining a task (an asynchronous function) with arguments will raise a `TaskDefinitionError`.
I decided against supporting function arguments for functions rendered in the frontend for the following reasons:
1. Feature Request Pitfall: supporting function arguments creates a bottomless pit of feature requests. As users encounter the limitations of the supported types, demands for extending support to more complex types would grow.
2. Complexity in Supported Argument Types: while simple types like `int`, `float`, `bool` and `str` could easily be supported, more complicated types are not (representation, (de-)serialization).
### DataService Instances (Nested Classes)
@@ -188,9 +238,9 @@ from pydase import DataService, Server
class Channel(DataService):
def __init__(self, channel_id: int) -> None:
super().__init__()
self._channel_id = channel_id
self._current = 0.0
super().__init__()
@property
def current(self) -> float:
@@ -206,9 +256,8 @@ class Channel(DataService):
class Device(DataService):
def __init__(self) -> None:
self.channels = [Channel(i) for i in range(2)]
super().__init__()
self.channels = [Channel(i) for i in range(2)]
if __name__ == "__main__":
@@ -221,6 +270,7 @@ if __name__ == "__main__":
**Note** that defining classes within `DataService` classes is not supported (see [this issue](https://github.com/tiqi-group/pydase/issues/16)).
### Custom Components (`pydase.components`)
The custom components in `pydase` have two main parts:
- A **Python Component Class** in the backend, implementing the logic needed to set, update, and manage the component's state and data.
@@ -228,12 +278,98 @@ The custom components in `pydase` have two main parts:
Below are the components available in the `pydase.components` module, accompanied by their Python usage:
- `Image`: This component allows users to display and update images within the application.
#### `DeviceConnection`
The `DeviceConnection` component acts as a base class within the `pydase` framework for managing device connections. It provides a structured approach to handle connections by offering a customizable `connect` method and a `connected` property. This setup facilitates the implementation of automatic reconnection logic, which periodically attempts reconnection whenever the connection is lost.
In the frontend, this class abstracts away the direct interaction with the `connect` method and the `connected` property. Instead, it showcases user-defined attributes, methods, and properties. When the `connected` status is `False`, the frontend displays an overlay that prompts manual reconnection through the `connect()` method. Successful reconnection removes the overlay.
```python
import pydase.components
import pydase.units as u
class Device(pydase.components.DeviceConnection):
def __init__(self) -> None:
super().__init__()
self._voltage = 10 * u.units.V
def connect(self) -> None:
if not self._connected:
self._connected = True
@property
def voltage(self) -> float:
return self._voltage
class MyService(pydase.DataService):
def __init__(self) -> None:
super().__init__()
self.device = Device()
if __name__ == "__main__":
service_instance = MyService()
pydase.Server(service_instance).run()
```
![DeviceConnection Component](docs/images/DeviceConnection_component.png)
##### Customizing Connection Logic
Users are encouraged to primarily override the `connect` method to tailor the connection process to their specific device. This method should adjust the `self._connected` attribute based on the outcome of the connection attempt:
```python
import pydase.components
class MyDeviceConnection(pydase.components.DeviceConnection):
def __init__(self) -> None:
super().__init__()
# Add any necessary initialization code here
def connect(self) -> None:
# Implement device-specific connection logic here
# Update self._connected to `True` if the connection is successful,
# or `False` if unsuccessful
...
```
Moreover, if the connection status requires additional logic, users can override the `connected` property:
```python
import pydase.components
class MyDeviceConnection(pydase.components.DeviceConnection):
def __init__(self) -> None:
super().__init__()
# Add any necessary initialization code here
def connect(self) -> None:
# Implement device-specific connection logic here
# Ensure self._connected reflects the connection status accurately
...
@property
def connected(self) -> bool:
# Implement custom logic to accurately report connection status
return self._connected
```
##### Reconnection Interval
The `DeviceConnection` component automatically executes a task that checks for device availability at a default interval of 10 seconds. This interval is adjustable by modifying the `_reconnection_wait_time` attribute on the class instance.
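A minimal sketch of adjusting this interval, assuming the attribute can simply be overridden in `__init__` (device-specific connection logic elided):

```python
import pydase.components


class MyDeviceConnection(pydase.components.DeviceConnection):
    def __init__(self) -> None:
        super().__init__()
        # Check device availability every 2 seconds instead of the default 10 seconds.
        self._reconnection_wait_time = 2.0

    def connect(self) -> None:
        # Implement device-specific connection logic here and update
        # self._connected to reflect the outcome.
        ...
```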
#### `Image`
This component provides a versatile interface for displaying images within the application. Users can update and manage images from various sources, including local paths, URLs, and even matplotlib figures.
The component offers methods to load images seamlessly, ensuring that visual content is easily integrated and displayed within the data service.
```python
import matplotlib.pyplot as plt
import numpy as np
import pydase
from pydase.components.image import Image
@@ -262,33 +398,227 @@ Below are the components available in the `pydase.components` module, accompanie
![Image Component](docs/images/Image_component.png)
- `NumberSlider`: An interactive slider component to adjust numerical values, including floats and integers, on the frontend while synchronizing the data with the backend in real-time.
#### `NumberSlider`
The `NumberSlider` component in the `pydase` package provides an interactive slider interface for adjusting numerical values on the frontend. It is designed to support both numbers and quantities and ensures that values adjusted on the frontend are synchronized with the backend.
To utilize the `NumberSlider`, users should implement a class that derives from `NumberSlider`. This class can then define the initial values, minimum and maximum limits, step sizes, and additional logic as needed.
Here's an example of how to implement and use a custom slider:
```python
import pydase
from pydase.components import NumberSlider
import pydase.components
class MySlider(pydase.components.NumberSlider):
def __init__(
self,
value: float = 0.0,
min_: float = 0.0,
max_: float = 100.0,
step_size: float = 1.0,
) -> None:
super().__init__(value, min_, max_, step_size)
@property
def min(self) -> float:
return self._min
@min.setter
def min(self, value: float) -> None:
self._min = value
@property
def max(self) -> float:
return self._max
@max.setter
def max(self, value: float) -> None:
self._max = value
@property
def step_size(self) -> float:
return self._step_size
@step_size.setter
def step_size(self, value: float) -> None:
self._step_size = value
@property
def value(self) -> float:
"""Slider value."""
return self._value
@value.setter
def value(self, value: float) -> None:
if value < self._min or value > self._max:
raise ValueError("Value is either below allowed min or above max value.")
self._value = value
class MyService(pydase.DataService):
slider = NumberSlider(value=3.5, min=0, max=10, step_size=0.1)
def __init__(self) -> None:
super().__init__()
self.voltage = MySlider()
if __name__ == "__main__":
service = MyService()
pydase.Server(service).run()
service_instance = MyService()
service_instance.voltage.value = 5
print(service_instance.voltage.value) # Output: 5
pydase.Server(service_instance).run()
```
In this example, `MySlider` overrides the `min`, `max`, `step_size`, and `value` properties. Users can make any of these properties read-only by omitting the corresponding setter method.
![Slider Component](docs/images/Slider_component.png)
- Accessing parent class resources in `NumberSlider`
In scenarios where you need the slider component to interact with or access resources from its parent class, you can achieve this by passing a callback function to it. This method avoids directly passing the entire parent class instance (`self`) and offers a more encapsulated approach. The callback function can be designed to utilize specific attributes or methods of the parent class, allowing the slider to perform actions or retrieve data in response to slider events.
Here's an illustrative example:
```python
from collections.abc import Callable
import pydase
import pydase.components
class MySlider(pydase.components.NumberSlider):
def __init__(
self,
value: float,
on_change: Callable[[float], None],
) -> None:
super().__init__(value=value)
self._on_change = on_change
# ... other properties ...
@property
def value(self) -> float:
return self._value
@value.setter
def value(self, new_value: float) -> None:
if new_value < self._min or new_value > self._max:
raise ValueError("Value is either below allowed min or above max value.")
self._value = new_value
self._on_change(new_value)
class MyService(pydase.DataService):
def __init__(self) -> None:
super().__init__()
self.voltage = MySlider(
5,
on_change=self.handle_voltage_change,
)
def handle_voltage_change(self, new_voltage: float) -> None:
print(f"Voltage changed to: {new_voltage}")
# Additional logic here
if __name__ == "__main__":
service_instance = MyService()
service_instance.voltage.value = 7  # Output: "Voltage changed to: 7"
pydase.Server(service_instance).run()
```
- Incorporating units in `NumberSlider`
The `NumberSlider` is capable of [displaying units](#understanding-units-in-pydase) alongside values, enhancing its usability in contexts where unit representation is crucial. When utilizing `pydase.units`, you can specify units for the slider's value, allowing the component to reflect these units in the frontend.
Here's how to implement a `NumberSlider` with unit display:
```python
import pydase
import pydase.components
import pydase.units as u
class MySlider(pydase.components.NumberSlider):
def __init__(
self,
value: u.Quantity = 0.0 * u.units.V,
) -> None:
super().__init__(value)
@property
def value(self) -> u.Quantity:
return self._value
@value.setter
def value(self, value: u.Quantity) -> None:
if value.m < self._min or value.m > self._max:
raise ValueError("Value is either below allowed min or above max value.")
self._value = value
class MyService(pydase.DataService):
def __init__(self) -> None:
super().__init__()
self.voltage = MySlider()
if __name__ == "__main__":
service_instance = MyService()
service_instance.voltage.value = 5 * u.units.V
print(service_instance.voltage.value) # Output: 5 V
pydase.Server(service_instance).run()
```
#### `ColouredEnum`
This component provides a way to visually represent different states or categories in a data service using colour-coded options. It behaves similarly to a standard `Enum`, but the values encode colours in a format understood by CSS. The colours can be defined using various methods like Hexadecimal, RGB, HSL, and more.
If the property associated with the `ColouredEnum` has a setter function, the keys of the enum will be rendered as a dropdown menu, allowing users to interact and select different options. Without a setter function, the selected key will simply be displayed as a coloured box with text inside, serving as a visual indicator.
```python
import pydase
import pydase.components as pyc
class MyStatus(pyc.ColouredEnum):
PENDING = "#FFA500" # Hexadecimal colour (Orange)
RUNNING = "#0000FF80" # Hexadecimal colour with transparency (Blue)
PAUSED = "rgb(169, 169, 169)" # RGB colour (Dark Gray)
RETRYING = "rgba(255, 255, 0, 0.3)" # RGB colour with transparency (Yellow)
COMPLETED = "hsl(120, 100%, 50%)" # HSL colour (Green)
FAILED = "hsla(0, 100%, 50%, 0.7)" # HSL colour with transparency (Red)
CANCELLED = "SlateGray" # Cross-browser colour name (Slate Gray)
class StatusTest(pydase.DataService):
_status = MyStatus.RUNNING
@property
def status(self) -> MyStatus:
return self._status
@status.setter
def status(self, value: MyStatus) -> None:
# do something ...
self._status = value
# Modifying or accessing the status value:
my_service = StatusTest()
my_service.status = MyStatus.FAILED
```
![ColouredEnum Component](docs/images/ColouredEnum_component.png)
#### Extending with New Components
Users can also extend the library by creating custom components. This involves defining the behavior on the Python backend and the visual representation on the frontend. For those looking to introduce new components, the [guide on adding components](https://pydase.readthedocs.io/en/latest/dev-guide/Adding_Components/) provides detailed steps on achieving this.
<!-- Component User Guide End -->
## Understanding Service Persistence
`pydase` allows you to easily persist the state of your service by saving it to a file. This is especially useful when you want to maintain the service's state across different runs.
To save the state of your service, pass a `filename` keyword argument to the `__init__` method of the `DataService` base class. If the file specified by `filename` does not exist, the service will create this file and store its state in it when the service is shut down. If the file already exists, the service will load the state from this file, setting the values of its attributes to the values stored in the file.
To save the state of your service, pass a `filename` keyword argument to the constructor of the `pydase.Server` class. If the file specified by `filename` does not exist, the state manager will create this file and store its state in it when the service is shut down. If the file already exists, the state manager will load the state from this file, setting the values of its attributes to the values stored in the file.
Here's an example:
@@ -296,29 +626,48 @@ Here's an example:
from pydase import DataService, Server
class Device(DataService):
def __init__(self, filename: str) -> None:
# ... your init code ...
# Pass the filename argument to the parent class
super().__init__(filename=filename)
# ... defining the Device class ...
if __name__ == "__main__":
service = Device("device_state.json")
Server(service).run()
service = Device()
Server(service, filename="device_state.json").run()
```
In this example, the state of the `Device` service will be saved to `device_state.json` when the service is shut down. If `device_state.json` exists when the service is started, the service will restore its state from this file.
In this example, the state of the `Device` service will be saved to `device_state.json` when the service is shut down. If `device_state.json` exists when the server is started, the state manager will restore the state of the service from this file.
Note: If the service class structure has changed since the last time its state was saved, only the attributes that have remained the same will be restored from the settings file.
### Controlling Property State Loading with `@load_state`
By default, the state manager only restores values for public attributes of your service. If you have properties that you want to control the loading for, you can use the `@load_state` decorator on your property setters. This indicates to the state manager that the value of the property should be loaded from the state file.
Here is how you can apply the `@load_state` decorator:
```python
from pydase import DataService
from pydase.data_service.state_manager import load_state
class Device(DataService):
_name = "Default Device Name"
@property
def name(self) -> str:
return self._name
@name.setter
@load_state
def name(self, value: str) -> None:
self._name = value
```
With the `@load_state` decorator applied to the `name` property setter, the state manager will load and apply the `name` property's value from the file storing the state upon server startup, assuming it exists.
Note: If the service class structure has changed since the last time its state was saved, only the attributes and properties decorated with `@load_state` that have remained the same will be restored from the settings file.
## Understanding Tasks in pydase
In `pydase`, a task is defined as an asynchronous function contained in a class that inherits from `DataService`. These tasks usually contain a while loop and are designed to carry out periodic functions.
In `pydase`, a task is defined as an asynchronous function without arguments contained in a class that inherits from `DataService`. These tasks usually contain a while loop and are designed to carry out periodic functions.
For example, a task might be used to periodically read sensor data, update a database, or perform any other recurring job. The core feature of `pydase` is its ability to automatically generate start and stop functions for these tasks. This allows you to control task execution via both the frontend and an `rpyc` client, giving you flexible and powerful control over your service's operation.
For example, a task might be used to periodically read sensor data, update a database, or perform any other recurring job. One core feature of `pydase` is its ability to automatically generate start and stop functions for these tasks. This allows you to control task execution via both the frontend and python clients, giving you flexible and powerful control over your service's operation.
Another powerful feature of `pydase` is its ability to automatically start tasks upon initialization of the service. By specifying the tasks and their arguments in the `_autostart_tasks` dictionary in your service class's `__init__` method, `pydase` will automatically start these tasks when the server is started. Here's an example:
@@ -327,9 +676,9 @@ from pydase import DataService, Server
class SensorService(DataService):
def __init__(self):
self.readout_frequency = 1.0
self._autostart_tasks = {"read_sensor_data": ()} # args passed to the function go there
super().__init__()
self.readout_frequency = 1.0
self._autostart_tasks["read_sensor_data"] = ()
def _process_data(self, data: ...) -> None:
...
@@ -349,22 +698,22 @@ if __name__ == "__main__":
Server(service).run()
```
In this example, `read_sensor_data` is a task that continuously reads data from a sensor. The readout frequency can be updated using the `readout_frequency` attribute.
By listing it in the `_autostart_tasks` dictionary, it will automatically start running when `Server(service).run()` is executed.
As with all tasks, `pydase` will also generate `start_read_sensor_data` and `stop_read_sensor_data` methods, which can be called to manually start and stop the data reading task.
In this example, `read_sensor_data` is a task that continuously reads data from a sensor. By adding it to the `_autostart_tasks` dictionary, it will automatically start running when `Server(service).run()` is executed.
As with all tasks, `pydase` will generate `start_read_sensor_data` and `stop_read_sensor_data` methods, which can be called to manually start and stop the data reading task. The readout frequency can be updated using the `readout_frequency` attribute.
## Understanding Units in pydase
`pydase` integrates with the [`pint`](https://pint.readthedocs.io/en/stable/) package to allow you to work with physical quantities within your service. This enables you to define attributes with units, making your service more expressive and ensuring consistency in the handling of physical quantities.
You can define quantities in your `DataService` subclass using `pydase`'s `units` functionality. These quantities can be set and accessed like regular attributes, and `pydase` will automatically handle the conversion between floats and quantities with units.
You can define quantities in your `DataService` subclass using `pydase`'s `units` functionality.
Here's an example:
```python
from typing import Any
from pydase import DataService, Server
import pydase.units as u
from pydase import DataService, Server
class ServiceClass(DataService):
@@ -376,17 +725,15 @@ class ServiceClass(DataService):
return self._current
@current.setter
def current(self, value: Any) -> None:
def current(self, value: u.Quantity) -> None:
self._current = value
if __name__ == "__main__":
service = ServiceClass()
# You can just set floats to the Quantity objects. The DataService __setattr__ will
# automatically convert this
service.voltage = 10.0
service.current = 1.5
service.voltage = 10.0 * u.units.V
service.current = 1.5 * u.units.mA
Server(service).run()
```
@@ -419,27 +766,141 @@ if __name__ == "__main__":
For more information about what you can do with the units, please consult the documentation of [`pint`](https://pint.readthedocs.io/en/stable/).
## Changing the Log Level
## Configuring pydase via Environment Variables
You can change the log level of loguru by either
Configuring `pydase` through environment variables keeps configuration separate from code: the same service can be adapted to different environments without code changes, deployment becomes simpler, and settings can be managed centrally.
1. (RECOMMENDED) setting the `ENVIRONMENT` environment variable to "production" or "development"
`pydase` offers various configurable options:
- **`ENVIRONMENT`**: Sets the operation mode to either "development" or "production". Affects logging behaviour (see [logging section](#logging-in-pydase)).
- **`SERVICE_CONFIG_DIR`**: Specifies the directory for service configuration files, like `web_settings.json`. This directory can also be used to hold user-defined configuration files. Default is the `config` folder in the service root folder. The variable can be accessed through:
```python
import pydase.config
pydase.config.ServiceConfig().config_dir
```
- **`SERVICE_WEB_PORT`**: Defines the port number for the web server. This has to be different for each service running on the same host. Default is 8001.
- **`SERVICE_RPC_PORT`**: Defines the port number for the rpc server. This has to be different for each service running on the same host. Default is 18871.
- **`GENERATE_WEB_SETTINGS`**: When set to true, generates / updates the `web_settings.json` file. If the file already exists, only new entries are appended.
Some of those settings can also be altered directly in code when initializing the server:
```python
import pathlib
from pydase import Server
from your_service_module import YourService
server = Server(
YourService(),
web_port=8080,
rpc_port=18880,
config_dir=pathlib.Path("other_config_dir"), # note that you need to provide an argument of type pathlib.Path
generate_web_settings=True
).run()
```
## Customizing the Web Interface
### Enhancing the Web Interface Style with Custom CSS
`pydase` allows you to enhance the user experience by customizing the web interface's appearance. You can apply your own styles globally across the web interface by passing a custom CSS file to the server during initialization.
Here's how you can use this feature:
1. Prepare your custom CSS file with the desired styles.
2. When initializing your server, use the `css` parameter of the `Server` class to specify the path to your custom CSS file.
```python
from pydase import Server, DataService
class MyService(DataService):
# ... your service definition ...
if __name__ == "__main__":
service = MyService()
server = Server(service, css="path/to/your/custom.css").run()
```
This will apply the styles defined in `custom.css` to the web interface, allowing you to maintain branding consistency or improve visual accessibility.
Please ensure that the CSS file path is accessible from the server's running location. Relative or absolute paths can be used depending on your setup.
### Tailoring Frontend Component Layout
`pydase` enables users to customize the frontend layout via the `web_settings.json` file. Each key in the file corresponds to the full access path of public attributes, properties, and methods of the exposed service, using dot-notation.
- **Custom Display Names**: Modify the `"displayName"` value in the file to change how each component appears in the frontend.
- **Control Component Visibility**: Utilize the `"display"` key-value pair to control whether a component is rendered in the frontend. Set the value to `true` to make the component visible or `false` to hide it.
<!-- - **Adjustable Component Order**: The `"displayOrder"` values determine the order of components. Alter these values to rearrange the components as desired. -->
The `web_settings.json` file will be stored in the directory specified by `SERVICE_CONFIG_DIR`. You can generate a `web_settings.json` file by setting the `GENERATE_WEB_SETTINGS` to `True`. For more information, see the [configuration section](#configuring-pydase-via-environment-variables).
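For orientation, a hypothetical entry in such a file could pair an access path with the keys described above. The snippet below shows this as a Python dict purely for illustration (the access path and display name are made up; in the actual JSON file, booleans are lowercase `true`/`false` and the exact layout may differ):

```python
# Hypothetical web_settings.json content, written as a Python dict for illustration.
# Keys are full access paths (dot-notation) of the exposed service members.
web_settings = {
    "device.voltage": {
        "displayName": "Voltage",  # label rendered in the frontend
        "display": True,           # set to False to hide the component
    },
}
```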
### Specifying a Custom Frontend Source
To further personalize your web interface, you can provide `pydase` with a custom frontend GUI. To do so, pass the `frontend_src` keyword argument to `pydase.Server`:
```python
from pathlib import Path
import pydase
class MyService(pydase.DataService):
# Service definition
if __name__ == "__main__":
service = MyService()
pydase.Server(
service,
frontend_src=Path("path/to/your/frontend/directory"),
).run()
```
## Logging in pydase
The `pydase` library organizes its loggers on a per-module basis, mirroring the Python package hierarchy. This structured approach allows for granular control over logging levels and behaviour across different parts of the library.
### Changing the Log Level
You have two primary ways to adjust the log levels in `pydase`:
1. directly targeting `pydase` loggers
You can set the log level for any `pydase` logger directly in your code. This method is useful for fine-tuning logging levels for specific modules within `pydase`. For instance, if you want to change the log level of the main `pydase` logger or target a submodule like `pydase.data_service`, you can do so as follows:
```python
# <your_script.py>
import logging
# Set the log level for the main pydase logger
logging.getLogger("pydase").setLevel(logging.INFO)
# Optionally, target a specific submodule logger
# logging.getLogger("pydase.data_service").setLevel(logging.DEBUG)
# Your logger for the current script
logger = logging.getLogger(__name__)
logger.info("My info message.")
```
This approach allows for specific control over different parts of the `pydase` library, depending on your logging needs.
2. using the `ENVIRONMENT` environment variable
For a more global setting that affects the entire `pydase` library, you can utilize the `ENVIRONMENT` environment variable. Setting this variable to "production" will configure all `pydase` loggers to only log messages of level "INFO" and above, filtering out more verbose logging. This is particularly useful for production environments where excessive logging can be overwhelming or unnecessary.
```bash
ENVIRONMENT="production" python -m <module_using_pydase>
```
The production environment will only log messages above "INFO", the development environment (default) logs everything above "DEBUG".
In the absence of this setting, the default behavior is to log everything of level "DEBUG" and above, suitable for development environments where more detailed logs are beneficial.
2. calling the `pydase.utils.logging.setup_logging` function with the desired log level
```python
# <your_script.py>
from pydase.utils.logging import setup_logging
setup_logging("INFO")
```
**Note**: It is recommended to avoid calling the `pydase.utils.logging.setup_logging` function directly, as this may result in duplicated logging messages.
## Documentation

View File

@@ -18,7 +18,7 @@ For example, for a `Image` component, create a file named `image.py`.
### Step 2: Define the Backend Class
Within the newly created file, define a Python class representing the component. This class should inherit from `DataService` and contains the attributes that the frontend needs to render the component. Every public attribute defined in this class will synchronise across the clients. It can also contain methods which can be used to interact with the component from the backend.
Within the newly created file, define a Python class representing the component. This class should inherit from `DataService` and contain the attributes that the frontend needs to render the component. Every public attribute defined in this class will synchronise across the clients. It can also contain (public) methods that let users interact with the component from the backend (or Python clients).
For the `Image` component, the class may look like this:
@@ -31,21 +31,25 @@ from pydase.data_service.data_service import DataService
class Image(DataService):
def __init__(
self,
image_representation: bytes = b"",
) -> None:
self.image_representation = image_representation
super().__init__()
self._value: str = ""
self._format: str = ""
# need to decode the bytes
def __setattr__(self, __name: str, __value: Any) -> None:
if __name == "value":
if isinstance(__value, bytes):
__value = __value.decode()
return super().__setattr__(__name, __value)
@property
def value(self) -> str:
return self._value
@property
def format(self) -> str:
return self._format
def load_from_path(self, path: Path | str) -> None:
# changing self._value and self._format
...
```
So, changing the `image_representation` will push the updated value to the browsers connected to the service.
So, calling `load_from_path` will push the updated value and format to the browser clients connected to the service.
### Step 3: Register the Backend Class
@@ -85,10 +89,11 @@ def test_Image(capsys: CaptureFixture) -> None:
class ServiceClass(DataService):
image = Image()
service = ServiceClass()
# ...
```
service_instance = ServiceClass()
service_instance.image.load_from_path("<path/to/image>.png")
assert service_instance.image.format == "PNG"
```
## Adding a Frontend Component to `pydase`
@@ -107,29 +112,41 @@ Write the React component code, following the structure and patterns used in exi
For example, for the `Image` component, a template could look like this:
```tsx
import { emit_update } from '../socket'; // use this when your component should update values in the backend
import { DocStringComponent } from './DocStringComponent';
import React, { useEffect, useRef, useState } from 'react';
import { Card, Collapse, Image } from 'react-bootstrap';
import { DocStringComponent } from './DocStringComponent';
import { ChevronDown, ChevronRight } from 'react-bootstrap-icons';
import { LevelName } from './NotificationsComponent';
interface ImageComponentProps {
name: string;
parentPath: string;
readOnly: boolean;
docString: string;
addNotification: (string) => void;
// Define your component specific props here
type ImageComponentProps = {
name: string; // needed to create the fullAccessPath
parentPath: string; // needed to create the fullAccessPath
readOnly: boolean; // component changeable through frontend?
docString: string; // contains docstring of your component
displayName: string; // name defined in the web_settings.json
id: string; // unique identifier - created from fullAccessPath
addNotification: (message: string, levelname?: LevelName) => void;
changeCallback?: ( // function used to communicate changes to the backend
value: unknown,
attributeName?: string,
prefix?: string,
callback?: (ack: unknown) => void
) => void;
// component-specific properties
value: string;
format: string;
}
};
export const ImageComponent = React.memo((props: ImageComponentProps) => {
const { name, parentPath, value, docString, format, addNotification } = props;
const { value, docString, format, addNotification, displayName, id } = props;
const renderCount = useRef(0);
const [open, setOpen] = useState(true); // add this if you want to expand/collapse your component
const fullAccessPath = [props.parentPath, props.name]
.filter((element) => element)
.join('.');
// Your component logic here
useEffect(() => {
renderCount.current++;
@@ -137,13 +154,11 @@ export const ImageComponent = React.memo((props: ImageComponentProps) => {
// This will trigger a notification if notifications are enabled.
useEffect(() => {
addNotification(`${parentPath}.${name} changed to ${value}.`);
addNotification(`${fullAccessPath} changed.`);
}, [props.value]);
// Your component logic here
return (
<div className={'imageComponent'} id={parentPath.concat('.' + name)}>
<div className="component imageComponent" id={id}>
{/* Add the Card and Collapse components here if you want to be able to expand and
collapse your component. */}
<Card>
@@ -151,14 +166,15 @@ export const ImageComponent = React.memo((props: ImageComponentProps) => {
onClick={() => setOpen(!open)}
style={{ cursor: 'pointer' }} // Change cursor style on hover
>
{name} {open ? <ChevronDown /> : <ChevronRight />}
{displayName}
<DocStringComponent docString={docString} />
{open ? <ChevronDown /> : <ChevronRight />}
</Card.Header>
<Collapse in={open}>
<Card.Body>
{process.env.NODE_ENV === 'development' && (
<p>Render count: {renderCount.current}</p>
)}
<DocStringComponent docString={docString} />
{/* Your component TSX here */}
</Card.Body>
</Collapse>
@@ -170,52 +186,98 @@ export const ImageComponent = React.memo((props: ImageComponentProps) => {
### Step 3: Emitting Updates to the Backend
Often, React components in the frontend will need to send updates to the backend, especially when user interactions result in a change of state or data. In `pydase`, we use `socketio` to seamlessly communicate these changes. Here's a detailed guide on how to emit update events from your frontend component:
React components in the frontend often need to send updates to the backend, particularly when user interactions modify the component's state or data. In `pydase`, we use `socketio` for communicating these changes.<br>
There are two different events a component might want to trigger: updating an attribute or triggering a method. Below is a guide on how to emit these events from your frontend component:
1. **Setting Up Emission**: Ensure you've imported the required functions and methods for emission. The main function we'll use for this is `emit_update` from the `socket` module:
1. **Updating Attributes**
Updating the value of an attribute or property in the backend is a very common requirement. However, we want to define components in a reusable way, i.e. they can be linked to the backend but also be used without emitting change events.<br>
This is why we pass a `changeCallback` function as a prop to the component which it can use to communicate changes. If no function is passed, the component can be used in forms, for example.
The `changeCallback` function takes the following arguments:
- `value`: the new value for the attribute, which must match the backend attribute type.
- `attributeName`: the name of the attribute within the `DataService` instance to update. Defaults to the `name` prop of the component.
- `prefix`: the access path for the parent object of the attribute to be updated. Defaults to the `parentPath` prop of the component.
- `callback`: the function that will be called when the server sends an acknowledgement. Defaults to `undefined`
For illustration, take the `ButtonComponent`. When the button state changes, we want to send this update to the backend:
```tsx
import { emit_update } from '../socket';
```
// file: frontend/src/components/ButtonComponent.tsx
// ... (import statements)
2. **Understanding the Emission Parameters**:
When emitting an update, we send three main pieces of data:
- `parentPath`: This is the access path for the parent object of the attribute to be updated. This forms the basis to create the full access path for the attribute. For instance, for the attribute access path `attr1.list_attr[0].attr2`, `attr1.list_attr[0]` would be the `parentPath`.
- `name`: This represents the name of the attribute to be updated within the `DataService` instance. If the attribute is part of a nested structure, this would be the name of the attribute in the last nested object. So, for `attr1.list_attr[0].attr2`, `attr2` would be the name.
- `value`: This is the new value intended for the attribute. Ensure that the type of this value matches the type of the attribute in the backend.
3. **Implementing the Emission**:
To illustrate the emission process, let's consider the `ButtonComponent`. When the button state changes, we want to send this update to the backend:
```tsx
// ... (other imports)
type ButtonComponentProps = {
// ...
changeCallback?: (
value: unknown,
attributeName?: string,
prefix?: string,
callback?: (ack: unknown) => void
) => void;
};
export const ButtonComponent = React.memo((props: ButtonComponentProps) => {
const {
// ...
const { name, parentPath, value } = props;
changeCallback = () => {},
} = props;
const setChecked = (checked: boolean) => {
emit_update(name, parentPath, checked);
changeCallback(checked);
};
return (
<ToggleButton
checked={value}
value={parentPath}
// ... other props
onChange={(e) => setChecked(e.currentTarget.checked)}>
<p>{name}</p>
{/* component TSX */}
</ToggleButton>
);
});
```
In this example, whenever the button's checked state changes (`onChange` event), we invoke the `setChecked` method, which in turn emits the new state to the backend using `emit_update`.
In this example, whenever the button's checked state changes (`onChange` event), we invoke the `setChecked` method, which in turn emits the new state to the backend using `changeCallback`.
2. **Triggering Methods**
To trigger a method through your component, you can either use the `MethodComponent` (which will render a button in the frontend) or use the low-level `runMethod` function. Its parameters differ slightly from those of the `changeCallback` function:
- `name`: the name of the method to be executed in the backend.
- `parentPath`: the access path to the object containing the method.
- `kwargs`: a dictionary of keyword arguments that the method requires.
To see how to use the `MethodComponent` in your component, have a look at the `DeviceConnection.tsx` file. Here is an example that demonstrates the usage of the `runMethod` function (also, have a look at the `MethodComponent.tsx` file):
```tsx
import { runMethod } from '../socket';
// ... (other imports)
type ComponentProps = {
name: string;
parentPath: string;
// ...
};
export const Component = React.memo((props: ComponentProps) => {
const {
name,
parentPath,
// ...
} = props;
// ...
const someFunction = () => {
// ...
runMethod(name, parentPath, {});
};
return (
{/* component TSX */}
);
});
```
### Step 4: Add the New Component to the GenericComponent
@@ -262,15 +324,17 @@ Inside the `GenericComponent` function, add a new conditional branch to render t
<ImageComponent
name={name}
parentPath={parentPath}
readOnly={attribute.readonly}
docString={attribute.doc}
docString={attribute.value['value'].doc}
displayName={displayName}
id={id}
addNotification={addNotification}
changeCallback={changeCallback}
// Add any other specific props for the ImageComponent here
value={attribute.value['value']['value'] as string}
format={attribute.value['format']['value'] as string}
/>
);
} else {
} else if (...) {
// other code
```
@@ -285,12 +349,15 @@ For example, updating an `Image` component corresponds to setting a very long st
To create a custom notification message, you can update the message passed to the `addNotification` method in the `useEffect` hook in the component file. For the `ImageComponent`, this could look like this:
```tsx
const fullAccessPath = [parentPath, name].filter((element) => element).join('.');
useEffect(() => {
addNotification(`${parentPath}.${name} changed.`);
addNotification(`${fullAccessPath} changed.`);
}, [props.value]);
```
However, you might want to use the `addNotification` at different places. For an example, see the [MethodComponent](../../frontend/src/components/MethodComponent.tsx).
However, you might want to use the `addNotification` at different places. For an example, see the `MethodComponent`.
**Note**: you can specify the notification level by passing a string of type `LevelName` (one of 'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'). The default value is 'DEBUG'.
### Step 6: Write Tests for the Component (TODO)

View File

@@ -0,0 +1,27 @@
# Observer Pattern Implementation in Pydase
## Overview
The Observer Pattern is a fundamental design pattern in the `pydase` package, serving as the central communication mechanism for state updates to clients connected to a service.
## How it Works
### The Observable Class
The `Observable` class is at the core of the pattern. It maintains a list of observers and is responsible for notifying them about state changes. It does so by overriding the following methods:
- `__setattr__`: This function emits a notification before and after a new value is set. These two notifications are important to track which attributes are being set to avoid endless recursion (e.g. when accessing a property within another property). Moreover, when setting an attribute to another observable, the former class will add itself as an observer to the latter class, ensuring that nested classes are properly observed.
- `__getattribute__`: This function notifies the observers when a property getter is called, allowing for monitoring state changes in remote devices, as opposed to local instance attributes.
### Custom Collection Classes
To handle collections (like lists and dictionaries), the `Observable` class converts them into custom collection classes `_ObservableList` and `_ObservableDict` that notify observers of any changes in their state. For this, they have to override the methods changing the state, e.g., `__setitem__` or `append` for lists.
### The Observer Class
The `Observer` is the final element in the chain of observers. The notifications of attribute changes it receives include the full access path (in dot-notation) and the new value. It implements logic to handle state changes, like caching, error logging for type changes, etc. This can be extended by custom notification callbacks (implemented using `add_notification_callback` in `DataServiceObserver`). This enables the user to perform specific actions in response to changes. In `pydase`, the web server adds an additional notification callback that emits the websocket events (`sio_callback`).
Furthermore, the `DataServiceObserver` implements logic to reload the values of properties when an attribute change occurs that a property depends on.
- **Dynamic Inspection**: The observer dynamically inspects the observable object (recursively) to create a mapping of properties and their dependencies. This mapping is constructed based on the class or instance attributes used within the source code of the property getters.
- **Dependency Management**: When a change in an attribute occurs, `DataServiceObserver` updates any properties that depend on this attribute. This ensures that the overall state remains consistent and up-to-date, especially in complex scenarios where properties depend on other instance attributes or properties.
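To make the mechanism above concrete, here is a minimal, self-contained toy sketch of the notification flow. It is not the `pydase` implementation (which additionally emits pre/post notifications, wraps collections, builds full access paths for nested objects, and tracks property dependencies); all class names below are made up for illustration:

```python
class ToyObservable:
    """Notifies registered observers whenever an attribute is set."""

    def __init__(self) -> None:
        # Bypass the overridden __setattr__ while the observer list does not exist yet.
        super().__setattr__("_observers", [])

    def add_observer(self, observer: "ToyObserver") -> None:
        self._observers.append(observer)

    def __setattr__(self, name: str, value: object) -> None:
        super().__setattr__(name, value)
        for observer in self._observers:
            observer.on_change(full_access_path=name, value=value)


class ToyObserver:
    """Receives change notifications as (full access path, new value) pairs."""

    def on_change(self, full_access_path: str, value: object) -> None:
        print(f"{full_access_path} changed to {value}")


obs = ToyObservable()
obs.add_observer(ToyObserver())
obs.voltage = 5.0  # prints "voltage changed to 5.0"
```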

8 binary image files changed (previews not shown).

View File

@@ -0,0 +1,6 @@
# Components Guide
{%
include-markdown "../../README.md"
start="<!-- Component User Guide Start -->"
end="<!-- Component User Guide End -->"
%}

View File

@@ -7,12 +7,10 @@
],
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
"prettier"
],
"rules": {
"no-console": 1, // Means warning
"prettier/prettier": 2 // Means error }
"prettier/prettier": "error"
}
}

File diff suppressed because it is too large

View File

@@ -5,6 +5,7 @@
"dependencies": {
"@emotion/react": "^11.11.1",
"@emotion/styled": "^11.11.0",
"@fsouza/prettierd": "^0.25.1",
"@mui/material": "^5.14.1",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "^13.4.0",
@@ -46,9 +47,12 @@
"@types/node": "^20.0.0",
"@types/react": "^18.0.0",
"@types/react-dom": "^18.0.0",
"eslint-config-prettier": "^8.8.0",
"eslint-plugin-prettier": "^5.0.0",
"prettier": "^3.0.0",
"@babel/plugin-proposal-private-property-in-object": "7.21.11"
"@typescript-eslint/eslint-plugin": "^6.11.0",
"@typescript-eslint/parser": "^6.9.0",
"eslint": "^8.52.0",
"eslint-config-prettier": "^9.0.0",
"eslint-plugin-prettier": "^5.0.1",
"prettier": "^3.0.3",
"typescript": "^4.9.0"
}
}

View File

@@ -1,25 +1,52 @@
body {
min-width: 576px;
max-width: 1200px;
max-width: 2000px;
}
input.instantUpdate {
background-color: rgba(255, 0, 0, 0.1);
}
.numberComponentButton {
padding: 0.15em 6px !important;
font-size: 0.70rem !important;
}
.navbarOffset {
padding-top: 60px !important;
right: 20;
}
/* .toastContainer {
position: fixed;
} */
.notificationToast {
.toastContainer {
position: fixed !important;
padding: 5px;
}
.debugToast,
.infoToast {
background-color: rgba(114, 214, 253, 0.5) !important;
}
.exceptionToast {
.warningToast {
background-color: rgba(255, 181, 44, 0.603) !important;
}
.errorToast,
.criticalToast {
background-color: rgba(216, 41, 18, 0.678) !important;
}
.component {
position: relative;
float: left !important;
padding: 5px !important;
z-index: 1;
}
.dataServiceComponent {
width: 100%;
}
.deviceConnectionComponent {
position: relative;
float: left !important;
width: 100%;
z-index: 1;
}
.overlayContent {
position: absolute;
inset: 5px; /* (see https://developer.mozilla.org/en-US/docs/Web/CSS/inset) */
background: rgba(155, 155, 155, 0.75);
z-index: 2;
display: flex;
justify-content: center;
align-items: center;
flex-direction: column; /* Stack children vertically */
border-radius: var(--bs-border-radius);
border: var(--bs-border-width) solid var(--bs-border-color-translucent)
}

View File

@@ -1,204 +1,149 @@
import { useCallback, useEffect, useReducer, useRef, useState } from 'react';
import { useCallback, useEffect, useReducer, useState } from 'react';
import { Navbar, Form, Offcanvas, Container } from 'react-bootstrap';
import { hostname, port, socket } from './socket';
import {
DataServiceComponent,
DataServiceJSON
} from './components/DataServiceComponent';
import './App.css';
import { Notifications } from './components/NotificationsComponent';
import {
Notifications,
Notification,
LevelName
} from './components/NotificationsComponent';
import { ConnectionToast } from './components/ConnectionToast';
import { setNestedValueByPath, State } from './utils/stateUtils';
import { WebSettingsContext, WebSetting } from './WebSettings';
import { SerializedValue, GenericComponent } from './components/GenericComponent';
type ValueType = boolean | string | number | object;
type State = DataServiceJSON | null;
type Action =
| { type: 'SET_DATA'; data: DataServiceJSON }
| { type: 'UPDATE_ATTRIBUTE'; parentPath: string; name: string; value: ValueType };
| { type: 'SET_DATA'; data: State }
| {
type: 'UPDATE_ATTRIBUTE';
fullAccessPath: string;
newValue: SerializedValue;
};
type UpdateMessage = {
data: { parent_path: string; name: string; value: object };
data: { full_access_path: string; value: SerializedValue };
};
type ExceptionMessage = {
data: { exception: string; type: string };
type LogMessage = {
levelname: LevelName;
message: string;
};
/**
* A function to update a specific property in a deeply nested object.
* The property to be updated is specified by a path array.
*
* Each path element can be a regular object key or an array index of the
* form "attribute[index]", where "attribute" is the key of the array in
* the object and "index" is the index of the element in the array.
*
* For array indices, the element at the specified index in the array is
* updated.
*
* If the property to be updated is an object or an array, it is updated
* recursively.
*
* @param {Array<string>} path - An array where each element is a key in the object,
* forming a path to the property to be updated.
* @param {object} obj - The object to be updated.
* @param {object} value - The new value for the property specified by the path.
* @return {object} - A new object with the specified property updated.
*/
function updateNestedObject(path: Array<string>, obj: object, value: ValueType) {
// Base case: If the path is empty, return the new value.
// This means we've reached the nested property to be updated.
if (path.length === 0) {
return value;
}
// Recursive case: If the path is not empty, split it into the first key and the rest
// of the path.
const [first, ...rest] = path;
// Check if 'first' is an array index.
const indexMatch = first.match(/^(\w+)\[(\d+)\]$/);
// If 'first' is an array index of the form "attribute[index]", then update the
// element at the specified index in the array. Otherwise, update the property
// specified by 'first' in the object.
if (indexMatch) {
const attribute = indexMatch[1];
const index = parseInt(indexMatch[2]);
if (Array.isArray(obj[attribute]?.value)) {
return {
...obj,
[attribute]: {
...obj[attribute],
value: obj[attribute].value.map((item, i) =>
i === index
? {
...item,
value: updateNestedObject(rest, item.value || {}, value)
}
: item
)
}
};
} else {
throw new Error(
`Expected ${attribute}.value to be an array, but received ${typeof obj[
attribute
]?.value}`
);
}
} else {
return {
...obj,
[first]: {
...obj[first],
value: updateNestedObject(rest, obj[first]?.value || {}, value)
}
};
}
}
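As an illustrative sketch only (the nested state below is invented), the removed helper would be called with the path split into segments, where an "attribute[index]" segment addresses an element of a serialized list:
// Hypothetical serialized state holding wavemeter.channels[0].frequency
const exampleState = {
  wavemeter: {
    value: {
      channels: {
        value: [{ value: { frequency: { value: 432.1 } } }]
      }
    }
  }
};
// Returns a new object in which ...channels.value[0].value.frequency.value === 432.2
const updated = updateNestedObject(
  ['wavemeter', 'channels[0]', 'frequency'],
  exampleState,
  432.2
);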
const reducer = (state: State, action: Action): State => {
switch (action.type) {
case 'SET_DATA':
return action.data;
case 'UPDATE_ATTRIBUTE': {
const path = action.parentPath.split('.').slice(1).concat(action.name);
return updateNestedObject(path, state, action.value);
if (state === null) {
return null;
}
return {
...state,
value: setNestedValueByPath(state.value, action.fullAccessPath, action.newValue)
};
}
default:
throw new Error();
}
};
const App = () => {
const [state, dispatch] = useReducer(reducer, null);
const stateRef = useRef(state); // Declare a reference to hold the current state
const [webSettings, setWebSettings] = useState<Record<string, WebSetting>>({});
const [isInstantUpdate, setIsInstantUpdate] = useState(false);
const [showSettings, setShowSettings] = useState(false);
const [showNotification, setShowNotification] = useState(true);
const [notifications, setNotifications] = useState([]);
const [exceptions, setExceptions] = useState([]);
// Keep the state reference up to date
useEffect(() => {
stateRef.current = state;
}, [state]);
const [showNotification, setShowNotification] = useState(false);
const [notifications, setNotifications] = useState<Notification[]>([]);
const [connectionStatus, setConnectionStatus] = useState('connecting');
useEffect(() => {
// Fetch data from the API when the component mounts
// Allow the user to add a custom css file
fetch(`http://${hostname}:${port}/custom.css`)
.then((response) => {
if (response.ok) {
// If the file exists, create a link element for the custom CSS
const link = document.createElement('link');
link.href = `http://${hostname}:${port}/custom.css`;
link.type = 'text/css';
link.rel = 'stylesheet';
document.head.appendChild(link);
}
})
.catch(console.error); // Handle the error appropriately
socket.on('connect', () => {
// Fetch data from the API when the client connects
fetch(`http://${hostname}:${port}/service-properties`)
.then((response) => response.json())
.then((data: DataServiceJSON) => dispatch({ type: 'SET_DATA', data }));
.then((data: State) => dispatch({ type: 'SET_DATA', data }));
fetch(`http://${hostname}:${port}/web-settings`)
.then((response) => response.json())
.then((data: Record<string, WebSetting>) => setWebSettings(data));
setConnectionStatus('connected');
});
socket.on('disconnect', () => {
setConnectionStatus('disconnected');
setTimeout(() => {
// Only set "reconnecting" if the state is still "disconnected",
// i.e. not when the client has already reconnected in the meantime
setConnectionStatus((currentState) =>
currentState === 'disconnected' ? 'reconnecting' : currentState
);
}, 2000);
});
socket.on('notify', onNotify);
socket.on('exception', onException);
socket.on('log', onLogMessage);
return () => {
socket.off('notify', onNotify);
socket.off('exception', onException);
socket.off('log', onLogMessage);
};
}, []);
// Wrap in useCallback to keep the addNotification identity stable and avoid
// causing a re-render of all components that receive it
const addNotification = useCallback((text: string) => {
const addNotification = useCallback(
(message: string, levelname: LevelName = 'DEBUG') => {
// Getting the current time in the required format
const timeString = new Date().toISOString().substring(11, 19);
const timeStamp = new Date().toISOString().substring(11, 19);
// Adding an id to the notification to provide a way of removing it
const id = Math.random();
// Custom logic for notifications
setNotifications((prevNotifications) => [
{ id, text, time: timeString },
{ levelname, id, message, timeStamp },
...prevNotifications
]);
}, []);
},
[]
);
const notifyException = (text: string) => {
// Getting the current time in the required format
const timeString = new Date().toISOString().substring(11, 19);
// Adding an id to the notification to provide a way of removing it
const id = Math.random();
// Custom logic for notifications
setExceptions((prevNotifications) => [
{ id, text, time: timeString },
...prevNotifications
]);
};
const removeNotificationById = (id: number) => {
setNotifications((prevNotifications) =>
prevNotifications.filter((n) => n.id !== id)
);
};
const removeExceptionById = (id: number) => {
setExceptions((prevNotifications) => prevNotifications.filter((n) => n.id !== id));
};
const handleCloseSettings = () => setShowSettings(false);
const handleShowSettings = () => setShowSettings(true);
function onNotify(value: UpdateMessage) {
// Extracting data from the notification
const { parent_path: parentPath, name, value: newValue } = value.data;
const { full_access_path: fullAccessPath, value: newValue } = value.data;
// Dispatching the update to the reducer
dispatch({
type: 'UPDATE_ATTRIBUTE',
parentPath,
name,
value: newValue
fullAccessPath,
newValue
});
}
function onException(value: ExceptionMessage) {
const newException = `${value.data.type}: ${value.data.exception}.`;
notifyException(newException);
function onLogMessage(value: LogMessage) {
addNotification(value.message, value.levelname);
}
// While the data is loading
if (!state) {
return <p>Loading...</p>;
return <ConnectionToast connectionStatus={connectionStatus} />;
}
return (
<>
@@ -212,9 +157,7 @@ const App = () => {
<Notifications
showNotification={showNotification}
notifications={notifications}
exceptions={exceptions}
removeNotificationById={removeNotificationById}
removeExceptionById={removeExceptionById}
/>
<Offcanvas
@@ -243,12 +186,17 @@ const App = () => {
</Offcanvas>
<div className="App navbarOffset">
<DataServiceComponent
props={state as DataServiceJSON}
<WebSettingsContext.Provider value={webSettings}>
<GenericComponent
name=""
parentPath=""
attribute={state as SerializedValue}
isInstantUpdate={isInstantUpdate}
addNotification={addNotification}
/>
</WebSettingsContext.Provider>
</div>
<ConnectionToast connectionStatus={connectionStatus} />
</>
);
};

View File

@@ -0,0 +1,9 @@
import { createContext } from 'react';
export const WebSettingsContext = createContext<Record<string, WebSetting>>({});
export type WebSetting = {
displayName: string;
display: boolean;
index: number;
};
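A rough sketch of the record served by the /web-settings endpoint and provided through this context (the attribute paths and values below are invented):
// Hypothetical web settings keyed by full access path:
const exampleWebSettings: Record<string, WebSetting> = {
  'device.temperature': { displayName: 'Temperature', display: true, index: 0 },
  'device.debug_flag': { displayName: 'Debug flag', display: false, index: 1 }
};
// GenericComponent hides 'device.debug_flag' (display is false) and renders
// 'device.temperature' with the label "Temperature" instead of the attribute name.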

View File

@@ -1,52 +1,49 @@
import React, { useEffect, useRef } from 'react';
import { emit_update } from '../socket';
import { InputGroup, Form, Button } from 'react-bootstrap';
import { runMethod } from '../socket';
import { Form, Button, InputGroup } from 'react-bootstrap';
import { DocStringComponent } from './DocStringComponent';
import { LevelName } from './NotificationsComponent';
interface AsyncMethodProps {
type AsyncMethodProps = {
name: string;
parentPath: string;
parameters: Record<string, string>;
value: Record<string, string>;
value: 'RUNNING' | null;
docString?: string;
hideOutput?: boolean;
addNotification: (string) => void;
}
addNotification: (message: string, levelname?: LevelName) => void;
displayName: string;
id: string;
render: boolean;
};
export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
const { name, parentPath, docString, value: runningTask, addNotification } = props;
const {
name,
parentPath,
docString,
value: runningTask,
addNotification,
displayName,
id
} = props;
// Conditional rendering based on the 'render' prop.
if (!props.render) {
return null;
}
const renderCount = useRef(0);
const formRef = useRef(null);
const fullAccessPath = [parentPath, name].filter((element) => element).join('.');
useEffect(() => {
renderCount.current++;
// updates the value of each form control that has a matching name in the
// runningTask object
if (runningTask) {
const formElement = formRef.current;
if (formElement) {
Object.entries(runningTask).forEach(([name, value]) => {
const inputElement = formElement.elements.namedItem(name);
if (inputElement) {
inputElement.value = value;
}
});
}
}
}, [runningTask]);
useEffect(() => {
let message: string;
if (runningTask === null) {
message = `${parentPath}.${name} task was stopped.`;
message = `${fullAccessPath} task was stopped.`;
} else {
const runningTaskEntries = Object.entries(runningTask)
.map(([key, value]) => `${key}: "${value}"`)
.join(', ');
message = `${parentPath}.${name} was started with parameters { ${runningTaskEntries} }.`;
message = `${fullAccessPath} was started.`;
}
addNotification(message);
}, [props.value]);
@@ -54,58 +51,31 @@ export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
const execute = async (event: React.FormEvent) => {
event.preventDefault();
let method_name: string;
const args = {};
if (runningTask !== undefined && runningTask !== null) {
method_name = `stop_${name}`;
} else {
Object.keys(props.parameters).forEach(
(name) => (args[name] = event.target[name].value)
);
method_name = `start_${name}`;
}
emit_update(method_name, parentPath, { args: args });
runMethod(method_name, parentPath, {});
};
const args = Object.entries(props.parameters).map(([name, type], index) => {
const form_name = `${name} (${type})`;
const value = runningTask && runningTask[name];
const isRunning = value !== undefined && value !== null;
return (
<InputGroup key={index}>
<InputGroup.Text className="component-label">{form_name}</InputGroup.Text>
<Form.Control
type="text"
name={name}
defaultValue={isRunning ? value : ''}
disabled={isRunning}
/>
</InputGroup>
);
});
return (
<div
className="align-items-center asyncMethodComponent"
id={parentPath.concat('.' + name)}>
<div className="component asyncMethodComponent" id={id}>
{process.env.NODE_ENV === 'development' && (
<p>Render count: {renderCount.current}</p>
<div>Render count: {renderCount.current}</div>
)}
<h5>
Function: {name}
<DocStringComponent docString={docString} />
</h5>
<Form onSubmit={execute} ref={formRef}>
{args}
<Button
id={`button-${parentPath}.${name}`}
name={name}
value={parentPath}
type="submit">
{runningTask ? 'Stop' : 'Start'}
<InputGroup>
<InputGroup.Text>
{displayName}
<DocStringComponent docString={docString} />
</InputGroup.Text>
<Button id={`button-${id}`} type="submit">
{runningTask === 'RUNNING' ? 'Stop ' : 'Start '}
</Button>
</InputGroup>
</Form>
</div>
);

View File

@@ -1,22 +1,40 @@
import React, { useEffect, useRef } from 'react';
import { ToggleButton } from 'react-bootstrap';
import { emit_update } from '../socket';
import { DocStringComponent } from './DocStringComponent';
import { LevelName } from './NotificationsComponent';
interface ButtonComponentProps {
type ButtonComponentProps = {
name: string;
parentPath?: string;
value: boolean;
readOnly: boolean;
docString: string;
mapping?: [string, string]; // Enforce a tuple of two strings
addNotification: (string) => void;
}
addNotification: (message: string, levelname?: LevelName) => void;
changeCallback?: (
value: unknown,
attributeName?: string,
prefix?: string,
callback?: (ack: unknown) => void
) => void;
displayName: string;
id: string;
};
export const ButtonComponent = React.memo((props: ButtonComponentProps) => {
const { name, parentPath, value, readOnly, docString, mapping, addNotification } =
props;
const buttonName = mapping ? (value ? mapping[0] : mapping[1]) : name;
const {
value,
readOnly,
docString,
addNotification,
changeCallback = () => {},
displayName,
id
} = props;
// const buttonName = props.mapping ? (value ? props.mapping[0] : props.mapping[1]) : name;
const fullAccessPath = [props.parentPath, props.name]
.filter((element) => element)
.join('.');
const renderCount = useRef(0);
@@ -25,29 +43,29 @@ export const ButtonComponent = React.memo((props: ButtonComponentProps) => {
});
useEffect(() => {
addNotification(`${parentPath}.${name} changed to ${value}.`);
addNotification(`${fullAccessPath} changed to ${value}.`);
}, [props.value]);
const setChecked = (checked: boolean) => {
emit_update(name, parentPath, checked);
changeCallback(checked);
};
return (
<div className={'buttonComponent'} id={parentPath.concat('.' + name)}>
<div className={'component buttonComponent'} id={id}>
{process.env.NODE_ENV === 'development' && (
<p>Render count: {renderCount.current}</p>
<div>Render count: {renderCount.current}</div>
)}
<DocStringComponent docString={docString} />
<ToggleButton
id={`toggle-check-${parentPath}.${name}`}
id={`toggle-check-${id}`}
type="checkbox"
variant={value ? 'success' : 'secondary'}
checked={value}
value={parentPath}
value={displayName}
disabled={readOnly}
onChange={(e) => setChecked(e.currentTarget.checked)}>
<p>{buttonName}</p>
{displayName}
<DocStringComponent docString={docString} />
</ToggleButton>
</div>
);

View File

@@ -0,0 +1,100 @@
import React, { useEffect, useRef, useState } from 'react';
import { InputGroup, Form, Row, Col } from 'react-bootstrap';
import { DocStringComponent } from './DocStringComponent';
import { LevelName } from './NotificationsComponent';
type ColouredEnumComponentProps = {
name: string;
parentPath: string;
value: string;
docString?: string;
readOnly: boolean;
enumDict: Record<string, string>;
addNotification: (message: string, levelname?: LevelName) => void;
changeCallback?: (
value: unknown,
attributeName?: string,
prefix?: string,
callback?: (ack: unknown) => void
) => void;
displayName: string;
id: string;
};
export const ColouredEnumComponent = React.memo((props: ColouredEnumComponentProps) => {
const {
name,
value,
docString,
enumDict,
readOnly,
addNotification,
displayName,
id
} = props;
let { changeCallback } = props;
if (changeCallback === undefined) {
changeCallback = (value: string) => {
setEnumValue(() => {
return value;
});
};
}
const renderCount = useRef(0);
const [enumValue, setEnumValue] = useState(value);
const fullAccessPath = [props.parentPath, props.name]
.filter((element) => element)
.join('.');
useEffect(() => {
renderCount.current++;
});
useEffect(() => {
setEnumValue(() => {
return props.value;
});
addNotification(`${fullAccessPath} changed to ${value}.`);
}, [props.value]);
return (
<div className={'component enumComponent'} id={id}>
{process.env.NODE_ENV === 'development' && (
<div>Render count: {renderCount.current}</div>
)}
<Row>
<Col className="d-flex align-items-center">
<InputGroup.Text>
{displayName}
<DocStringComponent docString={docString} />
</InputGroup.Text>
{readOnly ? (
// Display the Form.Control when readOnly is true
<Form.Control
value={enumValue}
name={name}
disabled={true}
style={{ backgroundColor: enumDict[enumValue] }}
/>
) : (
// Display the Form.Select when readOnly is false
<Form.Select
aria-label="coloured-enum-select"
value={enumValue}
name={name}
style={{ backgroundColor: enumDict[enumValue] }}
onChange={(event) => changeCallback(event.target.value)}>
{Object.entries(enumDict).map(([key]) => (
<option key={key} value={key}>
{key}
</option>
))}
</Form.Select>
)}
</Col>
</Row>
</div>
);
});

View File

@@ -0,0 +1,86 @@
import React, { useEffect, useState } from 'react';
import { Toast, Button, ToastContainer } from 'react-bootstrap';
type ConnectionToastProps = {
connectionStatus: string;
};
/**
* ConnectionToast Component
*
* Displays a toast notification that reflects the current connection status.
*
* Props:
* - connectionStatus (string): The current status of the connection which can be
* 'connecting', 'connected', 'disconnected', or 'reconnecting'. The component uses this
* status to determine the message, background color (`bg`), and auto-hide delay of the toast.
*
* The toast is designed to automatically appear based on changes to the `connectionStatus` prop
* and provides a close button to manually dismiss the toast. It uses `react-bootstrap`'s Toast
* component to show the connection status in a stylized format, and Bootstrap's utility classes
* for alignment and spacing.
*/
export const ConnectionToast = React.memo(
({ connectionStatus }: ConnectionToastProps) => {
const [show, setShow] = useState(true);
useEffect(() => {
setShow(true);
}, [connectionStatus]);
const handleClose = () => setShow(false);
const getToastContent = (): {
message: string;
bg: string; // bootstrap uses `bg` prop for background color
delay: number | undefined;
} => {
switch (connectionStatus) {
case 'connecting':
return {
message: 'Connecting...',
bg: 'info',
delay: undefined
};
case 'connected':
return { message: 'Connected', bg: 'success', delay: 1000 };
case 'disconnected':
return {
message: 'Disconnected',
bg: 'danger',
delay: undefined
};
case 'reconnecting':
return {
message: 'Reconnecting...',
bg: 'info',
delay: undefined
};
default:
return {
message: '',
bg: 'info',
delay: undefined
};
}
};
const { message, bg, delay } = getToastContent();
return (
<ToastContainer position="bottom-center" className="toastContainer">
<Toast
show={show}
onClose={handleClose}
delay={delay}
autohide={delay !== undefined}
bg={bg}>
<Toast.Body className="d-flex justify-content-between">
{message}
<Button variant="close" size="sm" onClick={handleClose} />
</Toast.Body>
</Toast>
</ToastContainer>
);
}
);
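A minimal usage sketch, assuming useState/useEffect and the socket instance from './socket' are imported and the status string is driven by the socket handlers as in App.tsx above:
// Hypothetical wrapper mirroring how App.tsx feeds connectionStatus into the toast.
const ConnectionStatusExample = () => {
  const [status, setStatus] = useState('connecting');
  useEffect(() => {
    socket.on('connect', () => setStatus('connected'));
    socket.on('disconnect', () => setStatus('disconnected'));
    return () => {
      socket.off('connect');
      socket.off('disconnect');
    };
  }, []);
  return <ConnectionToast connectionStatus={status} />;
};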

View File

@@ -2,53 +2,73 @@ import { useState } from 'react';
import React from 'react';
import { Card, Collapse } from 'react-bootstrap';
import { ChevronDown, ChevronRight } from 'react-bootstrap-icons';
import { Attribute, GenericComponent } from './GenericComponent';
import { SerializedValue, GenericComponent } from './GenericComponent';
import { LevelName } from './NotificationsComponent';
type DataServiceProps = {
name: string;
props: DataServiceJSON;
parentPath?: string;
isInstantUpdate: boolean;
addNotification: (string) => void;
addNotification: (message: string, levelname?: LevelName) => void;
displayName: string;
id: string;
};
export type DataServiceJSON = Record<string, Attribute>;
export type DataServiceJSON = Record<string, SerializedValue>;
export const DataServiceComponent = React.memo(
({
name,
props,
parentPath = 'DataService',
parentPath = undefined,
isInstantUpdate,
addNotification
addNotification,
displayName,
id
}: DataServiceProps) => {
const [open, setOpen] = useState(true);
const fullAccessPath = [parentPath, name].filter((element) => element).join('.');
if (displayName !== '') {
return (
<div className="dataServiceComponent">
<Card className="mb-3">
<Card.Header
onClick={() => setOpen(!open)}
style={{ cursor: 'pointer' }} // Change cursor style on hover
>
{parentPath} {open ? <ChevronDown /> : <ChevronRight />}
<div className="component dataServiceComponent" id={id}>
<Card>
<Card.Header onClick={() => setOpen(!open)} style={{ cursor: 'pointer' }}>
{displayName} {open ? <ChevronDown /> : <ChevronRight />}
</Card.Header>
<Collapse in={open}>
<Card.Body>
{Object.entries(props).map(([key, value]) => {
return (
{Object.entries(props).map(([key, value]) => (
<GenericComponent
key={key}
attribute={value}
name={key}
parentPath={parentPath}
parentPath={fullAccessPath}
isInstantUpdate={isInstantUpdate}
addNotification={addNotification}
/>
);
})}
))}
</Card.Body>
</Collapse>
</Card>
</div>
);
} else {
return (
<div className="component dataServiceComponent" id={id}>
{Object.entries(props).map(([key, value]) => (
<GenericComponent
key={key}
attribute={value}
name={key}
parentPath={fullAccessPath}
isInstantUpdate={isInstantUpdate}
addNotification={addNotification}
/>
))}
</div>
);
}
}
);

View File

@@ -0,0 +1,61 @@
import React from 'react';
import { LevelName } from './NotificationsComponent';
import { DataServiceComponent, DataServiceJSON } from './DataServiceComponent';
import { MethodComponent } from './MethodComponent';
type DeviceConnectionProps = {
name: string;
props: DataServiceJSON;
parentPath: string;
isInstantUpdate: boolean;
addNotification: (message: string, levelname?: LevelName) => void;
displayName: string;
id: string;
};
export const DeviceConnectionComponent = React.memo(
({
name,
props,
parentPath,
isInstantUpdate,
addNotification,
displayName,
id
}: DeviceConnectionProps) => {
const { connected, connect, ...updatedProps } = props;
const connectedVal = connected.value;
const fullAccessPath = [parentPath, name].filter((element) => element).join('.');
return (
<div className="deviceConnectionComponent" id={id}>
{!connectedVal && (
<div className="overlayContent">
<div>
{displayName != '' ? displayName : 'Device'} is currently not available!
</div>
<MethodComponent
name="connect"
parentPath={fullAccessPath}
docString={connect.doc}
addNotification={addNotification}
displayName={'reconnect'}
id={id + '-connect'}
render={true}
/>
</div>
)}
<DataServiceComponent
name={name}
props={updatedProps}
parentPath={parentPath}
isInstantUpdate={isInstantUpdate}
addNotification={addNotification}
displayName={displayName}
id={id}
/>
</div>
);
}
);

View File

@@ -1,9 +1,9 @@
import { Badge, Tooltip, OverlayTrigger } from 'react-bootstrap';
import React from 'react';
interface DocStringProps {
type DocStringProps = {
docString?: string;
}
};
export const DocStringComponent = React.memo((props: DocStringProps) => {
const { docString } = props;

View File

@@ -1,60 +1,93 @@
import React, { useEffect, useRef } from 'react';
import React, { useEffect, useRef, useState } from 'react';
import { InputGroup, Form, Row, Col } from 'react-bootstrap';
import { emit_update } from '../socket';
import { DocStringComponent } from './DocStringComponent';
import { LevelName } from './NotificationsComponent';
interface EnumComponentProps {
type EnumComponentProps = {
name: string;
parentPath: string;
value: string;
docString?: string;
readOnly: boolean;
enumDict: Record<string, string>;
addNotification: (string) => void;
}
addNotification: (message: string, levelname?: LevelName) => void;
changeCallback?: (
value: unknown,
attributeName?: string,
prefix?: string,
callback?: (ack: unknown) => void
) => void;
displayName: string;
id: string;
};
export const EnumComponent = React.memo((props: EnumComponentProps) => {
const {
name,
parentPath: parentPath,
value,
docString,
enumDict,
addNotification
addNotification,
displayName,
id,
readOnly
} = props;
let { changeCallback } = props;
if (changeCallback === undefined) {
changeCallback = (value: string) => {
setEnumValue(() => {
return value;
});
};
}
const renderCount = useRef(0);
const [enumValue, setEnumValue] = useState(value);
const fullAccessPath = [props.parentPath, props.name]
.filter((element) => element)
.join('.');
useEffect(() => {
renderCount.current++;
});
useEffect(() => {
addNotification(`${parentPath}.${name} changed to ${value}.`);
setEnumValue(() => {
return props.value;
});
addNotification(`${fullAccessPath} changed to ${value}.`);
}, [props.value]);
const handleValueChange = (newValue: string) => {
emit_update(name, parentPath, newValue);
};
return (
<div className={'enumComponent'} id={parentPath.concat('.' + name)}>
<div className={'component enumComponent'} id={id}>
{process.env.NODE_ENV === 'development' && (
<p>Render count: {renderCount.current}</p>
<div>Render count: {renderCount.current}</div>
)}
<DocStringComponent docString={docString} />
<Row>
<Col className="d-flex align-items-center">
<InputGroup.Text>{name}</InputGroup.Text>
<InputGroup.Text>
{displayName}
<DocStringComponent docString={docString} />
</InputGroup.Text>
{readOnly ? (
// Display the Form.Control when readOnly is true
<Form.Control value={enumDict[enumValue]} name={name} disabled={true} />
) : (
// Display the Form.Select when readOnly is false
<Form.Select
aria-label="Default select example"
value={value}
onChange={(event) => handleValueChange(event.target.value)}>
aria-label="example-select"
value={enumValue}
name={name}
onChange={(event) => changeCallback(event.target.value)}>
{Object.entries(enumDict).map(([key, val]) => (
<option key={key} value={key}>
{key} - {val}
{val}
</option>
))}
</Form.Select>
)}
</Col>
</Row>
</div>

View File

@@ -1,4 +1,4 @@
import React from 'react';
import React, { useContext } from 'react';
import { ButtonComponent } from './ButtonComponent';
import { NumberComponent } from './NumberComponent';
import { SliderComponent } from './SliderComponent';
@@ -8,7 +8,13 @@ import { AsyncMethodComponent } from './AsyncMethodComponent';
import { StringComponent } from './StringComponent';
import { ListComponent } from './ListComponent';
import { DataServiceComponent, DataServiceJSON } from './DataServiceComponent';
import { DeviceConnectionComponent } from './DeviceConnection';
import { ImageComponent } from './ImageComponent';
import { ColouredEnumComponent } from './ColouredEnumComponent';
import { LevelName } from './NotificationsComponent';
import { getIdFromFullAccessPath } from '../utils/stringUtils';
import { WebSettingsContext } from '../WebSettings';
import { setAttribute } from '../socket';
type AttributeType =
| 'str'
@@ -19,26 +25,28 @@ type AttributeType =
| 'list'
| 'method'
| 'DataService'
| 'DeviceConnection'
| 'Enum'
| 'NumberSlider'
| 'Image';
| 'Image'
| 'ColouredEnum';
type ValueType = boolean | string | number | object;
export interface Attribute {
type ValueType = boolean | string | number | Record<string, unknown>;
export type SerializedValue = {
type: AttributeType;
value?: ValueType | ValueType[];
readonly: boolean;
doc?: string | null;
parameters?: Record<string, string>;
async?: boolean;
frontend_render?: boolean;
enum?: Record<string, string>;
}
};
type GenericComponentProps = {
attribute: Attribute;
attribute: SerializedValue;
name: string;
parentPath: string;
isInstantUpdate: boolean;
addNotification: (string) => void;
addNotification: (message: string, levelname?: LevelName) => void;
};
export const GenericComponent = React.memo(
@@ -49,6 +57,29 @@ export const GenericComponent = React.memo(
isInstantUpdate,
addNotification
}: GenericComponentProps) => {
const fullAccessPath = [parentPath, name].filter((element) => element).join('.');
const id = getIdFromFullAccessPath(fullAccessPath);
const webSettings = useContext(WebSettingsContext);
let displayName = name;
if (webSettings[fullAccessPath]) {
if (webSettings[fullAccessPath].display === false) {
return null;
}
if (webSettings[fullAccessPath].displayName) {
displayName = webSettings[fullAccessPath].displayName;
}
}
function changeCallback(
value: unknown,
attributeName: string = name,
prefix: string = parentPath,
callback: (ack: unknown) => void = undefined
) {
setAttribute(attributeName, prefix, value, callback);
}
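As a rough sketch (the numeric values are invented), a child component that receives this changeCallback can update either its own attribute or a nested sub-attribute:
// Hypothetical calls from a child component:
changeCallback(5.0);                      // sets `${parentPath}.${name}` via setAttribute
changeCallback(0.1, `${name}.step_size`); // sets a sub-attribute, as SliderComponent does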
if (attribute.type === 'bool') {
return (
<ButtonComponent
@@ -58,6 +89,9 @@ export const GenericComponent = React.memo(
readOnly={attribute.readonly}
value={Boolean(attribute.value)}
addNotification={addNotification}
changeCallback={changeCallback}
displayName={displayName}
id={id}
/>
);
} else if (attribute.type === 'float' || attribute.type === 'int') {
@@ -71,6 +105,9 @@ export const GenericComponent = React.memo(
value={Number(attribute.value)}
isInstantUpdate={isInstantUpdate}
addNotification={addNotification}
changeCallback={changeCallback}
displayName={displayName}
id={id}
/>
);
} else if (attribute.type === 'Quantity') {
@@ -85,6 +122,9 @@ export const GenericComponent = React.memo(
unit={attribute.value['unit']}
isInstantUpdate={isInstantUpdate}
addNotification={addNotification}
changeCallback={changeCallback}
displayName={displayName}
id={id}
/>
);
} else if (attribute.type === 'NumberSlider') {
@@ -92,14 +132,17 @@ export const GenericComponent = React.memo(
<SliderComponent
name={name}
parentPath={parentPath}
docString={attribute.doc}
docString={attribute.value['value'].doc}
readOnly={attribute.readonly}
value={attribute.value['value']['value']}
min={attribute.value['min']['value']}
max={attribute.value['max']['value']}
stepSize={attribute.value['step_size']['value']}
value={attribute.value['value']}
min={attribute.value['min']}
max={attribute.value['max']}
stepSize={attribute.value['step_size']}
isInstantUpdate={isInstantUpdate}
addNotification={addNotification}
changeCallback={changeCallback}
displayName={displayName}
id={id}
/>
);
} else if (attribute.type === 'Enum') {
@@ -109,8 +152,12 @@ export const GenericComponent = React.memo(
parentPath={parentPath}
docString={attribute.doc}
value={String(attribute.value)}
readOnly={attribute.readonly}
enumDict={attribute.enum}
addNotification={addNotification}
changeCallback={changeCallback}
displayName={displayName}
id={id}
/>
);
} else if (attribute.type === 'method') {
@@ -120,8 +167,10 @@ export const GenericComponent = React.memo(
name={name}
parentPath={parentPath}
docString={attribute.doc}
parameters={attribute.parameters}
addNotification={addNotification}
displayName={displayName}
id={id}
render={attribute.frontend_render}
/>
);
} else {
@@ -130,9 +179,11 @@ export const GenericComponent = React.memo(
name={name}
parentPath={parentPath}
docString={attribute.doc}
parameters={attribute.parameters}
value={attribute.value as Record<string, string>}
addNotification={addNotification}
displayName={displayName}
id={id}
render={attribute.frontend_render}
/>
);
}
@@ -146,26 +197,45 @@ export const GenericComponent = React.memo(
parentPath={parentPath}
isInstantUpdate={isInstantUpdate}
addNotification={addNotification}
changeCallback={changeCallback}
displayName={displayName}
id={id}
/>
);
} else if (attribute.type === 'DataService') {
return (
<DataServiceComponent
name={name}
props={attribute.value as DataServiceJSON}
parentPath={parentPath.concat('.', name)}
parentPath={parentPath}
isInstantUpdate={isInstantUpdate}
addNotification={addNotification}
displayName={displayName}
id={id}
/>
);
} else if (attribute.type === 'DeviceConnection') {
return (
<DeviceConnectionComponent
name={name}
props={attribute.value as DataServiceJSON}
parentPath={parentPath}
isInstantUpdate={isInstantUpdate}
addNotification={addNotification}
displayName={displayName}
id={id}
/>
);
} else if (attribute.type === 'list') {
return (
<ListComponent
name={name}
value={attribute.value as Attribute[]}
value={attribute.value as SerializedValue[]}
docString={attribute.doc}
parentPath={parentPath}
isInstantUpdate={isInstantUpdate}
addNotification={addNotification}
id={id}
/>
);
} else if (attribute.type === 'Image') {
@@ -173,12 +243,28 @@ export const GenericComponent = React.memo(
<ImageComponent
name={name}
parentPath={parentPath}
value={attribute.value['value']['value'] as string}
readOnly={attribute.readonly}
docString={attribute.doc}
// Add any other specific props for the ImageComponent here
format={attribute.value['format']['value'] as string}
docString={attribute.value['value'].doc}
displayName={displayName}
id={id}
addNotification={addNotification}
// Add any other specific props for the ImageComponent here
value={attribute.value['value']['value'] as string}
format={attribute.value['format']['value'] as string}
/>
);
} else if (attribute.type === 'ColouredEnum') {
return (
<ColouredEnumComponent
name={name}
parentPath={parentPath}
docString={attribute.doc}
value={String(attribute.value)}
readOnly={attribute.readonly}
enumDict={attribute.enum}
addNotification={addNotification}
changeCallback={changeCallback}
displayName={displayName}
id={id}
/>
);
} else {

View File

@@ -2,47 +2,52 @@ import React, { useEffect, useRef, useState } from 'react';
import { Card, Collapse, Image } from 'react-bootstrap';
import { DocStringComponent } from './DocStringComponent';
import { ChevronDown, ChevronRight } from 'react-bootstrap-icons';
import { LevelName } from './NotificationsComponent';
interface ImageComponentProps {
type ImageComponentProps = {
name: string;
parentPath: string;
value: string;
readOnly: boolean;
docString: string;
format: string;
addNotification: (string) => void;
}
addNotification: (message: string, levelname?: LevelName) => void;
displayName: string;
id: string;
};
export const ImageComponent = React.memo((props: ImageComponentProps) => {
const { name, parentPath, value, docString, format, addNotification } = props;
const { value, docString, format, addNotification, displayName, id } = props;
const renderCount = useRef(0);
const [open, setOpen] = useState(true);
const fullAccessPath = [props.parentPath, props.name]
.filter((element) => element)
.join('.');
useEffect(() => {
renderCount.current++;
});
useEffect(() => {
addNotification(`${parentPath}.${name} changed.`);
addNotification(`${fullAccessPath} changed.`);
}, [props.value]);
return (
<div className={'imageComponent'} id={parentPath.concat('.' + name)}>
<div className="component imageComponent" id={id}>
<Card>
<Card.Header
onClick={() => setOpen(!open)}
style={{ cursor: 'pointer' }} // Change cursor style on hover
>
{name} {open ? <ChevronDown /> : <ChevronRight />}
{displayName}
<DocStringComponent docString={docString} />
{open ? <ChevronDown /> : <ChevronRight />}
</Card.Header>
<Collapse in={open}>
<Card.Body>
{process.env.NODE_ENV === 'development' && (
<p>Render count: {renderCount.current}</p>
)}
<DocStringComponent docString={docString} />
{/* Your component JSX here */}
{format === '' && value === '' ? (
<p>No image set in the backend.</p>
) : (

View File

@@ -1,18 +1,20 @@
import React, { useEffect, useRef } from 'react';
import { DocStringComponent } from './DocStringComponent';
import { Attribute, GenericComponent } from './GenericComponent';
import { SerializedValue, GenericComponent } from './GenericComponent';
import { LevelName } from './NotificationsComponent';
interface ListComponentProps {
type ListComponentProps = {
name: string;
parentPath?: string;
value: Attribute[];
value: SerializedValue[];
docString: string;
isInstantUpdate: boolean;
addNotification: (string) => void;
}
addNotification: (message: string, levelname?: LevelName) => void;
id: string;
};
export const ListComponent = React.memo((props: ListComponentProps) => {
const { name, parentPath, value, docString, isInstantUpdate, addNotification } =
const { name, parentPath, value, docString, isInstantUpdate, addNotification, id } =
props;
const renderCount = useRef(0);
@@ -22,9 +24,9 @@ export const ListComponent = React.memo((props: ListComponentProps) => {
}, [props]);
return (
<div className={'listComponent'} id={parentPath.concat(name)}>
<div className={'listComponent'} id={id}>
{process.env.NODE_ENV === 'development' && (
<p>Render count: {renderCount.current}</p>
<div>Render count: {renderCount.current}</div>
)}
<DocStringComponent docString={docString} />
{value.map((item, index) => {

View File

@@ -1,108 +1,59 @@
import React, { useState, useEffect, useRef } from 'react';
import { emit_update } from '../socket';
import { Button, InputGroup, Form, Collapse } from 'react-bootstrap';
import React, { useEffect, useRef } from 'react';
import { runMethod } from '../socket';
import { Button, Form } from 'react-bootstrap';
import { DocStringComponent } from './DocStringComponent';
import { LevelName } from './NotificationsComponent';
interface MethodProps {
type MethodProps = {
name: string;
parentPath: string;
parameters: Record<string, string>;
docString?: string;
hideOutput?: boolean;
addNotification: (string) => void;
}
addNotification: (message: string, levelname?: LevelName) => void;
displayName: string;
id: string;
render: boolean;
};
export const MethodComponent = React.memo((props: MethodProps) => {
const { name, parentPath, docString, addNotification } = props;
const { name, parentPath, docString, addNotification, displayName, id } = props;
// Conditional rendering based on the 'render' prop.
if (!props.render) {
return null;
}
const renderCount = useRef(0);
const [hideOutput, setHideOutput] = useState(false);
// Add a new state variable to hold the list of function calls
const [functionCalls, setFunctionCalls] = useState([]);
const formRef = useRef(null);
const fullAccessPath = [parentPath, name].filter((element) => element).join('.');
useEffect(() => {
renderCount.current++;
if (props.hideOutput !== undefined) {
setHideOutput(props.hideOutput);
}
});
const triggerNotification = () => {
const message = `Method ${fullAccessPath} was triggered.`;
const triggerNotification = (args: Record<string, string>) => {
const argsString = Object.entries(args)
.map(([key, value]) => `${key}: "${value}"`)
.join(', ');
let message = `Method ${parentPath}.${name} was triggered`;
if (argsString === '') {
message += '.';
} else {
message += ` with arguments {${argsString}}.`;
}
addNotification(message);
};
const execute = async (event: React.FormEvent) => {
event.preventDefault();
runMethod(name, parentPath, {});
const args = {};
Object.keys(props.parameters).forEach(
(name) => (args[name] = event.target[name].value)
);
emit_update(name, parentPath, { args: args }, (ack) => {
// Update the functionCalls state with the new call if we get an acknowledge msg
if (ack !== undefined) {
setFunctionCalls((prevCalls) => [...prevCalls, { name, args, result: ack }]);
}
});
triggerNotification(args);
triggerNotification();
};
const args = Object.entries(props.parameters).map(([name, type], index) => {
const form_name = `${name} (${type})`;
return (
<InputGroup key={index}>
<InputGroup.Text className="component-label">{form_name}</InputGroup.Text>
<Form.Control type="text" name={name} />
</InputGroup>
);
useEffect(() => {
renderCount.current++;
});
return (
<div
className="align-items-center methodComponent"
id={parentPath.concat('.' + name)}>
<div className="component methodComponent" id={id}>
{process.env.NODE_ENV === 'development' && (
<p>Render count: {renderCount.current}</p>
<div>Render count: {renderCount.current}</div>
)}
<h5 onClick={() => setHideOutput(!hideOutput)} style={{ cursor: 'pointer' }}>
Function: {name}
<Form onSubmit={execute} ref={formRef}>
<Button className="component" variant="primary" type="submit">
{`${displayName} `}
<DocStringComponent docString={docString} />
</h5>
<Form onSubmit={execute}>
{args}
<div>
<Button variant="primary" type="submit">
Execute
</Button>
</div>
</Form>
<Collapse in={!hideOutput}>
<div id="function-output">
{functionCalls.map((call, index) => (
<div key={index}>
<div style={{ color: 'grey', fontSize: 'small' }}>
{Object.entries(call.args)
.map(([key, val]) => `${key}=${JSON.stringify(val)}`)
.join(', ') +
' => ' +
JSON.stringify(call.result)}
</div>
</div>
))}
</div>
</Collapse>
</div>
);
});

View File

@@ -1,73 +1,71 @@
import React from 'react';
import { ToastContainer, Toast } from 'react-bootstrap';
export type LevelName = 'CRITICAL' | 'ERROR' | 'WARNING' | 'INFO' | 'DEBUG';
export type Notification = {
id: number;
time: string;
text: string;
timeStamp: string;
message: string;
levelname: LevelName;
};
type NotificationProps = {
showNotification: boolean;
notifications: Notification[];
exceptions: Notification[];
removeNotificationById: (id: number) => void;
removeExceptionById: (id: number) => void;
};
export const Notifications = React.memo((props: NotificationProps) => {
const {
showNotification,
notifications,
exceptions,
removeExceptionById,
removeNotificationById
} = props;
const { showNotification, notifications, removeNotificationById } = props;
return (
<ToastContainer className="navbarOffset toastContainer" position="top-end">
{notifications.map((notification) => {
// Determine if the toast should be shown
const shouldShow =
notification.levelname === 'ERROR' ||
notification.levelname === 'CRITICAL' ||
(showNotification &&
['WARNING', 'INFO', 'DEBUG'].includes(notification.levelname));
if (!shouldShow) {
return null;
}
return (
<ToastContainer
className="navbarOffset toastContainer"
position="top-end"
style={{ position: 'fixed' }}>
{showNotification &&
notifications.map((notification) => (
<Toast
className="notificationToast"
className={notification.levelname.toLowerCase() + 'Toast'}
key={notification.id}
onClose={() => removeNotificationById(notification.id)}
onClick={() => {
removeNotificationById(notification.id);
}}
onClick={() => removeNotificationById(notification.id)}
onMouseLeave={() => {
if (notification.levelname !== 'ERROR') {
removeNotificationById(notification.id);
}
}}
show={true}
autohide={true}
delay={2000}>
<Toast.Header closeButton={false} className="notificationToast text-right">
<strong className="me-auto">Notification</strong>
<small>{notification.time}</small>
autohide={
notification.levelname === 'WARNING' ||
notification.levelname === 'INFO' ||
notification.levelname === 'DEBUG'
}
delay={
notification.levelname === 'WARNING' ||
notification.levelname === 'INFO' ||
notification.levelname === 'DEBUG'
? 2000
: undefined
}>
<Toast.Header
closeButton={false}
className={notification.levelname.toLowerCase() + 'Toast text-right'}>
<strong className="me-auto">{notification.levelname}</strong>
<small>{notification.timeStamp}</small>
</Toast.Header>
<Toast.Body>{notification.text}</Toast.Body>
<Toast.Body>{notification.message}</Toast.Body>
</Toast>
))}
{exceptions.map((exception) => (
<Toast
className="exceptionToast"
key={exception.id}
onClose={() => removeExceptionById(exception.id)}
onClick={() => {
removeExceptionById(exception.id);
}}
show={true}
autohide={false}>
<Toast.Header closeButton className="exceptionToast text-right">
<strong className="me-auto">Exception</strong>
<small>{exception.time}</small>
</Toast.Header>
<Toast.Body>{exception.text}</Toast.Body>
</Toast>
))}
);
})}
</ToastContainer>
);
});
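A rough sketch of the notification objects this component renders (ids, timestamps and messages are invented):
const exampleNotifications: Notification[] = [
  { id: 0.42, timeStamp: '14:05:32', message: 'device.voltage changed to 1.2.', levelname: 'DEBUG' },
  { id: 0.84, timeStamp: '14:05:40', message: 'Connection to device lost.', levelname: 'ERROR' }
];
// The DEBUG toast is only shown while showNotification is true and auto-hides after 2 s;
// the ERROR toast is always shown and stays until it is dismissed.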

View File

@@ -1,12 +1,35 @@
import React, { useEffect, useRef, useState } from 'react';
import React, { useEffect, useState, useRef } from 'react';
import { Form, InputGroup } from 'react-bootstrap';
import { emit_update } from '../socket';
import { DocStringComponent } from './DocStringComponent';
import '../App.css';
import { LevelName } from './NotificationsComponent';
// TODO: add button functionality
interface NumberComponentProps {
export type QuantityObject = {
type: 'Quantity';
readonly: boolean;
value: {
magnitude: number;
unit: string;
};
doc?: string;
};
export type IntObject = {
type: 'int';
readonly: boolean;
value: number;
doc?: string;
};
export type FloatObject = {
type: 'float';
readonly: boolean;
value: number;
doc?: string;
};
export type NumberObject = IntObject | FloatObject | QuantityObject;
type NumberComponentProps = {
name: string;
type: 'float' | 'int';
parentPath?: string;
@@ -15,23 +38,24 @@ interface NumberComponentProps {
docString: string;
isInstantUpdate: boolean;
unit?: string;
showName?: boolean;
customEmitUpdate?: (
name: string,
parent_path: string,
value: number,
addNotification: (message: string, levelname?: LevelName) => void;
changeCallback?: (
value: unknown,
attributeName?: string,
prefix?: string,
callback?: (ack: unknown) => void
) => void;
addNotification: (string) => void;
}
displayName?: string;
id: string;
};
// TODO: highlight the digit that is being changed by setting both selectionStart and
// selectionEnd
const handleArrowKey = (
key: string,
value: string,
selectionStart: number,
selectionEnd: number
selectionStart: number
// selectionEnd: number
) => {
// Split the input value into the integer part and decimal part
const parts = value.split('.');
@@ -108,61 +132,6 @@ const handleDeleteKey = (
return { value, selectionStart };
};
export const NumberComponent = React.memo((props: NumberComponentProps) => {
const {
name,
parentPath,
readOnly,
docString,
isInstantUpdate,
unit,
addNotification
} = props;
// Whether to show the name in front of the component (false if used with a slider)
const showName = props.showName !== undefined ? props.showName : true;
// If emitUpdate is passed, use this instead of the emit_update from the socket
// Also used when the component is embedded in a SliderComponent
const emitUpdate =
props.customEmitUpdate !== undefined ? props.customEmitUpdate : emit_update;
const renderCount = useRef(0);
// Create a state for the cursor position
const [cursorPosition, setCursorPosition] = useState(null);
// Create a state for the input string
const [inputString, setInputString] = useState(props.value.toString());
useEffect(() => {
renderCount.current++;
// Set the cursor position after the component re-renders
const inputElement = document.getElementsByName(
parentPath.concat(name)
)[0] as HTMLInputElement;
if (inputElement && cursorPosition !== null) {
inputElement.setSelectionRange(cursorPosition, cursorPosition);
}
});
useEffect(() => {
// Parse the input string to a number for comparison
const numericInputString =
props.type === 'int' ? parseInt(inputString) : parseFloat(inputString);
// Only update the inputString if it's different from the prop value
if (props.value !== numericInputString) {
setInputString(props.value.toString());
}
// emitting notification
let notificationMsg = `${parentPath}.${name} changed to ${props.value}`;
if (unit === undefined) {
notificationMsg += '.';
} else {
notificationMsg += ` ${unit}.`;
}
addNotification(notificationMsg);
}, [props.value]);
const handleNumericKey = (
key: string,
value: string,
@@ -170,9 +139,8 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
selectionEnd: number
) => {
// Check if a number key or a decimal point key is pressed
if (key === '.' && (value.includes('.') || props.type === 'int')) {
if (key === '.' && value.includes('.')) {
// Check if value already contains a decimal. If so, ignore input.
// eslint-disable-next-line no-console
console.warn('Invalid input! Ignoring...');
return { value, selectionStart };
}
@@ -190,6 +158,31 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
return { value: newValue, selectionStart: selectionStart + 1 };
};
export const NumberComponent = React.memo((props: NumberComponentProps) => {
const {
name,
value,
readOnly,
type,
docString,
isInstantUpdate,
unit,
addNotification,
changeCallback = () => {},
displayName,
id
} = props;
// Create a state for the cursor position
const [cursorPosition, setCursorPosition] = useState(null);
// Create a state for the input string
const [inputString, setInputString] = useState(value.toString());
const renderCount = useRef(0);
const fullAccessPath = [props.parentPath, props.name]
.filter((element) => element)
.join('.');
const handleKeyDown = (event) => {
const { key, target } = event;
if (
@@ -214,6 +207,16 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
// Select everything when pressing Ctrl + a
target.setSelectionRange(0, target.value.length);
return;
} else if (key === '-') {
if (selectionStart === 0 && !value.startsWith('-')) {
newValue = '-' + value;
selectionStart++;
} else if (value.startsWith('-') && selectionStart === 1) {
newValue = value.substring(1); // remove minus sign
selectionStart--;
} else {
return; // Ignore "-" pressed in other positions
}
} else if (!isNaN(key) && key !== ' ') {
// Check if a number key or a decimal point key is pressed
({ value: newValue, selectionStart } = handleNumericKey(
@@ -222,7 +225,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
selectionStart,
selectionEnd
));
} else if (key === '.') {
} else if (key === '.' && type === 'float') {
({ value: newValue, selectionStart } = handleNumericKey(
key,
value,
@@ -233,8 +236,8 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
({ value: newValue, selectionStart } = handleArrowKey(
key,
value,
selectionStart,
selectionEnd
selectionStart
// selectionEnd
));
} else if (key === 'Backspace') {
({ value: newValue, selectionStart } = handleBackspaceKey(
@@ -249,7 +252,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
selectionEnd
));
} else if (key === 'Enter' && !isInstantUpdate) {
emitUpdate(name, parentPath, Number(newValue));
changeCallback(Number(newValue));
return;
} else {
console.debug(key);
@@ -258,7 +261,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
// Update the input value and maintain the cursor position
if (isInstantUpdate) {
emitUpdate(name, parentPath, Number(newValue));
changeCallback(Number(newValue));
}
setInputString(newValue);
@@ -270,24 +273,53 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
const handleBlur = () => {
if (!isInstantUpdate) {
// If not in "instant update" mode, emit an update when the input field loses focus
emitUpdate(name, parentPath, Number(inputString));
changeCallback(Number(inputString));
}
};
useEffect(() => {
// Parse the input string to a number for comparison
const numericInputString =
type === 'int' ? parseInt(inputString) : parseFloat(inputString);
// Only update the inputString if it's different from the prop value
if (value !== numericInputString) {
setInputString(value.toString());
}
// emitting notification
let notificationMsg = `${fullAccessPath} changed to ${props.value}`;
if (unit === undefined) {
notificationMsg += '.';
} else {
notificationMsg += ` ${unit}.`;
}
addNotification(notificationMsg);
}, [value]);
useEffect(() => {
// Set the cursor position after the component re-renders
const inputElement = document.getElementsByName(name)[0] as HTMLInputElement;
if (inputElement && cursorPosition !== null) {
inputElement.setSelectionRange(cursorPosition, cursorPosition);
}
});
return (
<div className="numberComponent" id={parentPath.concat('.' + name)}>
{process.env.NODE_ENV === 'development' && showName && (
<p>Render count: {renderCount.current}</p>
<div className="component numberComponent" id={id}>
{process.env.NODE_ENV === 'development' && (
<div>Render count: {renderCount.current}</div>
)}
<DocStringComponent docString={docString} />
<div className="d-flex">
<InputGroup>
{showName && <InputGroup.Text>{name}</InputGroup.Text>}
{displayName && (
<InputGroup.Text>
{displayName}
<DocStringComponent docString={docString} />
</InputGroup.Text>
)}
<Form.Control
type="text"
value={inputString}
disabled={readOnly}
name={parentPath.concat(name)}
name={name}
onKeyDown={handleKeyDown}
onBlur={handleBlur}
className={isInstantUpdate && !readOnly ? 'instantUpdate' : ''}
@@ -295,6 +327,5 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
{unit && <InputGroup.Text>{unit}</InputGroup.Text>}
</InputGroup>
</div>
</div>
);
});

View File

@@ -1,31 +1,34 @@
import React, { useEffect, useRef, useState } from 'react';
import { InputGroup, Form, Row, Col, Collapse, ToggleButton } from 'react-bootstrap';
import { emit_update } from '../socket';
import { DocStringComponent } from './DocStringComponent';
import { Slider } from '@mui/material';
import { NumberComponent } from './NumberComponent';
import { NumberComponent, NumberObject } from './NumberComponent';
import { LevelName } from './NotificationsComponent';
interface SliderComponentProps {
type SliderComponentProps = {
name: string;
min: number;
max: number;
min: NumberObject;
max: NumberObject;
parentPath?: string;
value: number;
value: NumberObject;
readOnly: boolean;
docString: string;
stepSize: number;
stepSize: NumberObject;
isInstantUpdate: boolean;
addNotification: (string) => void;
}
addNotification: (message: string, levelname?: LevelName) => void;
changeCallback?: (
value: unknown,
attributeName?: string,
prefix?: string,
callback?: (ack: unknown) => void
) => void;
displayName: string;
id: string;
};
export const SliderComponent = React.memo((props: SliderComponentProps) => {
const renderCount = useRef(0);
const [open, setOpen] = useState(false);
useEffect(() => {
renderCount.current++;
});
const {
name,
parentPath,
@@ -33,99 +36,97 @@ export const SliderComponent = React.memo((props: SliderComponentProps) => {
min,
max,
stepSize,
readOnly,
docString,
isInstantUpdate,
addNotification
addNotification,
changeCallback = () => {},
displayName,
id
} = props;
const fullAccessPath = [parentPath, name].filter((element) => element).join('.');
useEffect(() => {
addNotification(`${parentPath}.${name} changed to ${value}.`);
renderCount.current++;
});
useEffect(() => {
addNotification(`${fullAccessPath} changed to ${value.value}.`);
}, [props.value]);
useEffect(() => {
addNotification(`${parentPath}.${name}.min changed to ${min}.`);
addNotification(`${fullAccessPath}.min changed to ${min.value}.`);
}, [props.min]);
useEffect(() => {
addNotification(`${parentPath}.${name}.max changed to ${max}.`);
addNotification(`${fullAccessPath}.max changed to ${max.value}.`);
}, [props.max]);
useEffect(() => {
addNotification(`${parentPath}.${name}.stepSize changed to ${stepSize}.`);
addNotification(`${fullAccessPath}.stepSize changed to ${stepSize.value}.`);
}, [props.stepSize]);
const emitSliderUpdate = (
name: string,
parentPath: string,
value: number,
callback?: (ack: unknown) => void,
min: number = props.min,
max: number = props.max,
stepSize: number = props.stepSize
) => {
emit_update(
name,
parentPath,
{
value: value,
min: min,
max: max,
step_size: stepSize
},
callback
);
};
const handleOnChange = (event, newNumber: number | number[]) => {
// This will never be the case as we do not have a range slider. However, we should
// make sure this is properly handled.
if (Array.isArray(newNumber)) {
newNumber = newNumber[0];
}
emitSliderUpdate(name, parentPath, newNumber);
changeCallback(newNumber, `${name}.value`);
};
const handleValueChange = (newValue: number, valueType: string) => {
switch (valueType) {
case 'min':
emitSliderUpdate(name, parentPath, value, undefined, newValue);
break;
case 'max':
emitSliderUpdate(name, parentPath, value, undefined, min, newValue);
break;
case 'stepSize':
emitSliderUpdate(name, parentPath, value, undefined, min, max, newValue);
break;
default:
break;
}
changeCallback(newValue, `${name}.${valueType}`);
};
const deconstructNumberDict = (
numberDict: NumberObject
): [number, boolean, string | null] => {
let numberMagnitude: number;
let numberUnit: string | null = null;
const numberReadOnly = numberDict.readonly;
if (numberDict.type === 'int' || numberDict.type === 'float') {
numberMagnitude = numberDict.value;
} else if (numberDict.type === 'Quantity') {
numberMagnitude = numberDict.value.magnitude;
numberUnit = numberDict.value.unit;
}
return [numberMagnitude, numberReadOnly, numberUnit];
};
const [valueMagnitude, valueReadOnly, valueUnit] = deconstructNumberDict(value);
const [minMagnitude, minReadOnly] = deconstructNumberDict(min);
const [maxMagnitude, maxReadOnly] = deconstructNumberDict(max);
const [stepSizeMagnitude, stepSizeReadOnly] = deconstructNumberDict(stepSize);
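For illustration (the magnitudes and unit are invented), the NumberObject inputs deconstructed above might look like this:
// Hypothetical serialized values as delivered by the backend:
const quantityValue: NumberObject = {
  type: 'Quantity',
  readonly: false,
  value: { magnitude: 1.5, unit: 'mV' }
};
const intStepSize: NumberObject = { type: 'int', readonly: true, value: 1 };
// deconstructNumberDict(quantityValue) -> [1.5, false, 'mV']
// deconstructNumberDict(intStepSize)   -> [1, true, null]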
return (
<div className="sliderComponent" id={parentPath.concat('.' + name)}>
<div className="component sliderComponent" id={id}>
{process.env.NODE_ENV === 'development' && (
<p>Render count: {renderCount.current}</p>
<div>Render count: {renderCount.current}</div>
)}
<DocStringComponent docString={docString} />
<Row>
<Col xs="auto" xl="auto">
<InputGroup.Text>{name}</InputGroup.Text>
<InputGroup.Text>
{displayName}
<DocStringComponent docString={docString} />
</InputGroup.Text>
</Col>
<Col xs="5" xl>
<Slider
style={{ margin: '0px 0px 10px 0px' }}
aria-label="Always visible"
// valueLabelDisplay="on"
disabled={readOnly}
value={value}
disabled={valueReadOnly}
value={valueMagnitude}
onChange={(event, newNumber) => handleOnChange(event, newNumber)}
min={min}
max={max}
step={stepSize}
min={minMagnitude}
max={maxMagnitude}
step={stepSizeMagnitude}
marks={[
{ value: min, label: `${min}` },
{ value: max, label: `${max}` }
{ value: minMagnitude, label: `${minMagnitude}` },
{ value: maxMagnitude, label: `${maxMagnitude}` }
]}
/>
</Col>
@@ -133,18 +134,20 @@ export const SliderComponent = React.memo((props: SliderComponentProps) => {
<NumberComponent
isInstantUpdate={isInstantUpdate}
parentPath={parentPath}
name={name}
name={`${name}.value`}
docString=""
readOnly={readOnly}
readOnly={valueReadOnly}
type="float"
value={value}
showName={false}
customEmitUpdate={emitSliderUpdate}
addNotification={() => null}
value={valueMagnitude}
unit={valueUnit}
addNotification={() => {}}
changeCallback={(value) => changeCallback(value, name + '.value')}
id={id + '-value'}
/>
</Col>
<Col xs="auto">
<ToggleButton
id={`button-${id}`}
onClick={() => setOpen(!open)}
type="checkbox"
checked={open}
@@ -174,7 +177,8 @@ export const SliderComponent = React.memo((props: SliderComponentProps) => {
<Form.Label>Min Value</Form.Label>
<Form.Control
type="number"
value={min}
value={minMagnitude}
disabled={minReadOnly}
onChange={(e) => handleValueChange(Number(e.target.value), 'min')}
/>
</Col>
@@ -183,7 +187,8 @@ export const SliderComponent = React.memo((props: SliderComponentProps) => {
<Form.Label>Max Value</Form.Label>
<Form.Control
type="number"
value={max}
value={maxMagnitude}
disabled={maxReadOnly}
onChange={(e) => handleValueChange(Number(e.target.value), 'max')}
/>
</Col>
@@ -192,8 +197,9 @@ export const SliderComponent = React.memo((props: SliderComponentProps) => {
<Form.Label>Step Size</Form.Label>
<Form.Control
type="number"
value={stepSize}
onChange={(e) => handleValueChange(Number(e.target.value), 'stepSize')}
value={stepSizeMagnitude}
disabled={stepSizeReadOnly}
onChange={(e) => handleValueChange(Number(e.target.value), 'step_size')}
/>
</Col>
</Row>

View File

@@ -1,27 +1,46 @@
import React, { useEffect, useRef, useState } from 'react';
import { Form, InputGroup } from 'react-bootstrap';
import { emit_update } from '../socket';
import { DocStringComponent } from './DocStringComponent';
import '../App.css';
import { LevelName } from './NotificationsComponent';
// TODO: add button functionality
interface StringComponentProps {
type StringComponentProps = {
name: string;
parentPath?: string;
value: string;
readOnly: boolean;
docString: string;
isInstantUpdate: boolean;
addNotification: (string) => void;
}
addNotification: (message: string, levelname?: LevelName) => void;
changeCallback?: (
value: unknown,
attributeName?: string,
prefix?: string,
callback?: (ack: unknown) => void
) => void;
displayName: string;
id: string;
};
export const StringComponent = React.memo((props: StringComponentProps) => {
const { name, parentPath, readOnly, docString, isInstantUpdate, addNotification } =
props;
const {
name,
readOnly,
docString,
isInstantUpdate,
addNotification,
changeCallback = () => {},
displayName,
id
} = props;
const renderCount = useRef(0);
const [inputString, setInputString] = useState(props.value);
const fullAccessPath = [props.parentPath, props.name]
.filter((element) => element)
.join('.');
useEffect(() => {
renderCount.current++;
@@ -32,41 +51,44 @@ export const StringComponent = React.memo((props: StringComponentProps) => {
if (props.value !== inputString) {
setInputString(props.value);
}
addNotification(`${parentPath}.${name} changed to ${props.value}.`);
addNotification(`${fullAccessPath} changed to ${props.value}.`);
}, [props.value]);
const handleChange = (event) => {
setInputString(event.target.value);
if (isInstantUpdate) {
emit_update(name, parentPath, event.target.value);
changeCallback(event.target.value);
}
};
const handleKeyDown = (event) => {
if (event.key === 'Enter' && !isInstantUpdate) {
emit_update(name, parentPath, inputString);
changeCallback(inputString);
event.preventDefault();
}
};
const handleBlur = () => {
if (!isInstantUpdate) {
emit_update(name, parentPath, inputString);
changeCallback(inputString);
}
};
return (
<div className={'stringComponent'} id={parentPath.concat(name)}>
<div className="component stringComponent" id={id}>
{process.env.NODE_ENV === 'development' && (
<p>Render count: {renderCount.current}</p>
<div>Render count: {renderCount.current}</div>
)}
<DocStringComponent docString={docString} />
<InputGroup>
<InputGroup.Text>{name}</InputGroup.Text>
<InputGroup.Text>
{displayName}
<DocStringComponent docString={docString} />
</InputGroup.Text>
<Form.Control
type="text"
name={name}
value={inputString}
disabled={readOnly}
name={name}
onChange={handleChange}
onKeyDown={handleKeyDown}
onBlur={handleBlur}

View File

@@ -9,15 +9,28 @@ console.debug('Websocket: ', URL);
export const socket = io(URL, { path: '/ws/socket.io', transports: ['websocket'] });
export const emit_update = (
export const setAttribute = (
name: string,
parentPath: string,
value: unknown,
callback?: (ack: unknown) => void
) => {
if (callback) {
socket.emit('frontend_update', { name, parent_path: parentPath, value }, callback);
socket.emit('set_attribute', { name, parent_path: parentPath, value }, callback);
} else {
socket.emit('frontend_update', { name, parent_path: parentPath, value });
socket.emit('set_attribute', { name, parent_path: parentPath, value });
}
};
export const runMethod = (
name: string,
parentPath: string,
kwargs: Record<string, unknown>,
callback?: (ack: unknown) => void
) => {
if (callback) {
socket.emit('run_method', { name, parent_path: parentPath, kwargs }, callback);
} else {
socket.emit('run_method', { name, parent_path: parentPath, kwargs });
}
};

View File

@@ -0,0 +1,107 @@
import { SerializedValue } from '../components/GenericComponent';
export type State = {
type: string;
value: Record<string, SerializedValue> | null;
readonly: boolean;
doc: string | null;
};
export function setNestedValueByPath(
serializationDict: Record<string, SerializedValue>,
path: string,
serializedValue: SerializedValue
): Record<string, SerializedValue> {
const parentPathParts = path.split('.').slice(0, -1);
const attrName = path.split('.').pop();
if (!attrName) {
throw new Error('Invalid path');
}
let currentSerializedValue: SerializedValue;
const newSerializationDict: Record<string, SerializedValue> = JSON.parse(
JSON.stringify(serializationDict)
);
let currentDict = newSerializationDict;
try {
for (const pathPart of parentPathParts) {
currentSerializedValue = getNextLevelDictByKey(currentDict, pathPart, false);
// @ts-expect-error The value will be of type SerializedValue as we are still
// looping through the parent parts
currentDict = currentSerializedValue['value'];
}
currentSerializedValue = getNextLevelDictByKey(currentDict, attrName, true);
Object.assign(currentSerializedValue, serializedValue);
return newSerializationDict;
} catch (error) {
console.error(error);
return currentDict;
}
}
function getNextLevelDictByKey(
serializationDict: Record<string, SerializedValue>,
attrName: string,
allowAppend: boolean = false
): SerializedValue {
const [key, index] = parseListAttrAndIndex(attrName);
let currentDict: SerializedValue;
try {
if (index !== null) {
if (!serializationDict[key] || !Array.isArray(serializationDict[key]['value'])) {
throw new Error(`Expected an array at '${key}', but found something else.`);
}
if (index < serializationDict[key]['value'].length) {
currentDict = serializationDict[key]['value'][index];
} else if (allowAppend && index === serializationDict[key]['value'].length) {
// Appending to list
// @ts-expect-error When the index is not null, I expect an array
serializationDict[key]['value'].push({});
currentDict = serializationDict[key]['value'][index];
} else {
throw new Error(`Index out of range for '${key}[${index}]'.`);
}
} else {
if (!serializationDict[key]) {
throw new Error(`Key '${key}' not found.`);
}
currentDict = serializationDict[key];
}
} catch (error) {
throw new Error(`Error occurred trying to access '${attrName}': ${error}`);
}
if (typeof currentDict !== 'object' || currentDict === null) {
throw new Error(
`Expected a dictionary at '${attrName}', but found type '${typeof currentDict}' instead.`
);
}
return currentDict;
}
function parseListAttrAndIndex(attrString: string): [string, number | null] {
let index: number | null = null;
let attrName = attrString;
if (attrString.includes('[') && attrString.endsWith(']')) {
const parts = attrString.split('[');
attrName = parts[0];
const indexPart = parts[1].slice(0, -1); // Removes the closing ']'
if (!isNaN(parseInt(indexPart))) {
index = parseInt(indexPart);
} else {
console.error(`Invalid index format in key: ${attrString}`);
}
}
return [attrName, index];
}

View File

@@ -0,0 +1,16 @@
export function getIdFromFullAccessPath(fullAccessPath: string) {
if (fullAccessPath) {
// Replace '].' with a single dash
let id = fullAccessPath.replace(/\]\./g, '-');
// Replace any character that is not a word character or underscore with a dash
id = id.replace(/[^\w_]+/g, '-');
// Remove any trailing dashes
id = id.replace(/-+$/, '');
return id;
} else {
return 'main';
}
}

View File

@@ -4,10 +4,13 @@ edit_uri: blob/docs/docs/
nav:
- Home: index.md
- Getting Started: getting-started.md
- User Guide:
- Components Guide: user-guide/Components.md
- Developer Guide:
- Developer Guide: dev-guide/README.md
- API Reference: dev-guide/api.md
- Adding Components: dev-guide/Adding_Components.md
- Observer Pattern Implementation: dev-guide/Observer_Pattern_Implementation.md # <-- New section
- About:
- Release Notes: about/release-notes.md
- Contributing: about/contributing.md
@@ -22,7 +25,6 @@ markdown_extensions:
- smarty
- toc:
permalink: true
baselevel: 4
- pymdownx.highlight:
anchor_linenums: true
- pymdownx.snippets
@@ -38,5 +40,3 @@ plugins:
watch:
- src/pydase

1887
poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,2 +0,0 @@
[virtualenvs]
in-project = true

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "pydase"
version = "0.1.2"
version = "0.7.4"
description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases."
authors = ["Mose Mueller <mosmuell@ethz.ch>"]
readme = "README.md"
@@ -10,33 +10,30 @@ packages = [{ include = "pydase", from = "src" }]
[tool.poetry.dependencies]
python = "^3.10"
rpyc = "^5.3.1"
loguru = "^0.7.0"
fastapi = "^0.100.0"
uvicorn = "^0.22.0"
fastapi = "^0.108.0"
uvicorn = "^0.27.0"
toml = "^0.10.2"
python-socketio = "^5.8.0"
websockets = "^11.0.3"
confz = "^2.0.0"
pint = "^0.22"
pillow = "^10.0.0"
[tool.poetry.group.dev]
optional = true
[tool.poetry.group.dev.dependencies]
types-toml = "^0.10.8.6"
pytest = "^7.4.0"
pytest-cov = "^4.1.0"
mypy = "^1.4.1"
black = "^23.1.0"
isort = "^5.12.0"
flake8 = "^5.0.4"
flake8-use-fstring = "^1.4"
flake8-functions = "^0.0.7"
flake8-comprehensions = "^3.11.1"
flake8-pep585 = "^0.1.7"
flake8-pep604 = "^0.1.0"
flake8-eradicate = "^1.4.0"
matplotlib = "^3.7.2"
pyright = "^1.1.323"
pytest-mock = "^3.11.1"
ruff = "^0.2.0"
pytest-asyncio = "^0.23.2"
[tool.poetry.group.docs]
optional = true
[tool.poetry.group.docs.dependencies]
mkdocs = "^1.5.2"
@@ -48,38 +45,60 @@ pymdown-extensions = "^10.1"
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.ruff]
target-version = "py310" # Always generate Python 3.10-compatible code
extend-exclude = [
"docs", "frontend"
]
[tool.ruff.lint]
select = [
"ASYNC", # flake8-async
"C4", # flake8-comprehensions
"C901", # mccabe complex-structure
"E", # pycodestyle errors
"ERA", # eradicate
"F", # pyflakes
"FLY", # flynt
"G", # flake8-logging-format
"I", # isort
"ICN", # flake8-import-conventions
"INP", # flake8-no-pep420
"ISC", # flake8-implicit-str-concat
"N", # pep8-naming
"NPY", # NumPy-specific rules
"PERF", # perflint
"PIE", # flake8-pie
"PL", # pylint
"PYI", # flake8-pyi
"Q", # flake8-quotes
"RET", # flake8-return
"RUF", # Ruff-specific rules
"SIM", # flake8-simplify
"TID", # flake8-tidy-imports
"TCH", # flake8-type-checking
"UP", # pyupgrade
"YTT", # flake8-2020
"W", # pycodestyle warnings
]
ignore = [
"RUF006", # asyncio-dangling-task
"PERF203", # try-except-in-loop
]
[tool.ruff.lint.mccabe]
max-complexity = 7
[tool.pyright]
include = ["src/pydase", "tests"]
exclude = ["**/node_modules", "**/__pycache__", "docs", "frontend"]
venvPath = "."
venv = ".venv"
include = ["src/pydase"]
typeCheckingMode = "basic"
reportUnknownMemberType = true
[tool.black]
line-length = 88
exclude = '''
/(
\.git
| \.mypy_cache
| \.tox
| venv
| \.venv
| _build
| buck-out
| build
| dist
)/
'''
[tool.isort]
profile = "black"
[tool.mypy]
mypy_path = "src/"
show_error_codes = true
disallow_untyped_defs = true
disallow_untyped_calls = true
disallow_incomplete_defs = true
disallow_any_generics = true
check_untyped_defs = true
ignore_missing_imports = false

View File

@@ -27,10 +27,14 @@ print(my_service.voltage.value) # Output: 5
```
"""
from pydase.components.coloured_enum import ColouredEnum
from pydase.components.device_connection import DeviceConnection
from pydase.components.image import Image
from pydase.components.number_slider import NumberSlider
__all__ = [
"NumberSlider",
"Image",
"ColouredEnum",
"DeviceConnection",
]

View File

@@ -0,0 +1,59 @@
from enum import Enum
class ColouredEnum(Enum):
"""
Represents a UI element that can display colour-coded text based on its value.
This class extends the standard Enum but requires its values to be valid CSS
colour codes. Supported colour formats include:
- Hexadecimal colours
- Hexadecimal colours with transparency
- RGB colours
- RGBA colours
- HSL colours
- HSLA colours
- Predefined/Cross-browser colour names
Refer to this website for more details on colour formats:
(https://www.w3schools.com/cssref/css_colours_legal.php)
The behavior of this component in the UI depends on how it's defined in the data
service:
- As a property with a setter or as an attribute: Renders as a dropdown menu,
allowing users to select and change its value from the frontend.
- As a property without a setter: Displays as a coloured box with the key of the
`ColouredEnum` as text inside, serving as a visual indicator without user
interaction.
Example:
--------
```python
import pydase.components as pyc
import pydase
class MyStatus(pyc.ColouredEnum):
PENDING = "#FFA500" # Orange
RUNNING = "#0000FF80" # Transparent Blue
PAUSED = "rgb(169, 169, 169)" # Dark Gray
RETRYING = "rgba(255, 255, 0, 0.3)" # Transparent Yellow
COMPLETED = "hsl(120, 100%, 50%)" # Green
FAILED = "hsla(0, 100%, 50%, 0.7)" # Transparent Red
CANCELLED = "SlateGray" # Slate Gray
class StatusExample(pydase.DataService):
_status = MyStatus.RUNNING
@property
def status(self) -> MyStatus:
return self._status
@status.setter
def status(self, value: MyStatus) -> None:
# Custom logic here...
self._status = value
# Example usage:
my_service = StatusExample()
my_service.status = MyStatus.FAILED
```
"""

View File

@@ -0,0 +1,77 @@
import asyncio
import pydase
class DeviceConnection(pydase.DataService):
"""
Base class for device connection management within the pydase framework.
This class serves as the foundation for subclasses that manage connections to
specific devices. It implements automatic reconnection logic that periodically
checks the device's availability and attempts to reconnect if the connection is
lost. The frequency of these checks is controlled by the `_reconnection_wait_time`
attribute.
Subclassing
-----------
Users should primarily override the `connect` method to establish a connection
to the device. This method should update the `self._connected` attribute to reflect
the connection status:
>>> class MyDeviceConnection(DeviceConnection):
... def connect(self) -> None:
... # Implementation to connect to the device
... # Update self._connected to `True` if connection is successful,
... # `False` otherwise
... ...
Optionally, if additional logic is needed to determine the connection status,
the `connected` property can also be overridden:
>>> class MyDeviceConnection(DeviceConnection):
... @property
... def connected(self) -> bool:
... # Custom logic to determine connection status
... return some_custom_condition
...
Frontend Representation
-----------------------
In the frontend, this class is represented without directly exposing the `connect`
method and `connected` attribute. Instead, user-defined attributes, methods, and
properties are displayed. When `self.connected` is `False`, the frontend component
shows an overlay that allows manual triggering of the `connect()` method. This
overlay disappears once the connection is successfully re-established.
"""
def __init__(self) -> None:
super().__init__()
self._connected = False
self._autostart_tasks["_handle_connection"] = () # type: ignore
self._reconnection_wait_time = 10.0
def connect(self) -> None:
"""Tries connecting to the device and changes `self._connected` status
accordingly. This method is called every `self._reconnection_wait_time` seconds
when `self.connected` is False. Users should override this method to implement
device-specific connection logic.
"""
@property
def connected(self) -> bool:
"""Indicates if the device is currently connected or was recently connected.
Users may override this property to incorporate custom logic for determining
the connection status.
"""
return self._connected
async def _handle_connection(self) -> None:
"""Automatically tries reconnecting to the device if it is not connected.
This method leverages the `connect` method and the `connected` property to
manage the connection status.
"""
while True:
if not self.connected:
self.connect()
await asyncio.sleep(self._reconnection_wait_time)
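A minimal, self-contained sketch of such a subclass; `connect_to_device` and the address below are hypothetical placeholders, not part of pydase:
```python
import pydase.components


def connect_to_device(address: str) -> object:
    """Hypothetical stand-in for whatever your device driver provides."""
    return object()


class MyDeviceConnection(pydase.components.DeviceConnection):
    def __init__(self, address: str = "192.0.2.1") -> None:
        super().__init__()
        self._address = address
        self._handle: object | None = None

    def connect(self) -> None:
        # Called every `_reconnection_wait_time` seconds while `self.connected`
        # is False; update `self._connected` to reflect the outcome.
        try:
            self._handle = connect_to_device(self._address)
            self._connected = True
        except Exception:
            self._connected = False
```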

View File

@@ -1,25 +1,27 @@
import base64
import io
import logging
from pathlib import Path
from typing import TYPE_CHECKING, Optional
from typing import TYPE_CHECKING
from urllib.request import urlopen
import PIL.Image
from loguru import logger
import PIL.Image # type: ignore[import-untyped]
from pydase.data_service.data_service import DataService
if TYPE_CHECKING:
from matplotlib.figure import Figure
logger = logging.getLogger(__name__)
class Image(DataService):
def __init__(
self,
) -> None:
super().__init__()
self._value: str = ""
self._format: str = ""
super().__init__()
@property
def value(self) -> str:
@@ -31,19 +33,19 @@ class Image(DataService):
def load_from_path(self, path: Path | str) -> None:
with PIL.Image.open(path) as image:
self._load_from_PIL(image)
self._load_from_pil(image)
def load_from_matplotlib_figure(self, fig: "Figure", format_: str = "png") -> None:
buffer = io.BytesIO()
fig.savefig(buffer, format=format_) # type: ignore
fig.savefig(buffer, format=format_)
value_ = base64.b64encode(buffer.getvalue())
self._load_from_base64(value_, format_)
def load_from_url(self, url: str) -> None:
image = PIL.Image.open(urlopen(url))
self._load_from_PIL(image)
self._load_from_pil(image)
def load_from_base64(self, value_: bytes, format_: Optional[str] = None) -> None:
def load_from_base64(self, value_: bytes, format_: str | None = None) -> None:
if format_ is None:
format_ = self._get_image_format_from_bytes(value_)
if format_ is None:
@@ -54,11 +56,11 @@ class Image(DataService):
self._load_from_base64(value_, format_)
def _load_from_base64(self, value_: bytes, format_: str) -> None:
value = value_.decode("utf-8") if isinstance(value_, bytes) else value_
value = value_.decode("utf-8")
self._value = value
self._format = format_
def _load_from_PIL(self, image: PIL.Image.Image) -> None:
def _load_from_pil(self, image: PIL.Image.Image) -> None:
if image.format is not None:
format_ = image.format
buffer = io.BytesIO()

View File

@@ -1,9 +1,10 @@
from typing import Any, Literal
from loguru import logger
import logging
from typing import Any
from pydase.data_service.data_service import DataService
logger = logging.getLogger(__name__)
class NumberSlider(DataService):
"""
@@ -12,23 +13,68 @@ class NumberSlider(DataService):
Parameters:
-----------
value (float | int, optional):
value (float, optional):
The initial value of the slider. Defaults to 0.
min (float, optional):
The minimum value of the slider. Defaults to 0.
max (float, optional):
The maximum value of the slider. Defaults to 100.
step_size (float | int, optional):
step_size (float, optional):
The increment/decrement step size of the slider. Defaults to 1.0.
type (Literal["int"] | Literal["float"], optional):
The type of the slider value. Determines if the value is an integer or float.
Defaults to "float".
Example:
--------
```python
class MyService(DataService):
voltage = NumberSlider(1, 0, 10, 0.1, "int")
class MySlider(pydase.components.NumberSlider):
def __init__(
self,
value: float = 0.0,
min_: float = 0.0,
max_: float = 100.0,
step_size: float = 1.0,
) -> None:
super().__init__(value, min_, max_, step_size)
@property
def min(self) -> float:
return self._min
@min.setter
def min(self, value: float) -> None:
self._min = value
@property
def max(self) -> float:
return self._max
@max.setter
def max(self, value: float) -> None:
self._max = value
@property
def step_size(self) -> float:
return self._step_size
@step_size.setter
def step_size(self, value: float) -> None:
self._step_size = value
@property
def value(self) -> float:
return self._value
@value.setter
def value(self, value: float) -> None:
if value < self._min or value > self._max:
raise ValueError(
"Value is either below allowed min or above max value."
)
self._value = value
class MyService(pydase.DataService):
def __init__(self) -> None:
self.voltage = MySlider()
# Modifying or accessing the voltage value:
my_service = MyService()
@@ -39,28 +85,37 @@ class NumberSlider(DataService):
def __init__(
self,
value: float | int = 0,
min: float = 0.0,
max: float = 100.0,
step_size: float | int = 1.0,
type: Literal["int"] | Literal["float"] = "float",
value: Any = 0.0,
min_: float = 0.0,
max_: float = 100.0,
step_size: float = 1.0,
) -> None:
if type not in {"float", "int"}:
logger.error(f"Unknown type '{type}'. Using 'float'.")
type = "float"
self._type = type
self.step_size = step_size
self.value = value
self.min = min
self.max = max
super().__init__()
self._step_size = step_size
self._value = value
self._min = min_
self._max = max_
def __setattr__(self, name: str, value: Any) -> None:
if name in ["value", "step_size"]:
value = int(value) if self._type == "int" else float(value)
elif not name.startswith("_"):
value = float(value)
@property
def min(self) -> float:
"""The min property."""
return self._min
return super().__setattr__(name, value)
@property
def max(self) -> float:
"""The min property."""
return self._max
@property
def step_size(self) -> float:
"""The min property."""
return self._step_size
@property
def value(self) -> Any:
"""The value property."""
return self._value
@value.setter
def value(self, value: Any) -> None:
self._value = value

View File

@@ -1,9 +1,24 @@
from pathlib import Path
from typing import Literal
from confz import BaseConfig, EnvSource
class OperationMode(BaseConfig): # type: ignore
environment: Literal["development"] | Literal["production"] = "development"
class OperationMode(BaseConfig): # type: ignore[misc]
environment: Literal["development", "production"] = "development"
CONFIG_SOURCES = EnvSource(allow=["ENVIRONMENT"])
class ServiceConfig(BaseConfig): # type: ignore[misc]
config_dir: Path = Path("config")
web_port: int = 8001
rpc_port: int = 18871
CONFIG_SOURCES = EnvSource(allow_all=True, prefix="SERVICE_", file=".env")
class WebServerConfig(BaseConfig): # type: ignore[misc]
generate_web_settings: bool = False
CONFIG_SOURCES = EnvSource(allow=["GENERATE_WEB_SETTINGS"])

View File

@@ -1,16 +1,15 @@
from __future__ import annotations
from abc import ABC
from typing import TYPE_CHECKING, Any
from pydase.observer_pattern.observable.observable import Observable
if TYPE_CHECKING:
from .callback_manager import CallbackManager
from .data_service import DataService
from .task_manager import TaskManager
from pydase.data_service.data_service import DataService
from pydase.data_service.task_manager import TaskManager
class AbstractDataService(ABC):
class AbstractDataService(Observable):
__root__: DataService
_task_manager: TaskManager
_callback_manager: CallbackManager
_autostart_tasks: dict[str, tuple[Any]]

View File

@@ -1,400 +0,0 @@
from __future__ import annotations
import inspect
from collections.abc import Callable
from typing import TYPE_CHECKING, Any
from loguru import logger
from pydase.data_service.abstract_data_service import AbstractDataService
from pydase.utils.helpers import get_class_and_instance_attributes
from .data_service_list import DataServiceList
if TYPE_CHECKING:
from .data_service import DataService
class CallbackManager:
_notification_callbacks: list[Callable[[str, str, Any], Any]] = []
"""
A list of callback functions that are executed when a change occurs in the
DataService instance. These functions are intended to handle or respond to these
changes in some way, such as emitting a socket.io message to the frontend.
Each function in this list should be a callable that accepts three parameters:
- parent_path (str): The path to the parent of the attribute that was changed.
- name (str): The name of the attribute that was changed.
- value (Any): The new value of the attribute.
A callback function can be added to this list using the add_notification_callback
method. Whenever a change in the DataService instance occurs (or in its nested
DataService or DataServiceList instances), the emit_notification method is invoked,
which in turn calls all the callback functions in _notification_callbacks with the
appropriate arguments.
This implementation follows the observer pattern, with the DataService instance as
the "subject" and the callback functions as the "observers".
"""
_list_mapping: dict[int, DataServiceList] = {}
"""
A dictionary mapping the id of the original lists to the corresponding
DataServiceList instances.
This is used to ensure that all references to the same list within the DataService
object point to the same DataServiceList, so that any modifications to that list can
be tracked consistently. The keys of the dictionary are the ids of the original
lists, and the values are the DataServiceList instances that wrap these lists.
"""
def __init__(self, service: DataService) -> None:
self.callbacks: set[Callable[[str, Any], None]] = set()
self.service = service
def _register_list_change_callbacks( # noqa: C901
self, obj: "AbstractDataService", parent_path: str
) -> None:
"""
This method ensures that notifications are emitted whenever a list attribute of
a DataService instance changes. These notifications pertain solely to the list
item changes, not to changes in attributes of objects within the list.
The method works by converting all list attributes (both at the class and
instance levels) into DataServiceList objects. Each DataServiceList is then
assigned a callback that is triggered whenever an item in the list is updated.
The callback emits a notification, but only if the DataService instance was the
root instance when the callback was registered.
This method operates recursively, processing the input object and all nested
attributes that are instances of DataService. While navigating the structure,
it constructs a path for each attribute that traces back to the root. This path
is included in any emitted notifications to facilitate identification of the
source of a change.
Parameters:
-----------
obj: DataService
The target object to be processed. All list attributes (and those of its
nested DataService attributes) will be converted into DataServiceList
objects.
parent_path: str
The access path for the parent object. Used to construct the full access
path for the notifications.
"""
# Convert all list attributes (both class and instance) to DataServiceList
attrs = get_class_and_instance_attributes(obj)
for attr_name, attr_value in attrs.items():
if isinstance(attr_value, AbstractDataService):
new_path = f"{parent_path}.{attr_name}"
self._register_list_change_callbacks(attr_value, new_path)
elif isinstance(attr_value, list):
# Create callback for current attr_name
# Default arguments solve the late binding problem by capturing the
# value at the time the lambda is defined, not when it is called. This
# prevents attr_name from being overwritten in the next loop iteration.
callback = (
lambda index, value, attr_name=attr_name: self.service._callback_manager.emit_notification(
parent_path=parent_path,
name=f"{attr_name}[{index}]",
value=value,
)
if self.service == self.service.__root__
else None
)
# Check if attr_value is already a DataServiceList or in the mapping
if isinstance(attr_value, DataServiceList):
attr_value.add_callback(callback)
continue
if id(attr_value) in self._list_mapping:
notifying_list = self._list_mapping[id(attr_value)]
notifying_list.add_callback(callback)
else:
notifying_list = DataServiceList(attr_value, callback=[callback])
self._list_mapping[id(attr_value)] = notifying_list
setattr(obj, attr_name, notifying_list)
# recursively add callbacks to list attributes of DataService instances
for i, item in enumerate(attr_value):
if isinstance(item, AbstractDataService):
new_path = f"{parent_path}.{attr_name}[{i}]"
self._register_list_change_callbacks(item, new_path)
def _register_DataService_instance_callbacks(
self, obj: "AbstractDataService", parent_path: str
) -> None:
"""
This function is a key part of the observer pattern implemented by the
DataService class.
Its purpose is to allow the system to automatically send out notifications
whenever an attribute of a DataService instance is updated, which is especially
useful when the DataService instance is part of a nested structure.
It works by recursively registering callbacks for a given DataService instance
and all of its nested attributes. Each callback is responsible for emitting a
notification when the attribute it is attached to is modified.
This function ensures that only the root DataService instance (the one directly
exposed to the user or another system via rpyc) emits notifications.
Each notification contains a 'parent_path' that traces the attribute's location
within the nested DataService structure, starting from the root. This makes it
easier for observers to determine exactly where a change has occurred.
Parameters:
-----------
obj: DataService
The target object on which callbacks are to be registered.
parent_path: str
The access path for the parent object. This is used to construct the full
access path for the notifications.
"""
# Create and register a callback for the object
# only emit the notification when the call was registered by the root object
callback: Callable[[str, Any], None] = (
lambda name, value: obj._callback_manager.emit_notification(
parent_path=parent_path, name=name, value=value
)
if self.service == obj.__root__
and not name.startswith("_") # we are only interested in public attributes
and not isinstance(
getattr(type(obj), name, None), property
) # exclude property notifications -> those are handled in separate callbacks
else None
)
obj._callback_manager.callbacks.add(callback)
# Recursively register callbacks for all nested attributes of the object
attrs = get_class_and_instance_attributes(obj)
for nested_attr_name, nested_attr in attrs.items():
if isinstance(nested_attr, DataServiceList):
self._register_list_callbacks(
nested_attr, parent_path, nested_attr_name
)
elif isinstance(nested_attr, AbstractDataService):
self._register_service_callbacks(
nested_attr, parent_path, nested_attr_name
)
def _register_list_callbacks(
self, nested_attr: list[Any], parent_path: str, attr_name: str
) -> None:
"""Handles registration of callbacks for list attributes"""
for i, list_item in enumerate(nested_attr):
if isinstance(list_item, AbstractDataService):
self._register_service_callbacks(
list_item, parent_path, f"{attr_name}[{i}]"
)
def _register_service_callbacks(
self, nested_attr: "AbstractDataService", parent_path: str, attr_name: str
) -> None:
"""Handles registration of callbacks for DataService attributes"""
# as the DataService is an attribute of self, change the root object
# use the dictionary to not trigger callbacks on initialised objects
nested_attr.__dict__["__root__"] = self.service.__root__
new_path = f"{parent_path}.{attr_name}"
self._register_DataService_instance_callbacks(nested_attr, new_path)
def __register_recursive_parameter_callback(
self,
obj: "AbstractDataService | DataServiceList",
callback: Callable[[str | int, Any], None],
) -> None:
"""
Register callback to a DataService or DataServiceList instance and its nested
instances.
For a DataService, this method traverses its attributes and recursively adds the
callback for nested DataService or DataServiceList instances. For a
DataServiceList,
the callback is also triggered when an item gets reassigned.
"""
if isinstance(obj, DataServiceList):
# emits callback when item in list gets reassigned
obj.add_callback(callback=callback)
obj_list: DataServiceList | list[AbstractDataService] = obj
else:
obj_list = [obj]
# this enables notifications when a class instance was changed (-> item is
# changed, not reassigned)
for item in obj_list:
if isinstance(item, AbstractDataService):
item._callback_manager.callbacks.add(callback)
for attr_name in set(dir(item)) - set(dir(object)) - {"__root__"}:
attr_value = getattr(item, attr_name)
if isinstance(attr_value, (AbstractDataService, DataServiceList)):
self.__register_recursive_parameter_callback(
attr_value, callback
)
def _register_property_callbacks( # noqa: C901
self,
obj: "AbstractDataService",
parent_path: str,
) -> None:
"""
Register callbacks to notify when properties or their dependencies change.
This method cycles through all attributes (both class and instance level) of the
input `obj`. For each attribute that is a property, it identifies dependencies
used in the getter method and creates a callback for each one.
The method is recursive for attributes that are of type DataService or
DataServiceList. It attaches the callback directly to DataServiceList items or
propagates it through nested DataService instances.
"""
attrs = get_class_and_instance_attributes(obj)
for attr_name, attr_value in attrs.items():
if isinstance(attr_value, AbstractDataService):
self._register_property_callbacks(
attr_value, parent_path=f"{parent_path}.{attr_name}"
)
elif isinstance(attr_value, DataServiceList):
for i, item in enumerate(attr_value):
if isinstance(item, AbstractDataService):
self._register_property_callbacks(
item, parent_path=f"{parent_path}.{attr_name}[{i}]"
)
if isinstance(attr_value, property):
dependencies = attr_value.fget.__code__.co_names # type: ignore
source_code_string = inspect.getsource(attr_value.fget) # type: ignore
for dependency in dependencies:
# check if the dependencies are attributes of obj
# This doesn't have to be the case, as in the following example:
# >>> @property
# >>> def power(self) -> float:
# >>> return self.class_attr.voltage * self.current
#
# The dependencies for this property are:
# > ('class_attr', 'voltage', 'current')
if f"self.{dependency}" not in source_code_string:
continue
# use `obj` instead of `type(obj)` to get DataServiceList
# instead of list
dependency_value = getattr(obj, dependency)
if isinstance(
dependency_value, (DataServiceList, AbstractDataService)
):
callback = (
lambda name, value, dependent_attr=attr_name: obj._callback_manager.emit_notification(
parent_path=parent_path,
name=dependent_attr,
value=getattr(obj, dependent_attr),
)
if self.service == obj.__root__
else None
)
self.__register_recursive_parameter_callback(
dependency_value,
callback=callback,
)
else:
callback = (
lambda name, _, dep_attr=attr_name, dep=dependency: obj._callback_manager.emit_notification( # type: ignore
parent_path=parent_path,
name=dep_attr,
value=getattr(obj, dep_attr),
)
if name == dep and self.service == obj.__root__
else None
)
# Add to callbacks
obj._callback_manager.callbacks.add(callback)
def _register_start_stop_task_callbacks(
self, obj: "AbstractDataService", parent_path: str
) -> None:
"""
This function registers callbacks for start and stop methods of async functions.
These callbacks are stored in the '_task_status_change_callbacks' attribute and
are called when the status of a task changes.
Parameters:
-----------
obj: AbstractDataService
The target object on which callbacks are to be registered.
parent_path: str
The access path for the parent object. This is used to construct the full
access path for the notifications.
"""
# Create and register a callback for the object
# only emit the notification when the call was registered by the root object
callback: Callable[[str, dict[str, Any] | None], None] = (
lambda name, status: obj._callback_manager.emit_notification(
parent_path=parent_path, name=name, value=status
)
if self.service == obj.__root__
and not name.startswith("_") # we are only interested in public attributes
else None
)
obj._task_manager.task_status_change_callbacks.append(callback)
# Recursively register callbacks for all nested attributes of the object
attrs: dict[str, Any] = get_class_and_instance_attributes(obj)
for nested_attr_name, nested_attr in attrs.items():
if isinstance(nested_attr, AbstractDataService):
self._register_start_stop_task_callbacks(
nested_attr, parent_path=f"{parent_path}.{nested_attr_name}"
)
def register_callbacks(self) -> None:
self._register_list_change_callbacks(
self.service, f"{self.service.__class__.__name__}"
)
self._register_DataService_instance_callbacks(
self.service, f"{self.service.__class__.__name__}"
)
self._register_property_callbacks(
self.service, f"{self.service.__class__.__name__}"
)
self._register_start_stop_task_callbacks(
self.service, f"{self.service.__class__.__name__}"
)
def emit_notification(self, parent_path: str, name: str, value: Any) -> None:
logger.debug(f"{parent_path}.{name} changed to {value}!")
for callback in self._notification_callbacks:
try:
callback(parent_path, name, value)
except Exception as e:
logger.error(e)
def add_notification_callback(
self, callback: Callable[[str, str, Any], None]
) -> None:
"""
Adds a new notification callback function to the list of callbacks.
This function is intended to be used for registering a function that will be
called whenever the value of an attribute changes.
Args:
callback (Callable[[str, str, Any], None]): The callback function to
register.
It should accept three parameters:
- parent_path (str): The parent path of the parameter.
- name (str): The name of the changed parameter.
- value (Any): The value of the parameter.
"""
self._notification_callbacks.append(callback)

View File

@@ -1,32 +1,28 @@
import asyncio
import inspect
import json
import os
import logging
from enum import Enum
from typing import Any, Optional, cast, get_type_hints
from typing import Any, get_type_hints
import rpyc
from loguru import logger
import rpyc # type: ignore[import-untyped]
import pydase.units as u
from pydase.data_service.abstract_data_service import AbstractDataService
from pydase.data_service.callback_manager import CallbackManager
from pydase.data_service.task_manager import TaskManager
from pydase.observer_pattern.observable.observable import (
Observable,
)
from pydase.utils.helpers import (
convert_arguments_to_hinted_types,
generate_paths_from_DataService_dict,
get_class_and_instance_attributes,
get_component_class_names,
get_nested_value_from_DataService_by_path_and_key,
get_object_attr_from_path,
is_property_attribute,
parse_list_attr_and_index,
update_value_if_changed,
)
from pydase.utils.warnings import (
warn_if_instance_class_does_not_inherit_from_DataService,
from pydase.utils.serializer import (
SerializedObject,
Serializer,
)
logger = logging.getLogger(__name__)
def process_callable_attribute(attr: Any, args: dict[str, Any]) -> Any:
converted_args_or_error_msg = convert_arguments_to_hinted_types(
@@ -40,74 +36,75 @@ def process_callable_attribute(attr: Any, args: dict[str, Any]) -> Any:
class DataService(rpyc.Service, AbstractDataService):
def __init__(self, filename: Optional[str] = None) -> None:
self._callback_manager: CallbackManager = CallbackManager(self)
def __init__(self, **kwargs: Any) -> None:
super().__init__()
self._task_manager = TaskManager(self)
if not hasattr(self, "_autostart_tasks"):
self._autostart_tasks = {}
self.__root__: "DataService" = self
"""Keep track of the root object. This helps to filter the emission of
notifications. This overwrites the TaskManager's __root__ attribute."""
self._filename: Optional[str] = filename
self._callback_manager.register_callbacks()
self.__check_instance_classes()
self._initialised = True
self._load_values_from_json()
def __setattr__(self, __name: str, __value: Any) -> None:
# converting attributes that are not properties
if not isinstance(getattr(type(self), __name, None), property):
current_value = getattr(self, __name, None)
# parse ints into floats if current value is a float
if isinstance(current_value, float) and isinstance(__value, int):
__value = float(__value)
# Check and warn for unexpected type changes in attributes
self._warn_on_type_change(__name, __value)
if isinstance(current_value, u.Quantity):
__value = u.convert_to_quantity(__value, str(current_value.u))
# every class defined by the user should inherit from DataService if it is
# assigned to a public attribute
if not __name.startswith("_") and not inspect.isfunction(__value):
self.__warn_if_not_observable(__value)
# Set the attribute
super().__setattr__(__name, __value)
if self.__dict__.get("_initialised") and not __name == "_initialised":
for callback in self._callback_manager.callbacks:
callback(__name, __value)
elif __name.startswith(f"_{self.__class__.__name__}__"):
def _warn_on_type_change(self, attr_name: str, new_value: Any) -> None:
if is_property_attribute(self, attr_name):
return
current_value = getattr(self, attr_name, None)
if self._is_unexpected_type_change(current_value, new_value):
logger.warning(
f"Warning: You should not set private but rather protected attributes! "
f"Use {__name.replace(f'_{self.__class__.__name__}__', '_')} instead "
f"of {__name.replace(f'_{self.__class__.__name__}__', '__')}."
"Type of '%s' changed from '%s' to '%s'. This may have unwanted "
"side effects! Consider setting it to '%s' directly.",
attr_name,
type(current_value).__name__,
type(new_value).__name__,
type(current_value).__name__,
)
def _is_unexpected_type_change(self, current_value: Any, new_value: Any) -> bool:
return (
isinstance(current_value, float)
and not isinstance(new_value, float)
or (
isinstance(current_value, u.Quantity)
and not isinstance(new_value, u.Quantity)
)
)
def __warn_if_not_observable(self, __value: Any) -> None:
value_class = __value if inspect.isclass(__value) else __value.__class__
if not issubclass(
value_class,
(int | float | bool | str | list | Enum | u.Quantity | Observable),
):
logger.warning(
"Class '%s' does not inherit from DataService. This may lead to"
" unexpected behaviour!",
value_class.__name__,
)
def __check_instance_classes(self) -> None:
for attr_name, attr_value in get_class_and_instance_attributes(self).items():
# every class defined by the user should inherit from DataService if it is
# assigned to a public attribute
if not attr_name.startswith("_"):
warn_if_instance_class_does_not_inherit_from_DataService(attr_value)
def __set_attribute_based_on_type( # noqa:CFQ002
self,
target_obj: Any,
attr_name: str,
attr: Any,
value: Any,
index: Optional[int],
path_list: list[str],
) -> None:
if isinstance(attr, Enum):
update_value_if_changed(target_obj, attr_name, attr.__class__[value])
elif isinstance(attr, list) and index is not None:
update_value_if_changed(attr, index, value)
elif isinstance(attr, DataService) and isinstance(value, dict):
for key, v in value.items():
self.update_DataService_attribute([*path_list, attr_name], key, v)
elif callable(attr):
process_callable_attribute(attr, value["args"])
else:
update_value_if_changed(target_obj, attr_name, value)
if (
not attr_name.startswith("_")
and not inspect.isfunction(attr_value)
and not isinstance(attr_value, property)
):
self.__warn_if_not_observable(attr_value)
def _rpyc_getattr(self, name: str) -> Any:
if name.startswith("_"):
@@ -129,68 +126,7 @@ class DataService(rpyc.Service, AbstractDataService):
# allow all other attributes
setattr(self, name, value)
def _load_values_from_json(self) -> None:
if self._filename is not None:
# Check if the file specified by the filename exists
if os.path.exists(self._filename):
with open(self._filename, "r") as f:
# Load JSON data from file and update class attributes with these
# values
self.load_DataService_from_JSON(cast(dict[str, Any], json.load(f)))
def write_to_file(self) -> None:
"""
Serialize the DataService instance and write it to a JSON file.
Args:
filename (str): The name of the file to write to.
"""
if self._filename is not None:
with open(self._filename, "w") as f:
json.dump(self.serialize(), f, indent=4)
else:
logger.error(
f"Class {self.__class__.__name__} was not initialised with a filename. "
'Skipping "write_to_file"...'
)
def load_DataService_from_JSON(self, json_dict: dict[str, Any]) -> None:
# Traverse the serialized representation and set the attributes of the class
serialized_class = self.serialize()
for path in generate_paths_from_DataService_dict(json_dict):
value = get_nested_value_from_DataService_by_path_and_key(
json_dict, path=path
)
value_type = get_nested_value_from_DataService_by_path_and_key(
json_dict, path=path, key="type"
)
class_value_type = get_nested_value_from_DataService_by_path_and_key(
serialized_class, path=path, key="type"
)
if class_value_type == value_type:
class_attr_is_read_only = (
get_nested_value_from_DataService_by_path_and_key(
serialized_class, path=path, key="readonly"
)
)
if class_attr_is_read_only:
logger.debug(
f'Attribute "{path}" is read-only. Ignoring value from JSON '
"file..."
)
continue
# Split the path into parts
parts = path.split(".")
attr_name = parts[-1]
self.update_DataService_attribute(parts[:-1], attr_name, value)
else:
logger.info(
f'Attribute type of "{path}" changed from "{value_type}" to '
f'"{class_value_type}". Ignoring value from JSON file...'
)
def serialize(self) -> dict[str, dict[str, Any]]: # noqa
def serialize(self) -> SerializedObject:
"""
Serializes the instance into a dictionary, preserving the structure of the
instance.
@@ -207,158 +143,4 @@ class DataService(rpyc.Service, AbstractDataService):
Returns:
dict: The serialized instance.
"""
result: dict[str, dict[str, Any]] = {}
# Get the dictionary of the base class
base_set = set(type(super()).__dict__)
# Get the dictionary of the derived class
derived_set = set(type(self).__dict__)
# Get the difference between the two dictionaries
derived_only_set = derived_set - base_set
instance_dict = set(self.__dict__)
# Merge the class and instance dictionaries
merged_set = derived_only_set | instance_dict
def get_attribute_doc(attr: Any) -> Optional[str]:
"""This function takes an input attribute attr and returns its documentation
string if it's different from the documentation of its type; otherwise
it returns None.
"""
attr_doc = inspect.getdoc(attr)
attr_class_doc = inspect.getdoc(type(attr))
if attr_class_doc != attr_doc:
return attr_doc
else:
return None
# Iterate over attributes, properties, class attributes, and methods
for key in sorted(merged_set):
if key.startswith("_"):
continue # Skip attributes that start with underscore
# Skip keys that start with "start_" or "stop_" and end with an async method
# name
if (key.startswith("start_") or key.startswith("stop_")) and key.split(
"_", 1
)[1] in {
name
for name, _ in inspect.getmembers(
self, predicate=inspect.iscoroutinefunction
)
}:
continue
# Get the value of the current attribute or method
value = getattr(self, key)
if isinstance(value, DataService):
result[key] = {
"type": type(value).__name__
if type(value).__name__ in get_component_class_names()
else "DataService",
"value": value.serialize(),
"readonly": False,
"doc": get_attribute_doc(value),
}
elif isinstance(value, list):
result[key] = {
"type": "list",
"value": [
{
"type": type(item).__name__
if not isinstance(item, DataService)
or type(item).__name__ in get_component_class_names()
else "DataService",
"value": item.serialize()
if isinstance(item, DataService)
else item,
"readonly": False,
"doc": get_attribute_doc(value),
}
for item in value
],
"readonly": False,
}
elif inspect.isfunction(value) or inspect.ismethod(value):
sig = inspect.signature(value)
# Store parameters and their annotations in a dictionary
parameters: dict[str, Optional[str]] = {}
for k, v in sig.parameters.items():
annotation = v.annotation
if annotation is not inspect._empty:
if isinstance(annotation, type):
# Handle regular types
parameters[k] = annotation.__name__
else:
parameters[k] = str(annotation)
else:
parameters[k] = None
running_task_info = None
if (
key in self._task_manager.tasks
): # If there's a running task for this method
task_info = self._task_manager.tasks[key]
running_task_info = task_info["kwargs"]
result[key] = {
"type": "method",
"async": asyncio.iscoroutinefunction(value),
"parameters": parameters,
"doc": get_attribute_doc(value),
"readonly": True,
"value": running_task_info,
}
elif isinstance(value, Enum):
result[key] = {
"type": "Enum",
"value": value.name,
"enum": {
name: member.value
for name, member in value.__class__.__members__.items()
},
"readonly": False,
"doc": get_attribute_doc(value),
}
else:
result[key] = {
"type": type(value).__name__,
"value": value
if not isinstance(value, u.Quantity)
else {"magnitude": value.m, "unit": str(value.u)},
"readonly": False,
"doc": get_attribute_doc(value),
}
if isinstance(getattr(self.__class__, key, None), property):
prop: property = getattr(self.__class__, key)
result[key]["readonly"] = prop.fset is None
result[key]["doc"] = get_attribute_doc(prop)
return result
def update_DataService_attribute(
self,
path_list: list[str],
attr_name: str,
value: Any,
) -> None:
# If attr_name corresponds to a list entry, extract the attr_name and the index
attr_name, index = parse_list_attr_and_index(attr_name)
# Traverse the object according to the path parts
target_obj = get_object_attr_from_path(self, path_list)
# If the attribute is a property, change it using the setter without getting the
# property value (would otherwise be bad for expensive getter methods)
if is_property_attribute(target_obj, attr_name):
setattr(target_obj, attr_name, value)
return
attr = get_object_attr_from_path(target_obj, [attr_name])
if attr is None:
return
self.__set_attribute_based_on_type(
target_obj, attr_name, attr, value, index, path_list
)
return Serializer.serialize_object(self)

View File

@@ -0,0 +1,52 @@
import logging
from typing import TYPE_CHECKING, Any, cast
from pydase.utils.serializer import (
SerializationPathError,
SerializationValueError,
SerializedObject,
get_nested_dict_by_path,
set_nested_value_by_path,
)
if TYPE_CHECKING:
from pydase import DataService
logger = logging.getLogger(__name__)
class DataServiceCache:
def __init__(self, service: "DataService") -> None:
self._cache: SerializedObject
self.service = service
self._initialize_cache()
@property
def cache(self) -> SerializedObject:
return self._cache
def _initialize_cache(self) -> None:
"""Initializes the cache and sets up the callback."""
logger.debug("Initializing cache.")
self._cache = self.service.serialize()
def update_cache(self, full_access_path: str, value: Any) -> None:
set_nested_value_by_path(
cast(dict[str, SerializedObject], self._cache["value"]),
full_access_path,
value,
)
def get_value_dict_from_cache(self, full_access_path: str) -> SerializedObject:
try:
return get_nested_dict_by_path(
cast(dict[str, SerializedObject], self._cache["value"]),
full_access_path,
)
except (SerializationPathError, SerializationValueError, KeyError):
return {
"value": None,
"type": None,
"doc": None,
"readonly": False,
}

View File

@@ -1,63 +0,0 @@
from collections.abc import Callable
from typing import Any
from pydase.utils.warnings import (
warn_if_instance_class_does_not_inherit_from_DataService,
)
class DataServiceList(list):
"""
DataServiceList is a list with additional functionality to trigger callbacks
whenever an item is set. This can be used to track changes in the list items.
The class takes the same arguments as the list superclass during initialization,
with an additional optional 'callback' argument that is a list of functions.
These callbacks are stored and executed whenever an item in the DataServiceList
is set via the __setitem__ method. The callbacks receive the index of the changed
item and its new value as arguments.
The original list that is passed during initialization is kept as a private
attribute to prevent it from being garbage collected.
Additional callbacks can be added after initialization using the `add_callback`
method.
Attributes:
callbacks (list):
List of callback functions to be executed on item set.
"""
def __init__(
self,
*args: list[Any],
callback: list[Callable[[int, Any], None]] | None = None,
**kwargs: Any,
) -> None:
self.callbacks: list[Callable[[int, Any], None]] = []
if isinstance(callback, list):
self.callbacks = callback
for item in args[0]:
warn_if_instance_class_does_not_inherit_from_DataService(item)
# prevent the gc from deleting the passed list by keeping a reference
self._original_list = args[0]
super().__init__(*args, **kwargs) # type: ignore
def __setitem__(self, key: int, value: Any) -> None: # type: ignore
super().__setitem__(key, value) # type: ignore
for callback in self.callbacks:
callback(key, value)
def add_callback(self, callback: Callable[[int, Any], None]) -> None:
"""
Add a new callback function to be executed on item set.
Args:
callback (Callable[[int, Any], None]): Callback function that takes two
arguments - index of the changed item and its new value.
"""
self.callbacks.append(callback)

View File

@@ -0,0 +1,123 @@
import logging
from collections.abc import Callable
from copy import deepcopy
from typing import Any
from pydase.data_service.state_manager import StateManager
from pydase.observer_pattern.observable.observable_object import ObservableObject
from pydase.observer_pattern.observer.property_observer import (
PropertyObserver,
)
from pydase.utils.helpers import get_object_attr_from_path_list
from pydase.utils.serializer import SerializedObject, dump
logger = logging.getLogger(__name__)
class DataServiceObserver(PropertyObserver):
def __init__(self, state_manager: StateManager) -> None:
self.state_manager = state_manager
self._notification_callbacks: list[
Callable[[str, Any, SerializedObject], None]
] = []
super().__init__(state_manager.service)
def on_change(self, full_access_path: str, value: Any) -> None:
if any(
full_access_path.startswith(changing_attribute)
and full_access_path != changing_attribute
for changing_attribute in self.changing_attributes
):
return
cached_value_dict = deepcopy(
self.state_manager._data_service_cache.get_value_dict_from_cache(
full_access_path
)
)
cached_value = cached_value_dict.get("value")
if cached_value != dump(value)["value"] and all(
part[0] != "_" for part in full_access_path.split(".")
):
logger.debug("'%s' changed to '%s'", full_access_path, value)
self._update_cache_value(full_access_path, value, cached_value_dict)
cached_value_dict = deepcopy(
self.state_manager._data_service_cache.get_value_dict_from_cache(
full_access_path
)
)
for callback in self._notification_callbacks:
callback(full_access_path, value, cached_value_dict)
if isinstance(value, ObservableObject):
self._update_property_deps_dict()
self._notify_dependent_property_changes(full_access_path)
def _update_cache_value(
self,
full_access_path: str,
value: Any,
cached_value_dict: SerializedObject | dict[str, Any],
) -> None:
value_dict = dump(value)
if cached_value_dict != {}:
if (
cached_value_dict["type"] != "method"
and cached_value_dict["type"] != value_dict["type"]
):
logger.warning(
"Type of '%s' changed from '%s' to '%s'. This could have unwanted "
"side effects! Consider setting it to '%s' directly.",
full_access_path,
cached_value_dict["type"],
value_dict["type"],
cached_value_dict["type"],
)
self.state_manager._data_service_cache.update_cache(
full_access_path,
value,
)
def _notify_dependent_property_changes(self, changed_attr_path: str) -> None:
changed_props = self.property_deps_dict.get(changed_attr_path, [])
for prop in changed_props:
# only notify about changing attribute if it is not currently being
# "changed" e.g. when calling the getter of a property within another
# property
if prop not in self.changing_attributes:
self._notify_changed(
prop,
get_object_attr_from_path_list(self.observable, prop.split(".")),
)
def add_notification_callback(
self, callback: Callable[[str, Any, SerializedObject], None]
) -> None:
"""
Registers a callback function to be invoked upon attribute changes in the
observed object.
This method allows for the addition of custom callback functions that will be
executed whenever there is a change in the value of an observed attribute. The
callback function is called with detailed information about the change, enabling
external logic to respond to specific state changes within the observable
object.
Args:
callback (Callable[[str, Any, SerializedObject], None]): The callback function to be
registered. The function should have the following signature:
- full_access_path (str): The full dot-notation access path of the
changed attribute. This path indicates the location of the changed
attribute within the observable object's structure.
- value (Any): The new value of the changed attribute.
- cached_value_dict (dict[str, Any]): A dictionary representing the
cached state of the attribute prior to the change. This can be useful
for understanding the nature of the change and for historical
comparison.
"""
self._notification_callbacks.append(callback)
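
A minimal sketch of how this observer can be wired up; the service class and attribute name below are invented for illustration. A StateManager is created for a DataService instance, a DataServiceObserver is attached to it, and a custom notification callback is registered:

from typing import Any

import pydase
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.utils.serializer import SerializedObject


class MyService(pydase.DataService):
    voltage = 1.0


service = MyService()
observer = DataServiceObserver(StateManager(service))


def log_change(
    full_access_path: str, value: Any, cached_value_dict: SerializedObject
) -> None:
    # invoked for every public attribute whose serialized value actually changed
    print(f"{full_access_path} -> {value}")


observer.add_notification_callback(log_change)
service.voltage = 2.0  # triggers log_change("voltage", 2.0, ...)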

View File

@@ -0,0 +1,293 @@
import json
import logging
import os
from collections.abc import Callable
from pathlib import Path
from typing import TYPE_CHECKING, Any, cast
import pydase.units as u
from pydase.data_service.data_service_cache import DataServiceCache
from pydase.utils.helpers import (
get_object_attr_from_path_list,
is_property_attribute,
parse_list_attr_and_index,
)
from pydase.utils.serializer import (
SerializedObject,
dump,
generate_serialized_data_paths,
get_nested_dict_by_path,
serialized_dict_is_nested_object,
)
if TYPE_CHECKING:
from pydase import DataService
logger = logging.getLogger(__name__)
def load_state(func: Callable[..., Any]) -> Callable[..., Any]:
"""This function should be used as a decorator on property setters to indicate that
the value should be loaded from the JSON file.
Example:
>>> class Service(pydase.DataService):
... _name = "Service"
...
... @property
... def name(self) -> str:
... return self._name
...
... @name.setter
... @load_state
... def name(self, value: str) -> None:
... self._name = value
"""
func._load_state = True # type: ignore[attr-defined]
return func
def has_load_state_decorator(prop: property) -> bool:
"""Determines if the property's setter method is decorated with the `@load_state`
decorator.
"""
try:
return prop.fset._load_state # type: ignore[union-attr]
except AttributeError:
return False
class StateManager:
"""
Manages the state of a DataService instance, serving as both a cache and a
persistence layer. It is designed to provide quick access to the latest known state
for newly connecting web clients without the need for expensive property accesses
that may involve complex calculations or I/O operations.
The StateManager listens for state change notifications from the DataService's
callback manager and updates its cache accordingly. This cache does not always
reflect the most current complex property states but rather retains the value from
the last known state, optimizing for performance and reducing the load on the
system.
While the StateManager ensures that the cached state is as up-to-date as possible,
it does not autonomously update complex properties of the DataService. Such
properties must be updated programmatically, for instance, by invoking specific
tasks or methods that trigger the necessary operations to refresh their state.
The cached state maintained by the StateManager is particularly useful for web
clients that connect to the system and need immediate access to the current state of
the DataService. By avoiding direct and potentially costly property accesses, the
StateManager provides a snapshot of the DataService's state that is sufficiently
accurate for initial rendering and interaction.
Attributes:
cache (dict[str, Any]):
A dictionary cache of the DataService's state.
filename (str):
The file name used for storing the DataService's state.
service (DataService):
The DataService instance whose state is being managed.
Note:
The StateManager's cache updates are triggered by notifications and do not
include autonomous updates of complex DataService properties, which must be
managed programmatically. The cache serves the purpose of providing immediate
state information to web clients, reflecting the state after the last property
update.
"""
def __init__(
self, service: "DataService", filename: str | Path | None = None
) -> None:
self.filename = getattr(service, "_filename", None)
if filename is not None:
if self.filename is not None:
logger.warning(
"Overwriting filename '%s' with '%s'.", self.filename, filename
)
self.filename = filename
self.service = service
self._data_service_cache = DataServiceCache(self.service)
@property
def cache(self) -> SerializedObject:
"""Returns the cached DataService state."""
return self._data_service_cache.cache
@property
def cache_value(self) -> dict[str, SerializedObject]:
"""Returns the "value" value of the DataService serialization."""
return cast(
dict[str, SerializedObject], self._data_service_cache.cache["value"]
)
def save_state(self) -> None:
"""
Saves the DataService's current state to a JSON file defined by `self.filename`.
        Logs an info message and skips saving if `self.filename` is not set.
"""
if self.filename is not None:
with open(self.filename, "w") as f:
json.dump(self.cache_value, f, indent=4)
else:
logger.info(
"State manager was not initialised with a filename. Skipping "
"'save_state'..."
)
def load_state(self) -> None:
"""
Loads the DataService's state from a JSON file defined by `self.filename`.
Updates the service's attributes, respecting type and read-only constraints.
"""
# Traverse the serialized representation and set the attributes of the class
json_dict = self._get_state_dict_from_json_file()
if json_dict == {}:
logger.debug("Could not load the service state.")
return
for path in generate_serialized_data_paths(json_dict):
nested_json_dict = get_nested_dict_by_path(json_dict, path)
nested_class_dict = self._data_service_cache.get_value_dict_from_cache(path)
value, value_type = nested_json_dict["value"], nested_json_dict["type"]
class_attr_value_type = nested_class_dict.get("type", None)
if class_attr_value_type == value_type:
if self.__is_loadable_state_attribute(path):
self.set_service_attribute_value_by_path(path, value)
else:
logger.info(
"Attribute type of '%s' changed from '%s' to "
"'%s'. Ignoring value from JSON file...",
path,
value_type,
class_attr_value_type,
)
def _get_state_dict_from_json_file(self) -> dict[str, Any]:
if self.filename is not None and os.path.exists(self.filename):
with open(self.filename) as f:
# Load JSON data from file and update class attributes with these
# values
return cast(dict[str, Any], json.load(f))
return {}
def set_service_attribute_value_by_path(
self,
path: str,
value: Any,
) -> None:
"""
Sets the value of an attribute in the service managed by the `StateManager`
given its path as a dot-separated string.
This method updates the attribute specified by 'path' with 'value' only if the
attribute is not read-only and the new value differs from the current one.
It also handles type-specific conversions for the new value before setting it.
Args:
path: A dot-separated string indicating the hierarchical path to the
attribute.
value: The new value to set for the attribute.
"""
current_value_dict = get_nested_dict_by_path(self.cache_value, path)
# This will also filter out methods as they are 'read-only'
if current_value_dict["readonly"]:
logger.debug("Attribute '%s' is read-only. Ignoring new value...", path)
return
converted_value = self.__convert_value_if_needed(value, current_value_dict)
# only set value when it has changed
if self.__attr_value_has_changed(converted_value, current_value_dict["value"]):
self.__update_attribute_by_path(path, converted_value)
else:
logger.debug("Value of attribute '%s' has not changed...", path)
def __attr_value_has_changed(self, value_object: Any, current_value: Any) -> bool:
"""Check if the serialized value of `value_object` differs from `current_value`.
The method serializes `value_object` to compare it, which is mainly
necessary for handling Quantity objects.
"""
return dump(value_object)["value"] != current_value
def __convert_value_if_needed(
self, value: Any, current_value_dict: SerializedObject
) -> Any:
if current_value_dict["type"] == "Quantity":
return u.convert_to_quantity(
value, cast(dict[str, Any], current_value_dict["value"])["unit"]
)
if current_value_dict["type"] == "float" and not isinstance(value, float):
return float(value)
return value
def __update_attribute_by_path(self, path: str, value: Any) -> None:
parent_path_list, attr_name = path.split(".")[:-1], path.split(".")[-1]
# If attr_name corresponds to a list entry, extract the attr_name and the
# index
attr_name, index = parse_list_attr_and_index(attr_name)
# Update path to reflect the attribute without list indices
path = ".".join([*parent_path_list, attr_name])
attr_cache_type = get_nested_dict_by_path(self.cache_value, path)["type"]
# Traverse the object according to the path parts
target_obj = get_object_attr_from_path_list(self.service, parent_path_list)
if attr_cache_type in ("ColouredEnum", "Enum"):
enum_attr = get_object_attr_from_path_list(target_obj, [attr_name])
setattr(target_obj, attr_name, enum_attr.__class__[value])
elif attr_cache_type == "list":
list_obj = get_object_attr_from_path_list(target_obj, [attr_name])
list_obj[index] = value
else:
setattr(target_obj, attr_name, value)
def __is_loadable_state_attribute(self, full_access_path: str) -> bool:
"""Checks if an attribute defined by a dot-separated path should be loaded from
storage.
For properties, it verifies the presence of the '@load_state' decorator. Regular
attributes default to being loadable.
"""
parent_object = get_object_attr_from_path_list(
self.service, full_access_path.split(".")[:-1]
)
attr_name = full_access_path.split(".")[-1]
if is_property_attribute(parent_object, attr_name):
prop = getattr(type(parent_object), attr_name)
has_decorator = has_load_state_decorator(prop)
if not has_decorator:
logger.debug(
"Property '%s' has no '@load_state' decorator. "
"Ignoring value from JSON file...",
attr_name,
)
return has_decorator
cached_serialization_dict = get_nested_dict_by_path(
self.cache_value, full_access_path
)
if cached_serialization_dict["value"] == "method":
return False
# nested objects cannot be loaded
return not serialized_dict_is_nested_object(cached_serialization_dict)
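
A short usage sketch of the state manager together with the @load_state decorator; the class, attribute and file names are illustrative. Plain attributes are restored by default, while properties are only restored when their setter carries @load_state. Note that the Server (see its diff further down) creates the StateManager itself and performs the load_state/save_state calls when it is given a filename.

import pydase
from pydase.data_service.state_manager import StateManager, load_state


class Device(pydase.DataService):
    def __init__(self) -> None:
        super().__init__()
        self._current = 0.0

    @property
    def current(self) -> float:
        return self._current

    @current.setter
    @load_state
    def current(self, value: float) -> None:
        self._current = value


service = Device()
manager = StateManager(service, filename="device_state.json")
manager.load_state()   # restores 'current' from the JSON file if it exists
service.current = 1.5
manager.save_state()   # writes the serialized state back to 'device_state.json'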

View File

@@ -2,19 +2,31 @@ from __future__ import annotations
import asyncio
import inspect
from collections.abc import Callable
from functools import wraps
from typing import TYPE_CHECKING, Any, TypedDict
import logging
from enum import Enum
from typing import TYPE_CHECKING, Any
from loguru import logger
from pydase.data_service.abstract_data_service import AbstractDataService
from pydase.utils.helpers import (
function_has_arguments,
get_class_and_instance_attributes,
is_property_attribute,
)
if TYPE_CHECKING:
from collections.abc import Callable
from .data_service import DataService
logger = logging.getLogger(__name__)
class TaskDict(TypedDict):
task: asyncio.Task[None]
kwargs: dict[str, Any]
class TaskDefinitionError(Exception):
pass
class TaskStatus(Enum):
RUNNING = "running"
class TaskManager:
@@ -73,31 +85,100 @@ class TaskManager:
def __init__(self, service: DataService) -> None:
self.service = service
self._loop = asyncio.get_event_loop()
self.tasks: dict[str, TaskDict] = {}
self.tasks: dict[str, asyncio.Task[None]] = {}
"""A dictionary to keep track of running tasks. The keys are the names of the
tasks and the values are TaskDict instances which include the task itself and
its kwargs.
"""
self.task_status_change_callbacks: list[
Callable[[str, dict[str, Any] | None], Any]
] = []
"""A list of callback functions to be invoked when the status of a task (start
or stop) changes."""
self._set_start_and_stop_for_async_methods()
def _set_start_and_stop_for_async_methods(self) -> None: # noqa: C901
# inspect the methods of the class
for name, method in inspect.getmembers(
self.service, predicate=inspect.iscoroutinefunction
):
@property
def _loop(self) -> asyncio.AbstractEventLoop:
return asyncio.get_running_loop()
@wraps(method)
def start_task(*args: Any, **kwargs: Any) -> None:
def task_done_callback(task: asyncio.Task, name: str) -> None:
def _set_start_and_stop_for_async_methods(self) -> None:
for name in dir(self.service):
# circumvents calling properties
if is_property_attribute(self.service, name):
continue
method = getattr(self.service, name)
if inspect.iscoroutinefunction(method):
if function_has_arguments(method):
raise TaskDefinitionError(
"Asynchronous functions (tasks) should be defined without "
f"arguments. The task '{method.__name__}' has at least one "
"argument. Please remove the argument(s) from this function to "
"use it."
)
# create start and stop methods for each coroutine
setattr(
self.service, f"start_{name}", self._make_start_task(name, method)
)
setattr(self.service, f"stop_{name}", self._make_stop_task(name))
def _initiate_task_startup(self) -> None:
if self.service._autostart_tasks is not None:
for service_name, args in self.service._autostart_tasks.items():
start_method = getattr(self.service, f"start_{service_name}", None)
if start_method is not None and callable(start_method):
start_method(*args)
else:
logger.warning(
"No start method found for service '%s'", service_name
)
def start_autostart_tasks(self) -> None:
self._initiate_task_startup()
attrs = get_class_and_instance_attributes(self.service)
for attr_value in attrs.values():
if isinstance(attr_value, AbstractDataService):
attr_value._task_manager.start_autostart_tasks()
elif isinstance(attr_value, list):
for item in attr_value:
if isinstance(item, AbstractDataService):
item._task_manager.start_autostart_tasks()
def _make_stop_task(self, name: str) -> Callable[..., Any]:
"""
Factory function to create a 'stop_task' function for a running task.
The generated function cancels the associated asyncio task using 'name' for
identification, ensuring proper cleanup. Avoids closure and late binding issues.
Args:
name (str): The name of the coroutine task, used for its identification.
"""
def stop_task() -> None:
# cancel the task
task = self.tasks.get(name, None)
if task is not None:
self._loop.call_soon_threadsafe(task.cancel)
return stop_task
def _make_start_task(
self, name: str, method: Callable[..., Any]
) -> Callable[..., Any]:
"""
Factory function to create a 'start_task' function for a coroutine.
The generated function starts the coroutine as an asyncio task, handling
registration and monitoring.
It uses 'name' and 'method' to avoid the closure and late binding issue.
Args:
name (str): The name of the coroutine, used for task management.
method (callable): The coroutine to be turned into an asyncio task.
"""
def start_task() -> None:
def task_done_callback(task: asyncio.Task[None], name: str) -> None:
"""Handles tasks that have finished.
Removes a task from the tasks dictionary, calls the defined
@@ -107,50 +188,29 @@ class TaskManager:
self.tasks.pop(name, None)
# emit the notification that the task was stopped
for callback in self.task_status_change_callbacks:
callback(name, None)
self.service._notify_changed(name, None)
exception = task.exception()
if exception is not None:
# Handle the exception, or you can re-raise it.
logger.error(
f"Task '{name}' encountered an exception: "
f"{type(exception).__name__}: {exception}"
"Task '%s' encountered an exception: %s: %s",
name,
type(exception).__name__,
exception,
)
raise exception
async def task(*args: Any, **kwargs: Any) -> None:
async def task() -> None:
try:
await method(*args, **kwargs)
await method()
except asyncio.CancelledError:
print(f"Task {name} was cancelled")
logger.info("Task '%s' was cancelled", name)
if not self.tasks.get(name):
# Get the signature of the coroutine method to start
sig = inspect.signature(method)
# Create a list of the parameter names from the method signature.
parameter_names = list(sig.parameters.keys())
# Extend the list of positional arguments with None values to match
# the length of the parameter names list. This is done to ensure
# that zip can pair each parameter name with a corresponding value.
args_padded = list(args) + [None] * (
len(parameter_names) - len(args)
)
# Create a dictionary of keyword arguments by pairing the parameter
# names with the values in 'args_padded'. Then merge this dictionary
# with the 'kwargs' dictionary. If a parameter is specified in both
# 'args_padded' and 'kwargs', the value from 'kwargs' is used.
kwargs_updated = {
**dict(zip(parameter_names, args_padded)),
**kwargs,
}
# creating the task and adding the task_done_callback which checks
                # if an exception has occurred during the task execution
task_object = self._loop.create_task(task(*args, **kwargs))
task_object = self._loop.create_task(task())
task_object.add_done_callback(
lambda task: task_done_callback(task, name)
)
@@ -158,34 +218,11 @@ class TaskManager:
# Store the task and its arguments in the '__tasks' dictionary. The
# key is the name of the method, and the value is a dictionary
# containing the task object and the updated keyword arguments.
self.tasks[name] = {
"task": task_object,
"kwargs": kwargs_updated,
}
self.tasks[name] = task_object
# emit the notification that the task was started
for callback in self.task_status_change_callbacks:
callback(name, kwargs_updated)
self.service._notify_changed(name, TaskStatus.RUNNING)
else:
logger.error(f"Task `{name}` is already running!")
logger.error("Task '%s' is already running!", name)
def stop_task() -> None:
# cancel the task
task = self.tasks.get(name, None)
if task is not None:
self._loop.call_soon_threadsafe(task["task"].cancel)
# create start and stop methods for each coroutine
setattr(self.service, f"start_{name}", start_task)
setattr(self.service, f"stop_{name}", stop_task)
def start_autostart_tasks(self) -> None:
if self.service._autostart_tasks is not None:
for service_name, args in self.service._autostart_tasks.items():
start_method = getattr(self.service, f"start_{service_name}", None)
if start_method is not None and callable(start_method):
start_method(*args)
else:
logger.warning(
f"No start method found for service '{service_name}'"
)
return start_task
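
A sketch of how a task is defined with the reworked TaskManager; class and method names are invented for illustration. Any parameterless coroutine method of a DataService gets generated start_<name>()/stop_<name>() methods, while coroutine methods that take arguments now raise TaskDefinitionError:

import asyncio

import pydase
from pydase.server import Server


class DataAcquisition(pydase.DataService):
    readout_frequency = 1.0

    async def acquire(self) -> None:
        # must be parameterless; runs until stop_acquire() cancels it
        while True:
            await asyncio.sleep(1.0 / self.readout_frequency)


if __name__ == "__main__":
    # clients can then invoke the generated start_acquire() / stop_acquire()
    Server(DataAcquisition()).run()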

View File

@@ -1,13 +1,13 @@
{
"files": {
"main.css": "/static/css/main.398bc7f8.css",
"main.js": "/static/js/main.c348625e.js",
"main.css": "/static/css/main.7ef670d5.css",
"main.js": "/static/js/main.97ef73ea.js",
"index.html": "/index.html",
"main.398bc7f8.css.map": "/static/css/main.398bc7f8.css.map",
"main.c348625e.js.map": "/static/js/main.c348625e.js.map"
"main.7ef670d5.css.map": "/static/css/main.7ef670d5.css.map",
"main.97ef73ea.js.map": "/static/js/main.97ef73ea.js.map"
},
"entrypoints": [
"static/css/main.398bc7f8.css",
"static/js/main.c348625e.js"
"static/css/main.7ef670d5.css",
"static/js/main.97ef73ea.js"
]
}

View File

@@ -1 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="Web site displaying a pydase UI."/><link rel="apple-touch-icon" href="/logo192.png"/><link rel="manifest" href="/manifest.json"/><title>pydase App</title><script defer="defer" src="/static/js/main.c348625e.js"></script><link href="/static/css/main.398bc7f8.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="Web site displaying a pydase UI."/><link rel="apple-touch-icon" href="/logo192.png"/><link rel="manifest" href="/manifest.json"/><title>pydase App</title><script defer="defer" src="/static/js/main.97ef73ea.js"></script><link href="/static/css/main.7ef670d5.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -4,8 +4,6 @@
http://jedwatson.github.io/classnames
*/
/*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/facebook/regenerator/blob/main/LICENSE */
/**
* @license React
* react-dom.production.min.js
@@ -45,11 +43,3 @@
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/**
* @mui/styled-engine v5.13.2
*
* @license MIT
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
from pydase.observer_pattern.observable.observable import Observable
__all__ = ["Observable"]

View File

@@ -0,0 +1,71 @@
import logging
from typing import Any
from pydase.observer_pattern.observable.observable_object import ObservableObject
from pydase.utils.helpers import is_property_attribute
logger = logging.getLogger(__name__)
class Observable(ObservableObject):
def __init__(self) -> None:
super().__init__()
class_attrs = {
k: type(self).__dict__[k]
for k in set(type(self).__dict__)
- set(Observable.__dict__)
- set(self.__dict__)
}
for name, value in class_attrs.items():
if isinstance(value, property) or callable(value):
continue
self.__dict__[name] = self._initialise_new_objects(name, value)
def __setattr__(self, name: str, value: Any) -> None:
if not hasattr(self, "_observers") and name != "_observers":
logger.warning(
"Ensure that super().__init__() is called at the start of the '%s' "
"constructor! Failing to do so may lead to unexpected behavior.",
type(self).__name__,
)
self._observers = {}
value = self._handle_observable_setattr(name, value)
super().__setattr__(name, value)
self._notify_changed(name, value)
def __getattribute__(self, name: str) -> Any:
if is_property_attribute(self, name):
self._notify_change_start(name)
value = super().__getattribute__(name)
if is_property_attribute(self, name):
self._notify_changed(name, value)
return value
def _handle_observable_setattr(self, name: str, value: Any) -> Any:
if name == "_observers":
return value
self._remove_observer_if_observable(name)
value = self._initialise_new_objects(name, value)
self._notify_change_start(name)
return value
def _remove_observer_if_observable(self, name: str) -> None:
if not is_property_attribute(self, name):
current_value = getattr(self, name, None)
if isinstance(current_value, ObservableObject):
current_value._remove_observer(self, name)
def _construct_extended_attr_path(
self, observer_attr_name: str, instance_attr_name: str
) -> str:
if observer_attr_name != "":
return f"{observer_attr_name}.{instance_attr_name}"
return instance_attr_name
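
A small sketch of the Observable base class in isolation; class and attribute names are invented. super().__init__() has to run before any attribute is assigned (see the warning in __setattr__), class-level attributes are copied onto the instance, and mutable containers are wrapped in observable counterparts:

from pydase.observer_pattern.observable import Observable
from pydase.observer_pattern.observable.observable_object import ObservableObject


class Settings(Observable):
    gain = 1.0  # class attribute, copied into the instance dict by Observable.__init__

    def __init__(self) -> None:
        super().__init__()  # must come first so that '_observers' exists
        self.offsets = [0.0, 0.0]


settings = Settings()
print("gain" in settings.__dict__)                     # True
print(isinstance(settings.offsets, ObservableObject))  # True: wrapped observable list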

View File

@@ -0,0 +1,264 @@
import logging
from abc import ABC, abstractmethod
from collections.abc import Iterable
from typing import TYPE_CHECKING, Any, ClassVar, SupportsIndex
if TYPE_CHECKING:
from pydase.observer_pattern.observer.observer import Observer
logger = logging.getLogger(__name__)
class ObservableObject(ABC):
_list_mapping: ClassVar[dict[int, "_ObservableList"]] = {}
_dict_mapping: ClassVar[dict[int, "_ObservableDict"]] = {}
def __init__(self) -> None:
if not hasattr(self, "_observers"):
self._observers: dict[str, list["ObservableObject | Observer"]] = {}
def add_observer(
self, observer: "ObservableObject | Observer", attr_name: str = ""
) -> None:
if attr_name not in self._observers:
self._observers[attr_name] = []
if observer not in self._observers[attr_name]:
self._observers[attr_name].append(observer)
def _remove_observer(self, observer: "ObservableObject", attribute: str) -> None:
if attribute in self._observers:
self._observers[attribute].remove(observer)
@abstractmethod
def _remove_observer_if_observable(self, name: str) -> None:
"""Removes the current object as an observer from an observable attribute.
This method is called before an attribute of the observable object is
changed. If the current value of the attribute is an instance of
`ObservableObject`, this method removes the current object from its list
of observers. This is a crucial step to avoid unwanted notifications from
the old value of the attribute.
"""
def _notify_changed(self, changed_attribute: str, value: Any) -> None:
"""Notifies all observers about changes to an attribute.
This method iterates through all observers registered for the object and
invokes their notification method. It is called whenever an attribute of
the observable object is changed.
Args:
changed_attribute (str): The name of the changed attribute.
value (Any): The value that the attribute was set to.
"""
for attr_name, observer_list in self._observers.items():
for observer in observer_list:
                extended_attr_path = self._construct_extended_attr_path(
                    attr_name, changed_attribute
                )
                observer._notify_changed(extended_attr_path, value)
def _notify_change_start(self, changing_attribute: str) -> None:
"""Notify observers that an attribute or item change process has started.
This method is called at the start of the process of modifying an attribute in
the observed `Observable` object. It registers the attribute as currently
undergoing a change. This registration helps in managing and tracking changes as
they occur, especially in scenarios where the order of changes or their state
during the transition is significant.
Args:
changing_attribute (str): The name of the attribute that is starting to
change. This is typically the full access path of the attribute in the
`Observable`.
"""
for attr_name, observer_list in self._observers.items():
for observer in observer_list:
extended_attr_path = self._construct_extended_attr_path(
attr_name, changing_attribute
)
observer._notify_change_start(extended_attr_path)
def _initialise_new_objects(self, attr_name_or_key: Any, value: Any) -> Any:
new_value = value
if isinstance(value, list):
if id(value) in self._list_mapping:
# If the list `value` was already referenced somewhere else
new_value = self._list_mapping[id(value)]
else:
                # convert the builtin list into an _ObservableList
new_value = _ObservableList(original_list=value)
self._list_mapping[id(value)] = new_value
elif isinstance(value, dict):
if id(value) in self._dict_mapping:
                # If the dict `value` was already referenced somewhere else
new_value = self._dict_mapping[id(value)]
else:
                # convert the builtin dict into an _ObservableDict
new_value = _ObservableDict(original_dict=value)
self._dict_mapping[id(value)] = new_value
if isinstance(new_value, ObservableObject):
new_value.add_observer(self, str(attr_name_or_key))
return new_value
@abstractmethod
def _construct_extended_attr_path(
self, observer_attr_name: str, instance_attr_name: str
) -> str:
"""
Constructs the extended attribute path for notification purposes, which is used
in the observer pattern to specify the full path of an observed attribute.
This abstract method is implemented by the classes inheriting from
`ObservableObject`.
Args:
observer_attr_name (str): The name of the attribute in the observer that
holds a reference to the instance. Equals `""` if observer itself is of type
`Observer`.
instance_attr_name (str): The name of the attribute within the instance that
has changed.
Returns:
str: The constructed extended attribute path.
"""
class _ObservableList(ObservableObject, list[Any]):
def __init__(
self,
original_list: list[Any],
) -> None:
self._original_list = original_list
ObservableObject.__init__(self)
list.__init__(self, self._original_list)
for i, item in enumerate(self._original_list):
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
def __setitem__(self, key: int, value: Any) -> None: # type: ignore[override]
if hasattr(self, "_observers"):
self._remove_observer_if_observable(f"[{key}]")
value = self._initialise_new_objects(f"[{key}]", value)
self._notify_change_start(f"[{key}]")
super().__setitem__(key, value)
self._notify_changed(f"[{key}]", value)
def append(self, __object: Any) -> None:
self._notify_change_start("")
self._initialise_new_objects(f"[{len(self)}]", __object)
super().append(__object)
self._notify_changed("", self)
def clear(self) -> None:
self._remove_self_from_observables()
super().clear()
self._notify_changed("", self)
def extend(self, __iterable: Iterable[Any]) -> None:
self._remove_self_from_observables()
try:
super().extend(__iterable)
finally:
for i, item in enumerate(self):
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
self._notify_changed("", self)
def insert(self, __index: SupportsIndex, __object: Any) -> None:
self._remove_self_from_observables()
try:
super().insert(__index, __object)
finally:
for i, item in enumerate(self):
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
self._notify_changed("", self)
def pop(self, __index: SupportsIndex = -1) -> Any:
self._remove_self_from_observables()
try:
popped_item = super().pop(__index)
finally:
for i, item in enumerate(self):
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
self._notify_changed("", self)
return popped_item
def remove(self, __value: Any) -> None:
self._remove_self_from_observables()
try:
super().remove(__value)
finally:
for i, item in enumerate(self):
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
self._notify_changed("", self)
def _remove_self_from_observables(self) -> None:
for i in range(len(self)):
self._remove_observer_if_observable(f"[{i}]")
def _remove_observer_if_observable(self, name: str) -> None:
key = int(name[1:-1])
current_value = self.__getitem__(key)
if isinstance(current_value, ObservableObject):
current_value._remove_observer(self, name)
def _construct_extended_attr_path(
self, observer_attr_name: str, instance_attr_name: str
) -> str:
if observer_attr_name != "":
return f"{observer_attr_name}{instance_attr_name}"
return instance_attr_name
class _ObservableDict(dict[str, Any], ObservableObject):
def __init__(
self,
original_dict: dict[str, Any],
) -> None:
self._original_dict = original_dict
ObservableObject.__init__(self)
dict.__init__(self)
for key, value in self._original_dict.items():
super().__setitem__(key, self._initialise_new_objects(f"['{key}']", value))
def __setitem__(self, key: str, value: Any) -> None:
if not isinstance(key, str):
logger.warning("Converting non-string dictionary key %s to string.", key)
key = str(key)
if hasattr(self, "_observers"):
self._remove_observer_if_observable(f"['{key}']")
value = self._initialise_new_objects(key, value)
self._notify_change_start(f"['{key}']")
super().__setitem__(key, value)
self._notify_changed(f"['{key}']", value)
def _remove_observer_if_observable(self, name: str) -> None:
key = name[2:-2]
current_value = self.get(key, None)
if isinstance(current_value, ObservableObject):
current_value._remove_observer(self, name)
def _construct_extended_attr_path(
self, observer_attr_name: str, instance_attr_name: str
) -> str:
if observer_attr_name != "":
return f"{observer_attr_name}{instance_attr_name}"
return instance_attr_name
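
One behaviour of the container wrappers worth illustrating (names invented): the _list_mapping/_dict_mapping caches ensure that a builtin list or dict referenced from several attributes is wrapped only once, so all attributes share the same observable container.

from pydase.observer_pattern.observable import Observable


class Shared(Observable):
    def __init__(self) -> None:
        super().__init__()
        shared = [1, 2, 3]
        self.first = shared
        self.second = shared  # reuses the wrapper cached in _list_mapping


obj = Shared()
obj.first.append(4)
print(obj.second)  # [1, 2, 3, 4]: both attributes point at the same observable list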

View File

@@ -0,0 +1,7 @@
from pydase.observer_pattern.observer.observer import Observer
from pydase.observer_pattern.observer.property_observer import PropertyObserver
__all__ = [
"Observer",
"PropertyObserver",
]

View File

@@ -0,0 +1,31 @@
import logging
from abc import ABC, abstractmethod
from typing import Any
from pydase.observer_pattern.observable import Observable
logger = logging.getLogger(__name__)
class Observer(ABC):
def __init__(self, observable: Observable) -> None:
self.observable = observable
self.observable.add_observer(self)
self.changing_attributes: list[str] = []
def _notify_changed(self, changed_attribute: str, value: Any) -> None:
self.on_change(full_access_path=changed_attribute, value=value)
if changed_attribute in self.changing_attributes:
self.changing_attributes.remove(changed_attribute)
def _notify_change_start(self, changing_attribute: str) -> None:
self.changing_attributes.append(changing_attribute)
self.on_change_start(changing_attribute)
@abstractmethod
def on_change(self, full_access_path: str, value: Any) -> None:
...
def on_change_start(self, full_access_path: str) -> None:
return
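
A minimal concrete observer (names invented) showing the access paths that reach on_change, including the "[index]" notation produced by the list wrapper:

from typing import Any

from pydase.observer_pattern.observable import Observable
from pydase.observer_pattern.observer import Observer


class Counter(Observable):
    def __init__(self) -> None:
        super().__init__()
        self.count = 0
        self.history = [0]


class PrintObserver(Observer):
    def on_change(self, full_access_path: str, value: Any) -> None:
        print(f"{full_access_path} = {value}")


counter = Counter()
observer = PrintObserver(counter)  # registers itself via add_observer()
counter.count = 1                  # prints "count = 1"
counter.history[0] = 2             # prints "history[0] = 2"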

View File

@@ -0,0 +1,95 @@
import inspect
import logging
import re
from typing import Any
from pydase.observer_pattern.observable.observable import Observable
from pydase.observer_pattern.observer.observer import Observer
logger = logging.getLogger(__name__)
def reverse_dict(original_dict: dict[str, list[str]]) -> dict[str, list[str]]:
reversed_dict: dict[str, list[str]] = {
value: [] for values in original_dict.values() for value in values
}
for key, values in original_dict.items():
for value in values:
reversed_dict[value].append(key)
return reversed_dict
def get_property_dependencies(prop: property, prefix: str = "") -> list[str]:
source_code_string = inspect.getsource(prop.fget) # type: ignore[arg-type]
pattern = r"self\.([^\s\{\}]+)"
matches = re.findall(pattern, source_code_string)
return [prefix + match for match in matches if "(" not in match]
class PropertyObserver(Observer):
def __init__(self, observable: Observable) -> None:
super().__init__(observable)
self._update_property_deps_dict()
def _update_property_deps_dict(self) -> None:
self.property_deps_dict = reverse_dict(
self._get_properties_and_their_dependencies(self.observable)
)
def _get_properties_and_their_dependencies(
self, obj: Observable, prefix: str = ""
) -> dict[str, list[str]]:
deps: dict[str, Any] = {}
self._process_observable_properties(obj, deps, prefix)
self._process_nested_observables_properties(obj, deps, prefix)
return deps
def _process_observable_properties(
self, obj: Observable, deps: dict[str, Any], prefix: str
) -> None:
for k, value in vars(type(obj)).items():
prefix = (
f"{prefix}." if prefix != "" and not prefix.endswith(".") else prefix
)
key = f"{prefix}{k}"
if isinstance(value, property):
deps[key] = get_property_dependencies(value, prefix)
def _process_nested_observables_properties(
self, obj: Observable, deps: dict[str, Any], prefix: str
) -> None:
for k, value in vars(obj).items():
prefix = (
f"{prefix}." if prefix != "" and not prefix.endswith(".") else prefix
)
parent_path = f"{prefix}{k}"
if isinstance(value, Observable):
new_prefix = f"{parent_path}."
deps.update(
self._get_properties_and_their_dependencies(value, new_prefix)
)
elif isinstance(value, list | dict):
self._process_collection_item_properties(value, deps, parent_path)
def _process_collection_item_properties(
self,
collection: list[Any] | dict[str, Any],
deps: dict[str, Any],
parent_path: str,
) -> None:
if isinstance(collection, list):
for i, item in enumerate(collection):
if isinstance(item, Observable):
new_prefix = f"{parent_path}[{i}]"
deps.update(
self._get_properties_and_their_dependencies(item, new_prefix)
)
elif isinstance(collection, dict):
for key, val in collection.items():
if isinstance(val, Observable):
new_prefix = f"{parent_path}['{key}']"
deps.update(
self._get_properties_and_their_dependencies(val, new_prefix)
)
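
A sketch of the dependency extraction (names invented): PropertyObserver inspects each property getter's source for the attributes it reads and exposes the reversed mapping as property_deps_dict, which the DataServiceObserver above uses to re-notify dependent properties.

from typing import Any

from pydase.observer_pattern.observable import Observable
from pydase.observer_pattern.observer import PropertyObserver


class Sensor(Observable):
    def __init__(self) -> None:
        super().__init__()
        self._raw = 1.0

    @property
    def value(self) -> float:
        return self._raw * 2


class LoggingObserver(PropertyObserver):
    def on_change(self, full_access_path: str, value: Any) -> None:
        print(f"{full_access_path} changed to {value}")


observer = LoggingObserver(Sensor())
print(observer.property_deps_dict)  # {'_raw': ['value']}: 'value' depends on '_raw'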

View File

@@ -1,3 +1,7 @@
from pydase.server.server import Server
from pydase.server.web_server.web_server import WebServer
__all__ = ["Server"]
__all__ = [
"Server",
"WebServer",
]

View File

@@ -1,25 +1,23 @@
import asyncio
import logging
import os
import signal
import threading
from concurrent.futures import ThreadPoolExecutor
from enum import Enum
from pathlib import Path
from types import FrameType
from typing import Any, Optional, Protocol, TypedDict
from typing import Any, Protocol, TypedDict
import uvicorn
from loguru import logger
from rpyc import (
ForkingServer, # can be used for multiprocessing, e.g. a database interface server
)
from rpyc import ThreadedServer
from rpyc import ThreadedServer # type: ignore[import-untyped]
from uvicorn.server import HANDLED_SIGNALS
import pydase.units as u
from pydase import DataService
from pydase.version import __version__
from pydase.config import ServiceConfig
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.server.web_server import WebServer
from .web_server import WebAPI
logger = logging.getLogger(__name__)
class AdditionalServerProtocol(Protocol):
@@ -31,27 +29,28 @@ class AdditionalServerProtocol(Protocol):
any server implementing it should have an __init__ method for initialization and a
serve method for starting the server.
Parameters:
-----------
service: DataService
The instance of DataService that the server will use. This could be the main
application or a specific service that the server will provide.
port: int
The port number at which the server will be accessible. This should be a valid
port number, typically in the range 1024-65535.
host: str
The hostname or IP address at which the server will be hosted. This could be a
local address (like '127.0.0.1' for localhost) or a public IP address.
**kwargs: Any
Any additional parameters required for initializing the server. These parameters
are specific to the server's implementation.
Args:
data_service_observer:
Observer for the DataService, handling state updates and communication to
connected clients through injected callbacks. Can be utilized to access the
service and state manager, and to add custom state-update callbacks.
host:
Hostname or IP address where the server is accessible. Commonly '0.0.0.0' to
bind to all network interfaces.
port:
Port number on which the server listens. Typically in the range 1024-65535
(non-standard ports).
**kwargs:
Any additional parameters required for initializing the server. These
parameters are specific to the server's implementation.
"""
def __init__(
self, service: DataService, port: int, host: str, **kwargs: Any
self,
data_service_observer: DataServiceObserver,
host: str,
port: int,
**kwargs: Any,
) -> None:
...
@@ -59,7 +58,6 @@ class AdditionalServerProtocol(Protocol):
"""Starts the server. This method should be implemented as an asynchronous
method, which means that it should be able to run concurrently with other tasks.
"""
...
class AdditionalServer(TypedDict):
@@ -81,34 +79,33 @@ class Server:
"""
The `Server` class provides a flexible server implementation for the `DataService`.
Parameters:
-----------
Args:
service: DataService
The DataService instance that this server will manage.
host: str
The host address for the server. Default is '0.0.0.0', which means all available
network interfaces.
The host address for the server. Default is '0.0.0.0', which means all
available network interfaces.
rpc_port: int
The port number for the RPC server. Default is 18871.
The port number for the RPC server. Default is
`pydase.config.ServiceConfig().rpc_port`.
web_port: int
The port number for the web server. Default is 8001.
The port number for the web server. Default is
`pydase.config.ServiceConfig().web_port`.
enable_rpc: bool
Whether to enable the RPC server. Default is True.
enable_web: bool
Whether to enable the web server. Default is True.
filename: str | Path | None
Filename of the file managing the service state persistence. Defaults to None.
use_forking_server: bool
Whether to use ForkingServer for multiprocessing (e.g. for a database interface
server). Default is False.
web_settings: dict[str, Any]
Additional settings for the web server. Default is {} (an empty dictionary).
Whether to use ForkingServer for multiprocessing. Default is False.
additional_servers : list[AdditionalServer]
A list of additional servers to run alongside the main server. Each entry in the
list should be a dictionary with the following structure:
A list of additional servers to run alongside the main server. Each entry in
the list should be a dictionary with the following structure:
- server: A class that adheres to the AdditionalServerProtocol. This class
should have an `__init__` method that accepts the DataService instance,
port, host, and optional keyword arguments, and a `serve` method that is a
coroutine responsible for starting the server.
port, host, and optional keyword arguments, and a `serve` method that is
a coroutine responsible for starting the server.
- port: The port on which the additional server will be running.
- kwargs: A dictionary containing additional keyword arguments that will be
passed to the server's `__init__` method.
@@ -118,9 +115,15 @@ class Server:
>>> class MyCustomServer:
... def __init__(
... self, service: DataService, port: int, host: str, **kwargs: Any
... ):
... self.service = service
... self,
... data_service_observer: DataServiceObserver,
... host: str,
... port: int,
... **kwargs: Any,
... ) -> None:
... self.observer = data_service_observer
... self.state_manager = self.observer.state_manager
... self.service = self.state_manager.service
... self.port = port
... self.host = host
... # handle any additional arguments...
@@ -128,8 +131,8 @@ class Server:
... async def serve(self):
... # code to start the server...
And here's how you might add it to the `additional_servers` list when creating a
`Server` instance:
And here's how you might add it to the `additional_servers` list when creating
a `Server` instance:
>>> server = Server(
... service=my_data_service,
@@ -147,72 +150,48 @@ class Server:
Additional keyword arguments.
"""
def __init__( # noqa: CFQ002
def __init__( # noqa: PLR0913
self,
service: DataService,
host: str = "0.0.0.0",
rpc_port: int = 18871,
web_port: int = 8001,
rpc_port: int = ServiceConfig().rpc_port,
web_port: int = ServiceConfig().web_port,
enable_rpc: bool = True,
enable_web: bool = True,
use_forking_server: bool = False,
web_settings: dict[str, Any] = {},
additional_servers: list[AdditionalServer] = [],
filename: str | Path | None = None,
additional_servers: list[AdditionalServer] | None = None,
**kwargs: Any,
) -> None:
if additional_servers is None:
additional_servers = []
self._service = service
self._host = host
self._rpc_port = rpc_port
self._web_port = web_port
self._enable_rpc = enable_rpc
self._enable_web = enable_web
self._web_settings = web_settings
self._kwargs = kwargs
self._loop: asyncio.AbstractEventLoop
self._rpc_server_type = ForkingServer if use_forking_server else ThreadedServer
self._additional_servers = additional_servers
self.should_exit = False
self.servers: dict[str, asyncio.Future[Any]] = {}
self.executor: ThreadPoolExecutor | None = None
self._info: dict[str, Any] = {
"name": self._service.get_service_name(),
"version": __version__,
"rpc_port": self._rpc_port,
"web_port": self._web_port,
"enable_rpc": self._enable_rpc,
"enable_web": self._enable_web,
"web_settings": self._web_settings,
"additional_servers": [],
**kwargs,
}
self._state_manager = StateManager(self._service, filename)
self._observer = DataServiceObserver(self._state_manager)
self._state_manager.load_state()
def run(self) -> None:
"""
Initializes the asyncio event loop and starts the server.
This method should be called to start the server after it's been instantiated.
Raises
------
Exception
If there's an error while running the server, the error will be propagated
after the server is shut down.
"""
try:
self._loop = asyncio.get_event_loop()
except RuntimeError:
self._loop = asyncio.new_event_loop()
asyncio.set_event_loop(self._loop)
try:
self._loop.run_until_complete(self.serve())
except Exception:
self._loop.run_until_complete(self.shutdown())
raise
asyncio.run(self.serve())
async def serve(self) -> None:
process_id = os.getpid()
logger.info(f"Started server process [{process_id}]")
logger.info("Started server process [%s]", process_id)
await self.startup()
if self.should_exit:
@@ -220,9 +199,9 @@ class Server:
await self.main_loop()
await self.shutdown()
logger.info(f"Finished server process [{process_id}]")
logger.info("Finished server process [%s]", process_id)
async def startup(self) -> None: # noqa: C901
async def startup(self) -> None:
self._loop = asyncio.get_running_loop()
self._loop.set_exception_handler(self.custom_exception_handler)
self.install_signal_handlers()
@@ -230,7 +209,7 @@ class Server:
if self._enable_rpc:
self.executor = ThreadPoolExecutor()
self._rpc_server = self._rpc_server_type(
self._rpc_server = ThreadedServer(
self._service,
port=self._rpc_port,
protocol_config={
@@ -244,73 +223,26 @@ class Server:
self.servers["rpyc"] = future_or_task
for server in self._additional_servers:
addin_server = server["server"](
self._service,
port=server["port"],
data_service_observer=self._observer,
host=self._host,
info=self._info,
port=server["port"],
**server["kwargs"],
)
server_name = (
addin_server.__module__ + "." + addin_server.__class__.__name__
)
self._info["additional_servers"].append(
{
"name": server_name,
"port": server["port"],
"host": self._host,
**server["kwargs"],
}
)
future_or_task = self._loop.create_task(addin_server.serve())
self.servers[server_name] = future_or_task
if self._enable_web:
self._wapi: WebAPI = WebAPI(
service=self._service,
info=self._info,
self._web_server = WebServer(
data_service_observer=self._observer,
host=self._host,
port=self._web_port,
**self._kwargs,
)
web_server = uvicorn.Server(
uvicorn.Config(
self._wapi.fastapi_app, host=self._host, port=self._web_port
)
)
def sio_callback(parent_path: str, name: str, value: Any) -> None:
# TODO: an error happens when an attribute is set to a list
# > File "/usr/lib64/python3.11/json/encoder.py", line 180, in default
# > raise TypeError(f'Object of type {o.__class__.__name__} '
# > TypeError: Object of type list is not JSON serializable
notify_value = value
if isinstance(value, Enum):
notify_value = value.name
if isinstance(value, u.Quantity):
notify_value = {"magnitude": value.m, "unit": str(value.u)}
async def notify() -> None:
try:
await self._wapi.sio.emit( # type: ignore
"notify",
{
"data": {
"parent_path": parent_path,
"name": name,
"value": notify_value,
}
},
)
except Exception as e:
logger.warning(f"Failed to send notification: {e}")
self._loop.create_task(notify())
self._service._callback_manager.add_notification_callback(sio_callback)
# overwrite uvicorn's signal handlers, otherwise it will bogart SIGINT and
# SIGTERM, which makes it impossible to escape out of
web_server.install_signal_handlers = lambda: None # type: ignore
future_or_task = self._loop.create_task(web_server.serve())
future_or_task = self._loop.create_task(self._web_server.serve())
self.servers["web"] = future_or_task
async def main_loop(self) -> None:
@@ -320,9 +252,8 @@ class Server:
async def shutdown(self) -> None:
logger.info("Shutting down")
logger.info(f"Saving data to {self._service._filename}.")
if self._service._filename is not None:
self._service.write_to_file()
logger.info("Saving data to %s.", self._state_manager.filename)
self._state_manager.save_state()
await self.__cancel_servers()
await self.__cancel_tasks()
@@ -337,9 +268,9 @@ class Server:
try:
await task
except asyncio.CancelledError:
logger.debug(f"Cancelled {server_name} server.")
logger.debug("Cancelled '%s' server.", server_name)
except Exception as e:
logger.warning(f"Unexpected exception: {e}.")
logger.warning("Unexpected exception: %s", e)
async def __cancel_tasks(self) -> None:
for task in asyncio.all_tasks(self._loop):
@@ -347,29 +278,27 @@ class Server:
try:
await task
except asyncio.CancelledError:
logger.debug(f"Cancelled task {task.get_coro()}.")
logger.debug("Cancelled task '%s'.", task.get_coro())
except Exception as e:
logger.warning(f"Unexpected exception: {e}.")
logger.exception("Unexpected exception: %s", e)
def install_signal_handlers(self) -> None:
if threading.current_thread() is not threading.main_thread():
# Signals can only be listened to from the main thread.
return
try:
for sig in HANDLED_SIGNALS:
self._loop.add_signal_handler(sig, self.handle_exit, sig, None)
except NotImplementedError:
# Windows
for sig in HANDLED_SIGNALS:
signal.signal(sig, self.handle_exit)
def handle_exit(self, sig: int = 0, frame: Optional[FrameType] = None) -> None:
logger.info("Handling exit")
def handle_exit(self, sig: int = 0, frame: FrameType | None = None) -> None:
if self.should_exit and sig == signal.SIGINT:
self.force_exit = True
logger.warning("Received signal '%s', forcing exit...", sig)
os._exit(1)
else:
self.should_exit = True
logger.warning(
"Received signal '%s', exiting... (CTRL+C to force quit)", sig
)
def custom_exception_handler(
self, loop: asyncio.AbstractEventLoop, context: dict[str, Any]
@@ -386,7 +315,7 @@ class Server:
async def emit_exception() -> None:
try:
await self._wapi.sio.emit( # type: ignore
await self._web_server._sio.emit(
"exception",
{
"data": {
@@ -396,7 +325,7 @@ class Server:
},
)
except Exception as e:
logger.warning(f"Failed to send notification: {e}")
logger.exception("Failed to send notification: %s", e)
loop.create_task(emit_exception())
else:

View File

@@ -1,141 +0,0 @@
from pathlib import Path
from typing import Any, TypedDict
import socketio
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from loguru import logger
from pydase import DataService
from pydase.version import __version__
class UpdateDict(TypedDict):
"""
A TypedDict subclass representing a dictionary used for updating attributes in a
DataService.
Attributes:
----------
name : str
The name of the attribute to be updated in the DataService instance.
If the attribute is part of a nested structure, this would be the name of the
attribute in the last nested object. For example, for an attribute access path
'attr1.list_attr[0].attr2', 'attr2' would be the name.
parent_path : str
The access path for the parent object of the attribute to be updated. This is
used to construct the full access path for the attribute. For example, for an
attribute access path 'attr1.list_attr[0].attr2', 'attr1.list_attr[0]' would be
the parent_path.
value : Any
The new value to be assigned to the attribute. The type of this value should
match the type of the attribute to be updated.
"""
name: str
parent_path: str
value: Any
class WebAPI:
__sio_app: socketio.ASGIApp
__fastapi_app: FastAPI
def __init__( # noqa: CFQ002
self,
service: DataService,
frontend: str | Path | None = None,
css: str | Path | None = None,
enable_CORS: bool = True,
info: dict[str, Any] = {},
*args: Any,
**kwargs: Any,
):
self.service = service
self.frontend = frontend
self.css = css
self.enable_CORS = enable_CORS
self.info = info
self.args = args
self.kwargs = kwargs
self.setup_socketio()
self.setup_fastapi_app()
def setup_socketio(self) -> None:
# the socketio ASGI app, to notify clients when params update
if self.enable_CORS:
sio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*")
else:
sio = socketio.AsyncServer(async_mode="asgi")
@sio.event # type: ignore
def frontend_update(sid: str, data: UpdateDict) -> Any:
logger.debug(f"Received frontend update: {data}")
path_list, attr_name = data["parent_path"].split("."), data["name"]
path_list.remove("DataService") # always at the start, does not do anything
return self.service.update_DataService_attribute(
path_list=path_list, attr_name=attr_name, value=data["value"]
)
self.__sio = sio
self.__sio_app = socketio.ASGIApp(self.__sio)
def setup_fastapi_app(self) -> None: # noqa: CFQ004
app = FastAPI()
if self.enable_CORS:
app.add_middleware(
CORSMiddleware,
allow_credentials=True,
allow_origins=["*"],
allow_methods=["*"],
allow_headers=["*"],
)
app.mount("/ws", self.__sio_app)
# @app.get("/version", include_in_schema=False)
@app.get("/version")
def version() -> str:
return __version__
@app.get("/name")
def name() -> str:
return self.service.get_service_name()
@app.get("/info")
def info() -> dict[str, Any]:
return self.info
@app.get("/service-properties")
def service_properties() -> dict[str, Any]:
return self.service.serialize()
app.mount(
"/",
StaticFiles(
directory=Path(__file__).parent.parent / "frontend",
html=True,
),
)
self.__fastapi_app = app
def add_endpoint(self, name: str) -> None:
# your endpoint creation code
pass
def get_custom_openapi(self) -> None:
# your custom openapi generation code
pass
@property
def sio(self) -> socketio.AsyncServer:
return self.__sio
@property
def fastapi_app(self) -> FastAPI:
return self.__fastapi_app

View File

@@ -0,0 +1,3 @@
from pydase.server.web_server.web_server import WebServer
__all__ = ["WebServer"]

View File

@@ -0,0 +1,144 @@
import asyncio
import logging
from typing import Any, TypedDict
import socketio # type: ignore[import-untyped]
from pydase.data_service.data_service import process_callable_attribute
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.utils.helpers import get_object_attr_from_path_list
from pydase.utils.logging import SocketIOHandler
from pydase.utils.serializer import SerializedObject
logger = logging.getLogger(__name__)
class UpdateDict(TypedDict):
"""
A TypedDict subclass representing a dictionary used for updating attributes in a
DataService.
Attributes:
----------
name : str
The name of the attribute to be updated in the DataService instance.
If the attribute is part of a nested structure, this would be the name of the
attribute in the last nested object. For example, for an attribute access path
'attr1.list_attr[0].attr2', 'attr2' would be the name.
parent_path : str
The access path for the parent object of the attribute to be updated. This is
used to construct the full access path for the attribute. For example, for an
attribute access path 'attr1.list_attr[0].attr2', 'attr1.list_attr[0]' would be
the parent_path.
value : Any
The new value to be assigned to the attribute. The type of this value should
match the type of the attribute to be updated.
"""
name: str
parent_path: str
value: Any
class RunMethodDict(TypedDict):
"""
A TypedDict subclass representing a dictionary used for running methods from the
exposed DataService.
Attributes:
name (str): The name of the method to be run.
parent_path (str): The access path for the parent object of the method to be
run. This is used to construct the full access path for the method. For
example, for an method with access path 'attr1.list_attr[0].method_name',
'attr1.list_attr[0]' would be the parent_path.
kwargs (dict[str, Any]): The arguments passed to the method.
"""
name: str
parent_path: str
kwargs: dict[str, Any]
def setup_sio_server(
observer: DataServiceObserver,
enable_cors: bool,
loop: asyncio.AbstractEventLoop,
) -> socketio.AsyncServer:
"""
Sets up and configures a Socket.IO asynchronous server.
Args:
observer (DataServiceObserver):
The observer managing state updates and communication.
enable_cors (bool):
Flag indicating whether CORS should be enabled for the server.
loop (asyncio.AbstractEventLoop):
The event loop in which the server will run.
Returns:
socketio.AsyncServer: The configured Socket.IO asynchronous server.
"""
state_manager = observer.state_manager
if enable_cors:
sio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*")
else:
sio = socketio.AsyncServer(async_mode="asgi")
setup_sio_events(sio, state_manager)
setup_logging_handler(sio)
# Add notification callback to observer
def sio_callback(
full_access_path: str, value: Any, cached_value_dict: SerializedObject
) -> None:
if cached_value_dict != {}:
async def notify() -> None:
try:
await sio.emit(
"notify",
{
"data": {
"full_access_path": full_access_path,
"value": cached_value_dict,
}
},
)
except Exception as e:
logger.warning("Failed to send notification: %s", e)
loop.create_task(notify())
observer.add_notification_callback(sio_callback)
return sio
def setup_sio_events(sio: socketio.AsyncServer, state_manager: StateManager) -> None:
@sio.event
def set_attribute(sid: str, data: UpdateDict) -> Any:
logger.debug("Received frontend update: %s", data)
parent_path = data["parent_path"].split(".")
path_list = [element for element in parent_path if element] + [data["name"]]
path = ".".join(path_list)
return state_manager.set_service_attribute_value_by_path(
path=path, value=data["value"]
)
@sio.event
def run_method(sid: str, data: RunMethodDict) -> Any:
logger.debug("Running method: %s", data)
parent_path = data["parent_path"].split(".")
path_list = [element for element in parent_path if element] + [data["name"]]
method = get_object_attr_from_path_list(state_manager.service, path_list)
return process_callable_attribute(method, data["kwargs"])
def setup_logging_handler(sio: socketio.AsyncServer) -> None:
logger = logging.getLogger()
logger.addHandler(SocketIOHandler(sio))

View File

@@ -0,0 +1,190 @@
import asyncio
import json
import logging
from pathlib import Path
from typing import Any
import socketio # type: ignore[import-untyped]
import uvicorn
from fastapi import FastAPI, Response
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from pydase.config import ServiceConfig, WebServerConfig
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.server.web_server.sio_setup import (
setup_sio_server,
)
from pydase.utils.serializer import generate_serialized_data_paths
from pydase.version import __version__
logger = logging.getLogger(__name__)
class WebServer:
"""
Represents a web server that adheres to the AdditionalServerProtocol, designed to
work with a DataService instance. This server facilitates client-server
communication and state management through web protocols and socket connections.
The WebServer class initializes and manages a web server environment using FastAPI
and Socket.IO, allowing for HTTP and WebSocket communications. It incorporates CORS
(Cross-Origin Resource Sharing) support, custom CSS, and serves a frontend static
files directory. It also initializes web server settings based on configuration
files or generates default settings if necessary.
Configuration for the web server (like service configuration directory and whether
to generate new web settings) is determined in the following order of precedence:
1. Values provided directly to the constructor.
2. Environment variable settings (via configuration classes like
`pydase.config.ServiceConfig` and `pydase.config.WebServerConfig`).
3. Default values defined in the configuration classes.
Args:
data_service_observer (DataServiceObserver): Observer for the DataService,
handling state updates and communication to connected clients.
host (str): Hostname or IP address where the server is accessible. Commonly
'0.0.0.0' to bind to all network interfaces.
port (int): Port number on which the server listens. Typically in the range
1024-65535 (non-standard ports).
css (str | Path | None, optional): Path to a custom CSS file for styling the
frontend. If None, no custom styles are applied. Defaults to None.
enable_cors (bool, optional): Flag to enable or disable CORS policy. When True,
CORS is enabled, allowing cross-origin requests. Defaults to True.
config_dir (Path, optional): Path to the configuration
directory where the web settings will be stored. Defaults to
`pydase.config.ServiceConfig().config_dir`.
generate_web_settings (bool, optional): Flag to enable or disable generation
of a new web settings file. Defaults to
`pydase.config.WebServerConfig().generate_web_settings`.
frontend_src (Path, optional): Path to the directory containing the frontend's
static files. Defaults to the frontend bundled with pydase.
**kwargs (Any): Additional unused keyword arguments.
"""
def __init__( # noqa: PLR0913
self,
data_service_observer: DataServiceObserver,
host: str,
port: int,
css: str | Path | None = None,
enable_cors: bool = True,
config_dir: Path = ServiceConfig().config_dir,
generate_web_settings: bool = WebServerConfig().generate_web_settings,
frontend_src: Path = Path(__file__).parent.parent.parent / "frontend",
) -> None:
self.observer = data_service_observer
self.state_manager = self.observer.state_manager
self.service = self.state_manager.service
self.port = port
self.host = host
self.css = css
self.enable_cors = enable_cors
self.frontend_src = frontend_src
self._service_config_dir = config_dir
self._generate_web_settings = generate_web_settings
self._loop: asyncio.AbstractEventLoop
self._initialise_configuration()
async def serve(self) -> None:
self._loop = asyncio.get_running_loop()
self._setup_socketio()
self._setup_fastapi_app()
self.web_server = uvicorn.Server(
uvicorn.Config(self.__fastapi_app, host=self.host, port=self.port)
)
# overwrite uvicorn's signal handlers, otherwise it will bogart SIGINT and
# SIGTERM, which makes it impossible to escape out of
self.web_server.install_signal_handlers = lambda: None # type: ignore[method-assign]
await self.web_server.serve()
def _initialise_configuration(self) -> None:
logger.debug("Initialising web server configuration...")
file_path = self._service_config_dir / "web_settings.json"
if self._generate_web_settings:
# Write the web settings file, merging existing entries with defaults
logger.debug("Generating web settings file...")
file_path.parent.mkdir(
parents=True, exist_ok=True
) # Ensure directory exists
file_path.write_text(json.dumps(self.web_settings, indent=4))
def _get_web_settings_from_file(self) -> dict[str, dict[str, Any]]:
file_path = self._service_config_dir / "web_settings.json"
web_settings = {}
# File exists, read its content
if file_path.exists():
logger.debug(
"Reading configuration from file '%s' ...", file_path.absolute()
)
web_settings = json.loads(file_path.read_text())
return web_settings
@property
def web_settings(self) -> dict[str, dict[str, Any]]:
current_web_settings = self._get_web_settings_from_file()
for path in generate_serialized_data_paths(self.state_manager.cache_value):
if path in current_web_settings:
continue
current_web_settings[path] = {
"displayName": path.split(".")[-1],
"display": True,
}
return current_web_settings
def _setup_socketio(self) -> None:
self._sio = setup_sio_server(self.observer, self.enable_cors, self._loop)
self.__sio_app = socketio.ASGIApp(self._sio)
def _setup_fastapi_app(self) -> None: # noqa: C901
app = FastAPI()
if self.enable_cors:
app.add_middleware(
CORSMiddleware,
allow_credentials=True,
allow_origins=["*"],
allow_methods=["*"],
allow_headers=["*"],
)
app.mount("/ws", self.__sio_app)
@app.get("/version")
def version() -> str:
return __version__
@app.get("/name")
def name() -> str:
return type(self.service).__name__
@app.get("/service-properties")
def service_properties() -> dict[str, Any]:
return self.state_manager.cache # type: ignore
@app.get("/web-settings")
def web_settings() -> dict[str, Any]:
return self.web_settings
# exposing custom.css file provided by user
@app.get("/custom.css")
async def styles() -> Response:
if self.css is not None:
return FileResponse(str(self.css))
return Response(content="", media_type="text/css")
app.mount(
"/",
StaticFiles(
directory=self.frontend_src,
html=True,
),
)
self.__fastapi_app = app
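A hedged sketch of wiring the WebServer manually with the observer and state manager used throughout this diff; the WebServer import path is an assumption, and in a regular service pydase's own server performs this wiring.
import asyncio

import pydase
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.server.web_server import WebServer  # import path assumed for this sketch

class MyService(pydase.DataService):
    voltage = 1.0

async def main() -> None:
    state_manager = StateManager(MyService())
    observer = DataServiceObserver(state_manager)
    web_server = WebServer(observer, host="0.0.0.0", port=8001)
    await web_server.serve()

asyncio.run(main())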

View File

@@ -2,7 +2,7 @@ from typing import TypedDict
import pint
units: pint.UnitRegistry = pint.UnitRegistry()
units: pint.UnitRegistry = pint.UnitRegistry(autoconvert_offset_to_baseunit=True)
units.default_format = "~P" # pretty and short format
Quantity = pint.Quantity
@@ -15,7 +15,7 @@ class QuantityDict(TypedDict):
def convert_to_quantity(
value: QuantityDict | float | int | Quantity, unit: str = ""
value: QuantityDict | float | Quantity, unit: str = ""
) -> Quantity:
"""
Convert a given value into a pint.Quantity object with the specified unit.
@@ -53,4 +53,4 @@ def convert_to_quantity(
quantity = float(value["magnitude"]) * Unit(value["unit"])
else:
quantity = value
return quantity # type: ignore
return quantity
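A short usage sketch for convert_to_quantity based on the signature above; the module path follows the `import pydase.units as u` seen elsewhere in this diff, and the printed output is illustrative.
from pydase.units import convert_to_quantity, units

q1 = convert_to_quantity(1.0, unit="mV")                    # float plus unit string
q2 = convert_to_quantity({"magnitude": 10.0, "unit": "A"})  # QuantityDict input
q3 = convert_to_quantity(5.0 * units.ms)                    # already a Quantity
print(q1, q2, q3)  # e.g. 1.0 mV 10.0 A 5.0 ms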

View File

@@ -0,0 +1,27 @@
from collections.abc import Callable
from typing import Any
from pydase.utils.helpers import function_has_arguments
class FunctionDefinitionError(Exception):
pass
def frontend(func: Callable[..., Any]) -> Callable[..., Any]:
"""
Decorator to mark a DataService method for frontend rendering. Ensures that the
method does not take any arguments, as they are not supported for frontend rendering.
"""
if function_has_arguments(func):
raise FunctionDefinitionError(
"The @frontend decorator requires functions without arguments. Function "
f"'{func.__name__}' has at least one argument. "
"Please remove the argument(s) from this function to use it with the "
"@frontend decorator."
)
# Mark the function for frontend display.
func._display_in_frontend = True # type: ignore
return func
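A brief example of the decorator in use (the service and method names are made up); applying @frontend to a method that takes arguments raises FunctionDefinitionError at class-definition time, as the tests further down exercise.
import pydase
from pydase.utils.decorators import frontend

class MyService(pydase.DataService):
    @frontend
    def reset(self) -> str:  # only 'self' is allowed; extra arguments would raise
        return "resetting"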

View File

@@ -1,10 +1,20 @@
import re
import inspect
import logging
from collections.abc import Callable
from itertools import chain
from typing import Any, Optional, cast
from typing import Any
from loguru import logger
logger = logging.getLogger(__name__)
STANDARD_TYPES = ("int", "float", "bool", "str", "Enum", "NoneType", "Quantity")
def get_attribute_doc(attr: Any) -> str | None:
"""This function takes an input attribute attr and returns its documentation
string if it's different from the documentation of its type; otherwise,
it returns None.
"""
attr_doc = inspect.getdoc(attr)
attr_class_doc = inspect.getdoc(type(attr))
return attr_doc if attr_class_doc != attr_doc else None
def get_class_and_instance_attributes(obj: object) -> dict[str, Any]:
@@ -17,12 +27,10 @@ def get_class_and_instance_attributes(obj: object) -> dict[str, Any]:
loops.
"""
attrs = dict(chain(type(obj).__dict__.items(), obj.__dict__.items()))
attrs.pop("__root__")
return attrs
return dict(chain(type(obj).__dict__.items(), obj.__dict__.items()))
def get_object_attr_from_path(target_obj: Any, path: list[str]) -> Any:
def get_object_attr_from_path_list(target_obj: Any, path: list[str]) -> Any:
"""
Traverse the object tree according to the given path.
@@ -50,218 +58,11 @@ def get_object_attr_from_path(target_obj: Any, path: list[str]) -> Any:
target_obj = getattr(target_obj, part)
except AttributeError:
# The attribute doesn't exist
logger.debug(f"Attribute {part} does not exist in the object.")
logger.debug("Attribute %s does not exist in the object.", part)
return None
return target_obj
def generate_paths_from_DataService_dict(
data: dict, parent_path: str = ""
) -> list[str]:
"""
Recursively generate paths from a dictionary representing a DataService object.
This function traverses through a nested dictionary, which is typically obtained
from serializing a DataService object. The function generates a list where each
element is a string representing the path to each terminal value in the original
dictionary.
The paths are represented as strings, with dots ('.') denoting nesting levels and
square brackets ('[]') denoting list indices.
Args:
data (dict): The input dictionary to generate paths from. This is typically
obtained from serializing a DataService object.
parent_path (str, optional): The current path up to the current level of
recursion. Defaults to ''.
Returns:
list[str]: A list with paths as elements.
Note:
The function ignores keys whose "type" is "method", as these represent methods
of the DataService object and not its state.
Example:
-------
>>> {
... "attr1": {"type": "int", "value": 10},
... "attr2": {
... "type": "list",
... "value": [{"type": "int", "value": 1}, {"type": "int", "value": 2}],
... },
... "add": {
... "type": "method",
... "async": False,
... "parameters": {"a": "float", "b": "int"},
... "doc": "Returns the sum of the numbers a and b.",
... },
... }
>>> print(generate_paths_from_DataService_dict(nested_dict))
[attr1, attr2[0], attr2[1]]
"""
paths = []
for key, value in data.items():
if value["type"] == "method":
# ignoring methods
continue
new_path = f"{parent_path}.{key}" if parent_path else key
if isinstance(value["value"], dict) and value["type"] != "Quantity":
paths.extend(generate_paths_from_DataService_dict(value["value"], new_path)) # type: ignore
elif isinstance(value["value"], list):
for index, item in enumerate(value["value"]):
indexed_key_path = f"{new_path}[{index}]"
if isinstance(item["value"], dict):
paths.extend( # type: ignore
generate_paths_from_DataService_dict(
item["value"], indexed_key_path
)
)
else:
paths.append(indexed_key_path) # type: ignore
else:
paths.append(new_path) # type: ignore
return paths
def extract_dict_or_list_entry(data: dict[str, Any], key: str) -> dict[str, Any] | None:
"""
Extract a nested dictionary or list entry based on the provided key.
Given a dictionary and a key, this function retrieves the corresponding nested
dictionary or list entry. If the key includes an index in the format "[<index>]",
the function assumes that the corresponding entry in the dictionary is a list, and
it will attempt to retrieve the indexed item from that list.
Args:
data (dict): The input dictionary containing nested dictionaries or lists.
key (str): The key specifying the desired entry within the dictionary. The key
can be a regular dictionary key or can include an index in the format
"[<index>]" to retrieve an item from a nested list.
Returns:
dict | None: The nested dictionary or list item found for the given key. If the
key is invalid, or if the specified index is out of bounds for a list, it
returns None.
Example:
>>> data = {
... "attr1": [
... {"type": "int", "value": 10}, {"type": "string", "value": "hello"}
... ],
... "attr2": {
... "type": "MyClass",
... "value": {"sub_attr": {"type": "float", "value": 20.5}}
... }
... }
>>> extract_dict_or_list_entry(data, "attr1[1]")
{"type": "string", "value": "hello"}
>>> extract_dict_or_list_entry(data, "attr2")
{"type": "MyClass", "value": {"sub_attr": {"type": "float", "value": 20.5}}}
"""
attr_name = key
index: Optional[int] = None
# Check if the key contains an index part like '[<index>]'
if "[" in key and key.endswith("]"):
attr_name, index_part = key.split("[", 1)
index_part = index_part.rstrip("]") # remove the closing bracket
# Convert the index part to an integer
if index_part.isdigit():
index = int(index_part)
else:
logger.error(f"Invalid index format in key: {key}")
current_data: dict[str, Any] | list[dict[str, Any]] | None = data.get(
attr_name, None
)
if not isinstance(current_data, dict):
# key does not exist in dictionary, e.g. when class does not have this
# attribute
return None
if isinstance(current_data["value"], list):
current_data = current_data["value"]
if index is not None and 0 <= index < len(current_data):
current_data = current_data[index]
else:
return None
# When the attribute is a class instance, the attributes are nested in the
# "value" key
if current_data["type"] not in STANDARD_TYPES:
current_data = cast(dict[str, Any], current_data.get("value", None)) # type: ignore
assert isinstance(current_data, dict)
return current_data
def get_nested_value_from_DataService_by_path_and_key(
data: dict[str, Any], path: str, key: str = "value"
) -> Any:
"""
Get the value associated with a specific key from a dictionary given a path.
This function traverses the dictionary according to the path provided and
returns the value associated with the specified key at that path. The path is
a string with dots connecting the levels and brackets indicating list indices.
The function can handle complex dictionaries where data is nested within different
types of objects. It checks the type of each object it encounters and correctly
descends into the object if it is not a standard type (i.e., int, float, bool, str,
Enum).
Args:
data (dict): The input dictionary to get the value from.
path (str): The path to the value in the dictionary.
key (str, optional): The key associated with the value to be returned.
Default is "value".
Returns:
Any: The value associated with the specified key at the given path in the
dictionary.
Examples:
Let's consider the following dictionary:
>>> data = {
>>> "attr1": {"type": "int", "value": 10},
>>> "attr2": {
"type": "MyClass",
"value": {"attr3": {"type": "float", "value": 20.5}}
}
>>> }
The function can be used to get the value of 'attr1' as follows:
>>> get_nested_value_by_path_and_key(data, "attr1")
10
It can also be used to get the value of 'attr3', which is nested within 'attr2',
as follows:
>>> get_nested_value_by_path_and_key(data, "attr2.attr3", "type")
float
"""
# Split the path into parts
parts: list[str] = re.split(r"\.", path) # Split by '.'
current_data: dict[str, Any] | None = data
for part in parts:
if current_data is None:
return
current_data = extract_dict_or_list_entry(current_data, part)
if isinstance(current_data, dict):
return current_data.get(key, None)
def convert_arguments_to_hinted_types(
args: dict[str, Any], type_hints: dict[str, Any]
) -> dict[str, Any] | str:
@@ -339,62 +140,86 @@ def update_value_if_changed(
if getattr(target, attr_name_or_index) != new_value:
setattr(target, attr_name_or_index, new_value)
else:
logger.error(f"Incompatible arguments: {target}, {attr_name_or_index}.")
logger.error("Incompatible arguments: %s, %s.", target, attr_name_or_index)
def parse_list_attr_and_index(attr_string: str) -> tuple[str, Optional[int]]:
def parse_list_attr_and_index(attr_string: str) -> tuple[str, int | None]:
"""
Parses an attribute string and extracts a potential list attribute name and its
index.
Logs an error if the index is not a valid digit.
This function examines the provided attribute string. If the string contains square
brackets, it assumes that it's a list attribute and the string within brackets is
the index of an element. It then returns the attribute name and the index as an
integer. If no brackets are present, the function assumes it's a regular attribute
and returns the attribute name and None as the index.
Parameters:
-----------
attr_string: str
The attribute string to parse. Can be a regular attribute name (e.g.
'attr_name') or a list attribute with an index (e.g. 'list_attr[2]').
Args:
attr_string (str):
The attribute string to parse. Can be a regular attribute name (e.g.,
'attr_name') or a list attribute with an index (e.g., 'list_attr[2]').
Returns:
--------
tuple: (str, Optional[int])
A tuple containing the attribute name as a string and the index as an integer if
present, otherwise None.
tuple[str, int | None]:
A tuple containing the attribute name as a string and the index as an
integer if present, otherwise None.
Example:
--------
>>> parse_list_attr_and_index('list_attr[2]')
Examples:
>>> parse_list_attr_and_index('list_attr[2]')
('list_attr', 2)
>>> parse_list_attr_and_index('attr_name')
>>> parse_list_attr_and_index('attr_name')
('attr_name', None)
"""
attr_name = attr_string
index = None
if "[" in attr_string and "]" in attr_string:
attr_name, idx = attr_string[:-1].split("[")
index = int(idx)
attr_name = attr_string
if "[" in attr_string and attr_string.endswith("]"):
attr_name, index_part = attr_string.split("[", 1)
index_part = index_part.rstrip("]")
if index_part.isdigit():
index = int(index_part)
else:
logger.error("Invalid index format in key: %s", attr_string)
return attr_name, index
def get_component_class_names() -> list[str]:
def get_component_classes() -> list[type]:
"""
Returns the names of the component classes in a list.
It takes the names from the pydase/components/__init__.py file, so this file should
always be up-to-date with the currently available components.
Returns:
list[str]: List of component class names
Returns references to the component classes in a list.
"""
import pydase.components
return pydase.components.__all__
return [
getattr(pydase.components, cls_name) for cls_name in pydase.components.__all__
]
def get_data_service_class_reference() -> Any:
import pydase.data_service.data_service
return getattr(pydase.data_service.data_service, "DataService")
def is_property_attribute(target_obj: Any, attr_name: str) -> bool:
return isinstance(getattr(type(target_obj), attr_name, None), property)
def function_has_arguments(func: Callable[..., Any]) -> bool:
sig = inspect.signature(func)
parameters = dict(sig.parameters)
# Remove 'self' parameter for instance methods.
parameters.pop("self", None)
# Check if there are any parameters left which would indicate additional arguments.
if len(parameters) > 0:
return True
return False
def render_in_frontend(func: Callable[..., Any]) -> bool:
"""Determines if the method should be rendered in the frontend.
It checks if the "@frontend" decorator was used or the method is a coroutine."""
if inspect.iscoroutinefunction(func):
return True
try:
return func._display_in_frontend # type: ignore
except AttributeError:
return False
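A quick illustrative snippet for two of the helpers above; the nested classes are made up for demonstration.
from pydase.utils.helpers import (
    get_object_attr_from_path_list,
    parse_list_attr_and_index,
)

assert parse_list_attr_and_index("list_attr[2]") == ("list_attr", 2)
assert parse_list_attr_and_index("attr_name") == ("attr_name", None)

class Inner:
    value = 42

class Outer:
    inner = Inner()

# Traverses Outer().inner.value via the path list.
assert get_object_attr_from_path_list(Outer(), ["inner", "value"]) == 42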

View File

@@ -1,82 +1,149 @@
import asyncio
import logging
import sys
from types import FrameType
from typing import Optional
from copy import copy
import loguru
import rpyc
import socketio # type: ignore[import-untyped]
import uvicorn.logging
from uvicorn.config import LOGGING_CONFIG
import pydase.config
ALLOWED_LOG_LEVELS = ["DEBUG", "INFO", "ERROR"]
class DefaultFormatter(uvicorn.logging.ColourizedFormatter):
"""
A custom log formatter class that:
* Outputs the LOG_LEVEL with an appropriate color.
* If a log call includes an `extras={"color_message": ...}` it will be used
for formatting the output, instead of the plain text message.
"""
def formatMessage(self, record: logging.LogRecord) -> str: # noqa: N802
recordcopy = copy(record)
levelname = recordcopy.levelname
separator = " " * (8 - len(recordcopy.levelname))
if self.use_colors:
levelname = self.color_level_name(levelname, recordcopy.levelno)
if "color_message" in recordcopy.__dict__:
recordcopy.msg = recordcopy.__dict__["color_message"]
recordcopy.__dict__["message"] = recordcopy.getMessage()
recordcopy.__dict__["levelprefix"] = levelname + separator
return logging.Formatter.formatMessage(self, recordcopy)
def should_use_colors(self) -> bool:
return sys.stderr.isatty()
# from: https://github.com/Delgan/loguru section
# "Entirely compatible with standard logging"
class InterceptHandler(logging.Handler):
class SocketIOHandler(logging.Handler):
"""
Custom logging handler that emits ERROR and CRITICAL log records to a Socket.IO
server, allowing for real-time logging in applications that use Socket.IO for
communication.
"""
def __init__(self, sio: socketio.AsyncServer) -> None:
super().__init__(logging.ERROR)
self._sio = sio
def format(self, record: logging.LogRecord) -> str:
return f"{record.name}:{record.funcName}:{record.lineno} - {record.getMessage()}"
def emit(self, record: logging.LogRecord) -> None:
# Ignore "asyncio.CancelledError" raised by uvicorn
if record.name == "uvicorn.error" and "CancelledError" in record.msg:
return
log_entry = self.format(record)
# Get corresponding Loguru level if it exists.
level: int | str
try:
level = loguru.logger.level(record.levelname).name
except ValueError:
level = record.levelno
# Find caller from where originated the logged message.
frame: Optional[FrameType] = sys._getframe(6)
depth = 6
while frame and frame.f_code.co_filename == logging.__file__:
frame = frame.f_back
depth += 1
try:
msg = record.getMessage()
except TypeError:
# A `TypeError` is raised when the `msg` string expects more arguments
# than are provided by `args`. This can happen when intercepting log
# messages with a certain format, like
# > logger.debug("call: %s%r", method_name, *args) # in tiqi_rpc
# where `*args` unpacks a sequence of values that should replace
# placeholders in the string.
msg = record.msg % (record.args[0], record.args[2:]) # type: ignore
loguru.logger.opt(depth=depth, exception=record.exc_info).log(level, msg)
loop = asyncio.get_event_loop()
loop.create_task(
self._sio.emit(
"log",
{
"levelname": record.levelname,
"message": log_entry,
},
)
)
def setup_logging(level: Optional[str] = None) -> None:
loguru.logger.debug("Configuring service logging.")
def setup_logging(level: str | int | None = None) -> None:
"""
Configures the logging settings for the application.
This function sets up logging with specific formatting and colorization of log
messages. The log level is determined based on the application's operation mode,
with an option to override the level. By default, in a development environment, the
log level is set to DEBUG, whereas in other environments, it is set to INFO.
Args:
level (str | int | None):
A specific log level to set for the application. If None, the log level is
determined based on the application's operation mode. Accepts standard log
level names ('DEBUG', 'INFO', etc.) and corresponding numerical values.
Example:
```python
>>> import logging
>>> setup_logging(logging.DEBUG)
>>> setup_logging("INFO")
```
"""
logger = logging.getLogger()
if pydase.config.OperationMode().environment == "development":
log_level = "DEBUG"
log_level = logging.DEBUG
else:
log_level = "INFO"
log_level = logging.INFO
if level is not None and level in ALLOWED_LOG_LEVELS:
log_level = level
# If a level is specified, check whether it's a string or an integer.
if level is not None:
if isinstance(level, str):
# Convert known log level strings directly to their corresponding logging
# module constants.
level_name = level.upper() # Ensure level names are uppercase
if hasattr(logging, level_name):
log_level = getattr(logging, level_name)
else:
raise ValueError(
f"Invalid log level: {level}. Must be one of 'DEBUG', 'INFO', "
"'WARNING', 'ERROR', etc."
)
elif isinstance(level, int):
log_level = level # Directly use integer levels
else:
raise ValueError("Log level must be a string or an integer.")
loguru.logger.remove()
loguru.logger.add(sys.stderr, level=log_level)
# Set the logger's level.
logger.setLevel(log_level)
# set up the rpyc logger *before* adding the InterceptHandler to the logging module
rpyc.setup_logger(quiet=True) # type: ignore
# create console handler and set level to debug
ch = logging.StreamHandler()
logging.basicConfig(handlers=[InterceptHandler()], level=0)
# add formatter to ch
ch.setFormatter(
DefaultFormatter(
fmt=(
"%(asctime)s.%(msecs)03d | %(levelprefix)s | "
"%(name)s:%(funcName)s:%(lineno)d - %(message)s"
),
datefmt="%Y-%m-%d %H:%M:%S",
)
)
# add ch to logger
logger.addHandler(ch)
logger.debug("Configuring service logging.")
logging.getLogger("asyncio").setLevel(logging.INFO)
logging.getLogger("urllib3").setLevel(logging.INFO)
# overwriting the uvicorn logging config to use the loguru intercept handler
LOGGING_CONFIG["handlers"] = {
"default": {
"()": InterceptHandler,
"formatter": "default",
},
"access": {
"()": InterceptHandler,
"formatter": "access",
},
}
# configuring uvicorn logger
LOGGING_CONFIG["formatters"]["default"][
"fmt"
] = "%(asctime)s.%(msecs)03d | %(levelprefix)s %(message)s"
LOGGING_CONFIG["formatters"]["default"]["datefmt"] = "%Y-%m-%d %H:%M:%S"
LOGGING_CONFIG["formatters"]["access"]["fmt"] = (
"%(asctime)s.%(msecs)03d | %(levelprefix)s %(client_addr)s "
'- "%(request_line)s" %(status_code)s'
)
LOGGING_CONFIG["formatters"]["access"]["datefmt"] = "%Y-%m-%d %H:%M:%S"
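A minimal usage sketch for setup_logging as documented above; the module path is an assumption based on context.
import logging

from pydase.utils.logging import setup_logging  # module path assumed for this sketch

setup_logging("DEBUG")  # accepts a level name; logging.DEBUG would work as well
logging.getLogger(__name__).info("Service logging configured.")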

View File

@@ -0,0 +1,454 @@
from __future__ import annotations
import inspect
import logging
import sys
from enum import Enum
from typing import TYPE_CHECKING, Any, TypedDict, cast
if sys.version_info < (3, 11):
from typing_extensions import NotRequired
else:
from typing import NotRequired
import pydase.units as u
from pydase.data_service.abstract_data_service import AbstractDataService
from pydase.data_service.task_manager import TaskStatus
from pydase.utils.helpers import (
get_attribute_doc,
get_component_classes,
get_data_service_class_reference,
parse_list_attr_and_index,
render_in_frontend,
)
if TYPE_CHECKING:
from collections.abc import Callable
logger = logging.getLogger(__name__)
class SerializationPathError(Exception):
pass
class SerializationValueError(Exception):
pass
class SignatureDict(TypedDict):
parameters: dict[str, dict[str, Any]]
return_annotation: dict[str, Any]
SerializedObject = TypedDict(
"SerializedObject",
{
"name": NotRequired[str],
"value": "list[SerializedObject] | float | int | str | bool | dict[str, Any] | None", # noqa: E501
"type": str | None,
"doc": str | None,
"readonly": bool,
"enum": NotRequired[dict[str, Any]],
"async": NotRequired[bool],
"signature": NotRequired[SignatureDict],
"frontend_render": NotRequired[bool],
},
)
class Serializer:
@staticmethod
def serialize_object(obj: Any) -> SerializedObject:
result: SerializedObject
if isinstance(obj, AbstractDataService):
result = Serializer._serialize_data_service(obj)
elif isinstance(obj, list):
result = Serializer._serialize_list(obj)
elif isinstance(obj, dict):
result = Serializer._serialize_dict(obj)
# Special handling for u.Quantity
elif isinstance(obj, u.Quantity):
result = Serializer._serialize_quantity(obj)
# Handling for Enums
elif isinstance(obj, Enum):
result = Serializer._serialize_enum(obj)
# Methods and coroutines
elif inspect.isfunction(obj) or inspect.ismethod(obj):
result = Serializer._serialize_method(obj)
else:
obj_type = type(obj).__name__
value = obj
readonly = False
doc = get_attribute_doc(obj)
result = {
"type": obj_type,
"value": value,
"readonly": readonly,
"doc": doc,
}
return result
@staticmethod
def _serialize_enum(obj: Enum) -> SerializedObject:
import pydase.components.coloured_enum
value = obj.name
readonly = False
doc = obj.__doc__
if sys.version_info < (3, 11) and doc == "An enumeration.":
doc = None
if isinstance(obj, pydase.components.coloured_enum.ColouredEnum):
obj_type = "ColouredEnum"
else:
obj_type = "Enum"
return {
"type": obj_type,
"value": value,
"readonly": readonly,
"doc": doc,
"enum": {
name: member.value for name, member in obj.__class__.__members__.items()
},
}
@staticmethod
def _serialize_quantity(obj: u.Quantity) -> SerializedObject:
obj_type = "Quantity"
readonly = False
doc = get_attribute_doc(obj)
value = {"magnitude": obj.m, "unit": str(obj.u)}
return {
"type": obj_type,
"value": value,
"readonly": readonly,
"doc": doc,
}
@staticmethod
def _serialize_dict(obj: dict[str, Any]) -> SerializedObject:
obj_type = "dict"
readonly = False
doc = get_attribute_doc(obj)
value = {key: Serializer.serialize_object(val) for key, val in obj.items()}
return {
"type": obj_type,
"value": value,
"readonly": readonly,
"doc": doc,
}
@staticmethod
def _serialize_list(obj: list[Any]) -> SerializedObject:
obj_type = "list"
readonly = False
doc = get_attribute_doc(obj)
value = [Serializer.serialize_object(o) for o in obj]
return {
"type": obj_type,
"value": value,
"readonly": readonly,
"doc": doc,
}
@staticmethod
def _serialize_method(obj: Callable[..., Any]) -> SerializedObject:
obj_type = "method"
value = None
readonly = True
doc = get_attribute_doc(obj)
frontend_render = render_in_frontend(obj)
# Store parameters and their annotations in a dictionary
sig = inspect.signature(obj)
sig.return_annotation
signature: SignatureDict = {"parameters": {}, "return_annotation": {}}
for k, v in sig.parameters.items():
signature["parameters"][k] = {
"annotation": str(v.annotation),
"default": {} if v.default == inspect._empty else dump(v.default),
}
return {
"type": obj_type,
"value": value,
"readonly": readonly,
"doc": doc,
"async": inspect.iscoroutinefunction(obj),
"signature": signature,
"frontend_render": frontend_render,
}
@staticmethod
def _serialize_data_service(obj: AbstractDataService) -> SerializedObject:
readonly = False
doc = get_attribute_doc(obj)
obj_type = "DataService"
obj_name = obj.__class__.__name__
# Get component base class if any
component_base_cls = next(
(cls for cls in get_component_classes() if isinstance(obj, cls)), None
)
if component_base_cls:
obj_type = component_base_cls.__name__
# Get the set of DataService class attributes
data_service_attr_set = set(dir(get_data_service_class_reference()))
# Get the set of the object attributes
obj_attr_set = set(dir(obj))
# Get the difference between the two sets
derived_only_attr_set = obj_attr_set - data_service_attr_set
value: dict[str, SerializedObject] = {}
# Iterate over attributes, properties, class attributes, and methods
for key in sorted(derived_only_attr_set):
if key.startswith("_"):
continue # Skip attributes that start with underscore
# Skip keys that start with "start_" or "stop_" and end with an async
# method name
if key.startswith(("start_", "stop_")) and key.split("_", 1)[1] in {
name
for name, _ in inspect.getmembers(
obj, predicate=inspect.iscoroutinefunction
)
}:
continue
val = getattr(obj, key)
value[key] = Serializer.serialize_object(val)
# If there's a running task for this method
if key in obj._task_manager.tasks:
value[key]["value"] = TaskStatus.RUNNING.name
# If the DataService attribute is a property
if isinstance(getattr(obj.__class__, key, None), property):
prop: property = getattr(obj.__class__, key)
value[key]["readonly"] = prop.fset is None
value[key]["doc"] = get_attribute_doc(prop) # overwrite the doc
return {
"name": obj_name,
"type": obj_type,
"value": value,
"readonly": readonly,
"doc": doc,
}
def dump(obj: Any) -> SerializedObject:
return Serializer.serialize_object(obj)
def set_nested_value_by_path(
serialization_dict: dict[str, SerializedObject], path: str, value: Any
) -> None:
"""
Set a value in a nested dictionary structure, which conforms to the serialization
format used by `pydase.utils.serializer.Serializer`, using a dot-notation path.
Args:
serialization_dict:
The base dictionary representing data serialized with
`pydase.utils.serializer.Serializer`.
path:
The dot-notation path (e.g., 'attr1.attr2[0].attr3') indicating where to
set the value.
value:
The new value to set at the specified path.
Note:
- If the index equals the length of the list, the function will append the
serialized representation of the 'value' to the list.
"""
parent_path_parts, attr_name = path.split(".")[:-1], path.split(".")[-1]
current_dict: dict[str, SerializedObject] = serialization_dict
try:
for path_part in parent_path_parts:
next_level_serialized_object = get_next_level_dict_by_key(
current_dict, path_part, allow_append=False
)
current_dict = cast(
dict[str, SerializedObject], next_level_serialized_object["value"]
)
next_level_serialized_object = get_next_level_dict_by_key(
current_dict, attr_name, allow_append=True
)
except (SerializationPathError, SerializationValueError, KeyError) as e:
logger.error(e)
return
if next_level_serialized_object["type"] == "method": # state change of task
next_level_serialized_object["value"] = (
value.name if isinstance(value, Enum) else None
)
else:
serialized_value = dump(value)
keys_to_keep = set(serialized_value.keys())
# TODO: you might also want to pop "doc" from serialized_value if
# it is overwriting the value of the current dict
serialized_value.pop("readonly") # type: ignore
next_level_serialized_object.update(serialized_value)
# removes keys that are not present in the serialized new value
for key in list(next_level_serialized_object.keys()):
if key not in keys_to_keep:
next_level_serialized_object.pop(key, None) # type: ignore
def get_nested_dict_by_path(
serialization_dict: dict[str, SerializedObject],
path: str,
) -> SerializedObject:
parent_path_parts, attr_name = path.split(".")[:-1], path.split(".")[-1]
current_dict: dict[str, SerializedObject] = serialization_dict
for path_part in parent_path_parts:
next_level_serialized_object = get_next_level_dict_by_key(
current_dict, path_part, allow_append=False
)
current_dict = cast(
dict[str, SerializedObject], next_level_serialized_object["value"]
)
return get_next_level_dict_by_key(current_dict, attr_name, allow_append=False)
def get_next_level_dict_by_key(
serialization_dict: dict[str, SerializedObject],
attr_name: str,
*,
allow_append: bool = False,
) -> SerializedObject:
"""
Retrieve a nested dictionary entry or list item from a data structure serialized
with `pydase.utils.serializer.Serializer`.
Args:
serialization_dict: The base dictionary representing serialized data.
attr_name: The key name representing the attribute in the dictionary,
e.g. 'list_attr[0]' or 'attr'
allow_append: Flag to allow appending a new entry if `index` is out of range by
one.
Returns:
The dictionary or list item corresponding to the attribute and index.
Raises:
SerializationPathError: If the path composed of `attr_name` and `index` is
invalid or leads to an IndexError or KeyError.
SerializationValueError: If the expected nested structure is not a dictionary.
"""
# Check if the key contains an index part like 'attr_name[<index>]'
attr_name, index = parse_list_attr_and_index(attr_name)
try:
if index is not None:
next_level_serialized_object = cast(
list[SerializedObject], serialization_dict[attr_name]["value"]
)[index]
else:
next_level_serialized_object = serialization_dict[attr_name]
except IndexError as e:
if (
index is not None
and allow_append
and index
== len(cast(list[SerializedObject], serialization_dict[attr_name]["value"]))
):
# Appending to list
cast(list[SerializedObject], serialization_dict[attr_name]["value"]).append(
{
"value": None,
"type": None,
"doc": None,
"readonly": False,
}
)
next_level_serialized_object = cast(
list[SerializedObject], serialization_dict[attr_name]["value"]
)[index]
else:
raise SerializationPathError(
f"Error occurred trying to change '{attr_name}[{index}]': {e}"
)
except KeyError:
raise SerializationPathError(
f"Error occurred trying to access the key '{attr_name}': it is either "
"not present in the current dictionary or its value does not contain "
"a 'value' key."
)
if not isinstance(next_level_serialized_object, dict):
raise SerializationValueError(
f"Expected a dictionary at '{attr_name}', but found type "
f"'{type(next_level_serialized_object).__name__}' instead."
)
return next_level_serialized_object
def generate_serialized_data_paths(
data: dict[str, Any], parent_path: str = ""
) -> list[str]:
"""
Generate a list of access paths for all attributes in a dictionary representing
data serialized with `pydase.utils.serializer.Serializer`, excluding those that are
methods. This function handles nested structures, including lists, by generating
paths for each element in the nested lists.
Args:
data (dict[str, Any]): The dictionary representing serialized data, typically
produced by `pydase.utils.serializer.Serializer`.
parent_path (str, optional): The base path to prepend to the keys in the `data`
dictionary to form the access paths. Defaults to an empty string.
Returns:
list[str]: A list of strings where each string is a dot-notation access path
to an attribute in the serialized data. For list elements, the path includes
the index in square brackets.
"""
paths: list[str] = []
for key, value in data.items():
new_path = f"{parent_path}.{key}" if parent_path else key
paths.append(new_path)
if serialized_dict_is_nested_object(value):
if isinstance(value["value"], list):
for index, item in enumerate(value["value"]):
indexed_key_path = f"{new_path}[{index}]"
paths.append(indexed_key_path)
if serialized_dict_is_nested_object(item):
paths.extend(
generate_serialized_data_paths(
item["value"], indexed_key_path
)
)
continue
paths.extend(generate_serialized_data_paths(value["value"], new_path))
return paths
def serialized_dict_is_nested_object(serialized_dict: SerializedObject) -> bool:
return (
serialized_dict["type"] != "Quantity"
and isinstance(serialized_dict["value"], dict)
) or isinstance(serialized_dict["value"], list)
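To tie the serializer pieces together, a small sketch that dumps a service and walks the generated access paths; attribute names and the printed list are illustrative.
import pydase
from pydase.utils.serializer import dump, generate_serialized_data_paths

class MyService(pydase.DataService):
    greeting = "hello"
    numbers = [1, 2]

serialized = dump(MyService())
paths = generate_serialized_data_paths(serialized["value"])
print(paths)  # e.g. ['greeting', 'numbers', 'numbers[0]', 'numbers[1]']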

View File

@@ -1,21 +0,0 @@
from loguru import logger
def warn_if_instance_class_does_not_inherit_from_DataService(__value: object) -> None:
base_class_name = __value.__class__.__base__.__name__
module_name = __value.__class__.__module__
if (
module_name
not in [
"builtins",
"__builtin__",
"asyncio.unix_events",
"_abc",
]
and base_class_name not in ["DataService", "list", "Enum"]
and type(__value).__name__ not in ["CallbackManager", "TaskManager", "Quantity"]
):
logger.warning(
f"Warning: Class {type(__value).__name__} does not inherit from DataService."
)

View File

@@ -1,4 +1,4 @@
from importlib.metadata import distribution
__version__ = distribution("pydase").version
__major__, __minor__, __patch__ = [int(v) for v in __version__.split(".")]
__major__, __minor__, __patch__ = (int(v) for v in __version__.split("."))

View File

@@ -1,26 +0,0 @@
from collections.abc import Generator
from typing import Any
import pytest
from loguru import logger
from pytest import LogCaptureFixture
from pydase import DataService
from pydase.data_service.callback_manager import CallbackManager
@pytest.fixture
def caplog(caplog: LogCaptureFixture) -> Generator[LogCaptureFixture, Any, None]:
handler_id = logger.add(caplog.handler, format="{message}")
yield caplog
logger.remove(handler_id)
def emit(self: Any, parent_path: str, name: str, value: Any) -> None:
if isinstance(value, DataService):
value = value.serialize()
print(f"{parent_path}.{name} = {value}")
CallbackManager.emit_notification = emit # type: ignore

View File

@@ -0,0 +1,47 @@
from pydase.components.coloured_enum import ColouredEnum
from pydase.data_service.data_service import DataService
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pytest import LogCaptureFixture
def test_ColouredEnum(caplog: LogCaptureFixture) -> None:
class MyStatus(ColouredEnum):
RUNNING = "#00FF00"
FAILING = "#FF0000"
class ServiceClass(DataService):
_status = MyStatus.RUNNING
@property
def status(self) -> MyStatus:
return self._status
@status.setter
def status(self, value: MyStatus) -> None:
# do something ...
self._status = value
service_instance = ServiceClass()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
service_instance.status = MyStatus.FAILING
assert "'status' changed to 'MyStatus.FAILING'" in caplog.text
def test_warning(caplog: LogCaptureFixture) -> None:
class MyStatus(ColouredEnum):
RUNNING = "#00FF00"
FAILING = "#FF0000"
class ServiceClass(DataService):
status = MyStatus.RUNNING
ServiceClass()
assert (
"Class 'MyStatus' does not inherit from DataService. This may lead to "
"unexpected behaviour!" not in caplog.text
)

View File

@@ -0,0 +1,32 @@
import asyncio
import logging
import pydase
import pydase.components.device_connection
from pytest import LogCaptureFixture
import pytest
logger = logging.getLogger(__name__)
@pytest.mark.asyncio
async def test_reconnection(caplog: LogCaptureFixture) -> None:
class MyService(pydase.components.device_connection.DeviceConnection):
def __init__(
self,
) -> None:
super().__init__()
self._reconnection_wait_time = 0.01
def connect(self) -> None:
self._connected = True
service_instance = MyService()
assert service_instance._connected is False
service_instance._task_manager.start_autostart_tasks()
await asyncio.sleep(0.01)
assert service_instance._connected is True

View File

@@ -0,0 +1,141 @@
import logging
import pydase
import pydase.components
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.utils.serializer import dump
from pytest import LogCaptureFixture
logger = logging.getLogger(__name__)
def test_image_functions(caplog: LogCaptureFixture) -> None:
class MyService(pydase.DataService):
def __init__(self) -> None:
super().__init__()
self.my_image = pydase.components.Image()
service_instance = MyService()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
service_instance.my_image.load_from_url("https://cataas.com/cat")
caplog.clear()
def test_image_serialization() -> None:
class MyService(pydase.DataService):
def __init__(self) -> None:
super().__init__()
self.my_image = pydase.components.Image()
assert dump(MyService()) == {
"name": "MyService",
"type": "DataService",
"value": {
"my_image": {
"name": "Image",
"type": "Image",
"value": {
"format": {
"type": "str",
"value": "",
"readonly": True,
"doc": None,
},
"load_from_base64": {
"type": "method",
"value": None,
"readonly": True,
"doc": None,
"async": False,
"signature": {
"parameters": {
"value_": {
"annotation": "<class 'bytes'>",
"default": {},
},
"format_": {
"annotation": "str | None",
"default": {
"type": "NoneType",
"value": None,
"readonly": False,
"doc": None,
},
},
},
"return_annotation": {},
},
"frontend_render": False,
},
"load_from_matplotlib_figure": {
"type": "method",
"value": None,
"readonly": True,
"doc": None,
"async": False,
"signature": {
"parameters": {
"fig": {"annotation": "Figure", "default": {}},
"format_": {
"annotation": "<class 'str'>",
"default": {
"type": "str",
"value": "png",
"readonly": False,
"doc": None,
},
},
},
"return_annotation": {},
},
"frontend_render": False,
},
"load_from_path": {
"type": "method",
"value": None,
"readonly": True,
"doc": None,
"async": False,
"signature": {
"parameters": {
"path": {
"annotation": "pathlib.Path | str",
"default": {},
}
},
"return_annotation": {},
},
"frontend_render": False,
},
"load_from_url": {
"type": "method",
"value": None,
"readonly": True,
"doc": None,
"async": False,
"signature": {
"parameters": {
"url": {"annotation": "<class 'str'>", "default": {}}
},
"return_annotation": {},
},
"frontend_render": False,
},
"value": {
"type": "str",
"value": "",
"readonly": True,
"doc": None,
},
},
"readonly": False,
"doc": None,
}
},
"readonly": False,
"doc": None,
}

View File

@@ -1,60 +1,83 @@
from pytest import CaptureFixture, LogCaptureFixture
import logging
from collections.abc import Callable
from pydase.components.number_slider import NumberSlider
from pydase.data_service.data_service import DataService
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pytest import LogCaptureFixture
from .. import caplog # noqa
import pytest
logger = logging.getLogger(__name__)
def test_NumberSlider(capsys: CaptureFixture) -> None:
class ServiceClass(DataService):
number_slider = NumberSlider(1, 0, 10, 1)
int_number_slider = NumberSlider(1, 0, 10, 1, "int")
def test_number_slider(caplog: LogCaptureFixture) -> None:
class MySlider(NumberSlider):
def __init__(
self,
value: float = 0,
min_: float = 0,
max_: float = 100,
step_size: float = 1,
callback: Callable[..., None] = lambda: None,
) -> None:
super().__init__(value, min_, max_, step_size)
self._callback = callback
service = ServiceClass()
@property
def value(self) -> float:
return self._value
assert service.number_slider.value == 1
assert isinstance(service.number_slider.value, float)
assert service.number_slider.min == 0
assert isinstance(service.number_slider.min, float)
assert service.number_slider.max == 10
assert isinstance(service.number_slider.max, float)
assert service.number_slider.step_size == 1
assert isinstance(service.number_slider.step_size, float)
@value.setter
def value(self, value: float) -> None:
self._callback(value)
self._value = value
assert service.int_number_slider.value == 1
assert isinstance(service.int_number_slider.value, int)
assert service.int_number_slider.step_size == 1
assert isinstance(service.int_number_slider.step_size, int)
@property
def max(self) -> float:
return self._max
service.number_slider.value = 10.0
service.int_number_slider.value = 10.1
@max.setter
def max(self, value: float) -> None:
self._max = value
captured = capsys.readouterr()
@property
def step_size(self) -> float:
return self._step_size
expected_output = sorted(
[
"ServiceClass.number_slider.value = 10.0",
"ServiceClass.int_number_slider.value = 10",
]
)
actual_output = sorted(captured.out.strip().split("\n")) # type: ignore
assert actual_output == expected_output
@step_size.setter
def step_size(self, value: float) -> None:
self._step_size = value
service.number_slider.min = 1.1
class MyService(DataService):
def __init__(self) -> None:
super().__init__()
self.my_slider = MySlider(callback=self.some_method)
captured = capsys.readouterr()
def some_method(self, slider_value: float) -> None:
logger.info("Slider changed to '%s'", slider_value)
expected_output = sorted(
[
"ServiceClass.number_slider.min = 1.1",
]
)
actual_output = sorted(captured.out.strip().split("\n")) # type: ignore
assert actual_output == expected_output
service_instance = MyService()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
service_instance.my_slider.value = 10.0
def test_init_error(caplog: LogCaptureFixture) -> None: # noqa
number_slider = NumberSlider(type="str") # type: ignore # noqa
assert "'my_slider.value' changed to '10.0'" in caplog.text
assert "Slider changed to '10.0'" in caplog.text
caplog.clear()
assert "Unknown type 'str'. Using 'float'" in caplog.text
service_instance.my_slider.max = 12.0
assert "'my_slider.max' changed to '12.0'" in caplog.text
caplog.clear()
service_instance.my_slider.step_size = 0.1
assert "'my_slider.step_size' changed to '0.1'" in caplog.text
caplog.clear()
# by overriding the getter only you can make the property read-only
with pytest.raises(AttributeError):
service_instance.my_slider.min = 1.1 # type: ignore[reportGeneralTypeIssues, misc]

View File

@@ -1,64 +1,137 @@
from enum import Enum
from typing import Any
import pydase
import pydase.units as u
import pytest
from pydase import DataService
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.data_service.task_manager import TaskDefinitionError
from pydase.utils.decorators import FunctionDefinitionError, frontend
from pytest import LogCaptureFixture
def test_enum_serialize() -> None:
class EnumClass(Enum):
FOO = "foo"
BAR = "bar"
def test_unexpected_type_change_warning(caplog: LogCaptureFixture) -> None:
class ServiceClass(DataService):
attr_1 = 1.0
current = 1.0 * u.units.A
class EnumAttribute(pydase.DataService):
def __init__(self) -> None:
self.some_enum = EnumClass.FOO
super().__init__()
service_instance = ServiceClass()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
service_instance.attr_1 = 2
class EnumPropertyWithoutSetter(pydase.DataService):
def __init__(self) -> None:
self._some_enum = EnumClass.FOO
super().__init__()
assert "'attr_1' changed to '2'" in caplog.text
assert (
"Type of 'attr_1' changed from 'float' to 'int'. This may have unwanted "
"side effects! Consider setting it to 'float' directly." in caplog.text
)
service_instance.current = 2
assert "'current' changed to '2'" in caplog.text
assert (
"Type of 'current' changed from 'Quantity' to 'int'. This may have unwanted "
"side effects! Consider setting it to 'Quantity' directly." in caplog.text
)
def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None:
class SubService(DataService):
...
class SomeEnum(Enum):
HI = 0
class ServiceClass(DataService):
sub_service = SubService()
some_int = 1
some_float = 1.0
some_bool = True
some_quantity = 1.0 * u.units.A
some_list = [1, 2]
some_string = "Hello"
some_enum = SomeEnum.HI
_name = "Service"
@property
def some_enum(self) -> EnumClass:
return self._some_enum
def name(self) -> str:
return self._name
class EnumPropertyWithSetter(pydase.DataService):
def some_method(self) -> None:
...
async def some_task(self) -> None:
...
ServiceClass()
# neither of the attributes, methods or properties cause a warning log
assert "WARNING" not in caplog.text
def test_class_attr_inheritance_warning(caplog: LogCaptureFixture) -> None:
class SubClass:
name = "Hello"
class ServiceClass(DataService):
attr_1 = SubClass()
ServiceClass()
assert (
"Class 'SubClass' does not inherit from DataService. This may lead to "
"unexpected behaviour!"
) in caplog.text
def test_instance_attr_inheritance_warning(caplog: LogCaptureFixture) -> None:
class SubClass:
name = "Hello"
class ServiceClass(DataService):
def __init__(self) -> None:
self._some_enum = EnumClass.FOO
super().__init__()
self.attr_1 = SubClass()
@property
def some_enum(self) -> EnumClass:
return self._some_enum
ServiceClass()
@some_enum.setter
def some_enum(self, value: EnumClass) -> None:
self._some_enum = value
assert (
"Class 'SubClass' does not inherit from DataService. This may lead to "
"unexpected behaviour!"
) in caplog.text
assert EnumAttribute().serialize() == {
"some_enum": {
"type": "Enum",
"value": "FOO",
"enum": {"FOO": "foo", "BAR": "bar"},
"readonly": False,
"doc": None,
}
}
assert EnumPropertyWithoutSetter().serialize() == {
"some_enum": {
"type": "Enum",
"value": "FOO",
"enum": {"FOO": "foo", "BAR": "bar"},
"readonly": True,
"doc": None,
}
}
assert EnumPropertyWithSetter().serialize() == {
"some_enum": {
"type": "Enum",
"value": "FOO",
"enum": {"FOO": "foo", "BAR": "bar"},
"readonly": False,
"doc": None,
}
}
def test_protected_and_private_attribute_warning(caplog: LogCaptureFixture) -> None:
class SubClass:
name = "Hello"
class ServiceClass(DataService):
def __init__(self) -> None:
super().__init__()
self._subclass = SubClass()
self.__other_subclass = SubClass()
ServiceClass()
# Protected and private attributes are not checked
assert (
"Class 'SubClass' does not inherit from DataService. This may lead to "
"unexpected behaviour!"
) not in caplog.text
def test_exposing_methods() -> None:
class ClassWithTask(pydase.DataService):
async def some_task(self, sleep_time: int) -> None:
pass
with pytest.raises(TaskDefinitionError):
ClassWithTask()
with pytest.raises(FunctionDefinitionError):
class ClassWithMethod(pydase.DataService):
@frontend
def some_method(self, *args: Any) -> str:
return "some method"

Some files were not shown because too many files have changed in this diff