82 Commits

Author SHA1 Message Date
aferk
a46945ed29 Update DOI link 2025-11-06 08:52:34 +01:00
aferk
21809b49e8 Update copyright notice to comply with regulations 2025-11-04 08:52:55 +01:00
Mose Müller
c45f1bd489 Merge pull request #258 from tiqi-group/release-v0.10.21
updates to v0.10.21
2025-08-28 13:31:09 +02:00
Mose Müller
5784818e5a updates to v0.10.21 2025-08-28 13:30:47 +02:00
Mose Müller
64a7097568 Merge pull request #257 from tiqi-group/remove-warning-on-type-change
removes _warn_on_type_change from DataService setattr
2025-08-28 13:27:36 +02:00
Mose Müller
5ef382728c removes warning test 2025-08-28 13:26:23 +02:00
Mose Müller
51d6189002 removes _warn_on_type_change from DataService setattr
Adding keys to dictionaries triggers this warning, so I would consider
this warning to not be useful any more.
2025-08-28 13:24:33 +02:00
Mose Müller
71e29c890e Merge pull request #256 from tiqi-group/release-v0.10.19
updates to version v0.10.19
2025-07-08 15:39:59 +02:00
Mose Müller
6e407ba1d6 updates to version v0.10.19 2025-07-08 15:39:40 +02:00
Mose Müller
4fb5e56aa8 Merge pull request #255 from tiqi-group/fix/property_observer_race_condition
fix: race-condition in PropertyObserver
2025-07-08 15:37:45 +02:00
Mose Müller
d55ba3a85f fix: race-condition in PropertyObserver
When a proxy of a pydase client initialised with
block_until_connected=False is set as an attribute of a data service, a
race condition can happen: when the client connects while the
DataServiceObserver is being initialised, the property_deps_dict
attribute might not be set yet while the DataServiceObserver was already
added as an observer to the client proxy. The proxy will then emit a
notification, which in turn tries to get the dependent properties from
the property_deps_dict attribute, which has not been initialised yet.
The resulting exception will not tell the proxy that the client has
connected.
2025-07-08 15:29:27 +02:00
Mose Müller
265d9a7ef5 Merge pull request #254 from tiqi-group/fix/serialize_exception
fix: serialize exception
2025-07-03 15:59:35 +02:00
Mose Müller
4cd36b4a2b tests: adds test for exception serialization 2025-07-03 15:55:41 +02:00
Mose Müller
1b2ff38aff fix: serializing exception that didn't take an argument
An exception that was instantiated without any argument could not be
serialized before. Now, I check if any args were supplied and set the
value to an empty string if no args were passed.
2025-07-03 15:55:25 +02:00
Mose Müller
4b243985e8 Merge pull request #253 from tiqi-group/feat/reset_frontend_value_on_exception
feat: reset frontend value to last value on exception
2025-07-03 15:53:36 +02:00
Mose Müller
8615bdeadc npm run build 2025-07-03 15:52:32 +02:00
Mose Müller
d24893a989 feat: reset frontend value to last value on exception
When changing a value in the frontend and this operation triggers an
exception in the backend, reset the frontend value to the last known
value.
2025-07-03 15:52:22 +02:00
Mose Müller
661603ef71 Merge pull request #252 from tiqi-group/alert-autofix-4
Potential fix for code scanning alert no. 4: Workflow does not contain permissions
2025-06-27 08:56:48 +02:00
Mose Müller
d6947b0f43 Potential fix for code scanning alert no. 4: Workflow does not contain permissions
Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com>
2025-06-27 08:54:57 +02:00
Mose Müller
93d38651e8 Merge pull request #251 from tiqi-group/release-v0.10.18
updates to version v0.10.18
2025-06-27 08:48:25 +02:00
Mose Müller
72a3c199d9 updates to version v0.10.18 2025-06-27 08:48:07 +02:00
Mose Müller
7914e2fa7b Merge pull request #250 from tiqi-group/feat/client_auto_update_proxy
feat: adds auto_update_proxy argument to pydase.Client
2025-06-27 08:44:35 +02:00
Mose Müller
0a4f898fde docs: updates python client section 2025-06-27 08:43:36 +02:00
Mose Müller
a9aa55fc99 client: adds auto_update_proxy argument
If False, this disables automatic updates from the server by not
subscribing to the "notify" event. This is useful for request-only clients
where real-time synchronization is not needed.
2025-06-27 08:02:33 +02:00
Mose Müller
fd5a230fa4 Merge pull request #249 from tiqi-group/feat/improve-client-proxy-serialization
chore: improve client proxy serialization
2025-06-23 14:11:31 +02:00
Mose Müller
243b46aadb test: adds test for ProxyClass
This test timed out before implementing the changes.
2025-06-23 14:09:52 +02:00
Mose Müller
0f1ca84df5 client: updates proxy.serialize logic
The proxy needs to properly handle serialization requests. If such a
requests comes from the asyncio loop used by the socketio client, this
would result in a deadlock. This happens, for example, when the observer
is notified of a change triggered within a socketio event. To prevent
this, I am checking the current loop against the socketio client loop.
If it's the same, return the _service_representation value, which is set
when pydase.Client connects to the server. I do the same when the client
is not connected (to prevent BadNamespaceErrors).
Every other invocation of serialize results in an API call to the
server.
2025-06-23 09:11:43 +02:00
Mose Müller
6438a07305 client: updates proxy._service_representation every time the client connects 2025-06-23 09:01:01 +02:00
Mose Müller
80bfd209df Merge pull request #248 from tiqi-group/docs/adds_doi_badge
docs: adds DOI badge
2025-06-20 10:46:48 +02:00
Mose Müller
e065b1fb22 docs: adds DOI badge 2025-06-20 10:45:01 +02:00
Mose Müller
977cee32b9 Merge pull request #247 from tiqi-group/feat/adding-direct-api-access-tests
feat: adds client tests for direct api functions
2025-06-19 14:04:30 +02:00
Mose Müller
96f695020b adds client tests for direct api functions 2025-06-19 14:03:34 +02:00
Mose Müller
33ce01865a Merge pull request #246 from tiqi-group/feat/direct-api-access-client
Feat: direct api access client
2025-06-19 13:57:25 +02:00
Mose Müller
f5374573cd docs: adds "Direct API Access" section to python-client.md 2025-06-19 13:54:52 +02:00
Mose Müller
43c6b5e817 fix: getter_proxy didn't return value 2025-06-19 13:52:31 +02:00
Mose Müller
37380c6d24 adds get_value, update_value and trigger_method functions to pydase.Client 2025-06-19 13:45:36 +02:00
Mose Müller
ae21656e83 pulls get_value out of ProxyClassMixin 2025-06-19 13:38:51 +02:00
Mose Müller
a4b4f179c6 Merge pull request #245 from tiqi-group/release-v0.10.17
updates to version v0.10.17
2025-06-19 10:43:45 +02:00
Mose Müller
c6beca3961 updates to version v0.10.17 2025-06-19 10:43:13 +02:00
Mose Müller
2fa8240e54 Merge pull request #244 from tiqi-group/feat/add_python_3_13_support
feat: official support for python 3.13
2025-06-19 10:42:11 +02:00
Mose Müller
369587a50c adds python 3.13 to checked versions in the github workflow 2025-06-19 10:39:40 +02:00
Mose Müller
25343f6909 Merge pull request #243 from tiqi-group/fix/test_image
fix: pathlib.Path signature annotation change in python 3.13
2025-06-19 10:37:53 +02:00
Mose Müller
c136c9f3de fix: pathlib.Path signature annotation change in python 3.13 2025-06-19 10:35:35 +02:00
Mose Müller
8897c2fe4c Merge pull request #242 from tiqi-group/refactor/web-server-initialisation
Refactor: web server initialisation
2025-06-19 10:26:12 +02:00
Mose Müller
80c5c4e99d fix: check if loop is running 2025-06-19 10:23:25 +02:00
Mose Müller
423441a74c initialise WebServer in pydase.Server constructor
The WebServer can be initialised in the pydase.Server constructor
without any problems. This would allow users to access the socketio
server before starting the pydase.Server.
2025-06-19 10:23:07 +02:00
Mose Müller
9ec60e3891 Merge pull request #241 from tiqi-group/fix/sio_server_warnings
fix: check if loop is running in SocketIOHandler
2025-06-19 10:00:53 +02:00
Mose Müller
8bde104322 fix: check if loop is running in SocketIOHandler
Before emitting sio events in the SocketIOHandler, I have to check if
the loop is actually still running. This caused issues with pytest as
pytest was tearing down asyncio tasks and stopping the loop, while the
sio handler was still trying to send those logs to the sio clients.
2025-06-19 09:59:08 +02:00
Mose Müller
9b57b6984e Merge pull request #240 from tiqi-group/fix/enable-web-argument
Fix: enable web argument handling
2025-06-19 09:53:29 +02:00
Mose Müller
e5b89f2581 always emit exceptions (also when enable_web is false)
replaces enable_web check with loop.is_running()
2025-06-19 09:50:09 +02:00
Mose Müller
ff1654e65c fix: enable_web argument toggles the frontend now
- always initialise the WebServer (also if enable_web is False).
Otherwise, the socketio server will not be initialised
- passing the enable_web argument to the WebServer which is then used to
decide whether to add the frontend routes
2025-06-19 09:43:34 +02:00
Mose Müller
cded80c8e5 Merge pull request #239 from tiqi-group/feat/post-startup-hook
feat: adds post_startup hook to pydase.Server
2025-06-19 09:23:34 +02:00
Mose Müller
87a33b6293 adds post_startup hook to pydase.Server 2025-06-19 09:18:44 +02:00
Mose Müller
6d621daaac Merge pull request #238 from tiqi-group/release-v0.10.16
updates to version v0.10.16
2025-06-06 13:45:58 +02:00
Mose Müller
8c1a50c106 updates to version v0.10.16 2025-06-06 13:45:27 +02:00
Mose Müller
a1545d341b Merge pull request #237 from tiqi-group/chore/update_npm_packages
Chore: update npm packages
2025-06-06 13:44:04 +02:00
Mose Müller
28a1cc7cd3 Merge pull request #236 from tiqi-group/fix/local_storage_usage
fix: prefix localStorage key with authority id
2025-06-06 13:43:41 +02:00
Mose Müller
c968708b85 npm run build 2025-06-06 13:43:22 +02:00
Mose Müller
fef8606d17 updates vitejs 2025-06-06 13:43:17 +02:00
Mose Müller
82286c8da0 fix: prefix localStorage key with authority id
Prefixing the localStorage key with the authority id fixes an issue that
arises when multiple services are displayed in an iframe on a webpage.
2025-06-06 13:36:48 +02:00
Mose Müller
533826a398 Merge pull request #234 from tiqi-group/release-v0.10.15
updates version to 0.10.15
2025-05-22 16:42:37 +02:00
Mose Müller
982875dee6 Merge pull request #235 from tiqi-group/feat/adds_client_id_default
feat: adds client id default
2025-05-22 16:16:02 +02:00
Mose Müller
e54710cd4d tests: update client_id test 2025-05-22 16:12:38 +02:00
Mose Müller
f48f7aacfb docs: updates client_id description 2025-05-22 16:10:52 +02:00
Mose Müller
e97aab4f36 client: adds hostname of the client as client_id default 2025-05-22 16:07:52 +02:00
Mose Müller
015c66d5a6 updates version to 0.10.15 2025-05-22 16:03:13 +02:00
Mose Müller
9827d0747c Merge pull request #233 from tiqi-group/fix/task_event_loop
fix: task event loop
2025-05-22 16:01:29 +02:00
Mose Müller
38a12fb72e fix: current_event_loop_exists should get the event loop which might not be running yet 2025-05-22 15:57:35 +02:00
Mose Müller
fb6ec16bf5 server: set event loop before initialising the state manager
As the server is run first, we don't have to check if any other event
loop is running.
2025-05-22 15:57:09 +02:00
Mose Müller
9ee498eb5c Merge pull request #232 from tiqi-group/fix/nested-attribute-notification
fix: properly checking if attribute is nested
2025-05-22 15:37:02 +02:00
Mose Müller
d015333123 tests: property starting with dependency name 2025-05-22 15:34:42 +02:00
Mose Müller
c4e7fe66a8 fix: properly checking if attribute is nested
Properties whose names start with a dependency's name (e.g., my_int ->
my_int_2) were incorrectly skipped during change notification. This
fixes it by checking if the changing properties start with the
full_access_path start followed by either "." or "[".
2025-05-22 15:34:09 +02:00
Mose Müller
5f1451a1c1 Merge pull request #231 from tiqi-group/fix/property_dependency_function_argument
Fix: property dependency function argument
2025-05-22 15:15:23 +02:00
Mose Müller
4c28cbaf7d tests: updates tests s.t. timezones don't matter 2025-05-22 15:07:10 +02:00
Mose Müller
a97b8eb2b4 fix: exclude ( from regex, as well 2025-05-22 15:06:30 +02:00
Mose Müller
f6b5c1b567 tests: property dependency as function argument 2025-05-22 14:51:33 +02:00
Mose Müller
f92d525588 fix: fixes regex pattern to get property dependencies 2025-05-22 14:50:29 +02:00
Mose Müller
61b69d77cc Merge pull request #229 from tiqi-group/release-v0.10.14
updates to version 0.10.14
2025-05-21 09:51:38 +02:00
Mose Müller
8abe9357cf updates to version 0.10.14 2025-05-21 09:51:17 +02:00
Mose Müller
0dace2a9f0 Merge pull request #228 from tiqi-group/fix/aiohttp_socks_dependency
fix: using client without aiohttp_socks dependency does not raise
2025-05-21 09:49:29 +02:00
Mose Müller
9992ade0ed chore: formatting 2025-05-21 09:48:11 +02:00
Mose Müller
6c2cebada2 fix: using client without aiohttp_socks dependency does not raise
When not specifying the proxy_url in `pydase.Client`, the aiohttp_socks
dependency is not required. This is now handled by putting the import
into the correct place, adding a descriptive log message when the import
fails.
2025-05-21 09:46:20 +02:00
35 changed files with 1206 additions and 750 deletions

View File

@@ -2,6 +2,8 @@
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
name: Python package name: Python package
permissions:
contents: read
on: on:
push: push:
@@ -16,7 +18,7 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
python-version: ["3.10", "3.11", "3.12"] python-version: ["3.10", "3.11", "3.12", "3.13"]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4

View File

@@ -1,4 +1,4 @@
Copyright (c) 2023-2024 Mose Müller <mosemueller@gmail.com> Copyright (c) 2023-2025 ETH Zurich, Mose Müller, Carmelo Mordini
Permission is hereby granted, free of charge, to any person obtaining a copy Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal of this software and associated documentation files (the "Software"), to deal

View File

@@ -5,6 +5,7 @@
[![Python Versions](https://img.shields.io/pypi/pyversions/pydase)](https://pypi.org/project/pydase/) [![Python Versions](https://img.shields.io/pypi/pyversions/pydase)](https://pypi.org/project/pydase/)
[![Documentation Status](https://readthedocs.org/projects/pydase/badge/?version=stable)](https://pydase.readthedocs.io/en/stable/) [![Documentation Status](https://readthedocs.org/projects/pydase/badge/?version=stable)](https://pydase.readthedocs.io/en/stable/)
[![License: MIT](https://img.shields.io/github/license/tiqi-group/pydase)][License] [![License: MIT](https://img.shields.io/github/license/tiqi-group/pydase)][License]
[![DOI](http://img.shields.io/badge/DOI-10.5905/ethz-blue.svg)](http://doi.org/10.5905/ethz-1007-907)
`pydase` is a Python library that simplifies the creation of remote control interfaces for Python objects. It exposes the public attributes of a user-defined class via a [Socket.IO](https://python-socketio.readthedocs.io/en/stable/) web server, ensuring they are always in sync with the service state. You can interact with these attributes using an RPC client, a RESTful API, or a web browser. The web browser frontend is auto-generated, displaying components that correspond to each public attribute of the class for direct interaction. `pydase` is a Python library that simplifies the creation of remote control interfaces for Python objects. It exposes the public attributes of a user-defined class via a [Socket.IO](https://python-socketio.readthedocs.io/en/stable/) web server, ensuring they are always in sync with the service state. You can interact with these attributes using an RPC client, a RESTful API, or a web browser. The web browser frontend is auto-generated, displaying components that correspond to each public attribute of the class for direct interaction.
`pydase` implements an [observer pattern][Observer Pattern] to provide the real-time updates, ensuring that changes to the class attributes are reflected across all clients. `pydase` implements an [observer pattern][Observer Pattern] to provide the real-time updates, ensuring that changes to the class attributes are reflected across all clients.

View File

@@ -23,7 +23,39 @@ The proxy acts as a local representation of the remote service, enabling intuiti
The proxy class automatically synchronizes with the server's attributes and methods, keeping itself up-to-date with any changes. This dynamic synchronization essentially mirrors the server's API, making it feel like you're working with a local object. The proxy class automatically synchronizes with the server's attributes and methods, keeping itself up-to-date with any changes. This dynamic synchronization essentially mirrors the server's API, making it feel like you're working with a local object.
### Accessing Services Behind Firewalls or SSH Gateways ## Automatic Proxy Updates
By default, the client listens for attribute and structure changes from the server and dynamically updates its internal proxy representation. This ensures that value changes or newly added attributes on the server appear in the client proxy without requiring reconnection or manual refresh.
This is useful, for example, when [integrating the client into another service](#integrating-the-client-into-another-service). However, if you want to avoid this behavior (e.g., to reduce network traffic or avoid frequent re-syncing), you can disable it. When passing `auto_update_proxy=False` to the client, the proxy will not track changes after the initial connection:
```python
client = pydase.Client(
url="ws://localhost:8001",
auto_update_proxy=False
)
```
## Direct API Access
In addition to using the `proxy` object, users may access the server API directly via the following methods:
```python
client = pydase.Client(url="ws://localhost:8001")
# Get the current value of an attribute
value = client.get_value("device.voltage")
# Update an attribute
client.update_value("device.voltage", 5.0)
# Call a method on the remote service
result = client.trigger_method("device.reset")
```
This bypasses the proxy and is useful for lower-level access to individual service endpoints.
## Accessing Services Behind Firewalls or SSH Gateways
If your service is only reachable through a private network or SSH gateway, you can route your connection through a local SOCKS5 proxy using the `proxy_url` parameter. If your service is only reachable through a private network or SSH gateway, you can route your connection through a local SOCKS5 proxy using the `proxy_url` parameter.
@@ -58,7 +90,7 @@ class MyService(pydase.DataService):
proxy = pydase.Client( proxy = pydase.Client(
url="ws://<ip_addr>:<service_port>", url="ws://<ip_addr>:<service_port>",
block_until_connected=False, block_until_connected=False,
client_id="my_pydase_client_id", client_id="my_pydase_client_id", # optional, defaults to system hostname
).proxy ).proxy
# For SSL-encrypted services, use the wss protocol # For SSL-encrypted services, use the wss protocol
@@ -75,9 +107,10 @@ if __name__ == "__main__":
``` ```
In this example: In this example:
- The `MyService` class has a `proxy` attribute that connects to a `pydase` service at `<ip_addr>:<service_port>`. - The `MyService` class has a `proxy` attribute that connects to a `pydase` service at `<ip_addr>:<service_port>`.
- By setting `block_until_connected=False`, the service can start without waiting for the connection to succeed. - By setting `block_until_connected=False`, the service can start without waiting for the connection to succeed.
- By setting `client_id`, the server will log a descriptive identifier for this client via the `X-Client-Id` HTTP header. - The `client_id` is optional. If not specified, it defaults to the system hostname, which will be sent in the `X-Client-Id` HTTP header for logging or authentication on the server side.
## Custom `socketio.AsyncClient` Connection Parameters ## Custom `socketio.AsyncClient` Connection Parameters

File diff suppressed because it is too large Load Diff

View File

@@ -35,6 +35,6 @@
"prettier": "3.3.2", "prettier": "3.3.2",
"typescript": "^5.7.3", "typescript": "^5.7.3",
"typescript-eslint": "^7.18.0", "typescript-eslint": "^7.18.0",
"vite": "^5.4.12" "vite": "^6.3.5"
} }
} }

View File

@@ -50,7 +50,7 @@ const createDisplayNameFromAccessPath = (fullAccessPath: string): string => {
function changeCallback( function changeCallback(
value: SerializedObject, value: SerializedObject,
callback: (ack: unknown) => void = () => {}, callback: (ack: undefined | SerializedObject) => void = () => {},
) { ) {
updateValue(value, callback); updateValue(value, callback);
} }

View File

@@ -38,7 +38,10 @@ interface NumberComponentProps {
isInstantUpdate: boolean; isInstantUpdate: boolean;
unit?: string; unit?: string;
addNotification: (message: string, levelname?: LevelName) => void; addNotification: (message: string, levelname?: LevelName) => void;
changeCallback?: (value: SerializedObject, callback?: (ack: unknown) => void) => void; changeCallback?: (
value: SerializedObject,
callback?: (ack: undefined | SerializedObject) => void,
) => void;
displayName?: string; displayName?: string;
id: string; id: string;
} }
@@ -217,6 +220,15 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
id, id,
} = props; } = props;
const handleChange = (newValue: SerializedObject) => {
changeCallback(newValue, (result: undefined | SerializedObject) => {
if (result === undefined) return;
if (result.type == "Exception") {
setInputString(value.toString());
}
});
};
// Create a state for the cursor position // Create a state for the cursor position
const cursorPositionRef = useRef<number | null>(null); const cursorPositionRef = useRef<number | null>(null);
@@ -319,7 +331,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
}; };
} }
changeCallback(serializedObject); handleChange(serializedObject);
return; return;
} else { } else {
console.debug(key); console.debug(key);
@@ -350,7 +362,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
}; };
} }
changeCallback(serializedObject); handleChange(serializedObject);
} }
setInputString(newValue); setInputString(newValue);
@@ -384,7 +396,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
}; };
} }
changeCallback(serializedObject); handleChange(serializedObject);
} }
}; };
useEffect(() => { useEffect(() => {

View File

@@ -19,7 +19,10 @@ interface SliderComponentProps {
stepSize: NumberObject; stepSize: NumberObject;
isInstantUpdate: boolean; isInstantUpdate: boolean;
addNotification: (message: string, levelname?: LevelName) => void; addNotification: (message: string, levelname?: LevelName) => void;
changeCallback?: (value: SerializedObject, callback?: (ack: unknown) => void) => void; changeCallback?: (
value: SerializedObject,
callback?: (ack: undefined | SerializedObject) => void,
) => void;
displayName: string; displayName: string;
id: string; id: string;
} }

View File

@@ -1,8 +1,9 @@
import { useState, useEffect } from "react"; import { useState, useEffect } from "react";
import { authority } from "../socket";
export default function useLocalStorage(key: string, defaultValue: unknown) { export default function useLocalStorage(key: string, defaultValue: unknown) {
const [value, setValue] = useState(() => { const [value, setValue] = useState(() => {
const storedValue = localStorage.getItem(key); const storedValue = localStorage.getItem(`${authority}:${key}`);
if (storedValue) { if (storedValue) {
return JSON.parse(storedValue); return JSON.parse(storedValue);
} }
@@ -11,7 +12,7 @@ export default function useLocalStorage(key: string, defaultValue: unknown) {
useEffect(() => { useEffect(() => {
if (value === undefined) return; if (value === undefined) return;
localStorage.setItem(key, JSON.stringify(value)); localStorage.setItem(`${authority}:${key}`, JSON.stringify(value));
}, [value, key]); }, [value, key]);
return [value, setValue]; return [value, setValue];

View File

@@ -28,7 +28,7 @@ export const socket = io(URL, {
export const updateValue = ( export const updateValue = (
serializedObject: SerializedObject, serializedObject: SerializedObject,
callback?: (ack: unknown) => void, callback?: (ack: undefined | SerializedObject) => void,
) => { ) => {
if (callback) { if (callback) {
socket.emit( socket.emit(

View File

@@ -1,6 +1,6 @@
[project] [project]
name = "pydase" name = "pydase"
version = "0.10.13" version = "0.10.21"
description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases." description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases."
authors = [ authors = [
{name = "Mose Müller",email = "mosemueller@gmail.com"} {name = "Mose Müller",email = "mosemueller@gmail.com"}

View File

@@ -1,17 +1,23 @@
import asyncio import asyncio
import logging import logging
import socket
import sys import sys
import threading import threading
import urllib.parse import urllib.parse
from builtins import ModuleNotFoundError
from types import TracebackType from types import TracebackType
from typing import TYPE_CHECKING, Any, TypedDict, cast from typing import TYPE_CHECKING, Any, TypedDict, cast
import aiohttp import aiohttp
import aiohttp_socks.connector
import socketio # type: ignore import socketio # type: ignore
from pydase.client.proxy_class import ProxyClass from pydase.client.proxy_class import ProxyClass
from pydase.client.proxy_loader import ProxyLoader from pydase.client.proxy_loader import (
ProxyLoader,
get_value,
trigger_method,
update_value,
)
from pydase.utils.serialization.deserializer import loads from pydase.utils.serialization.deserializer import loads
from pydase.utils.serialization.types import SerializedDataService, SerializedObject from pydase.utils.serialization.types import SerializedDataService, SerializedObject
@@ -59,10 +65,13 @@ class Client:
client's behaviour (e.g., reconnection attempts or reconnection delay). client's behaviour (e.g., reconnection attempts or reconnection delay).
client_id: An optional client identifier. This ID is sent to the server as the client_id: An optional client identifier. This ID is sent to the server as the
`X-Client-Id` HTTP header. It can be used for logging or authentication `X-Client-Id` HTTP header. It can be used for logging or authentication
purposes on the server side. purposes on the server side. If not provided, it defaults to the hostname
of the machine running the client.
proxy_url: An optional proxy URL to route the connection through. This is useful proxy_url: An optional proxy URL to route the connection through. This is useful
if the service is only reachable via an SSH tunnel or behind a firewall if the service is only reachable via an SSH tunnel or behind a firewall
(e.g., `socks5://localhost:2222`). (e.g., `socks5://localhost:2222`).
auto_update_proxy: If False, disables automatic updates from the server. Useful
for request-only clients where real-time synchronization is not needed.
Example: Example:
Connect to a service directly: Connect to a service directly:
@@ -91,7 +100,7 @@ class Client:
``` ```
""" """
def __init__( def __init__( # noqa: PLR0913
self, self,
*, *,
url: str, url: str,
@@ -99,6 +108,7 @@ class Client:
sio_client_kwargs: dict[str, Any] = {}, sio_client_kwargs: dict[str, Any] = {},
client_id: str | None = None, client_id: str | None = None,
proxy_url: str | None = None, proxy_url: str | None = None,
auto_update_proxy: bool = True, # new argument
): ):
# Parse the URL to separate base URL and path prefix # Parse the URL to separate base URL and path prefix
parsed_url = urllib.parse.urlparse(url) parsed_url = urllib.parse.urlparse(url)
@@ -112,10 +122,11 @@ class Client:
self._path_prefix = parsed_url.path.rstrip("/") # Remove trailing slash if any self._path_prefix = parsed_url.path.rstrip("/") # Remove trailing slash if any
self._url = url self._url = url
self._proxy_url = proxy_url self._proxy_url = proxy_url
self._client_id = client_id self._client_id = client_id or socket.gethostname()
self._sio_client_kwargs = sio_client_kwargs self._sio_client_kwargs = sio_client_kwargs
self._loop: asyncio.AbstractEventLoop | None = None self._loop: asyncio.AbstractEventLoop | None = None
self._thread: threading.Thread | None = None self._thread: threading.Thread | None = None
self._auto_update_proxy = auto_update_proxy
self.proxy: ProxyClass self.proxy: ProxyClass
"""A proxy object representing the remote service, facilitating interaction as """A proxy object representing the remote service, facilitating interaction as
if it were local.""" if it were local."""
@@ -150,6 +161,17 @@ class Client:
def _initialize_socketio_client(self) -> None: def _initialize_socketio_client(self) -> None:
if self._proxy_url is not None: if self._proxy_url is not None:
try:
import aiohttp_socks.connector
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Missing dependency 'aiohttp_socks'. To use SOCKS5 proxy support, "
"install the optional 'socks' extra:\n\n"
' pip install "pydase[socks]"\n\n'
"This is required when specifying a `proxy_url` for "
"`pydase.Client`."
)
session = aiohttp.ClientSession( session = aiohttp.ClientSession(
connector=aiohttp_socks.connector.ProxyConnector.from_url( connector=aiohttp_socks.connector.ProxyConnector.from_url(
url=self._proxy_url, loop=self._loop url=self._proxy_url, loop=self._loop
@@ -211,10 +233,12 @@ class Client:
async def _setup_events(self) -> None: async def _setup_events(self) -> None:
self._sio.on("connect", self._handle_connect) self._sio.on("connect", self._handle_connect)
self._sio.on("disconnect", self._handle_disconnect) self._sio.on("disconnect", self._handle_disconnect)
if self._auto_update_proxy:
self._sio.on("notify", self._handle_update) self._sio.on("notify", self._handle_update)
async def _handle_connect(self) -> None: async def _handle_connect(self) -> None:
logger.debug("Connected to '%s' ...", self._url) logger.debug("Connected to '%s' ...", self._url)
if self._auto_update_proxy:
serialized_object = cast( serialized_object = cast(
"SerializedDataService", await self._sio.call("service_serialization") "SerializedDataService", await self._sio.call("service_serialization")
) )
@@ -222,7 +246,6 @@ class Client:
self.proxy, serialized_object=serialized_object self.proxy, serialized_object=serialized_object
) )
serialized_object["type"] = "DeviceConnection" serialized_object["type"] = "DeviceConnection"
if self.proxy._service_representation is not None:
# need to use object.__setattr__ to not trigger an observer notification # need to use object.__setattr__ to not trigger an observer notification
object.__setattr__(self.proxy, "_service_representation", serialized_object) object.__setattr__(self.proxy, "_service_representation", serialized_object)
@@ -240,3 +263,77 @@ class Client:
data["data"]["full_access_path"], data["data"]["full_access_path"],
loads(data["data"]["value"]), loads(data["data"]["value"]),
) )
def get_value(self, access_path: str) -> Any:
"""Retrieve the current value of a remote attribute.
Args:
access_path: The dot-separated path to the attribute in the remote service.
Returns:
The deserialized value of the remote attribute, or None if the client is not
connected.
Example:
```python
value = client.get_value("my_device.temperature")
print(value)
```
"""
if self._loop is not None:
return get_value(
sio_client=self._sio,
loop=self._loop,
access_path=access_path,
)
return None
def update_value(self, access_path: str, new_value: Any) -> Any:
"""Set a new value for a remote attribute.
Args:
access_path: The dot-separated path to the attribute in the remote service.
new_value: The new value to assign to the attribute.
Example:
```python
client.update_value("my_device.power", True)
```
"""
if self._loop is not None:
update_value(
sio_client=self._sio,
loop=self._loop,
access_path=access_path,
value=new_value,
)
def trigger_method(self, access_path: str, *args: Any, **kwargs: Any) -> Any:
"""Trigger a remote method with optional arguments.
Args:
access_path: The dot-separated path to the method in the remote service.
*args: Positional arguments to pass to the method.
**kwargs: Keyword arguments to pass to the method.
Returns:
The return value of the method call, if any.
Example:
```python
result = client.trigger_method("my_device.calibrate", timeout=5)
print(result)
```
"""
if self._loop is not None:
return trigger_method(
sio_client=self._sio,
loop=self._loop,
access_path=access_path,
args=list(args),
kwargs=kwargs,
)
return None

View File

@@ -65,19 +65,31 @@ class ProxyClass(ProxyClassMixin, pydase.components.DeviceConnection):
self.reconnect = reconnect self.reconnect = reconnect
def serialize(self) -> SerializedObject: def serialize(self) -> SerializedObject:
current_loop = asyncio.get_event_loop()
if not self.connected or current_loop == self._loop:
logger.debug(
"Client not connected, or called from within client event loop - using "
"fallback serialization"
)
if self._service_representation is None: if self._service_representation is None:
serialization_future = cast( serialized_service = pydase.components.DeviceConnection().serialize()
else:
serialized_service = self._service_representation
else:
future = cast(
"asyncio.Future[SerializedDataService]", "asyncio.Future[SerializedDataService]",
asyncio.run_coroutine_threadsafe( asyncio.run_coroutine_threadsafe(
self._sio.call("service_serialization"), self._loop self._sio.call("service_serialization"), self._loop
), ),
) )
result = future.result()
# need to use object.__setattr__ to not trigger an observer notification # need to use object.__setattr__ to not trigger an observer notification
object.__setattr__( object.__setattr__(self, "_service_representation", result)
self, "_service_representation", serialization_future.result()
)
if TYPE_CHECKING: if TYPE_CHECKING:
self._service_representation = serialization_future.result() self._service_representation = result
serialized_service = result
device_connection_value = cast( device_connection_value = cast(
"dict[str, SerializedObject]", "dict[str, SerializedObject]",
@@ -93,7 +105,7 @@ class ProxyClass(ProxyClassMixin, pydase.components.DeviceConnection):
"dict[str, SerializedObject]", "dict[str, SerializedObject]",
# need to deepcopy to not overwrite the _service_representation dict # need to deepcopy to not overwrite the _service_representation dict
# when adding a prefix with add_prefix_to_full_access_path # when adding a prefix with add_prefix_to_full_access_path
deepcopy(self._service_representation["value"]), deepcopy(serialized_service["value"]),
), ),
**device_connection_value, **device_connection_value,
} }

View File

@@ -74,6 +74,21 @@ def update_value(
) )
def get_value(
sio_client: socketio.AsyncClient,
loop: asyncio.AbstractEventLoop,
access_path: str,
) -> Any:
async def get_result() -> Any:
return await sio_client.call("get_value", access_path)
result = asyncio.run_coroutine_threadsafe(
get_result(),
loop=loop,
).result()
return ProxyLoader.loads_proxy(result, sio_client, loop)
class ProxyDict(dict[str, Any]): class ProxyDict(dict[str, Any]):
def __init__( def __init__(
self, self,
@@ -242,16 +257,11 @@ class ProxyClassMixin:
self, attr_name: str, serialized_object: SerializedObject self, attr_name: str, serialized_object: SerializedObject
) -> None: ) -> None:
def getter_proxy() -> Any: def getter_proxy() -> Any:
async def get_result() -> Any: return get_value(
return await self._sio.call( sio_client=self._sio,
"get_value", serialized_object["full_access_path"]
)
result = asyncio.run_coroutine_threadsafe(
get_result(),
loop=self._loop, loop=self._loop,
).result() access_path=serialized_object["full_access_path"],
return ProxyLoader.loads_proxy(result, self._sio, self._loop) )
dict.__setitem__(self._proxy_getters, attr_name, getter_proxy) # type: ignore dict.__setitem__(self._proxy_getters, attr_name, getter_proxy) # type: ignore

View File

@@ -12,7 +12,6 @@ from pydase.observer_pattern.observable.observable import (
from pydase.utils.helpers import ( from pydase.utils.helpers import (
get_class_and_instance_attributes, get_class_and_instance_attributes,
is_descriptor, is_descriptor,
is_property_attribute,
) )
from pydase.utils.serialization.serializer import ( from pydase.utils.serialization.serializer import (
Serializer, Serializer,
@@ -28,9 +27,6 @@ class DataService(AbstractDataService):
self.__check_instance_classes() self.__check_instance_classes()
def __setattr__(self, name: str, value: Any, /) -> None: def __setattr__(self, name: str, value: Any, /) -> None:
# Check and warn for unexpected type changes in attributes
self._warn_on_type_change(name, value)
# every class defined by the user should inherit from DataService if it is # every class defined by the user should inherit from DataService if it is
# assigned to a public attribute # assigned to a public attribute
if not name.startswith("_") and not inspect.isfunction(value): if not name.startswith("_") and not inspect.isfunction(value):
@@ -39,21 +35,6 @@ class DataService(AbstractDataService):
# Set the attribute # Set the attribute
super().__setattr__(name, value) super().__setattr__(name, value)
def _warn_on_type_change(self, attr_name: str, new_value: Any) -> None:
if is_property_attribute(self, attr_name):
return
current_value = getattr(self, attr_name, None)
if self._is_unexpected_type_change(current_value, new_value):
logger.warning(
"Type of '%s' changed from '%s' to '%s'. This may have unwanted "
"side effects! Consider setting it to '%s' directly.",
attr_name,
type(current_value).__name__,
type(new_value).__name__,
type(current_value).__name__,
)
def _is_unexpected_type_change(self, current_value: Any, new_value: Any) -> bool: def _is_unexpected_type_change(self, current_value: Any, new_value: Any) -> bool:
return ( return (
isinstance(current_value, float) and not isinstance(new_value, float) isinstance(current_value, float) and not isinstance(new_value, float)

View File

@@ -20,6 +20,19 @@ from pydase.utils.serialization.types import SerializedObject
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _is_nested_attribute(full_access_path: str, changing_attributes: list[str]) -> bool:
"""Return True if the full_access_path is a nested attribute of any
changing_attribute."""
return any(
(
full_access_path.startswith((f"{attr}.", f"{attr}["))
and full_access_path != attr
)
for attr in changing_attributes
)
class DataServiceObserver(PropertyObserver): class DataServiceObserver(PropertyObserver):
def __init__(self, state_manager: StateManager) -> None: def __init__(self, state_manager: StateManager) -> None:
self.state_manager = state_manager self.state_manager = state_manager
@@ -29,11 +42,7 @@ class DataServiceObserver(PropertyObserver):
super().__init__(state_manager.service) super().__init__(state_manager.service)
def on_change(self, full_access_path: str, value: Any) -> None: def on_change(self, full_access_path: str, value: Any) -> None:
if any( if _is_nested_attribute(full_access_path, self.changing_attributes):
full_access_path.startswith(changing_attribute)
and full_access_path != changing_attribute
for changing_attribute in self.changing_attributes
):
return return
cached_value_dict: SerializedObject cached_value_dict: SerializedObject

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -7,8 +7,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="theme-color" content="#000000" /> <meta name="theme-color" content="#000000" />
<meta name="description" content="Web site displaying a pydase UI." /> <meta name="description" content="Web site displaying a pydase UI." />
<script type="module" crossorigin src="/assets/index-BLJetjaQ.js"></script> <script type="module" crossorigin src="/assets/index-CKS_bS2p.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-DJzFvk4W.css"> <link rel="stylesheet" crossorigin href="/assets/index-Cs09d5Pk.css">
</head> </head>
<script> <script>

View File

@@ -22,13 +22,14 @@ def reverse_dict(original_dict: dict[str, list[str]]) -> dict[str, list[str]]:
def get_property_dependencies(prop: property, prefix: str = "") -> list[str]: def get_property_dependencies(prop: property, prefix: str = "") -> list[str]:
source_code_string = inspect.getsource(prop.fget) # type: ignore[arg-type] source_code_string = inspect.getsource(prop.fget) # type: ignore[arg-type]
pattern = r"self\.([^\s\{\}]+)" pattern = r"self\.([^\s\{\}\(\)]+)"
matches = re.findall(pattern, source_code_string) matches = re.findall(pattern, source_code_string)
return [prefix + match for match in matches if "(" not in match] return [prefix + match for match in matches if "(" not in match]
class PropertyObserver(Observer): class PropertyObserver(Observer):
def __init__(self, observable: Observable) -> None: def __init__(self, observable: Observable) -> None:
self.property_deps_dict: dict[str, list[str]] = {}
super().__init__(observable) super().__init__(observable)
self._update_property_deps_dict() self._update_property_deps_dict()

View File

@@ -14,7 +14,6 @@ from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager from pydase.data_service.state_manager import StateManager
from pydase.server.web_server import WebServer from pydase.server.web_server import WebServer
from pydase.task.autostart import autostart_service_tasks from pydase.task.autostart import autostart_service_tasks
from pydase.utils.helpers import current_event_loop_exists
HANDLED_SIGNALS = ( HANDLED_SIGNALS = (
signal.SIGINT, # Unix signal 2. Sent by Ctrl+C. signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
@@ -136,6 +135,14 @@ class Server:
autosave_interval: Interval in seconds between automatic state save events. autosave_interval: Interval in seconds between automatic state save events.
If set to `None`, automatic saving is disabled. Defaults to 30 seconds. If set to `None`, automatic saving is disabled. Defaults to 30 seconds.
**kwargs: Additional keyword arguments. **kwargs: Additional keyword arguments.
# Advanced
- [`post_startup`][pydase.Server.post_startup] hook:
This method is intended to be overridden in subclasses. It runs immediately
after all servers (web and additional) are initialized and before entering the
main event loop. You can use this hook to register custom logic after the
server is fully started.
""" """
def __init__( # noqa: PLR0913 def __init__( # noqa: PLR0913
@@ -162,6 +169,10 @@ class Server:
self._additional_servers = additional_servers self._additional_servers = additional_servers
self.should_exit = False self.should_exit = False
self.servers: dict[str, asyncio.Future[Any]] = {} self.servers: dict[str, asyncio.Future[Any]] = {}
self._loop = asyncio.new_event_loop()
asyncio.set_event_loop(self._loop)
self._state_manager = StateManager( self._state_manager = StateManager(
service=self._service, service=self._service,
filename=filename, filename=filename,
@@ -170,11 +181,14 @@ class Server:
self._observer = DataServiceObserver(self._state_manager) self._observer = DataServiceObserver(self._state_manager)
self._state_manager.load_state() self._state_manager.load_state()
autostart_service_tasks(self._service) autostart_service_tasks(self._service)
if not current_event_loop_exists():
self._loop = asyncio.new_event_loop() self._web_server = WebServer(
asyncio.set_event_loop(self._loop) data_service_observer=self._observer,
else: host=self._host,
self._loop = asyncio.get_event_loop() port=self._web_port,
enable_frontend=self._enable_web,
**self._kwargs,
)
def run(self) -> None: def run(self) -> None:
""" """
@@ -193,6 +207,7 @@ class Server:
logger.info("Started server process [%s]", process_id) logger.info("Started server process [%s]", process_id)
await self.startup() await self.startup()
await self.post_startup()
if self.should_exit: if self.should_exit:
return return
await self.main_loop() await self.main_loop()
@@ -204,6 +219,10 @@ class Server:
self._loop.set_exception_handler(self.custom_exception_handler) self._loop.set_exception_handler(self.custom_exception_handler)
self.install_signal_handlers() self.install_signal_handlers()
server_task = self._loop.create_task(self._web_server.serve())
server_task.add_done_callback(self._handle_server_shutdown)
self.servers["web"] = server_task
for server in self._additional_servers: for server in self._additional_servers:
addin_server = server["server"]( addin_server = server["server"](
data_service_observer=self._observer, data_service_observer=self._observer,
@@ -219,17 +238,6 @@ class Server:
server_task = self._loop.create_task(addin_server.serve()) server_task = self._loop.create_task(addin_server.serve())
server_task.add_done_callback(self._handle_server_shutdown) server_task.add_done_callback(self._handle_server_shutdown)
self.servers[server_name] = server_task self.servers[server_name] = server_task
if self._enable_web:
self._web_server = WebServer(
data_service_observer=self._observer,
host=self._host,
port=self._web_port,
**self._kwargs,
)
server_task = self._loop.create_task(self._web_server.serve())
server_task.add_done_callback(self._handle_server_shutdown)
self.servers["web"] = server_task
self._loop.create_task(self._state_manager.autosave()) self._loop.create_task(self._state_manager.autosave())
@@ -260,6 +268,9 @@ class Server:
logger.debug("Cancelling tasks") logger.debug("Cancelling tasks")
await self.__cancel_tasks() await self.__cancel_tasks()
async def post_startup(self) -> None:
"""Override this in a subclass to register custom logic after startup."""
async def __cancel_servers(self) -> None: async def __cancel_servers(self) -> None:
for server_name, task in self.servers.items(): for server_name, task in self.servers.items():
task.cancel() task.cancel()
@@ -309,7 +320,7 @@ class Server:
# here we exclude most kinds of exceptions from triggering this kind of shutdown # here we exclude most kinds of exceptions from triggering this kind of shutdown
exc = context.get("exception") exc = context.get("exception")
if type(exc) not in [RuntimeError, KeyboardInterrupt, asyncio.CancelledError]: if type(exc) not in [RuntimeError, KeyboardInterrupt, asyncio.CancelledError]:
if self._enable_web: if loop.is_running():
async def emit_exception() -> None: async def emit_exception() -> None:
try: try:

View File

@@ -115,7 +115,7 @@ def setup_sio_server(
def sio_callback( def sio_callback(
full_access_path: str, value: Any, cached_value_dict: SerializedObject full_access_path: str, value: Any, cached_value_dict: SerializedObject
) -> None: ) -> None:
if cached_value_dict != {}: if cached_value_dict != {} and loop.is_running():
async def notify() -> None: async def notify() -> None:
try: try:

View File

@@ -81,6 +81,7 @@ class WebServer:
host: str, host: str,
port: int, port: int,
*, *,
enable_frontend: bool = True,
css: str | Path | None = None, css: str | Path | None = None,
favicon_path: str | Path | None = None, favicon_path: str | Path | None = None,
enable_cors: bool = True, enable_cors: bool = True,
@@ -97,19 +98,18 @@ class WebServer:
self.enable_cors = enable_cors self.enable_cors = enable_cors
self.frontend_src = frontend_src self.frontend_src = frontend_src
self.favicon_path: Path | str = favicon_path # type: ignore self.favicon_path: Path | str = favicon_path # type: ignore
self.enable_frontend = enable_frontend
if self.favicon_path is None: if self.favicon_path is None:
self.favicon_path = self.frontend_src / "favicon.ico" self.favicon_path = self.frontend_src / "favicon.ico"
self._service_config_dir = config_dir self._service_config_dir = config_dir
self._generate_web_settings = generate_web_settings self._generate_web_settings = generate_web_settings
self._loop: asyncio.AbstractEventLoop self._loop = asyncio.get_event_loop()
self._sio = setup_sio_server(self.observer, self.enable_cors, self._loop)
self._initialise_configuration() self._initialise_configuration()
async def serve(self) -> None: async def serve(self) -> None:
self._loop = asyncio.get_running_loop()
self._sio = setup_sio_server(self.observer, self.enable_cors, self._loop)
async def index( async def index(
request: aiohttp.web.Request, request: aiohttp.web.Request,
) -> aiohttp.web.Response | aiohttp.web.FileResponse: ) -> aiohttp.web.Response | aiohttp.web.FileResponse:
@@ -162,6 +162,7 @@ class WebServer:
# Define routes # Define routes
self._sio.attach(app, socketio_path="/ws/socket.io") self._sio.attach(app, socketio_path="/ws/socket.io")
if self.enable_frontend:
app.router.add_static("/assets", self.frontend_src / "assets") app.router.add_static("/assets", self.frontend_src / "assets")
app.router.add_get("/favicon.ico", self._favicon_route) app.router.add_get("/favicon.ico", self._favicon_route)
app.router.add_get("/service-properties", self._service_properties_route) app.router.add_get("/service-properties", self._service_properties_route)
@@ -169,6 +170,7 @@ class WebServer:
app.router.add_get("/custom.css", self._styles_route) app.router.add_get("/custom.css", self._styles_route)
app.add_subapp("/api/", create_api_application(self.state_manager)) app.add_subapp("/api/", create_api_application(self.state_manager))
if self.enable_frontend:
app.router.add_get(r"/", index) app.router.add_get(r"/", index)
app.router.add_get(r"/{tail:.*}", index) app.router.add_get(r"/{tail:.*}", index)

View File

@@ -231,6 +231,6 @@ def current_event_loop_exists() -> bool:
import asyncio import asyncio
try: try:
return not asyncio.get_running_loop().is_closed() return not asyncio.get_event_loop().is_closed()
except RuntimeError: except RuntimeError:
return False return False

View File

@@ -165,6 +165,7 @@ class SocketIOHandler(logging.Handler):
log_entry = self.format(record) log_entry = self.format(record)
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
if loop.is_running():
loop.create_task( loop.create_task(
self._sio.emit( self._sio.emit(
"log", "log",

View File

@@ -158,7 +158,7 @@ class Serializer:
"doc": None, "doc": None,
"readonly": True, "readonly": True,
"type": "Exception", "type": "Exception",
"value": obj.args[0], "value": obj.args[0] if len(obj.args) > 0 else "",
"name": obj.__class__.__name__, "name": obj.__class__.__name__,
} }

View File

@@ -168,10 +168,25 @@ def test_context_manager(pydase_client: pydase.Client) -> None:
def test_client_id( def test_client_id(
pydase_client: pydase.Client, caplog: pytest.LogCaptureFixture pydase_client: pydase.Client, caplog: pytest.LogCaptureFixture
) -> None: ) -> None:
import socket
pydase.Client(url="ws://localhost:9999") pydase.Client(url="ws://localhost:9999")
assert "Client [sid=" in caplog.text assert f"Client [id={socket.gethostname()}]" in caplog.text
caplog.clear() caplog.clear()
pydase.Client(url="ws://localhost:9999", client_id="my_service") pydase.Client(url="ws://localhost:9999", client_id="my_service")
assert "Client [id=my_service] connected" in caplog.text assert "Client [id=my_service] connected" in caplog.text
def test_get_value(
pydase_client: pydase.Client, caplog: pytest.LogCaptureFixture
) -> None:
pydase_client.update_value("sub_service.name", "Other name")
assert pydase_client.get_value("sub_service.name") == "Other name"
assert (
pydase_client.trigger_method("my_async_method", input_str="Hello World")
== "Hello World"
)

View File

@@ -0,0 +1,22 @@
import asyncio
from unittest.mock import AsyncMock, call, patch
import pytest
from pydase import components
from pydase.client.proxy_class import ProxyClass
@pytest.mark.asyncio
async def test_serialize_fallback_inside_event_loop() -> None:
loop = asyncio.get_running_loop()
mock_sio = AsyncMock()
proxy = ProxyClass(sio_client=mock_sio, loop=loop, reconnect=lambda: None)
with patch.object(
components.DeviceConnection, "serialize", return_value={"value": {}}
) as mock_fallback:
result = proxy.serialize()
mock_fallback.assert_has_calls(calls=[call(), call()])
assert isinstance(result, dict)

View File

@@ -1,10 +1,17 @@
import sys
from pytest import LogCaptureFixture
import pydase import pydase
import pydase.components import pydase.components
from pydase.data_service.data_service_observer import DataServiceObserver from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager from pydase.data_service.state_manager import StateManager
from pydase.utils.serialization.serializer import dump from pydase.utils.serialization.serializer import dump
from pytest import LogCaptureFixture
if sys.version_info < (3, 13):
PATHLIB_PATH = "pathlib.Path"
else:
PATHLIB_PATH = "pathlib._local.Path"
def test_image_functions(caplog: LogCaptureFixture) -> None: def test_image_functions(caplog: LogCaptureFixture) -> None:
@@ -106,7 +113,7 @@ def test_image_serialization() -> None:
"signature": { "signature": {
"parameters": { "parameters": {
"path": { "path": {
"annotation": "pathlib.Path | str", "annotation": f"{PATHLIB_PATH} | str",
"default": {}, "default": {},
} }
}, },

View File

@@ -1,38 +1,13 @@
from enum import Enum from enum import Enum
from typing import Any from typing import Any
import pydase
import pydase.units as u
import pytest import pytest
from pydase import DataService
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.utils.decorators import FunctionDefinitionError, frontend
from pytest import LogCaptureFixture from pytest import LogCaptureFixture
import pydase
def test_unexpected_type_change_warning(caplog: LogCaptureFixture) -> None: import pydase.units as u
class ServiceClass(DataService): from pydase import DataService
attr_1 = 1.0 from pydase.utils.decorators import FunctionDefinitionError, frontend
current = 1.0 * u.units.A
service_instance = ServiceClass()
state_manager = StateManager(service_instance)
DataServiceObserver(state_manager)
service_instance.attr_1 = 2
assert "'attr_1' changed to '2'" in caplog.text
assert (
"Type of 'attr_1' changed from 'float' to 'int'. This may have unwanted "
"side effects! Consider setting it to 'float' directly." in caplog.text
)
service_instance.current = 2
assert "'current' changed to '2'" in caplog.text
assert (
"Type of 'current' changed from 'Quantity' to 'int'. This may have unwanted "
"side effects! Consider setting it to 'Quantity' directly." in caplog.text
)
def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None: def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None:

View File

@@ -1,8 +1,9 @@
import logging import logging
from typing import Any from typing import Any
import pydase
import pytest import pytest
import pydase
from pydase.data_service.data_service_observer import DataServiceObserver from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager from pydase.data_service.state_manager import StateManager
from pydase.utils.serialization.serializer import SerializationError, dump from pydase.utils.serialization.serializer import SerializationError, dump
@@ -241,3 +242,42 @@ def test_read_only_dict_property(caplog: pytest.LogCaptureFixture) -> None:
service_instance._dict_attr["dotted.key"] = 2.0 service_instance._dict_attr["dotted.key"] = 2.0
assert "'dict_attr[\"dotted.key\"]' changed to '2.0'" in caplog.text assert "'dict_attr[\"dotted.key\"]' changed to '2.0'" in caplog.text
def test_dependency_as_function_argument(caplog: pytest.LogCaptureFixture) -> None:
class MyObservable(pydase.DataService):
some_int = 0
@property
def other_int(self) -> int:
return self.add_one(self.some_int)
def add_one(self, value: int) -> int:
return value + 1
service_instance = MyObservable()
state_manager = StateManager(service=service_instance)
DataServiceObserver(state_manager)
service_instance.some_int = 1337
assert "'other_int' changed to '1338'" in caplog.text
def test_property_starting_with_dependency_name(
caplog: pytest.LogCaptureFixture,
) -> None:
class MyObservable(pydase.DataService):
my_int = 0
@property
def my_int_2(self) -> int:
return self.my_int + 1
service_instance = MyObservable()
state_manager = StateManager(service=service_instance)
DataServiceObserver(state_manager)
service_instance.my_int = 1337
assert "'my_int_2' changed to '1338'" in caplog.text

View File

@@ -1225,3 +1225,22 @@ def test_add_prefix_to_full_access_path(
serialized_obj: SerializedObject, prefix: str, expected: SerializedObject serialized_obj: SerializedObject, prefix: str, expected: SerializedObject
) -> None: ) -> None:
assert add_prefix_to_full_access_path(serialized_obj, prefix) == expected assert add_prefix_to_full_access_path(serialized_obj, prefix) == expected
def test_serialize_exception() -> None:
assert dump(Exception()) == {
"doc": None,
"full_access_path": "",
"name": "Exception",
"readonly": True,
"type": "Exception",
"value": "",
}
assert dump(Exception("Exception message")) == {
"doc": None,
"full_access_path": "",
"name": "Exception",
"readonly": True,
"type": "Exception",
"value": "Exception message",
}