Mirror of https://github.com/tiqi-group/pydase.git (synced 2025-12-17 11:51:17 +01:00)
Compare commits

82 commits (SHA1):
a46945ed29, 21809b49e8, c45f1bd489, 5784818e5a, 64a7097568, 5ef382728c, 51d6189002, 71e29c890e,
6e407ba1d6, 4fb5e56aa8, d55ba3a85f, 265d9a7ef5, 4cd36b4a2b, 1b2ff38aff, 4b243985e8, 8615bdeadc,
d24893a989, 661603ef71, d6947b0f43, 93d38651e8, 72a3c199d9, 7914e2fa7b, 0a4f898fde, a9aa55fc99,
fd5a230fa4, 243b46aadb, 0f1ca84df5, 6438a07305, 80bfd209df, e065b1fb22, 977cee32b9, 96f695020b,
33ce01865a, f5374573cd, 43c6b5e817, 37380c6d24, ae21656e83, a4b4f179c6, c6beca3961, 2fa8240e54,
369587a50c, 25343f6909, c136c9f3de, 8897c2fe4c, 80c5c4e99d, 423441a74c, 9ec60e3891, 8bde104322,
9b57b6984e, e5b89f2581, ff1654e65c, cded80c8e5, 87a33b6293, 6d621daaac, 8c1a50c106, a1545d341b,
28a1cc7cd3, c968708b85, fef8606d17, 82286c8da0, 533826a398, 982875dee6, e54710cd4d, f48f7aacfb,
e97aab4f36, 015c66d5a6, 9827d0747c, 38a12fb72e, fb6ec16bf5, 9ee498eb5c, d015333123, c4e7fe66a8,
5f1451a1c1, 4c28cbaf7d, a97b8eb2b4, f6b5c1b567, f92d525588, 61b69d77cc, 8abe9357cf, 0dace2a9f0,
9992ade0ed, 6c2cebada2
4 .github/workflows/python-package.yml (vendored)

@@ -2,6 +2,8 @@
 # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
 
 name: Python package
+permissions:
+  contents: read
 
 on:
   push:
@@ -16,7 +18,7 @@ jobs:
     strategy:
       fail-fast: false
      matrix:
-        python-version: ["3.10", "3.11", "3.12"]
+        python-version: ["3.10", "3.11", "3.12", "3.13"]
 
     steps:
       - uses: actions/checkout@v4
2 LICENSE

@@ -1,4 +1,4 @@
-Copyright (c) 2023-2024 Mose Müller <mosemueller@gmail.com>
+Copyright (c) 2023-2025 ETH Zurich, Mose Müller, Carmelo Mordini
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -5,6 +5,7 @@
 [](https://pypi.org/project/pydase/)
 [](https://pydase.readthedocs.io/en/stable/)
 [][License]
+[](http://doi.org/10.5905/ethz-1007-907)
 
 `pydase` is a Python library that simplifies the creation of remote control interfaces for Python objects. It exposes the public attributes of a user-defined class via a [Socket.IO](https://python-socketio.readthedocs.io/en/stable/) web server, ensuring they are always in sync with the service state. You can interact with these attributes using an RPC client, a RESTful API, or a web browser. The web browser frontend is auto-generated, displaying components that correspond to each public attribute of the class for direct interaction.
 `pydase` implements an [observer pattern][Observer Pattern] to provide the real-time updates, ensuring that changes to the class attributes are reflected across all clients.
@@ -23,7 +23,39 @@ The proxy acts as a local representation of the remote service, enabling intuiti
 
 The proxy class automatically synchronizes with the server's attributes and methods, keeping itself up-to-date with any changes. This dynamic synchronization essentially mirrors the server's API, making it feel like you're working with a local object.
 
-### Accessing Services Behind Firewalls or SSH Gateways
+## Automatic Proxy Updates
 
+By default, the client listens for attribute and structure changes from the server and dynamically updates its internal proxy representation. This ensures that value changes or newly added attributes on the server appear in the client proxy without requiring reconnection or manual refresh.
+
+This is useful, for example, when [integrating the client into another service](#integrating-the-client-into-another-service). However, if you want to avoid this behavior (e.g., to reduce network traffic or avoid frequent re-syncing), you can disable it. When passing `auto_update_proxy=False` to the client, the proxy will not track changes after the initial connection:
+
+```python
+client = pydase.Client(
+    url="ws://localhost:8001",
+    auto_update_proxy=False
+)
+```
+
+## Direct API Access
+
+In addition to using the `proxy` object, users may access the server API directly via the following methods:
+
+```python
+client = pydase.Client(url="ws://localhost:8001")
+
+# Get the current value of an attribute
+value = client.get_value("device.voltage")
+
+# Update an attribute
+client.update_value("device.voltage", 5.0)
+
+# Call a method on the remote service
+result = client.trigger_method("device.reset")
+```
+
+This bypasses the proxy and is useful for lower-level access to individual service endpoints.
+
+## Accessing Services Behind Firewalls or SSH Gateways
+
 If your service is only reachable through a private network or SSH gateway, you can route your connection through a local SOCKS5 proxy using the `proxy_url` parameter.
 
@@ -58,7 +90,7 @@ class MyService(pydase.DataService):
     proxy = pydase.Client(
         url="ws://<ip_addr>:<service_port>",
         block_until_connected=False,
-        client_id="my_pydase_client_id",
+        client_id="my_pydase_client_id",  # optional, defaults to system hostname
     ).proxy
 
     # For SSL-encrypted services, use the wss protocol
@@ -75,9 +107,10 @@ if __name__ == "__main__":
 ```
 
 In this example:
 
 - The `MyService` class has a `proxy` attribute that connects to a `pydase` service at `<ip_addr>:<service_port>`.
 - By setting `block_until_connected=False`, the service can start without waiting for the connection to succeed.
-- By setting `client_id`, the server will log a descriptive identifier for this client via the `X-Client-Id` HTTP header.
+- The `client_id` is optional. If not specified, it defaults to the system hostname, which will be sent in the `X-Client-Id` HTTP header for logging or authentication on the server side.
 
 ## Custom `socketio.AsyncClient` Connection Parameters
 
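Taken together, the changes above describe a request-only client: `auto_update_proxy=False` turns off the background proxy synchronisation while `get_value`, `update_value`, and `trigger_method` still talk to the service directly. A minimal sketch, assuming a pydase service with a `device.voltage` attribute and a `device.reset` method is reachable at `ws://localhost:8001` (both names are placeholders):

```python
import pydase

# Request-only client: the proxy is built once at connect time and is not
# kept in sync afterwards, avoiding the per-update "notify" traffic.
client = pydase.Client(
    url="ws://localhost:8001",
    auto_update_proxy=False,
)

print(client.get_value("device.voltage"))   # read an attribute
client.update_value("device.voltage", 5.0)  # write an attribute
client.trigger_method("device.reset")       # call a remote method
```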
1256 frontend/package-lock.json (generated): file diff suppressed because it is too large
@@ -35,6 +35,6 @@
     "prettier": "3.3.2",
     "typescript": "^5.7.3",
     "typescript-eslint": "^7.18.0",
-    "vite": "^5.4.12"
+    "vite": "^6.3.5"
   }
 }
@@ -50,7 +50,7 @@ const createDisplayNameFromAccessPath = (fullAccessPath: string): string => {
 
 function changeCallback(
   value: SerializedObject,
-  callback: (ack: unknown) => void = () => {},
+  callback: (ack: undefined | SerializedObject) => void = () => {},
 ) {
   updateValue(value, callback);
 }
@@ -38,7 +38,10 @@ interface NumberComponentProps {
   isInstantUpdate: boolean;
   unit?: string;
   addNotification: (message: string, levelname?: LevelName) => void;
-  changeCallback?: (value: SerializedObject, callback?: (ack: unknown) => void) => void;
+  changeCallback?: (
+    value: SerializedObject,
+    callback?: (ack: undefined | SerializedObject) => void,
+  ) => void;
   displayName?: string;
   id: string;
 }
@@ -217,6 +220,15 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
     id,
   } = props;
 
+  const handleChange = (newValue: SerializedObject) => {
+    changeCallback(newValue, (result: undefined | SerializedObject) => {
+      if (result === undefined) return;
+      if (result.type == "Exception") {
+        setInputString(value.toString());
+      }
+    });
+  };
+
   // Create a state for the cursor position
   const cursorPositionRef = useRef<number | null>(null);
 
@@ -319,7 +331,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
        };
      }
 
-      changeCallback(serializedObject);
+      handleChange(serializedObject);
       return;
     } else {
       console.debug(key);
@@ -350,7 +362,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
      };
    }
 
-    changeCallback(serializedObject);
+    handleChange(serializedObject);
   }
 
   setInputString(newValue);
@@ -384,7 +396,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
      };
    }
 
-    changeCallback(serializedObject);
+    handleChange(serializedObject);
   }
 };
 
 useEffect(() => {
@@ -19,7 +19,10 @@ interface SliderComponentProps {
   stepSize: NumberObject;
   isInstantUpdate: boolean;
   addNotification: (message: string, levelname?: LevelName) => void;
-  changeCallback?: (value: SerializedObject, callback?: (ack: unknown) => void) => void;
+  changeCallback?: (
+    value: SerializedObject,
+    callback?: (ack: undefined | SerializedObject) => void,
+  ) => void;
   displayName: string;
   id: string;
 }
@@ -1,8 +1,9 @@
 import { useState, useEffect } from "react";
+import { authority } from "../socket";
 
 export default function useLocalStorage(key: string, defaultValue: unknown) {
   const [value, setValue] = useState(() => {
-    const storedValue = localStorage.getItem(key);
+    const storedValue = localStorage.getItem(`${authority}:${key}`);
     if (storedValue) {
       return JSON.parse(storedValue);
     }
@@ -11,7 +12,7 @@ export default function useLocalStorage(key: string, defaultValue: unknown) {
 
   useEffect(() => {
     if (value === undefined) return;
-    localStorage.setItem(key, JSON.stringify(value));
+    localStorage.setItem(`${authority}:${key}`, JSON.stringify(value));
   }, [value, key]);
 
   return [value, setValue];
@@ -28,7 +28,7 @@ export const socket = io(URL, {
 
 export const updateValue = (
   serializedObject: SerializedObject,
-  callback?: (ack: unknown) => void,
+  callback?: (ack: undefined | SerializedObject) => void,
 ) => {
   if (callback) {
     socket.emit(
@@ -1,6 +1,6 @@
 [project]
 name = "pydase"
-version = "0.10.13"
+version = "0.10.21"
 description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases."
 authors = [
     {name = "Mose Müller",email = "mosemueller@gmail.com"}
@@ -1,17 +1,23 @@
 import asyncio
 import logging
+import socket
 import sys
 import threading
 import urllib.parse
+from builtins import ModuleNotFoundError
 from types import TracebackType
 from typing import TYPE_CHECKING, Any, TypedDict, cast
 
 import aiohttp
-import aiohttp_socks.connector
 import socketio  # type: ignore
 
 from pydase.client.proxy_class import ProxyClass
-from pydase.client.proxy_loader import ProxyLoader
+from pydase.client.proxy_loader import (
+    ProxyLoader,
+    get_value,
+    trigger_method,
+    update_value,
+)
 from pydase.utils.serialization.deserializer import loads
 from pydase.utils.serialization.types import SerializedDataService, SerializedObject
 
@@ -59,10 +65,13 @@
             client's behaviour (e.g., reconnection attempts or reconnection delay).
         client_id: An optional client identifier. This ID is sent to the server as the
             `X-Client-Id` HTTP header. It can be used for logging or authentication
-            purposes on the server side.
+            purposes on the server side. If not provided, it defaults to the hostname
+            of the machine running the client.
         proxy_url: An optional proxy URL to route the connection through. This is useful
             if the service is only reachable via an SSH tunnel or behind a firewall
             (e.g., `socks5://localhost:2222`).
+        auto_update_proxy: If False, disables automatic updates from the server. Useful
+            for request-only clients where real-time synchronization is not needed.
 
     Example:
         Connect to a service directly:
@@ -91,7 +100,7 @@
         ```
     """
 
-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
         *,
         url: str,
@@ -99,6 +108,7 @@
         sio_client_kwargs: dict[str, Any] = {},
         client_id: str | None = None,
         proxy_url: str | None = None,
+        auto_update_proxy: bool = True,  # new argument
     ):
         # Parse the URL to separate base URL and path prefix
         parsed_url = urllib.parse.urlparse(url)
@@ -112,10 +122,11 @@
         self._path_prefix = parsed_url.path.rstrip("/")  # Remove trailing slash if any
         self._url = url
         self._proxy_url = proxy_url
-        self._client_id = client_id
+        self._client_id = client_id or socket.gethostname()
         self._sio_client_kwargs = sio_client_kwargs
         self._loop: asyncio.AbstractEventLoop | None = None
         self._thread: threading.Thread | None = None
+        self._auto_update_proxy = auto_update_proxy
         self.proxy: ProxyClass
         """A proxy object representing the remote service, facilitating interaction as
         if it were local."""
@@ -150,6 +161,17 @@
 
     def _initialize_socketio_client(self) -> None:
         if self._proxy_url is not None:
+            try:
+                import aiohttp_socks.connector
+            except ModuleNotFoundError:
+                raise ModuleNotFoundError(
+                    "Missing dependency 'aiohttp_socks'. To use SOCKS5 proxy support, "
+                    "install the optional 'socks' extra:\n\n"
+                    ' pip install "pydase[socks]"\n\n'
+                    "This is required when specifying a `proxy_url` for "
+                    "`pydase.Client`."
+                )
+
             session = aiohttp.ClientSession(
                 connector=aiohttp_socks.connector.ProxyConnector.from_url(
                     url=self._proxy_url, loop=self._loop
@@ -211,10 +233,12 @@
     async def _setup_events(self) -> None:
         self._sio.on("connect", self._handle_connect)
         self._sio.on("disconnect", self._handle_disconnect)
+        if self._auto_update_proxy:
             self._sio.on("notify", self._handle_update)
 
     async def _handle_connect(self) -> None:
         logger.debug("Connected to '%s' ...", self._url)
+        if self._auto_update_proxy:
             serialized_object = cast(
                 "SerializedDataService", await self._sio.call("service_serialization")
             )
@@ -222,7 +246,6 @@
                 self.proxy, serialized_object=serialized_object
             )
             serialized_object["type"] = "DeviceConnection"
-            if self.proxy._service_representation is not None:
             # need to use object.__setattr__ to not trigger an observer notification
             object.__setattr__(self.proxy, "_service_representation", serialized_object)
 
@@ -240,3 +263,77 @@
             data["data"]["full_access_path"],
             loads(data["data"]["value"]),
         )
+
+    def get_value(self, access_path: str) -> Any:
+        """Retrieve the current value of a remote attribute.
+
+        Args:
+            access_path: The dot-separated path to the attribute in the remote service.
+
+        Returns:
+            The deserialized value of the remote attribute, or None if the client is not
+            connected.
+
+        Example:
+            ```python
+            value = client.get_value("my_device.temperature")
+            print(value)
+            ```
+        """
+
+        if self._loop is not None:
+            return get_value(
+                sio_client=self._sio,
+                loop=self._loop,
+                access_path=access_path,
+            )
+        return None
+
+    def update_value(self, access_path: str, new_value: Any) -> Any:
+        """Set a new value for a remote attribute.
+
+        Args:
+            access_path: The dot-separated path to the attribute in the remote service.
+            new_value: The new value to assign to the attribute.
+
+        Example:
+            ```python
+            client.update_value("my_device.power", True)
+            ```
+        """
+
+        if self._loop is not None:
+            update_value(
+                sio_client=self._sio,
+                loop=self._loop,
+                access_path=access_path,
+                value=new_value,
+            )
+
+    def trigger_method(self, access_path: str, *args: Any, **kwargs: Any) -> Any:
+        """Trigger a remote method with optional arguments.
+
+        Args:
+            access_path: The dot-separated path to the method in the remote service.
+            *args: Positional arguments to pass to the method.
+            **kwargs: Keyword arguments to pass to the method.
+
+        Returns:
+            The return value of the method call, if any.
+
+        Example:
+            ```python
+            result = client.trigger_method("my_device.calibrate", timeout=5)
+            print(result)
+            ```
+        """
+
+        if self._loop is not None:
+            return trigger_method(
+                sio_client=self._sio,
+                loop=self._loop,
+                access_path=access_path,
+                args=list(args),
+                kwargs=kwargs,
+            )
+        return None
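The lazy `aiohttp_socks` import above turns SOCKS5 support into an optional extra that is only needed when `proxy_url` is set. A minimal sketch of routing a client through an SSH gateway, assuming a dynamic SOCKS proxy was opened with `ssh -D 2222 user@gateway` and that the host, port, and attribute path are placeholders; this requires installing the extra with `pip install "pydase[socks]"`:

```python
import pydase

# All Socket.IO traffic is tunnelled through the local SOCKS5 proxy, so the
# service only needs to be reachable from the SSH gateway.
client = pydase.Client(
    url="ws://remote-host:8001",
    proxy_url="socks5://localhost:2222",
)

print(client.get_value("device.voltage"))
```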
@@ -65,19 +65,31 @@ class ProxyClass(ProxyClassMixin, pydase.components.DeviceConnection):
         self.reconnect = reconnect
 
     def serialize(self) -> SerializedObject:
+        current_loop = asyncio.get_event_loop()
+
+        if not self.connected or current_loop == self._loop:
+            logger.debug(
+                "Client not connected, or called from within client event loop - using "
+                "fallback serialization"
+            )
             if self._service_representation is None:
-            serialization_future = cast(
+                serialized_service = pydase.components.DeviceConnection().serialize()
+            else:
+                serialized_service = self._service_representation
+
+        else:
+            future = cast(
                 "asyncio.Future[SerializedDataService]",
                 asyncio.run_coroutine_threadsafe(
                     self._sio.call("service_serialization"), self._loop
                 ),
             )
+            result = future.result()
             # need to use object.__setattr__ to not trigger an observer notification
-            object.__setattr__(
-                self, "_service_representation", serialization_future.result()
-            )
+            object.__setattr__(self, "_service_representation", result)
             if TYPE_CHECKING:
-                self._service_representation = serialization_future.result()
+                self._service_representation = result
+            serialized_service = result
 
         device_connection_value = cast(
             "dict[str, SerializedObject]",
@@ -93,7 +105,7 @@ class ProxyClass(ProxyClassMixin, pydase.components.DeviceConnection):
             "dict[str, SerializedObject]",
             # need to deepcopy to not overwrite the _service_representation dict
             # when adding a prefix with add_prefix_to_full_access_path
-            deepcopy(self._service_representation["value"]),
+            deepcopy(serialized_service["value"]),
         ),
         **device_connection_value,
     }
@@ -74,6 +74,21 @@ def update_value(
     )
 
 
+def get_value(
+    sio_client: socketio.AsyncClient,
+    loop: asyncio.AbstractEventLoop,
+    access_path: str,
+) -> Any:
+    async def get_result() -> Any:
+        return await sio_client.call("get_value", access_path)
+
+    result = asyncio.run_coroutine_threadsafe(
+        get_result(),
+        loop=loop,
+    ).result()
+    return ProxyLoader.loads_proxy(result, sio_client, loop)
+
+
 class ProxyDict(dict[str, Any]):
     def __init__(
         self,
@@ -242,16 +257,11 @@
         self, attr_name: str, serialized_object: SerializedObject
     ) -> None:
         def getter_proxy() -> Any:
-            async def get_result() -> Any:
-                return await self._sio.call(
-                    "get_value", serialized_object["full_access_path"]
-                )
-
-            result = asyncio.run_coroutine_threadsafe(
-                get_result(),
+            return get_value(
+                sio_client=self._sio,
                 loop=self._loop,
-            ).result()
-            return ProxyLoader.loads_proxy(result, self._sio, self._loop)
+                access_path=serialized_object["full_access_path"],
+            )
 
         dict.__setitem__(self._proxy_getters, attr_name, getter_proxy)  # type: ignore
 
@@ -12,7 +12,6 @@ from pydase.observer_pattern.observable.observable import (
 from pydase.utils.helpers import (
     get_class_and_instance_attributes,
     is_descriptor,
-    is_property_attribute,
 )
 from pydase.utils.serialization.serializer import (
     Serializer,
@@ -28,9 +27,6 @@ class DataService(AbstractDataService):
         self.__check_instance_classes()
 
     def __setattr__(self, name: str, value: Any, /) -> None:
-        # Check and warn for unexpected type changes in attributes
-        self._warn_on_type_change(name, value)
-
         # every class defined by the user should inherit from DataService if it is
         # assigned to a public attribute
         if not name.startswith("_") and not inspect.isfunction(value):
@@ -39,21 +35,6 @@ class DataService(AbstractDataService):
         # Set the attribute
         super().__setattr__(name, value)
 
-    def _warn_on_type_change(self, attr_name: str, new_value: Any) -> None:
-        if is_property_attribute(self, attr_name):
-            return
-
-        current_value = getattr(self, attr_name, None)
-        if self._is_unexpected_type_change(current_value, new_value):
-            logger.warning(
-                "Type of '%s' changed from '%s' to '%s'. This may have unwanted "
-                "side effects! Consider setting it to '%s' directly.",
-                attr_name,
-                type(current_value).__name__,
-                type(new_value).__name__,
-                type(current_value).__name__,
-            )
-
     def _is_unexpected_type_change(self, current_value: Any, new_value: Any) -> bool:
         return (
             isinstance(current_value, float) and not isinstance(new_value, float)
@@ -20,6 +20,19 @@ from pydase.utils.serialization.types import SerializedObject
 logger = logging.getLogger(__name__)
 
 
+def _is_nested_attribute(full_access_path: str, changing_attributes: list[str]) -> bool:
+    """Return True if the full_access_path is a nested attribute of any
+    changing_attribute."""
+
+    return any(
+        (
+            full_access_path.startswith((f"{attr}.", f"{attr}["))
+            and full_access_path != attr
+        )
+        for attr in changing_attributes
+    )
+
+
 class DataServiceObserver(PropertyObserver):
     def __init__(self, state_manager: StateManager) -> None:
         self.state_manager = state_manager
@@ -29,11 +42,7 @@ class DataServiceObserver(PropertyObserver):
         super().__init__(state_manager.service)
 
     def on_change(self, full_access_path: str, value: Any) -> None:
-        if any(
-            full_access_path.startswith(changing_attribute)
-            and full_access_path != changing_attribute
-            for changing_attribute in self.changing_attributes
-        ):
+        if _is_nested_attribute(full_access_path, self.changing_attributes):
             return
         cached_value_dict: SerializedObject
 
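The `_is_nested_attribute` helper only suppresses change notifications for paths that are genuinely nested below a currently changing attribute; a sibling whose name merely starts with the same prefix is still reported, which is what the new `test_property_starting_with_dependency_name` test checks further down. A small standalone sketch of the check (the access paths are invented):

```python
def _is_nested_attribute(full_access_path: str, changing_attributes: list[str]) -> bool:
    """Return True if full_access_path is nested inside any changing attribute."""
    return any(
        full_access_path.startswith((f"{attr}.", f"{attr}["))
        and full_access_path != attr
        for attr in changing_attributes
    )


changing = ["my_int"]
print(_is_nested_attribute("my_int.value", changing))  # True  -> notification suppressed
print(_is_nested_attribute("my_int[0]", changing))     # True  -> notification suppressed
print(_is_nested_attribute("my_int_2", changing))      # False -> still notified
```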
File diff suppressed because one or more lines are too long
71 src/pydase/frontend/assets/index-CKS_bS2p.js (new file): file diff suppressed because one or more lines are too long
5 src/pydase/frontend/assets/index-Cs09d5Pk.css (new file): file diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -7,8 +8,8 @@
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <meta name="theme-color" content="#000000" />
     <meta name="description" content="Web site displaying a pydase UI." />
-    <script type="module" crossorigin src="/assets/index-BLJetjaQ.js"></script>
-    <link rel="stylesheet" crossorigin href="/assets/index-DJzFvk4W.css">
+    <script type="module" crossorigin src="/assets/index-CKS_bS2p.js"></script>
+    <link rel="stylesheet" crossorigin href="/assets/index-Cs09d5Pk.css">
   </head>
 
   <script>
@@ -22,13 +22,14 @@ def reverse_dict(original_dict: dict[str, list[str]]) -> dict[str, list[str]]:
 
 def get_property_dependencies(prop: property, prefix: str = "") -> list[str]:
     source_code_string = inspect.getsource(prop.fget)  # type: ignore[arg-type]
-    pattern = r"self\.([^\s\{\}]+)"
+    pattern = r"self\.([^\s\{\}\(\)]+)"
     matches = re.findall(pattern, source_code_string)
     return [prefix + match for match in matches if "(" not in match]
 
 
 class PropertyObserver(Observer):
     def __init__(self, observable: Observable) -> None:
+        self.property_deps_dict: dict[str, list[str]] = {}
         super().__init__(observable)
         self._update_property_deps_dict()
 
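Excluding `(` and `)` from the captured characters changes what `get_property_dependencies` extracts when a dependency is passed as a function argument, which the new `test_dependency_as_function_argument` test exercises. A standalone sketch of the two patterns on an invented property body:

```python
import re

source = '''
@property
def other_int(self) -> int:
    return self.add_one(self.some_int)
'''

old_pattern = r"self\.([^\s\{\}]+)"
new_pattern = r"self\.([^\s\{\}\(\)]+)"

# The old pattern swallows the whole call expression, and the subsequent
# '"(" not in match' filter drops it, so `some_int` was never registered
# as a dependency of `other_int`.
print(re.findall(old_pattern, source))  # ['add_one(self.some_int)']
# The new pattern stops at the parenthesis and captures the argument separately.
print(re.findall(new_pattern, source))  # ['add_one', 'some_int']
```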
@@ -14,7 +14,6 @@ from pydase.data_service.data_service_observer import DataServiceObserver
 from pydase.data_service.state_manager import StateManager
 from pydase.server.web_server import WebServer
 from pydase.task.autostart import autostart_service_tasks
-from pydase.utils.helpers import current_event_loop_exists
 
 HANDLED_SIGNALS = (
     signal.SIGINT,  # Unix signal 2. Sent by Ctrl+C.
@@ -136,6 +135,14 @@
         autosave_interval: Interval in seconds between automatic state save events.
             If set to `None`, automatic saving is disabled. Defaults to 30 seconds.
         **kwargs: Additional keyword arguments.
+
+    # Advanced
+    - [`post_startup`][pydase.Server.post_startup] hook:
+
+        This method is intended to be overridden in subclasses. It runs immediately
+        after all servers (web and additional) are initialized and before entering the
+        main event loop. You can use this hook to register custom logic after the
+        server is fully started.
     """
 
     def __init__(  # noqa: PLR0913
@@ -162,6 +169,10 @@
         self._additional_servers = additional_servers
         self.should_exit = False
         self.servers: dict[str, asyncio.Future[Any]] = {}
+
+        self._loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(self._loop)
+
         self._state_manager = StateManager(
             service=self._service,
             filename=filename,
@@ -170,11 +181,14 @@
         self._observer = DataServiceObserver(self._state_manager)
         self._state_manager.load_state()
         autostart_service_tasks(self._service)
-        if not current_event_loop_exists():
-            self._loop = asyncio.new_event_loop()
-            asyncio.set_event_loop(self._loop)
-        else:
-            self._loop = asyncio.get_event_loop()
+
+        self._web_server = WebServer(
+            data_service_observer=self._observer,
+            host=self._host,
+            port=self._web_port,
+            enable_frontend=self._enable_web,
+            **self._kwargs,
+        )
 
     def run(self) -> None:
         """
@@ -193,6 +207,7 @@
         logger.info("Started server process [%s]", process_id)
 
         await self.startup()
+        await self.post_startup()
         if self.should_exit:
             return
         await self.main_loop()
@@ -204,6 +219,10 @@
         self._loop.set_exception_handler(self.custom_exception_handler)
         self.install_signal_handlers()
 
+        server_task = self._loop.create_task(self._web_server.serve())
+        server_task.add_done_callback(self._handle_server_shutdown)
+        self.servers["web"] = server_task
+
         for server in self._additional_servers:
             addin_server = server["server"](
                 data_service_observer=self._observer,
@@ -219,17 +238,6 @@
             server_task = self._loop.create_task(addin_server.serve())
             server_task.add_done_callback(self._handle_server_shutdown)
             self.servers[server_name] = server_task
-        if self._enable_web:
-            self._web_server = WebServer(
-                data_service_observer=self._observer,
-                host=self._host,
-                port=self._web_port,
-                **self._kwargs,
-            )
-            server_task = self._loop.create_task(self._web_server.serve())
-
-            server_task.add_done_callback(self._handle_server_shutdown)
-            self.servers["web"] = server_task
 
         self._loop.create_task(self._state_manager.autosave())
 
@@ -260,6 +268,9 @@
         logger.debug("Cancelling tasks")
         await self.__cancel_tasks()
 
+    async def post_startup(self) -> None:
+        """Override this in a subclass to register custom logic after startup."""
+
     async def __cancel_servers(self) -> None:
         for server_name, task in self.servers.items():
             task.cancel()
@@ -309,7 +320,7 @@
         # here we exclude most kinds of exceptions from triggering this kind of shutdown
         exc = context.get("exception")
         if type(exc) not in [RuntimeError, KeyboardInterrupt, asyncio.CancelledError]:
-            if self._enable_web:
+            if loop.is_running():
 
                 async def emit_exception() -> None:
                     try:
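The new `post_startup` coroutine gives subclasses a hook that runs once the web server and any additional servers have been scheduled, but before the main loop starts. A minimal sketch of overriding it; the service class and log message are invented placeholders:

```python
import logging

import pydase

logger = logging.getLogger(__name__)


class MyService(pydase.DataService):
    value = 0.0


class MyServer(pydase.Server):
    async def post_startup(self) -> None:
        # Runs after startup(), before main_loop(); register custom logic here.
        logger.info("Servers are up, running custom post-startup logic")


if __name__ == "__main__":
    MyServer(MyService()).run()
```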
@@ -115,7 +115,7 @@ def setup_sio_server(
     def sio_callback(
         full_access_path: str, value: Any, cached_value_dict: SerializedObject
     ) -> None:
-        if cached_value_dict != {}:
+        if cached_value_dict != {} and loop.is_running():
 
             async def notify() -> None:
                 try:
@@ -81,6 +81,7 @@
         host: str,
         port: int,
         *,
+        enable_frontend: bool = True,
         css: str | Path | None = None,
         favicon_path: str | Path | None = None,
         enable_cors: bool = True,
@@ -97,19 +98,18 @@
         self.enable_cors = enable_cors
         self.frontend_src = frontend_src
         self.favicon_path: Path | str = favicon_path  # type: ignore
+        self.enable_frontend = enable_frontend
 
         if self.favicon_path is None:
             self.favicon_path = self.frontend_src / "favicon.ico"
 
         self._service_config_dir = config_dir
         self._generate_web_settings = generate_web_settings
-        self._loop: asyncio.AbstractEventLoop
+        self._loop = asyncio.get_event_loop()
+        self._sio = setup_sio_server(self.observer, self.enable_cors, self._loop)
         self._initialise_configuration()
 
     async def serve(self) -> None:
-        self._loop = asyncio.get_running_loop()
-        self._sio = setup_sio_server(self.observer, self.enable_cors, self._loop)
-
         async def index(
             request: aiohttp.web.Request,
         ) -> aiohttp.web.Response | aiohttp.web.FileResponse:
@@ -162,6 +162,7 @@
 
         # Define routes
         self._sio.attach(app, socketio_path="/ws/socket.io")
+        if self.enable_frontend:
             app.router.add_static("/assets", self.frontend_src / "assets")
             app.router.add_get("/favicon.ico", self._favicon_route)
             app.router.add_get("/service-properties", self._service_properties_route)
@@ -169,6 +170,7 @@
             app.router.add_get("/custom.css", self._styles_route)
         app.add_subapp("/api/", create_api_application(self.state_manager))
 
+        if self.enable_frontend:
             app.router.add_get(r"/", index)
             app.router.add_get(r"/{tail:.*}", index)
 
@@ -231,6 +231,6 @@ def current_event_loop_exists() -> bool:
     import asyncio
 
     try:
-        return not asyncio.get_running_loop().is_closed()
+        return not asyncio.get_event_loop().is_closed()
     except RuntimeError:
         return False
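This matters when an event loop has been created and registered for the thread but is not running yet, which is how `Server.__init__` now sets things up. A small sketch of the difference, independent of pydase:

```python
import asyncio

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)

# Outside of a running loop, get_running_loop() raises:
try:
    asyncio.get_running_loop()
except RuntimeError:
    print("no running loop")

# get_event_loop() returns the loop registered for this thread even though it
# is not running yet, so a helper based on it can report that a loop exists.
print(asyncio.get_event_loop() is loop)          # True
print(not asyncio.get_event_loop().is_closed())  # True
```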
@@ -165,6 +165,7 @@ class SocketIOHandler(logging.Handler):
         log_entry = self.format(record)
 
         loop = asyncio.get_event_loop()
+        if loop.is_running():
             loop.create_task(
                 self._sio.emit(
                     "log",
@@ -158,7 +158,7 @@ class Serializer:
             "doc": None,
             "readonly": True,
             "type": "Exception",
-            "value": obj.args[0],
+            "value": obj.args[0] if len(obj.args) > 0 else "",
             "name": obj.__class__.__name__,
         }
 
@@ -168,10 +168,25 @@ def test_context_manager(pydase_client: pydase.Client) -> None:
 def test_client_id(
     pydase_client: pydase.Client, caplog: pytest.LogCaptureFixture
 ) -> None:
+    import socket
+
     pydase.Client(url="ws://localhost:9999")
 
-    assert "Client [sid=" in caplog.text
+    assert f"Client [id={socket.gethostname()}]" in caplog.text
     caplog.clear()
 
     pydase.Client(url="ws://localhost:9999", client_id="my_service")
     assert "Client [id=my_service] connected" in caplog.text
+
+
+def test_get_value(
+    pydase_client: pydase.Client, caplog: pytest.LogCaptureFixture
+) -> None:
+    pydase_client.update_value("sub_service.name", "Other name")
+
+    assert pydase_client.get_value("sub_service.name") == "Other name"
+
+    assert (
+        pydase_client.trigger_method("my_async_method", input_str="Hello World")
+        == "Hello World"
+    )
22 tests/client/test_proxy_class.py (new file)

@@ -0,0 +1,22 @@
+import asyncio
+from unittest.mock import AsyncMock, call, patch
+
+import pytest
+
+from pydase import components
+from pydase.client.proxy_class import ProxyClass
+
+
+@pytest.mark.asyncio
+async def test_serialize_fallback_inside_event_loop() -> None:
+    loop = asyncio.get_running_loop()
+    mock_sio = AsyncMock()
+    proxy = ProxyClass(sio_client=mock_sio, loop=loop, reconnect=lambda: None)
+
+    with patch.object(
+        components.DeviceConnection, "serialize", return_value={"value": {}}
+    ) as mock_fallback:
+        result = proxy.serialize()
+
+    mock_fallback.assert_has_calls(calls=[call(), call()])
+    assert isinstance(result, dict)
@@ -1,10 +1,17 @@
+import sys
+
+from pytest import LogCaptureFixture
+
 import pydase
 import pydase.components
 from pydase.data_service.data_service_observer import DataServiceObserver
 from pydase.data_service.state_manager import StateManager
 from pydase.utils.serialization.serializer import dump
-from pytest import LogCaptureFixture
+
+if sys.version_info < (3, 13):
+    PATHLIB_PATH = "pathlib.Path"
+else:
+    PATHLIB_PATH = "pathlib._local.Path"
 
 
 def test_image_functions(caplog: LogCaptureFixture) -> None:
@@ -106,7 +113,7 @@ def test_image_serialization() -> None:
         "signature": {
             "parameters": {
                 "path": {
-                    "annotation": "pathlib.Path | str",
+                    "annotation": f"{PATHLIB_PATH} | str",
                     "default": {},
                 }
             },
@@ -1,38 +1,13 @@
 from enum import Enum
 from typing import Any
 
-import pydase
-import pydase.units as u
 import pytest
-from pydase import DataService
-from pydase.data_service.data_service_observer import DataServiceObserver
-from pydase.data_service.state_manager import StateManager
-from pydase.utils.decorators import FunctionDefinitionError, frontend
 from pytest import LogCaptureFixture
 
+import pydase
+import pydase.units as u
+from pydase import DataService
+from pydase.utils.decorators import FunctionDefinitionError, frontend
-
-def test_unexpected_type_change_warning(caplog: LogCaptureFixture) -> None:
-    class ServiceClass(DataService):
-        attr_1 = 1.0
-        current = 1.0 * u.units.A
-
-    service_instance = ServiceClass()
-    state_manager = StateManager(service_instance)
-    DataServiceObserver(state_manager)
-    service_instance.attr_1 = 2
-
-    assert "'attr_1' changed to '2'" in caplog.text
-    assert (
-        "Type of 'attr_1' changed from 'float' to 'int'. This may have unwanted "
-        "side effects! Consider setting it to 'float' directly." in caplog.text
-    )
-
-    service_instance.current = 2
-    assert "'current' changed to '2'" in caplog.text
-    assert (
-        "Type of 'current' changed from 'Quantity' to 'int'. This may have unwanted "
-        "side effects! Consider setting it to 'Quantity' directly." in caplog.text
-    )
 
 
 def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None:
@@ -1,8 +1,9 @@
 import logging
 from typing import Any
 
-import pydase
 import pytest
+
+import pydase
 from pydase.data_service.data_service_observer import DataServiceObserver
 from pydase.data_service.state_manager import StateManager
 from pydase.utils.serialization.serializer import SerializationError, dump
@@ -241,3 +242,42 @@ def test_read_only_dict_property(caplog: pytest.LogCaptureFixture) -> None:
     service_instance._dict_attr["dotted.key"] = 2.0
 
     assert "'dict_attr[\"dotted.key\"]' changed to '2.0'" in caplog.text
+
+
+def test_dependency_as_function_argument(caplog: pytest.LogCaptureFixture) -> None:
+    class MyObservable(pydase.DataService):
+        some_int = 0
+
+        @property
+        def other_int(self) -> int:
+            return self.add_one(self.some_int)
+
+        def add_one(self, value: int) -> int:
+            return value + 1
+
+    service_instance = MyObservable()
+    state_manager = StateManager(service=service_instance)
+    DataServiceObserver(state_manager)
+
+    service_instance.some_int = 1337
+
+    assert "'other_int' changed to '1338'" in caplog.text
+
+
+def test_property_starting_with_dependency_name(
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    class MyObservable(pydase.DataService):
+        my_int = 0
+
+        @property
+        def my_int_2(self) -> int:
+            return self.my_int + 1
+
+    service_instance = MyObservable()
+    state_manager = StateManager(service=service_instance)
+    DataServiceObserver(state_manager)
+
+    service_instance.my_int = 1337
+
+    assert "'my_int_2' changed to '1338'" in caplog.text
@@ -1225,3 +1225,22 @@ def test_add_prefix_to_full_access_path(
     serialized_obj: SerializedObject, prefix: str, expected: SerializedObject
 ) -> None:
     assert add_prefix_to_full_access_path(serialized_obj, prefix) == expected
+
+
+def test_serialize_exception() -> None:
+    assert dump(Exception()) == {
+        "doc": None,
+        "full_access_path": "",
+        "name": "Exception",
+        "readonly": True,
+        "type": "Exception",
+        "value": "",
+    }
+    assert dump(Exception("Exception message")) == {
+        "doc": None,
+        "full_access_path": "",
+        "name": "Exception",
+        "readonly": True,
+        "type": "Exception",
+        "value": "Exception message",
+    }