Mirror of https://github.com/tiqi-group/pydase.git (synced 2025-12-19 20:51:19 +01:00)

Compare commits (45 commits)
| SHA1 |
|---|
| a4b4f179c6 |
| c6beca3961 |
| 2fa8240e54 |
| 369587a50c |
| 25343f6909 |
| c136c9f3de |
| 8897c2fe4c |
| 80c5c4e99d |
| 423441a74c |
| 9ec60e3891 |
| 8bde104322 |
| 9b57b6984e |
| e5b89f2581 |
| ff1654e65c |
| cded80c8e5 |
| 87a33b6293 |
| 6d621daaac |
| 8c1a50c106 |
| a1545d341b |
| 28a1cc7cd3 |
| c968708b85 |
| fef8606d17 |
| 82286c8da0 |
| 533826a398 |
| 982875dee6 |
| e54710cd4d |
| f48f7aacfb |
| e97aab4f36 |
| 015c66d5a6 |
| 9827d0747c |
| 38a12fb72e |
| fb6ec16bf5 |
| 9ee498eb5c |
| d015333123 |
| c4e7fe66a8 |
| 5f1451a1c1 |
| 4c28cbaf7d |
| a97b8eb2b4 |
| f6b5c1b567 |
| f92d525588 |
| 61b69d77cc |
| 8abe9357cf |
| 0dace2a9f0 |
| 9992ade0ed |
| 6c2cebada2 |
.github/workflows/python-package.yml (vendored, 2 lines changed)
@@ -16,7 +16,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.10", "3.11", "3.12"]
+        python-version: ["3.10", "3.11", "3.12", "3.13"]

     steps:
       - uses: actions/checkout@v4
@@ -58,7 +58,7 @@ class MyService(pydase.DataService):
     proxy = pydase.Client(
         url="ws://<ip_addr>:<service_port>",
         block_until_connected=False,
-        client_id="my_pydase_client_id",
+        client_id="my_pydase_client_id",  # optional, defaults to system hostname
     ).proxy

 # For SSL-encrypted services, use the wss protocol
@@ -77,7 +77,7 @@ if __name__ == "__main__":
 In this example:

 - The `MyService` class has a `proxy` attribute that connects to a `pydase` service at `<ip_addr>:<service_port>`.
 - By setting `block_until_connected=False`, the service can start without waiting for the connection to succeed.
-- By setting `client_id`, the server will log a descriptive identifier for this client via the `X-Client-Id` HTTP header.
+- The `client_id` is optional. If not specified, it defaults to the system hostname, which will be sent in the `X-Client-Id` HTTP header for logging or authentication on the server side.

 ## Custom `socketio.AsyncClient` Connection Parameters
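With this change, `client_id` is purely optional. A minimal sketch of the resulting behaviour (the URL is a placeholder, and `_client_id` is an internal attribute inspected here only to make the fallback visible):

```python
import socket

import pydase

# Explicit identifier: sent to the server in the X-Client-Id header.
explicit = pydase.Client(
    url="ws://localhost:8001",
    block_until_connected=False,
    client_id="my_pydase_client_id",
)

# No identifier given: the client falls back to the machine's hostname,
# so the X-Client-Id header carries socket.gethostname().
implicit = pydase.Client(
    url="ws://localhost:8001",
    block_until_connected=False,
)
assert implicit._client_id == socket.gethostname()
```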
frontend/package-lock.json (generated, 1256 lines changed; diff suppressed because it is too large)
@@ -35,6 +35,6 @@
     "prettier": "3.3.2",
     "typescript": "^5.7.3",
     "typescript-eslint": "^7.18.0",
-    "vite": "^5.4.12"
+    "vite": "^6.3.5"
   }
 }
@@ -1,8 +1,9 @@
 import { useState, useEffect } from "react";
+import { authority } from "../socket";

 export default function useLocalStorage(key: string, defaultValue: unknown) {
   const [value, setValue] = useState(() => {
-    const storedValue = localStorage.getItem(key);
+    const storedValue = localStorage.getItem(`${authority}:${key}`);
     if (storedValue) {
       return JSON.parse(storedValue);
     }
@@ -11,7 +12,7 @@ export default function useLocalStorage(key: string, defaultValue: unknown) {

   useEffect(() => {
     if (value === undefined) return;
-    localStorage.setItem(key, JSON.stringify(value));
+    localStorage.setItem(`${authority}:${key}`, JSON.stringify(value));
   }, [value, key]);

   return [value, setValue];
@@ -1,6 +1,6 @@
 [project]
 name = "pydase"
-version = "0.10.13"
+version = "0.10.17"
 description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases."
 authors = [
     {name = "Mose Müller",email = "mosemueller@gmail.com"}
@@ -1,13 +1,14 @@
 import asyncio
 import logging
+import socket
 import sys
 import threading
 import urllib.parse
+from builtins import ModuleNotFoundError
 from types import TracebackType
 from typing import TYPE_CHECKING, Any, TypedDict, cast

 import aiohttp
-import aiohttp_socks.connector
 import socketio  # type: ignore

 from pydase.client.proxy_class import ProxyClass
@@ -59,7 +60,8 @@ class Client:
             client's behaviour (e.g., reconnection attempts or reconnection delay).
         client_id: An optional client identifier. This ID is sent to the server as the
             `X-Client-Id` HTTP header. It can be used for logging or authentication
-            purposes on the server side.
+            purposes on the server side. If not provided, it defaults to the hostname
+            of the machine running the client.
         proxy_url: An optional proxy URL to route the connection through. This is useful
             if the service is only reachable via an SSH tunnel or behind a firewall
             (e.g., `socks5://localhost:2222`).
@@ -112,7 +114,7 @@ class Client:
         self._path_prefix = parsed_url.path.rstrip("/")  # Remove trailing slash if any
         self._url = url
         self._proxy_url = proxy_url
-        self._client_id = client_id
+        self._client_id = client_id or socket.gethostname()
         self._sio_client_kwargs = sio_client_kwargs
         self._loop: asyncio.AbstractEventLoop | None = None
         self._thread: threading.Thread | None = None
@@ -150,6 +152,17 @@ class Client:

     def _initialize_socketio_client(self) -> None:
         if self._proxy_url is not None:
+            try:
+                import aiohttp_socks.connector
+            except ModuleNotFoundError:
+                raise ModuleNotFoundError(
+                    "Missing dependency 'aiohttp_socks'. To use SOCKS5 proxy support, "
+                    "install the optional 'socks' extra:\n\n"
+                    '    pip install "pydase[socks]"\n\n'
+                    "This is required when specifying a `proxy_url` for "
+                    "`pydase.Client`."
+                )
+
             session = aiohttp.ClientSession(
                 connector=aiohttp_socks.connector.ProxyConnector.from_url(
                     url=self._proxy_url, loop=self._loop
                 )
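The import guard above only fires when a `proxy_url` is actually supplied, so `aiohttp_socks` becomes an optional dependency. A minimal usage sketch (the addresses are placeholders; the SSH command is just one way to obtain a local SOCKS5 endpoint):

```python
import pydase

# Requires the optional extra:  pip install "pydase[socks]"
# Route the connection through a local SOCKS5 proxy, e.g. a tunnel opened
# with:  ssh -D 2222 user@gateway
client = pydase.Client(
    url="ws://<ip_addr>:<service_port>",
    proxy_url="socks5://localhost:2222",
    block_until_connected=False,
)
proxy = client.proxy
```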
@@ -20,6 +20,19 @@ from pydase.utils.serialization.types import SerializedObject
 logger = logging.getLogger(__name__)


+def _is_nested_attribute(full_access_path: str, changing_attributes: list[str]) -> bool:
+    """Return True if the full_access_path is a nested attribute of any
+    changing_attribute."""
+
+    return any(
+        (
+            full_access_path.startswith((f"{attr}.", f"{attr}["))
+            and full_access_path != attr
+        )
+        for attr in changing_attributes
+    )
+
+
 class DataServiceObserver(PropertyObserver):
     def __init__(self, state_manager: StateManager) -> None:
         self.state_manager = state_manager
@@ -29,11 +42,7 @@ class DataServiceObserver(PropertyObserver):
         super().__init__(state_manager.service)

     def on_change(self, full_access_path: str, value: Any) -> None:
-        if any(
-            full_access_path.startswith(changing_attribute)
-            and full_access_path != changing_attribute
-            for changing_attribute in self.changing_attributes
-        ):
+        if _is_nested_attribute(full_access_path, self.changing_attributes):
             return
         cached_value_dict: SerializedObject
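The stricter separator check matters when one attribute name is a prefix of another. A small self-contained illustration of the intended behaviour (a local re-implementation of the helper's logic on hypothetical access paths):

```python
def is_nested(full_access_path: str, changing_attributes: list[str]) -> bool:
    # Same logic as the new _is_nested_attribute helper: a path only counts as
    # nested if it continues with "." (attribute access) or "[" (indexing).
    return any(
        full_access_path.startswith((f"{attr}.", f"{attr}["))
        and full_access_path != attr
        for attr in changing_attributes
    )


# Genuinely nested paths are still suppressed while their parent is changing.
assert is_nested("device.voltage", ["device"]) is True
assert is_nested("readings[0]", ["readings"]) is True

# "my_int_2" merely shares a prefix with "my_int"; the old plain startswith()
# check wrongly treated it as nested and swallowed its change notification.
assert is_nested("my_int_2", ["my_int"]) is False
```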
src/pydase/frontend/assets/index-Cs09d5Pk.css (new file, 5 lines; diff suppressed because one or more lines are too long)
src/pydase/frontend/assets/index-XZbNXHJp.js (new file, 71 lines; diff suppressed because one or more lines are too long)
@@ -7,8 +7,8 @@
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <meta name="theme-color" content="#000000" />
     <meta name="description" content="Web site displaying a pydase UI." />
-    <script type="module" crossorigin src="/assets/index-BLJetjaQ.js"></script>
-    <link rel="stylesheet" crossorigin href="/assets/index-DJzFvk4W.css">
+    <script type="module" crossorigin src="/assets/index-XZbNXHJp.js"></script>
+    <link rel="stylesheet" crossorigin href="/assets/index-Cs09d5Pk.css">
   </head>

   <script>
@@ -22,7 +22,7 @@ def reverse_dict(original_dict: dict[str, list[str]]) -> dict[str, list[str]]:

 def get_property_dependencies(prop: property, prefix: str = "") -> list[str]:
     source_code_string = inspect.getsource(prop.fget)  # type: ignore[arg-type]
-    pattern = r"self\.([^\s\{\}]+)"
+    pattern = r"self\.([^\s\{\}\(\)]+)"
     matches = re.findall(pattern, source_code_string)
     return [prefix + match for match in matches if "(" not in match]

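The extra `\(\)` in the character class keeps call expressions from hiding dependencies that are passed as function arguments. A quick illustration (the property body is hypothetical):

```python
import re

source = """
@property
def other_int(self) -> int:
    return self.add_one(self.some_int)
"""

old_pattern = r"self\.([^\s\{\}]+)"
new_pattern = r"self\.([^\s\{\}\(\)]+)"

# Old pattern: the greedy match swallows "add_one(self.some_int)" whole; the
# later "(" filter then drops it, so the dependency on some_int is lost.
print(re.findall(old_pattern, source))  # ['add_one(self.some_int)']

# New pattern stops at parentheses, so the real dependency "some_int" is
# captured as well (the extra "add_one" match refers to a method, not an
# observed attribute).
print(re.findall(new_pattern, source))  # ['add_one', 'some_int']
```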
@@ -14,7 +14,6 @@ from pydase.data_service.data_service_observer import DataServiceObserver
 from pydase.data_service.state_manager import StateManager
 from pydase.server.web_server import WebServer
 from pydase.task.autostart import autostart_service_tasks
-from pydase.utils.helpers import current_event_loop_exists

 HANDLED_SIGNALS = (
     signal.SIGINT,  # Unix signal 2. Sent by Ctrl+C.
@@ -136,6 +135,14 @@ class Server:
         autosave_interval: Interval in seconds between automatic state save events.
             If set to `None`, automatic saving is disabled. Defaults to 30 seconds.
         **kwargs: Additional keyword arguments.
+
+    # Advanced
+    - [`post_startup`][pydase.Server.post_startup] hook:
+
+      This method is intended to be overridden in subclasses. It runs immediately
+      after all servers (web and additional) are initialized and before entering the
+      main event loop. You can use this hook to register custom logic after the
+      server is fully started.
     """

     def __init__(  # noqa: PLR0913
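The docstring above describes the new hook; a minimal sketch of how a subclass might use it (the service class and log message are illustrative only):

```python
import logging

import pydase

logger = logging.getLogger(__name__)


class MyService(pydase.DataService):
    value = 0.0


class MyServer(pydase.Server):
    async def post_startup(self) -> None:
        # Runs once all servers are initialized, before the main event loop.
        logger.info("All servers are up, registering custom logic")


if __name__ == "__main__":
    MyServer(MyService()).run()
```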
@@ -162,6 +169,10 @@ class Server:
         self._additional_servers = additional_servers
         self.should_exit = False
         self.servers: dict[str, asyncio.Future[Any]] = {}
+
+        self._loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(self._loop)
+
         self._state_manager = StateManager(
             service=self._service,
             filename=filename,
@@ -170,11 +181,14 @@ class Server:
         self._observer = DataServiceObserver(self._state_manager)
         self._state_manager.load_state()
         autostart_service_tasks(self._service)
-        if not current_event_loop_exists():
-            self._loop = asyncio.new_event_loop()
-            asyncio.set_event_loop(self._loop)
-        else:
-            self._loop = asyncio.get_event_loop()

+        self._web_server = WebServer(
+            data_service_observer=self._observer,
+            host=self._host,
+            port=self._web_port,
+            enable_frontend=self._enable_web,
+            **self._kwargs,
+        )
+
     def run(self) -> None:
         """
@@ -193,6 +207,7 @@ class Server:
         logger.info("Started server process [%s]", process_id)

         await self.startup()
+        await self.post_startup()
         if self.should_exit:
             return
         await self.main_loop()
@@ -204,6 +219,10 @@ class Server:
         self._loop.set_exception_handler(self.custom_exception_handler)
         self.install_signal_handlers()

+        server_task = self._loop.create_task(self._web_server.serve())
+        server_task.add_done_callback(self._handle_server_shutdown)
+        self.servers["web"] = server_task
+
         for server in self._additional_servers:
             addin_server = server["server"](
                 data_service_observer=self._observer,
@@ -219,17 +238,6 @@ class Server:
             server_task = self._loop.create_task(addin_server.serve())
             server_task.add_done_callback(self._handle_server_shutdown)
             self.servers[server_name] = server_task
-        if self._enable_web:
-            self._web_server = WebServer(
-                data_service_observer=self._observer,
-                host=self._host,
-                port=self._web_port,
-                **self._kwargs,
-            )
-            server_task = self._loop.create_task(self._web_server.serve())
-
-            server_task.add_done_callback(self._handle_server_shutdown)
-            self.servers["web"] = server_task

         self._loop.create_task(self._state_manager.autosave())
@@ -260,6 +268,9 @@ class Server:
         logger.debug("Cancelling tasks")
         await self.__cancel_tasks()

+    async def post_startup(self) -> None:
+        """Override this in a subclass to register custom logic after startup."""
+
     async def __cancel_servers(self) -> None:
         for server_name, task in self.servers.items():
             task.cancel()
@@ -309,7 +320,7 @@ class Server:
         # here we exclude most kinds of exceptions from triggering this kind of shutdown
         exc = context.get("exception")
         if type(exc) not in [RuntimeError, KeyboardInterrupt, asyncio.CancelledError]:
-            if self._enable_web:
+            if loop.is_running():

                 async def emit_exception() -> None:
                     try:
@@ -115,7 +115,7 @@ def setup_sio_server(
     def sio_callback(
         full_access_path: str, value: Any, cached_value_dict: SerializedObject
     ) -> None:
-        if cached_value_dict != {}:
+        if cached_value_dict != {} and loop.is_running():

             async def notify() -> None:
                 try:
@@ -81,6 +81,7 @@ class WebServer:
         host: str,
         port: int,
         *,
+        enable_frontend: bool = True,
         css: str | Path | None = None,
         favicon_path: str | Path | None = None,
         enable_cors: bool = True,
@@ -97,19 +98,18 @@ class WebServer:
         self.enable_cors = enable_cors
         self.frontend_src = frontend_src
         self.favicon_path: Path | str = favicon_path  # type: ignore
+        self.enable_frontend = enable_frontend

         if self.favicon_path is None:
             self.favicon_path = self.frontend_src / "favicon.ico"

         self._service_config_dir = config_dir
         self._generate_web_settings = generate_web_settings
         self._loop: asyncio.AbstractEventLoop
-        self._loop = asyncio.get_event_loop()
-        self._sio = setup_sio_server(self.observer, self.enable_cors, self._loop)
         self._initialise_configuration()

     async def serve(self) -> None:
+        self._loop = asyncio.get_running_loop()
+        self._sio = setup_sio_server(self.observer, self.enable_cors, self._loop)
+
         async def index(
             request: aiohttp.web.Request,
         ) -> aiohttp.web.Response | aiohttp.web.FileResponse:
@@ -162,15 +162,17 @@ class WebServer:

         # Define routes
         self._sio.attach(app, socketio_path="/ws/socket.io")
-        app.router.add_static("/assets", self.frontend_src / "assets")
-        app.router.add_get("/favicon.ico", self._favicon_route)
-        app.router.add_get("/service-properties", self._service_properties_route)
-        app.router.add_get("/web-settings", self._web_settings_route)
-        app.router.add_get("/custom.css", self._styles_route)
+        if self.enable_frontend:
+            app.router.add_static("/assets", self.frontend_src / "assets")
+            app.router.add_get("/favicon.ico", self._favicon_route)
+            app.router.add_get("/service-properties", self._service_properties_route)
+            app.router.add_get("/web-settings", self._web_settings_route)
+            app.router.add_get("/custom.css", self._styles_route)
         app.add_subapp("/api/", create_api_application(self.state_manager))

-        app.router.add_get(r"/", index)
-        app.router.add_get(r"/{tail:.*}", index)
+        if self.enable_frontend:
+            app.router.add_get(r"/", index)
+            app.router.add_get(r"/{tail:.*}", index)

         await aiohttp.web._run_app(
             app,
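With `enable_frontend=False` only the socket.io endpoint and the `/api/` sub-application are registered; static assets and the frontend routes are skipped. A rough sketch of driving `WebServer` directly in that headless mode (assuming the remaining keyword arguments keep their defaults; host, port, and the service class are placeholders):

```python
import asyncio

import pydase
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.server.web_server import WebServer


class MyService(pydase.DataService):
    value = 0.0


async def main() -> None:
    state_manager = StateManager(service=MyService())
    observer = DataServiceObserver(state_manager)

    # Headless mode: no /assets, /favicon.ico, or index routes are added.
    server = WebServer(
        data_service_observer=observer,
        host="0.0.0.0",
        port=8001,
        enable_frontend=False,
    )
    await server.serve()


if __name__ == "__main__":
    asyncio.run(main())
```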
@@ -231,6 +231,6 @@ def current_event_loop_exists() -> bool:
     import asyncio

     try:
-        return not asyncio.get_running_loop().is_closed()
+        return not asyncio.get_event_loop().is_closed()
     except RuntimeError:
         return False
@@ -165,15 +165,16 @@ class SocketIOHandler(logging.Handler):
         log_entry = self.format(record)

         loop = asyncio.get_event_loop()
-        loop.create_task(
-            self._sio.emit(
-                "log",
-                {
-                    "levelname": record.levelname,
-                    "message": log_entry,
-                },
-            )
-        )
+        if loop.is_running():
+            loop.create_task(
+                self._sio.emit(
+                    "log",
+                    {
+                        "levelname": record.levelname,
+                        "message": log_entry,
+                    },
+                )
+            )


 def setup_logging() -> None:
@@ -168,9 +168,11 @@ def test_context_manager(pydase_client: pydase.Client) -> None:
 def test_client_id(
     pydase_client: pydase.Client, caplog: pytest.LogCaptureFixture
 ) -> None:
+    import socket
+
     pydase.Client(url="ws://localhost:9999")

     assert "Client [sid=" in caplog.text
+    assert f"Client [id={socket.gethostname()}]" in caplog.text
     caplog.clear()

     pydase.Client(url="ws://localhost:9999", client_id="my_service")
@@ -1,10 +1,17 @@
+import sys
+
+from pytest import LogCaptureFixture
+
 import pydase
 import pydase.components
 from pydase.data_service.data_service_observer import DataServiceObserver
 from pydase.data_service.state_manager import StateManager
 from pydase.utils.serialization.serializer import dump
-from pytest import LogCaptureFixture
+
+if sys.version_info < (3, 13):
+    PATHLIB_PATH = "pathlib.Path"
+else:
+    PATHLIB_PATH = "pathlib._local.Path"


 def test_image_functions(caplog: LogCaptureFixture) -> None:
@@ -106,7 +113,7 @@ def test_image_serialization() -> None:
             "signature": {
                 "parameters": {
                     "path": {
-                        "annotation": "pathlib.Path | str",
+                        "annotation": f"{PATHLIB_PATH} | str",
                         "default": {},
                     }
                 },
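The `PATHLIB_PATH` switch accounts for Python 3.13 moving the `Path` implementation into `pathlib._local`, which changes the fully qualified class name that ends up in serialized signatures. A quick way to see the difference on any interpreter:

```python
import sys
from pathlib import Path

cls = type(Path("."))
# Python 3.12 and earlier prints "pathlib.Path",
# Python 3.13 and later prints "pathlib._local.Path".
print(f"{cls.__module__}.{cls.__qualname__}", sys.version_info[:2])
```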
@@ -1,8 +1,9 @@
 import logging
 from typing import Any

-import pydase
 import pytest
+
+import pydase
 from pydase.data_service.data_service_observer import DataServiceObserver
 from pydase.data_service.state_manager import StateManager
 from pydase.utils.serialization.serializer import SerializationError, dump
@@ -241,3 +242,42 @@ def test_read_only_dict_property(caplog: pytest.LogCaptureFixture) -> None:
     service_instance._dict_attr["dotted.key"] = 2.0

     assert "'dict_attr[\"dotted.key\"]' changed to '2.0'" in caplog.text
+
+
+def test_dependency_as_function_argument(caplog: pytest.LogCaptureFixture) -> None:
+    class MyObservable(pydase.DataService):
+        some_int = 0
+
+        @property
+        def other_int(self) -> int:
+            return self.add_one(self.some_int)
+
+        def add_one(self, value: int) -> int:
+            return value + 1
+
+    service_instance = MyObservable()
+    state_manager = StateManager(service=service_instance)
+    DataServiceObserver(state_manager)
+
+    service_instance.some_int = 1337
+
+    assert "'other_int' changed to '1338'" in caplog.text
+
+
+def test_property_starting_with_dependency_name(
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    class MyObservable(pydase.DataService):
+        my_int = 0
+
+        @property
+        def my_int_2(self) -> int:
+            return self.my_int + 1
+
+    service_instance = MyObservable()
+    state_manager = StateManager(service=service_instance)
+    DataServiceObserver(state_manager)
+
+    service_instance.my_int = 1337
+
+    assert "'my_int_2' changed to '1338'" in caplog.text