Compare commits

main..v0.10.13

No commits in common. "main" and "v0.10.13" have entirely different histories.

17 changed files with 629 additions and 894 deletions


@@ -58,7 +58,7 @@ class MyService(pydase.DataService):
proxy = pydase.Client(
url="ws://<ip_addr>:<service_port>",
block_until_connected=False,
client_id="my_pydase_client_id", # optional, defaults to system hostname
client_id="my_pydase_client_id",
).proxy
# For SSL-encrypted services, use the wss protocol
@@ -77,7 +77,7 @@ if __name__ == "__main__":
In this example:
- The `MyService` class has a `proxy` attribute that connects to a `pydase` service at `<ip_addr>:<service_port>`.
- By setting `block_until_connected=False`, the service can start without waiting for the connection to succeed.
-- The `client_id` is optional. If not specified, it defaults to the system hostname, which will be sent in the `X-Client-Id` HTTP header for logging or authentication on the server side.
+- By setting `client_id`, the server will log a descriptive identifier for this client via the `X-Client-Id` HTTP header.
## Custom `socketio.AsyncClient` Connection Parameters
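
For context on the `X-Client-Id` behaviour described in the bullets above (an editorial sketch, not part of the diff): a server built on `python-socketio` could read the header in its connect handler roughly as follows. The handler and its `async_mode` are illustrative assumptions, not pydase's actual server code.

```python
import socketio

sio = socketio.AsyncServer(async_mode="aiohttp")


@sio.event
async def connect(sid, environ, auth):
    # Headers are exposed with WSGI-style keys, so X-Client-Id arrives
    # as HTTP_X_CLIENT_ID; fall back to the Socket.IO session id.
    client_id = environ.get("HTTP_X_CLIENT_ID", sid)
    print(f"Client [id={client_id}] connected")
```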

File diff suppressed because it is too large


@@ -35,6 +35,6 @@
"prettier": "3.3.2",
"typescript": "^5.7.3",
"typescript-eslint": "^7.18.0",
"vite": "^6.3.5"
"vite": "^5.4.12"
}
}


@@ -1,9 +1,8 @@
import { useState, useEffect } from "react";
-import { authority } from "../socket";
export default function useLocalStorage(key: string, defaultValue: unknown) {
const [value, setValue] = useState(() => {
-const storedValue = localStorage.getItem(`${authority}:${key}`);
+const storedValue = localStorage.getItem(key);
if (storedValue) {
return JSON.parse(storedValue);
}
@@ -12,7 +11,7 @@ export default function useLocalStorage(key: string, defaultValue: unknown) {
useEffect(() => {
if (value === undefined) return;
-localStorage.setItem(`${authority}:${key}`, JSON.stringify(value));
+localStorage.setItem(key, JSON.stringify(value));
}, [value, key]);
return [value, setValue];


@@ -1,6 +1,6 @@
[project]
name = "pydase"
version = "0.10.16"
version = "0.10.13"
description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases."
authors = [
{name = "Mose Müller",email = "mosemueller@gmail.com"}


@@ -1,14 +1,13 @@
import asyncio
import logging
-import socket
import sys
import threading
import urllib.parse
-from builtins import ModuleNotFoundError
from types import TracebackType
from typing import TYPE_CHECKING, Any, TypedDict, cast
import aiohttp
+import aiohttp_socks.connector
import socketio # type: ignore
from pydase.client.proxy_class import ProxyClass
@@ -60,8 +59,7 @@ class Client:
client's behaviour (e.g., reconnection attempts or reconnection delay).
client_id: An optional client identifier. This ID is sent to the server as the
`X-Client-Id` HTTP header. It can be used for logging or authentication
-purposes on the server side. If not provided, it defaults to the hostname
-of the machine running the client.
+purposes on the server side.
proxy_url: An optional proxy URL to route the connection through. This is useful
if the service is only reachable via an SSH tunnel or behind a firewall
(e.g., `socks5://localhost:2222`).
@@ -114,7 +112,7 @@ class Client:
self._path_prefix = parsed_url.path.rstrip("/") # Remove trailing slash if any
self._url = url
self._proxy_url = proxy_url
-self._client_id = client_id or socket.gethostname()
+self._client_id = client_id
self._sio_client_kwargs = sio_client_kwargs
self._loop: asyncio.AbstractEventLoop | None = None
self._thread: threading.Thread | None = None
@@ -152,17 +150,6 @@
def _initialize_socketio_client(self) -> None:
if self._proxy_url is not None:
-try:
-import aiohttp_socks.connector
-except ModuleNotFoundError:
-raise ModuleNotFoundError(
-"Missing dependency 'aiohttp_socks'. To use SOCKS5 proxy support, "
-"install the optional 'socks' extra:\n\n"
-' pip install "pydase[socks]"\n\n'
-"This is required when specifying a `proxy_url` for "
-"`pydase.Client`."
-)
session = aiohttp.ClientSession(
connector=aiohttp_socks.connector.ProxyConnector.from_url(
url=self._proxy_url, loop=self._loop
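
The block removed above is main's lazy `aiohttp_socks` import; the docstring earlier in this file describes the `proxy_url` option it backs. A usage sketch for that option (editorial, not part of the diff; the hosts, ports and default service port 8001 are assumptions, and on main the extra is installed with `pip install "pydase[socks]"`):

```python
import pydase

# Route the Socket.IO connection through a local SOCKS5 proxy, e.g. one
# opened with `ssh -D 2222 user@gateway`; addresses here are placeholders.
client = pydase.Client(
    url="ws://localhost:8001",
    proxy_url="socks5://localhost:2222",
)
service = client.proxy
```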


@@ -20,19 +20,6 @@ from pydase.utils.serialization.types import SerializedObject
logger = logging.getLogger(__name__)
-def _is_nested_attribute(full_access_path: str, changing_attributes: list[str]) -> bool:
-"""Return True if the full_access_path is a nested attribute of any
-changing_attribute."""
-return any(
-(
-full_access_path.startswith((f"{attr}.", f"{attr}["))
-and full_access_path != attr
-)
-for attr in changing_attributes
-)
class DataServiceObserver(PropertyObserver):
def __init__(self, state_manager: StateManager) -> None:
self.state_manager = state_manager
@@ -42,7 +29,11 @@ class DataServiceObserver(PropertyObserver):
super().__init__(state_manager.service)
def on_change(self, full_access_path: str, value: Any) -> None:
-if _is_nested_attribute(full_access_path, self.changing_attributes):
+if any(
+full_access_path.startswith(changing_attribute)
+and full_access_path != changing_attribute
+for changing_attribute in self.changing_attributes
+):
return
cached_value_dict: SerializedObject
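
The two checks above differ in one detail: `_is_nested_attribute` (the variant removed here, i.e. the one on main) only treats a path as nested when it continues with `.` or `[`, while the plain `startswith` comparison also skips attributes whose names merely begin with a changing attribute's name (the case exercised by the `test_property_starting_with_dependency_name` test removed further down). A standalone sketch of the distinction (editorial, not part of the diff):

```python
def is_nested(path: str, attrs: list[str]) -> bool:
    # Mirrors _is_nested_attribute: nested only if the path continues with '.' or '['.
    return any(
        path.startswith((f"{attr}.", f"{attr}[")) and path != attr for attr in attrs
    )


print(is_nested("my_attr.sub_value", ["my_attr"]))  # True: genuinely nested
print(is_nested("my_int_2", ["my_int"]))  # False: a separate attribute
print("my_int_2".startswith("my_int"))  # True: a plain startswith check would skip it
```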

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -7,8 +7,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="theme-color" content="#000000" />
<meta name="description" content="Web site displaying a pydase UI." />
-<script type="module" crossorigin src="/assets/index-XZbNXHJp.js"></script>
-<link rel="stylesheet" crossorigin href="/assets/index-Cs09d5Pk.css">
+<script type="module" crossorigin src="/assets/index-BLJetjaQ.js"></script>
+<link rel="stylesheet" crossorigin href="/assets/index-DJzFvk4W.css">
</head>
<script>


@@ -22,7 +22,7 @@ def reverse_dict(original_dict: dict[str, list[str]]) -> dict[str, list[str]]:
def get_property_dependencies(prop: property, prefix: str = "") -> list[str]:
source_code_string = inspect.getsource(prop.fget) # type: ignore[arg-type]
pattern = r"self\.([^\s\{\}\(\)]+)"
pattern = r"self\.([^\s\{\}]+)"
matches = re.findall(pattern, source_code_string)
return [prefix + match for match in matches if "(" not in match]
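
The only difference between the two patterns is that main's version (shown as removed) also stops a match at parentheses, which matters when a property passes a dependency through a method call, as in the `test_dependency_as_function_argument` test removed near the end of this compare. A standalone sketch (editorial, not part of the diff):

```python
import re

source = "return self.add_one(self.some_int)"

strict = r"self\.([^\s\{\}\(\)]+)"  # pattern on main: a match stops at '(' or ')'
loose = r"self\.([^\s\{\}]+)"       # pattern in v0.10.13

print(re.findall(strict, source))  # ['add_one', 'some_int']
print(re.findall(loose, source))   # ['add_one(self.some_int)']
```

With the older pattern the whole call is captured as a single token, which the `"(" not in match` filter above then discards, so `some_int` is never picked up as a dependency.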


@@ -14,6 +14,7 @@ from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.server.web_server import WebServer
from pydase.task.autostart import autostart_service_tasks
+from pydase.utils.helpers import current_event_loop_exists
HANDLED_SIGNALS = (
signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
@@ -161,10 +162,6 @@ class Server:
self._additional_servers = additional_servers
self.should_exit = False
self.servers: dict[str, asyncio.Future[Any]] = {}
-self._loop = asyncio.new_event_loop()
-asyncio.set_event_loop(self._loop)
self._state_manager = StateManager(
service=self._service,
filename=filename,
@@ -173,6 +170,11 @@
self._observer = DataServiceObserver(self._state_manager)
self._state_manager.load_state()
autostart_service_tasks(self._service)
+if not current_event_loop_exists():
+self._loop = asyncio.new_event_loop()
+asyncio.set_event_loop(self._loop)
+else:
+self._loop = asyncio.get_event_loop()
def run(self) -> None:
"""


@@ -231,6 +231,6 @@ def current_event_loop_exists() -> bool:
import asyncio
try:
-return not asyncio.get_event_loop().is_closed()
+return not asyncio.get_running_loop().is_closed()
except RuntimeError:
return False
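
The practical difference between the two calls: `asyncio.get_running_loop()` raises `RuntimeError` whenever no loop is currently running, whereas `asyncio.get_event_loop()` can still return a loop that has been set for the thread but is not yet running, which is the situation while `Server.__init__` executes. A quick standalone illustration (independent of pydase):

```python
import asyncio

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)

# The thread now has a current, but not yet running, event loop:
print(asyncio.get_event_loop() is loop)  # True

try:
    asyncio.get_running_loop()
except RuntimeError as err:
    print(f"no running loop: {err}")  # raised because nothing is running yet
```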


@@ -168,11 +168,9 @@ def test_context_manager(pydase_client: pydase.Client) -> None:
def test_client_id(
pydase_client: pydase.Client, caplog: pytest.LogCaptureFixture
) -> None:
-import socket
pydase.Client(url="ws://localhost:9999")
assert f"Client [id={socket.gethostname()}]" in caplog.text
assert "Client [sid=" in caplog.text
caplog.clear()
pydase.Client(url="ws://localhost:9999", client_id="my_service")


@@ -1,9 +1,8 @@
import logging
from typing import Any
-import pytest
import pydase
+import pytest
from pydase.data_service.data_service_observer import DataServiceObserver
from pydase.data_service.state_manager import StateManager
from pydase.utils.serialization.serializer import SerializationError, dump
@@ -242,42 +241,3 @@ def test_read_only_dict_property(caplog: pytest.LogCaptureFixture) -> None:
service_instance._dict_attr["dotted.key"] = 2.0
assert "'dict_attr[\"dotted.key\"]' changed to '2.0'" in caplog.text
-def test_dependency_as_function_argument(caplog: pytest.LogCaptureFixture) -> None:
-class MyObservable(pydase.DataService):
-some_int = 0
-@property
-def other_int(self) -> int:
-return self.add_one(self.some_int)
-def add_one(self, value: int) -> int:
-return value + 1
-service_instance = MyObservable()
-state_manager = StateManager(service=service_instance)
-DataServiceObserver(state_manager)
-service_instance.some_int = 1337
-assert "'other_int' changed to '1338'" in caplog.text
-def test_property_starting_with_dependency_name(
-caplog: pytest.LogCaptureFixture,
-) -> None:
-class MyObservable(pydase.DataService):
-my_int = 0
-@property
-def my_int_2(self) -> int:
-return self.my_int + 1
-service_instance = MyObservable()
-state_manager = StateManager(service=service_instance)
-DataServiceObserver(state_manager)
-service_instance.my_int = 1337
-assert "'my_int_2' changed to '1338'" in caplog.text