Compare commits
347 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c45f1bd489 | ||
|
|
5784818e5a | ||
|
|
64a7097568 | ||
|
|
5ef382728c | ||
|
|
51d6189002 | ||
|
|
71e29c890e | ||
|
|
6e407ba1d6 | ||
|
|
4fb5e56aa8 | ||
|
|
d55ba3a85f | ||
|
|
265d9a7ef5 | ||
|
|
4cd36b4a2b | ||
|
|
1b2ff38aff | ||
|
|
4b243985e8 | ||
|
|
8615bdeadc | ||
|
|
d24893a989 | ||
|
|
661603ef71 | ||
|
|
d6947b0f43 | ||
|
|
93d38651e8 | ||
|
|
72a3c199d9 | ||
|
|
7914e2fa7b | ||
|
|
0a4f898fde | ||
|
|
a9aa55fc99 | ||
|
|
fd5a230fa4 | ||
|
|
243b46aadb | ||
|
|
0f1ca84df5 | ||
|
|
6438a07305 | ||
|
|
80bfd209df | ||
|
|
e065b1fb22 | ||
|
|
977cee32b9 | ||
|
|
96f695020b | ||
|
|
33ce01865a | ||
|
|
f5374573cd | ||
|
|
43c6b5e817 | ||
|
|
37380c6d24 | ||
|
|
ae21656e83 | ||
|
|
a4b4f179c6 | ||
|
|
c6beca3961 | ||
|
|
2fa8240e54 | ||
|
|
369587a50c | ||
|
|
25343f6909 | ||
|
|
c136c9f3de | ||
|
|
8897c2fe4c | ||
|
|
80c5c4e99d | ||
|
|
423441a74c | ||
|
|
9ec60e3891 | ||
|
|
8bde104322 | ||
|
|
9b57b6984e | ||
|
|
e5b89f2581 | ||
|
|
ff1654e65c | ||
|
|
cded80c8e5 | ||
|
|
87a33b6293 | ||
|
|
6d621daaac | ||
|
|
8c1a50c106 | ||
|
|
a1545d341b | ||
|
|
28a1cc7cd3 | ||
|
|
c968708b85 | ||
|
|
fef8606d17 | ||
|
|
82286c8da0 | ||
|
|
533826a398 | ||
|
|
982875dee6 | ||
|
|
e54710cd4d | ||
|
|
f48f7aacfb | ||
|
|
e97aab4f36 | ||
|
|
015c66d5a6 | ||
|
|
9827d0747c | ||
|
|
38a12fb72e | ||
|
|
fb6ec16bf5 | ||
|
|
9ee498eb5c | ||
|
|
d015333123 | ||
|
|
c4e7fe66a8 | ||
|
|
5f1451a1c1 | ||
|
|
4c28cbaf7d | ||
|
|
a97b8eb2b4 | ||
|
|
f6b5c1b567 | ||
|
|
f92d525588 | ||
|
|
61b69d77cc | ||
|
|
8abe9357cf | ||
|
|
0dace2a9f0 | ||
|
|
9992ade0ed | ||
|
|
6c2cebada2 | ||
|
|
069a2b4696 | ||
|
|
38ed8d78de | ||
|
|
7ff6cab9b3 | ||
|
|
cbd93fb166 | ||
|
|
83c30439b6 | ||
|
|
e596c50915 | ||
|
|
9920350753 | ||
|
|
5dec01d800 | ||
|
|
18c66a8318 | ||
|
|
3d65240784 | ||
|
|
0d698e803d | ||
|
|
02a8791b74 | ||
|
|
35f658ce4d | ||
|
|
051e616280 | ||
|
|
98e9791d09 | ||
|
|
24ecbd1eb9 | ||
|
|
cd78d01b04 | ||
|
|
6be27217cf | ||
|
|
ae2c99b3ae | ||
|
|
c32b6a8694 | ||
|
|
57cfe45c76 | ||
|
|
7c18d86e9c | ||
|
|
c5eb5f80b4 | ||
|
|
18c64db826 | ||
|
|
27f8e1b1bc | ||
|
|
f8839f0e71 | ||
|
|
d31aff0b9b | ||
|
|
2c1db3fa45 | ||
|
|
b9cec19b02 | ||
|
|
6ba5193e9e | ||
|
|
bc0c69f9e1 | ||
|
|
b2314f7e33 | ||
|
|
eb43e7b380 | ||
|
|
5dc28b0b55 | ||
|
|
c327215b5f | ||
|
|
04a3b225f8 | ||
|
|
86c4514e1a | ||
|
|
cac74e90db | ||
|
|
c24d63f4c0 | ||
|
|
b0dd5835a3 | ||
|
|
b0c8af0108 | ||
|
|
c0016673a8 | ||
|
|
eadc1df763 | ||
|
|
922fdf8fd0 | ||
|
|
8b21c42ef7 | ||
|
|
2399b3ca9f | ||
|
|
db43f5dbbb | ||
|
|
f2c0a94904 | ||
|
|
c36cebf17c | ||
|
|
a96387b4d7 | ||
|
|
d1feff1a6a | ||
|
|
95df2f1650 | ||
|
|
0565c82448 | ||
|
|
755265bf53 | ||
|
|
4c7b386ab4 | ||
|
|
92b2326dfc | ||
|
|
9e18783a05 | ||
|
|
9be4aac988 | ||
|
|
f3d659670f | ||
|
|
23f051d6f1 | ||
|
|
c8979ab2e6 | ||
|
|
bd33252775 | ||
|
|
1fbcbc72bf | ||
|
|
9a8628cfbd | ||
|
|
3d13b20fda | ||
|
|
f2183ec3e4 | ||
|
|
360aeb5574 | ||
|
|
e85e93a1d9 | ||
|
|
ea5fd42919 | ||
|
|
247113f1db | ||
|
|
c76b0b0b6e | ||
|
|
2d39c56e3d | ||
|
|
60287fef95 | ||
|
|
c5e1a08c54 | ||
|
|
9424d4c412 | ||
|
|
0a4c13c617 | ||
|
|
5d72604199 | ||
|
|
3479c511fe | ||
|
|
9bf3b28390 | ||
|
|
0195f9d6f6 | ||
|
|
197268255b | ||
|
|
3698cb7f92 | ||
|
|
0625832457 | ||
|
|
f35bcf3be6 | ||
|
|
3fe77bb4e5 | ||
|
|
9b2d181f4a | ||
|
|
045334e51e | ||
|
|
1d8d17d715 | ||
|
|
4d84c9778f | ||
|
|
e3c144fa6e | ||
|
|
192075057f | ||
|
|
053050a62c | ||
|
|
aacc69ae94 | ||
|
|
de1483bdc5 | ||
|
|
b24db00eda | ||
|
|
36ee760610 | ||
|
|
3a67c07bad | ||
|
|
b9a91e5ee2 | ||
|
|
f83bc0073b | ||
|
|
c66b90c4e5 | ||
|
|
d0b0803407 | ||
|
|
e25511768d | ||
|
|
303de82318 | ||
|
|
db559e8ada | ||
|
|
1b35dba64f | ||
|
|
8a8ac9d297 | ||
|
|
40a8863ecd | ||
|
|
1dca04f693 | ||
|
|
2b520834dc | ||
|
|
d6bad37233 | ||
|
|
53a2a3303f | ||
|
|
4f206bbae9 | ||
|
|
090b8acd44 | ||
|
|
17b2ad32e5 | ||
|
|
3c99f3fe04 | ||
|
|
2bcc6b9660 | ||
|
|
c1ace54c78 | ||
|
|
56af2a423b | ||
|
|
eba0eb83e6 | ||
|
|
b7818c0d8a | ||
|
|
a0c3882f35 | ||
|
|
1d773ba09b | ||
|
|
10f1b8691c | ||
|
|
a99db6f053 | ||
|
|
36ab8ab68b | ||
|
|
27a832bbd1 | ||
|
|
18df9e288a | ||
|
|
7b786be892 | ||
|
|
374a930745 | ||
|
|
6d12e5c939 | ||
|
|
bcf37067ad | ||
|
|
a1ac0c2f88 | ||
|
|
cfe190ca5b | ||
|
|
c002d04328 | ||
|
|
0d1df4f9e5 | ||
|
|
59cc834a81 | ||
|
|
dc54d9faef | ||
|
|
89bf5cb3f1 | ||
|
|
c72ea9eb20 | ||
|
|
897387e39e | ||
|
|
4454a10f78 | ||
|
|
c9814f7cdc | ||
|
|
187d8bcf28 | ||
|
|
204d426663 | ||
|
|
29e9afa47e | ||
|
|
a6943c027f | ||
|
|
70e4fa73e1 | ||
|
|
579fa4715b | ||
|
|
0100bab04f | ||
|
|
bdf97fa181 | ||
|
|
eb1587fa7d | ||
|
|
5827cda316 | ||
|
|
0e9ec7a66a | ||
|
|
155957f0c5 | ||
|
|
a8b46f191b | ||
|
|
3862ce3405 | ||
|
|
5403b51a5b | ||
|
|
1270400e95 | ||
|
|
3d2bb1c528 | ||
|
|
7c68f02cfd | ||
|
|
ccd6447869 | ||
|
|
056c02c5a5 | ||
|
|
52a798e4c8 | ||
|
|
fdfdef5837 | ||
|
|
ff301f225c | ||
|
|
87f720f567 | ||
|
|
fecb46c02c | ||
|
|
cce2399b07 | ||
|
|
df1db99ec0 | ||
|
|
5f2619500b | ||
|
|
843675fa1e | ||
|
|
2aa370c8ac | ||
|
|
c25ff4a3aa | ||
|
|
5e32a70c3e | ||
|
|
3f6692a1cd | ||
|
|
eb32b34b59 | ||
|
|
9eedf03c01 | ||
|
|
5ec7a8b530 | ||
|
|
f2f330dbd9 | ||
|
|
2e0e056489 | ||
|
|
d8685fe9a0 | ||
|
|
e52a019d5e | ||
|
|
0d5cef1537 | ||
|
|
e8f33eee4d | ||
|
|
a3b71b174c | ||
|
|
e2ce0e9acb | ||
|
|
f47a183c11 | ||
|
|
a9ea237cf3 | ||
|
|
6db1652dd3 | ||
|
|
e3b95a8076 | ||
|
|
0fe2a8516f | ||
|
|
51bbaba162 | ||
|
|
77802da417 | ||
|
|
3e21858cb7 | ||
|
|
2003f28fd1 | ||
|
|
172b50bf77 | ||
|
|
ec5694fedf | ||
|
|
968f774092 | ||
|
|
757dc9aa3c | ||
|
|
3d938562a6 | ||
|
|
964a62d4b4 | ||
|
|
99aa38fcfe | ||
|
|
5658514c8a | ||
|
|
109ee7d5e1 | ||
|
|
f4fa02fe11 | ||
|
|
487ef504a8 | ||
|
|
c98e407ed7 | ||
|
|
6b6ce1d43f | ||
|
|
e491ac7458 | ||
|
|
e9d8cbafc2 | ||
|
|
aa705592b2 | ||
|
|
008e1262bb | ||
|
|
91a71ad004 | ||
|
|
bbf479a440 | ||
|
|
983d392ba8 | ||
|
|
56dd9dd8aa | ||
|
|
20028c379d | ||
|
|
e48046795e | ||
|
|
1ac9e45c73 | ||
|
|
488415436c | ||
|
|
d7c5c2cd6e | ||
|
|
5388fd0d2b | ||
|
|
e74b5c773a | ||
|
|
bb6cd159f1 | ||
|
|
4a09f02882 | ||
|
|
9180bb1d9e | ||
|
|
ece68b4b99 | ||
|
|
0c95b5e3cb | ||
|
|
0450bb1570 | ||
|
|
2f5a640c4c | ||
|
|
78964be506 | ||
|
|
fbdf6de63c | ||
|
|
9b04dcd41e | ||
|
|
32e36d4962 | ||
|
|
62f28f79db | ||
|
|
e88965b69d | ||
|
|
e422d627af | ||
|
|
2e31ebb7d9 | ||
|
|
71adc8bea2 | ||
|
|
bfa0acedab | ||
|
|
416b9ee815 | ||
|
|
d1d2ac2614 | ||
|
|
fa35fa53e2 | ||
|
|
c0e5a77d6f | ||
|
|
96cc7b31b4 | ||
|
|
0d6d312f68 | ||
|
|
be3011c565 | ||
|
|
09fae01985 | ||
|
|
12c0c9763d | ||
|
|
15322b742d | ||
|
|
85d6229aa6 | ||
|
|
083fab0a29 | ||
|
|
2a1aff589d | ||
|
|
3cd7198747 | ||
|
|
1e02f12794 | ||
|
|
e4a3cf341f | ||
|
|
7ddcd97f68 | ||
|
|
80da96657c | ||
|
|
861e89f37a | ||
|
|
c00cf9a6ff | ||
|
|
ed7f3d8509 | ||
|
|
456090fee9 | ||
|
|
e69ef376ae | ||
|
|
5f78771f66 | ||
|
|
09ceae90ec | ||
|
|
c34351270c | ||
|
|
743c18bdd7 |
5
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -18,7 +18,10 @@ Provide steps to reproduce the behaviour, including a minimal code snippet (if a
|
|||||||
## Expected behaviour
|
## Expected behaviour
|
||||||
A clear and concise description of what you expected to happen.
|
A clear and concise description of what you expected to happen.
|
||||||
|
|
||||||
## Screenshot/Video
|
## Actual behaviour
|
||||||
|
Describe what you see instead of the expected behaviour.
|
||||||
|
|
||||||
|
### Screenshot/Video
|
||||||
If applicable, add visual content that helps explain your problem.
|
If applicable, add visual content that helps explain your problem.
|
||||||
|
|
||||||
## Additional context
|
## Additional context
|
||||||
|
|||||||
6
.github/workflows/publish-to-pypi.yaml
vendored
@@ -22,7 +22,7 @@ jobs:
|
|||||||
- name: Build a binary wheel and a source tarball
|
- name: Build a binary wheel and a source tarball
|
||||||
run: python3 -m build
|
run: python3 -m build
|
||||||
- name: Store the distribution packages
|
- name: Store the distribution packages
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: python-package-distributions
|
name: python-package-distributions
|
||||||
path: dist/
|
path: dist/
|
||||||
@@ -44,7 +44,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Download all the dists
|
- name: Download all the dists
|
||||||
uses: actions/download-artifact@v3
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: python-package-distributions
|
name: python-package-distributions
|
||||||
path: dist/
|
path: dist/
|
||||||
@@ -65,7 +65,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Download all the dists
|
- name: Download all the dists
|
||||||
uses: actions/download-artifact@v3
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: python-package-distributions
|
name: python-package-distributions
|
||||||
path: dist/
|
path: dist/
|
||||||
|
|||||||
6
.github/workflows/python-package.yml
vendored
@@ -2,6 +2,8 @@
|
|||||||
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
|
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
|
||||||
|
|
||||||
name: Python package
|
name: Python package
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
@@ -16,7 +18,7 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
python-version: ["3.10", "3.11", "3.12"]
|
python-version: ["3.10", "3.11", "3.12", "3.13"]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
@@ -28,7 +30,7 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
python -m pip install --upgrade pip
|
python -m pip install --upgrade pip
|
||||||
python -m pip install poetry
|
python -m pip install poetry
|
||||||
poetry install --with dev
|
poetry install --with dev --all-extras
|
||||||
- name: Check with ruff
|
- name: Check with ruff
|
||||||
run: |
|
run: |
|
||||||
poetry run ruff check src
|
poetry run ruff check src
|
||||||
|
|||||||
107
README.md
@@ -1,10 +1,11 @@
|
|||||||
<!--introduction-start-->
|
<!--introduction-start-->
|
||||||
# pydase <!-- omit from toc -->
|

|
||||||
|
|
||||||
[](https://pypi.org/project/pydase/)
|
[](https://pypi.org/project/pydase/)
|
||||||
[](https://pypi.org/project/pydase/)
|
[](https://pypi.org/project/pydase/)
|
||||||
[](https://pydase.readthedocs.io/en/stable/)
|
[](https://pydase.readthedocs.io/en/stable/)
|
||||||
[][License]
|
[][License]
|
||||||
|
[](https://doi.org/10.5281/zenodo.15703190)
|
||||||
|
|
||||||
`pydase` is a Python library that simplifies the creation of remote control interfaces for Python objects. It exposes the public attributes of a user-defined class via a [Socket.IO](https://python-socketio.readthedocs.io/en/stable/) web server, ensuring they are always in sync with the service state. You can interact with these attributes using an RPC client, a RESTful API, or a web browser. The web browser frontend is auto-generated, displaying components that correspond to each public attribute of the class for direct interaction.
|
`pydase` is a Python library that simplifies the creation of remote control interfaces for Python objects. It exposes the public attributes of a user-defined class via a [Socket.IO](https://python-socketio.readthedocs.io/en/stable/) web server, ensuring they are always in sync with the service state. You can interact with these attributes using an RPC client, a RESTful API, or a web browser. The web browser frontend is auto-generated, displaying components that correspond to each public attribute of the class for direct interaction.
|
||||||
`pydase` implements an [observer pattern][Observer Pattern] to provide the real-time updates, ensuring that changes to the class attributes are reflected across all clients.
|
`pydase` implements an [observer pattern][Observer Pattern] to provide the real-time updates, ensuring that changes to the class attributes are reflected across all clients.
|
||||||
@@ -105,7 +106,7 @@ class Device(pydase.DataService):
|
|||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
service = Device()
|
service = Device()
|
||||||
pydase.Server(service=service).run()
|
pydase.Server(service=service, web_port=8001).run()
|
||||||
```
|
```
|
||||||
|
|
||||||
In the above example, we define a `Device` class that inherits from `pydase.DataService`.
|
In the above example, we define a `Device` class that inherits from `pydase.DataService`.
|
||||||
@@ -122,10 +123,13 @@ import pydase
|
|||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
service = Device()
|
service = Device()
|
||||||
pydase.Server(service=service).run()
|
pydase.Server(service=service, web_port=8001).run()
|
||||||
```
|
```
|
||||||
|
|
||||||
This will start the server, making your `Device` service accessible on [http://localhost:8001](http://localhost:8001).
|
This will start the server, making your `Device` service accessible on
|
||||||
|
[http://localhost:8001](http://localhost:8001). The port number for the web server can
|
||||||
|
be customised in the server constructor or through environment variables and defaults
|
||||||
|
to `8001`.
|
||||||
|
|
||||||
### Accessing the Web Interface
|
### Accessing the Web Interface
|
||||||
|
|
||||||
@@ -144,7 +148,7 @@ import pydase
|
|||||||
|
|
||||||
# Replace the hostname and port with the IP address and the port of the machine where
|
# Replace the hostname and port with the IP address and the port of the machine where
|
||||||
# the service is running, respectively
|
# the service is running, respectively
|
||||||
client_proxy = pydase.Client(url="ws://<ip_addr>:<service_port>").proxy
|
client_proxy = pydase.Client(url="ws://<ip_addr>:<web_port>").proxy
|
||||||
# client_proxy = pydase.Client(url="wss://your-domain.ch").proxy # if your service uses ssl-encryption
|
# client_proxy = pydase.Client(url="wss://your-domain.ch").proxy # if your service uses ssl-encryption
|
||||||
|
|
||||||
# After the connection, interact with the service attributes as if they were local
|
# After the connection, interact with the service attributes as if they were local
|
||||||
@@ -170,7 +174,7 @@ import json
|
|||||||
import requests
|
import requests
|
||||||
|
|
||||||
response = requests.get(
|
response = requests.get(
|
||||||
"http://<hostname>:<port>/api/v1/get_value?access_path=<full_access_path>"
|
"http://<hostname>:<web_port>/api/v1/get_value?access_path=<full_access_path>"
|
||||||
)
|
)
|
||||||
serialized_value = json.loads(response.text)
|
serialized_value = json.loads(response.text)
|
||||||
```
|
```
|
||||||
@@ -181,44 +185,41 @@ For more information, see [here][RESTful API].
|
|||||||
|
|
||||||
## Configuring pydase via Environment Variables
|
## Configuring pydase via Environment Variables
|
||||||
|
|
||||||
Configuring `pydase` through environment variables enhances flexibility, security, and reusability. This approach allows for easy adaptation of services across different environments without code changes, promoting scalability and maintainability. With that, it simplifies deployment processes and facilitates centralized configuration management. Moreover, environment variables enable separation of configuration from code, aiding in secure and collaborative development.
|
`pydase` services work out of the box without requiring any configuration. However, you
|
||||||
|
might want to change some options, such as the web server port or logging level. To
|
||||||
|
accommodate such customizations, `pydase` allows configuration through environment
|
||||||
|
variables, such as:
|
||||||
|
|
||||||
`pydase` offers various configurable options:
|
- **`ENVIRONMENT`**:
|
||||||
|
Defines the operation mode (`"development"` or `"production"`), which influences
|
||||||
|
behaviour such as logging (see [Logging in pydase](#logging-in-pydase)).
|
||||||
|
|
||||||
- **`ENVIRONMENT`**: Sets the operation mode to either "development" or "production". Affects logging behaviour (see [logging section](#logging-in-pydase)).
|
- **`SERVICE_CONFIG_DIR`**:
|
||||||
- **`SERVICE_CONFIG_DIR`**: Specifies the directory for service configuration files, like `web_settings.json`. This directory can also be used to hold user-defined configuration files. Default is the `config` folder in the service root folder. The variable can be accessed through:
|
Specifies the directory for configuration files (e.g., `web_settings.json`). Defaults
|
||||||
|
to the `config` folder in the service root. Access this programmatically using:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
import pydase.config
|
import pydase.config
|
||||||
pydase.config.ServiceConfig().config_dir
|
pydase.config.ServiceConfig().config_dir
|
||||||
```
|
```
|
||||||
|
|
||||||
- **`SERVICE_WEB_PORT`**: Defines the port number for the web server. This has to be different for each services running on the same host. Default is 8001.
|
- **`SERVICE_WEB_PORT`**:
|
||||||
- **`GENERATE_WEB_SETTINGS`**: When set to true, generates / updates the `web_settings.json` file. If the file already exists, only new entries are appended.
|
Defines the web server’s port. Ensure each service on the same host uses a unique
|
||||||
|
port. Default: `8001`.
|
||||||
|
|
||||||
Some of those settings can also be altered directly in code when initializing the server:
|
- **`GENERATE_WEB_SETTINGS`**:
|
||||||
|
When `true`, generates or updates the `web_settings.json` file. Existing entries are
|
||||||
|
preserved, and new entries are appended.
|
||||||
|
|
||||||
```python
|
For more information, see [Configuring pydase](https://pydase.readthedocs.io/en/stable/user-guide/Configuration/).
|
||||||
import pathlib
|
|
||||||
|
|
||||||
from pydase import Server
|
|
||||||
from your_service_module import YourService
|
|
||||||
|
|
||||||
|
|
||||||
server = Server(
|
|
||||||
YourService(),
|
|
||||||
web_port=8080,
|
|
||||||
config_dir=pathlib.Path("other_config_dir"), # note that you need to provide an argument of type pathlib.Path
|
|
||||||
generate_web_settings=True
|
|
||||||
).run()
|
|
||||||
```
|
|
||||||
|
|
||||||
## Customizing the Web Interface
|
## Customizing the Web Interface
|
||||||
|
|
||||||
`pydase` allows you to enhance the user experience by customizing the web interface's appearance through
|
`pydase` allows you to enhance the user experience by customizing the web interface's appearance through
|
||||||
|
|
||||||
1. a custom CSS file, and
|
1. a custom CSS file, and
|
||||||
2. tailoring the frontend component layout and display style.
|
2. a custom favicon image, and
|
||||||
|
3. tailoring the frontend component layout and display style.
|
||||||
|
|
||||||
You can also provide a custom frontend source if you need even more flexibility.
|
You can also provide a custom frontend source if you need even more flexibility.
|
||||||
|
|
||||||
@@ -226,44 +227,15 @@ For details, please see [here](https://pydase.readthedocs.io/en/stable/user-guid
|
|||||||
|
|
||||||
## Logging in pydase
|
## Logging in pydase
|
||||||
|
|
||||||
The `pydase` library organizes its loggers on a per-module basis, mirroring the Python package hierarchy. This structured approach allows for granular control over logging levels and behaviour across different parts of the library.
|
The `pydase` library provides structured, per-module logging with support for log level configuration, rich formatting, and optional client identification in logs.
|
||||||
|
|
||||||
### Changing the Log Level
|
To configure logging in your own service, you can use:
|
||||||
|
|
||||||
You have two primary ways to adjust the log levels in `pydase`:
|
```python
|
||||||
|
from pydase.utils.logging import configure_logging_with_pydase_formatter
|
||||||
|
```
|
||||||
|
|
||||||
1. directly targeting `pydase` loggers
|
For more information, see the [full guide](https://pydase.readthedocs.io/en/stable/user-guide/Logging/).
|
||||||
|
|
||||||
You can set the log level for any `pydase` logger directly in your code. This method is useful for fine-tuning logging levels for specific modules within `pydase`. For instance, if you want to change the log level of the main `pydase` logger or target a submodule like `pydase.data_service`, you can do so as follows:
|
|
||||||
|
|
||||||
```python
|
|
||||||
# <your_script.py>
|
|
||||||
import logging
|
|
||||||
|
|
||||||
# Set the log level for the main pydase logger
|
|
||||||
logging.getLogger("pydase").setLevel(logging.INFO)
|
|
||||||
|
|
||||||
# Optionally, target a specific submodule logger
|
|
||||||
# logging.getLogger("pydase.data_service").setLevel(logging.DEBUG)
|
|
||||||
|
|
||||||
# Your logger for the current script
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
logger.info("My info message.")
|
|
||||||
```
|
|
||||||
|
|
||||||
This approach allows for specific control over different parts of the `pydase` library, depending on your logging needs.
|
|
||||||
|
|
||||||
2. using the `ENVIRONMENT` environment variable
|
|
||||||
|
|
||||||
For a more global setting that affects the entire `pydase` library, you can utilize the `ENVIRONMENT` environment variable. Setting this variable to "production" will configure all `pydase` loggers to only log messages of level "INFO" and above, filtering out more verbose logging. This is particularly useful for production environments where excessive logging can be overwhelming or unnecessary.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
ENVIRONMENT="production" python -m <module_using_pydase>
|
|
||||||
```
|
|
||||||
|
|
||||||
In the absence of this setting, the default behavior is to log everything of level "DEBUG" and above, suitable for development environments where more detailed logs are beneficial.
|
|
||||||
|
|
||||||
**Note**: It is recommended to avoid calling the `pydase.utils.logging.setup_logging` function directly, as this may result in duplicated logging messages.
|
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
|
||||||
@@ -273,10 +245,19 @@ The full documentation provides more detailed information about `pydase`, includ
|
|||||||
|
|
||||||
We welcome contributions! Please see [contributing.md](https://pydase.readthedocs.io/en/stable/about/contributing/) for details on how to contribute.
|
We welcome contributions! Please see [contributing.md](https://pydase.readthedocs.io/en/stable/about/contributing/) for details on how to contribute.
|
||||||
|
|
||||||
|
## Acknowledgements
|
||||||
|
|
||||||
|
This work was funded by the [ETH Zurich-PSI Quantum Computing Hub](https://www.psi.ch/en/lnq/qchub).
|
||||||
|
|
||||||
|
The main idea behind `pydase` is based on a previous project called `tiqi-plugin`, which
|
||||||
|
was developed within the same research group. While the concept was inspired by that
|
||||||
|
project, `pydase` was implemented from the ground up with a new architecture and design.
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
`pydase` is licensed under the [MIT License][License].
|
`pydase` is licensed under the [MIT License][License].
|
||||||
|
|
||||||
|
[pydase Banner]: ./docs/images/logo-with-text.png
|
||||||
[License]: ./LICENSE
|
[License]: ./LICENSE
|
||||||
[Observer Pattern]: https://pydase.readthedocs.io/en/docs/dev-guide/Observer_Pattern_Implementation/
|
[Observer Pattern]: https://pydase.readthedocs.io/en/docs/dev-guide/Observer_Pattern_Implementation/
|
||||||
[Service Persistence]: https://pydase.readthedocs.io/en/stable/user-guide/Service_Persistence
|
[Service Persistence]: https://pydase.readthedocs.io/en/stable/user-guide/Service_Persistence
|
||||||
|
|||||||
@@ -1,6 +1,15 @@
|
|||||||
::: pydase.data_service
|
::: pydase.data_service
|
||||||
handler: python
|
handler: python
|
||||||
|
|
||||||
|
::: pydase.data_service.data_service_cache
|
||||||
|
handler: python
|
||||||
|
|
||||||
|
::: pydase.data_service.data_service_observer
|
||||||
|
handler: python
|
||||||
|
|
||||||
|
::: pydase.data_service.state_manager
|
||||||
|
handler: python
|
||||||
|
|
||||||
::: pydase.server.server
|
::: pydase.server.server
|
||||||
handler: python
|
handler: python
|
||||||
|
|
||||||
@@ -13,6 +22,12 @@
|
|||||||
::: pydase.components
|
::: pydase.components
|
||||||
handler: python
|
handler: python
|
||||||
|
|
||||||
|
::: pydase.task
|
||||||
|
handler: python
|
||||||
|
options:
|
||||||
|
inherited_members: false
|
||||||
|
show_submodules: true
|
||||||
|
|
||||||
::: pydase.utils.serialization.serializer
|
::: pydase.utils.serialization.serializer
|
||||||
handler: python
|
handler: python
|
||||||
|
|
||||||
@@ -32,6 +47,9 @@
|
|||||||
options:
|
options:
|
||||||
filters: ["!render_in_frontend"]
|
filters: ["!render_in_frontend"]
|
||||||
|
|
||||||
|
::: pydase.utils.logging
|
||||||
|
handler: python
|
||||||
|
|
||||||
::: pydase.units
|
::: pydase.units
|
||||||
handler: python
|
handler: python
|
||||||
|
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
end="<!--getting-started-end-->"
|
end="<!--getting-started-end-->"
|
||||||
%}
|
%}
|
||||||
|
|
||||||
[RESTful API]: ./user-guide/interaction/README.md#restful-api
|
[RESTful API]: ./user-guide/interaction/RESTful-API.md
|
||||||
[Python RPC Client]: ./user-guide/interaction/README.md#python-rpc-client
|
[Python RPC Client]: ./user-guide/interaction/Python-Client.md
|
||||||
[Custom Components]: ./user-guide/Components.md#custom-components-pydasecomponents
|
[Custom Components]: ./user-guide/Components.md#custom-components-pydasecomponents
|
||||||
[Components]: ./user-guide/Components.md
|
[Components]: ./user-guide/Components.md
|
||||||
|
|||||||
BIN
docs/images/logo-bw.png
Normal file
|
After Width: | Height: | Size: 27 KiB |
11
docs/images/logo-bw.svg
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN" "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
|
||||||
|
<svg version="1.0" xmlns="http://www.w3.org/2000/svg" width="300.000000pt" height="319.000000pt" viewBox="0 0 300.000000 319.000000" preserveAspectRatio="xMidYMid meet">
|
||||||
|
<metadata>
|
||||||
|
Created by potrace 1.10, written by Peter Selinger 2001-2011
|
||||||
|
</metadata>
|
||||||
|
<g transform="translate(0.000000,319.000000) scale(0.050000,-0.050000)" fill="#000000" stroke="none">
|
||||||
|
<path d="M3177 6315 c-73 -26 -181 -83 -240 -128 -87 -67 -137 -88 -270 -115 -1259 -251 -2314 -1289 -2589 -2550 -380 -1734 1006 -3502 2746 -3502 1092 0 1819 261 2376 852 1117 1187 1046 2893 -171 4102 l-265 263 107 71 c65 43 127 106 160 161 68 116 87 115 287 -19 279 -187 300 -77 30 157 l-58 51 115 116 c149 152 167 320 22 199 -224 -185 -335 -226 -354 -131 -34 168 -137 227 -683 390 l-380 114 -350 7 c-326 8 -359 5 -483 -38z m1193 -245 c505 -152 550 -179 550 -322 0 -95 -184 -206 -559 -337 -556 -193 -887 -224 -1121 -104 -71 37 -173 89 -224 115 -221 112 -188 499 57 673 129 91 215 106 577 98 l340 -7 380 -116z m-1647 -319 c-8 -214 19 -324 119 -480 33 -53 57 -98 54 -100 -3 -2 -127 -48 -276 -100 -789 -280 -1197 -648 -1468 -1325 -250 -626 -230 -1189 69 -1886 56 -132 112 -304 130 -400 66 -348 238 -672 518 -975 150 -162 145 -163 -142 -18 -751 378 -1266 1020 -1501 1873 -52 189 -51 877 2 1120 230 1058 1019 1971 2012 2329 129 46 450 147 480 150 6 1 7 -84 3 -188z m2304 -993 c914 -980 1033 -2150 325 -3215 -572 -860 -1720 -1295 -2645 -1002 -560 178 -831 366 -986 683 -223 458 -232 753 -33 1064 175 273 284 290 1082 163 853 -135 1190 -74 1545 280 91 90 165 157 165 148 0 -244 -303 -619 -632 -782 l-174 -86 -374 -11 c-447 -12 -521 -40 -624 -238 -142 -271 -52 -462 244 -518 216 -42 300 -46 464 -24 1202 161 1849 1357 1347 2490 -29 66 -75 226 -101 356 -48 244 -131 451 -249 622 l-61 89 235 80 c306 104 276 110 472 -99z m-772 -195 c280 -415 191 -1010 -208 -1383 -252 -236 -463 -295 -1137 -322 -822 -32 -1036 -94 -1249 -361 -107 -134 -113 -133 -82 7 172 759 472 1031 1191 1078 240 16 342 31 410 61 363 159 379 624 29 795 -99 49 -122 41 451 160 553 116 490 120 595 -35z m-1895 -84 c39 -11 192 -47 340 -80 518 -114 681 -237 592 -446 -67 -156 -155 -191 -550 -215 -782 -47 -1105 -339 -1352 -1226 -37 -131 -53 -128 -89 18 -134 554 57 1165 509 1623 309 313 404 369 550 326z m2342 -1942 c-167 -657 -704 -1119 -1359 -1169 -320 -24 -563 50 -563 173 0 188 127 259 508 282 802 48 1231 374 1375 1048 60 282 66 286 73 41 4 
-166 -4 -255 -34 -375z"/>
|
||||||
|
<path d="M3858 5922 c-62 -62 -78 -92 -78 -151 0 -307 422 -382 501 -88 70 262 -231 432 -423 239z m245 -95 c45 -41 48 -113 6 -156 -43 -42 -101 -39 -149 9 -97 97 41 239 143 147z"/>
|
||||||
|
</g>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 2.7 KiB |
BIN
docs/images/logo-colour.png
Normal file
|
After Width: | Height: | Size: 77 KiB |
153
docs/images/logo-colour.svg
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<svg
|
||||||
|
version="1.1"
|
||||||
|
width="588px"
|
||||||
|
height="626px"
|
||||||
|
viewBox="0 0 588 626"
|
||||||
|
preserveAspectRatio="xMidYMid meet"
|
||||||
|
id="svg184"
|
||||||
|
sodipodi:docname="pydase-logo-colour-3.svg"
|
||||||
|
inkscape:version="1.4 (e7c3feb100, 2024-10-09)"
|
||||||
|
inkscape:export-filename="pydase-logo-colour-3.png"
|
||||||
|
inkscape:export-xdpi="96"
|
||||||
|
inkscape:export-ydpi="96"
|
||||||
|
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||||
|
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
xmlns:svg="http://www.w3.org/2000/svg">
|
||||||
|
<defs
|
||||||
|
id="defs184" />
|
||||||
|
<sodipodi:namedview
|
||||||
|
id="namedview184"
|
||||||
|
pagecolor="#ffffff"
|
||||||
|
bordercolor="#000000"
|
||||||
|
borderopacity="0.25"
|
||||||
|
inkscape:showpageshadow="2"
|
||||||
|
inkscape:pageopacity="0.0"
|
||||||
|
inkscape:pagecheckerboard="0"
|
||||||
|
inkscape:deskcolor="#d1d1d1"
|
||||||
|
inkscape:zoom="0.70710678"
|
||||||
|
inkscape:cx="48.083261"
|
||||||
|
inkscape:cy="74.953318"
|
||||||
|
inkscape:window-width="2048"
|
||||||
|
inkscape:window-height="1243"
|
||||||
|
inkscape:window-x="0"
|
||||||
|
inkscape:window-y="0"
|
||||||
|
inkscape:window-maximized="1"
|
||||||
|
inkscape:current-layer="svg184"
|
||||||
|
showgrid="false" />
|
||||||
|
<g
|
||||||
|
fill="#041b31"
|
||||||
|
id="g1"
|
||||||
|
style="display:inline"
|
||||||
|
inkscape:label="Contour">
|
||||||
|
<path
|
||||||
|
d="m 249,624.5 c -0.8,-0.2 -4.9,-0.8 -9,-1.5 -23.8,-3.7 -65.4,-19 -91,-33.5 C 115.5,570.6 81,540.3 58.3,510 41.3,487.2 23.6,454.3 16.2,431.5 8.8,408.8 8.3,406.8 4.9,387.5 1.9,370.5 1.8,368 1.6,342 1.5,313.2 1.4,314 7.1,282.6 18.3,221.6 48.7,167 100.4,115.5 116.6,99.3 126.7,90.8 142.5,80.1 158.5,69.3 182.9,56 199.5,49 210.6,44.4 240.6,34.4 252,31.5 c 7.3,-1.8 22.4,-4.5 25.5,-4.5 0.2,0 2.7,-2.1 5.7,-4.6 C 301.8,6.5 318.4,1 348,0.9 c 17.1,0 36.4,1.4 46,3.2 3,0.6 14.7,4 26,7.4 11.3,3.5 27.3,8.2 35.5,10.4 17.5,4.8 27.3,9.3 33.4,15.3 5.5,5.5 8.1,10.7 8.8,17.4 0.3,3 0.9,5.4 1.4,5.4 4,0 19.5,-9.6 30.7,-19 8.1,-6.9 9.3,-6.9 11.3,-0.1 2,6.6 -0.6,10 -19,25.9 l -3.5,2.9 10.6,10.4 c 13.4,13.2 17.8,21.1 12.4,22.5 -2.9,0.7 -4.8,-0.3 -15.2,-7.8 C 516.1,87.4 503.2,80 500.5,80 c -1.6,0 -2.9,1.5 -5,6.1 -3.8,7.9 -13.7,17.7 -22.6,22.4 l -6.8,3.6 4.7,4.2 c 18.1,16.2 30.1,28 40.8,40 15.1,16.9 22.8,27 32.1,42.4 6.9,11.4 22.2,41.2 23.8,46.3 0.4,1.4 1.6,4.3 2.6,6.5 4.9,10.7 10.9,34.8 14.6,58.5 2.7,17.9 2.5,58.7 -0.5,77.8 -5.3,33.5 -9.2,47.1 -21.3,73.7 -12.6,27.8 -24.1,46.3 -40.8,65.6 -19.2,22.3 -38.5,39.4 -60.5,53.8 -10.2,6.6 -43.5,23 -54.7,26.9 -16.2,5.7 -44,11 -69.1,13.2 -6.9,0.6 -17.5,1.7 -23.5,2.5 -9.4,1.3 -59.9,2 -65.3,1 z m 99.5,-135.4 c 36.7,-9.2 67.4,-29.4 87.4,-57.6 7.2,-10.3 17.8,-31.2 21.6,-42.9 5.7,-17.8 7,-26.5 7,-48.3 0,-18 -0.4,-22.7 -2,-21.2 -0.2,0.3 -1.1,5 -2,10.4 -5.4,34.9 -14.4,55.5 -32.5,74.8 -16.6,17.7 -36.73987,31.75263 -59.4,38.2 -7.25764,2.06498 -18.96791,3.46589 -37.2,4.4 -35.48106,1.81785 -36.6,1.6 -43.6,5.3 -12.5,6.7 -18.3,17.8 -14.4,27.3 2,4.7 6.3,7.1 17.1,9.5 12.5,2.8 13.8,2.9 33,2.5 12.8,-0.3 19,-0.8 25,-2.4 z M 134.4,385.8 c 0.8,-2.9 2.5,-8.9 3.6,-13.3 7.9,-29.5 14.4,-45.5 25.2,-62 7.4,-11.4 12,-16.1 27,-27.5 8.1,-6.1 13.6,-9.4 23.3,-13.6 18.4,-8.1 23.2,-9 48.5,-9.8 36.8,-1.2 44.6,-2.8 53.9,-11.2 9.4,-8.5 10.8,-20 3.7,-30.6 -7.7,-11.7 -15.4,-15.1 -50.6,-22.2 -24.8,-5.1 -30,-6.3 -40.9,-9.7 l -7.3,-2.3 -5.5,2.9 c -9.6,5 -25.36942,18.22759 -38.5,31.3 
-19.59963,19.51281 -30.17386,36.16842 -42.7,67.6 -4.80076,12.04646 -7.8,26.5 -9.2,37.8 -1.6,13.7 -0.7,38.8 2,50.6 2.7,12.1 4.2,17.2 5.2,17.2 0.4,0 1.4,-2.4 2.3,-5.2 z"
|
||||||
|
id="path1"
|
||||||
|
sodipodi:nodetypes="ccccccccccccscccccccscccccccsccccccccccccccccccccscccsscccccccccccccccccssccsc"
|
||||||
|
style="fill:#041b31;fill-opacity:1" />
|
||||||
|
</g>
|
||||||
|
<g
|
||||||
|
fill="#003051"
|
||||||
|
id="g84"
|
||||||
|
style="display:inline"
|
||||||
|
inkscape:label="Very Dark Blue">
|
||||||
|
<path
|
||||||
|
d="M 230.4,602 C 175.34835,591.74645 169.18046,579.19949 127.38046,537.39949 126.28656,507.06066 124.35047,466.6837 125.4,421 c 3.1,7.5 6.91046,19.16537 8.35973,29.56569 3.51031,25.1907 16.4289,65.12981 36.44027,90.93431 22.43047,28.92391 69.16433,55.53771 88.55235,64.93033 C 249.09029,604.75095 241.4,604.1 230.4,602 Z"
|
||||||
|
id="path70"
|
||||||
|
sodipodi:nodetypes="cccsacc" />
|
||||||
|
<path
|
||||||
|
d="m 319.4,193.4 c -9.8,-5.8 -14.5,-7.1 -48.4,-14 -18.7,-3.7 -29,-4.8 -29,-6.5 0,-1.7 4.92805,-2.87104 12.5,-5.4 12.8566,-4.29398 19.24892,-5.98769 27.1,-7.9 24.01253,-5.84879 36.7,-8.7 48.4,-10.5 25.2,-4 35.7,-5.4 42.5,-5.5 6.2,-0.1 7.9,0.3 14.6,3.6 9.7,4.8 15.5,10 26.3,24 -32.58707,9.22703 -69.37398,17.37018 -94,22.2 z"
|
||||||
|
id="path77"
|
||||||
|
sodipodi:nodetypes="ccsssccccc" />
|
||||||
|
</g>
|
||||||
|
<g
|
||||||
|
fill="#033f64"
|
||||||
|
id="g97"
|
||||||
|
style="display:inline"
|
||||||
|
inkscape:label="Dark Blue">
|
||||||
|
<path
|
||||||
|
d="m 152.17414,396.63217 c 0.38601,-2.81096 5.82243,-25.08009 21.18483,-38.15736 33.76966,-28.74649 155.07007,-22.31003 192.71893,-28.8897 C 388.43397,313.23279 413.02792,214.49976 425.1,189.5 c 7.4,15 16.15078,54.97811 10.64936,81.97944 -4.26433,20.9296 -15.49967,42.2641 -32.45863,55.24972 -23.8158,18.23596 -36.39069,23.58236 -86.79073,23.77084 -83.29996,0.31152 -95.44833,-4.42471 -136.27417,16.21161 -12.20115,6.16734 -21.45976,18.1207 -28.05169,29.92056 z"
|
||||||
|
id="path118"
|
||||||
|
sodipodi:nodetypes="csccaasac"
|
||||||
|
style="display:inline;fill:#18759e;fill-opacity:1;fill-rule:nonzero" />
|
||||||
|
<path
|
||||||
|
d="M 183.5,588.1 C 115.8931,558.47699 107.64772,492.94457 88.1,430.2335 79,400.6335 76.84251,387.87492 75,366.15 c -1.824643,-21.51425 -3.417479,-43.86578 2.1,-64.7404 8.432657,-31.90379 27.29188,-60.49473 46.1,-87.6096 11.8141,-17.03188 24.95272,-33.78473 41.4,-46.4 13.29518,-10.19757 29.7308,-15.48328 44.9,-22.6 23.68008,-11.10966 63.61618,-31.81861 71.93442,-31.35243 3.81558,6.62743 29.05267,18.5147 28.43398,19.68762 0.31235,2.20322 -15.49372,-1.71368 -93.0684,32.46481 -30.64541,13.50201 -57.7,42.3 -74.5,67.4 -13.2,19.7 -23.8,43.8 -29.8,67.5 -5.2,20.6 -5.8,26.4 -5.2,45.7 0.8,25.7 4.5,42 15.4,68.8 l 5.5,13.5 0.3,13 c 0.1,7.1 0.6,15.1 1,17.6 0.4,2.6 1.31647,9.84975 0.81647,10.14975 -1.3,0.8 -0.71647,10.65025 1.78353,20.75025 2.9,11.9 13.6,43.4 17,50.1 9.51543,25.08025 19.6983,31.17451 34.4,48 z"
|
||||||
|
id="path92"
|
||||||
|
sodipodi:nodetypes="ccaaaaaccsccccccccccc"
|
||||||
|
style="fill:#18759e;fill-opacity:1" />
|
||||||
|
<path
|
||||||
|
d="M 336.53336,126.11775 C 326.2422,124.21015 287.27262,118.19694 281.1,72.4 398.98512,97.839775 428.5705,92.736362 481.94363,60.277903 c 0.3,15.65 -0.24934,17.091747 -5.11226,23.440508 -12.11958,15.82266 -34.57733,20.119399 -53.08407,27.518149 -15.89858,6.35605 -32.39842,11.77707 -49.33154,14.31356 -12.48954,1.87087 -28.16017,2.36977 -37.8824,0.56763 z"
|
||||||
|
id="path121"
|
||||||
|
sodipodi:nodetypes="sccaaas"
|
||||||
|
style="display:inline;fill:#18759e;fill-opacity:1" />
|
||||||
|
</g>
|
||||||
|
<g
|
||||||
|
fill="#c88700"
|
||||||
|
id="g133"
|
||||||
|
style="display:inline"
|
||||||
|
inkscape:label="Orange">
|
||||||
|
<path
|
||||||
|
d="M387.4 69.6 c-2.7 -2.7 -3.4 -4.2 -3.4 -7.4 0 -4.7 2.9 -8.8 7.6 -10.8 5.2 -2.2 7.3 -1.7 11.5 2.5 5.2 5.1 5.4 10.3 0.8 15.6 -2.8 3.1 -3.6 3.5 -8.1 3.5 -4.4 0 -5.4 -0.4 -8.4 -3.4z"
|
||||||
|
id="path125" />
|
||||||
|
<path
|
||||||
|
d="m 319.5,603.3 c -20.3,-1 -47.80327,-8.953 -69.9,-18.6 -12.64521,-5.52065 -23.8619,-13.95619 -35,-22.1 -5.09897,-3.72819 -9.99476,-7.77262 -14.5,-12.2 -8.10524,-7.96518 -17.7,-18.1 -22.4,-25.7 -13.9,-22.6 -23.4,-49.7 -26.7,-76.3 -1,-7.8 -0.9,-10.1 0.5,-15.5 3.5,-13.8 17.6,-39 26.3,-47.1 2.7,-2.6 8.1,-6.2 11.9,-8.1 8.6,-4.4 24.6,-9.3 33.8,-10.4 7.3,-0.9 66.1,-0.8 73,0.1 2.2,0.3 13.7,0.8 25.7,1.2 22.9,0.7 34.8,-0.2 49.2,-3.5 0,0 49.54914,-16.12943 68.7,-52.4 l 3.8,-7.2 0.1,6 c 0,8.5 -4.5,35.3 -7.5,44.2 -5.06001,15.02512 -12.78595,28.02413 -23.26042,39.12091 -9.81203,10.39498 -22.03592,19.12073 -36.73958,26.27909 -17.6,8.5 -16.2,8.2 -52,8.4 -30.6,0.1 -32.3,0.2 -37.6,2.3 -16.6,6.6 -26.4,18.6 -29.5,36.3 -1.6,8.9 -1.1,16.5 1.1,20.9 1.8,3.3 8.2,9.4 12.2,11.4 4.3,2.1 18.7,5.2 31.3,6.7 20.6,2.4 50,-1.8 71.5,-10.1 22.9,-8.9 41.8,-21.2 59,-38.4 18.5,-18.5 31.2,-39.3 39.5,-64.5 12.2,-37.2 12.4,-66.6 0.5,-107.7 -3.2,-11.2 -4.6,-14.9 -12,-30.8 -2.7,-6 -4.1,-11.8 -7,-30.5 -0.9,-5.7 -2.6,-13.8 -3.6,-18 -2.3,-9 -12.8,-31.1 -18.8,-39.6 -5.9,-8.4 -18.1,-21.5 -25.2,-27.1 -3.3,-2.6 -5.6,-5.1 -5.2,-5.5 0.4,-0.4 5.1,-1.9 10.3,-3.3 17.7,-5 26.1,-7.9 29.6,-10.2 1.9,-1.3 4.3,-2.4 5.2,-2.4 5,0.1 36,27 53.9,46.9 46.2,51.1 71.3,114.2 71.3,178.9 0,60.4 -17.3,114.5 -51.4,160.6 -14.1,19.3 -42.2,45.5 -64.6,60.6 -12.3,8.3 -21.8,13.2 -36.1,18.9 -40.2,15.9 -63.3,20.2 -99.4,18.4 z"
|
||||||
|
id="path131"
|
||||||
|
sodipodi:nodetypes="caaacccccccccccccsccccccccscccccccsccccscccc" />
|
||||||
|
</g>
|
||||||
|
<g
|
||||||
|
fill="#38b3d3"
|
||||||
|
id="g162"
|
||||||
|
style="display:inline"
|
||||||
|
inkscape:label="Blue">
|
||||||
|
<path
|
||||||
|
d="m 152.17414,396.63217 c -2.38519,-1.38132 9.27416,-52.79756 19.37815,-68.90898 16.15254,-24.81116 34.25689,-40.51929 62.0508,-48.64318 22.03094,-6.43944 64.62509,-4.00901 74.27424,-7.22545 5.13056,-1.80515 13.30143,-6.84069 18.81201,-11.68624 5.89061,-5.1305 11.1162,-14.91656 12.63633,-23.84749 1.71019,-9.88108 -0.47111,-21.90723 -6.47249,-30.32083 -2.85318,-4 -7.4141,-11.9156 -29.3718,-19.44781 19.92351,-5.56647 53.71798,-12.0993 80.70491,-17.53799 7.71528,-1.55487 13.91102,-2.63422 23.21371,-4.3142 21.30966,22.8642 21.21637,35.77338 26.93252,55.92264 0.0584,24.34066 -3.50141,45.36921 -16.09946,65.67248 -23.04998,44.93326 -65.30711,57.83541 -113.96611,59.38228 -72.68272,0.94776 -90.43688,2.59826 -116.76278,14.72068 -22.87446,10.53312 -33.71226,36.8281 -35.33003,36.23409 z"
|
||||||
|
id="path159"
|
||||||
|
sodipodi:nodetypes="csscccscacssssc"
|
||||||
|
style="display:inline;stroke-width:0.999987" />
|
||||||
|
<path
|
||||||
|
d="M 59.627728,486.61872 C 26.249201,426.79436 20.062286,396.1054 18.4,359.3 17.560667,340.71596 17.7,316.6 19.4,303.5 23.8,271.6 35.4,236 51.1,206 75.3,160.1 119.7,111.9 162.1,85.7 194.42327,64.457719 225.27293,54.821946 268,43 c -4.38883,35.093545 0.24301,53.781332 18.43033,75.35581 -16.19179,5.17933 -38.68025,13.24334 -44.53566,15.38169 -16.14313,5.89535 -49.89323,20.65189 -79.79467,47.7625 -27.4732,24.909 -59.81413,81.60725 -65.712627,143.66935 -4.157076,43.73944 6.451807,84.86847 34.031537,142.43409 3.43378,24.64602 9.97891,73.87903 71.35443,127.63575 C 125.61659,570.1535 67.391777,500.53423 59.627728,486.61872 Z"
|
||||||
|
id="path160"
|
||||||
|
sodipodi:nodetypes="sscccccsssccs"
|
||||||
|
style="display:inline" />
|
||||||
|
<path
|
||||||
|
d="m 332,111.5 c -7.6,-1.9 -19.1,-6.8 -24.2,-10.3 -5.6,-3.9 -14.42556,-11.925563 -21.72556,-10.225563 -0.59944,-1.638563 -2.45486,-5.992204 -3.00412,-8.525 C 277.37032,64.949437 281.9,46.6 294.8,33.2 c 6.5,-6.8 14.5,-10.9 27.7,-14.4 7,-1.9 10.6,-2.1 29,-2.1 28.2,0.1 42.1,2.5 71.2,12.3 6.8,2.2 19.1,5.8 27.4,8 16.6,4.4 23.6,7.6 28,12.9 2.6,3.2 3.87429,4.2 3.87429,11.2 v 7.7 L 473.8,73.7 c -4,2.8 -9.8,6.4 -12.8,8.1 -15.5,8.6 -69.4,26.1 -91.5,29.7 -11,1.8 -30.1,1.8 -37.5,0 z m 74.6,-27.4 c 8,-3.6 13.4,-13.3 13.4,-24 0,-7.1 -2.5,-12.5 -7.8,-17.3 -6.2,-5.6 -15.4,-7.3 -24.6,-4.6 -5.8,1.7 -14.1,10.2 -15.6,15.9 -3.2,11.9 3.1,25.6 14,30.3 4.9,2.1 15.5,2 20.6,-0.3 z"
|
||||||
|
id="path162"
|
||||||
|
sodipodi:nodetypes="ccccccccscsccccccsccccc" />
|
||||||
|
</g>
|
||||||
|
<g
|
||||||
|
fill="#fccd00"
|
||||||
|
id="g165"
|
||||||
|
style="display:inline"
|
||||||
|
inkscape:label="Yellow">
|
||||||
|
<path
|
||||||
|
d="M 290.57843,579.73223 C 262.53343,574.09041 238.11479,563.08508 212.75,550.7 189.86762,538.42339 184.68162,535.3415 175.4,519.55 c -7.00993,-11.92651 -30.58414,-55.74044 -23.8,-86.25 4.0198,-18.07777 14.86881,-43.99552 38.1,-55.6 16.46843,-0.10091 32.45479,1.52207 48.61284,3.12963 26.00767,2.58749 51.5763,9.85418 77.70491,10.47812 23.17389,0.55338 47.87531,2.89829 69.28278,-5.99304 22.20756,-9.22363 37.89511,-23.97358 55.12824,-46.53102 -2.5563,14.26912 -7.95593,45.65799 -44.98524,71.69133 -11.14814,7.83767 -23.62107,14.42481 -36.84575,17.7139 -10.72566,2.66757 -18.69625,1.20562 -33.13151,1.30575 C 310.59858,429.5978 291.1,429.3 281.1,434.3 c -12.2,6 -20.6,17.5 -23.7,32.3 -3.2,15.3 0.11875,24.31875 9.51875,31.11875 4.9,3.6 9.48125,5.48125 25.58125,8.38125 10.2,1.8 14.5,2 29,1.6 19.3,-0.6 27.7,-2.1 45,-7.8 65,-21.6 108.32042,-74.69846 114.2483,-146.4 0.5433,-6.57154 0.51635,-11.00098 0.35824,-16.5 -0.12685,-4.41201 -0.53376,-8.81617 -1.04757,-13.2 -0.31035,-2.64783 -0.73303,-5.28343 -1.22803,-7.90303 -1.04804,-5.54641 -2.17688,-11.08849 -3.68486,-16.52789 -3.8173,-13.76923 -7.04718,-27.944 -13.54608,-40.66908 -8.57845,-16.79692 -6.03317,-32.79012 -12.7776,-53.20969 -5.4006,-16.35095 -14.13511,-31.22562 -25.45092,-47.68672 9.20262,-3.00968 42.04296,-13.97755 50.15501,-17.80255 10.28756,9.39474 26.84483,25.52589 38.78601,40.81146 30.4959,39.03695 51.65187,83.78847 56.2875,132.1875 4.21372,43.99397 -0.37701,62.58021 -7.1,82.25 -6.8,20.7 -14.2,35.95 -22.6,53.65 -14.8,30.9 -37.8,59.1 -65.1,79.7 -34.6,26.2 -53.59209,36.03122 -84.7,43.9 -28.19212,7.13123 -69.76059,13.01808 -98.52157,7.23223 z"
|
||||||
|
id="path163"
|
||||||
|
sodipodi:nodetypes="sssscaaacsasccccccsaaaassccsscccss"
|
||||||
|
style="display:inline" />
|
||||||
|
<path
|
||||||
|
d="M 391.3,71.5 C 387.8,70 384,64.8 384,61.4 c 0,-2.7 3.4,-7 7,-8.9 4.9,-2.5 7.8,-1.9 12.2,2.5 3.6,3.5 4,4.4 3.5,7.5 -0.7,4.5 -3.5,8.2 -7.1,9.5 -3.7,1.3 -4.4,1.2 -8.3,-0.5 z"
|
||||||
|
id="path165" />
|
||||||
|
</g>
|
||||||
|
<g
|
||||||
|
fill="#fafcfc"
|
||||||
|
id="g183"
|
||||||
|
inkscape:label="White"
|
||||||
|
style="display:inline">
|
||||||
|
<path
|
||||||
|
d="M 292.22204,510.87608 C 280.22101,508.20541 268.81402,500.34672 263.69227,494.9842 275.64093,505.5687 304.1,508.3 321.5,507.7 c 21.55,-2.225 49.37501,-6.43114 86.62589,-28.91732 22.61919,-13.65389 51.87112,-50.42418 60.53015,-75.76929 6.66561,-19.51032 10.07957,-35.4123 12.39396,-53.90714 3.1459,18.64649 1.15198,36.57617 -1.3,46.46875 -2.9,11.1 -6.35,24.125 -11.95,34.225 -8.3,15.1 -27.2,38.1 -39.1,47.8 -25.5,20.5 -61.64365,33.01311 -92.85,36.3 -15.06775,1.58705 -35.15198,-1.1377 -43.62796,-3.02392 z"
|
||||||
|
id="path174"
|
||||||
|
sodipodi:nodetypes="sccssccccss" />
|
||||||
|
<path
|
||||||
|
d="M 28.4,416.5 C 15.349709,374.67557 18.014551,365.86291 17.43688,340.1625 17.048048,322.86353 19.119484,305.4699 22.5,288.5 c 10.62259,-53.3245 29.9,-91.9 61.3,-131 11,-13.7 40.9,-44 52.7,-53.5 C 166.2,80.3 209,59.4 252,47.5 c 8.5,-2.3 15.6,-4.2 15.7,-4.1 0.1,0.1 -0.4,3.8 -1.2,8.1 -0.8,4.4 -1.4,8.1 -1.5,8.3 0,0.1 -0.8,0.2 -1.9,0.2 -1,0 -6.3,1.4 -11.7,3 -41.6,12.8 -72.7,28.3 -103.6,51.7 -24.8,18.7 -39.9,34 -59.6,60 C 63.3,207.6 42.3,251 34.6,285.5 29.2,309.4 26.825886,327.09972 25.755456,348.16934 24.598916,370.93392 24.8,389.7 28.4,416.5 Z"
|
||||||
|
id="path178"
|
||||||
|
sodipodi:nodetypes="cascccsccsccccac" />
|
||||||
|
<path
|
||||||
|
d="m 208.22773,289.96967 c 9.51882,-5.66851 21.67237,-10.67386 30.98163,-12.63033 5.43202,-1.14162 18.645,-2.6057 32.04905,-3.10711 14.85841,-0.5558 26.43935,0.0727 34.62618,-2.66291 17.29397,-5.77872 28.56982,-17.26767 32.18039,-30.34042 1.49085,-5.3979 2.16985,-10.98219 1.55113,-16.06452 -0.70068,-5.7556 -3.89365,-15.38399 -6.46854,-18.70034 7.65573,3.55244 13.50421,17.23897 13.20338,31.10442 -0.37371,17.22406 -13.0606,32.1577 -24.74645,38.26377 -9.47406,4.95038 -29.08518,7.77124 -44.57677,8.07938 -10.95355,0.21788 -20.76029,0.67236 -31.82773,2.18839 -11.53232,1.57971 -30.58589,8.52074 -45.60676,17.46672 -7.81866,4.65656 -18.21827,12.44919 -21.26902,14.46609 4.45077,-6.22439 16.85283,-20.2914 29.90351,-28.06314 z"
|
||||||
|
id="path181"
|
||||||
|
sodipodi:nodetypes="sssssscssssscs" />
|
||||||
|
<path
|
||||||
|
d="m 282.3,76.8 c -1.6,-2.7 -0.6,-19.1 1.6,-25.2 4.3,-12 13.6,-22.7 23.4,-27.1 12.6,-5.5 18.3,-6.7 36.2,-7.2 29.7,-0.9 49.3,2 77,11.3 7.2,2.4 19.8,6.1 28.2,8.3 19.3,5.1 26.3,8.5 30.5,14.9 1.6,2.4 2.5,8.2 1.3,8.2 -0.3,0 -2.6,-1.3 -5.2,-2.9 C 470.5,54.2 463.8,51.9 442,46 435.7,44.2 426,41.1 420.5,39 415,36.9 408.9,34.7 407,34.1 c -12,-3.7 -49.7,-5.9 -71.3,-4.2 -11,0.8 -13.8,1.4 -19.4,4.1 -12.9,6 -24.1,20.5 -27.6,35.9 -1.7,7.2 -4.3,10.1 -6.4,6.9 z"
|
||||||
|
id="path183" />
|
||||||
|
</g>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 15 KiB |
BIN
docs/images/logo-with-text.png
Normal file
|
After Width: | Height: | Size: 22 KiB |
145
docs/images/logo-with-text.svg
Normal file
@@ -0,0 +1,145 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||||
|
|
||||||
|
<svg
|
||||||
|
width="84.373627mm"
|
||||||
|
height="29.06181mm"
|
||||||
|
viewBox="0 0 84.373627 29.06181"
|
||||||
|
version="1.1"
|
||||||
|
id="svg1"
|
||||||
|
xml:space="preserve"
|
||||||
|
inkscape:version="1.4 (e7c3feb100, 2024-10-09)"
|
||||||
|
sodipodi:docname="logo-with-text.svg"
|
||||||
|
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||||
|
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
xmlns:svg="http://www.w3.org/2000/svg"><sodipodi:namedview
|
||||||
|
id="namedview1"
|
||||||
|
pagecolor="#ffffff"
|
||||||
|
bordercolor="#000000"
|
||||||
|
borderopacity="0.25"
|
||||||
|
inkscape:showpageshadow="2"
|
||||||
|
inkscape:pageopacity="0.0"
|
||||||
|
inkscape:pagecheckerboard="0"
|
||||||
|
inkscape:deskcolor="#d1d1d1"
|
||||||
|
inkscape:document-units="mm"
|
||||||
|
inkscape:zoom="1.6080267"
|
||||||
|
inkscape:cx="230.09568"
|
||||||
|
inkscape:cy="46.019136"
|
||||||
|
inkscape:window-width="1920"
|
||||||
|
inkscape:window-height="1011"
|
||||||
|
inkscape:window-x="26"
|
||||||
|
inkscape:window-y="23"
|
||||||
|
inkscape:window-maximized="0"
|
||||||
|
inkscape:current-layer="layer1" /><defs
|
||||||
|
id="defs1" /><g
|
||||||
|
inkscape:label="Layer 1"
|
||||||
|
inkscape:groupmode="layer"
|
||||||
|
id="layer1"
|
||||||
|
transform="translate(-27.646074,-133.9691)"><g
|
||||||
|
id="g2"
|
||||||
|
transform="matrix(0.04656788,0,0,0.04656788,27.572788,133.92718)"
|
||||||
|
style="stroke-width:5.68167"><g
|
||||||
|
fill="#041b31"
|
||||||
|
id="g1"
|
||||||
|
style="display:inline;stroke-width:5.68167"
|
||||||
|
inkscape:label="Contour"><path
|
||||||
|
d="m 249,624.5 c -0.8,-0.2 -4.9,-0.8 -9,-1.5 -23.8,-3.7 -65.4,-19 -91,-33.5 C 115.5,570.6 81,540.3 58.3,510 41.3,487.2 23.6,454.3 16.2,431.5 8.8,408.8 8.3,406.8 4.9,387.5 1.9,370.5 1.8,368 1.6,342 1.5,313.2 1.4,314 7.1,282.6 18.3,221.6 48.7,167 100.4,115.5 116.6,99.3 126.7,90.8 142.5,80.1 158.5,69.3 182.9,56 199.5,49 210.6,44.4 240.6,34.4 252,31.5 c 7.3,-1.8 22.4,-4.5 25.5,-4.5 0.2,0 2.7,-2.1 5.7,-4.6 C 301.8,6.5 318.4,1 348,0.9 c 17.1,0 36.4,1.4 46,3.2 3,0.6 14.7,4 26,7.4 11.3,3.5 27.3,8.2 35.5,10.4 17.5,4.8 27.3,9.3 33.4,15.3 5.5,5.5 8.1,10.7 8.8,17.4 0.3,3 0.9,5.4 1.4,5.4 4,0 19.5,-9.6 30.7,-19 8.1,-6.9 9.3,-6.9 11.3,-0.1 2,6.6 -0.6,10 -19,25.9 l -3.5,2.9 10.6,10.4 c 13.4,13.2 17.8,21.1 12.4,22.5 -2.9,0.7 -4.8,-0.3 -15.2,-7.8 C 516.1,87.4 503.2,80 500.5,80 c -1.6,0 -2.9,1.5 -5,6.1 -3.8,7.9 -13.7,17.7 -22.6,22.4 l -6.8,3.6 4.7,4.2 c 18.1,16.2 30.1,28 40.8,40 15.1,16.9 22.8,27 32.1,42.4 6.9,11.4 22.2,41.2 23.8,46.3 0.4,1.4 1.6,4.3 2.6,6.5 4.9,10.7 10.9,34.8 14.6,58.5 2.7,17.9 2.5,58.7 -0.5,77.8 -5.3,33.5 -9.2,47.1 -21.3,73.7 -12.6,27.8 -24.1,46.3 -40.8,65.6 -19.2,22.3 -38.5,39.4 -60.5,53.8 -10.2,6.6 -43.5,23 -54.7,26.9 -16.2,5.7 -44,11 -69.1,13.2 -6.9,0.6 -17.5,1.7 -23.5,2.5 -9.4,1.3 -59.9,2 -65.3,1 z m 99.5,-135.4 c 36.7,-9.2 67.4,-29.4 87.4,-57.6 7.2,-10.3 17.8,-31.2 21.6,-42.9 5.7,-17.8 7,-26.5 7,-48.3 0,-18 -0.4,-22.7 -2,-21.2 -0.2,0.3 -1.1,5 -2,10.4 -5.4,34.9 -14.4,55.5 -32.5,74.8 -16.6,17.7 -36.73987,31.75263 -59.4,38.2 -7.25764,2.06498 -18.96791,3.46589 -37.2,4.4 -35.48106,1.81785 -36.6,1.6 -43.6,5.3 -12.5,6.7 -18.3,17.8 -14.4,27.3 2,4.7 6.3,7.1 17.1,9.5 12.5,2.8 13.8,2.9 33,2.5 12.8,-0.3 19,-0.8 25,-2.4 z M 134.4,385.8 c 0.8,-2.9 2.5,-8.9 3.6,-13.3 7.9,-29.5 14.4,-45.5 25.2,-62 7.4,-11.4 12,-16.1 27,-27.5 8.1,-6.1 13.6,-9.4 23.3,-13.6 18.4,-8.1 23.2,-9 48.5,-9.8 36.8,-1.2 44.6,-2.8 53.9,-11.2 9.4,-8.5 10.8,-20 3.7,-30.6 -7.7,-11.7 -15.4,-15.1 -50.6,-22.2 -24.8,-5.1 -30,-6.3 -40.9,-9.7 l -7.3,-2.3 -5.5,2.9 c -9.6,5 -25.36942,18.22759 -38.5,31.3 
-19.59963,19.51281 -30.17386,36.16842 -42.7,67.6 -4.80076,12.04646 -7.8,26.5 -9.2,37.8 -1.6,13.7 -0.7,38.8 2,50.6 2.7,12.1 4.2,17.2 5.2,17.2 0.4,0 1.4,-2.4 2.3,-5.2 z"
|
||||||
|
id="path1"
|
||||||
|
sodipodi:nodetypes="ccccccccccccscccccccscccccccsccccccccccccccccccccscccsscccccccccccccccccssccsc"
|
||||||
|
style="fill:#041b31;fill-opacity:1;stroke-width:5.68167" /></g><g
|
||||||
|
fill="#003051"
|
||||||
|
id="g84"
|
||||||
|
style="display:inline;stroke-width:5.68167"
|
||||||
|
inkscape:label="Very Dark Blue"><path
|
||||||
|
d="M 230.4,602 C 175.34835,591.74645 169.18046,579.19949 127.38046,537.39949 126.28656,507.06066 124.35047,466.6837 125.4,421 c 3.1,7.5 6.91046,19.16537 8.35973,29.56569 3.51031,25.1907 16.4289,65.12981 36.44027,90.93431 22.43047,28.92391 69.16433,55.53771 88.55235,64.93033 C 249.09029,604.75095 241.4,604.1 230.4,602 Z"
|
||||||
|
id="path70"
|
||||||
|
sodipodi:nodetypes="cccsacc"
|
||||||
|
style="stroke-width:5.68167" /><path
|
||||||
|
d="m 319.4,193.4 c -9.8,-5.8 -14.5,-7.1 -48.4,-14 -18.7,-3.7 -29,-4.8 -29,-6.5 0,-1.7 4.92805,-2.87104 12.5,-5.4 12.8566,-4.29398 19.24892,-5.98769 27.1,-7.9 24.01253,-5.84879 36.7,-8.7 48.4,-10.5 25.2,-4 35.7,-5.4 42.5,-5.5 6.2,-0.1 7.9,0.3 14.6,3.6 9.7,4.8 15.5,10 26.3,24 -32.58707,9.22703 -69.37398,17.37018 -94,22.2 z"
|
||||||
|
id="path77"
|
||||||
|
sodipodi:nodetypes="ccsssccccc"
|
||||||
|
style="stroke-width:5.68167" /></g><g
|
||||||
|
fill="#033f64"
|
||||||
|
id="g97"
|
||||||
|
style="display:inline;stroke-width:5.68167"
|
||||||
|
inkscape:label="Dark Blue"><path
|
||||||
|
d="m 152.17414,396.63217 c 0.38601,-2.81096 5.82243,-25.08009 21.18483,-38.15736 33.76966,-28.74649 155.07007,-22.31003 192.71893,-28.8897 C 388.43397,313.23279 413.02792,214.49976 425.1,189.5 c 7.4,15 16.15078,54.97811 10.64936,81.97944 -4.26433,20.9296 -15.49967,42.2641 -32.45863,55.24972 -23.8158,18.23596 -36.39069,23.58236 -86.79073,23.77084 -83.29996,0.31152 -95.44833,-4.42471 -136.27417,16.21161 -12.20115,6.16734 -21.45976,18.1207 -28.05169,29.92056 z"
|
||||||
|
id="path118"
|
||||||
|
sodipodi:nodetypes="csccaasac"
|
||||||
|
style="display:inline;fill:#18759e;fill-opacity:1;fill-rule:nonzero;stroke-width:5.68167" /><path
|
||||||
|
d="M 183.5,588.1 C 115.8931,558.47699 107.64772,492.94457 88.1,430.2335 79,400.6335 76.84251,387.87492 75,366.15 c -1.824643,-21.51425 -3.417479,-43.86578 2.1,-64.7404 8.432657,-31.90379 27.29188,-60.49473 46.1,-87.6096 11.8141,-17.03188 24.95272,-33.78473 41.4,-46.4 13.29518,-10.19757 29.7308,-15.48328 44.9,-22.6 23.68008,-11.10966 63.61618,-31.81861 71.93442,-31.35243 3.81558,6.62743 29.05267,18.5147 28.43398,19.68762 0.31235,2.20322 -15.49372,-1.71368 -93.0684,32.46481 -30.64541,13.50201 -57.7,42.3 -74.5,67.4 -13.2,19.7 -23.8,43.8 -29.8,67.5 -5.2,20.6 -5.8,26.4 -5.2,45.7 0.8,25.7 4.5,42 15.4,68.8 l 5.5,13.5 0.3,13 c 0.1,7.1 0.6,15.1 1,17.6 0.4,2.6 1.31647,9.84975 0.81647,10.14975 -1.3,0.8 -0.71647,10.65025 1.78353,20.75025 2.9,11.9 13.6,43.4 17,50.1 9.51543,25.08025 19.6983,31.17451 34.4,48 z"
|
||||||
|
id="path92"
|
||||||
|
sodipodi:nodetypes="ccaaaaaccsccccccccccc"
|
||||||
|
style="fill:#18759e;fill-opacity:1;stroke-width:5.68167" /><path
|
||||||
|
d="M 336.53336,126.11775 C 326.2422,124.21015 287.27262,118.19694 281.1,72.4 398.98512,97.839775 428.5705,92.736362 481.94363,60.277903 c 0.3,15.65 -0.24934,17.091747 -5.11226,23.440508 -12.11958,15.82266 -34.57733,20.119399 -53.08407,27.518149 -15.89858,6.35605 -32.39842,11.77707 -49.33154,14.31356 -12.48954,1.87087 -28.16017,2.36977 -37.8824,0.56763 z"
|
||||||
|
id="path121"
|
||||||
|
sodipodi:nodetypes="sccaaas"
|
||||||
|
style="display:inline;fill:#18759e;fill-opacity:1;stroke-width:5.68167" /></g><g
|
||||||
|
fill="#c88700"
|
||||||
|
id="g133"
|
||||||
|
style="display:inline;stroke-width:5.68167"
|
||||||
|
inkscape:label="Orange"><path
|
||||||
|
d="m 387.4,69.6 c -2.7,-2.7 -3.4,-4.2 -3.4,-7.4 0,-4.7 2.9,-8.8 7.6,-10.8 5.2,-2.2 7.3,-1.7 11.5,2.5 5.2,5.1 5.4,10.3 0.8,15.6 -2.8,3.1 -3.6,3.5 -8.1,3.5 -4.4,0 -5.4,-0.4 -8.4,-3.4 z"
|
||||||
|
id="path125"
|
||||||
|
style="stroke-width:5.68167" /><path
|
||||||
|
d="m 319.5,603.3 c -20.3,-1 -47.80327,-8.953 -69.9,-18.6 -12.64521,-5.52065 -23.8619,-13.95619 -35,-22.1 -5.09897,-3.72819 -9.99476,-7.77262 -14.5,-12.2 -8.10524,-7.96518 -17.7,-18.1 -22.4,-25.7 -13.9,-22.6 -23.4,-49.7 -26.7,-76.3 -1,-7.8 -0.9,-10.1 0.5,-15.5 3.5,-13.8 17.6,-39 26.3,-47.1 2.7,-2.6 8.1,-6.2 11.9,-8.1 8.6,-4.4 24.6,-9.3 33.8,-10.4 7.3,-0.9 66.1,-0.8 73,0.1 2.2,0.3 13.7,0.8 25.7,1.2 22.9,0.7 34.8,-0.2 49.2,-3.5 0,0 49.54914,-16.12943 68.7,-52.4 l 3.8,-7.2 0.1,6 c 0,8.5 -4.5,35.3 -7.5,44.2 -5.06001,15.02512 -12.78595,28.02413 -23.26042,39.12091 -9.81203,10.39498 -22.03592,19.12073 -36.73958,26.27909 -17.6,8.5 -16.2,8.2 -52,8.4 -30.6,0.1 -32.3,0.2 -37.6,2.3 -16.6,6.6 -26.4,18.6 -29.5,36.3 -1.6,8.9 -1.1,16.5 1.1,20.9 1.8,3.3 8.2,9.4 12.2,11.4 4.3,2.1 18.7,5.2 31.3,6.7 20.6,2.4 50,-1.8 71.5,-10.1 22.9,-8.9 41.8,-21.2 59,-38.4 18.5,-18.5 31.2,-39.3 39.5,-64.5 12.2,-37.2 12.4,-66.6 0.5,-107.7 -3.2,-11.2 -4.6,-14.9 -12,-30.8 -2.7,-6 -4.1,-11.8 -7,-30.5 -0.9,-5.7 -2.6,-13.8 -3.6,-18 -2.3,-9 -12.8,-31.1 -18.8,-39.6 -5.9,-8.4 -18.1,-21.5 -25.2,-27.1 -3.3,-2.6 -5.6,-5.1 -5.2,-5.5 0.4,-0.4 5.1,-1.9 10.3,-3.3 17.7,-5 26.1,-7.9 29.6,-10.2 1.9,-1.3 4.3,-2.4 5.2,-2.4 5,0.1 36,27 53.9,46.9 46.2,51.1 71.3,114.2 71.3,178.9 0,60.4 -17.3,114.5 -51.4,160.6 -14.1,19.3 -42.2,45.5 -64.6,60.6 -12.3,8.3 -21.8,13.2 -36.1,18.9 -40.2,15.9 -63.3,20.2 -99.4,18.4 z"
|
||||||
|
id="path131"
|
||||||
|
sodipodi:nodetypes="caaacccccccccccccsccccccccscccccccsccccscccc"
|
||||||
|
style="stroke-width:5.68167" /></g><g
|
||||||
|
fill="#38b3d3"
|
||||||
|
id="g162"
|
||||||
|
style="display:inline;stroke-width:5.68167"
|
||||||
|
inkscape:label="Blue"><path
|
||||||
|
d="m 152.17414,396.63217 c -2.38519,-1.38132 9.27416,-52.79756 19.37815,-68.90898 16.15254,-24.81116 34.25689,-40.51929 62.0508,-48.64318 22.03094,-6.43944 64.62509,-4.00901 74.27424,-7.22545 5.13056,-1.80515 13.30143,-6.84069 18.81201,-11.68624 5.89061,-5.1305 11.1162,-14.91656 12.63633,-23.84749 1.71019,-9.88108 -0.47111,-21.90723 -6.47249,-30.32083 -2.85318,-4 -7.4141,-11.9156 -29.3718,-19.44781 19.92351,-5.56647 53.71798,-12.0993 80.70491,-17.53799 7.71528,-1.55487 13.91102,-2.63422 23.21371,-4.3142 21.30966,22.8642 21.21637,35.77338 26.93252,55.92264 0.0584,24.34066 -3.50141,45.36921 -16.09946,65.67248 -23.04998,44.93326 -65.30711,57.83541 -113.96611,59.38228 -72.68272,0.94776 -90.43688,2.59826 -116.76278,14.72068 -22.87446,10.53312 -33.71226,36.8281 -35.33003,36.23409 z"
|
||||||
|
id="path159"
|
||||||
|
sodipodi:nodetypes="csscccscacssssc"
|
||||||
|
style="display:inline;stroke-width:5.68161" /><path
|
||||||
|
d="M 59.627728,486.61872 C 26.249201,426.79436 20.062286,396.1054 18.4,359.3 17.560667,340.71596 17.7,316.6 19.4,303.5 23.8,271.6 35.4,236 51.1,206 75.3,160.1 119.7,111.9 162.1,85.7 194.42327,64.457719 225.27293,54.821946 268,43 c -4.38883,35.093545 0.24301,53.781332 18.43033,75.35581 -16.19179,5.17933 -38.68025,13.24334 -44.53566,15.38169 -16.14313,5.89535 -49.89323,20.65189 -79.79467,47.7625 -27.4732,24.909 -59.81413,81.60725 -65.712627,143.66935 -4.157076,43.73944 6.451807,84.86847 34.031537,142.43409 3.43378,24.64602 9.97891,73.87903 71.35443,127.63575 C 125.61659,570.1535 67.391777,500.53423 59.627728,486.61872 Z"
|
||||||
|
id="path160"
|
||||||
|
sodipodi:nodetypes="sscccccsssccs"
|
||||||
|
style="display:inline;stroke-width:5.68167" /><path
|
||||||
|
d="m 332,111.5 c -7.6,-1.9 -19.1,-6.8 -24.2,-10.3 -5.6,-3.9 -14.42556,-11.925563 -21.72556,-10.225563 -0.59944,-1.638563 -2.45486,-5.992204 -3.00412,-8.525 C 277.37032,64.949437 281.9,46.6 294.8,33.2 c 6.5,-6.8 14.5,-10.9 27.7,-14.4 7,-1.9 10.6,-2.1 29,-2.1 28.2,0.1 42.1,2.5 71.2,12.3 6.8,2.2 19.1,5.8 27.4,8 16.6,4.4 23.6,7.6 28,12.9 2.6,3.2 3.87429,4.2 3.87429,11.2 v 7.7 L 473.8,73.7 c -4,2.8 -9.8,6.4 -12.8,8.1 -15.5,8.6 -69.4,26.1 -91.5,29.7 -11,1.8 -30.1,1.8 -37.5,0 z m 74.6,-27.4 c 8,-3.6 13.4,-13.3 13.4,-24 0,-7.1 -2.5,-12.5 -7.8,-17.3 -6.2,-5.6 -15.4,-7.3 -24.6,-4.6 -5.8,1.7 -14.1,10.2 -15.6,15.9 -3.2,11.9 3.1,25.6 14,30.3 4.9,2.1 15.5,2 20.6,-0.3 z"
|
||||||
|
id="path162"
|
||||||
|
sodipodi:nodetypes="ccccccccscsccccccsccccc"
|
||||||
|
style="stroke-width:5.68167" /></g><g
|
||||||
|
fill="#fccd00"
|
||||||
|
id="g165"
|
||||||
|
style="display:inline;stroke-width:5.68167"
|
||||||
|
inkscape:label="Yellow"><path
|
||||||
|
d="M 290.57843,579.73223 C 262.53343,574.09041 238.11479,563.08508 212.75,550.7 189.86762,538.42339 184.68162,535.3415 175.4,519.55 c -7.00993,-11.92651 -30.58414,-55.74044 -23.8,-86.25 4.0198,-18.07777 14.86881,-43.99552 38.1,-55.6 16.46843,-0.10091 32.45479,1.52207 48.61284,3.12963 26.00767,2.58749 51.5763,9.85418 77.70491,10.47812 23.17389,0.55338 47.87531,2.89829 69.28278,-5.99304 22.20756,-9.22363 37.89511,-23.97358 55.12824,-46.53102 -2.5563,14.26912 -7.95593,45.65799 -44.98524,71.69133 -11.14814,7.83767 -23.62107,14.42481 -36.84575,17.7139 -10.72566,2.66757 -18.69625,1.20562 -33.13151,1.30575 C 310.59858,429.5978 291.1,429.3 281.1,434.3 c -12.2,6 -20.6,17.5 -23.7,32.3 -3.2,15.3 0.11875,24.31875 9.51875,31.11875 4.9,3.6 9.48125,5.48125 25.58125,8.38125 10.2,1.8 14.5,2 29,1.6 19.3,-0.6 27.7,-2.1 45,-7.8 65,-21.6 108.32042,-74.69846 114.2483,-146.4 0.5433,-6.57154 0.51635,-11.00098 0.35824,-16.5 -0.12685,-4.41201 -0.53376,-8.81617 -1.04757,-13.2 -0.31035,-2.64783 -0.73303,-5.28343 -1.22803,-7.90303 -1.04804,-5.54641 -2.17688,-11.08849 -3.68486,-16.52789 -3.8173,-13.76923 -7.04718,-27.944 -13.54608,-40.66908 -8.57845,-16.79692 -6.03317,-32.79012 -12.7776,-53.20969 -5.4006,-16.35095 -14.13511,-31.22562 -25.45092,-47.68672 9.20262,-3.00968 42.04296,-13.97755 50.15501,-17.80255 10.28756,9.39474 26.84483,25.52589 38.78601,40.81146 30.4959,39.03695 51.65187,83.78847 56.2875,132.1875 4.21372,43.99397 -0.37701,62.58021 -7.1,82.25 -6.8,20.7 -14.2,35.95 -22.6,53.65 -14.8,30.9 -37.8,59.1 -65.1,79.7 -34.6,26.2 -53.59209,36.03122 -84.7,43.9 -28.19212,7.13123 -69.76059,13.01808 -98.52157,7.23223 z"
|
||||||
|
id="path163"
|
||||||
|
sodipodi:nodetypes="sssscaaacsasccccccsaaaassccsscccss"
|
||||||
|
style="display:inline;stroke-width:5.68167" /><path
|
||||||
|
d="M 391.3,71.5 C 387.8,70 384,64.8 384,61.4 c 0,-2.7 3.4,-7 7,-8.9 4.9,-2.5 7.8,-1.9 12.2,2.5 3.6,3.5 4,4.4 3.5,7.5 -0.7,4.5 -3.5,8.2 -7.1,9.5 -3.7,1.3 -4.4,1.2 -8.3,-0.5 z"
|
||||||
|
id="path165"
|
||||||
|
style="stroke-width:5.68167" /></g><g
|
||||||
|
fill="#fafcfc"
|
||||||
|
id="g183"
|
||||||
|
inkscape:label="White"
|
||||||
|
style="display:inline;stroke-width:5.68167"><path
|
||||||
|
d="M 292.22204,510.87608 C 280.22101,508.20541 268.81402,500.34672 263.69227,494.9842 275.64093,505.5687 304.1,508.3 321.5,507.7 c 21.55,-2.225 49.37501,-6.43114 86.62589,-28.91732 22.61919,-13.65389 51.87112,-50.42418 60.53015,-75.76929 6.66561,-19.51032 10.07957,-35.4123 12.39396,-53.90714 3.1459,18.64649 1.15198,36.57617 -1.3,46.46875 -2.9,11.1 -6.35,24.125 -11.95,34.225 -8.3,15.1 -27.2,38.1 -39.1,47.8 -25.5,20.5 -61.64365,33.01311 -92.85,36.3 -15.06775,1.58705 -35.15198,-1.1377 -43.62796,-3.02392 z"
|
||||||
|
id="path174"
|
||||||
|
sodipodi:nodetypes="sccssccccss"
|
||||||
|
style="stroke-width:5.68167" /><path
|
||||||
|
d="M 28.4,416.5 C 15.349709,374.67557 18.014551,365.86291 17.43688,340.1625 17.048048,322.86353 19.119484,305.4699 22.5,288.5 c 10.62259,-53.3245 29.9,-91.9 61.3,-131 11,-13.7 40.9,-44 52.7,-53.5 C 166.2,80.3 209,59.4 252,47.5 c 8.5,-2.3 15.6,-4.2 15.7,-4.1 0.1,0.1 -0.4,3.8 -1.2,8.1 -0.8,4.4 -1.4,8.1 -1.5,8.3 0,0.1 -0.8,0.2 -1.9,0.2 -1,0 -6.3,1.4 -11.7,3 -41.6,12.8 -72.7,28.3 -103.6,51.7 -24.8,18.7 -39.9,34 -59.6,60 C 63.3,207.6 42.3,251 34.6,285.5 29.2,309.4 26.825886,327.09972 25.755456,348.16934 24.598916,370.93392 24.8,389.7 28.4,416.5 Z"
|
||||||
|
id="path178"
|
||||||
|
sodipodi:nodetypes="cascccsccsccccac"
|
||||||
|
style="stroke-width:5.68167" /><path
|
||||||
|
d="m 208.22773,289.96967 c 9.51882,-5.66851 21.67237,-10.67386 30.98163,-12.63033 5.43202,-1.14162 18.645,-2.6057 32.04905,-3.10711 14.85841,-0.5558 26.43935,0.0727 34.62618,-2.66291 17.29397,-5.77872 28.56982,-17.26767 32.18039,-30.34042 1.49085,-5.3979 2.16985,-10.98219 1.55113,-16.06452 -0.70068,-5.7556 -3.89365,-15.38399 -6.46854,-18.70034 7.65573,3.55244 13.50421,17.23897 13.20338,31.10442 -0.37371,17.22406 -13.0606,32.1577 -24.74645,38.26377 -9.47406,4.95038 -29.08518,7.77124 -44.57677,8.07938 -10.95355,0.21788 -20.76029,0.67236 -31.82773,2.18839 -11.53232,1.57971 -30.58589,8.52074 -45.60676,17.46672 -7.81866,4.65656 -18.21827,12.44919 -21.26902,14.46609 4.45077,-6.22439 16.85283,-20.2914 29.90351,-28.06314 z"
|
||||||
|
id="path181"
|
||||||
|
sodipodi:nodetypes="sssssscssssscs"
|
||||||
|
style="stroke-width:5.68167" /><path
|
||||||
|
d="m 282.3,76.8 c -1.6,-2.7 -0.6,-19.1 1.6,-25.2 4.3,-12 13.6,-22.7 23.4,-27.1 12.6,-5.5 18.3,-6.7 36.2,-7.2 29.7,-0.9 49.3,2 77,11.3 7.2,2.4 19.8,6.1 28.2,8.3 19.3,5.1 26.3,8.5 30.5,14.9 1.6,2.4 2.5,8.2 1.3,8.2 -0.3,0 -2.6,-1.3 -5.2,-2.9 C 470.5,54.2 463.8,51.9 442,46 435.7,44.2 426,41.1 420.5,39 415,36.9 408.9,34.7 407,34.1 c -12,-3.7 -49.7,-5.9 -71.3,-4.2 -11,0.8 -13.8,1.4 -19.4,4.1 -12.9,6 -24.1,20.5 -27.6,35.9 -1.7,7.2 -4.3,10.1 -6.4,6.9 z"
|
||||||
|
id="path183"
|
||||||
|
style="stroke-width:5.68167" /></g></g><text
|
||||||
|
xml:space="preserve"
|
||||||
|
style="font-size:11.2889px;font-family:Comfortaa;-inkscape-font-specification:Comfortaa;text-align:center;writing-mode:lr-tb;direction:ltr;text-anchor:middle;opacity:0.66761;fill:#083f91;stroke-width:3.307;stroke-linejoin:round;stroke-miterlimit:2.6"
|
||||||
|
x="91.349724"
|
||||||
|
y="151.56494"
|
||||||
|
id="text2"><tspan
|
||||||
|
sodipodi:role="line"
|
||||||
|
id="tspan2"
|
||||||
|
style="font-size:11.2889px;fill:#000000;fill-opacity:1;stroke-width:3.307"
|
||||||
|
x="91.349724"
|
||||||
|
y="151.56494">pydase</tspan></text></g></svg>
|
||||||
|
After Width: | Height: | Size: 17 KiB |
@@ -4,13 +4,14 @@
|
|||||||
end="<!--introduction-end-->"
|
end="<!--introduction-end-->"
|
||||||
%}
|
%}
|
||||||
|
|
||||||
|
[pydase Banner]: ./images/logo-with-text.png
|
||||||
[License]: ./about/license.md
|
[License]: ./about/license.md
|
||||||
[Observer Pattern]: ./dev-guide/Observer_Pattern_Implementation.md
|
[Observer Pattern]: ./dev-guide/Observer_Pattern_Implementation.md
|
||||||
[Service Persistence]: ./user-guide/Service_Persistence.md
|
[Service Persistence]: ./user-guide/Service_Persistence.md
|
||||||
[Defining DataService]: ./getting-started.md#defining-a-dataservice
|
[Defining DataService]: ./getting-started.md#defining-a-dataservice
|
||||||
[Web Interface Access]: ./getting-started.md#accessing-the-web-interface
|
[Web Interface Access]: ./getting-started.md#accessing-the-web-interface
|
||||||
[Short RPC Client]: ./getting-started.md#connecting-to-the-service-via-python-rpc-client
|
[Short RPC Client]: ./getting-started.md#connecting-to-the-service-via-python-rpc-client
|
||||||
[Customizing Web Interface]: ./user-guide/interaction/README.md#customization-options
|
[Customizing Web Interface]: ./user-guide/interaction/Auto-generated-Frontend.md#customization-options
|
||||||
[Task Management]: ./user-guide/Tasks.md
|
[Task Management]: ./user-guide/Tasks.md
|
||||||
[Units]: ./user-guide/Understanding-Units.md
|
[Units]: ./user-guide/Understanding-Units.md
|
||||||
[Property Validation]: ./user-guide/Validating-Property-Setters.md
|
[Property Validation]: ./user-guide/Validating-Property-Setters.md
|
||||||
|
|||||||
211
docs/user-guide/Configuration.md
Normal file
@@ -0,0 +1,211 @@
|
|||||||
|
|
||||||
|
# Configuring `pydase`
|
||||||
|
|
||||||
|
## Do I Need to Configure My `pydase` Service?
|
||||||
|
|
||||||
|
`pydase` services work out of the box without requiring any configuration. However, you
|
||||||
|
might want to change some options, such as the web server port or logging level. To
|
||||||
|
accommodate such customizations, `pydase` allows configuration through environment
|
||||||
|
variables - avoiding hard-coded settings in your service code.
|
||||||
|
|
||||||
|
Why should you avoid hard-coding configurations? Here are two reasons:
|
||||||
|
|
||||||
|
1. **Security**:
|
||||||
|
Protect sensitive information, such as usernames and passwords. By using environment
|
||||||
|
variables, your service code can remain public while keeping private information
|
||||||
|
secure.
|
||||||
|
|
||||||
|
2. **Reusability**:
|
||||||
|
Services often need to be reused in different environments. For example, you might
|
||||||
|
deploy multiple instances of a service (e.g., for different sensors in a lab). By
|
||||||
|
separating configuration from code, you can adapt the service to new requirements
|
||||||
|
without modifying its codebase.
|
||||||
|
|
||||||
|
Next, we’ll walk you through the environment variables `pydase` supports and provide an
|
||||||
|
example of how to separate service code from configuration.
|
||||||
|
|
||||||
|
## Configuring `pydase` Using Environment Variables
|
||||||
|
|
||||||
|
`pydase` provides the following environment variables for customization:
|
||||||
|
|
||||||
|
- **`ENVIRONMENT`**:
|
||||||
|
Defines the operation mode (`"development"` or `"production"`), which influences
|
||||||
|
behaviour such as logging (see [Logging in pydase](./Logging.md)).
|
||||||
|
|
||||||
|
- **`SERVICE_CONFIG_DIR`**:
|
||||||
|
Specifies the directory for configuration files (e.g., `web_settings.json`). Defaults
|
||||||
|
to the `config` folder in the service root. Access this programmatically using:
|
||||||
|
|
||||||
|
```python
|
||||||
|
import pydase.config
|
||||||
|
pydase.config.ServiceConfig().config_dir
|
||||||
|
```
|
||||||
|
|
||||||
|
- **`SERVICE_WEB_PORT`**:
|
||||||
|
Defines the web server’s port. Ensure each service on the same host uses a unique
|
||||||
|
port. Default: `8001`.
|
||||||
|
|
||||||
|
- **`GENERATE_WEB_SETTINGS`**:
|
||||||
|
When `true`, generates or updates the `web_settings.json` file (see [Tailoring Frontend Component Layout](./interaction/Auto-generated-Frontend.md#tailoring-frontend-component-layout)).
|
||||||
|
Existing entries are preserved, and new entries are appended.
|
||||||
|
|
||||||
|
### Configuring `pydase` via Keyword Arguments
|
||||||
|
|
||||||
|
Some settings can also be overridden directly in your service code using keyword
|
||||||
|
arguments when initializing the server. This allows for flexibility in code-based
|
||||||
|
configuration:
|
||||||
|
|
||||||
|
```python
|
||||||
|
import pathlib
|
||||||
|
from pydase import Server
|
||||||
|
from your_service_module import YourService
|
||||||
|
|
||||||
|
server = Server(
|
||||||
|
YourService(),
|
||||||
|
web_port=8080, # Overrides SERVICE_WEB_PORT
|
||||||
|
config_dir=pathlib.Path("custom_config"), # Overrides SERVICE_CONFIG_DIR
|
||||||
|
generate_web_settings=True # Overrides GENERATE_WEB_SETTINGS
|
||||||
|
).run()
|
||||||
|
```
|
||||||
|
|
||||||
|
## Separating Service Code from Configuration
|
||||||
|
|
||||||
|
To decouple configuration from code, `pydase` utilizes `confz` for configuration
|
||||||
|
management. Below is an example that demonstrates how to configure a `pydase` service
|
||||||
|
for a sensor readout application.
|
||||||
|
|
||||||
|
### Scenario: Configuring a Sensor Service
|
||||||
|
|
||||||
|
Imagine you have multiple sensors distributed across your lab. You need to configure
|
||||||
|
each service instance with:
|
||||||
|
|
||||||
|
1. **Hostname**: The hostname or IP address of the sensor.
|
||||||
|
2. **Authentication Token**: A token or credentials to authenticate with the sensor.
|
||||||
|
3. **Readout Interval**: A periodic interval to read sensor data and log it to a
|
||||||
|
database.
|
||||||
|
|
||||||
|
Given the repository structure:
|
||||||
|
|
||||||
|
```bash title="Service Repository Structure"
|
||||||
|
my_sensor
|
||||||
|
├── pyproject.toml
|
||||||
|
├── README.md
|
||||||
|
└── src
|
||||||
|
└── my_sensor
|
||||||
|
├── my_sensor.py
|
||||||
|
├── config.py
|
||||||
|
├── __init__.py
|
||||||
|
└── __main__.py
|
||||||
|
```
|
||||||
|
|
||||||
|
Your service might look like this:
|
||||||
|
|
||||||
|
### Configuration
|
||||||
|
|
||||||
|
Define the configuration using `confz`:
|
||||||
|
|
||||||
|
```python title="src/my_sensor/config.py"
|
||||||
|
import confz
|
||||||
|
from pydase.config import ServiceConfig
|
||||||
|
|
||||||
|
class MySensorConfig(confz.BaseConfig):
|
||||||
|
instance_name: str
|
||||||
|
hostname: str
|
||||||
|
auth_token: str
|
||||||
|
readout_interval_s: float
|
||||||
|
|
||||||
|
CONFIG_SOURCES = confz.FileSource(file=ServiceConfig().config_dir / "config.yaml")
|
||||||
|
```
|
||||||
|
|
||||||
|
This class defines configurable parameters and loads values from a `config.yaml` file
|
||||||
|
located in the service’s configuration directory (which is configurable through an
|
||||||
|
environment variable, see [above](#configuring-pydase-using-environment-variables)).
|
||||||
|
A sample YAML file might look like this:
|
||||||
|
|
||||||
|
```yaml title="config.yaml"
|
||||||
|
instance_name: my-sensor-service-01
|
||||||
|
hostname: my-sensor-01.example.com
|
||||||
|
auth_token: my-secret-authentication-token
|
||||||
|
readout_interval_s: 5
|
||||||
|
```
|
||||||
|
|
||||||
|
### Service Implementation
|
||||||
|
|
||||||
|
Your service implementation might look like this:
|
||||||
|
|
||||||
|
```python title="src/my_sensor/my_sensor.py"
|
||||||
|
import asyncio
|
||||||
|
import http.client
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import pydase.components
|
||||||
|
import pydase.units as u
|
||||||
|
from pydase.task.decorator import task
|
||||||
|
|
||||||
|
from my_sensor.config import MySensorConfig
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class MySensor(pydase.DataService):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.readout_interval_s: u.Quantity = (
|
||||||
|
MySensorConfig().readout_interval_s * u.units.s
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def hostname(self) -> str:
|
||||||
|
"""Hostname of the sensor. Read-only."""
|
||||||
|
return MySensorConfig().hostname
|
||||||
|
|
||||||
|
def _get_data(self) -> dict[str, Any]:
|
||||||
|
"""Fetches sensor data via an HTTP GET request. It passes the authentication
|
||||||
|
token as "Authorization" header."""
|
||||||
|
|
||||||
|
connection = http.client.HTTPConnection(self.hostname, timeout=10)
|
||||||
|
connection.request(
|
||||||
|
"GET", "/", headers={"Authorization": MySensorConfig().auth_token}
|
||||||
|
)
|
||||||
|
response = connection.getresponse()
|
||||||
|
connection.close()
|
||||||
|
|
||||||
|
return json.loads(response.read())
|
||||||
|
|
||||||
|
@task(autostart=True)
|
||||||
|
async def get_and_log_sensor_values(self) -> None:
|
||||||
|
"""Periodically fetches and logs sensor data."""
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
data = self._get_data()
|
||||||
|
# Write data to database using MySensorConfig().instance_name ...
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
"Error occurred, retrying in %s seconds. Error: %s",
|
||||||
|
self.readout_interval_s.m,
|
||||||
|
e,
|
||||||
|
)
|
||||||
|
await asyncio.sleep(self.readout_interval_s.m)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Starting the Service
|
||||||
|
|
||||||
|
The service is launched via the `__main__.py` entry point:
|
||||||
|
|
||||||
|
```python title="src/my_sensor/__main__.py"
|
||||||
|
import pydase
|
||||||
|
from my_sensor.my_sensor import MySensor
|
||||||
|
|
||||||
|
pydase.Server(MySensor()).run()
|
||||||
|
```
|
||||||
|
|
||||||
|
You can now start the service with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python -m my_sensor
|
||||||
|
```
|
||||||
|
|
||||||
|
This approach ensures the service is fully configured via the `config.yaml` file,
|
||||||
|
separating service logic from configuration.
|
||||||
91
docs/user-guide/Logging.md
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
# Logging in pydase
|
||||||
|
|
||||||
|
The `pydase` library organizes its loggers per module, mirroring the Python package hierarchy. This structured approach allows for granular control over logging levels and behaviour across different parts of the library. Logs can also include details about client identification based on headers sent by the client or proxy, providing additional context for debugging or auditing.
|
||||||
|
|
||||||
|
## Changing the pydase Log Level
|
||||||
|
|
||||||
|
You have two primary ways to adjust the log levels in `pydase`:
|
||||||
|
|
||||||
|
1. **Directly targeting `pydase` loggers**
|
||||||
|
|
||||||
|
You can set the log level for any `pydase` logger directly in your code. This method is useful for fine-tuning logging levels for specific modules within `pydase`. For instance, if you want to change the log level of the main `pydase` logger or target a submodule like `pydase.data_service`, you can do so as follows:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# <your_script.py>
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# Set the log level for the main pydase logger
|
||||||
|
logging.getLogger("pydase").setLevel(logging.INFO)
|
||||||
|
|
||||||
|
# Optionally, target a specific submodule logger
|
||||||
|
# logging.getLogger("pydase.data_service").setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
# Your logger for the current script
|
||||||
|
from pydase.utils.logging import configure_logging_with_pydase_formatter
|
||||||
|
configure_logging_with_pydase_formatter(level=logging.DEBUG)
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
logger.debug("My debug message.")
|
||||||
|
```
|
||||||
|
|
||||||
|
This approach allows for specific control over different parts of the `pydase` library, depending on your logging needs.
|
||||||
|
|
||||||
|
2. **Using the `ENVIRONMENT` environment variable**
|
||||||
|
|
||||||
|
For a more global setting that affects the entire `pydase` library, you can utilize the `ENVIRONMENT` environment variable. Setting this variable to `"production"` will configure all `pydase` loggers to only log messages of level `"INFO"` and above, filtering out more verbose logging. This is particularly useful for production environments where excessive logging can be overwhelming or unnecessary.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
ENVIRONMENT="production" python -m <module_using_pydase>
|
||||||
|
```
|
||||||
|
|
||||||
|
In the absence of this setting, the default behavior is to log everything of level `"DEBUG"` and above, suitable for development environments where more detailed logs are beneficial.
|
||||||
|
|
||||||
|
## Client Identification in pydase Logs
|
||||||
|
|
||||||
|
The logging system in `pydase` includes information about clients based on headers sent by the client or a proxy. The priority for identifying the client is fixed and as follows:
|
||||||
|
|
||||||
|
1. **`Remote-User` Header**: This header is typically set by authentication servers like [Authelia](https://www.authelia.com/). While it can be set manually by users, its primary purpose is to provide client information authenticated through such servers.
|
||||||
|
2. **`X-Client-ID` Header**: This header is intended for use by Python clients to pass custom client identification information. It acts as a fallback when the `Remote-User` header is not available.
|
||||||
|
3. **Default Socket.IO Session ID**: If neither of the above headers is present, the system falls back to the default Socket.IO session ID to identify the client.
|
||||||
|
|
||||||
|
For example, log entries might include the following details based on the available headers:
|
||||||
|
|
||||||
|
```plaintext
|
||||||
|
2025-01-20 06:47:50.940 | INFO | pydase.server.web_server.api.v1.application:_get_value:36 - Client [id=This is me!] is getting the value of 'property_attr'
|
||||||
|
|
||||||
|
2025-01-20 06:48:13.710 | INFO | pydase.server.web_server.api.v1.application:_get_value:36 - Client [user=Max Muster] is getting the value of 'property_attr'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuring Logging in Services
|
||||||
|
|
||||||
|
To configure logging in services built with `pydase`, use the helper function [`configure_logging_with_pydase_formatter`][pydase.utils.logging.configure_logging_with_pydase_formatter]. This function sets up a logger with the same formatting used internally by `pydase`, so your service logs match the style and structure of `pydase` logs.
|
||||||
|
|
||||||
|
### Example
|
||||||
|
|
||||||
|
If your service follows a typical layout like:
|
||||||
|
|
||||||
|
```text
|
||||||
|
└── src
|
||||||
|
└── my_service
|
||||||
|
├── __init__.py
|
||||||
|
└── ...
|
||||||
|
```
|
||||||
|
|
||||||
|
you should call `configure_logging_with_pydase_formatter` inside `src/my_service/__init__.py`. This ensures the logger is configured as soon as your service is imported, and before any log messages are emitted.
|
||||||
|
|
||||||
|
```python title="src/my_service/__init__.py"
|
||||||
|
import logging
import sys
|
||||||
|
from pydase.utils.logging import configure_logging_with_pydase_formatter
|
||||||
|
|
||||||
|
configure_logging_with_pydase_formatter(
|
||||||
|
name="my_service", # Use the package/module name or None for the root logger
|
||||||
|
level=logging.DEBUG, # Set the desired logging level (defaults to INFO)
|
||||||
|
stream=sys.stderr # Optional: set the output stream (stderr by default)
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Notes
|
||||||
|
|
||||||
|
- If you pass `name=None`, the root logger will be configured. This affects **all logs** that propagate to the root logger.
|
||||||
|
- Passing a specific `name` like `"my_service"` allows you to scope the configuration to your service only, which is safer in multi-library environments.
|
||||||
|
- You can use `sys.stdout` instead of `sys.stderr` if your logs are being captured or processed differently (e.g., in containers or logging systems).
|
||||||
@@ -2,29 +2,47 @@
|
|||||||
|
|
||||||
`pydase` allows you to easily persist the state of your service by saving it to a file. This is especially useful when you want to maintain the service's state across different runs.
|
`pydase` allows you to easily persist the state of your service by saving it to a file. This is especially useful when you want to maintain the service's state across different runs.
|
||||||
|
|
||||||
To save the state of your service, pass a `filename` keyword argument to the constructor of the `pydase.Server` class. If the file specified by `filename` does not exist, the state manager will create this file and store its state in it when the service is shut down. If the file already exists, the state manager will load the state from this file, setting the values of its attributes to the values stored in the file.
|
To enable persistence, pass a `filename` keyword argument to the constructor of the [`pydase.Server`][pydase.Server] class. The `filename` specifies the file where the state will be saved:
|
||||||
|
|
||||||
Here's an example:
|
- If the file **does not exist**, it will be created and populated with the current state when the service shuts down or saves.
|
||||||
|
- If the file **already exists**, the state manager will **load** the saved values into the service at startup.
|
||||||
|
|
||||||
|
Here’s an example:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
import pydase
|
import pydase
|
||||||
|
|
||||||
class Device(pydase.DataService):
|
class Device(pydase.DataService):
|
||||||
# ... defining the Device class ...
|
# ... define your service class ...
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
service = Device()
|
service = Device()
|
||||||
pydase.Server(service=service, filename="device_state.json").run()
|
pydase.Server(service=service, filename="device_state.json").run()
|
||||||
```
|
```
|
||||||
|
|
||||||
In this example, the state of the `Device` service will be saved to `device_state.json` when the service is shut down. If `device_state.json` exists when the server is started, the state manager will restore the state of the service from this file.
|
In this example, the service state will be automatically loaded from `device_state.json` at startup (if it exists), and saved to the same file periodically and upon shutdown.
|
||||||
|
|
||||||
|
## Automatic Periodic State Saving
|
||||||
|
|
||||||
|
When a `filename` is provided, `pydase` automatically enables **periodic autosaving** of the service state to that file. This ensures that the current state is regularly persisted, reducing the risk of data loss during unexpected shutdowns.
|
||||||
|
|
||||||
|
The autosave happens every 30 seconds by default. You can customize the interval using the `autosave_interval` argument (in seconds):
|
||||||
|
|
||||||
|
```python
|
||||||
|
pydase.Server(
|
||||||
|
service=service,
|
||||||
|
filename="device_state.json",
|
||||||
|
autosave_interval=10.0, # save every 10 seconds
|
||||||
|
).run()
|
||||||
|
```
|
||||||
|
|
||||||
|
To disable automatic saving, set `autosave_interval` to `None`.
|
||||||
|
|
||||||
## Controlling Property State Loading with `@load_state`
|
## Controlling Property State Loading with `@load_state`
|
||||||
|
|
||||||
By default, the state manager only restores values for public attributes of your service. If you have properties that you want to control the loading for, you can use the `@load_state` decorator on your property setters. This indicates to the state manager that the value of the property should be loaded from the state file.
|
By default, the state manager only restores values for public attributes of your service (i.e. *it does not restore property values*). If you have properties that you want to control the loading for, you can use the [`@load_state`][pydase.data_service.state_manager.load_state] decorator on your property setters. This indicates to the state manager that the value of the property should be loaded from the state file.
|
||||||
|
|
||||||
Here is how you can apply the `@load_state` decorator:
|
Example:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
import pydase
|
import pydase
|
||||||
@@ -43,7 +61,6 @@ class Device(pydase.DataService):
|
|||||||
self._name = value
|
self._name = value
|
||||||
```
|
```
|
||||||
|
|
||||||
With the `@load_state` decorator applied to the `name` property setter, the state manager will load and apply the `name` property's value from the file storing the state upon server startup, assuming it exists.
|
With the `@load_state` decorator applied to the `name` property setter, the state manager will load and apply the `name` property's value from the file upon server startup.
|
||||||
|
|
||||||
Note: If the service class structure has changed since the last time its state was saved, only the attributes and properties decorated with `@load_state` that have remained the same will be restored from the settings file.
|
|
||||||
|
|
||||||
|
**Note**: If the structure of your service class changes between saves, only properties decorated with `@load_state` and unchanged public attributes will be restored safely.
|
||||||
|
|||||||
@@ -1,20 +1,18 @@
|
|||||||
# Understanding Tasks
|
# Understanding Tasks
|
||||||
|
|
||||||
In `pydase`, a task is defined as an asynchronous function without arguments contained in a class that inherits from `pydase.DataService`. These tasks usually contain a while loop and are designed to carry out periodic functions.
|
In `pydase`, a task is defined as an asynchronous function without arguments that is decorated with the [`@task`][pydase.task.decorator.task] decorator and contained in a class that inherits from [`pydase.DataService`][pydase.DataService]. These tasks usually contain a while loop and are designed to carry out periodic functions. For example, a task might be used to periodically read sensor data, update a database, or perform any other recurring job.
|
||||||
|
|
||||||
For example, a task might be used to periodically read sensor data, update a database, or perform any other recurring job. One core feature of `pydase` is its ability to automatically generate start and stop functions for these tasks. This allows you to control task execution via both the frontend and python clients, giving you flexible and powerful control over your service's operation.
|
`pydase` allows you to control task execution via both the frontend and Python clients and can automatically start tasks upon initialization of the service. By using the [`@task`][pydase.task.decorator.task] decorator with the `autostart=True` argument in your service class, `pydase` will automatically start these tasks when the server is started. Here's an example:
|
||||||
|
|
||||||
Another powerful feature of `pydase` is its ability to automatically start tasks upon initialization of the service. By specifying the tasks and their arguments in the `_autostart_tasks` dictionary in your service class's `__init__` method, `pydase` will automatically start these tasks when the server is started. Here's an example:
|
|
||||||
|
|
||||||
```python
|
```python
|
||||||
import pydase
|
import pydase
|
||||||
|
from pydase.task.decorator import task
|
||||||
|
|
||||||
|
|
||||||
class SensorService(pydase.DataService):
|
class SensorService(pydase.DataService):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self.readout_frequency = 1.0
|
self.readout_frequency = 1.0
|
||||||
self._autostart_tasks["read_sensor_data"] = ()
|
|
||||||
|
|
||||||
def _process_data(self, data: ...) -> None:
|
def _process_data(self, data: ...) -> None:
|
||||||
...
|
...
|
||||||
@@ -22,6 +20,7 @@ class SensorService(pydase.DataService):
|
|||||||
def _read_from_sensor(self) -> Any:
|
def _read_from_sensor(self) -> Any:
|
||||||
...
|
...
|
||||||
|
|
||||||
|
@task(autostart=True)
|
||||||
async def read_sensor_data(self):
|
async def read_sensor_data(self):
|
||||||
while True:
|
while True:
|
||||||
data = self._read_from_sensor()
|
data = self._read_from_sensor()
|
||||||
@@ -34,6 +33,50 @@ if __name__ == "__main__":
|
|||||||
pydase.Server(service=service).run()
|
pydase.Server(service=service).run()
|
||||||
```
|
```
|
||||||
|
|
||||||
In this example, `read_sensor_data` is a task that continuously reads data from a sensor. By adding it to the `_autostart_tasks` dictionary, it will automatically start running when `pydase.Server(service).run()` is executed.
|
In this example, `read_sensor_data` is a task that continuously reads data from a sensor. By decorating it with `@task(autostart=True)`, it will automatically start running when `pydase.Server(service).run()` is executed.
|
||||||
As with all tasks, `pydase` will generate `start_read_sensor_data` and `stop_read_sensor_data` methods, which can be called to manually start and stop the data reading task. The readout frequency can be updated using the `readout_frequency` attribute.
|
|
||||||
|
|
||||||
|
## Task Lifecycle Control
|
||||||
|
|
||||||
|
The [`@task`][pydase.task.decorator.task] decorator replaces the function with a task object that has `start()` and `stop()` methods. This means you can control the task execution directly using these methods. For instance, you can manually start or stop the task by calling `service.read_sensor_data.start()` and `service.read_sensor_data.stop()`, respectively.
|
||||||
|
|
||||||
|
## Advanced Task Options
|
||||||
|
|
||||||
|
The [`@task`][pydase.task.decorator.task] decorator supports several options inspired by systemd unit services, allowing fine-grained control over task behavior:
|
||||||
|
|
||||||
|
- **`autostart`**: Automatically starts the task when the service initializes. Defaults to `False`.
|
||||||
|
- **`restart_on_exception`**: Configures whether the task should restart if it exits due to an exception (other than `asyncio.CancelledError`). Defaults to `True`.
|
||||||
|
- **`restart_sec`**: Specifies the delay (in seconds) before restarting a failed task. Defaults to `1.0`.
|
||||||
|
- **`start_limit_interval_sec`**: Configures a time window (in seconds) for rate limiting task restarts. If the task restarts more than `start_limit_burst` times within this interval, it will no longer restart. Defaults to `None` (disabled).
|
||||||
|
- **`start_limit_burst`**: Defines the maximum number of restarts allowed within the interval specified by `start_limit_interval_sec`. Defaults to `3`.
|
||||||
|
- **`exit_on_failure`**: If set to `True`, the service will exit if the task fails and either `restart_on_exception` is `False` or the start rate limiting is exceeded. Defaults to `False`.
|
||||||
|
|
||||||
|
### Example with Advanced Options
|
||||||
|
|
||||||
|
Here is an example showcasing advanced task options:
|
||||||
|
|
||||||
|
```python
|
||||||
|
import pydase
|
||||||
|
from pydase.task.decorator import task
|
||||||
|
|
||||||
|
|
||||||
|
class AdvancedTaskService(pydase.DataService):
|
||||||
|
def __init__(self):
|
||||||
|
super().__init__()
|
||||||
|
|
||||||
|
@task(
|
||||||
|
autostart=True,
|
||||||
|
restart_on_exception=True,
|
||||||
|
restart_sec=2.0,
|
||||||
|
start_limit_interval_sec=10.0,
|
||||||
|
start_limit_burst=5,
|
||||||
|
exit_on_failure=True,
|
||||||
|
)
|
||||||
|
async def critical_task(self):
|
||||||
|
while True:
|
||||||
|
raise Exception("Critical failure")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
service = AdvancedTaskService()
|
||||||
|
pydase.Server(service=service).run()
|
||||||
|
```
|
||||||
|
|||||||
59
docs/user-guide/advanced/Reverse-Proxy.md
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
# Deploying Services Behind a Reverse Proxy
|
||||||
|
|
||||||
|
In some environments, you may need to deploy your services behind a reverse proxy. Typically, this involves adding a CNAME record for your service that points to the reverse proxy in your DNS server. The proxy then routes requests to the `pydase` backend on the appropriate web server port.
|
||||||
|
|
||||||
|
However, in scenarios where you don’t control the DNS server, or where adding new CNAME records is time-consuming, `pydase` supports **service multiplexing** using a path prefix. This means multiple services can be hosted on a single CNAME (e.g., `services.example.com`), with each service accessible through a unique path such as `services.example.com/my-service`.
|
||||||
|
|
||||||
|
To ensure seamless operation, the reverse proxy must strip the path prefix (e.g., `/my-service`) from the request URL and forward it as the `X-Forwarded-Prefix` header. `pydase` then uses this header to dynamically adjust the frontend paths, ensuring all resources are correctly located.
|
||||||
|
|
||||||
|
## Example Deployment with Traefik
|
||||||
|
|
||||||
|
Below is an example setup using [Traefik](https://doc.traefik.io/traefik/), a widely-used reverse proxy. This configuration demonstrates how to forward requests for a `pydase` service using a path prefix.
|
||||||
|
|
||||||
|
### 1. Reverse Proxy Configuration
|
||||||
|
|
||||||
|
Save the following configuration to a file (e.g., `/etc/traefik/dynamic_conf/my-service-config.yml`):
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
http:
|
||||||
|
routers:
|
||||||
|
my-service-route:
|
||||||
|
rule: PathPrefix(`/my-service`)
|
||||||
|
entryPoints:
|
||||||
|
- web
|
||||||
|
service: my-service
|
||||||
|
middlewares:
|
||||||
|
- strip-prefix
|
||||||
|
services:
|
||||||
|
my-service:
|
||||||
|
loadBalancer:
|
||||||
|
servers:
|
||||||
|
- url: http://127.0.0.1:8001
|
||||||
|
middlewares:
|
||||||
|
strip-prefix:
|
||||||
|
stripprefix:
|
||||||
|
prefixes: /my-service
|
||||||
|
```
|
||||||
|
|
||||||
|
This configuration:
|
||||||
|
|
||||||
|
- Routes requests with the path prefix `/my-service` to the `pydase` backend.
|
||||||
|
- Strips the prefix (`/my-service`) from the request URL using the `stripprefix` middleware.
|
||||||
|
- Forwards the stripped prefix as the `X-Forwarded-Prefix` header.
|
||||||
|
|
||||||
|
### 2. Static Configuration for Traefik
|
||||||
|
|
||||||
|
Ensure Traefik is set up to use the dynamic configuration. Add this to your Traefik static configuration (e.g., `/etc/traefik/traefik.yml`):
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
providers:
|
||||||
|
file:
|
||||||
|
filename: /etc/traefik/dynamic_conf/my-service-config.yml
|
||||||
|
entrypoints:
|
||||||
|
web:
|
||||||
|
address: ":80"
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Accessing the Service
|
||||||
|
|
||||||
|
Once configured, your `pydase` service will be accessible at `http://services.example.com/my-service`. The path prefix will be handled transparently by `pydase`, so you don’t need to make any changes to your application code or frontend resources.
|
||||||
48
docs/user-guide/advanced/SOCKS-Proxy.md
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
# Connecting Through a SOCKS5 Proxy
|
||||||
|
|
||||||
|
If your target service is only reachable via an SSH gateway or resides behind a
|
||||||
|
firewall, you can route your [`pydase.Client`][pydase.Client] connection through a local
|
||||||
|
SOCKS5 proxy. This is particularly useful in network environments where direct access to
|
||||||
|
the service is not possible.
|
||||||
|
|
||||||
|
## Setting Up a SOCKS5 Proxy
|
||||||
|
|
||||||
|
You can create a local [SOCKS5 proxy](https://en.wikipedia.org/wiki/SOCKS) using SSH's
|
||||||
|
`-D` option:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
ssh -D 2222 user@gateway.example.com
|
||||||
|
```
|
||||||
|
|
||||||
|
This command sets up a SOCKS5 proxy on `localhost:2222`, securely forwarding traffic
|
||||||
|
over the SSH connection.
|
||||||
|
|
||||||
|
## Using the Proxy in Your Python Client
|
||||||
|
|
||||||
|
Once the proxy is running, configure the [`pydase.Client`][pydase.Client] to route
|
||||||
|
traffic through it using the `proxy_url` parameter:
|
||||||
|
|
||||||
|
```python
|
||||||
|
import pydase
|
||||||
|
|
||||||
|
client = pydase.Client(
|
||||||
|
url="ws://target-service:8001",
|
||||||
|
proxy_url="socks5://localhost:2222"
|
||||||
|
).proxy
|
||||||
|
```
|
||||||
|
|
||||||
|
* You can also use this setup with `wss://` URLs for encrypted WebSocket connections.
|
||||||
|
|
||||||
|
## Installing Required Dependencies
|
||||||
|
|
||||||
|
To use this feature, you must install the optional `socks` dependency group, which
|
||||||
|
includes [`aiohttp_socks`](https://pypi.org/project/aiohttp-socks/):
|
||||||
|
|
||||||
|
- `poetry`
|
||||||
|
```bash
|
||||||
|
poetry add "pydase[socks]"
|
||||||
|
```
|
||||||
|
- `pip`
|
||||||
|
```bash
|
||||||
|
pip install "pydase[socks]"
|
||||||
|
```
|
||||||
@@ -21,7 +21,8 @@ The frontend uses a component-based approach, representing various data types an
|
|||||||
`pydase` allows you to enhance the user experience by customizing the web interface's appearance through
|
`pydase` allows you to enhance the user experience by customizing the web interface's appearance through
|
||||||
|
|
||||||
1. a custom CSS file, and
|
1. a custom CSS file, and
|
||||||
2. tailoring the frontend component layout and display style.
|
2. a custom favicon image, and
|
||||||
|
3. tailoring the frontend component layout and display style.
|
||||||
|
|
||||||
For more advanced customization, you can provide a completely custom frontend source.
|
For more advanced customization, you can provide a completely custom frontend source.
|
||||||
|
|
||||||
@@ -51,6 +52,34 @@ This will apply the styles defined in `custom.css` to the web interface, allowin
|
|||||||
|
|
||||||
Please ensure that the CSS file path is accessible from the server's running location. Relative or absolute paths can be used depending on your setup.
|
Please ensure that the CSS file path is accessible from the server's running location. Relative or absolute paths can be used depending on your setup.
|
||||||
|
|
||||||
|
|
||||||
|
### Custom favicon image
|
||||||
|
|
||||||
|
You can customize the favicon displayed in the browser tab by providing your own favicon image file during the server initialization.
|
||||||
|
|
||||||
|
Here's how you can use this feature:
|
||||||
|
|
||||||
|
1. Prepare your custom favicon image (e.g. a `.png` file).
|
||||||
|
2. Pass the path to your favicon file as the `favicon_path` argument when initializing the `Server` class.
|
||||||
|
|
||||||
|
Here’s an example:
|
||||||
|
|
||||||
|
```python
|
||||||
|
import pydase
|
||||||
|
|
||||||
|
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
# ... your service definition ...
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
service = MyService()
|
||||||
|
pydase.Server(service, favicon_path="./my/local/my-favicon.png").run()
|
||||||
|
```
|
||||||
|
|
||||||
|
This will serve the specified image instead of the default `pydase` logo.
|
||||||
|
|
||||||
|
|
||||||
### Tailoring Frontend Component Layout
|
### Tailoring Frontend Component Layout
|
||||||
|
|
||||||
You can customize the display names, visibility, and order of components via the `web_settings.json` file.
|
You can customize the display names, visibility, and order of components via the `web_settings.json` file.
|
||||||
@@ -60,7 +89,7 @@ Each key in the file corresponds to the full access path of public attributes, p
|
|||||||
- **Control Component Visibility**: Utilize the `"display"` key-value pair to control whether a component is rendered in the frontend. Set the value to `true` to make the component visible or `false` to hide it.
|
- **Control Component Visibility**: Utilize the `"display"` key-value pair to control whether a component is rendered in the frontend. Set the value to `true` to make the component visible or `false` to hide it.
|
||||||
- **Adjustable Component Order**: The `"displayOrder"` values determine the order of components. Alter these values to rearrange the components as desired. The value defaults to [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER).
|
- **Adjustable Component Order**: The `"displayOrder"` values determine the order of components. Alter these values to rearrange the components as desired. The value defaults to [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER).
|
||||||
|
|
||||||
The `web_settings.json` file will be stored in the directory specified by `SERVICE_CONFIG_DIR`. You can generate a `web_settings.json` file by setting the `GENERATE_WEB_SETTINGS` to `True`. For more information, see the [configuration section](#configuring-pydase-via-environment-variables).
|
The `web_settings.json` file will be stored in the directory specified by the `SERVICE_CONFIG_DIR` environment variable. You can generate a `web_settings.json` file by setting the `GENERATE_WEB_SETTINGS` to `True`. For more information, see the [configuration section](../Configuration.md).
|
||||||
|
|
||||||
For example, styling the following service
|
For example, styling the following service
|
||||||
|
|
||||||
@@ -1,60 +0,0 @@
|
|||||||
# Python RPC Client
|
|
||||||
|
|
||||||
You can connect to the service using the `pydase.Client`. Below is an example of how to establish a connection to a service and interact with it:
|
|
||||||
|
|
||||||
```python
|
|
||||||
import pydase
|
|
||||||
|
|
||||||
# Replace the hostname and port with the IP address and the port of the machine where
|
|
||||||
# the service is running, respectively
|
|
||||||
client_proxy = pydase.Client(url="ws://<ip_addr>:<service_port>").proxy
|
|
||||||
# client_proxy = pydase.Client(url="wss://your-domain.ch").proxy # if your service uses ssl-encryption
|
|
||||||
|
|
||||||
# Interact with the service attributes as if they were local
|
|
||||||
client_proxy.voltage = 5.0
|
|
||||||
print(client_proxy.voltage) # Expected output: 5.0
|
|
||||||
```
|
|
||||||
|
|
||||||
This example demonstrates setting and retrieving the `voltage` attribute through the client proxy.
|
|
||||||
The proxy acts as a local representative of the remote service, enabling straightforward interaction.
|
|
||||||
|
|
||||||
The proxy class dynamically synchronizes with the server's exposed attributes. This synchronization allows the proxy to be automatically updated with any attributes or methods that the server exposes, essentially mirroring the server's API. This dynamic updating enables users to interact with the remote service as if they were working with a local object.
|
|
||||||
|
|
||||||
## Context Manager
|
|
||||||
|
|
||||||
You can also use the client as a context manager which automatically opens and closes the connection again:
|
|
||||||
|
|
||||||
```python
|
|
||||||
import pydase
|
|
||||||
|
|
||||||
|
|
||||||
with pydase.Client(url="ws://localhost:8001") as client:
|
|
||||||
client.proxy.<my_method>()
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
## Tab Completion Support
|
|
||||||
|
|
||||||
In interactive environments such as Python interpreters and Jupyter notebooks, the proxy class supports tab completion, which allows users to explore available methods and attributes.
|
|
||||||
|
|
||||||
## Integration within Other Services
|
|
||||||
|
|
||||||
You can also integrate a client proxy within another service. Here's how you can set it up:
|
|
||||||
|
|
||||||
```python
|
|
||||||
import pydase
|
|
||||||
|
|
||||||
class MyService(pydase.DataService):
|
|
||||||
# Initialize the client without blocking the constructor
|
|
||||||
proxy = pydase.Client(url="ws://<ip_addr>:<service_port>", block_until_connected=False).proxy
|
|
||||||
# proxy = pydase.Client(url="wss://your-domain.ch", block_until_connected=False).proxy # communicating with ssl-encrypted service
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
service = MyService()
|
|
||||||
# Create a server that exposes this service; adjust the web_port as needed
|
|
||||||
server = pydase.Server(service, web_port=8002).run()
|
|
||||||
```
|
|
||||||
|
|
||||||
In this setup, the `MyService` class has a `proxy` attribute that connects to a `pydase` service located at `<ip_addr>:<service_port>`.
|
|
||||||
The `block_until_connected=False` argument allows the service to start up even if the initial connection attempt fails.
|
|
||||||
This configuration is particularly useful in distributed systems where services may start in any order.
|
|
||||||
130
docs/user-guide/interaction/Python-Client.md
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
# Python RPC Client
|
||||||
|
|
||||||
|
The [`pydase.Client`][pydase.Client] allows you to connect to a remote `pydase` service using Socket.IO, facilitating interaction with the service as though it were running locally.
|
||||||
|
|
||||||
|
## Basic Usage
|
||||||
|
|
||||||
|
```python
|
||||||
|
import pydase
|
||||||
|
|
||||||
|
# Replace <ip_addr> and <service_port> with the appropriate values for your service
|
||||||
|
client_proxy = pydase.Client(url="ws://<ip_addr>:<service_port>").proxy
|
||||||
|
|
||||||
|
# For SSL-encrypted services, use the wss protocol
|
||||||
|
# client_proxy = pydase.Client(url="wss://your-domain.ch").proxy
|
||||||
|
|
||||||
|
# Interact with the service attributes as if they were local
|
||||||
|
client_proxy.voltage = 5.0
|
||||||
|
print(client_proxy.voltage) # Expected output: 5.0
|
||||||
|
```
|
||||||
|
|
||||||
|
This example shows how to set and retrieve the `voltage` attribute through the client proxy.
|
||||||
|
The proxy acts as a local representation of the remote service, enabling intuitive interaction.
|
||||||
|
|
||||||
|
The proxy class automatically synchronizes with the server's attributes and methods, keeping itself up-to-date with any changes. This dynamic synchronization essentially mirrors the server's API, making it feel like you're working with a local object.
|
||||||
|
|
||||||
|
## Automatic Proxy Updates
|
||||||
|
|
||||||
|
By default, the client listens for attribute and structure changes from the server and dynamically updates its internal proxy representation. This ensures that value changes or newly added attributes on the server appear in the client proxy without requiring reconnection or manual refresh.
|
||||||
|
|
||||||
|
This is useful, for example, when [integrating the client into another service](#integrating-the-client-into-another-service). However, if you want to avoid this behavior (e.g., to reduce network traffic or avoid frequent re-syncing), you can disable it. When passing `auto_update_proxy=False` to the client, the proxy will not track changes after the initial connection:
|
||||||
|
|
||||||
|
```python
|
||||||
|
client = pydase.Client(
|
||||||
|
url="ws://localhost:8001",
|
||||||
|
auto_update_proxy=False
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Direct API Access
|
||||||
|
|
||||||
|
In addition to using the `proxy` object, users may access the server API directly via the following methods:
|
||||||
|
|
||||||
|
```python
|
||||||
|
client = pydase.Client(url="ws://localhost:8001")
|
||||||
|
|
||||||
|
# Get the current value of an attribute
|
||||||
|
value = client.get_value("device.voltage")
|
||||||
|
|
||||||
|
# Update an attribute
|
||||||
|
client.update_value("device.voltage", 5.0)
|
||||||
|
|
||||||
|
# Call a method on the remote service
|
||||||
|
result = client.trigger_method("device.reset")
|
||||||
|
```
|
||||||
|
|
||||||
|
This bypasses the proxy and is useful for lower-level access to individual service endpoints.
|
||||||
|
|
||||||
|
## Accessing Services Behind Firewalls or SSH Gateways
|
||||||
|
|
||||||
|
If your service is only reachable through a private network or SSH gateway, you can route your connection through a local SOCKS5 proxy using the `proxy_url` parameter.
|
||||||
|
|
||||||
|
See [Connecting Through a SOCKS5 Proxy](../advanced/SOCKS-Proxy.md) for details.
|
||||||
|
|
||||||
|
## Context Manager Support
|
||||||
|
|
||||||
|
You can also use the client within a context manager, which automatically handles connection management (i.e., opening and closing the connection):
|
||||||
|
|
||||||
|
```python
|
||||||
|
import pydase
|
||||||
|
|
||||||
|
|
||||||
|
with pydase.Client(url="ws://localhost:8001") as client:
|
||||||
|
client.proxy.my_method()
|
||||||
|
```
|
||||||
|
|
||||||
|
Using the context manager ensures that connections are cleanly closed once the block of code finishes executing.
|
||||||
|
|
||||||
|
## Tab Completion Support
|
||||||
|
|
||||||
|
In interactive environments like Python interpreters or Jupyter notebooks, the proxy supports tab completion. This allows users to explore available methods and attributes.
|
||||||
|
|
||||||
|
## Integrating the Client into Another Service
|
||||||
|
|
||||||
|
You can integrate a `pydase` client proxy within another service. Here's an example of how to set this up:
|
||||||
|
|
||||||
|
```python
|
||||||
|
import pydase
|
||||||
|
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
proxy = pydase.Client(
|
||||||
|
url="ws://<ip_addr>:<service_port>",
|
||||||
|
block_until_connected=False,
|
||||||
|
client_id="my_pydase_client_id", # optional, defaults to system hostname
|
||||||
|
).proxy
|
||||||
|
|
||||||
|
# For SSL-encrypted services, use the wss protocol
|
||||||
|
# proxy = pydase.Client(
|
||||||
|
# url="wss://your-domain.ch",
|
||||||
|
# block_until_connected=False,
|
||||||
|
# client_id="my_pydase_client_id",
|
||||||
|
# ).proxy
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
service = MyService()
|
||||||
|
# Create a server that exposes this service
|
||||||
|
server = pydase.Server(service, web_port=8002).run()
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example:
|
||||||
|
|
||||||
|
- The `MyService` class has a `proxy` attribute that connects to a `pydase` service at `<ip_addr>:<service_port>`.
|
||||||
|
- By setting `block_until_connected=False`, the service can start without waiting for the connection to succeed.
|
||||||
|
- The `client_id` is optional. If not specified, it defaults to the system hostname, which will be sent in the `X-Client-Id` HTTP header for logging or authentication on the server side.
|
||||||
|
|
||||||
|
## Custom `socketio.AsyncClient` Connection Parameters
|
||||||
|
|
||||||
|
You can configure advanced connection options by passing arguments to the underlying [`AsyncClient`][socketio.AsyncClient] via `sio_client_kwargs`. For example:
|
||||||
|
|
||||||
|
```python
|
||||||
|
client = pydase.Client(
|
||||||
|
url="ws://localhost:8001",
|
||||||
|
sio_client_kwargs={
|
||||||
|
"reconnection_attempts": 3,
|
||||||
|
"reconnection_delay": 2,
|
||||||
|
"reconnection_delay_max": 10,
|
||||||
|
}
|
||||||
|
).proxy
|
||||||
|
```
|
||||||
|
|
||||||
|
In this setup, the client will attempt to reconnect three times, with an initial delay of 2 seconds (each successive attempt doubles this delay) and a maximum delay of 10 seconds between attempts.
|
||||||
@@ -1,81 +1,7 @@
|
|||||||
# Interacting with `pydase` Services
|
# Interacting with `pydase` Services
|
||||||
|
|
||||||
`pydase` offers multiple ways for users to interact with the services they create, providing flexibility and convenience for different use cases. This section outlines the primary interaction methods available, including an auto-generated frontend, a RESTful API, and a Python client based on Socket.IO.
|
`pydase` offers multiple ways for users to interact with the services they create.
|
||||||
|
|
||||||
{%
|
- [Auto-generated Frontend](./Auto-generated-Frontend.md)
|
||||||
include-markdown "./Auto-generated Frontend.md"
|
- [RESTful API](./RESTful-API.md)
|
||||||
heading-offset=1
|
- [Python Client](./Python-Client.md)
|
||||||
%}
|
|
||||||
|
|
||||||
{%
|
|
||||||
include-markdown "./RESTful API.md"
|
|
||||||
heading-offset=1
|
|
||||||
%}
|
|
||||||
|
|
||||||
{%
|
|
||||||
include-markdown "./Python Client.md"
|
|
||||||
heading-offset=1
|
|
||||||
%}
|
|
||||||
|
|
||||||
<!-- ## 2. **Socket.IO for Real-Time Updates** -->
|
|
||||||
<!-- For scenarios requiring real-time data updates, `pydase` includes a Socket.IO server. This feature is ideal for applications where live data tracking is crucial, such as monitoring systems or interactive dashboards. -->
|
|
||||||
<!---->
|
|
||||||
<!-- ### Key Features: -->
|
|
||||||
<!-- - **Live Data Streams**: Receive real-time updates for data changes. -->
|
|
||||||
<!-- - **Event-Driven Communication**: Utilize event-based messaging to push updates and handle client actions. -->
|
|
||||||
<!---->
|
|
||||||
<!-- ### Example Usage: -->
|
|
||||||
<!-- Clients can connect to the Socket.IO server to receive updates: -->
|
|
||||||
<!-- ```javascript -->
|
|
||||||
<!-- var socket = io.connect('http://<hostname>:<port>'); -->
|
|
||||||
<!-- socket.on('<event_name>', function(data) { -->
|
|
||||||
<!-- console.log(data); -->
|
|
||||||
<!-- }); -->
|
|
||||||
<!-- ``` -->
|
|
||||||
<!---->
|
|
||||||
<!-- **Use Cases:** -->
|
|
||||||
<!---->
|
|
||||||
<!-- - Real-time monitoring and alerts -->
|
|
||||||
<!-- - Live data visualization -->
|
|
||||||
<!-- - Collaborative applications -->
|
|
||||||
<!---->
|
|
||||||
<!-- ## 3. **Auto-Generated Frontend** -->
|
|
||||||
<!-- `pydase` automatically generates a web frontend based on the service definitions. This frontend is a convenient interface for interacting with the service, especially for users who prefer a graphical interface over command-line or code-based interactions. -->
|
|
||||||
<!---->
|
|
||||||
<!-- ### Key Features: -->
|
|
||||||
<!-- - **User-Friendly Interface**: Intuitive and easy to use, with real-time interaction capabilities. -->
|
|
||||||
<!-- - **Customizable**: Adjust the frontend's appearance and functionality to suit specific needs. -->
|
|
||||||
<!---->
|
|
||||||
<!-- ### Accessing the Frontend: -->
|
|
||||||
<!-- Once the service is running, access the frontend via a web browser: -->
|
|
||||||
<!-- ``` -->
|
|
||||||
<!-- http://<hostname>:<port> -->
|
|
||||||
<!-- ``` -->
|
|
||||||
<!---->
|
|
||||||
<!-- **Use Cases:** -->
|
|
||||||
<!---->
|
|
||||||
<!-- - End-user interfaces for data control and visualization -->
|
|
||||||
<!-- - Rapid prototyping and testing -->
|
|
||||||
<!-- - Demonstrations and training -->
|
|
||||||
<!---->
|
|
||||||
<!-- ## 4. **Python Client** -->
|
|
||||||
<!-- `pydase` also provides a Python client for programmatic interactions. This client is particularly useful for developers who want to integrate `pydase` services into other Python applications or automate interactions. -->
|
|
||||||
<!---->
|
|
||||||
<!-- ### Key Features: -->
|
|
||||||
<!-- - **Direct Interaction**: Call methods and access properties as if they were local. -->
|
|
||||||
<!-- - **Tab Completion**: Supports tab completion in interactive environments like Jupyter notebooks. -->
|
|
||||||
<!---->
|
|
||||||
<!-- ### Example Usage: -->
|
|
||||||
<!-- ```python -->
|
|
||||||
<!-- import pydase -->
|
|
||||||
<!---->
|
|
||||||
<!-- client = pydase.Client(hostname="<ip_addr>", port=8001) -->
|
|
||||||
<!-- service = client.proxy -->
|
|
||||||
<!-- service.some_method() -->
|
|
||||||
<!-- ``` -->
|
|
||||||
<!---->
|
|
||||||
<!-- **Use Cases:** -->
|
|
||||||
<!---->
|
|
||||||
<!-- - Integrating with other Python applications -->
|
|
||||||
<!-- - Automation and scripting -->
|
|
||||||
<!-- - Data analysis and manipulation -->
|
|
||||||
|
|||||||
@@ -3,11 +3,18 @@
|
|||||||
|
|
||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8" />
|
<meta charset="UTF-8" />
|
||||||
|
<link rel="icon" type="image/svg+xml" href="/favicon.ico" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
<meta name="theme-color" content="#000000" />
|
<meta name="theme-color" content="#000000" />
|
||||||
<meta name="description" content="Web site displaying a pydase UI." />
|
<meta name="description" content="Web site displaying a pydase UI." />
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
// this will be set by the python backend if the service is behind a proxy which strips a prefix. The frontend can use this to build the paths to the resources.
|
||||||
|
window.__FORWARDED_PREFIX__ = "";
|
||||||
|
window.__FORWARDED_PROTO__ = "";
|
||||||
|
</script>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
<noscript>You need to enable JavaScript to run this app.</noscript>
|
<noscript>You need to enable JavaScript to run this app.</noscript>
|
||||||
<div id="root"></div>
|
<div id="root"></div>
|
||||||
|
|||||||
3556
frontend/package-lock.json
generated
@@ -10,31 +10,31 @@
|
|||||||
"preview": "vite preview"
|
"preview": "vite preview"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@emotion/styled": "^11.11.0",
|
"@emotion/styled": "^11.14.0",
|
||||||
"@mui/material": "^5.14.1",
|
"@mui/material": "^5.16.14",
|
||||||
"bootstrap": "^5.3.3",
|
"bootstrap": "^5.3.3",
|
||||||
"deep-equal": "^2.2.3",
|
"deep-equal": "^2.2.3",
|
||||||
"react": "^18.3.1",
|
"react": "^19.0.0",
|
||||||
"react-bootstrap": "^2.10.0",
|
"react-bootstrap": "^2.10.7",
|
||||||
"react-bootstrap-icons": "^1.11.4",
|
"react-bootstrap-icons": "^1.11.5",
|
||||||
"socket.io-client": "^4.7.1"
|
"socket.io-client": "^4.8.1"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@eslint/js": "^9.6.0",
|
"@eslint/js": "^9.18.0",
|
||||||
"@types/deep-equal": "^1.0.4",
|
"@types/deep-equal": "^1.0.4",
|
||||||
"@types/eslint__js": "^8.42.3",
|
"@types/eslint__js": "^8.42.3",
|
||||||
"@types/node": "^20.14.10",
|
"@types/node": "^20.17.14",
|
||||||
"@types/react": "^18.3.3",
|
"@types/react": "^19.0.7",
|
||||||
"@types/react-dom": "^18.3.0",
|
"@types/react-dom": "^19.0.3",
|
||||||
"@typescript-eslint/eslint-plugin": "^7.15.0",
|
"@typescript-eslint/eslint-plugin": "^7.15.0",
|
||||||
"@vitejs/plugin-react-swc": "^3.5.0",
|
"@vitejs/plugin-react-swc": "^3.7.2",
|
||||||
"eslint": "^8.57.0",
|
"eslint": "^8.57.1",
|
||||||
"eslint-config-prettier": "^9.1.0",
|
"eslint-config-prettier": "^9.1.0",
|
||||||
"eslint-plugin-prettier": "^5.1.3",
|
"eslint-plugin-prettier": "^5.2.3",
|
||||||
"eslint-plugin-react": "^7.34.3",
|
"eslint-plugin-react": "^7.37.4",
|
||||||
"prettier": "3.3.2",
|
"prettier": "3.3.2",
|
||||||
"typescript": "^5.5.3",
|
"typescript": "^5.7.3",
|
||||||
"typescript-eslint": "^7.15.0",
|
"typescript-eslint": "^7.18.0",
|
||||||
"vite": "^5.3.1"
|
"vite": "^6.3.5"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
BIN
frontend/public/favicon.ico
Normal file
|
After Width: | Height: | Size: 77 KiB |
@@ -1,6 +1,6 @@
|
|||||||
import { useCallback, useEffect, useReducer, useState } from "react";
|
import { useCallback, useEffect, useReducer, useState } from "react";
|
||||||
import { Navbar, Form, Offcanvas, Container } from "react-bootstrap";
|
import { Navbar, Form, Offcanvas, Container } from "react-bootstrap";
|
||||||
import { hostname, port, socket } from "./socket";
|
import { authority, socket, forwardedProto } from "./socket";
|
||||||
import "./App.css";
|
import "./App.css";
|
||||||
import {
|
import {
|
||||||
Notifications,
|
Notifications,
|
||||||
@@ -68,12 +68,12 @@ const App = () => {
|
|||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
// Allow the user to add a custom css file
|
// Allow the user to add a custom css file
|
||||||
fetch(`http://${hostname}:${port}/custom.css`)
|
fetch(`${forwardedProto}://${authority}/custom.css`, { credentials: "include" })
|
||||||
.then((response) => {
|
.then((response) => {
|
||||||
if (response.ok) {
|
if (response.ok) {
|
||||||
// If the file exists, create a link element for the custom CSS
|
// If the file exists, create a link element for the custom CSS
|
||||||
const link = document.createElement("link");
|
const link = document.createElement("link");
|
||||||
link.href = `http://${hostname}:${port}/custom.css`;
|
link.href = `${forwardedProto}://${authority}/custom.css`;
|
||||||
link.type = "text/css";
|
link.type = "text/css";
|
||||||
link.rel = "stylesheet";
|
link.rel = "stylesheet";
|
||||||
document.head.appendChild(link);
|
document.head.appendChild(link);
|
||||||
@@ -83,7 +83,9 @@ const App = () => {
|
|||||||
|
|
||||||
socket.on("connect", () => {
|
socket.on("connect", () => {
|
||||||
// Fetch data from the API when the client connects
|
// Fetch data from the API when the client connects
|
||||||
fetch(`http://${hostname}:${port}/service-properties`)
|
fetch(`${forwardedProto}://${authority}/service-properties`, {
|
||||||
|
credentials: "include",
|
||||||
|
})
|
||||||
.then((response) => response.json())
|
.then((response) => response.json())
|
||||||
.then((data: State) => {
|
.then((data: State) => {
|
||||||
dispatch({ type: "SET_DATA", data });
|
dispatch({ type: "SET_DATA", data });
|
||||||
@@ -91,7 +93,7 @@ const App = () => {
|
|||||||
|
|
||||||
document.title = data.name; // Setting browser tab title
|
document.title = data.name; // Setting browser tab title
|
||||||
});
|
});
|
||||||
fetch(`http://${hostname}:${port}/web-settings`)
|
fetch(`${forwardedProto}://${authority}/web-settings`, { credentials: "include" })
|
||||||
.then((response) => response.json())
|
.then((response) => response.json())
|
||||||
.then((data: Record<string, WebSetting>) => setWebSettings(data));
|
.then((data: Record<string, WebSetting>) => setWebSettings(data));
|
||||||
setConnectionStatus("connected");
|
setConnectionStatus("connected");
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ import { NumberComponent, NumberObject } from "./NumberComponent";
|
|||||||
import { SliderComponent } from "./SliderComponent";
|
import { SliderComponent } from "./SliderComponent";
|
||||||
import { EnumComponent } from "./EnumComponent";
|
import { EnumComponent } from "./EnumComponent";
|
||||||
import { MethodComponent } from "./MethodComponent";
|
import { MethodComponent } from "./MethodComponent";
|
||||||
import { AsyncMethodComponent } from "./AsyncMethodComponent";
|
|
||||||
import { StringComponent } from "./StringComponent";
|
import { StringComponent } from "./StringComponent";
|
||||||
import { ListComponent } from "./ListComponent";
|
import { ListComponent } from "./ListComponent";
|
||||||
import { DataServiceComponent, DataServiceJSON } from "./DataServiceComponent";
|
import { DataServiceComponent, DataServiceJSON } from "./DataServiceComponent";
|
||||||
@@ -17,6 +16,7 @@ import { updateValue } from "../socket";
|
|||||||
import { DictComponent } from "./DictComponent";
|
import { DictComponent } from "./DictComponent";
|
||||||
import { parseFullAccessPath } from "../utils/stateUtils";
|
import { parseFullAccessPath } from "../utils/stateUtils";
|
||||||
import { SerializedEnum, SerializedObject } from "../types/SerializedObject";
|
import { SerializedEnum, SerializedObject } from "../types/SerializedObject";
|
||||||
|
import { TaskComponent, TaskStatus } from "./TaskComponent";
|
||||||
|
|
||||||
interface GenericComponentProps {
|
interface GenericComponentProps {
|
||||||
attribute: SerializedObject;
|
attribute: SerializedObject;
|
||||||
@@ -50,7 +50,7 @@ const createDisplayNameFromAccessPath = (fullAccessPath: string): string => {
|
|||||||
|
|
||||||
function changeCallback(
|
function changeCallback(
|
||||||
value: SerializedObject,
|
value: SerializedObject,
|
||||||
callback: (ack: unknown) => void = () => {},
|
callback: (ack: undefined | SerializedObject) => void = () => {},
|
||||||
) {
|
) {
|
||||||
updateValue(value, callback);
|
updateValue(value, callback);
|
||||||
}
|
}
|
||||||
@@ -144,30 +144,16 @@ export const GenericComponent = React.memo(
|
|||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
} else if (attribute.type === "method") {
|
} else if (attribute.type === "method") {
|
||||||
if (!attribute.async) {
|
return (
|
||||||
return (
|
<MethodComponent
|
||||||
<MethodComponent
|
fullAccessPath={fullAccessPath}
|
||||||
fullAccessPath={fullAccessPath}
|
docString={attribute.doc}
|
||||||
docString={attribute.doc}
|
addNotification={addNotification}
|
||||||
addNotification={addNotification}
|
displayName={displayName}
|
||||||
displayName={displayName}
|
id={id}
|
||||||
id={id}
|
render={attribute.frontend_render}
|
||||||
render={attribute.frontend_render}
|
/>
|
||||||
/>
|
);
|
||||||
);
|
|
||||||
} else {
|
|
||||||
return (
|
|
||||||
<AsyncMethodComponent
|
|
||||||
fullAccessPath={fullAccessPath}
|
|
||||||
docString={attribute.doc}
|
|
||||||
value={attribute.value as "RUNNING" | null}
|
|
||||||
addNotification={addNotification}
|
|
||||||
displayName={displayName}
|
|
||||||
id={id}
|
|
||||||
render={attribute.frontend_render}
|
|
||||||
/>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} else if (attribute.type === "str") {
|
} else if (attribute.type === "str") {
|
||||||
return (
|
return (
|
||||||
<StringComponent
|
<StringComponent
|
||||||
@@ -182,6 +168,17 @@ export const GenericComponent = React.memo(
|
|||||||
id={id}
|
id={id}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
|
} else if (attribute.type == "Task") {
|
||||||
|
return (
|
||||||
|
<TaskComponent
|
||||||
|
fullAccessPath={fullAccessPath}
|
||||||
|
docString={attribute.doc}
|
||||||
|
status={attribute.value["status"].value as TaskStatus}
|
||||||
|
addNotification={addNotification}
|
||||||
|
displayName={displayName}
|
||||||
|
id={id}
|
||||||
|
/>
|
||||||
|
);
|
||||||
} else if (attribute.type === "DataService") {
|
} else if (attribute.type === "DataService") {
|
||||||
return (
|
return (
|
||||||
<DataServiceComponent
|
<DataServiceComponent
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import React, { useEffect, useState } from "react";
|
import React, { useEffect, useRef, useState } from "react";
|
||||||
import { Form, InputGroup } from "react-bootstrap";
|
import { Form, InputGroup } from "react-bootstrap";
|
||||||
import { DocStringComponent } from "./DocStringComponent";
|
import { DocStringComponent } from "./DocStringComponent";
|
||||||
import "../App.css";
|
import "../App.css";
|
||||||
@@ -38,7 +38,10 @@ interface NumberComponentProps {
|
|||||||
isInstantUpdate: boolean;
|
isInstantUpdate: boolean;
|
||||||
unit?: string;
|
unit?: string;
|
||||||
addNotification: (message: string, levelname?: LevelName) => void;
|
addNotification: (message: string, levelname?: LevelName) => void;
|
||||||
changeCallback?: (value: SerializedObject, callback?: (ack: unknown) => void) => void;
|
changeCallback?: (
|
||||||
|
value: SerializedObject,
|
||||||
|
callback?: (ack: undefined | SerializedObject) => void,
|
||||||
|
) => void;
|
||||||
displayName?: string;
|
displayName?: string;
|
||||||
id: string;
|
id: string;
|
||||||
}
|
}
|
||||||
@@ -132,6 +135,8 @@ const handleNumericKey = (
|
|||||||
selectionStart: number,
|
selectionStart: number,
|
||||||
selectionEnd: number,
|
selectionEnd: number,
|
||||||
) => {
|
) => {
|
||||||
|
let newValue = value;
|
||||||
|
|
||||||
// Check if a number key or a decimal point key is pressed
|
// Check if a number key or a decimal point key is pressed
|
||||||
if (key === "." && value.includes(".")) {
|
if (key === "." && value.includes(".")) {
|
||||||
// Check if value already contains a decimal. If so, ignore input.
|
// Check if value already contains a decimal. If so, ignore input.
|
||||||
@@ -139,20 +144,67 @@ const handleNumericKey = (
|
|||||||
return { value, selectionStart };
|
return { value, selectionStart };
|
||||||
}
|
}
|
||||||
|
|
||||||
let newValue = value;
|
// Handle minus sign input
|
||||||
|
if (key === "-") {
|
||||||
|
if (selectionStart === 0 && selectionEnd > selectionStart) {
|
||||||
|
// Replace selection with minus if selection starts at 0
|
||||||
|
newValue = "-" + value.slice(selectionEnd);
|
||||||
|
selectionStart = 1;
|
||||||
|
} else if (selectionStart === 0 && !value.startsWith("-")) {
|
||||||
|
// Add minus at the beginning if it doesn't exist
|
||||||
|
newValue = "-" + value;
|
||||||
|
selectionStart = 1;
|
||||||
|
} else if (
|
||||||
|
(selectionStart === 0 || selectionStart === 1) &&
|
||||||
|
value.startsWith("-")
|
||||||
|
) {
|
||||||
|
// Remove minus if it exists
|
||||||
|
newValue = value.slice(1);
|
||||||
|
selectionStart = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
return { value: newValue, selectionStart };
|
||||||
|
}
|
||||||
|
|
||||||
// Add the new key at the cursor's position
|
// Add the new key at the cursor's position
|
||||||
if (selectionEnd > selectionStart) {
|
if (selectionEnd > selectionStart) {
|
||||||
// If there is a selection, replace it with the key
|
// If there is a selection, replace it with the key
|
||||||
newValue = value.slice(0, selectionStart) + key + value.slice(selectionEnd);
|
newValue = value.slice(0, selectionStart) + key + value.slice(selectionEnd);
|
||||||
} else {
|
} else {
|
||||||
// otherwise, append the key after the selection start
|
// Otherwise, insert the key at the cursor position
|
||||||
newValue = value.slice(0, selectionStart) + key + value.slice(selectionStart);
|
newValue = value.slice(0, selectionStart) + key + value.slice(selectionStart);
|
||||||
}
|
}
|
||||||
|
|
||||||
return { value: newValue, selectionStart: selectionStart + 1 };
|
return { value: newValue, selectionStart: selectionStart + 1 };
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculates the new cursor position after moving left by a specified step size.
|
||||||
|
*
|
||||||
|
* @param cursorPosition - The current position of the cursor.
|
||||||
|
* @param step - The number of positions to move left.
|
||||||
|
* @returns The new cursor position, clamped to a minimum of 0.
|
||||||
|
*/
|
||||||
|
const getCursorLeftPosition = (cursorPosition: number, step: number): number => {
|
||||||
|
return Math.max(0, cursorPosition - step);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculates the new cursor position after moving right by a specified step size.
|
||||||
|
*
|
||||||
|
* @param cursorPosition - The current position of the cursor.
|
||||||
|
* @param step - The number of positions to move right.
|
||||||
|
* @param maxPosition - The maximum allowed cursor position (e.g., value.length).
|
||||||
|
* @returns The new cursor position, clamped to a maximum of maxPosition.
|
||||||
|
*/
|
||||||
|
const getCursorRightPosition = (
|
||||||
|
cursorPosition: number,
|
||||||
|
step: number,
|
||||||
|
maxPosition: number,
|
||||||
|
): number => {
|
||||||
|
return Math.min(maxPosition, cursorPosition + step);
|
||||||
|
};
|
||||||
|
|
||||||
export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
||||||
const {
|
const {
|
||||||
fullAccessPath,
|
fullAccessPath,
|
||||||
@@ -168,8 +220,18 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
|||||||
id,
|
id,
|
||||||
} = props;
|
} = props;
|
||||||
|
|
||||||
|
const handleChange = (newValue: SerializedObject) => {
|
||||||
|
changeCallback(newValue, (result: undefined | SerializedObject) => {
|
||||||
|
if (result === undefined) return;
|
||||||
|
if (result.type == "Exception") {
|
||||||
|
setInputString(value.toString());
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
// Create a state for the cursor position
|
// Create a state for the cursor position
|
||||||
const [cursorPosition, setCursorPosition] = useState<number | null>(null);
|
const cursorPositionRef = useRef<number | null>(null);
|
||||||
|
|
||||||
// Create a state for the input string
|
// Create a state for the input string
|
||||||
const [inputString, setInputString] = useState(value.toString());
|
const [inputString, setInputString] = useState(value.toString());
|
||||||
const renderCount = useRenderCount();
|
const renderCount = useRenderCount();
|
||||||
@@ -177,41 +239,42 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
|||||||
const handleKeyDown = (event: React.KeyboardEvent<HTMLInputElement>) => {
|
const handleKeyDown = (event: React.KeyboardEvent<HTMLInputElement>) => {
|
||||||
const { key, target } = event;
|
const { key, target } = event;
|
||||||
|
|
||||||
// Typecast
|
|
||||||
const inputTarget = target as HTMLInputElement;
|
const inputTarget = target as HTMLInputElement;
|
||||||
if (
|
|
||||||
key === "F1" ||
|
|
||||||
key === "F5" ||
|
|
||||||
key === "F12" ||
|
|
||||||
key === "Tab" ||
|
|
||||||
key === "ArrowRight" ||
|
|
||||||
key === "ArrowLeft"
|
|
||||||
) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
event.preventDefault();
|
|
||||||
|
|
||||||
// Get the current input value and cursor position
|
// Get the current input value and cursor position
|
||||||
const { value } = inputTarget;
|
const { value } = inputTarget;
|
||||||
|
const valueLength = value.length;
|
||||||
const selectionEnd = inputTarget.selectionEnd ?? 0;
|
const selectionEnd = inputTarget.selectionEnd ?? 0;
|
||||||
let selectionStart = inputTarget.selectionStart ?? 0;
|
let selectionStart = inputTarget.selectionStart ?? 0;
|
||||||
|
|
||||||
|
if (key === "F1" || key === "F5" || key === "F12" || key === "Tab") {
|
||||||
|
return;
|
||||||
|
} else if (key === "ArrowLeft" || key === "ArrowRight") {
|
||||||
|
const hasSelection = selectionEnd > selectionStart;
|
||||||
|
|
||||||
|
if (hasSelection && !event.shiftKey) {
|
||||||
|
// Collapse selection: ArrowLeft -> start, ArrowRight -> end
|
||||||
|
const collapseTo = key === "ArrowLeft" ? selectionStart : selectionEnd;
|
||||||
|
cursorPositionRef.current = collapseTo;
|
||||||
|
} else {
|
||||||
|
// No selection or shift key is pressed, just move cursor by one
|
||||||
|
const newSelectionStart =
|
||||||
|
key === "ArrowLeft"
|
||||||
|
? getCursorLeftPosition(selectionStart, 1)
|
||||||
|
: getCursorRightPosition(selectionEnd, 1, valueLength);
|
||||||
|
|
||||||
|
cursorPositionRef.current = newSelectionStart;
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
event.preventDefault();
|
||||||
|
|
||||||
let newValue: string = value;
|
let newValue: string = value;
|
||||||
if (event.ctrlKey && key === "a") {
|
if (event.ctrlKey && key === "a") {
|
||||||
// Select everything when pressing Ctrl + a
|
// Select everything when pressing Ctrl + a
|
||||||
inputTarget.setSelectionRange(0, value.length);
|
inputTarget.setSelectionRange(0, value.length);
|
||||||
return;
|
return;
|
||||||
} else if (key === "-") {
|
} else if ((key >= "0" && key <= "9") || key === "-") {
|
||||||
if (selectionStart === 0 && !value.startsWith("-")) {
|
|
||||||
newValue = "-" + value;
|
|
||||||
selectionStart++;
|
|
||||||
} else if (value.startsWith("-") && selectionStart === 1) {
|
|
||||||
newValue = value.substring(1); // remove minus sign
|
|
||||||
selectionStart--;
|
|
||||||
} else {
|
|
||||||
return; // Ignore "-" pressed in other positions
|
|
||||||
}
|
|
||||||
} else if (key >= "0" && key <= "9") {
|
|
||||||
// Check if a number key or a decimal point key is pressed
|
// Check if a number key or a decimal point key is pressed
|
||||||
({ value: newValue, selectionStart } = handleNumericKey(
|
({ value: newValue, selectionStart } = handleNumericKey(
|
||||||
key,
|
key,
|
||||||
@@ -268,7 +331,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
changeCallback(serializedObject);
|
handleChange(serializedObject);
|
||||||
return;
|
return;
|
||||||
} else {
|
} else {
|
||||||
console.debug(key);
|
console.debug(key);
|
||||||
@@ -299,13 +362,13 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
changeCallback(serializedObject);
|
handleChange(serializedObject);
|
||||||
}
|
}
|
||||||
|
|
||||||
setInputString(newValue);
|
setInputString(newValue);
|
||||||
|
|
||||||
// Save the current cursor position before the component re-renders
|
// Save the current cursor position before the component re-renders
|
||||||
setCursorPosition(selectionStart);
|
cursorPositionRef.current = selectionStart;
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleBlur = () => {
|
const handleBlur = () => {
|
||||||
@@ -333,7 +396,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
changeCallback(serializedObject);
|
handleChange(serializedObject);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -358,8 +421,11 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
|||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
// Set the cursor position after the component re-renders
|
// Set the cursor position after the component re-renders
|
||||||
const inputElement = document.getElementsByName(id)[0] as HTMLInputElement;
|
const inputElement = document.getElementsByName(id)[0] as HTMLInputElement;
|
||||||
if (inputElement && cursorPosition !== null) {
|
if (inputElement && cursorPositionRef.current !== null) {
|
||||||
inputElement.setSelectionRange(cursorPosition, cursorPosition);
|
inputElement.setSelectionRange(
|
||||||
|
cursorPositionRef.current,
|
||||||
|
cursorPositionRef.current,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -19,7 +19,10 @@ interface SliderComponentProps {
|
|||||||
stepSize: NumberObject;
|
stepSize: NumberObject;
|
||||||
isInstantUpdate: boolean;
|
isInstantUpdate: boolean;
|
||||||
addNotification: (message: string, levelname?: LevelName) => void;
|
addNotification: (message: string, levelname?: LevelName) => void;
|
||||||
changeCallback?: (value: SerializedObject, callback?: (ack: unknown) => void) => void;
|
changeCallback?: (
|
||||||
|
value: SerializedObject,
|
||||||
|
callback?: (ack: undefined | SerializedObject) => void,
|
||||||
|
) => void;
|
||||||
displayName: string;
|
displayName: string;
|
||||||
id: string;
|
id: string;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,67 +5,51 @@ import { DocStringComponent } from "./DocStringComponent";
|
|||||||
import { LevelName } from "./NotificationsComponent";
|
import { LevelName } from "./NotificationsComponent";
|
||||||
import useRenderCount from "../hooks/useRenderCount";
|
import useRenderCount from "../hooks/useRenderCount";
|
||||||
|
|
||||||
interface AsyncMethodProps {
|
export type TaskStatus = "RUNNING" | "NOT_RUNNING";
|
||||||
|
|
||||||
|
interface TaskProps {
|
||||||
fullAccessPath: string;
|
fullAccessPath: string;
|
||||||
value: "RUNNING" | null;
|
|
||||||
docString: string | null;
|
docString: string | null;
|
||||||
hideOutput?: boolean;
|
status: TaskStatus;
|
||||||
addNotification: (message: string, levelname?: LevelName) => void;
|
addNotification: (message: string, levelname?: LevelName) => void;
|
||||||
displayName: string;
|
displayName: string;
|
||||||
id: string;
|
id: string;
|
||||||
render: boolean;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
|
export const TaskComponent = React.memo((props: TaskProps) => {
|
||||||
const {
|
const { fullAccessPath, docString, status, addNotification, displayName, id } = props;
|
||||||
fullAccessPath,
|
|
||||||
docString,
|
|
||||||
value: runningTask,
|
|
||||||
addNotification,
|
|
||||||
displayName,
|
|
||||||
id,
|
|
||||||
} = props;
|
|
||||||
|
|
||||||
// Conditional rendering based on the 'render' prop.
|
|
||||||
if (!props.render) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const renderCount = useRenderCount();
|
const renderCount = useRenderCount();
|
||||||
const formRef = useRef(null);
|
const formRef = useRef(null);
|
||||||
const [spinning, setSpinning] = useState(false);
|
const [spinning, setSpinning] = useState(false);
|
||||||
const name = fullAccessPath.split(".").at(-1)!;
|
|
||||||
const parentPath = fullAccessPath.slice(0, -(name.length + 1));
|
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
let message: string;
|
let message: string;
|
||||||
|
|
||||||
if (runningTask === null) {
|
if (status === "RUNNING") {
|
||||||
message = `${fullAccessPath} task was stopped.`;
|
|
||||||
} else {
|
|
||||||
message = `${fullAccessPath} was started.`;
|
message = `${fullAccessPath} was started.`;
|
||||||
|
} else {
|
||||||
|
message = `${fullAccessPath} was stopped.`;
|
||||||
}
|
}
|
||||||
|
|
||||||
addNotification(message);
|
addNotification(message);
|
||||||
setSpinning(false);
|
setSpinning(false);
|
||||||
}, [props.value]);
|
}, [status]);
|
||||||
|
|
||||||
const execute = async (event: React.FormEvent) => {
|
const execute = async (event: React.FormEvent) => {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
let method_name: string;
|
|
||||||
|
|
||||||
if (runningTask !== undefined && runningTask !== null) {
|
const method_name = status == "RUNNING" ? "stop" : "start";
|
||||||
method_name = `stop_${name}`;
|
|
||||||
} else {
|
|
||||||
method_name = `start_${name}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
const accessPath = [parentPath, method_name].filter((element) => element).join(".");
|
const accessPath = [fullAccessPath, method_name]
|
||||||
|
.filter((element) => element)
|
||||||
|
.join(".");
|
||||||
setSpinning(true);
|
setSpinning(true);
|
||||||
runMethod(accessPath);
|
runMethod(accessPath);
|
||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="component asyncMethodComponent" id={id}>
|
<div className="component taskComponent" id={id}>
|
||||||
{process.env.NODE_ENV === "development" && <div>Render count: {renderCount}</div>}
|
{process.env.NODE_ENV === "development" && <div>Render count: {renderCount}</div>}
|
||||||
<Form onSubmit={execute} ref={formRef}>
|
<Form onSubmit={execute} ref={formRef}>
|
||||||
<InputGroup>
|
<InputGroup>
|
||||||
@@ -76,7 +60,7 @@ export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
|
|||||||
<Button id={`button-${id}`} type="submit">
|
<Button id={`button-${id}`} type="submit">
|
||||||
{spinning ? (
|
{spinning ? (
|
||||||
<Spinner size="sm" role="status" aria-hidden="true" />
|
<Spinner size="sm" role="status" aria-hidden="true" />
|
||||||
) : runningTask === "RUNNING" ? (
|
) : status === "RUNNING" ? (
|
||||||
"Stop "
|
"Stop "
|
||||||
) : (
|
) : (
|
||||||
"Start "
|
"Start "
|
||||||
@@ -88,4 +72,4 @@ export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
AsyncMethodComponent.displayName = "AsyncMethodComponent";
|
TaskComponent.displayName = "TaskComponent";
|
||||||
@@ -1,8 +1,9 @@
|
|||||||
import { useState, useEffect } from "react";
|
import { useState, useEffect } from "react";
|
||||||
|
import { authority } from "../socket";
|
||||||
|
|
||||||
export default function useLocalStorage(key: string, defaultValue: unknown) {
|
export default function useLocalStorage(key: string, defaultValue: unknown) {
|
||||||
const [value, setValue] = useState(() => {
|
const [value, setValue] = useState(() => {
|
||||||
const storedValue = localStorage.getItem(key);
|
const storedValue = localStorage.getItem(`${authority}:${key}`);
|
||||||
if (storedValue) {
|
if (storedValue) {
|
||||||
return JSON.parse(storedValue);
|
return JSON.parse(storedValue);
|
||||||
}
|
}
|
||||||
@@ -11,7 +12,7 @@ export default function useLocalStorage(key: string, defaultValue: unknown) {
|
|||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (value === undefined) return;
|
if (value === undefined) return;
|
||||||
localStorage.setItem(key, JSON.stringify(value));
|
localStorage.setItem(`${authority}:${key}`, JSON.stringify(value));
|
||||||
}, [value, key]);
|
}, [value, key]);
|
||||||
|
|
||||||
return [value, setValue];
|
return [value, setValue];
|
||||||
|
|||||||
@@ -2,18 +2,33 @@ import { io } from "socket.io-client";
|
|||||||
import { serializeDict, serializeList } from "./utils/serializationUtils";
|
import { serializeDict, serializeList } from "./utils/serializationUtils";
|
||||||
import { SerializedObject } from "./types/SerializedObject";
|
import { SerializedObject } from "./types/SerializedObject";
|
||||||
|
|
||||||
export const hostname =
|
const hostname =
|
||||||
process.env.NODE_ENV === "development" ? `localhost` : window.location.hostname;
|
process.env.NODE_ENV === "development" ? `localhost` : window.location.hostname;
|
||||||
export const port =
|
const port = process.env.NODE_ENV === "development" ? 8001 : window.location.port;
|
||||||
process.env.NODE_ENV === "development" ? 8001 : window.location.port;
|
|
||||||
const URL = `ws://${hostname}:${port}/`;
|
|
||||||
console.debug("Websocket: ", URL);
|
|
||||||
|
|
||||||
export const socket = io(URL, { path: "/ws/socket.io", transports: ["websocket"] });
|
// Get the forwarded prefix from the global variable
|
||||||
|
export const forwardedPrefix: string =
|
||||||
|
(window as any) /* eslint-disable-line @typescript-eslint/no-explicit-any */
|
||||||
|
.__FORWARDED_PREFIX__ || "";
|
||||||
|
// Get the forwarded protocol type from the global variable
|
||||||
|
export const forwardedProto: string =
|
||||||
|
(window as any) /* eslint-disable-line @typescript-eslint/no-explicit-any */
|
||||||
|
.__FORWARDED_PROTO__ || "http";
|
||||||
|
|
||||||
|
export const authority = `${hostname}:${port}${forwardedPrefix}`;
|
||||||
|
|
||||||
|
const wsProto = forwardedProto === "http" ? "ws" : "wss";
|
||||||
|
|
||||||
|
const URL = `${wsProto}://${hostname}:${port}/`;
|
||||||
|
console.debug("Websocket: ", URL);
|
||||||
|
export const socket = io(URL, {
|
||||||
|
path: `${forwardedPrefix}/ws/socket.io`,
|
||||||
|
transports: ["websocket"],
|
||||||
|
});
|
||||||
|
|
||||||
export const updateValue = (
|
export const updateValue = (
|
||||||
serializedObject: SerializedObject,
|
serializedObject: SerializedObject,
|
||||||
callback?: (ack: unknown) => void,
|
callback?: (ack: undefined | SerializedObject) => void,
|
||||||
) => {
|
) => {
|
||||||
if (callback) {
|
if (callback) {
|
||||||
socket.emit(
|
socket.emit(
|
||||||
|
|||||||
@@ -77,7 +77,12 @@ type SerializedException = SerializedObjectBase & {
|
|||||||
type: "Exception";
|
type: "Exception";
|
||||||
};
|
};
|
||||||
|
|
||||||
type DataServiceTypes = "DataService" | "Image" | "NumberSlider" | "DeviceConnection";
|
type DataServiceTypes =
|
||||||
|
| "DataService"
|
||||||
|
| "Image"
|
||||||
|
| "NumberSlider"
|
||||||
|
| "DeviceConnection"
|
||||||
|
| "Task";
|
||||||
|
|
||||||
type SerializedDataService = SerializedObjectBase & {
|
type SerializedDataService = SerializedObjectBase & {
|
||||||
name: string;
|
name: string;
|
||||||
|
|||||||
15
mkdocs.yml
@@ -6,11 +6,20 @@ nav:
|
|||||||
- Getting Started: getting-started.md
|
- Getting Started: getting-started.md
|
||||||
- User Guide:
|
- User Guide:
|
||||||
- Components Guide: user-guide/Components.md
|
- Components Guide: user-guide/Components.md
|
||||||
- Interacting with pydase Services: user-guide/interaction/README.md
|
- Interaction:
|
||||||
|
- Overview: user-guide/interaction/README.md
|
||||||
|
- Auto-generated Frontend: user-guide/interaction/Auto-generated-Frontend.md
|
||||||
|
- RESTful API: user-guide/interaction/RESTful-API.md
|
||||||
|
- Python Client: user-guide/interaction/Python-Client.md
|
||||||
- Achieving Service Persistence: user-guide/Service_Persistence.md
|
- Achieving Service Persistence: user-guide/Service_Persistence.md
|
||||||
- Understanding Tasks: user-guide/Tasks.md
|
- Understanding Tasks: user-guide/Tasks.md
|
||||||
- Understanding Units: user-guide/Understanding-Units.md
|
- Understanding Units: user-guide/Understanding-Units.md
|
||||||
- Validating Property Setters: user-guide/Validating-Property-Setters.md
|
- Validating Property Setters: user-guide/Validating-Property-Setters.md
|
||||||
|
- Configuring pydase: user-guide/Configuration.md
|
||||||
|
- Logging in pydase: user-guide/Logging.md
|
||||||
|
- Advanced:
|
||||||
|
- Deploying behind a Reverse Proxy: user-guide/advanced/Reverse-Proxy.md
|
||||||
|
- Connecting through a SOCKS Proxy: user-guide/advanced/SOCKS-Proxy.md
|
||||||
- Developer Guide:
|
- Developer Guide:
|
||||||
- Developer Guide: dev-guide/README.md
|
- Developer Guide: dev-guide/README.md
|
||||||
- API Reference: dev-guide/api.md
|
- API Reference: dev-guide/api.md
|
||||||
@@ -22,6 +31,7 @@ nav:
|
|||||||
- License: about/license.md
|
- License: about/license.md
|
||||||
|
|
||||||
theme:
|
theme:
|
||||||
|
logo: images/logo-colour.png
|
||||||
name: material
|
name: material
|
||||||
features:
|
features:
|
||||||
- content.code.copy
|
- content.code.copy
|
||||||
@@ -50,10 +60,11 @@ plugins:
|
|||||||
handlers:
|
handlers:
|
||||||
python:
|
python:
|
||||||
paths: [src] # search packages in the src folder
|
paths: [src] # search packages in the src folder
|
||||||
import:
|
inventories:
|
||||||
- https://docs.python.org/3/objects.inv
|
- https://docs.python.org/3/objects.inv
|
||||||
- https://docs.pydantic.dev/latest/objects.inv
|
- https://docs.pydantic.dev/latest/objects.inv
|
||||||
- https://confz.readthedocs.io/en/latest/objects.inv
|
- https://confz.readthedocs.io/en/latest/objects.inv
|
||||||
|
- https://python-socketio.readthedocs.io/en/stable/objects.inv
|
||||||
options:
|
options:
|
||||||
show_source: true
|
show_source: true
|
||||||
inherited_members: true
|
inherited_members: true
|
||||||
|
|||||||
3267
poetry.lock
generated
@@ -1,49 +1,56 @@
|
|||||||
[tool.poetry]
|
[project]
|
||||||
name = "pydase"
|
name = "pydase"
|
||||||
version = "0.9.1"
|
version = "0.10.21"
|
||||||
description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases."
|
description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases."
|
||||||
authors = ["Mose Mueller <mosmuell@ethz.ch>"]
|
authors = [
|
||||||
|
{name = "Mose Müller",email = "mosemueller@gmail.com"}
|
||||||
|
]
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
packages = [{ include = "pydase", from = "src" }]
|
requires-python = ">=3.10,<4.0"
|
||||||
|
dependencies = [
|
||||||
|
"toml (>=0.10.2,<0.11.0)",
|
||||||
|
"python-socketio (>=5.13.0,<6.0.0)",
|
||||||
|
"confz (>=2.1.0,<3.0.0)",
|
||||||
|
"pint (>=0.24.4,<0.25.0)",
|
||||||
|
"websocket-client (>=1.8.0,<2.0.0)",
|
||||||
|
"aiohttp (>=3.11.18,<4.0.0)",
|
||||||
|
"click (>=8.2.0,<9.0.0)",
|
||||||
|
"aiohttp-middlewares (>=2.4.0,<3.0.0)",
|
||||||
|
"anyio (>=4.9.0,<5.0.0)"
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
socks = ["aiohttp-socks (>=0.10.1,<0.11.0)"]
|
||||||
|
|
||||||
[tool.poetry.dependencies]
|
[tool.poetry]
|
||||||
python = "^3.10"
|
packages = [{include = "pydase", from = "src"}]
|
||||||
toml = "^0.10.2"
|
|
||||||
python-socketio = "^5.8.0"
|
|
||||||
confz = "^2.0.0"
|
|
||||||
pint = "^0.24"
|
|
||||||
websocket-client = "^1.7.0"
|
|
||||||
aiohttp = "^3.9.3"
|
|
||||||
click = "^8.1.7"
|
|
||||||
aiohttp-middlewares = "^2.3.0"
|
|
||||||
|
|
||||||
[tool.poetry.group.dev]
|
[tool.poetry.group.dev]
|
||||||
optional = true
|
optional = true
|
||||||
|
|
||||||
[tool.poetry.group.dev.dependencies]
|
[tool.poetry.group.dev.dependencies]
|
||||||
types-toml = "^0.10.8.6"
|
types-toml = "^0.10.8.20240310"
|
||||||
pytest = "^7.4.0"
|
pytest = "^8.3.5"
|
||||||
pytest-cov = "^4.1.0"
|
pytest-cov = "^6.1.1"
|
||||||
mypy = "^1.4.1"
|
mypy = "^1.15.0"
|
||||||
matplotlib = "^3.7.2"
|
matplotlib = "^3.10.3"
|
||||||
pyright = "^1.1.323"
|
pyright = "^1.1.400"
|
||||||
pytest-mock = "^3.11.1"
|
pytest-mock = "^3.14.0"
|
||||||
ruff = "^0.5.0"
|
ruff = "^0.11.10"
|
||||||
pytest-asyncio = "^0.23.2"
|
pytest-asyncio = "^0.26.0"
|
||||||
|
|
||||||
[tool.poetry.group.docs]
|
[tool.poetry.group.docs]
|
||||||
optional = true
|
optional = true
|
||||||
|
|
||||||
[tool.poetry.group.docs.dependencies]
|
[tool.poetry.group.docs.dependencies]
|
||||||
mkdocs-material = "^9.5.30"
|
mkdocs-material = "^9.6.14"
|
||||||
mkdocs-include-markdown-plugin = "^3.9.1"
|
mkdocs-include-markdown-plugin = "^7.1.5"
|
||||||
mkdocstrings = {extras = ["python"], version = "^0.25.2"}
|
mkdocstrings = {extras = ["python"], version = "^0.29.1"}
|
||||||
pymdown-extensions = "^10.1"
|
pymdown-extensions = "^10.15"
|
||||||
mkdocs-swagger-ui-tag = "^0.6.10"
|
mkdocs-swagger-ui-tag = "^0.7.1"
|
||||||
|
|
||||||
[build-system]
|
[build-system]
|
||||||
requires = ["poetry-core"]
|
requires = ["poetry-core>=2.0.0,<3.0.0"]
|
||||||
build-backend = "poetry.core.masonry.api"
|
build-backend = "poetry.core.masonry.api"
|
||||||
|
|
||||||
[tool.ruff]
|
[tool.ruff]
|
||||||
@@ -85,6 +92,7 @@ select = [
|
|||||||
ignore = [
|
ignore = [
|
||||||
"RUF006", # asyncio-dangling-task
|
"RUF006", # asyncio-dangling-task
|
||||||
"PERF203", # try-except-in-loop
|
"PERF203", # try-except-in-loop
|
||||||
|
"ASYNC110", # async-busy-wait
|
||||||
]
|
]
|
||||||
|
|
||||||
[tool.ruff.lint.mccabe]
|
[tool.ruff.lint.mccabe]
|
||||||
@@ -103,3 +111,10 @@ disallow_incomplete_defs = true
|
|||||||
disallow_any_generics = true
|
disallow_any_generics = true
|
||||||
check_untyped_defs = true
|
check_untyped_defs = true
|
||||||
ignore_missing_imports = false
|
ignore_missing_imports = false
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
asyncio_default_fixture_loop_scope = "function"
|
||||||
|
filterwarnings = [
|
||||||
|
# I don't controll the usage of the timeout
|
||||||
|
"ignore:parameter 'timeout' of type 'float' is deprecated, please use 'timeout=ClientWSTimeout"
|
||||||
|
]
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ from pydase.utils.logging import setup_logging
|
|||||||
setup_logging()
|
setup_logging()
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
|
"Client",
|
||||||
"DataService",
|
"DataService",
|
||||||
"Server",
|
"Server",
|
||||||
"Client",
|
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,13 +1,23 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
|
import socket
|
||||||
import sys
|
import sys
|
||||||
import threading
|
import threading
|
||||||
from typing import TypedDict, cast
|
import urllib.parse
|
||||||
|
from builtins import ModuleNotFoundError
|
||||||
|
from types import TracebackType
|
||||||
|
from typing import TYPE_CHECKING, Any, TypedDict, cast
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
import socketio # type: ignore
|
import socketio # type: ignore
|
||||||
|
|
||||||
import pydase.components
|
from pydase.client.proxy_class import ProxyClass
|
||||||
from pydase.client.proxy_loader import ProxyClassMixin, ProxyLoader
|
from pydase.client.proxy_loader import (
|
||||||
|
ProxyLoader,
|
||||||
|
get_value,
|
||||||
|
trigger_method,
|
||||||
|
update_value,
|
||||||
|
)
|
||||||
from pydase.utils.serialization.deserializer import loads
|
from pydase.utils.serialization.deserializer import loads
|
||||||
from pydase.utils.serialization.types import SerializedDataService, SerializedObject
|
from pydase.utils.serialization.types import SerializedDataService, SerializedObject
|
||||||
|
|
||||||
@@ -33,116 +43,186 @@ def asyncio_loop_thread(loop: asyncio.AbstractEventLoop) -> None:
|
|||||||
asyncio.set_event_loop(loop)
|
asyncio.set_event_loop(loop)
|
||||||
try:
|
try:
|
||||||
loop.run_forever()
|
loop.run_forever()
|
||||||
except RuntimeError:
|
finally:
|
||||||
logger.debug("Tried starting even loop, but it is running already")
|
loop.close()
|
||||||
|
|
||||||
|
|
||||||
class ProxyClass(ProxyClassMixin, pydase.components.DeviceConnection):
|
|
||||||
"""
|
|
||||||
A proxy class that serves as the interface for interacting with device connections
|
|
||||||
via a socket.io client in an asyncio environment.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
sio_client:
|
|
||||||
The socket.io client instance used for asynchronous communication with the
|
|
||||||
pydase service server.
|
|
||||||
loop:
|
|
||||||
The event loop in which the client operations are managed and executed.
|
|
||||||
|
|
||||||
This class is used to create a proxy object that behaves like a local representation
|
|
||||||
of a remote pydase service, facilitating direct interaction as if it were local
|
|
||||||
while actually communicating over network protocols.
|
|
||||||
It can also be used as an attribute of a pydase service itself, e.g.
|
|
||||||
|
|
||||||
```python
|
|
||||||
import pydase
|
|
||||||
|
|
||||||
|
|
||||||
class MyService(pydase.DataService):
|
|
||||||
proxy = pydase.Client(
|
|
||||||
hostname="...", port=8001, block_until_connected=False
|
|
||||||
).proxy
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
service = MyService()
|
|
||||||
server = pydase.Server(service, web_port=8002).run()
|
|
||||||
```
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self, sio_client: socketio.AsyncClient, loop: asyncio.AbstractEventLoop
|
|
||||||
) -> None:
|
|
||||||
super().__init__()
|
|
||||||
self._initialise(sio_client=sio_client, loop=loop)
|
|
||||||
|
|
||||||
|
|
||||||
class Client:
|
class Client:
|
||||||
"""
|
"""A client for connecting to a remote pydase service using Socket.IO. This client
|
||||||
A client for connecting to a remote pydase service using socket.io. This client
|
|
||||||
handles asynchronous communication with a service, manages events such as
|
handles asynchronous communication with a service, manages events such as
|
||||||
connection, disconnection, and updates, and ensures that the proxy object is
|
connection, disconnection, and updates, and ensures that the proxy object is
|
||||||
up-to-date with the server state.
|
up-to-date with the server state.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
url:
|
url: The URL of the pydase Socket.IO server. This should always contain the
|
||||||
The URL of the pydase Socket.IO server. This should always contain the
|
protocol (e.g., `ws` or `wss`) and the hostname, and can optionally include
|
||||||
protocol and the hostname.
|
a path prefix (e.g., `ws://localhost:8001/service`).
|
||||||
|
block_until_connected: If set to True, the constructor will block until the
|
||||||
|
connection to the service has been established. This is useful for ensuring
|
||||||
|
the client is ready to use immediately after instantiation. Default is True.
|
||||||
|
sio_client_kwargs: Additional keyword arguments passed to the underlying
|
||||||
|
[`AsyncClient`][socketio.AsyncClient]. This allows fine-tuning of the
|
||||||
|
client's behaviour (e.g., reconnection attempts or reconnection delay).
|
||||||
|
client_id: An optional client identifier. This ID is sent to the server as the
|
||||||
|
`X-Client-Id` HTTP header. It can be used for logging or authentication
|
||||||
|
purposes on the server side. If not provided, it defaults to the hostname
|
||||||
|
of the machine running the client.
|
||||||
|
proxy_url: An optional proxy URL to route the connection through. This is useful
|
||||||
|
if the service is only reachable via an SSH tunnel or behind a firewall
|
||||||
|
(e.g., `socks5://localhost:2222`).
|
||||||
|
auto_update_proxy: If False, disables automatic updates from the server. Useful
|
||||||
|
for request-only clients where real-time synchronization is not needed.
|
||||||
|
|
||||||
Examples:
|
Example:
|
||||||
|
Connect to a service directly:
|
||||||
|
|
||||||
- `wss://my-service.example.com` # for secure connections, use wss
|
```python
|
||||||
- `ws://localhost:8001`
|
client = pydase.Client(url="ws://localhost:8001")
|
||||||
block_until_connected:
|
```
|
||||||
If set to True, the constructor will block until the connection to the
|
|
||||||
service has been established. This is useful for ensuring the client is
|
Connect over a secure connection:
|
||||||
ready to use immediately after instantiation. Default is True.
|
|
||||||
|
```python
|
||||||
|
client = pydase.Client(url="wss://my-service.example.com")
|
||||||
|
```
|
||||||
|
|
||||||
|
Connect using a SOCKS5 proxy (e.g., through an SSH tunnel):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
ssh -D 2222 user@gateway.example.com
|
||||||
|
```
|
||||||
|
|
||||||
|
```python
|
||||||
|
client = pydase.Client(
|
||||||
|
url="ws://remote-server:8001",
|
||||||
|
proxy_url="socks5://localhost:2222"
|
||||||
|
)
|
||||||
|
```
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__( # noqa: PLR0913
|
||||||
self,
|
self,
|
||||||
*,
|
*,
|
||||||
url: str,
|
url: str,
|
||||||
block_until_connected: bool = True,
|
block_until_connected: bool = True,
|
||||||
|
sio_client_kwargs: dict[str, Any] = {},
|
||||||
|
client_id: str | None = None,
|
||||||
|
proxy_url: str | None = None,
|
||||||
|
auto_update_proxy: bool = True, # new argument
|
||||||
):
|
):
|
||||||
|
# Parse the URL to separate base URL and path prefix
|
||||||
|
parsed_url = urllib.parse.urlparse(url)
|
||||||
|
|
||||||
|
# Construct the base URL without the path
|
||||||
|
self._base_url = urllib.parse.urlunparse(
|
||||||
|
(parsed_url.scheme, parsed_url.netloc, "", "", "", "")
|
||||||
|
)
|
||||||
|
|
||||||
|
# Store the path prefix (e.g., "/service" in "ws://localhost:8081/service")
|
||||||
|
self._path_prefix = parsed_url.path.rstrip("/") # Remove trailing slash if any
|
||||||
self._url = url
|
self._url = url
|
||||||
self._sio = socketio.AsyncClient()
|
self._proxy_url = proxy_url
|
||||||
self._loop = asyncio.new_event_loop()
|
self._client_id = client_id or socket.gethostname()
|
||||||
self.proxy = ProxyClass(sio_client=self._sio, loop=self._loop)
|
self._sio_client_kwargs = sio_client_kwargs
|
||||||
|
self._loop: asyncio.AbstractEventLoop | None = None
|
||||||
|
self._thread: threading.Thread | None = None
|
||||||
|
self._auto_update_proxy = auto_update_proxy
|
||||||
|
self.proxy: ProxyClass
|
||||||
"""A proxy object representing the remote service, facilitating interaction as
|
"""A proxy object representing the remote service, facilitating interaction as
|
||||||
if it were local."""
|
if it were local."""
|
||||||
self._thread = threading.Thread(
|
|
||||||
target=asyncio_loop_thread, args=(self._loop,), daemon=True
|
|
||||||
)
|
|
||||||
self._thread.start()
|
|
||||||
self.connect(block_until_connected=block_until_connected)
|
self.connect(block_until_connected=block_until_connected)
|
||||||
|
|
||||||
def __enter__(self) -> Self:
|
def __enter__(self) -> Self:
|
||||||
self.connect(block_until_connected=True)
|
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def __del__(self) -> None:
|
def __exit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> None:
|
||||||
self.disconnect()
|
self.disconnect()
|
||||||
|
|
||||||
def connect(self, block_until_connected: bool = True) -> None:
|
def connect(self, block_until_connected: bool = True) -> None:
|
||||||
|
if self._thread is None or self._loop is None:
|
||||||
|
self._loop = self._initialize_loop_and_thread()
|
||||||
|
self._initialize_socketio_client()
|
||||||
|
self.proxy = ProxyClass(
|
||||||
|
sio_client=self._sio,
|
||||||
|
loop=self._loop,
|
||||||
|
reconnect=self.connect,
|
||||||
|
)
|
||||||
|
|
||||||
connection_future = asyncio.run_coroutine_threadsafe(
|
connection_future = asyncio.run_coroutine_threadsafe(
|
||||||
self._connect(), self._loop
|
self._connect(), self._loop
|
||||||
)
|
)
|
||||||
if block_until_connected:
|
if block_until_connected:
|
||||||
connection_future.result()
|
connection_future.result()
|
||||||
|
|
||||||
def disconnect(self) -> None:
|
def _initialize_socketio_client(self) -> None:
|
||||||
connection_future = asyncio.run_coroutine_threadsafe(
|
if self._proxy_url is not None:
|
||||||
self._disconnect(), self._loop
|
try:
|
||||||
|
import aiohttp_socks.connector
|
||||||
|
except ModuleNotFoundError:
|
||||||
|
raise ModuleNotFoundError(
|
||||||
|
"Missing dependency 'aiohttp_socks'. To use SOCKS5 proxy support, "
|
||||||
|
"install the optional 'socks' extra:\n\n"
|
||||||
|
' pip install "pydase[socks]"\n\n'
|
||||||
|
"This is required when specifying a `proxy_url` for "
|
||||||
|
"`pydase.Client`."
|
||||||
|
)
|
||||||
|
|
||||||
|
session = aiohttp.ClientSession(
|
||||||
|
connector=aiohttp_socks.connector.ProxyConnector.from_url(
|
||||||
|
url=self._proxy_url, loop=self._loop
|
||||||
|
),
|
||||||
|
loop=self._loop,
|
||||||
|
)
|
||||||
|
self._sio = socketio.AsyncClient(
|
||||||
|
http_session=session, **self._sio_client_kwargs
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self._sio = socketio.AsyncClient(**self._sio_client_kwargs)
|
||||||
|
|
||||||
|
def _initialize_loop_and_thread(self) -> asyncio.AbstractEventLoop:
|
||||||
|
"""Initialize a new asyncio event loop, start it in a background thread,
|
||||||
|
and create the ProxyClass instance bound to that loop.
|
||||||
|
"""
|
||||||
|
|
||||||
|
loop = asyncio.new_event_loop()
|
||||||
|
self._thread = threading.Thread(
|
||||||
|
target=asyncio_loop_thread,
|
||||||
|
args=(loop,),
|
||||||
|
daemon=True,
|
||||||
)
|
)
|
||||||
connection_future.result()
|
self._thread.start()
|
||||||
|
|
||||||
|
return loop
|
||||||
|
|
||||||
|
def disconnect(self) -> None:
|
||||||
|
if self._loop is not None and self._thread is not None:
|
||||||
|
connection_future = asyncio.run_coroutine_threadsafe(
|
||||||
|
self._disconnect(), self._loop
|
||||||
|
)
|
||||||
|
connection_future.result()
|
||||||
|
|
||||||
|
# Stop the event loop and thread
|
||||||
|
self._loop.call_soon_threadsafe(self._loop.stop)
|
||||||
|
self._thread.join()
|
||||||
|
self._thread = None
|
||||||
|
|
||||||
async def _connect(self) -> None:
|
async def _connect(self) -> None:
|
||||||
logger.debug("Connecting to server '%s' ...", self._url)
|
logger.debug("Connecting to server '%s' ...", self._url)
|
||||||
await self._setup_events()
|
await self._setup_events()
|
||||||
|
|
||||||
|
headers = {}
|
||||||
|
if self._client_id is not None:
|
||||||
|
headers["X-Client-Id"] = self._client_id
|
||||||
|
|
||||||
await self._sio.connect(
|
await self._sio.connect(
|
||||||
self._url,
|
url=self._base_url,
|
||||||
socketio_path="/ws/socket.io",
|
headers=headers,
|
||||||
|
socketio_path=f"{self._path_prefix}/ws/socket.io",
|
||||||
transports=["websocket"],
|
transports=["websocket"],
|
||||||
retry=True,
|
retry=True,
|
||||||
)
|
)
|
||||||
@@ -153,18 +233,25 @@ class Client:
|
|||||||
async def _setup_events(self) -> None:
|
async def _setup_events(self) -> None:
|
||||||
self._sio.on("connect", self._handle_connect)
|
self._sio.on("connect", self._handle_connect)
|
||||||
self._sio.on("disconnect", self._handle_disconnect)
|
self._sio.on("disconnect", self._handle_disconnect)
|
||||||
self._sio.on("notify", self._handle_update)
|
if self._auto_update_proxy:
|
||||||
|
self._sio.on("notify", self._handle_update)
|
||||||
|
|
||||||
async def _handle_connect(self) -> None:
|
async def _handle_connect(self) -> None:
|
||||||
logger.debug("Connected to '%s' ...", self._url)
|
logger.debug("Connected to '%s' ...", self._url)
|
||||||
serialized_object = cast(
|
if self._auto_update_proxy:
|
||||||
SerializedDataService, await self._sio.call("service_serialization")
|
serialized_object = cast(
|
||||||
)
|
"SerializedDataService", await self._sio.call("service_serialization")
|
||||||
ProxyLoader.update_data_service_proxy(
|
)
|
||||||
self.proxy, serialized_object=serialized_object
|
ProxyLoader.update_data_service_proxy(
|
||||||
)
|
self.proxy, serialized_object=serialized_object
|
||||||
serialized_object["type"] = "DeviceConnection"
|
)
|
||||||
self.proxy._notify_changed("", loads(serialized_object))
|
serialized_object["type"] = "DeviceConnection"
|
||||||
|
# need to use object.__setattr__ to not trigger an observer notification
|
||||||
|
object.__setattr__(self.proxy, "_service_representation", serialized_object)
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
self.proxy._service_representation = serialized_object # type: ignore
|
||||||
|
self.proxy._notify_changed("", self.proxy)
|
||||||
self.proxy._connected = True
|
self.proxy._connected = True
|
||||||
|
|
||||||
async def _handle_disconnect(self) -> None:
|
async def _handle_disconnect(self) -> None:
|
||||||
@@ -176,3 +263,77 @@ class Client:
|
|||||||
data["data"]["full_access_path"],
|
data["data"]["full_access_path"],
|
||||||
loads(data["data"]["value"]),
|
loads(data["data"]["value"]),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def get_value(self, access_path: str) -> Any:
|
||||||
|
"""Retrieve the current value of a remote attribute.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
access_path: The dot-separated path to the attribute in the remote service.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The deserialized value of the remote attribute, or None if the client is not
|
||||||
|
connected.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```python
|
||||||
|
value = client.get_value("my_device.temperature")
|
||||||
|
print(value)
|
||||||
|
```
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self._loop is not None:
|
||||||
|
return get_value(
|
||||||
|
sio_client=self._sio,
|
||||||
|
loop=self._loop,
|
||||||
|
access_path=access_path,
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
def update_value(self, access_path: str, new_value: Any) -> Any:
|
||||||
|
"""Set a new value for a remote attribute.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
access_path: The dot-separated path to the attribute in the remote service.
|
||||||
|
new_value: The new value to assign to the attribute.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```python
|
||||||
|
client.update_value("my_device.power", True)
|
||||||
|
```
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self._loop is not None:
|
||||||
|
update_value(
|
||||||
|
sio_client=self._sio,
|
||||||
|
loop=self._loop,
|
||||||
|
access_path=access_path,
|
||||||
|
value=new_value,
|
||||||
|
)
|
||||||
|
|
||||||
|
def trigger_method(self, access_path: str, *args: Any, **kwargs: Any) -> Any:
|
||||||
|
"""Trigger a remote method with optional arguments.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
access_path: The dot-separated path to the method in the remote service.
|
||||||
|
*args: Positional arguments to pass to the method.
|
||||||
|
**kwargs: Keyword arguments to pass to the method.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The return value of the method call, if any.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```python
|
||||||
|
result = client.trigger_method("my_device.calibrate", timeout=5)
|
||||||
|
print(result)
|
||||||
|
```
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self._loop is not None:
|
||||||
|
return trigger_method(
|
||||||
|
sio_client=self._sio,
|
||||||
|
loop=self._loop,
|
||||||
|
access_path=access_path,
|
||||||
|
args=list(args),
|
||||||
|
kwargs=kwargs,
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|||||||
124
src/pydase/client/proxy_class.py
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from collections.abc import Callable
|
||||||
|
from copy import deepcopy
|
||||||
|
from typing import TYPE_CHECKING, cast
|
||||||
|
|
||||||
|
import socketio # type: ignore
|
||||||
|
|
||||||
|
import pydase.components
|
||||||
|
from pydase.client.proxy_loader import ProxyClassMixin
|
||||||
|
from pydase.utils.helpers import get_attribute_doc
|
||||||
|
from pydase.utils.serialization.types import SerializedDataService, SerializedObject
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ProxyClass(ProxyClassMixin, pydase.components.DeviceConnection):
|
||||||
|
"""
|
||||||
|
A proxy class that serves as the interface for interacting with device connections
|
||||||
|
via a socket.io client in an asyncio environment.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
sio_client:
|
||||||
|
The socket.io client instance used for asynchronous communication with the
|
||||||
|
pydase service server.
|
||||||
|
loop:
|
||||||
|
The event loop in which the client operations are managed and executed.
|
||||||
|
reconnect:
|
||||||
|
The method that is called periodically when the client is not connected.
|
||||||
|
|
||||||
|
This class is used to create a proxy object that behaves like a local representation
|
||||||
|
of a remote pydase service, facilitating direct interaction as if it were local
|
||||||
|
while actually communicating over network protocols.
|
||||||
|
It can also be used as an attribute of a pydase service itself, e.g.
|
||||||
|
|
||||||
|
```python
|
||||||
|
import pydase
|
||||||
|
|
||||||
|
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
proxy = pydase.Client(
|
||||||
|
hostname="...", port=8001, block_until_connected=False
|
||||||
|
).proxy
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
service = MyService()
|
||||||
|
server = pydase.Server(service, web_port=8002).run()
|
||||||
|
```
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
sio_client: socketio.AsyncClient,
|
||||||
|
loop: asyncio.AbstractEventLoop,
|
||||||
|
reconnect: Callable[..., None],
|
||||||
|
) -> None:
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
self._service_representation: None | SerializedObject = None
|
||||||
|
|
||||||
|
super().__init__()
|
||||||
|
pydase.components.DeviceConnection.__init__(self)
|
||||||
|
self._initialise(sio_client=sio_client, loop=loop)
|
||||||
|
object.__setattr__(self, "_service_representation", None)
|
||||||
|
self.reconnect = reconnect
|
||||||
|
|
||||||
|
def serialize(self) -> SerializedObject:
|
||||||
|
current_loop = asyncio.get_event_loop()
|
||||||
|
|
||||||
|
if not self.connected or current_loop == self._loop:
|
||||||
|
logger.debug(
|
||||||
|
"Client not connected, or called from within client event loop - using "
|
||||||
|
"fallback serialization"
|
||||||
|
)
|
||||||
|
if self._service_representation is None:
|
||||||
|
serialized_service = pydase.components.DeviceConnection().serialize()
|
||||||
|
else:
|
||||||
|
serialized_service = self._service_representation
|
||||||
|
|
||||||
|
else:
|
||||||
|
future = cast(
|
||||||
|
"asyncio.Future[SerializedDataService]",
|
||||||
|
asyncio.run_coroutine_threadsafe(
|
||||||
|
self._sio.call("service_serialization"), self._loop
|
||||||
|
),
|
||||||
|
)
|
||||||
|
result = future.result()
|
||||||
|
# need to use object.__setattr__ to not trigger an observer notification
|
||||||
|
object.__setattr__(self, "_service_representation", result)
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
self._service_representation = result
|
||||||
|
serialized_service = result
|
||||||
|
|
||||||
|
device_connection_value = cast(
|
||||||
|
"dict[str, SerializedObject]",
|
||||||
|
pydase.components.DeviceConnection().serialize()["value"],
|
||||||
|
)
|
||||||
|
|
||||||
|
readonly = False
|
||||||
|
doc = get_attribute_doc(self)
|
||||||
|
obj_name = self.__class__.__name__
|
||||||
|
|
||||||
|
value = {
|
||||||
|
**cast(
|
||||||
|
"dict[str, SerializedObject]",
|
||||||
|
# need to deepcopy to not overwrite the _service_representation dict
|
||||||
|
# when adding a prefix with add_prefix_to_full_access_path
|
||||||
|
deepcopy(serialized_service["value"]),
|
||||||
|
),
|
||||||
|
**device_connection_value,
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
"full_access_path": "",
|
||||||
|
"name": obj_name,
|
||||||
|
"type": "DeviceConnection",
|
||||||
|
"value": value,
|
||||||
|
"readonly": readonly,
|
||||||
|
"doc": doc,
|
||||||
|
}
|
||||||
|
|
||||||
|
def connect(self) -> None:
|
||||||
|
if not self._sio.reconnection or self._sio.reconnection_attempts > 0:
|
||||||
|
self.reconnect(block_until_connected=False)
|
||||||
@@ -1,7 +1,6 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
from collections.abc import Iterable
|
from collections.abc import Iterable
|
||||||
from copy import copy
|
|
||||||
from typing import TYPE_CHECKING, Any, cast
|
from typing import TYPE_CHECKING, Any, cast
|
||||||
|
|
||||||
import socketio # type: ignore
|
import socketio # type: ignore
|
||||||
@@ -75,6 +74,21 @@ def update_value(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_value(
|
||||||
|
sio_client: socketio.AsyncClient,
|
||||||
|
loop: asyncio.AbstractEventLoop,
|
||||||
|
access_path: str,
|
||||||
|
) -> Any:
|
||||||
|
async def get_result() -> Any:
|
||||||
|
return await sio_client.call("get_value", access_path)
|
||||||
|
|
||||||
|
result = asyncio.run_coroutine_threadsafe(
|
||||||
|
get_result(),
|
||||||
|
loop=loop,
|
||||||
|
).result()
|
||||||
|
return ProxyLoader.loads_proxy(result, sio_client, loop)
|
||||||
|
|
||||||
|
|
||||||
class ProxyDict(dict[str, Any]):
|
class ProxyDict(dict[str, Any]):
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
@@ -124,35 +138,35 @@ class ProxyList(list[Any]):
|
|||||||
|
|
||||||
update_value(self._sio, self._loop, full_access_path, value)
|
update_value(self._sio, self._loop, full_access_path, value)
|
||||||
|
|
||||||
def append(self, __object: Any) -> None:
|
def append(self, object_: Any, /) -> None:
|
||||||
full_access_path = f"{self._parent_path}.append"
|
full_access_path = f"{self._parent_path}.append"
|
||||||
|
|
||||||
trigger_method(self._sio, self._loop, full_access_path, [__object], {})
|
trigger_method(self._sio, self._loop, full_access_path, [object_], {})
|
||||||
|
|
||||||
def clear(self) -> None:
|
def clear(self) -> None:
|
||||||
full_access_path = f"{self._parent_path}.clear"
|
full_access_path = f"{self._parent_path}.clear"
|
||||||
|
|
||||||
trigger_method(self._sio, self._loop, full_access_path, [], {})
|
trigger_method(self._sio, self._loop, full_access_path, [], {})
|
||||||
|
|
||||||
def extend(self, __iterable: Iterable[Any]) -> None:
|
def extend(self, iterable: Iterable[Any], /) -> None:
|
||||||
full_access_path = f"{self._parent_path}.extend"
|
full_access_path = f"{self._parent_path}.extend"
|
||||||
|
|
||||||
trigger_method(self._sio, self._loop, full_access_path, [__iterable], {})
|
trigger_method(self._sio, self._loop, full_access_path, [iterable], {})
|
||||||
|
|
||||||
def insert(self, __index: SupportsIndex, __object: Any) -> None:
|
def insert(self, index: SupportsIndex, object_: Any, /) -> None:
|
||||||
full_access_path = f"{self._parent_path}.insert"
|
full_access_path = f"{self._parent_path}.insert"
|
||||||
|
|
||||||
trigger_method(self._sio, self._loop, full_access_path, [__index, __object], {})
|
trigger_method(self._sio, self._loop, full_access_path, [index, object_], {})
|
||||||
|
|
||||||
def pop(self, __index: SupportsIndex = -1) -> Any:
|
def pop(self, index: SupportsIndex = -1, /) -> Any:
|
||||||
full_access_path = f"{self._parent_path}.pop"
|
full_access_path = f"{self._parent_path}.pop"
|
||||||
|
|
||||||
return trigger_method(self._sio, self._loop, full_access_path, [__index], {})
|
return trigger_method(self._sio, self._loop, full_access_path, [index], {})
|
||||||
|
|
||||||
def remove(self, __value: Any) -> None:
|
def remove(self, value: Any, /) -> None:
|
||||||
full_access_path = f"{self._parent_path}.remove"
|
full_access_path = f"{self._parent_path}.remove"
|
||||||
|
|
||||||
trigger_method(self._sio, self._loop, full_access_path, [__value], {})
|
trigger_method(self._sio, self._loop, full_access_path, [value], {})
|
||||||
|
|
||||||
|
|
||||||
class ProxyClassMixin:
|
class ProxyClassMixin:
|
||||||
@@ -202,25 +216,8 @@ class ProxyClassMixin:
|
|||||||
def _handle_serialized_method(
|
def _handle_serialized_method(
|
||||||
self, attr_name: str, serialized_object: SerializedObject
|
self, attr_name: str, serialized_object: SerializedObject
|
||||||
) -> None:
|
) -> None:
|
||||||
def add_prefix_to_last_path_element(s: str, prefix: str) -> str:
|
|
||||||
parts = s.split(".")
|
|
||||||
parts[-1] = f"{prefix}_{parts[-1]}"
|
|
||||||
return ".".join(parts)
|
|
||||||
|
|
||||||
if serialized_object["type"] == "method":
|
if serialized_object["type"] == "method":
|
||||||
if serialized_object["async"] is True:
|
self._add_method_proxy(attr_name, serialized_object)
|
||||||
start_method = copy(serialized_object)
|
|
||||||
start_method["full_access_path"] = add_prefix_to_last_path_element(
|
|
||||||
start_method["full_access_path"], "start"
|
|
||||||
)
|
|
||||||
stop_method = copy(serialized_object)
|
|
||||||
stop_method["full_access_path"] = add_prefix_to_last_path_element(
|
|
||||||
stop_method["full_access_path"], "stop"
|
|
||||||
)
|
|
||||||
self._add_method_proxy(f"start_{attr_name}", start_method)
|
|
||||||
self._add_method_proxy(f"stop_{attr_name}", stop_method)
|
|
||||||
else:
|
|
||||||
self._add_method_proxy(attr_name, serialized_object)
|
|
||||||
|
|
||||||
def _add_method_proxy(
|
def _add_method_proxy(
|
||||||
self, attr_name: str, serialized_object: SerializedObject
|
self, attr_name: str, serialized_object: SerializedObject
|
||||||
@@ -260,16 +257,11 @@ class ProxyClassMixin:
|
|||||||
self, attr_name: str, serialized_object: SerializedObject
|
self, attr_name: str, serialized_object: SerializedObject
|
||||||
) -> None:
|
) -> None:
|
||||||
def getter_proxy() -> Any:
|
def getter_proxy() -> Any:
|
||||||
async def get_result() -> Any:
|
return get_value(
|
||||||
return await self._sio.call(
|
sio_client=self._sio,
|
||||||
"get_value", serialized_object["full_access_path"]
|
|
||||||
)
|
|
||||||
|
|
||||||
result = asyncio.run_coroutine_threadsafe(
|
|
||||||
get_result(),
|
|
||||||
loop=self._loop,
|
loop=self._loop,
|
||||||
).result()
|
access_path=serialized_object["full_access_path"],
|
||||||
return ProxyLoader.loads_proxy(result, self._sio, self._loop)
|
)
|
||||||
|
|
||||||
dict.__setitem__(self._proxy_getters, attr_name, getter_proxy) # type: ignore
|
dict.__setitem__(self._proxy_getters, attr_name, getter_proxy) # type: ignore
|
||||||
|
|
||||||
@@ -284,7 +276,7 @@ class ProxyLoader:
|
|||||||
return ProxyList(
|
return ProxyList(
|
||||||
[
|
[
|
||||||
ProxyLoader.loads_proxy(item, sio_client, loop)
|
ProxyLoader.loads_proxy(item, sio_client, loop)
|
||||||
for item in cast(list[SerializedObject], serialized_object["value"])
|
for item in cast("list[SerializedObject]", serialized_object["value"])
|
||||||
],
|
],
|
||||||
parent_path=serialized_object["full_access_path"],
|
parent_path=serialized_object["full_access_path"],
|
||||||
sio_client=sio_client,
|
sio_client=sio_client,
|
||||||
@@ -301,7 +293,7 @@ class ProxyLoader:
|
|||||||
{
|
{
|
||||||
key: ProxyLoader.loads_proxy(value, sio_client, loop)
|
key: ProxyLoader.loads_proxy(value, sio_client, loop)
|
||||||
for key, value in cast(
|
for key, value in cast(
|
||||||
dict[str, SerializedObject], serialized_object["value"]
|
"dict[str, SerializedObject]", serialized_object["value"]
|
||||||
).items()
|
).items()
|
||||||
},
|
},
|
||||||
parent_path=serialized_object["full_access_path"],
|
parent_path=serialized_object["full_access_path"],
|
||||||
@@ -318,7 +310,7 @@ class ProxyLoader:
|
|||||||
proxy_class._proxy_setters.clear()
|
proxy_class._proxy_setters.clear()
|
||||||
proxy_class._proxy_methods.clear()
|
proxy_class._proxy_methods.clear()
|
||||||
for key, value in cast(
|
for key, value in cast(
|
||||||
dict[str, SerializedObject], serialized_object["value"]
|
"dict[str, SerializedObject]", serialized_object["value"]
|
||||||
).items():
|
).items():
|
||||||
type_handler: dict[str | None, None | Callable[..., Any]] = {
|
type_handler: dict[str | None, None | Callable[..., Any]] = {
|
||||||
None: None,
|
None: None,
|
||||||
@@ -351,7 +343,7 @@ class ProxyLoader:
|
|||||||
) -> Any:
|
) -> Any:
|
||||||
# Custom types like Components or DataService classes
|
# Custom types like Components or DataService classes
|
||||||
component_class = cast(
|
component_class = cast(
|
||||||
type, Deserializer.get_component_class(serialized_object["type"])
|
"type", Deserializer.get_service_base_class(serialized_object["type"])
|
||||||
)
|
)
|
||||||
class_bases = (
|
class_bases = (
|
||||||
ProxyClassMixin,
|
ProxyClassMixin,
|
||||||
|
|||||||
@@ -33,8 +33,8 @@ from pydase.components.image import Image
|
|||||||
from pydase.components.number_slider import NumberSlider
|
from pydase.components.number_slider import NumberSlider
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"NumberSlider",
|
|
||||||
"Image",
|
|
||||||
"ColouredEnum",
|
"ColouredEnum",
|
||||||
"DeviceConnection",
|
"DeviceConnection",
|
||||||
|
"Image",
|
||||||
|
"NumberSlider",
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
|
|
||||||
import pydase.data_service
|
import pydase.data_service
|
||||||
|
import pydase.task.decorator
|
||||||
|
|
||||||
|
|
||||||
class DeviceConnection(pydase.data_service.DataService):
|
class DeviceConnection(pydase.data_service.DataService):
|
||||||
@@ -52,7 +53,6 @@ class DeviceConnection(pydase.data_service.DataService):
|
|||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self._connected = False
|
self._connected = False
|
||||||
self._autostart_tasks["_handle_connection"] = () # type: ignore
|
|
||||||
self._reconnection_wait_time = 10.0
|
self._reconnection_wait_time = 10.0
|
||||||
|
|
||||||
def connect(self) -> None:
|
def connect(self) -> None:
|
||||||
@@ -70,6 +70,7 @@ class DeviceConnection(pydase.data_service.DataService):
|
|||||||
"""
|
"""
|
||||||
return self._connected
|
return self._connected
|
||||||
|
|
||||||
|
@pydase.task.decorator.task(autostart=True)
|
||||||
async def _handle_connection(self) -> None:
|
async def _handle_connection(self) -> None:
|
||||||
"""Automatically tries reconnecting to the device if it is not connected.
|
"""Automatically tries reconnecting to the device if it is not connected.
|
||||||
This method leverages the `connect` method and the `connected` property to
|
This method leverages the `connect` method and the `connected` property to
|
||||||
|
|||||||
@@ -13,11 +13,11 @@ class NumberSlider(DataService):
|
|||||||
|
|
||||||
Args:
|
Args:
|
||||||
value:
|
value:
|
||||||
The initial value of the slider. Defaults to 0.
|
The initial value of the slider. Defaults to 0.0.
|
||||||
min_:
|
min_:
|
||||||
The minimum value of the slider. Defaults to 0.
|
The minimum value of the slider. Defaults to 0.0.
|
||||||
max_:
|
max_:
|
||||||
The maximum value of the slider. Defaults to 100.
|
The maximum value of the slider. Defaults to 100.0.
|
||||||
step_size:
|
step_size:
|
||||||
The increment/decrement step size of the slider. Defaults to 1.0.
|
The increment/decrement step size of the slider. Defaults to 1.0.
|
||||||
|
|
||||||
@@ -84,9 +84,9 @@ class NumberSlider(DataService):
|
|||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
value: Any = 0.0,
|
value: Any = 0.0,
|
||||||
min_: float = 0.0,
|
min_: Any = 0.0,
|
||||||
max_: float = 100.0,
|
max_: Any = 100.0,
|
||||||
step_size: float = 1.0,
|
step_size: Any = 1.0,
|
||||||
) -> None:
|
) -> None:
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self._step_size = step_size
|
self._step_size = step_size
|
||||||
@@ -95,17 +95,17 @@ class NumberSlider(DataService):
|
|||||||
self._max = max_
|
self._max = max_
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def min(self) -> float:
|
def min(self) -> Any:
|
||||||
"""The min property."""
|
"""The min property."""
|
||||||
return self._min
|
return self._min
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def max(self) -> float:
|
def max(self) -> Any:
|
||||||
"""The min property."""
|
"""The min property."""
|
||||||
return self._max
|
return self._max
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def step_size(self) -> float:
|
def step_size(self) -> Any:
|
||||||
"""The min property."""
|
"""The min property."""
|
||||||
return self._step_size
|
return self._step_size
|
||||||
|
|
||||||
|
|||||||
@@ -1,15 +1,7 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import TYPE_CHECKING, Any
|
|
||||||
|
|
||||||
from pydase.observer_pattern.observable.observable import Observable
|
from pydase.observer_pattern.observable.observable import Observable
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from pydase.data_service.data_service import DataService
|
|
||||||
from pydase.data_service.task_manager import TaskManager
|
|
||||||
|
|
||||||
|
|
||||||
class AbstractDataService(Observable):
|
class AbstractDataService(Observable):
|
||||||
__root__: DataService
|
pass
|
||||||
_task_manager: TaskManager
|
|
||||||
_autostart_tasks: dict[str, tuple[Any]]
|
|
||||||
|
|||||||
@@ -1,22 +1,22 @@
|
|||||||
import inspect
|
import inspect
|
||||||
import logging
|
import logging
|
||||||
|
from collections.abc import Callable
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
import pydase.units as u
|
import pydase.units as u
|
||||||
from pydase.data_service.abstract_data_service import AbstractDataService
|
from pydase.data_service.abstract_data_service import AbstractDataService
|
||||||
from pydase.data_service.task_manager import TaskManager
|
|
||||||
from pydase.observer_pattern.observable.observable import (
|
from pydase.observer_pattern.observable.observable import (
|
||||||
Observable,
|
Observable,
|
||||||
)
|
)
|
||||||
from pydase.utils.helpers import (
|
from pydase.utils.helpers import (
|
||||||
get_class_and_instance_attributes,
|
get_class_and_instance_attributes,
|
||||||
is_property_attribute,
|
is_descriptor,
|
||||||
)
|
)
|
||||||
from pydase.utils.serialization.serializer import (
|
from pydase.utils.serialization.serializer import (
|
||||||
SerializedObject,
|
|
||||||
Serializer,
|
Serializer,
|
||||||
)
|
)
|
||||||
|
from pydase.utils.serialization.types import SerializedObject
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -24,57 +24,43 @@ logger = logging.getLogger(__name__)
|
|||||||
class DataService(AbstractDataService):
|
class DataService(AbstractDataService):
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self._task_manager = TaskManager(self)
|
|
||||||
|
|
||||||
if not hasattr(self, "_autostart_tasks"):
|
|
||||||
self._autostart_tasks = {}
|
|
||||||
|
|
||||||
self.__check_instance_classes()
|
self.__check_instance_classes()
|
||||||
|
|
||||||
def __setattr__(self, __name: str, __value: Any) -> None:
|
def __setattr__(self, name: str, value: Any, /) -> None:
|
||||||
# Check and warn for unexpected type changes in attributes
|
|
||||||
self._warn_on_type_change(__name, __value)
|
|
||||||
|
|
||||||
# every class defined by the user should inherit from DataService if it is
|
# every class defined by the user should inherit from DataService if it is
|
||||||
# assigned to a public attribute
|
# assigned to a public attribute
|
||||||
if not __name.startswith("_") and not inspect.isfunction(__value):
|
if not name.startswith("_") and not inspect.isfunction(value):
|
||||||
self.__warn_if_not_observable(__value)
|
self.__warn_if_not_observable(value)
|
||||||
|
|
||||||
# Set the attribute
|
# Set the attribute
|
||||||
super().__setattr__(__name, __value)
|
super().__setattr__(name, value)
|
||||||
|
|
||||||
def _warn_on_type_change(self, attr_name: str, new_value: Any) -> None:
|
|
||||||
if is_property_attribute(self, attr_name):
|
|
||||||
return
|
|
||||||
|
|
||||||
current_value = getattr(self, attr_name, None)
|
|
||||||
if self._is_unexpected_type_change(current_value, new_value):
|
|
||||||
logger.warning(
|
|
||||||
"Type of '%s' changed from '%s' to '%s'. This may have unwanted "
|
|
||||||
"side effects! Consider setting it to '%s' directly.",
|
|
||||||
attr_name,
|
|
||||||
type(current_value).__name__,
|
|
||||||
type(new_value).__name__,
|
|
||||||
type(current_value).__name__,
|
|
||||||
)
|
|
||||||
|
|
||||||
def _is_unexpected_type_change(self, current_value: Any, new_value: Any) -> bool:
|
def _is_unexpected_type_change(self, current_value: Any, new_value: Any) -> bool:
|
||||||
return (
|
return (
|
||||||
isinstance(current_value, float)
|
isinstance(current_value, float) and not isinstance(new_value, float)
|
||||||
and not isinstance(new_value, float)
|
) or (
|
||||||
or (
|
isinstance(current_value, u.Quantity)
|
||||||
isinstance(current_value, u.Quantity)
|
and not isinstance(new_value, u.Quantity)
|
||||||
and not isinstance(new_value, u.Quantity)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def __warn_if_not_observable(self, __value: Any) -> None:
|
def __warn_if_not_observable(self, value: Any, /) -> None:
|
||||||
value_class = __value if inspect.isclass(__value) else __value.__class__
|
value_class = value if inspect.isclass(value) else value.__class__
|
||||||
|
|
||||||
if not issubclass(
|
if not issubclass(
|
||||||
value_class,
|
value_class,
|
||||||
(int | float | bool | str | list | dict | Enum | u.Quantity | Observable),
|
(
|
||||||
):
|
int
|
||||||
|
| float
|
||||||
|
| bool
|
||||||
|
| str
|
||||||
|
| list
|
||||||
|
| dict
|
||||||
|
| Enum
|
||||||
|
| u.Quantity
|
||||||
|
| Observable
|
||||||
|
| Callable
|
||||||
|
),
|
||||||
|
) and not is_descriptor(value):
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"Class '%s' does not inherit from DataService. This may lead to"
|
"Class '%s' does not inherit from DataService. This may lead to"
|
||||||
" unexpected behaviour!",
|
" unexpected behaviour!",
|
||||||
|
|||||||
@@ -2,10 +2,10 @@ import logging
|
|||||||
from typing import TYPE_CHECKING, Any, cast
|
from typing import TYPE_CHECKING, Any, cast
|
||||||
|
|
||||||
from pydase.utils.serialization.serializer import (
|
from pydase.utils.serialization.serializer import (
|
||||||
SerializedObject,
|
|
||||||
get_nested_dict_by_path,
|
get_nested_dict_by_path,
|
||||||
set_nested_value_by_path,
|
set_nested_value_by_path,
|
||||||
)
|
)
|
||||||
|
from pydase.utils.serialization.types import SerializedObject
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from pydase import DataService
|
from pydase import DataService
|
||||||
@@ -14,6 +14,22 @@ logger = logging.getLogger(__name__)
|
|||||||
|
|
||||||
|
|
||||||
class DataServiceCache:
|
class DataServiceCache:
|
||||||
|
"""Maintains a serialized cache of the current state of a DataService instance.
|
||||||
|
|
||||||
|
This class is responsible for storing and updating a representation of the service's
|
||||||
|
public attributes and properties. It is primarily used by the StateManager and the
|
||||||
|
web server to serve consistent state to clients without accessing the DataService
|
||||||
|
attributes directly.
|
||||||
|
|
||||||
|
The cache is initialized once upon construction by serializing the full state of
|
||||||
|
the service. After that, it can be incrementally updated using attribute paths and
|
||||||
|
values as notified by the
|
||||||
|
[`DataServiceObserver`][pydase.data_service.data_service_observer.DataServiceObserver].
|
||||||
|
|
||||||
|
Args:
|
||||||
|
service: The DataService instance whose state should be cached.
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(self, service: "DataService") -> None:
|
def __init__(self, service: "DataService") -> None:
|
||||||
self._cache: SerializedObject
|
self._cache: SerializedObject
|
||||||
self.service = service
|
self.service = service
|
||||||
@@ -30,13 +46,13 @@ class DataServiceCache:
|
|||||||
|
|
||||||
def update_cache(self, full_access_path: str, value: Any) -> None:
|
def update_cache(self, full_access_path: str, value: Any) -> None:
|
||||||
set_nested_value_by_path(
|
set_nested_value_by_path(
|
||||||
cast(dict[str, SerializedObject], self._cache["value"]),
|
cast("dict[str, SerializedObject]", self._cache["value"]),
|
||||||
full_access_path,
|
full_access_path,
|
||||||
value,
|
value,
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_value_dict_from_cache(self, full_access_path: str) -> SerializedObject:
|
def get_value_dict_from_cache(self, full_access_path: str) -> SerializedObject:
|
||||||
return get_nested_dict_by_path(
|
return get_nested_dict_by_path(
|
||||||
cast(dict[str, SerializedObject], self._cache["value"]),
|
cast("dict[str, SerializedObject]", self._cache["value"]),
|
||||||
full_access_path,
|
full_access_path,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -8,16 +8,31 @@ from pydase.observer_pattern.observable.observable_object import ObservableObjec
|
|||||||
from pydase.observer_pattern.observer.property_observer import (
|
from pydase.observer_pattern.observer.property_observer import (
|
||||||
PropertyObserver,
|
PropertyObserver,
|
||||||
)
|
)
|
||||||
from pydase.utils.helpers import get_object_attr_from_path
|
from pydase.utils.helpers import (
|
||||||
|
get_object_attr_from_path,
|
||||||
|
)
|
||||||
from pydase.utils.serialization.serializer import (
|
from pydase.utils.serialization.serializer import (
|
||||||
SerializationPathError,
|
SerializationPathError,
|
||||||
SerializedObject,
|
|
||||||
dump,
|
dump,
|
||||||
)
|
)
|
||||||
|
from pydase.utils.serialization.types import SerializedObject
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def _is_nested_attribute(full_access_path: str, changing_attributes: list[str]) -> bool:
|
||||||
|
"""Return True if the full_access_path is a nested attribute of any
|
||||||
|
changing_attribute."""
|
||||||
|
|
||||||
|
return any(
|
||||||
|
(
|
||||||
|
full_access_path.startswith((f"{attr}.", f"{attr}["))
|
||||||
|
and full_access_path != attr
|
||||||
|
)
|
||||||
|
for attr in changing_attributes
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class DataServiceObserver(PropertyObserver):
|
class DataServiceObserver(PropertyObserver):
|
||||||
def __init__(self, state_manager: StateManager) -> None:
|
def __init__(self, state_manager: StateManager) -> None:
|
||||||
self.state_manager = state_manager
|
self.state_manager = state_manager
|
||||||
@@ -27,11 +42,7 @@ class DataServiceObserver(PropertyObserver):
|
|||||||
super().__init__(state_manager.service)
|
super().__init__(state_manager.service)
|
||||||
|
|
||||||
def on_change(self, full_access_path: str, value: Any) -> None:
|
def on_change(self, full_access_path: str, value: Any) -> None:
|
||||||
if any(
|
if _is_nested_attribute(full_access_path, self.changing_attributes):
|
||||||
full_access_path.startswith(changing_attribute)
|
|
||||||
and full_access_path != changing_attribute
|
|
||||||
for changing_attribute in self.changing_attributes
|
|
||||||
):
|
|
||||||
return
|
return
|
||||||
cached_value_dict: SerializedObject
|
cached_value_dict: SerializedObject
|
||||||
|
|
||||||
@@ -53,7 +64,7 @@ class DataServiceObserver(PropertyObserver):
|
|||||||
cached_value = cached_value_dict.get("value")
|
cached_value = cached_value_dict.get("value")
|
||||||
if (
|
if (
|
||||||
all(part[0] != "_" for part in full_access_path.split("."))
|
all(part[0] != "_" for part in full_access_path.split("."))
|
||||||
and cached_value != value
|
and cached_value != dump(value)["value"]
|
||||||
):
|
):
|
||||||
logger.debug("'%s' changed to '%s'", full_access_path, value)
|
logger.debug("'%s' changed to '%s'", full_access_path, value)
|
||||||
|
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
import asyncio
|
||||||
import contextlib
|
import contextlib
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
@@ -16,11 +17,11 @@ from pydase.utils.helpers import (
|
|||||||
from pydase.utils.serialization.deserializer import loads
|
from pydase.utils.serialization.deserializer import loads
|
||||||
from pydase.utils.serialization.serializer import (
|
from pydase.utils.serialization.serializer import (
|
||||||
SerializationPathError,
|
SerializationPathError,
|
||||||
SerializedObject,
|
|
||||||
generate_serialized_data_paths,
|
generate_serialized_data_paths,
|
||||||
get_nested_dict_by_path,
|
get_nested_dict_by_path,
|
||||||
serialized_dict_is_nested_object,
|
serialized_dict_is_nested_object,
|
||||||
)
|
)
|
||||||
|
from pydase.utils.serialization.types import SerializedObject
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from pydase import DataService
|
from pydase import DataService
|
||||||
@@ -66,43 +67,41 @@ def has_load_state_decorator(prop: property) -> bool:
|
|||||||
class StateManager:
|
class StateManager:
|
||||||
"""
|
"""
|
||||||
Manages the state of a DataService instance, serving as both a cache and a
|
Manages the state of a DataService instance, serving as both a cache and a
|
||||||
persistence layer. It is designed to provide quick access to the latest known state
|
persistence layer. It provides fast access to the most recently known state of the
|
||||||
for newly connecting web clients without the need for expensive property accesses
|
service and ensures consistent state updates across connected clients and service
|
||||||
that may involve complex calculations or I/O operations.
|
restarts.
|
||||||
|
|
||||||
The StateManager listens for state change notifications from the DataService's
|
The StateManager is used by the web server to apply updates to service attributes
|
||||||
callback manager and updates its cache accordingly. This cache does not always
|
and to serve the current state to newly connected clients. Internally, it creates a
|
||||||
reflect the most current complex property states but rather retains the value from
|
[`DataServiceCache`][pydase.data_service.data_service_cache.DataServiceCache]
|
||||||
the last known state, optimizing for performance and reducing the load on the
|
instance to track the state of public attributes and properties.
|
||||||
system.
|
|
||||||
|
|
||||||
While the StateManager ensures that the cached state is as up-to-date as possible,
|
The StateManager also handles state persistence: it can load a previously saved
|
||||||
it does not autonomously update complex properties of the DataService. Such
|
state from disk at startup and periodically autosave the current state to a file
|
||||||
properties must be updated programmatically, for instance, by invoking specific
|
during runtime.
|
||||||
tasks or methods that trigger the necessary operations to refresh their state.
|
|
||||||
|
|
||||||
The cached state maintained by the StateManager is particularly useful for web
|
|
||||||
clients that connect to the system and need immediate access to the current state of
|
|
||||||
the DataService. By avoiding direct and potentially costly property accesses, the
|
|
||||||
StateManager provides a snapshot of the DataService's state that is sufficiently
|
|
||||||
accurate for initial rendering and interaction.
|
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
service:
|
service: The DataService instance whose state is being managed.
|
||||||
The DataService instance whose state is being managed.
|
filename: The file name used for loading and storing the DataService's state.
|
||||||
filename:
|
If provided, the state is loaded from this file at startup and saved to it
|
||||||
The file name used for storing the DataService's state.
|
on shutdown or at regular intervals.
|
||||||
|
autosave_interval: Interval in seconds between automatic state save events.
|
||||||
|
If set to `None`, automatic saving is disabled.
|
||||||
|
|
||||||
Note:
|
Note:
|
||||||
The StateManager's cache updates are triggered by notifications and do not
|
The StateManager does not autonomously poll hardware state. It relies on the
|
||||||
include autonomous updates of complex DataService properties, which must be
|
service to perform such updates. The cache maintained by
|
||||||
managed programmatically. The cache serves the purpose of providing immediate
|
[`DataServiceCache`][pydase.data_service.data_service_cache.DataServiceCache]
|
||||||
state information to web clients, reflecting the state after the last property
|
reflects the last known state as notified by the `DataServiceObserver`, and is
|
||||||
update.
|
used by the web interface to provide fast and accurate state rendering for
|
||||||
|
connected clients.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self, service: "DataService", filename: str | Path | None = None
|
self,
|
||||||
|
service: "DataService",
|
||||||
|
filename: str | Path | None = None,
|
||||||
|
autosave_interval: float | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
self.filename = getattr(service, "_filename", None)
|
self.filename = getattr(service, "_filename", None)
|
||||||
|
|
||||||
@@ -115,30 +114,51 @@ class StateManager:
|
|||||||
|
|
||||||
self.service = service
|
self.service = service
|
||||||
self.cache_manager = DataServiceCache(self.service)
|
self.cache_manager = DataServiceCache(self.service)
|
||||||
|
self.autosave_interval = autosave_interval
|
||||||
|
|
||||||
|
async def autosave(self) -> None:
|
||||||
|
"""Periodically saves the current service state to the configured file.
|
||||||
|
|
||||||
|
This coroutine is automatically started by the [`pydase.Server`][pydase.Server]
|
||||||
|
when a filename is provided. It runs in the background and writes the latest
|
||||||
|
known state of the service to disk every `autosave_interval` seconds.
|
||||||
|
|
||||||
|
If `autosave_interval` is set to `None`, autosaving is disabled and this
|
||||||
|
coroutine exits immediately.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.autosave_interval is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
if self.filename is not None:
|
||||||
|
self.save_state()
|
||||||
|
await asyncio.sleep(self.autosave_interval)
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(e)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def cache_value(self) -> dict[str, SerializedObject]:
|
def cache_value(self) -> dict[str, SerializedObject]:
|
||||||
"""Returns the "value" value of the DataService serialization."""
|
"""Returns the "value" value of the DataService serialization."""
|
||||||
return cast(dict[str, SerializedObject], self.cache_manager.cache["value"])
|
return cast("dict[str, SerializedObject]", self.cache_manager.cache["value"])
|
||||||
|
|
||||||
def save_state(self) -> None:
|
def save_state(self) -> None:
|
||||||
"""
|
"""Saves the DataService's current state to a JSON file defined by
|
||||||
Saves the DataService's current state to a JSON file defined by `self.filename`.
|
`self.filename`.
|
||||||
Logs an error if `self.filename` is not set.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if self.filename is not None:
|
if self.filename is not None:
|
||||||
with open(self.filename, "w") as f:
|
with open(self.filename, "w") as f:
|
||||||
json.dump(self.cache_value, f, indent=4)
|
json.dump(self.cache_value, f, indent=4)
|
||||||
else:
|
else:
|
||||||
logger.info(
|
logger.debug(
|
||||||
"State manager was not initialised with a filename. Skipping "
|
"State manager was not initialised with a filename. Skipping "
|
||||||
"'save_state'..."
|
"'save_state'..."
|
||||||
)
|
)
|
||||||
|
|
||||||
def load_state(self) -> None:
|
def load_state(self) -> None:
|
||||||
"""
|
"""Loads the DataService's state from a JSON file defined by `self.filename`.
|
||||||
Loads the DataService's state from a JSON file defined by `self.filename`.
|
|
||||||
Updates the service's attributes, respecting type and read-only constraints.
|
Updates the service's attributes, respecting type and read-only constraints.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -183,7 +203,7 @@ class StateManager:
|
|||||||
with open(self.filename) as f:
|
with open(self.filename) as f:
|
||||||
# Load JSON data from file and update class attributes with these
|
# Load JSON data from file and update class attributes with these
|
||||||
# values
|
# values
|
||||||
return cast(dict[str, Any], json.load(f))
|
return cast("dict[str, Any]", json.load(f))
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
def set_service_attribute_value_by_path(
|
def set_service_attribute_value_by_path(
|
||||||
@@ -191,8 +211,7 @@ class StateManager:
|
|||||||
path: str,
|
path: str,
|
||||||
serialized_value: SerializedObject,
|
serialized_value: SerializedObject,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
"""Sets the value of an attribute in the service managed by the `StateManager`
|
||||||
Sets the value of an attribute in the service managed by the `StateManager`
|
|
||||||
given its path as a dot-separated string.
|
given its path as a dot-separated string.
|
||||||
|
|
||||||
This method updates the attribute specified by 'path' with 'value' only if the
|
This method updates the attribute specified by 'path' with 'value' only if the
|
||||||
|
|||||||
@@ -1,225 +0,0 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import inspect
|
|
||||||
import logging
|
|
||||||
from enum import Enum
|
|
||||||
from typing import TYPE_CHECKING, Any
|
|
||||||
|
|
||||||
from pydase.data_service.abstract_data_service import AbstractDataService
|
|
||||||
from pydase.utils.helpers import (
|
|
||||||
function_has_arguments,
|
|
||||||
get_class_and_instance_attributes,
|
|
||||||
is_property_attribute,
|
|
||||||
)
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from collections.abc import Callable
|
|
||||||
|
|
||||||
from .data_service import DataService
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class TaskStatus(Enum):
|
|
||||||
RUNNING = "running"
|
|
||||||
|
|
||||||
|
|
||||||
class TaskManager:
|
|
||||||
"""
|
|
||||||
The TaskManager class is a utility designed to manage asynchronous tasks. It
|
|
||||||
provides functionality for starting, stopping, and tracking these tasks. The class
|
|
||||||
is primarily used by the DataService class to manage its tasks.
|
|
||||||
|
|
||||||
A task in TaskManager is any asynchronous function. To add a task, you simply need
|
|
||||||
to define an async function within your class that extends TaskManager. For example:
|
|
||||||
|
|
||||||
```python
|
|
||||||
class MyService(DataService):
|
|
||||||
async def my_task(self):
|
|
||||||
# Your task implementation here
|
|
||||||
pass
|
|
||||||
```
|
|
||||||
|
|
||||||
With the above definition, TaskManager automatically creates `start_my_task` and
|
|
||||||
`stop_my_task` methods that can be used to control the task.
|
|
||||||
|
|
||||||
TaskManager also supports auto-starting tasks. If there are tasks that should start
|
|
||||||
running as soon as an instance of your class is created, you can define them in
|
|
||||||
`self._autostart_tasks` in your class constructor (__init__ method). Here's how:
|
|
||||||
|
|
||||||
```python
|
|
||||||
class MyService(DataService):
|
|
||||||
def __init__(self):
|
|
||||||
self._autostart_tasks = {
|
|
||||||
"my_task": (*args) # Replace with actual arguments
|
|
||||||
}
|
|
||||||
self.wait_time = 1
|
|
||||||
super().__init__()
|
|
||||||
|
|
||||||
async def my_task(self, *args):
|
|
||||||
while True:
|
|
||||||
# Your task implementation here
|
|
||||||
await asyncio.sleep(self.wait_time)
|
|
||||||
```
|
|
||||||
|
|
||||||
In the above example, `my_task` will start running as soon as
|
|
||||||
`_start_autostart_tasks` is called which is done when the DataService instance is
|
|
||||||
passed to the `pydase.Server` class.
|
|
||||||
|
|
||||||
The responsibilities of the TaskManager class are:
|
|
||||||
|
|
||||||
- Track all running tasks: Keeps track of all the tasks that are currently running.
|
|
||||||
This allows for monitoring of task statuses and for making sure tasks do not
|
|
||||||
overlap.
|
|
||||||
- Provide the ability to start and stop tasks: Automatically creates methods to
|
|
||||||
start and stop each task.
|
|
||||||
- Emit notifications when the status of a task changes: Has a built-in mechanism for
|
|
||||||
emitting notifications when a task starts or stops. This is used to update the user
|
|
||||||
interfaces, but can also be used to write logs, etc.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, service: DataService) -> None:
|
|
||||||
self.service = service
|
|
||||||
|
|
||||||
self.tasks: dict[str, asyncio.Task[None]] = {}
|
|
||||||
"""A dictionary to keep track of running tasks. The keys are the names of the
|
|
||||||
tasks and the values are TaskDict instances which include the task itself and
|
|
||||||
its kwargs.
|
|
||||||
"""
|
|
||||||
|
|
||||||
self._set_start_and_stop_for_async_methods()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def _loop(self) -> asyncio.AbstractEventLoop:
|
|
||||||
return asyncio.get_running_loop()
|
|
||||||
|
|
||||||
def _set_start_and_stop_for_async_methods(self) -> None:
|
|
||||||
for name in dir(self.service):
|
|
||||||
# circumvents calling properties
|
|
||||||
if is_property_attribute(self.service, name):
|
|
||||||
continue
|
|
||||||
|
|
||||||
method = getattr(self.service, name)
|
|
||||||
if inspect.iscoroutinefunction(method):
|
|
||||||
if function_has_arguments(method):
|
|
||||||
logger.info(
|
|
||||||
"Async function %a is defined with at least one argument. If "
|
|
||||||
"you want to use it as a task, remove the argument(s) from the "
|
|
||||||
"function definition.",
|
|
||||||
method.__name__,
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# create start and stop methods for each coroutine
|
|
||||||
setattr(
|
|
||||||
self.service, f"start_{name}", self._make_start_task(name, method)
|
|
||||||
)
|
|
||||||
setattr(self.service, f"stop_{name}", self._make_stop_task(name))
|
|
||||||
|
|
||||||
def _initiate_task_startup(self) -> None:
|
|
||||||
if self.service._autostart_tasks is not None:
|
|
||||||
for service_name, args in self.service._autostart_tasks.items():
|
|
||||||
start_method = getattr(self.service, f"start_{service_name}", None)
|
|
||||||
if start_method is not None and callable(start_method):
|
|
||||||
start_method(*args)
|
|
||||||
else:
|
|
||||||
logger.warning(
|
|
||||||
"No start method found for service '%s'", service_name
|
|
||||||
)
|
|
||||||
|
|
||||||
def start_autostart_tasks(self) -> None:
|
|
||||||
self._initiate_task_startup()
|
|
||||||
attrs = get_class_and_instance_attributes(self.service)
|
|
||||||
|
|
||||||
for attr_value in attrs.values():
|
|
||||||
if isinstance(attr_value, AbstractDataService):
|
|
||||||
attr_value._task_manager.start_autostart_tasks()
|
|
||||||
elif isinstance(attr_value, list):
|
|
||||||
for item in attr_value:
|
|
||||||
if isinstance(item, AbstractDataService):
|
|
||||||
item._task_manager.start_autostart_tasks()
|
|
||||||
|
|
||||||
def _make_stop_task(self, name: str) -> Callable[..., Any]:
|
|
||||||
"""
|
|
||||||
Factory function to create a 'stop_task' function for a running task.
|
|
||||||
|
|
||||||
The generated function cancels the associated asyncio task using 'name' for
|
|
||||||
identification, ensuring proper cleanup. Avoids closure and late binding issues.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
name (str): The name of the coroutine task, used for its identification.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def stop_task() -> None:
|
|
||||||
# cancel the task
|
|
||||||
task = self.tasks.get(name, None)
|
|
||||||
if task is not None:
|
|
||||||
self._loop.call_soon_threadsafe(task.cancel)
|
|
||||||
|
|
||||||
return stop_task
|
|
||||||
|
|
||||||
def _make_start_task(
|
|
||||||
self, name: str, method: Callable[..., Any]
|
|
||||||
) -> Callable[..., Any]:
|
|
||||||
"""
|
|
||||||
Factory function to create a 'start_task' function for a coroutine.
|
|
||||||
|
|
||||||
The generated function starts the coroutine as an asyncio task, handling
|
|
||||||
registration and monitoring.
|
|
||||||
It uses 'name' and 'method' to avoid the closure and late binding issue.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
name (str): The name of the coroutine, used for task management.
|
|
||||||
method (callable): The coroutine to be turned into an asyncio task.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def start_task() -> None:
|
|
||||||
def task_done_callback(task: asyncio.Task[None], name: str) -> None:
|
|
||||||
"""Handles tasks that have finished.
|
|
||||||
|
|
||||||
Removes a task from the tasks dictionary, calls the defined
|
|
||||||
callbacks, and logs and re-raises exceptions."""
|
|
||||||
|
|
||||||
# removing the finished task from the tasks i
|
|
||||||
self.tasks.pop(name, None)
|
|
||||||
|
|
||||||
# emit the notification that the task was stopped
|
|
||||||
self.service._notify_changed(name, None)
|
|
||||||
|
|
||||||
exception = task.exception()
|
|
||||||
if exception is not None:
|
|
||||||
# Handle the exception, or you can re-raise it.
|
|
||||||
logger.error(
|
|
||||||
"Task '%s' encountered an exception: %s: %s",
|
|
||||||
name,
|
|
||||||
type(exception).__name__,
|
|
||||||
exception,
|
|
||||||
)
|
|
||||||
raise exception
|
|
||||||
|
|
||||||
async def task() -> None:
|
|
||||||
try:
|
|
||||||
await method()
|
|
||||||
except asyncio.CancelledError:
|
|
||||||
logger.info("Task '%s' was cancelled", name)
|
|
||||||
|
|
||||||
if not self.tasks.get(name):
|
|
||||||
# creating the task and adding the task_done_callback which checks
|
|
||||||
# if an exception has occured during the task execution
|
|
||||||
task_object = self._loop.create_task(task())
|
|
||||||
task_object.add_done_callback(
|
|
||||||
lambda task: task_done_callback(task, name)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Store the task and its arguments in the '__tasks' dictionary. The
|
|
||||||
# key is the name of the method, and the value is a dictionary
|
|
||||||
# containing the task object and the updated keyword arguments.
|
|
||||||
self.tasks[name] = task_object
|
|
||||||
|
|
||||||
# emit the notification that the task was started
|
|
||||||
self.service._notify_changed(name, TaskStatus.RUNNING)
|
|
||||||
else:
|
|
||||||
logger.error("Task '%s' is already running!", name)
|
|
||||||
|
|
||||||
return start_task
|
|
||||||
71
src/pydase/frontend/assets/index-CKS_bS2p.js
Normal file
5
src/pydase/frontend/assets/index-Cs09d5Pk.css
Normal file
BIN
src/pydase/frontend/favicon.ico
Normal file
|
After Width: | Height: | Size: 77 KiB |
@@ -3,13 +3,20 @@
|
|||||||
|
|
||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8" />
|
<meta charset="UTF-8" />
|
||||||
|
<link rel="icon" type="image/svg+xml" href="/favicon.ico" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
<meta name="theme-color" content="#000000" />
|
<meta name="theme-color" content="#000000" />
|
||||||
<meta name="description" content="Web site displaying a pydase UI." />
|
<meta name="description" content="Web site displaying a pydase UI." />
|
||||||
<script type="module" crossorigin src="/assets/index-D7tStNHJ.js"></script>
|
<script type="module" crossorigin src="/assets/index-CKS_bS2p.js"></script>
|
||||||
<link rel="stylesheet" crossorigin href="/assets/index-D2aktF3W.css">
|
<link rel="stylesheet" crossorigin href="/assets/index-Cs09d5Pk.css">
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
// this will be set by the python backend if the service is behind a proxy which strips a prefix. The frontend can use this to build the paths to the resources.
|
||||||
|
window.__FORWARDED_PREFIX__ = "";
|
||||||
|
window.__FORWARDED_PROTO__ = "";
|
||||||
|
</script>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
<noscript>You need to enable JavaScript to run this app.</noscript>
|
<noscript>You need to enable JavaScript to run this app.</noscript>
|
||||||
<div id="root"></div>
|
<div id="root"></div>
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ from pydase.observer_pattern.observable.decorators import (
|
|||||||
has_validate_set_decorator,
|
has_validate_set_decorator,
|
||||||
)
|
)
|
||||||
from pydase.observer_pattern.observable.observable_object import ObservableObject
|
from pydase.observer_pattern.observable.observable_object import ObservableObject
|
||||||
from pydase.utils.helpers import is_property_attribute
|
from pydase.utils.helpers import is_descriptor, is_property_attribute
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -22,7 +22,9 @@ class Observable(ObservableObject):
|
|||||||
- {"__annotations__"}
|
- {"__annotations__"}
|
||||||
}
|
}
|
||||||
for name, value in class_attrs.items():
|
for name, value in class_attrs.items():
|
||||||
if isinstance(value, property) or callable(value):
|
if isinstance(value, property) or callable(value) or is_descriptor(value):
|
||||||
|
# Properties, methods and descriptors have to be stored as class
|
||||||
|
# attributes to work properly. So don't make it an instance attribute.
|
||||||
continue
|
continue
|
||||||
self.__dict__[name] = self._initialise_new_objects(name, value)
|
self.__dict__[name] = self._initialise_new_objects(name, value)
|
||||||
|
|
||||||
@@ -53,6 +55,10 @@ class Observable(ObservableObject):
|
|||||||
value = super().__getattribute__(name)
|
value = super().__getattribute__(name)
|
||||||
|
|
||||||
if is_property_attribute(self, name):
|
if is_property_attribute(self, name):
|
||||||
|
# fixes https://github.com/tiqi-group/pydase/issues/187 and
|
||||||
|
# https://github.com/tiqi-group/pydase/issues/192
|
||||||
|
if isinstance(value, ObservableObject):
|
||||||
|
value.add_observer(self, name)
|
||||||
self._notify_changed(name, value)
|
self._notify_changed(name, value)
|
||||||
|
|
||||||
return value
|
return value
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import contextlib
|
||||||
import logging
|
import logging
|
||||||
import weakref
|
import weakref
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
@@ -164,9 +165,9 @@ class _ObservableList(ObservableObject, list[Any]):
|
|||||||
|
|
||||||
self._notify_changed(f"[{key}]", value)
|
self._notify_changed(f"[{key}]", value)
|
||||||
|
|
||||||
def append(self, __object: Any) -> None:
|
def append(self, object_: Any, /) -> None:
|
||||||
self._notify_change_start("")
|
self._notify_change_start("")
|
||||||
super().append(self._initialise_new_objects(f"[{len(self)}]", __object))
|
super().append(self._initialise_new_objects(f"[{len(self)}]", object_))
|
||||||
self._notify_changed("", self)
|
self._notify_changed("", self)
|
||||||
|
|
||||||
def clear(self) -> None:
|
def clear(self) -> None:
|
||||||
@@ -176,33 +177,33 @@ class _ObservableList(ObservableObject, list[Any]):
|
|||||||
|
|
||||||
self._notify_changed("", self)
|
self._notify_changed("", self)
|
||||||
|
|
||||||
def extend(self, __iterable: Iterable[Any]) -> None:
|
def extend(self, iterable: Iterable[Any], /) -> None:
|
||||||
self._remove_self_from_observables()
|
self._remove_self_from_observables()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
super().extend(__iterable)
|
super().extend(iterable)
|
||||||
finally:
|
finally:
|
||||||
for i, item in enumerate(self):
|
for i, item in enumerate(self):
|
||||||
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
||||||
|
|
||||||
self._notify_changed("", self)
|
self._notify_changed("", self)
|
||||||
|
|
||||||
def insert(self, __index: SupportsIndex, __object: Any) -> None:
|
def insert(self, index: SupportsIndex, object_: Any, /) -> None:
|
||||||
self._remove_self_from_observables()
|
self._remove_self_from_observables()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
super().insert(__index, __object)
|
super().insert(index, object_)
|
||||||
finally:
|
finally:
|
||||||
for i, item in enumerate(self):
|
for i, item in enumerate(self):
|
||||||
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
||||||
|
|
||||||
self._notify_changed("", self)
|
self._notify_changed("", self)
|
||||||
|
|
||||||
def pop(self, __index: SupportsIndex = -1) -> Any:
|
def pop(self, index: SupportsIndex = -1, /) -> Any:
|
||||||
self._remove_self_from_observables()
|
self._remove_self_from_observables()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
popped_item = super().pop(__index)
|
popped_item = super().pop(index)
|
||||||
finally:
|
finally:
|
||||||
for i, item in enumerate(self):
|
for i, item in enumerate(self):
|
||||||
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
||||||
@@ -210,11 +211,11 @@ class _ObservableList(ObservableObject, list[Any]):
|
|||||||
self._notify_changed("", self)
|
self._notify_changed("", self)
|
||||||
return popped_item
|
return popped_item
|
||||||
|
|
||||||
def remove(self, __value: Any) -> None:
|
def remove(self, value: Any, /) -> None:
|
||||||
self._remove_self_from_observables()
|
self._remove_self_from_observables()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
super().remove(__value)
|
super().remove(value)
|
||||||
finally:
|
finally:
|
||||||
for i, item in enumerate(self):
|
for i, item in enumerate(self):
|
||||||
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
super().__setitem__(i, self._initialise_new_objects(f"[{i}]", item))
|
||||||
@@ -252,7 +253,8 @@ class _ObservableDict(ObservableObject, dict[str, Any]):
|
|||||||
self.__setitem__(key, self._initialise_new_objects(f'["{key}"]', value))
|
self.__setitem__(key, self._initialise_new_objects(f'["{key}"]', value))
|
||||||
|
|
||||||
def __del__(self) -> None:
|
def __del__(self) -> None:
|
||||||
self._dict_mapping.pop(id(self._original_dict))
|
with contextlib.suppress(KeyError):
|
||||||
|
self._dict_mapping.pop(id(self._original_dict))
|
||||||
|
|
||||||
def __setitem__(self, key: str, value: Any) -> None:
|
def __setitem__(self, key: str, value: Any) -> None:
|
||||||
if not isinstance(key, str):
|
if not isinstance(key, str):
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ from typing import Any
|
|||||||
|
|
||||||
from pydase.observer_pattern.observable.observable import Observable
|
from pydase.observer_pattern.observable.observable import Observable
|
||||||
from pydase.observer_pattern.observer.observer import Observer
|
from pydase.observer_pattern.observer.observer import Observer
|
||||||
|
from pydase.utils.helpers import is_descriptor
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -21,13 +22,14 @@ def reverse_dict(original_dict: dict[str, list[str]]) -> dict[str, list[str]]:
|
|||||||
|
|
||||||
def get_property_dependencies(prop: property, prefix: str = "") -> list[str]:
|
def get_property_dependencies(prop: property, prefix: str = "") -> list[str]:
|
||||||
source_code_string = inspect.getsource(prop.fget) # type: ignore[arg-type]
|
source_code_string = inspect.getsource(prop.fget) # type: ignore[arg-type]
|
||||||
pattern = r"self\.([^\s\{\}]+)"
|
pattern = r"self\.([^\s\{\}\(\)]+)"
|
||||||
matches = re.findall(pattern, source_code_string)
|
matches = re.findall(pattern, source_code_string)
|
||||||
return [prefix + match for match in matches if "(" not in match]
|
return [prefix + match for match in matches if "(" not in match]
|
||||||
|
|
||||||
|
|
||||||
class PropertyObserver(Observer):
|
class PropertyObserver(Observer):
|
||||||
def __init__(self, observable: Observable) -> None:
|
def __init__(self, observable: Observable) -> None:
|
||||||
|
self.property_deps_dict: dict[str, list[str]] = {}
|
||||||
super().__init__(observable)
|
super().__init__(observable)
|
||||||
self._update_property_deps_dict()
|
self._update_property_deps_dict()
|
||||||
|
|
||||||
@@ -60,18 +62,28 @@ class PropertyObserver(Observer):
|
|||||||
def _process_nested_observables_properties(
|
def _process_nested_observables_properties(
|
||||||
self, obj: Observable, deps: dict[str, Any], prefix: str
|
self, obj: Observable, deps: dict[str, Any], prefix: str
|
||||||
) -> None:
|
) -> None:
|
||||||
for k, value in vars(obj).items():
|
for k, value in {**vars(type(obj)), **vars(obj)}.items():
|
||||||
|
actual_value = value
|
||||||
prefix = (
|
prefix = (
|
||||||
f"{prefix}." if prefix != "" and not prefix.endswith(".") else prefix
|
f"{prefix}." if prefix != "" and not prefix.endswith(".") else prefix
|
||||||
)
|
)
|
||||||
parent_path = f"{prefix}{k}"
|
parent_path = f"{prefix}{k}"
|
||||||
if isinstance(value, Observable):
|
|
||||||
|
# Get value from descriptor
|
||||||
|
if not isinstance(value, property) and is_descriptor(value):
|
||||||
|
actual_value = getattr(obj, k)
|
||||||
|
|
||||||
|
if isinstance(actual_value, Observable):
|
||||||
new_prefix = f"{parent_path}."
|
new_prefix = f"{parent_path}."
|
||||||
deps.update(
|
deps.update(
|
||||||
self._get_properties_and_their_dependencies(value, new_prefix)
|
self._get_properties_and_their_dependencies(
|
||||||
|
actual_value, new_prefix
|
||||||
|
)
|
||||||
)
|
)
|
||||||
elif isinstance(value, list | dict):
|
elif isinstance(value, list | dict):
|
||||||
self._process_collection_item_properties(value, deps, parent_path)
|
self._process_collection_item_properties(
|
||||||
|
actual_value, deps, parent_path
|
||||||
|
)
|
||||||
|
|
||||||
def _process_collection_item_properties(
|
def _process_collection_item_properties(
|
||||||
self,
|
self,
|
||||||
@@ -89,7 +101,7 @@ class PropertyObserver(Observer):
|
|||||||
elif isinstance(collection, dict):
|
elif isinstance(collection, dict):
|
||||||
for key, val in collection.items():
|
for key, val in collection.items():
|
||||||
if isinstance(val, Observable):
|
if isinstance(val, Observable):
|
||||||
new_prefix = f"{parent_path}['{key}']"
|
new_prefix = f'{parent_path}["{key}"]'
|
||||||
deps.update(
|
deps.update(
|
||||||
self._get_properties_and_their_dependencies(val, new_prefix)
|
self._get_properties_and_their_dependencies(val, new_prefix)
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ from pydase.config import ServiceConfig
|
|||||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||||
from pydase.data_service.state_manager import StateManager
|
from pydase.data_service.state_manager import StateManager
|
||||||
from pydase.server.web_server import WebServer
|
from pydase.server.web_server import WebServer
|
||||||
|
from pydase.task.autostart import autostart_service_tasks
|
||||||
|
|
||||||
HANDLED_SIGNALS = (
|
HANDLED_SIGNALS = (
|
||||||
signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
|
signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
|
||||||
@@ -82,21 +83,17 @@ class Server:
|
|||||||
The `Server` class provides a flexible server implementation for the `DataService`.
|
The `Server` class provides a flexible server implementation for the `DataService`.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
service:
|
service: The DataService instance that this server will manage.
|
||||||
The DataService instance that this server will manage.
|
host: The host address for the server. Defaults to `'0.0.0.0'`, which means all
|
||||||
host:
|
|
||||||
The host address for the server. Defaults to `'0.0.0.0'`, which means all
|
|
||||||
available network interfaces.
|
available network interfaces.
|
||||||
web_port:
|
web_port: The port number for the web server. If set to None, it will use the
|
||||||
The port number for the web server. Defaults to
|
port defined in
|
||||||
[`ServiceConfig().web_port`][pydase.config.ServiceConfig.web_port].
|
[`ServiceConfig().web_port`][pydase.config.ServiceConfig.web_port]. Defaults
|
||||||
enable_web:
|
to None.
|
||||||
Whether to enable the web server.
|
enable_web: Whether to enable the web server.
|
||||||
filename:
|
filename: Filename of the file managing the service state persistence.
|
||||||
Filename of the file managing the service state persistence.
|
additional_servers: A list of additional servers to run alongside the main
|
||||||
additional_servers:
|
server.
|
||||||
A list of additional servers to run alongside the main server.
|
|
||||||
|
|
||||||
Here's an example of how you might define an additional server:
|
Here's an example of how you might define an additional server:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
@@ -135,34 +132,63 @@ class Server:
|
|||||||
)
|
)
|
||||||
server.run()
|
server.run()
|
||||||
```
|
```
|
||||||
**kwargs:
|
autosave_interval: Interval in seconds between automatic state save events.
|
||||||
Additional keyword arguments.
|
If set to `None`, automatic saving is disabled. Defaults to 30 seconds.
|
||||||
|
**kwargs: Additional keyword arguments.
|
||||||
|
|
||||||
|
# Advanced
|
||||||
|
- [`post_startup`][pydase.Server.post_startup] hook:
|
||||||
|
|
||||||
|
This method is intended to be overridden in subclasses. It runs immediately
|
||||||
|
after all servers (web and additional) are initialized and before entering the
|
||||||
|
main event loop. You can use this hook to register custom logic after the
|
||||||
|
server is fully started.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__( # noqa: PLR0913
|
def __init__( # noqa: PLR0913
|
||||||
self,
|
self,
|
||||||
service: DataService,
|
service: DataService,
|
||||||
host: str = "0.0.0.0",
|
host: str = "0.0.0.0",
|
||||||
web_port: int = ServiceConfig().web_port,
|
web_port: int | None = None,
|
||||||
enable_web: bool = True,
|
enable_web: bool = True,
|
||||||
filename: str | Path | None = None,
|
filename: str | Path | None = None,
|
||||||
additional_servers: list[AdditionalServer] | None = None,
|
additional_servers: list[AdditionalServer] | None = None,
|
||||||
|
autosave_interval: float = 30.0,
|
||||||
**kwargs: Any,
|
**kwargs: Any,
|
||||||
) -> None:
|
) -> None:
|
||||||
if additional_servers is None:
|
if additional_servers is None:
|
||||||
additional_servers = []
|
additional_servers = []
|
||||||
self._service = service
|
self._service = service
|
||||||
self._host = host
|
self._host = host
|
||||||
self._web_port = web_port
|
if web_port is None:
|
||||||
|
self._web_port = ServiceConfig().web_port
|
||||||
|
else:
|
||||||
|
self._web_port = web_port
|
||||||
self._enable_web = enable_web
|
self._enable_web = enable_web
|
||||||
self._kwargs = kwargs
|
self._kwargs = kwargs
|
||||||
self._loop: asyncio.AbstractEventLoop
|
|
||||||
self._additional_servers = additional_servers
|
self._additional_servers = additional_servers
|
||||||
self.should_exit = False
|
self.should_exit = False
|
||||||
self.servers: dict[str, asyncio.Future[Any]] = {}
|
self.servers: dict[str, asyncio.Future[Any]] = {}
|
||||||
self._state_manager = StateManager(self._service, filename)
|
|
||||||
|
self._loop = asyncio.new_event_loop()
|
||||||
|
asyncio.set_event_loop(self._loop)
|
||||||
|
|
||||||
|
self._state_manager = StateManager(
|
||||||
|
service=self._service,
|
||||||
|
filename=filename,
|
||||||
|
autosave_interval=autosave_interval,
|
||||||
|
)
|
||||||
self._observer = DataServiceObserver(self._state_manager)
|
self._observer = DataServiceObserver(self._state_manager)
|
||||||
self._state_manager.load_state()
|
self._state_manager.load_state()
|
||||||
|
autostart_service_tasks(self._service)
|
||||||
|
|
||||||
|
self._web_server = WebServer(
|
||||||
|
data_service_observer=self._observer,
|
||||||
|
host=self._host,
|
||||||
|
port=self._web_port,
|
||||||
|
enable_frontend=self._enable_web,
|
||||||
|
**self._kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
def run(self) -> None:
|
def run(self) -> None:
|
||||||
"""
|
"""
|
||||||
@@ -170,7 +196,10 @@ class Server:
|
|||||||
|
|
||||||
This method should be called to start the server after it's been instantiated.
|
This method should be called to start the server after it's been instantiated.
|
||||||
"""
|
"""
|
||||||
asyncio.run(self.serve())
|
try:
|
||||||
|
self._loop.run_until_complete(self.serve())
|
||||||
|
finally:
|
||||||
|
self._loop.close()
|
||||||
|
|
||||||
async def serve(self) -> None:
|
async def serve(self) -> None:
|
||||||
process_id = os.getpid()
|
process_id = os.getpid()
|
||||||
@@ -178,6 +207,7 @@ class Server:
|
|||||||
logger.info("Started server process [%s]", process_id)
|
logger.info("Started server process [%s]", process_id)
|
||||||
|
|
||||||
await self.startup()
|
await self.startup()
|
||||||
|
await self.post_startup()
|
||||||
if self.should_exit:
|
if self.should_exit:
|
||||||
return
|
return
|
||||||
await self.main_loop()
|
await self.main_loop()
|
||||||
@@ -186,10 +216,12 @@ class Server:
|
|||||||
logger.info("Finished server process [%s]", process_id)
|
logger.info("Finished server process [%s]", process_id)
|
||||||
|
|
||||||
async def startup(self) -> None:
|
async def startup(self) -> None:
|
||||||
self._loop = asyncio.get_running_loop()
|
|
||||||
self._loop.set_exception_handler(self.custom_exception_handler)
|
self._loop.set_exception_handler(self.custom_exception_handler)
|
||||||
self.install_signal_handlers()
|
self.install_signal_handlers()
|
||||||
self._service._task_manager.start_autostart_tasks()
|
|
||||||
|
server_task = self._loop.create_task(self._web_server.serve())
|
||||||
|
server_task.add_done_callback(self._handle_server_shutdown)
|
||||||
|
self.servers["web"] = server_task
|
||||||
|
|
||||||
for server in self._additional_servers:
|
for server in self._additional_servers:
|
||||||
addin_server = server["server"](
|
addin_server = server["server"](
|
||||||
@@ -206,17 +238,8 @@ class Server:
|
|||||||
server_task = self._loop.create_task(addin_server.serve())
|
server_task = self._loop.create_task(addin_server.serve())
|
||||||
server_task.add_done_callback(self._handle_server_shutdown)
|
server_task.add_done_callback(self._handle_server_shutdown)
|
||||||
self.servers[server_name] = server_task
|
self.servers[server_name] = server_task
|
||||||
if self._enable_web:
|
|
||||||
self._web_server = WebServer(
|
|
||||||
data_service_observer=self._observer,
|
|
||||||
host=self._host,
|
|
||||||
port=self._web_port,
|
|
||||||
**self._kwargs,
|
|
||||||
)
|
|
||||||
server_task = self._loop.create_task(self._web_server.serve())
|
|
||||||
|
|
||||||
server_task.add_done_callback(self._handle_server_shutdown)
|
self._loop.create_task(self._state_manager.autosave())
|
||||||
self.servers["web"] = server_task
|
|
||||||
|
|
||||||
def _handle_server_shutdown(self, task: asyncio.Task[Any]) -> None:
|
def _handle_server_shutdown(self, task: asyncio.Task[Any]) -> None:
|
||||||
"""Handle server shutdown. If the service should exit, do nothing. Else, make
|
"""Handle server shutdown. If the service should exit, do nothing. Else, make
|
||||||
@@ -245,6 +268,9 @@ class Server:
|
|||||||
logger.debug("Cancelling tasks")
|
logger.debug("Cancelling tasks")
|
||||||
await self.__cancel_tasks()
|
await self.__cancel_tasks()
|
||||||
|
|
||||||
|
async def post_startup(self) -> None:
|
||||||
|
"""Override this in a subclass to register custom logic after startup."""
|
||||||
|
|
||||||
async def __cancel_servers(self) -> None:
|
async def __cancel_servers(self) -> None:
|
||||||
for server_name, task in self.servers.items():
|
for server_name, task in self.servers.items():
|
||||||
task.cancel()
|
task.cancel()
|
||||||
@@ -253,7 +279,7 @@ class Server:
|
|||||||
except asyncio.CancelledError:
|
except asyncio.CancelledError:
|
||||||
logger.debug("Cancelled '%s' server.", server_name)
|
logger.debug("Cancelled '%s' server.", server_name)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Unexpected exception: %s", e)
|
logger.exception("Unexpected exception: %s", e)
|
||||||
|
|
||||||
async def __cancel_tasks(self) -> None:
|
async def __cancel_tasks(self) -> None:
|
||||||
for task in asyncio.all_tasks(self._loop):
|
for task in asyncio.all_tasks(self._loop):
|
||||||
@@ -294,7 +320,7 @@ class Server:
|
|||||||
# here we exclude most kinds of exceptions from triggering this kind of shutdown
|
# here we exclude most kinds of exceptions from triggering this kind of shutdown
|
||||||
exc = context.get("exception")
|
exc = context.get("exception")
|
||||||
if type(exc) not in [RuntimeError, KeyboardInterrupt, asyncio.CancelledError]:
|
if type(exc) not in [RuntimeError, KeyboardInterrupt, asyncio.CancelledError]:
|
||||||
if self._enable_web:
|
if loop.is_running():
|
||||||
|
|
||||||
async def emit_exception() -> None:
|
async def emit_exception() -> None:
|
||||||
try:
|
try:
|
||||||
|
|||||||
@@ -1,15 +1,20 @@
|
|||||||
|
import inspect
|
||||||
import logging
|
import logging
|
||||||
|
from functools import partial
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
import aiohttp.web
|
import aiohttp.web
|
||||||
import aiohttp_middlewares.error
|
import aiohttp_middlewares.error
|
||||||
|
import click
|
||||||
|
|
||||||
from pydase.data_service.state_manager import StateManager
|
from pydase.data_service.state_manager import StateManager
|
||||||
from pydase.server.web_server.api.v1.endpoints import (
|
from pydase.server.web_server.api.v1.endpoints import (
|
||||||
get_value,
|
get_value,
|
||||||
|
trigger_async_method,
|
||||||
trigger_method,
|
trigger_method,
|
||||||
update_value,
|
update_value,
|
||||||
)
|
)
|
||||||
|
from pydase.utils.helpers import get_object_attr_from_path
|
||||||
from pydase.utils.serialization.serializer import dump
|
from pydase.utils.serialization.serializer import dump
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
@@ -17,54 +22,104 @@ if TYPE_CHECKING:
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
API_VERSION = "v1"
|
|
||||||
|
|
||||||
STATUS_OK = 200
|
STATUS_OK = 200
|
||||||
STATUS_FAILED = 400
|
STATUS_FAILED = 400
|
||||||
|
|
||||||
|
|
||||||
|
async def _get_value(
|
||||||
|
request: aiohttp.web.Request, state_manager: StateManager
|
||||||
|
) -> aiohttp.web.Response:
|
||||||
|
log_id = get_log_id(request)
|
||||||
|
|
||||||
|
access_path = request.rel_url.query["access_path"]
|
||||||
|
|
||||||
|
logger.info("Client [%s] is getting the value of '%s'", log_id, access_path)
|
||||||
|
|
||||||
|
status = STATUS_OK
|
||||||
|
try:
|
||||||
|
result = get_value(state_manager, access_path)
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(e)
|
||||||
|
result = dump(e)
|
||||||
|
status = STATUS_FAILED
|
||||||
|
return aiohttp.web.json_response(result, status=status)
|
||||||
|
|
||||||
|
|
||||||
|
async def _update_value(
|
||||||
|
request: aiohttp.web.Request, state_manager: StateManager
|
||||||
|
) -> aiohttp.web.Response:
|
||||||
|
log_id = get_log_id(request)
|
||||||
|
|
||||||
|
data: UpdateDict = await request.json()
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"Client [%s] is updating the value of '%s'", log_id, data["access_path"]
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
update_value(state_manager, data)
|
||||||
|
|
||||||
|
return aiohttp.web.json_response()
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(e)
|
||||||
|
return aiohttp.web.json_response(dump(e), status=STATUS_FAILED)
|
||||||
|
|
||||||
|
|
||||||
|
async def _trigger_method(
|
||||||
|
request: aiohttp.web.Request, state_manager: StateManager
|
||||||
|
) -> aiohttp.web.Response:
|
||||||
|
log_id = get_log_id(request)
|
||||||
|
|
||||||
|
data: TriggerMethodDict = await request.json()
|
||||||
|
|
||||||
|
access_path = data["access_path"]
|
||||||
|
|
||||||
|
logger.info("Client [%s] is triggering the method '%s'", log_id, access_path)
|
||||||
|
|
||||||
|
method = get_object_attr_from_path(state_manager.service, access_path)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if inspect.iscoroutinefunction(method):
|
||||||
|
method_return = await trigger_async_method(
|
||||||
|
state_manager=state_manager, data=data
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
method_return = trigger_method(state_manager=state_manager, data=data)
|
||||||
|
|
||||||
|
return aiohttp.web.json_response(method_return)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(e)
|
||||||
|
return aiohttp.web.json_response(dump(e), status=STATUS_FAILED)
|
||||||
|
|
||||||
|
|
||||||
|
def get_log_id(request: aiohttp.web.Request) -> str:
|
||||||
|
client_id_header = request.headers.get("x-client-id", None)
|
||||||
|
remote_username_header = request.headers.get("remote-user", None)
|
||||||
|
|
||||||
|
if remote_username_header is not None:
|
||||||
|
log_id = f"user={click.style(remote_username_header, fg='cyan')}"
|
||||||
|
elif client_id_header is not None:
|
||||||
|
log_id = f"id={click.style(client_id_header, fg='cyan')}"
|
||||||
|
else:
|
||||||
|
log_id = f"id={click.style(None, fg='cyan')}"
|
||||||
|
|
||||||
|
return log_id
|
||||||
|
|
||||||
|
|
||||||
def create_api_application(state_manager: StateManager) -> aiohttp.web.Application:
|
def create_api_application(state_manager: StateManager) -> aiohttp.web.Application:
|
||||||
api_application = aiohttp.web.Application(
|
api_application = aiohttp.web.Application(
|
||||||
middlewares=(aiohttp_middlewares.error.error_middleware(),)
|
middlewares=(aiohttp_middlewares.error.error_middleware(),)
|
||||||
)
|
)
|
||||||
|
|
||||||
async def _get_value(request: aiohttp.web.Request) -> aiohttp.web.Response:
|
api_application.router.add_get(
|
||||||
logger.info("Handle api request: %s", request)
|
"/get_value", partial(_get_value, state_manager=state_manager)
|
||||||
|
)
|
||||||
access_path = request.rel_url.query["access_path"]
|
api_application.router.add_put(
|
||||||
|
"/update_value", partial(_update_value, state_manager=state_manager)
|
||||||
status = STATUS_OK
|
)
|
||||||
try:
|
api_application.router.add_put(
|
||||||
result = get_value(state_manager, access_path)
|
"/trigger_method", partial(_trigger_method, state_manager=state_manager)
|
||||||
except Exception as e:
|
)
|
||||||
logger.exception(e)
|
|
||||||
result = dump(e)
|
|
||||||
status = STATUS_FAILED
|
|
||||||
return aiohttp.web.json_response(result, status=status)
|
|
||||||
|
|
||||||
async def _update_value(request: aiohttp.web.Request) -> aiohttp.web.Response:
|
|
||||||
data: UpdateDict = await request.json()
|
|
||||||
|
|
||||||
try:
|
|
||||||
update_value(state_manager, data)
|
|
||||||
|
|
||||||
return aiohttp.web.json_response()
|
|
||||||
except Exception as e:
|
|
||||||
logger.exception(e)
|
|
||||||
return aiohttp.web.json_response(dump(e), status=STATUS_FAILED)
|
|
||||||
|
|
||||||
async def _trigger_method(request: aiohttp.web.Request) -> aiohttp.web.Response:
|
|
||||||
data: TriggerMethodDict = await request.json()
|
|
||||||
|
|
||||||
try:
|
|
||||||
return aiohttp.web.json_response(trigger_method(state_manager, data))
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.exception(e)
|
|
||||||
return aiohttp.web.json_response(dump(e), status=STATUS_FAILED)
|
|
||||||
|
|
||||||
api_application.router.add_get("/get_value", _get_value)
|
|
||||||
api_application.router.add_put("/update_value", _update_value)
|
|
||||||
api_application.router.add_put("/trigger_method", _trigger_method)
|
|
||||||
|
|
||||||
return api_application
|
return api_application
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
from typing import Any
|
from typing import TYPE_CHECKING, Any
|
||||||
|
|
||||||
import pydase.utils.serialization.deserializer
|
import pydase.utils.serialization.deserializer
|
||||||
import pydase.utils.serialization.serializer
|
import pydase.utils.serialization.serializer
|
||||||
@@ -7,6 +7,9 @@ from pydase.server.web_server.sio_setup import TriggerMethodDict, UpdateDict
|
|||||||
from pydase.utils.helpers import get_object_attr_from_path
|
from pydase.utils.helpers import get_object_attr_from_path
|
||||||
from pydase.utils.serialization.types import SerializedObject
|
from pydase.utils.serialization.types import SerializedObject
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Awaitable, Callable
|
||||||
|
|
||||||
loads = pydase.utils.serialization.deserializer.loads
|
loads = pydase.utils.serialization.deserializer.loads
|
||||||
Serializer = pydase.utils.serialization.serializer.Serializer
|
Serializer = pydase.utils.serialization.serializer.Serializer
|
||||||
|
|
||||||
@@ -36,3 +39,19 @@ def trigger_method(state_manager: StateManager, data: TriggerMethodDict) -> Any:
|
|||||||
kwargs: dict[str, Any] = loads(serialized_kwargs) if serialized_kwargs else {}
|
kwargs: dict[str, Any] = loads(serialized_kwargs) if serialized_kwargs else {}
|
||||||
|
|
||||||
return Serializer.serialize_object(method(*args, **kwargs))
|
return Serializer.serialize_object(method(*args, **kwargs))
|
||||||
|
|
||||||
|
|
||||||
|
async def trigger_async_method(
|
||||||
|
state_manager: StateManager, data: TriggerMethodDict
|
||||||
|
) -> Any:
|
||||||
|
method: Callable[..., Awaitable[Any]] = get_object_attr_from_path(
|
||||||
|
state_manager.service, data["access_path"]
|
||||||
|
)
|
||||||
|
|
||||||
|
serialized_args = data.get("args", None)
|
||||||
|
args = loads(serialized_args) if serialized_args else []
|
||||||
|
|
||||||
|
serialized_kwargs = data.get("kwargs", None)
|
||||||
|
kwargs: dict[str, Any] = loads(serialized_kwargs) if serialized_kwargs else {}
|
||||||
|
|
||||||
|
return Serializer.serialize_object(await method(*args, **kwargs))
|
||||||
|
|||||||
@@ -1,8 +1,11 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
|
import inspect
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
from typing import Any, TypedDict
|
from typing import Any, TypedDict
|
||||||
|
|
||||||
|
from pydase.utils.helpers import get_object_attr_from_path
|
||||||
|
|
||||||
if sys.version_info < (3, 11):
|
if sys.version_info < (3, 11):
|
||||||
from typing_extensions import NotRequired
|
from typing_extensions import NotRequired
|
||||||
else:
|
else:
|
||||||
@@ -11,13 +14,13 @@ else:
|
|||||||
import click
|
import click
|
||||||
import socketio # type: ignore[import-untyped]
|
import socketio # type: ignore[import-untyped]
|
||||||
|
|
||||||
import pydase.server.web_server.api.v1.endpoints
|
|
||||||
import pydase.utils.serialization.deserializer
|
import pydase.utils.serialization.deserializer
|
||||||
import pydase.utils.serialization.serializer
|
import pydase.utils.serialization.serializer
|
||||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||||
from pydase.data_service.state_manager import StateManager
|
from pydase.data_service.state_manager import StateManager
|
||||||
|
from pydase.server.web_server.api.v1 import endpoints
|
||||||
from pydase.utils.logging import SocketIOHandler
|
from pydase.utils.logging import SocketIOHandler
|
||||||
from pydase.utils.serialization.serializer import SerializedObject
|
from pydase.utils.serialization.types import SerializedObject
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -112,7 +115,7 @@ def setup_sio_server(
|
|||||||
def sio_callback(
|
def sio_callback(
|
||||||
full_access_path: str, value: Any, cached_value_dict: SerializedObject
|
full_access_path: str, value: Any, cached_value_dict: SerializedObject
|
||||||
) -> None:
|
) -> None:
|
||||||
if cached_value_dict != {}:
|
if cached_value_dict != {} and loop.is_running():
|
||||||
|
|
||||||
async def notify() -> None:
|
async def notify() -> None:
|
||||||
try:
|
try:
|
||||||
@@ -138,26 +141,43 @@ def setup_sio_server(
|
|||||||
def setup_sio_events(sio: socketio.AsyncServer, state_manager: StateManager) -> None: # noqa: C901
|
def setup_sio_events(sio: socketio.AsyncServer, state_manager: StateManager) -> None: # noqa: C901
|
||||||
@sio.event # type: ignore
|
@sio.event # type: ignore
|
||||||
async def connect(sid: str, environ: Any) -> None:
|
async def connect(sid: str, environ: Any) -> None:
|
||||||
logger.debug("Client [%s] connected", click.style(str(sid), fg="cyan"))
|
client_id_header = environ.get("HTTP_X_CLIENT_ID", None)
|
||||||
|
remote_username_header = environ.get("HTTP_REMOTE_USER", None)
|
||||||
|
|
||||||
|
if remote_username_header is not None:
|
||||||
|
log_id = f"user={click.style(remote_username_header, fg='cyan')}"
|
||||||
|
elif client_id_header is not None:
|
||||||
|
log_id = f"id={click.style(client_id_header, fg='cyan')}"
|
||||||
|
else:
|
||||||
|
log_id = f"sid={click.style(sid, fg='cyan')}"
|
||||||
|
|
||||||
|
async with sio.session(sid) as session:
|
||||||
|
session["client_id"] = log_id
|
||||||
|
logger.info("Client [%s] connected", session["client_id"])
|
||||||
|
|
||||||
@sio.event # type: ignore
|
@sio.event # type: ignore
|
||||||
async def disconnect(sid: str) -> None:
|
async def disconnect(sid: str) -> None:
|
||||||
logger.debug("Client [%s] disconnected", click.style(str(sid), fg="cyan"))
|
async with sio.session(sid) as session:
|
||||||
|
logger.info("Client [%s] disconnected", session["client_id"])
|
||||||
|
|
||||||
@sio.event # type: ignore
|
@sio.event # type: ignore
|
||||||
async def service_serialization(sid: str) -> SerializedObject:
|
async def service_serialization(sid: str) -> SerializedObject:
|
||||||
logger.debug(
|
async with sio.session(sid) as session:
|
||||||
"Client [%s] requested service serialization",
|
logger.info(
|
||||||
click.style(str(sid), fg="cyan"),
|
"Client [%s] requested service serialization", session["client_id"]
|
||||||
)
|
)
|
||||||
return state_manager.cache_manager.cache
|
return state_manager.cache_manager.cache
|
||||||
|
|
||||||
@sio.event
|
@sio.event
|
||||||
async def update_value(sid: str, data: UpdateDict) -> SerializedObject | None:
|
async def update_value(sid: str, data: UpdateDict) -> SerializedObject | None:
|
||||||
try:
|
async with sio.session(sid) as session:
|
||||||
pydase.server.web_server.api.v1.endpoints.update_value(
|
logger.info(
|
||||||
state_manager=state_manager, data=data
|
"Client [%s] is updating the value of '%s'",
|
||||||
|
session["client_id"],
|
||||||
|
data["access_path"],
|
||||||
)
|
)
|
||||||
|
try:
|
||||||
|
endpoints.update_value(state_manager=state_manager, data=data)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.exception(e)
|
logger.exception(e)
|
||||||
return dump(e)
|
return dump(e)
|
||||||
@@ -165,8 +185,14 @@ def setup_sio_events(sio: socketio.AsyncServer, state_manager: StateManager) ->
|
|||||||
|
|
||||||
@sio.event
|
@sio.event
|
||||||
async def get_value(sid: str, access_path: str) -> SerializedObject:
|
async def get_value(sid: str, access_path: str) -> SerializedObject:
|
||||||
|
async with sio.session(sid) as session:
|
||||||
|
logger.info(
|
||||||
|
"Client [%s] is getting the value of '%s'",
|
||||||
|
session["client_id"],
|
||||||
|
access_path,
|
||||||
|
)
|
||||||
try:
|
try:
|
||||||
return pydase.server.web_server.api.v1.endpoints.get_value(
|
return endpoints.get_value(
|
||||||
state_manager=state_manager, access_path=access_path
|
state_manager=state_manager, access_path=access_path
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -175,12 +201,23 @@ def setup_sio_events(sio: socketio.AsyncServer, state_manager: StateManager) ->
|
|||||||
|
|
||||||
@sio.event
|
@sio.event
|
||||||
async def trigger_method(sid: str, data: TriggerMethodDict) -> Any:
|
async def trigger_method(sid: str, data: TriggerMethodDict) -> Any:
|
||||||
try:
|
async with sio.session(sid) as session:
|
||||||
return pydase.server.web_server.api.v1.endpoints.trigger_method(
|
logger.info(
|
||||||
state_manager=state_manager, data=data
|
"Client [%s] is triggering the method '%s'",
|
||||||
|
session["client_id"],
|
||||||
|
data["access_path"],
|
||||||
)
|
)
|
||||||
|
try:
|
||||||
|
method = get_object_attr_from_path(
|
||||||
|
state_manager.service, data["access_path"]
|
||||||
|
)
|
||||||
|
if inspect.iscoroutinefunction(method):
|
||||||
|
return await endpoints.trigger_async_method(
|
||||||
|
state_manager=state_manager, data=data
|
||||||
|
)
|
||||||
|
return endpoints.trigger_method(state_manager=state_manager, data=data)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(e)
|
logger.exception(e)
|
||||||
return dump(e)
|
return dump(e)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
|
import html
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
@@ -6,6 +7,7 @@ from typing import Any
|
|||||||
|
|
||||||
import aiohttp.web
|
import aiohttp.web
|
||||||
import aiohttp_middlewares.cors
|
import aiohttp_middlewares.cors
|
||||||
|
import anyio
|
||||||
|
|
||||||
from pydase.config import ServiceConfig, WebServerConfig
|
from pydase.config import ServiceConfig, WebServerConfig
|
||||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||||
@@ -20,7 +22,6 @@ from pydase.utils.helpers import (
|
|||||||
from pydase.utils.serialization.serializer import generate_serialized_data_paths
|
from pydase.utils.serialization.serializer import generate_serialized_data_paths
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
API_VERSION = "v1"
|
|
||||||
|
|
||||||
|
|
||||||
class WebServer:
|
class WebServer:
|
||||||
@@ -59,6 +60,8 @@ class WebServer:
|
|||||||
css:
|
css:
|
||||||
Path to a custom CSS file for styling the frontend. If None, no custom
|
Path to a custom CSS file for styling the frontend. If None, no custom
|
||||||
styles are applied. Defaults to None.
|
styles are applied. Defaults to None.
|
||||||
|
favicon_path:
|
||||||
|
Path to a custom favicon.ico file. Defaults to None.
|
||||||
enable_cors:
|
enable_cors:
|
||||||
Flag to enable or disable CORS policy. When True, CORS is enabled, allowing
|
Flag to enable or disable CORS policy. When True, CORS is enabled, allowing
|
||||||
cross-origin requests. Defaults to True.
|
cross-origin requests. Defaults to True.
|
||||||
@@ -77,7 +80,10 @@ class WebServer:
|
|||||||
data_service_observer: DataServiceObserver,
|
data_service_observer: DataServiceObserver,
|
||||||
host: str,
|
host: str,
|
||||||
port: int,
|
port: int,
|
||||||
|
*,
|
||||||
|
enable_frontend: bool = True,
|
||||||
css: str | Path | None = None,
|
css: str | Path | None = None,
|
||||||
|
favicon_path: str | Path | None = None,
|
||||||
enable_cors: bool = True,
|
enable_cors: bool = True,
|
||||||
config_dir: Path = ServiceConfig().config_dir,
|
config_dir: Path = ServiceConfig().config_dir,
|
||||||
generate_web_settings: bool = WebServerConfig().generate_web_settings,
|
generate_web_settings: bool = WebServerConfig().generate_web_settings,
|
||||||
@@ -91,17 +97,60 @@ class WebServer:
|
|||||||
self.css = css
|
self.css = css
|
||||||
self.enable_cors = enable_cors
|
self.enable_cors = enable_cors
|
||||||
self.frontend_src = frontend_src
|
self.frontend_src = frontend_src
|
||||||
|
self.favicon_path: Path | str = favicon_path # type: ignore
|
||||||
|
self.enable_frontend = enable_frontend
|
||||||
|
|
||||||
|
if self.favicon_path is None:
|
||||||
|
self.favicon_path = self.frontend_src / "favicon.ico"
|
||||||
|
|
||||||
self._service_config_dir = config_dir
|
self._service_config_dir = config_dir
|
||||||
self._generate_web_settings = generate_web_settings
|
self._generate_web_settings = generate_web_settings
|
||||||
self._loop: asyncio.AbstractEventLoop
|
self._loop = asyncio.get_event_loop()
|
||||||
|
self._sio = setup_sio_server(self.observer, self.enable_cors, self._loop)
|
||||||
self._initialise_configuration()
|
self._initialise_configuration()
|
||||||
|
|
||||||
async def serve(self) -> None:
|
async def serve(self) -> None:
|
||||||
self._loop = asyncio.get_running_loop()
|
async def index(
|
||||||
self._sio = setup_sio_server(self.observer, self.enable_cors, self._loop)
|
request: aiohttp.web.Request,
|
||||||
|
) -> aiohttp.web.Response | aiohttp.web.FileResponse:
|
||||||
|
forwarded_proto = request.headers.get("X-Forwarded-Proto", "http")
|
||||||
|
escaped_proto = html.escape(forwarded_proto)
|
||||||
|
|
||||||
async def index(request: aiohttp.web.Request) -> aiohttp.web.FileResponse:
|
# Read the index.html file
|
||||||
return aiohttp.web.FileResponse(self.frontend_src / "index.html")
|
index_file_path = self.frontend_src / "index.html"
|
||||||
|
|
||||||
|
async with await anyio.open_file(index_file_path) as f:
|
||||||
|
html_content = await f.read()
|
||||||
|
|
||||||
|
# Inject the escaped forwarded protocol into the HTML
|
||||||
|
modified_html = html_content.replace(
|
||||||
|
'window.__FORWARDED_PROTO__ = "";',
|
||||||
|
f'window.__FORWARDED_PROTO__ = "{escaped_proto}";',
|
||||||
|
)
|
||||||
|
|
||||||
|
# Read the X-Forwarded-Prefix header from the request
|
||||||
|
forwarded_prefix = request.headers.get("X-Forwarded-Prefix", "")
|
||||||
|
|
||||||
|
if forwarded_prefix != "":
|
||||||
|
# Escape the forwarded prefix to prevent XSS
|
||||||
|
escaped_prefix = html.escape(forwarded_prefix)
|
||||||
|
|
||||||
|
# Inject the escaped forwarded prefix into the HTML
|
||||||
|
modified_html = modified_html.replace(
|
||||||
|
'window.__FORWARDED_PREFIX__ = "";',
|
||||||
|
f'window.__FORWARDED_PREFIX__ = "{escaped_prefix}";',
|
||||||
|
)
|
||||||
|
modified_html = modified_html.replace(
|
||||||
|
"/assets/",
|
||||||
|
f"{escaped_prefix}/assets/",
|
||||||
|
)
|
||||||
|
|
||||||
|
modified_html = modified_html.replace(
|
||||||
|
"/favicon.ico",
|
||||||
|
f"{escaped_prefix}/favicon.ico",
|
||||||
|
)
|
||||||
|
|
||||||
|
return aiohttp.web.Response(text=modified_html, content_type="text/html")
|
||||||
|
|
||||||
app = aiohttp.web.Application()
|
app = aiohttp.web.Application()
|
||||||
|
|
||||||
@@ -113,14 +162,17 @@ class WebServer:
|
|||||||
|
|
||||||
# Define routes
|
# Define routes
|
||||||
self._sio.attach(app, socketio_path="/ws/socket.io")
|
self._sio.attach(app, socketio_path="/ws/socket.io")
|
||||||
app.router.add_static("/assets", self.frontend_src / "assets")
|
if self.enable_frontend:
|
||||||
app.router.add_get("/service-properties", self._service_properties_route)
|
app.router.add_static("/assets", self.frontend_src / "assets")
|
||||||
app.router.add_get("/web-settings", self._web_settings_route)
|
app.router.add_get("/favicon.ico", self._favicon_route)
|
||||||
app.router.add_get("/custom.css", self._styles_route)
|
app.router.add_get("/service-properties", self._service_properties_route)
|
||||||
|
app.router.add_get("/web-settings", self._web_settings_route)
|
||||||
|
app.router.add_get("/custom.css", self._styles_route)
|
||||||
app.add_subapp("/api/", create_api_application(self.state_manager))
|
app.add_subapp("/api/", create_api_application(self.state_manager))
|
||||||
|
|
||||||
app.router.add_get(r"/", index)
|
if self.enable_frontend:
|
||||||
app.router.add_get(r"/{tail:.*}", index)
|
app.router.add_get(r"/", index)
|
||||||
|
app.router.add_get(r"/{tail:.*}", index)
|
||||||
|
|
||||||
await aiohttp.web._run_app(
|
await aiohttp.web._run_app(
|
||||||
app,
|
app,
|
||||||
@@ -131,6 +183,12 @@ class WebServer:
|
|||||||
shutdown_timeout=0.1,
|
shutdown_timeout=0.1,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
async def _favicon_route(
|
||||||
|
self,
|
||||||
|
request: aiohttp.web.Request,
|
||||||
|
) -> aiohttp.web.FileResponse:
|
||||||
|
return aiohttp.web.FileResponse(self.favicon_path)
|
||||||
|
|
||||||
async def _service_properties_route(
|
async def _service_properties_route(
|
||||||
self,
|
self,
|
||||||
request: aiohttp.web.Request,
|
request: aiohttp.web.Request,
|
||||||
|
|||||||
0
src/pydase/task/__init__.py
Normal file
46
src/pydase/task/autostart.py
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import pydase.data_service.data_service
|
||||||
|
import pydase.task.task
|
||||||
|
from pydase.task.task_status import TaskStatus
|
||||||
|
from pydase.utils.helpers import is_property_attribute
|
||||||
|
|
||||||
|
|
||||||
|
def autostart_service_tasks(
|
||||||
|
service: pydase.data_service.data_service.DataService,
|
||||||
|
) -> None:
|
||||||
|
"""Starts the service tasks defined with the `autostart` keyword argument.
|
||||||
|
|
||||||
|
This method goes through the attributes of the passed service and its nested
|
||||||
|
[`DataService`][pydase.DataService] instances and calls the start method on
|
||||||
|
autostart-tasks.
|
||||||
|
"""
|
||||||
|
|
||||||
|
for attr in dir(service):
|
||||||
|
if is_property_attribute(service, attr) or attr in {
|
||||||
|
"_observers",
|
||||||
|
"__dict__",
|
||||||
|
}: # prevent eval of property attrs and recursion
|
||||||
|
continue
|
||||||
|
|
||||||
|
val = getattr(service, attr)
|
||||||
|
if isinstance(val, pydase.task.task.Task):
|
||||||
|
if val.autostart and val.status == TaskStatus.NOT_RUNNING:
|
||||||
|
val.start()
|
||||||
|
else:
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
autostart_nested_service_tasks(val)
|
||||||
|
|
||||||
|
|
||||||
|
def autostart_nested_service_tasks(
|
||||||
|
service: pydase.data_service.data_service.DataService | list[Any] | dict[Any, Any],
|
||||||
|
) -> None:
|
||||||
|
if isinstance(service, pydase.DataService):
|
||||||
|
autostart_service_tasks(service)
|
||||||
|
elif isinstance(service, list):
|
||||||
|
for entry in service:
|
||||||
|
autostart_nested_service_tasks(entry)
|
||||||
|
elif isinstance(service, dict):
|
||||||
|
for entry in service.values():
|
||||||
|
autostart_nested_service_tasks(entry)
|
||||||
194
src/pydase/task/decorator.py
Normal file
@@ -0,0 +1,194 @@
|
|||||||
|
import logging
|
||||||
|
from collections.abc import Callable, Coroutine
|
||||||
|
from typing import Any, Generic, TypeVar, overload
|
||||||
|
|
||||||
|
from pydase.data_service.data_service import DataService
|
||||||
|
from pydase.task.task import Task
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
R = TypeVar("R")
|
||||||
|
|
||||||
|
|
||||||
|
class PerInstanceTaskDescriptor(Generic[R]):
|
||||||
|
"""
|
||||||
|
A descriptor class that provides a unique [`Task`][pydase.task.task.Task] object
|
||||||
|
for each instance of a [`DataService`][pydase.data_service.data_service.DataService]
|
||||||
|
class.
|
||||||
|
|
||||||
|
The `PerInstanceTaskDescriptor` is used to transform an asynchronous function into a
|
||||||
|
task that is managed independently for each instance of a `DataService` subclass.
|
||||||
|
This allows tasks to be initialized, started, and stopped on a per-instance basis,
|
||||||
|
providing better control over task execution within the service.
|
||||||
|
|
||||||
|
The `PerInstanceTaskDescriptor` is not intended to be used directly. Instead, it is
|
||||||
|
used internally by the `@task` decorator to manage task objects for each instance of
|
||||||
|
the service class.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__( # noqa: PLR0913
|
||||||
|
self,
|
||||||
|
func: Callable[[Any], Coroutine[None, None, R]]
|
||||||
|
| Callable[[], Coroutine[None, None, R]],
|
||||||
|
autostart: bool,
|
||||||
|
restart_on_exception: bool,
|
||||||
|
restart_sec: float,
|
||||||
|
start_limit_interval_sec: float | None,
|
||||||
|
start_limit_burst: int,
|
||||||
|
exit_on_failure: bool,
|
||||||
|
) -> None:
|
||||||
|
self.__func = func
|
||||||
|
self.__autostart = autostart
|
||||||
|
self.__task_instances: dict[object, Task[R]] = {}
|
||||||
|
self.__restart_on_exception = restart_on_exception
|
||||||
|
self.__restart_sec = restart_sec
|
||||||
|
self.__start_limit_interval_sec = start_limit_interval_sec
|
||||||
|
self.__start_limit_burst = start_limit_burst
|
||||||
|
self.__exit_on_failure = exit_on_failure
|
||||||
|
|
||||||
|
def __set_name__(self, owner: type[DataService], name: str) -> None:
|
||||||
|
"""Stores the name of the task within the owning class. This method is called
|
||||||
|
automatically when the descriptor is assigned to a class attribute.
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.__task_name = name
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def __get__(
|
||||||
|
self, instance: None, owner: type[DataService]
|
||||||
|
) -> "PerInstanceTaskDescriptor[R]":
|
||||||
|
"""Returns the descriptor itself when accessed through the class."""
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def __get__(self, instance: DataService, owner: type[DataService]) -> Task[R]:
|
||||||
|
"""Returns the `Task` object associated with the specific `DataService`
|
||||||
|
instance.
|
||||||
|
If no task exists for the instance, a new `Task` object is created and stored
|
||||||
|
in the `__task_instances` dictionary.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __get__(
|
||||||
|
self, instance: DataService | None, owner: type[DataService]
|
||||||
|
) -> "Task[R] | PerInstanceTaskDescriptor[R]":
|
||||||
|
if instance is None:
|
||||||
|
return self
|
||||||
|
|
||||||
|
# Create a new Task object for this instance, using the function's name.
|
||||||
|
if instance not in self.__task_instances:
|
||||||
|
self.__task_instances[instance] = instance._initialise_new_objects(
|
||||||
|
self.__task_name,
|
||||||
|
Task(
|
||||||
|
self.__func.__get__(instance, owner),
|
||||||
|
autostart=self.__autostart,
|
||||||
|
restart_on_exception=self.__restart_on_exception,
|
||||||
|
restart_sec=self.__restart_sec,
|
||||||
|
start_limit_interval_sec=self.__start_limit_interval_sec,
|
||||||
|
start_limit_burst=self.__start_limit_burst,
|
||||||
|
exit_on_failure=self.__exit_on_failure,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
return self.__task_instances[instance]
|
||||||
|
|
||||||
|
|
||||||
|
def task( # noqa: PLR0913
|
||||||
|
*,
|
||||||
|
autostart: bool = False,
|
||||||
|
restart_on_exception: bool = True,
|
||||||
|
restart_sec: float = 1.0,
|
||||||
|
start_limit_interval_sec: float | None = None,
|
||||||
|
start_limit_burst: int = 3,
|
||||||
|
exit_on_failure: bool = False,
|
||||||
|
) -> Callable[
|
||||||
|
[
|
||||||
|
Callable[[Any], Coroutine[None, None, R]]
|
||||||
|
| Callable[[], Coroutine[None, None, R]]
|
||||||
|
],
|
||||||
|
PerInstanceTaskDescriptor[R],
|
||||||
|
]:
|
||||||
|
"""
|
||||||
|
A decorator to define an asynchronous function as a per-instance task within a
|
||||||
|
[`DataService`][pydase.DataService] class.
|
||||||
|
|
||||||
|
This decorator transforms an asynchronous function into a
|
||||||
|
[`Task`][pydase.task.task.Task] object that is unique to each instance of the
|
||||||
|
`DataService` class. The resulting `Task` object provides methods like `start()`
|
||||||
|
and `stop()` to control the execution of the task, and manages the task's lifecycle
|
||||||
|
independently for each instance of the service.
|
||||||
|
|
||||||
|
The decorator is particularly useful for defining tasks that need to run
|
||||||
|
periodically or perform asynchronous operations, such as polling data sources,
|
||||||
|
updating databases, or any recurring job that should be managed within the context
|
||||||
|
of a `DataService`.
|
||||||
|
|
||||||
|
The keyword arguments that can be passed to this decorator are inspired by systemd
|
||||||
|
unit services.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
autostart:
|
||||||
|
If set to True, the task will automatically start when the service is
|
||||||
|
initialized. Defaults to False.
|
||||||
|
restart_on_exception:
|
||||||
|
Configures whether the task shall be restarted when it exits with an
|
||||||
|
exception other than [`asyncio.CancelledError`][asyncio.CancelledError].
|
||||||
|
restart_sec:
|
||||||
|
Configures the time to sleep before restarting a task. Defaults to 1.0.
|
||||||
|
start_limit_interval_sec:
|
||||||
|
Configures start rate limiting. Tasks which are started more than
|
||||||
|
`start_limit_burst` times within an `start_limit_interval_sec` time span are
|
||||||
|
not permitted to start any more. Defaults to None (disabled rate limiting).
|
||||||
|
start_limit_burst:
|
||||||
|
Configures unit start rate limiting. Tasks which are started more than
|
||||||
|
`start_limit_burst` times within an `start_limit_interval_sec` time span are
|
||||||
|
not permitted to start any more. Defaults to 3.
|
||||||
|
exit_on_failure:
|
||||||
|
If True, exit the service if the task fails and restart_on_exception is
|
||||||
|
False or burst limits are exceeded.
|
||||||
|
Returns:
|
||||||
|
A decorator that wraps an asynchronous function in a
|
||||||
|
[`PerInstanceTaskDescriptor`][pydase.task.decorator.PerInstanceTaskDescriptor]
|
||||||
|
object, which, when accessed, provides an instance-specific
|
||||||
|
[`Task`][pydase.task.task.Task] object.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```python
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
import pydase
|
||||||
|
from pydase.task.decorator import task
|
||||||
|
|
||||||
|
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
@task(autostart=True)
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
while True:
|
||||||
|
# Perform some periodic work
|
||||||
|
await asyncio.sleep(1)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
service = MyService()
|
||||||
|
pydase.Server(service=service).run()
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, `my_task` is defined as a task using the `@task` decorator, and
|
||||||
|
it will start automatically when the service is initialized because
|
||||||
|
`autostart=True` is set. You can manually start or stop the task using
|
||||||
|
`service.my_task.start()` and `service.my_task.stop()`, respectively.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def decorator(
|
||||||
|
func: Callable[[Any], Coroutine[None, None, R]]
|
||||||
|
| Callable[[], Coroutine[None, None, R]],
|
||||||
|
) -> PerInstanceTaskDescriptor[R]:
|
||||||
|
return PerInstanceTaskDescriptor(
|
||||||
|
func,
|
||||||
|
autostart=autostart,
|
||||||
|
restart_on_exception=restart_on_exception,
|
||||||
|
restart_sec=restart_sec,
|
||||||
|
start_limit_interval_sec=start_limit_interval_sec,
|
||||||
|
start_limit_burst=start_limit_burst,
|
||||||
|
exit_on_failure=exit_on_failure,
|
||||||
|
)
|
||||||
|
|
||||||
|
return decorator
|
||||||
237
src/pydase/task/task.py
Normal file
@@ -0,0 +1,237 @@
|
|||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import signal
|
||||||
|
from collections.abc import Callable, Coroutine
|
||||||
|
from datetime import datetime
|
||||||
|
from time import time
|
||||||
|
from typing import (
|
||||||
|
Generic,
|
||||||
|
TypeVar,
|
||||||
|
)
|
||||||
|
|
||||||
|
import pydase.data_service.data_service
|
||||||
|
from pydase.task.task_status import TaskStatus
|
||||||
|
from pydase.utils.helpers import current_event_loop_exists
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
R = TypeVar("R")
|
||||||
|
|
||||||
|
|
||||||
|
class Task(pydase.data_service.data_service.DataService, Generic[R]):
|
||||||
|
"""A class representing a task within the `pydase` framework.
|
||||||
|
|
||||||
|
The `Task` class wraps an asynchronous function and provides methods to manage its
|
||||||
|
lifecycle, such as `start()` and `stop()`. It is typically used to perform periodic
|
||||||
|
or recurring jobs in a [`DataService`][pydase.DataService], like reading
|
||||||
|
sensor data, updating databases, or executing other background tasks.
|
||||||
|
|
||||||
|
When a function is decorated with the [`@task`][pydase.task.decorator.task]
|
||||||
|
decorator, it is replaced by a `Task` instance that controls the execution of the
|
||||||
|
original function.
|
||||||
|
|
||||||
|
The keyword arguments that can be passed to this class are inspired by systemd unit
|
||||||
|
services.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
func:
|
||||||
|
The asynchronous function that this task wraps. It must be a coroutine
|
||||||
|
without arguments.
|
||||||
|
autostart:
|
||||||
|
If set to True, the task will automatically start when the service is
|
||||||
|
initialized. Defaults to False.
|
||||||
|
restart_on_exception:
|
||||||
|
Configures whether the task shall be restarted when it exits with an
|
||||||
|
exception other than [`asyncio.CancelledError`][asyncio.CancelledError].
|
||||||
|
restart_sec:
|
||||||
|
Configures the time to sleep before restarting a task. Defaults to 1.0.
|
||||||
|
start_limit_interval_sec:
|
||||||
|
Configures start rate limiting. Tasks which are started more than
|
||||||
|
`start_limit_burst` times within an `start_limit_interval_sec` time span are
|
||||||
|
not permitted to start any more. Defaults to None (disabled rate limiting).
|
||||||
|
start_limit_burst:
|
||||||
|
Configures unit start rate limiting. Tasks which are started more than
|
||||||
|
`start_limit_burst` times within an `start_limit_interval_sec` time span are
|
||||||
|
not permitted to start any more. Defaults to 3.
|
||||||
|
exit_on_failure:
|
||||||
|
If True, exit the service if the task fails and restart_on_exception is
|
||||||
|
False or burst limits are exceeded.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```python
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
import pydase
|
||||||
|
from pydase.task.decorator import task
|
||||||
|
|
||||||
|
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
@task(autostart=True)
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
while True:
|
||||||
|
# Perform some periodic work
|
||||||
|
await asyncio.sleep(1)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
service = MyService()
|
||||||
|
pydase.Server(service=service).run()
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, `my_task` is defined as a task using the `@task` decorator, and
|
||||||
|
it will start automatically when the service is initialized because
|
||||||
|
`autostart=True` is set. You can manually start or stop the task using
|
||||||
|
`service.my_task.start()` and `service.my_task.stop()`, respectively.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__( # noqa: PLR0913
|
||||||
|
self,
|
||||||
|
func: Callable[[], Coroutine[None, None, R | None]],
|
||||||
|
*,
|
||||||
|
autostart: bool,
|
||||||
|
restart_on_exception: bool,
|
||||||
|
restart_sec: float,
|
||||||
|
start_limit_interval_sec: float | None,
|
||||||
|
start_limit_burst: int,
|
||||||
|
exit_on_failure: bool,
|
||||||
|
) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self._autostart = autostart
|
||||||
|
self._restart_on_exception = restart_on_exception
|
||||||
|
self._restart_sec = restart_sec
|
||||||
|
self._start_limit_interval_sec = start_limit_interval_sec
|
||||||
|
self._start_limit_burst = start_limit_burst
|
||||||
|
self._exit_on_failure = exit_on_failure
|
||||||
|
self._func_name = func.__name__
|
||||||
|
self._func = func
|
||||||
|
self._task: asyncio.Task[R | None] | None = None
|
||||||
|
self._status = TaskStatus.NOT_RUNNING
|
||||||
|
self._result: R | None = None
|
||||||
|
|
||||||
|
if not current_event_loop_exists():
|
||||||
|
self._loop = asyncio.new_event_loop()
|
||||||
|
asyncio.set_event_loop(self._loop)
|
||||||
|
else:
|
||||||
|
self._loop = asyncio.get_event_loop()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def autostart(self) -> bool:
|
||||||
|
"""Defines if the task should be started automatically when the
|
||||||
|
[`Server`][pydase.Server] starts."""
|
||||||
|
return self._autostart
|
||||||
|
|
||||||
|
@property
|
||||||
|
def status(self) -> TaskStatus:
|
||||||
|
"""Returns the current status of the task."""
|
||||||
|
return self._status
|
||||||
|
|
||||||
|
def start(self) -> None:
|
||||||
|
"""Starts the asynchronous task if it is not already running."""
|
||||||
|
if self._task:
|
||||||
|
return
|
||||||
|
|
||||||
|
def task_done_callback(task: asyncio.Task[R | None]) -> None:
|
||||||
|
"""Handles tasks that have finished.
|
||||||
|
|
||||||
|
Updates the task status, calls the defined callbacks, and logs and re-raises
|
||||||
|
exceptions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
self._task = None
|
||||||
|
self._status = TaskStatus.NOT_RUNNING
|
||||||
|
|
||||||
|
exception = None
|
||||||
|
try:
|
||||||
|
exception = task.exception()
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
return
|
||||||
|
|
||||||
|
if exception is not None:
|
||||||
|
logger.error(
|
||||||
|
"Task '%s' encountered an exception: %r",
|
||||||
|
self._func_name,
|
||||||
|
exception,
|
||||||
|
)
|
||||||
|
os.kill(os.getpid(), signal.SIGTERM)
|
||||||
|
else:
|
||||||
|
self._result = task.result()
|
||||||
|
|
||||||
|
logger.info("Creating task %r", self._func_name)
|
||||||
|
self._task = self._loop.create_task(self.__running_task_loop())
|
||||||
|
self._task.add_done_callback(task_done_callback)
|
||||||
|
|
||||||
|
async def __running_task_loop(self) -> R | None:
|
||||||
|
logger.info("Starting task %r", self._func_name)
|
||||||
|
self._status = TaskStatus.RUNNING
|
||||||
|
attempts = 0
|
||||||
|
start_time_of_start_limit_interval = None
|
||||||
|
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
return await self._func()
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
logger.info("Task '%s' was cancelled", self._func_name)
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
attempts, start_time_of_start_limit_interval = (
|
||||||
|
self._handle_task_exception(
|
||||||
|
e, attempts, start_time_of_start_limit_interval
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if not self._should_restart_task(
|
||||||
|
attempts, start_time_of_start_limit_interval
|
||||||
|
):
|
||||||
|
if self._exit_on_failure:
|
||||||
|
raise e
|
||||||
|
break
|
||||||
|
await asyncio.sleep(self._restart_sec)
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _handle_task_exception(
|
||||||
|
self,
|
||||||
|
exception: Exception,
|
||||||
|
attempts: int,
|
||||||
|
start_time_of_start_limit_interval: float | None,
|
||||||
|
) -> tuple[int, float]:
|
||||||
|
"""Handle an exception raised during task execution."""
|
||||||
|
if start_time_of_start_limit_interval is None:
|
||||||
|
start_time_of_start_limit_interval = time()
|
||||||
|
|
||||||
|
attempts += 1
|
||||||
|
logger.exception(
|
||||||
|
"Task %r encountered an exception: %r [attempt %s since %s].",
|
||||||
|
self._func.__name__,
|
||||||
|
exception,
|
||||||
|
attempts,
|
||||||
|
datetime.fromtimestamp(start_time_of_start_limit_interval),
|
||||||
|
)
|
||||||
|
return attempts, start_time_of_start_limit_interval
|
||||||
|
|
||||||
|
def _should_restart_task(
|
||||||
|
self, attempts: int, start_time_of_start_limit_interval: float
|
||||||
|
) -> bool:
|
||||||
|
"""Determine if the task should be restarted."""
|
||||||
|
if not self._restart_on_exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if self._start_limit_interval_sec is not None:
|
||||||
|
if (
|
||||||
|
time() - start_time_of_start_limit_interval
|
||||||
|
) > self._start_limit_interval_sec:
|
||||||
|
# Reset attempts if interval is exceeded
|
||||||
|
start_time_of_start_limit_interval = time()
|
||||||
|
attempts = 1
|
||||||
|
elif attempts > self._start_limit_burst:
|
||||||
|
logger.error(
|
||||||
|
"Task %r exceeded restart burst limit. Stopping.",
|
||||||
|
self._func.__name__,
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def stop(self) -> None:
|
||||||
|
"""Stops the running asynchronous task by cancelling it."""
|
||||||
|
|
||||||
|
if self._task:
|
||||||
|
self._task.cancel()
|
||||||
8
src/pydase/task/task_status.py
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
import enum
|
||||||
|
|
||||||
|
|
||||||
|
class TaskStatus(enum.Enum):
|
||||||
|
"""Possible statuses of a [`Task`][pydase.task.task.Task]."""
|
||||||
|
|
||||||
|
RUNNING = "running"
|
||||||
|
NOT_RUNNING = "not_running"
|
||||||
@@ -114,8 +114,6 @@ def get_class_and_instance_attributes(obj: object) -> dict[str, Any]:
|
|||||||
|
|
||||||
If an attribute exists at both the instance and class level,the value from the
|
If an attribute exists at both the instance and class level,the value from the
|
||||||
instance attribute takes precedence.
|
instance attribute takes precedence.
|
||||||
The __root__ object is removed as this will lead to endless recursion in the for
|
|
||||||
loops.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return dict(chain(type(obj).__dict__.items(), obj.__dict__.items()))
|
return dict(chain(type(obj).__dict__.items(), obj.__dict__.items()))
|
||||||
@@ -162,6 +160,12 @@ def get_object_attr_from_path(target_obj: Any, path: str) -> Any:
|
|||||||
return get_object_by_path_parts(target_obj, path_parts)
|
return get_object_by_path_parts(target_obj, path_parts)
|
||||||
|
|
||||||
|
|
||||||
|
def get_task_class() -> type:
|
||||||
|
from pydase.task.task import Task
|
||||||
|
|
||||||
|
return Task
|
||||||
|
|
||||||
|
|
||||||
def get_component_classes() -> list[type]:
|
def get_component_classes() -> list[type]:
|
||||||
"""
|
"""
|
||||||
Returns references to the component classes in a list.
|
Returns references to the component classes in a list.
|
||||||
@@ -196,3 +200,37 @@ def function_has_arguments(func: Callable[..., Any]) -> bool:
|
|||||||
|
|
||||||
# Check if there are any parameters left which would indicate additional arguments.
|
# Check if there are any parameters left which would indicate additional arguments.
|
||||||
return len(parameters) > 0
|
return len(parameters) > 0
|
||||||
|
|
||||||
|
|
||||||
|
def is_descriptor(obj: object) -> bool:
|
||||||
|
"""Check if an object is a descriptor."""
|
||||||
|
|
||||||
|
# Exclude functions, methods, builtins and properties
|
||||||
|
if (
|
||||||
|
inspect.isfunction(obj)
|
||||||
|
or inspect.ismethod(obj)
|
||||||
|
or inspect.isbuiltin(obj)
|
||||||
|
or isinstance(obj, property)
|
||||||
|
):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Check if it has any descriptor methods
|
||||||
|
return any(hasattr(obj, method) for method in ("__get__", "__set__", "__delete__"))
|
||||||
|
|
||||||
|
|
||||||
|
def current_event_loop_exists() -> bool:
|
||||||
|
"""Check if a running and open asyncio event loop exists in the current thread.
|
||||||
|
|
||||||
|
This checks if an event loop is set via the current event loop policy and verifies
|
||||||
|
that the loop has not been closed.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if an event loop exists and is not closed, False otherwise.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
try:
|
||||||
|
return not asyncio.get_event_loop().is_closed()
|
||||||
|
except RuntimeError:
|
||||||
|
return False
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ import logging.config
|
|||||||
import sys
|
import sys
|
||||||
from collections.abc import Callable
|
from collections.abc import Callable
|
||||||
from copy import copy
|
from copy import copy
|
||||||
from typing import ClassVar, Literal
|
from typing import ClassVar, Literal, TextIO
|
||||||
|
|
||||||
import click
|
import click
|
||||||
import socketio # type: ignore[import-untyped]
|
import socketio # type: ignore[import-untyped]
|
||||||
@@ -29,22 +29,44 @@ LOGGING_CONFIG = {
|
|||||||
"datefmt": "%Y-%m-%d %H:%M:%S",
|
"datefmt": "%Y-%m-%d %H:%M:%S",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"filters": {
|
||||||
|
"only_pydase_server": {
|
||||||
|
"()": "pydase.utils.logging.NameFilter",
|
||||||
|
"match": "pydase.server",
|
||||||
|
},
|
||||||
|
"exclude_pydase_server": {
|
||||||
|
"()": "pydase.utils.logging.NameFilter",
|
||||||
|
"match": "pydase.server",
|
||||||
|
"invert": True,
|
||||||
|
},
|
||||||
|
},
|
||||||
"handlers": {
|
"handlers": {
|
||||||
"default": {
|
"stdout_handler": {
|
||||||
|
"formatter": "default",
|
||||||
|
"class": "logging.StreamHandler",
|
||||||
|
"stream": "ext://sys.stdout",
|
||||||
|
"filters": ["only_pydase_server"],
|
||||||
|
},
|
||||||
|
"stderr_handler": {
|
||||||
"formatter": "default",
|
"formatter": "default",
|
||||||
"class": "logging.StreamHandler",
|
"class": "logging.StreamHandler",
|
||||||
"stream": "ext://sys.stderr",
|
"stream": "ext://sys.stderr",
|
||||||
|
"filters": ["exclude_pydase_server"],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"loggers": {
|
"loggers": {
|
||||||
"pydase": {"handlers": ["default"], "level": LOG_LEVEL, "propagate": False},
|
"pydase": {
|
||||||
|
"handlers": ["stdout_handler", "stderr_handler"],
|
||||||
|
"level": LOG_LEVEL,
|
||||||
|
"propagate": False,
|
||||||
|
},
|
||||||
"aiohttp_middlewares": {
|
"aiohttp_middlewares": {
|
||||||
"handlers": ["default"],
|
"handlers": ["stderr_handler"],
|
||||||
"level": logging.WARNING,
|
"level": logging.WARNING,
|
||||||
"propagate": False,
|
"propagate": False,
|
||||||
},
|
},
|
||||||
"aiohttp": {
|
"aiohttp": {
|
||||||
"handlers": ["default"],
|
"handlers": ["stderr_handler"],
|
||||||
"level": logging.INFO,
|
"level": logging.INFO,
|
||||||
"propagate": False,
|
"propagate": False,
|
||||||
},
|
},
|
||||||
@@ -52,6 +74,23 @@ LOGGING_CONFIG = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class NameFilter(logging.Filter):
|
||||||
|
"""
|
||||||
|
Logging filter that allows filtering logs based on the logger name.
|
||||||
|
Can either include or exclude a specific logger.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, match: str, invert: bool = False):
|
||||||
|
super().__init__()
|
||||||
|
self.match = match
|
||||||
|
self.invert = invert
|
||||||
|
|
||||||
|
def filter(self, record: logging.LogRecord) -> bool:
|
||||||
|
if self.invert:
|
||||||
|
return not record.name.startswith(self.match)
|
||||||
|
return record.name.startswith(self.match)
|
||||||
|
|
||||||
|
|
||||||
class DefaultFormatter(logging.Formatter):
|
class DefaultFormatter(logging.Formatter):
|
||||||
"""
|
"""
|
||||||
A custom log formatter class that:
|
A custom log formatter class that:
|
||||||
@@ -126,15 +165,16 @@ class SocketIOHandler(logging.Handler):
|
|||||||
log_entry = self.format(record)
|
log_entry = self.format(record)
|
||||||
|
|
||||||
loop = asyncio.get_event_loop()
|
loop = asyncio.get_event_loop()
|
||||||
loop.create_task(
|
if loop.is_running():
|
||||||
self._sio.emit(
|
loop.create_task(
|
||||||
"log",
|
self._sio.emit(
|
||||||
{
|
"log",
|
||||||
"levelname": record.levelname,
|
{
|
||||||
"message": log_entry,
|
"levelname": record.levelname,
|
||||||
},
|
"message": log_entry,
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def setup_logging() -> None:
|
def setup_logging() -> None:
|
||||||
@@ -150,3 +190,51 @@ def setup_logging() -> None:
|
|||||||
logger.debug("Configuring pydase logging.")
|
logger.debug("Configuring pydase logging.")
|
||||||
|
|
||||||
logging.config.dictConfig(LOGGING_CONFIG)
|
logging.config.dictConfig(LOGGING_CONFIG)
|
||||||
|
|
||||||
|
|
||||||
|
def configure_logging_with_pydase_formatter(
|
||||||
|
name: str | None = None, level: int = logging.INFO, stream: TextIO | None = None
|
||||||
|
) -> None:
|
||||||
|
"""Configure a logger with the pydase `DefaultFormatter`.
|
||||||
|
|
||||||
|
This sets up a `StreamHandler` with the custom `DefaultFormatter`, which includes
|
||||||
|
timestamp, log level with color (if supported), logger name, function, and line
|
||||||
|
number. It can be used to configure the root logger or any named logger.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name: The name of the logger to configure. If None, the root logger is used.
|
||||||
|
level: The logging level to set on the logger (e.g., logging.DEBUG,
|
||||||
|
logging.INFO). Defaults to logging.INFO.
|
||||||
|
stream: The output stream for the log messages (e.g., sys.stdout or sys.stderr).
|
||||||
|
If None, defaults to sys.stderr.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
Configure logging in your service:
|
||||||
|
|
||||||
|
```python
|
||||||
|
import sys
|
||||||
|
from pydase.utils.logging import configure_logging_with_pydase_formatter
|
||||||
|
|
||||||
|
configure_logging_with_pydase_formatter(
|
||||||
|
name="my_service", # Use the package/module name or None for the root logger
|
||||||
|
level=logging.DEBUG, # Set the desired logging level (defaults to INFO)
|
||||||
|
stream=sys.stdout # Set the output stream (stderr by default)
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- This function adds a new handler each time it's called.
|
||||||
|
Use carefully to avoid duplicate logs.
|
||||||
|
- Colors are enabled if the stream supports TTY (e.g., in terminal).
|
||||||
|
""" # noqa: E501
|
||||||
|
|
||||||
|
logger = logging.getLogger(name=name)
|
||||||
|
handler = logging.StreamHandler(stream=stream)
|
||||||
|
formatter = DefaultFormatter(
|
||||||
|
fmt="%(asctime)s.%(msecs)03d | %(levelprefix)s | "
|
||||||
|
"%(name)s:%(funcName)s:%(lineno)d - %(message)s",
|
||||||
|
datefmt="%Y-%m-%d %H:%M:%S",
|
||||||
|
)
|
||||||
|
handler.setFormatter(formatter)
|
||||||
|
logger.addHandler(handler)
|
||||||
|
logger.setLevel(level)
|
||||||
|
|||||||
@@ -6,7 +6,9 @@ from typing import TYPE_CHECKING, Any, NoReturn, cast
|
|||||||
import pydase
|
import pydase
|
||||||
import pydase.components
|
import pydase.components
|
||||||
import pydase.units as u
|
import pydase.units as u
|
||||||
from pydase.utils.helpers import get_component_classes
|
from pydase.utils.helpers import (
|
||||||
|
get_component_classes,
|
||||||
|
)
|
||||||
from pydase.utils.serialization.types import (
|
from pydase.utils.serialization.types import (
|
||||||
SerializedDatetime,
|
SerializedDatetime,
|
||||||
SerializedException,
|
SerializedException,
|
||||||
@@ -49,9 +51,9 @@ class Deserializer:
|
|||||||
return handler(serialized_object)
|
return handler(serialized_object)
|
||||||
|
|
||||||
# Custom types like Components or DataService classes
|
# Custom types like Components or DataService classes
|
||||||
component_class = cls.get_component_class(serialized_object["type"])
|
service_base_class = cls.get_service_base_class(serialized_object["type"])
|
||||||
if component_class:
|
if service_base_class:
|
||||||
return cls.deserialize_component_type(serialized_object, component_class)
|
return cls.deserialize_data_service(serialized_object, service_base_class)
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -83,7 +85,7 @@ class Deserializer:
|
|||||||
def deserialize_list(cls, serialized_object: SerializedObject) -> Any:
|
def deserialize_list(cls, serialized_object: SerializedObject) -> Any:
|
||||||
return [
|
return [
|
||||||
cls.deserialize(item)
|
cls.deserialize(item)
|
||||||
for item in cast(list[SerializedObject], serialized_object["value"])
|
for item in cast("list[SerializedObject]", serialized_object["value"])
|
||||||
]
|
]
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@@ -91,7 +93,7 @@ class Deserializer:
|
|||||||
return {
|
return {
|
||||||
key: cls.deserialize(value)
|
key: cls.deserialize(value)
|
||||||
for key, value in cast(
|
for key, value in cast(
|
||||||
dict[str, SerializedObject], serialized_object["value"]
|
"dict[str, SerializedObject]", serialized_object["value"]
|
||||||
).items()
|
).items()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -110,11 +112,11 @@ class Deserializer:
|
|||||||
raise exception(serialized_object["value"])
|
raise exception(serialized_object["value"])
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_component_class(type_name: str | None) -> type | None:
|
def get_service_base_class(type_name: str | None) -> type | None:
|
||||||
for component_class in get_component_classes():
|
for component_class in get_component_classes():
|
||||||
if type_name == component_class.__name__:
|
if type_name == component_class.__name__:
|
||||||
return component_class
|
return component_class
|
||||||
if type_name == "DataService":
|
if type_name in ("DataService", "Task"):
|
||||||
import pydase
|
import pydase
|
||||||
|
|
||||||
return pydase.DataService
|
return pydase.DataService
|
||||||
@@ -137,7 +139,7 @@ class Deserializer:
|
|||||||
return property(get, set)
|
return property(get, set)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def deserialize_component_type(
|
def deserialize_data_service(
|
||||||
cls, serialized_object: SerializedObject, base_class: type
|
cls, serialized_object: SerializedObject, base_class: type
|
||||||
) -> Any:
|
) -> Any:
|
||||||
def create_proxy_class(serialized_object: SerializedObject) -> type:
|
def create_proxy_class(serialized_object: SerializedObject) -> type:
|
||||||
@@ -146,7 +148,7 @@ class Deserializer:
|
|||||||
|
|
||||||
# Process and add properties based on the serialized object
|
# Process and add properties based on the serialized object
|
||||||
for key, value in cast(
|
for key, value in cast(
|
||||||
dict[str, SerializedObject], serialized_object["value"]
|
"dict[str, SerializedObject]", serialized_object["value"]
|
||||||
).items():
|
).items():
|
||||||
if value["type"] != "method":
|
if value["type"] != "method":
|
||||||
class_attrs[key] = cls.create_attr_property(value)
|
class_attrs[key] = cls.create_attr_property(value)
|
||||||
|
|||||||
@@ -9,37 +9,41 @@ from typing import TYPE_CHECKING, Any, Literal, cast
|
|||||||
|
|
||||||
import pydase.units as u
|
import pydase.units as u
|
||||||
from pydase.data_service.abstract_data_service import AbstractDataService
|
from pydase.data_service.abstract_data_service import AbstractDataService
|
||||||
from pydase.data_service.task_manager import TaskStatus
|
from pydase.task.task_status import TaskStatus
|
||||||
from pydase.utils.decorators import render_in_frontend
|
from pydase.utils.decorators import render_in_frontend
|
||||||
from pydase.utils.helpers import (
|
from pydase.utils.helpers import (
|
||||||
get_attribute_doc,
|
get_attribute_doc,
|
||||||
get_component_classes,
|
get_component_classes,
|
||||||
get_data_service_class_reference,
|
get_data_service_class_reference,
|
||||||
|
get_task_class,
|
||||||
|
is_property_attribute,
|
||||||
parse_full_access_path,
|
parse_full_access_path,
|
||||||
parse_serialized_key,
|
parse_serialized_key,
|
||||||
)
|
)
|
||||||
from pydase.utils.serialization.types import (
|
|
||||||
DataServiceTypes,
|
|
||||||
SerializedBool,
|
|
||||||
SerializedDataService,
|
|
||||||
SerializedDatetime,
|
|
||||||
SerializedDict,
|
|
||||||
SerializedEnum,
|
|
||||||
SerializedException,
|
|
||||||
SerializedFloat,
|
|
||||||
SerializedInteger,
|
|
||||||
SerializedList,
|
|
||||||
SerializedMethod,
|
|
||||||
SerializedNoneType,
|
|
||||||
SerializedObject,
|
|
||||||
SerializedQuantity,
|
|
||||||
SerializedString,
|
|
||||||
SignatureDict,
|
|
||||||
)
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from collections.abc import Callable
|
from collections.abc import Callable
|
||||||
|
|
||||||
|
from pydase.client.proxy_class import ProxyClass
|
||||||
|
from pydase.utils.serialization.types import (
|
||||||
|
DataServiceTypes,
|
||||||
|
SerializedBool,
|
||||||
|
SerializedDataService,
|
||||||
|
SerializedDatetime,
|
||||||
|
SerializedDict,
|
||||||
|
SerializedEnum,
|
||||||
|
SerializedException,
|
||||||
|
SerializedFloat,
|
||||||
|
SerializedInteger,
|
||||||
|
SerializedList,
|
||||||
|
SerializedMethod,
|
||||||
|
SerializedNoneType,
|
||||||
|
SerializedObject,
|
||||||
|
SerializedQuantity,
|
||||||
|
SerializedString,
|
||||||
|
SignatureDict,
|
||||||
|
)
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@@ -72,6 +76,7 @@ class Serializer:
|
|||||||
Returns:
|
Returns:
|
||||||
Dictionary representation of `obj`.
|
Dictionary representation of `obj`.
|
||||||
"""
|
"""
|
||||||
|
from pydase.client.client import ProxyClass
|
||||||
|
|
||||||
result: SerializedObject
|
result: SerializedObject
|
||||||
|
|
||||||
@@ -81,6 +86,9 @@ class Serializer:
|
|||||||
elif isinstance(obj, datetime):
|
elif isinstance(obj, datetime):
|
||||||
result = cls._serialize_datetime(obj, access_path=access_path)
|
result = cls._serialize_datetime(obj, access_path=access_path)
|
||||||
|
|
||||||
|
elif isinstance(obj, ProxyClass):
|
||||||
|
result = cls._serialize_proxy_class(obj, access_path=access_path)
|
||||||
|
|
||||||
elif isinstance(obj, AbstractDataService):
|
elif isinstance(obj, AbstractDataService):
|
||||||
result = cls._serialize_data_service(obj, access_path=access_path)
|
result = cls._serialize_data_service(obj, access_path=access_path)
|
||||||
|
|
||||||
@@ -150,7 +158,7 @@ class Serializer:
|
|||||||
"doc": None,
|
"doc": None,
|
||||||
"readonly": True,
|
"readonly": True,
|
||||||
"type": "Exception",
|
"type": "Exception",
|
||||||
"value": obj.args[0],
|
"value": obj.args[0] if len(obj.args) > 0 else "",
|
||||||
"name": obj.__class__.__name__,
|
"name": obj.__class__.__name__,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -245,7 +253,7 @@ class Serializer:
|
|||||||
|
|
||||||
for k, v in sig.parameters.items():
|
for k, v in sig.parameters.items():
|
||||||
default_value = cast(
|
default_value = cast(
|
||||||
dict[str, Any], {} if v.default == inspect._empty else dump(v.default)
|
"dict[str, Any]", {} if v.default == inspect._empty else dump(v.default)
|
||||||
)
|
)
|
||||||
default_value.pop("full_access_path", None)
|
default_value.pop("full_access_path", None)
|
||||||
signature["parameters"][k] = {
|
signature["parameters"][k] = {
|
||||||
@@ -280,6 +288,10 @@ class Serializer:
|
|||||||
if component_base_cls:
|
if component_base_cls:
|
||||||
obj_type = component_base_cls.__name__ # type: ignore
|
obj_type = component_base_cls.__name__ # type: ignore
|
||||||
|
|
||||||
|
elif isinstance(obj, get_task_class()):
|
||||||
|
# Check if obj is a pydase task
|
||||||
|
obj_type = "Task"
|
||||||
|
|
||||||
# Get the set of DataService class attributes
|
# Get the set of DataService class attributes
|
||||||
data_service_attr_set = set(dir(get_data_service_class_reference()))
|
data_service_attr_set = set(dir(get_data_service_class_reference()))
|
||||||
# Get the set of the object attributes
|
# Get the set of the object attributes
|
||||||
@@ -294,29 +306,15 @@ class Serializer:
|
|||||||
if key.startswith("_"):
|
if key.startswith("_"):
|
||||||
continue # Skip attributes that start with underscore
|
continue # Skip attributes that start with underscore
|
||||||
|
|
||||||
# Skip keys that start with "start_" or "stop_" and end with an async
|
|
||||||
# method name
|
|
||||||
if key.startswith(("start_", "stop_")) and key.split("_", 1)[1] in {
|
|
||||||
name
|
|
||||||
for name, _ in inspect.getmembers(
|
|
||||||
obj, predicate=inspect.iscoroutinefunction
|
|
||||||
)
|
|
||||||
}:
|
|
||||||
continue
|
|
||||||
|
|
||||||
val = getattr(obj, key)
|
val = getattr(obj, key)
|
||||||
|
|
||||||
path = f"{access_path}.{key}" if access_path else key
|
path = f"{access_path}.{key}" if access_path else key
|
||||||
serialized_object = cls.serialize_object(val, access_path=path)
|
serialized_object = cls.serialize_object(val, access_path=path)
|
||||||
|
|
||||||
# If there's a running task for this method
|
|
||||||
if serialized_object["type"] == "method" and key in obj._task_manager.tasks:
|
|
||||||
serialized_object["value"] = TaskStatus.RUNNING.name
|
|
||||||
|
|
||||||
value[key] = serialized_object
|
value[key] = serialized_object
|
||||||
|
|
||||||
# If the DataService attribute is a property
|
# If the DataService attribute is a property
|
||||||
if isinstance(getattr(obj.__class__, key, None), property):
|
if is_property_attribute(obj, key):
|
||||||
prop: property = getattr(obj.__class__, key)
|
prop: property = getattr(obj.__class__, key)
|
||||||
value[key]["readonly"] = prop.fset is None
|
value[key]["readonly"] = prop.fset is None
|
||||||
value[key]["doc"] = get_attribute_doc(prop) # overwrite the doc
|
value[key]["doc"] = get_attribute_doc(prop) # overwrite the doc
|
||||||
@@ -330,6 +328,13 @@ class Serializer:
|
|||||||
"doc": doc,
|
"doc": doc,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _serialize_proxy_class(
|
||||||
|
cls, obj: ProxyClass, access_path: str = ""
|
||||||
|
) -> SerializedDataService:
|
||||||
|
# Get serialization value from the remote service and adapt the full_access_path
|
||||||
|
return add_prefix_to_full_access_path(obj.serialize(), access_path)
|
||||||
|
|
||||||
|
|
||||||
def dump(obj: Any) -> SerializedObject:
|
def dump(obj: Any) -> SerializedObject:
|
||||||
"""Serialize `obj` to a
|
"""Serialize `obj` to a
|
||||||
@@ -380,7 +385,7 @@ def set_nested_value_by_path(
|
|||||||
current_dict, path_part, allow_append=False
|
current_dict, path_part, allow_append=False
|
||||||
)
|
)
|
||||||
current_dict = cast(
|
current_dict = cast(
|
||||||
dict[Any, SerializedObject],
|
"dict[Any, SerializedObject]",
|
||||||
next_level_serialized_object["value"],
|
next_level_serialized_object["value"],
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -388,7 +393,7 @@ def set_nested_value_by_path(
|
|||||||
current_dict, path_parts[-1], allow_append=True
|
current_dict, path_parts[-1], allow_append=True
|
||||||
)
|
)
|
||||||
except (SerializationPathError, KeyError) as e:
|
except (SerializationPathError, KeyError) as e:
|
||||||
logger.error("Error occured trying to change %a: %s", path, e)
|
logger.exception("Error occured trying to change %a: %s", path, e)
|
||||||
return
|
return
|
||||||
|
|
||||||
if next_level_serialized_object["type"] == "method": # state change of task
|
if next_level_serialized_object["type"] == "method": # state change of task
|
||||||
@@ -421,7 +426,7 @@ def get_nested_dict_by_path(
|
|||||||
current_dict, path_part, allow_append=False
|
current_dict, path_part, allow_append=False
|
||||||
)
|
)
|
||||||
current_dict = cast(
|
current_dict = cast(
|
||||||
dict[Any, SerializedObject],
|
"dict[Any, SerializedObject]",
|
||||||
next_level_serialized_object["value"],
|
next_level_serialized_object["value"],
|
||||||
)
|
)
|
||||||
return get_container_item_by_key(current_dict, path_parts[-1], allow_append=False)
|
return get_container_item_by_key(current_dict, path_parts[-1], allow_append=False)
|
||||||
@@ -451,7 +456,7 @@ def get_or_create_item_in_container(
|
|||||||
return container[key]
|
return container[key]
|
||||||
except IndexError:
|
except IndexError:
|
||||||
if allow_add_key and key == len(container):
|
if allow_add_key and key == len(container):
|
||||||
cast(list[SerializedObject], container).append(
|
cast("list[SerializedObject]", container).append(
|
||||||
create_empty_serialized_object()
|
create_empty_serialized_object()
|
||||||
)
|
)
|
||||||
return container[key]
|
return container[key]
|
||||||
@@ -536,7 +541,7 @@ def get_data_paths_from_serialized_object( # noqa: C901
|
|||||||
|
|
||||||
elif serialized_dict_is_nested_object(serialized_obj):
|
elif serialized_dict_is_nested_object(serialized_obj):
|
||||||
for key, value in cast(
|
for key, value in cast(
|
||||||
dict[str, SerializedObject], serialized_obj["value"]
|
"dict[str, SerializedObject]", serialized_obj["value"]
|
||||||
).items():
|
).items():
|
||||||
# Serialized dictionaries need to have a different new_path than nested
|
# Serialized dictionaries need to have a different new_path than nested
|
||||||
# classes
|
# classes
|
||||||
@@ -580,6 +585,62 @@ def generate_serialized_data_paths(
|
|||||||
return paths
|
return paths
|
||||||
|
|
||||||
|
|
||||||
|
def add_prefix_to_full_access_path(
|
||||||
|
serialized_obj: SerializedObject, prefix: str
|
||||||
|
) -> Any:
|
||||||
|
"""Recursively adds a specified prefix to all full access paths of the serialized
|
||||||
|
object.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
serialized_obj:
|
||||||
|
The serialized object to process.
|
||||||
|
prefix:
|
||||||
|
The prefix string to prepend to each full access path.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The modified serialized object with the prefix added to all full access paths.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```python
|
||||||
|
>>> serialized_obj = {
|
||||||
|
... "full_access_path": "",
|
||||||
|
... "value": {
|
||||||
|
... "item": {
|
||||||
|
... "full_access_path": "some_item_path",
|
||||||
|
... "value": 1.0
|
||||||
|
... }
|
||||||
|
... }
|
||||||
|
... }
|
||||||
|
...
|
||||||
|
... modified_data = add_prefix_to_full_access_path(serialized_obj, 'prefix')
|
||||||
|
{"full_access_path": "prefix", "value": {"item": {"full_access_path":
|
||||||
|
"prefix.some_item_path", "value": 1.0}}}
|
||||||
|
```
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
if serialized_obj.get("full_access_path", None) is not None:
|
||||||
|
serialized_obj["full_access_path"] = (
|
||||||
|
prefix + "." + serialized_obj["full_access_path"]
|
||||||
|
if serialized_obj["full_access_path"] != ""
|
||||||
|
else prefix
|
||||||
|
)
|
||||||
|
|
||||||
|
if isinstance(serialized_obj["value"], list):
|
||||||
|
for value in serialized_obj["value"]:
|
||||||
|
add_prefix_to_full_access_path(cast("SerializedObject", value), prefix)
|
||||||
|
|
||||||
|
elif isinstance(serialized_obj["value"], dict):
|
||||||
|
for value in cast(
|
||||||
|
"dict[str, SerializedObject]", serialized_obj["value"]
|
||||||
|
).values():
|
||||||
|
add_prefix_to_full_access_path(cast("SerializedObject", value), prefix)
|
||||||
|
except (TypeError, KeyError, AttributeError):
|
||||||
|
# passed dictionary is not a serialized object
|
||||||
|
pass
|
||||||
|
return serialized_obj
|
||||||
|
|
||||||
|
|
||||||
def serialized_dict_is_nested_object(serialized_dict: SerializedObject) -> bool:
|
def serialized_dict_is_nested_object(serialized_dict: SerializedObject) -> bool:
|
||||||
value = serialized_dict["value"]
|
value = serialized_dict["value"]
|
||||||
# We are excluding Quantity here as the value corresponding to the "value" key is
|
# We are excluding Quantity here as the value corresponding to the "value" key is
|
||||||
|
|||||||
@@ -98,7 +98,9 @@ class SerializedException(SerializedObjectBase):
|
|||||||
type: Literal["Exception"]
|
type: Literal["Exception"]
|
||||||
|
|
||||||
|
|
||||||
DataServiceTypes = Literal["DataService", "Image", "NumberSlider", "DeviceConnection"]
|
DataServiceTypes = Literal[
|
||||||
|
"DataService", "Image", "NumberSlider", "DeviceConnection", "Task"
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class SerializedDataService(SerializedObjectBase):
|
class SerializedDataService(SerializedObjectBase):
|
||||||
|
|||||||
@@ -2,8 +2,9 @@ import threading
|
|||||||
from collections.abc import Generator
|
from collections.abc import Generator
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
import pydase
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import pydase
|
||||||
from pydase.client.proxy_loader import ProxyAttributeError
|
from pydase.client.proxy_loader import ProxyAttributeError
|
||||||
|
|
||||||
|
|
||||||
@@ -41,6 +42,9 @@ def pydase_client() -> Generator[pydase.Client, None, Any]:
|
|||||||
def my_method(self, input_str: str) -> str:
|
def my_method(self, input_str: str) -> str:
|
||||||
return input_str
|
return input_str
|
||||||
|
|
||||||
|
async def my_async_method(self, input_str: str) -> str:
|
||||||
|
return input_str
|
||||||
|
|
||||||
server = pydase.Server(MyService(), web_port=9999)
|
server = pydase.Server(MyService(), web_port=9999)
|
||||||
thread = threading.Thread(target=server.run, daemon=True)
|
thread = threading.Thread(target=server.run, daemon=True)
|
||||||
thread.start()
|
thread.start()
|
||||||
@@ -49,6 +53,7 @@ def pydase_client() -> Generator[pydase.Client, None, Any]:
|
|||||||
|
|
||||||
yield client
|
yield client
|
||||||
|
|
||||||
|
client.disconnect()
|
||||||
server.handle_exit()
|
server.handle_exit()
|
||||||
thread.join()
|
thread.join()
|
||||||
|
|
||||||
@@ -79,6 +84,14 @@ def test_method_execution(pydase_client: pydase.Client) -> None:
|
|||||||
pydase_client.proxy.my_method(kwarg="hello")
|
pydase_client.proxy.my_method(kwarg="hello")
|
||||||
|
|
||||||
|
|
||||||
|
def test_async_method_execution(pydase_client: pydase.Client) -> None:
|
||||||
|
assert pydase_client.proxy.my_async_method("My return string") == "My return string"
|
||||||
|
assert (
|
||||||
|
pydase_client.proxy.my_async_method(input_str="My return string")
|
||||||
|
== "My return string"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_nested_service(pydase_client: pydase.Client) -> None:
|
def test_nested_service(pydase_client: pydase.Client) -> None:
|
||||||
assert pydase_client.proxy.sub_service.name == "SubService"
|
assert pydase_client.proxy.sub_service.name == "SubService"
|
||||||
pydase_client.proxy.sub_service.name = "New name"
|
pydase_client.proxy.sub_service.name = "New name"
|
||||||
@@ -138,3 +151,42 @@ def test_tab_completion(pydase_client: pydase.Client) -> None:
|
|||||||
"sub_service",
|
"sub_service",
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_context_manager(pydase_client: pydase.Client) -> None:
|
||||||
|
client = pydase.Client(url="ws://localhost:9999")
|
||||||
|
|
||||||
|
assert client.proxy.connected
|
||||||
|
|
||||||
|
with client:
|
||||||
|
client.proxy.my_property = 1337.01
|
||||||
|
assert client.proxy.my_property == 1337.01
|
||||||
|
|
||||||
|
assert not client.proxy.connected
|
||||||
|
|
||||||
|
|
||||||
|
def test_client_id(
|
||||||
|
pydase_client: pydase.Client, caplog: pytest.LogCaptureFixture
|
||||||
|
) -> None:
|
||||||
|
import socket
|
||||||
|
|
||||||
|
pydase.Client(url="ws://localhost:9999")
|
||||||
|
|
||||||
|
assert f"Client [id={socket.gethostname()}]" in caplog.text
|
||||||
|
caplog.clear()
|
||||||
|
|
||||||
|
pydase.Client(url="ws://localhost:9999", client_id="my_service")
|
||||||
|
assert "Client [id=my_service] connected" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_value(
|
||||||
|
pydase_client: pydase.Client, caplog: pytest.LogCaptureFixture
|
||||||
|
) -> None:
|
||||||
|
pydase_client.update_value("sub_service.name", "Other name")
|
||||||
|
|
||||||
|
assert pydase_client.get_value("sub_service.name") == "Other name"
|
||||||
|
|
||||||
|
assert (
|
||||||
|
pydase_client.trigger_method("my_async_method", input_str="Hello World")
|
||||||
|
== "Hello World"
|
||||||
|
)
|
||||||
|
|||||||
22
tests/client/test_proxy_class.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import asyncio
|
||||||
|
from unittest.mock import AsyncMock, call, patch
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from pydase import components
|
||||||
|
from pydase.client.proxy_class import ProxyClass
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_serialize_fallback_inside_event_loop() -> None:
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
mock_sio = AsyncMock()
|
||||||
|
proxy = ProxyClass(sio_client=mock_sio, loop=loop, reconnect=lambda: None)
|
||||||
|
|
||||||
|
with patch.object(
|
||||||
|
components.DeviceConnection, "serialize", return_value={"value": {}}
|
||||||
|
) as mock_fallback:
|
||||||
|
result = proxy.serialize()
|
||||||
|
|
||||||
|
mock_fallback.assert_has_calls(calls=[call(), call()])
|
||||||
|
assert isinstance(result, dict)
|
||||||
106
tests/client/test_reconnection.py
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
import threading
|
||||||
|
from collections.abc import Callable, Generator
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import socketio.exceptions
|
||||||
|
|
||||||
|
import pydase
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="function")
|
||||||
|
def pydase_restartable_server() -> Generator[
|
||||||
|
tuple[
|
||||||
|
pydase.Server,
|
||||||
|
threading.Thread,
|
||||||
|
pydase.DataService,
|
||||||
|
Callable[
|
||||||
|
[pydase.Server, threading.Thread, pydase.DataService],
|
||||||
|
tuple[pydase.Server, threading.Thread],
|
||||||
|
],
|
||||||
|
],
|
||||||
|
None,
|
||||||
|
Any,
|
||||||
|
]:
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self._name = "MyService"
|
||||||
|
self._my_property = 12.1
|
||||||
|
|
||||||
|
@property
|
||||||
|
def my_property(self) -> float:
|
||||||
|
return self._my_property
|
||||||
|
|
||||||
|
@my_property.setter
|
||||||
|
def my_property(self, value: float) -> None:
|
||||||
|
self._my_property = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self) -> str:
|
||||||
|
return self._name
|
||||||
|
|
||||||
|
service_instance = MyService()
|
||||||
|
server = pydase.Server(service_instance, web_port=9999)
|
||||||
|
thread = threading.Thread(target=server.run, daemon=True)
|
||||||
|
thread.start()
|
||||||
|
|
||||||
|
def restart(
|
||||||
|
server: pydase.Server,
|
||||||
|
thread: threading.Thread,
|
||||||
|
service_instance: pydase.DataService,
|
||||||
|
) -> tuple[pydase.Server, threading.Thread]:
|
||||||
|
server.handle_exit()
|
||||||
|
thread.join()
|
||||||
|
|
||||||
|
server = pydase.Server(service_instance, web_port=9999)
|
||||||
|
new_thread = threading.Thread(target=server.run, daemon=True)
|
||||||
|
new_thread.start()
|
||||||
|
|
||||||
|
return server, new_thread
|
||||||
|
|
||||||
|
yield server, thread, service_instance, restart
|
||||||
|
|
||||||
|
|
||||||
|
def test_reconnection(
|
||||||
|
pydase_restartable_server: tuple[
|
||||||
|
pydase.Server,
|
||||||
|
threading.Thread,
|
||||||
|
pydase.DataService,
|
||||||
|
Callable[
|
||||||
|
[pydase.Server, threading.Thread, pydase.DataService],
|
||||||
|
tuple[pydase.Server, threading.Thread],
|
||||||
|
],
|
||||||
|
],
|
||||||
|
) -> None:
|
||||||
|
client = pydase.Client(
|
||||||
|
url="ws://localhost:9999",
|
||||||
|
sio_client_kwargs={
|
||||||
|
"reconnection": False,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
client_2 = pydase.Client(
|
||||||
|
url="ws://localhost:9999",
|
||||||
|
sio_client_kwargs={
|
||||||
|
"reconnection_attempts": 1,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
server, thread, service_instance, restart = pydase_restartable_server
|
||||||
|
service_instance._name = "New service name"
|
||||||
|
|
||||||
|
server, thread = restart(server, thread, service_instance)
|
||||||
|
|
||||||
|
with pytest.raises(socketio.exceptions.BadNamespaceError):
|
||||||
|
client.proxy.name
|
||||||
|
client_2.proxy.name
|
||||||
|
|
||||||
|
client.proxy.reconnect()
|
||||||
|
client_2.proxy.reconnect()
|
||||||
|
|
||||||
|
# the service proxies successfully reconnect and get the new service name
|
||||||
|
assert client.proxy.name == "New service name"
|
||||||
|
assert client_2.proxy.name == "New service name"
|
||||||
|
|
||||||
|
server.handle_exit()
|
||||||
|
thread.join()
|
||||||
@@ -3,10 +3,11 @@ import asyncio
|
|||||||
import pydase
|
import pydase
|
||||||
import pydase.components.device_connection
|
import pydase.components.device_connection
|
||||||
import pytest
|
import pytest
|
||||||
|
from pydase.task.autostart import autostart_service_tasks
|
||||||
from pytest import LogCaptureFixture
|
from pytest import LogCaptureFixture
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio(scope="function")
|
@pytest.mark.asyncio(loop_scope="function")
|
||||||
async def test_reconnection(caplog: LogCaptureFixture) -> None:
|
async def test_reconnection(caplog: LogCaptureFixture) -> None:
|
||||||
class MyService(pydase.components.device_connection.DeviceConnection):
|
class MyService(pydase.components.device_connection.DeviceConnection):
|
||||||
def __init__(
|
def __init__(
|
||||||
@@ -19,10 +20,9 @@ async def test_reconnection(caplog: LogCaptureFixture) -> None:
|
|||||||
self._connected = True
|
self._connected = True
|
||||||
|
|
||||||
service_instance = MyService()
|
service_instance = MyService()
|
||||||
|
autostart_service_tasks(service_instance)
|
||||||
|
|
||||||
assert service_instance._connected is False
|
assert service_instance._connected is False
|
||||||
|
|
||||||
service_instance._task_manager.start_autostart_tasks()
|
|
||||||
|
|
||||||
await asyncio.sleep(0.01)
|
await asyncio.sleep(0.01)
|
||||||
assert service_instance._connected is True
|
assert service_instance._connected is True
|
||||||
|
|||||||
@@ -1,10 +1,17 @@
|
|||||||
|
import sys
|
||||||
|
|
||||||
|
from pytest import LogCaptureFixture
|
||||||
|
|
||||||
import pydase
|
import pydase
|
||||||
import pydase.components
|
import pydase.components
|
||||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||||
from pydase.data_service.state_manager import StateManager
|
from pydase.data_service.state_manager import StateManager
|
||||||
from pydase.utils.serialization.serializer import dump
|
from pydase.utils.serialization.serializer import dump
|
||||||
from pytest import LogCaptureFixture
|
|
||||||
|
if sys.version_info < (3, 13):
|
||||||
|
PATHLIB_PATH = "pathlib.Path"
|
||||||
|
else:
|
||||||
|
PATHLIB_PATH = "pathlib._local.Path"
|
||||||
|
|
||||||
|
|
||||||
def test_image_functions(caplog: LogCaptureFixture) -> None:
|
def test_image_functions(caplog: LogCaptureFixture) -> None:
|
||||||
@@ -106,7 +113,7 @@ def test_image_serialization() -> None:
|
|||||||
"signature": {
|
"signature": {
|
||||||
"parameters": {
|
"parameters": {
|
||||||
"path": {
|
"path": {
|
||||||
"annotation": "pathlib.Path | str",
|
"annotation": f"{PATHLIB_PATH} | str",
|
||||||
"default": {},
|
"default": {},
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -1,43 +1,17 @@
|
|||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
import pydase
|
|
||||||
import pydase.units as u
|
|
||||||
import pytest
|
import pytest
|
||||||
from pydase import DataService
|
|
||||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
|
||||||
from pydase.data_service.state_manager import StateManager
|
|
||||||
from pydase.utils.decorators import FunctionDefinitionError, frontend
|
|
||||||
from pytest import LogCaptureFixture
|
from pytest import LogCaptureFixture
|
||||||
|
|
||||||
|
import pydase
|
||||||
def test_unexpected_type_change_warning(caplog: LogCaptureFixture) -> None:
|
import pydase.units as u
|
||||||
class ServiceClass(DataService):
|
from pydase import DataService
|
||||||
attr_1 = 1.0
|
from pydase.utils.decorators import FunctionDefinitionError, frontend
|
||||||
current = 1.0 * u.units.A
|
|
||||||
|
|
||||||
service_instance = ServiceClass()
|
|
||||||
state_manager = StateManager(service_instance)
|
|
||||||
DataServiceObserver(state_manager)
|
|
||||||
service_instance.attr_1 = 2
|
|
||||||
|
|
||||||
assert "'attr_1' changed to '2'" in caplog.text
|
|
||||||
assert (
|
|
||||||
"Type of 'attr_1' changed from 'float' to 'int'. This may have unwanted "
|
|
||||||
"side effects! Consider setting it to 'float' directly." in caplog.text
|
|
||||||
)
|
|
||||||
|
|
||||||
service_instance.current = 2
|
|
||||||
assert "'current' changed to '2'" in caplog.text
|
|
||||||
assert (
|
|
||||||
"Type of 'current' changed from 'Quantity' to 'int'. This may have unwanted "
|
|
||||||
"side effects! Consider setting it to 'Quantity' directly." in caplog.text
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None:
|
def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None:
|
||||||
class SubService(DataService):
|
class SubService(DataService): ...
|
||||||
...
|
|
||||||
|
|
||||||
class SomeEnum(Enum):
|
class SomeEnum(Enum):
|
||||||
HI = 0
|
HI = 0
|
||||||
@@ -57,11 +31,9 @@ def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None:
|
|||||||
def name(self) -> str:
|
def name(self) -> str:
|
||||||
return self._name
|
return self._name
|
||||||
|
|
||||||
def some_method(self) -> None:
|
def some_method(self) -> None: ...
|
||||||
...
|
|
||||||
|
|
||||||
async def some_task(self) -> None:
|
async def some_task(self) -> None: ...
|
||||||
...
|
|
||||||
|
|
||||||
ServiceClass()
|
ServiceClass()
|
||||||
|
|
||||||
@@ -129,17 +101,12 @@ def test_exposing_methods(caplog: LogCaptureFixture) -> None:
|
|||||||
return "some method"
|
return "some method"
|
||||||
|
|
||||||
class ClassWithTask(pydase.DataService):
|
class ClassWithTask(pydase.DataService):
|
||||||
async def some_task(self, sleep_time: int) -> None:
|
@frontend
|
||||||
pass
|
def some_method(self) -> str:
|
||||||
|
return "some method"
|
||||||
|
|
||||||
ClassWithTask()
|
ClassWithTask()
|
||||||
|
|
||||||
assert (
|
|
||||||
"Async function 'some_task' is defined with at least one argument. If you want "
|
|
||||||
"to use it as a task, remove the argument(s) from the function definition."
|
|
||||||
in caplog.text
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_dynamically_added_attribute(caplog: LogCaptureFixture) -> None:
|
def test_dynamically_added_attribute(caplog: LogCaptureFixture) -> None:
|
||||||
class MyService(DataService):
|
class MyService(DataService):
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
import pydase
|
import pydase
|
||||||
import pytest
|
|
||||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||||
from pydase.data_service.state_manager import StateManager
|
from pydase.data_service.state_manager import StateManager
|
||||||
|
|
||||||
@@ -33,35 +32,3 @@ def test_nested_attributes_cache_callback() -> None:
|
|||||||
]
|
]
|
||||||
== "Ciao"
|
== "Ciao"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio(scope="function")
|
|
||||||
async def test_task_status_update() -> None:
|
|
||||||
class ServiceClass(pydase.DataService):
|
|
||||||
name = "World"
|
|
||||||
|
|
||||||
async def my_method(self) -> None:
|
|
||||||
pass
|
|
||||||
|
|
||||||
service_instance = ServiceClass()
|
|
||||||
state_manager = StateManager(service_instance)
|
|
||||||
DataServiceObserver(state_manager)
|
|
||||||
|
|
||||||
assert (
|
|
||||||
state_manager.cache_manager.get_value_dict_from_cache("my_method")["type"]
|
|
||||||
== "method"
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
state_manager.cache_manager.get_value_dict_from_cache("my_method")["value"]
|
|
||||||
is None
|
|
||||||
)
|
|
||||||
|
|
||||||
service_instance.start_my_method() # type: ignore
|
|
||||||
assert (
|
|
||||||
state_manager.cache_manager.get_value_dict_from_cache("my_method")["type"]
|
|
||||||
== "method"
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
state_manager.cache_manager.get_value_dict_from_cache("my_method")["value"]
|
|
||||||
== "RUNNING"
|
|
||||||
)
|
|
||||||
|
|||||||
@@ -1,11 +1,12 @@
|
|||||||
import logging
|
import logging
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
import pydase
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import pydase
|
||||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||||
from pydase.data_service.state_manager import StateManager
|
from pydase.data_service.state_manager import StateManager
|
||||||
from pydase.utils.serialization.serializer import SerializationError
|
from pydase.utils.serialization.serializer import SerializationError, dump
|
||||||
|
|
||||||
logger = logging.getLogger()
|
logger = logging.getLogger()
|
||||||
|
|
||||||
@@ -146,3 +147,137 @@ def test_private_attribute_does_not_have_to_be_serializable() -> None:
|
|||||||
service_instance.change_publ_attr()
|
service_instance.change_publ_attr()
|
||||||
|
|
||||||
service_instance.change_priv_attr()
|
service_instance.change_priv_attr()
|
||||||
|
|
||||||
|
|
||||||
|
def test_normalized_attr_path_in_dependent_property_changes(
|
||||||
|
caplog: pytest.LogCaptureFixture,
|
||||||
|
) -> None:
|
||||||
|
class SubService(pydase.DataService):
|
||||||
|
_prop = 10.0
|
||||||
|
|
||||||
|
@property
|
||||||
|
def prop(self) -> float:
|
||||||
|
return self._prop
|
||||||
|
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.service_dict = {"one": SubService()}
|
||||||
|
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service=service_instance)
|
||||||
|
observer = DataServiceObserver(state_manager=state_manager)
|
||||||
|
|
||||||
|
assert observer.property_deps_dict['service_dict["one"]._prop'] == [
|
||||||
|
'service_dict["one"].prop'
|
||||||
|
]
|
||||||
|
|
||||||
|
# We can use dict key path encoded with double quotes
|
||||||
|
state_manager.set_service_attribute_value_by_path(
|
||||||
|
'service_dict["one"]._prop', dump(11.0)
|
||||||
|
)
|
||||||
|
assert service_instance.service_dict["one"].prop == 11.0
|
||||||
|
assert "'service_dict[\"one\"].prop' changed to '11.0'" in caplog.text
|
||||||
|
|
||||||
|
# We can use dict key path encoded with single quotes
|
||||||
|
state_manager.set_service_attribute_value_by_path(
|
||||||
|
"service_dict['one']._prop", dump(12.0)
|
||||||
|
)
|
||||||
|
assert service_instance.service_dict["one"].prop == 12.0
|
||||||
|
assert "'service_dict[\"one\"].prop' changed to '12.0'" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
def test_nested_dict_property_changes(
|
||||||
|
caplog: pytest.LogCaptureFixture,
|
||||||
|
) -> None:
|
||||||
|
def get_voltage() -> float:
|
||||||
|
"""Mocking a remote device."""
|
||||||
|
return 2.0
|
||||||
|
|
||||||
|
def set_voltage(value: float) -> None:
|
||||||
|
"""Mocking a remote device."""
|
||||||
|
|
||||||
|
class OtherService(pydase.DataService):
|
||||||
|
_voltage = 1.0
|
||||||
|
|
||||||
|
@property
|
||||||
|
def voltage(self) -> float:
|
||||||
|
# Property dependency _voltage changes within the property itself.
|
||||||
|
# This should be handled gracefully, i.e. not introduce recursion
|
||||||
|
self._voltage = get_voltage()
|
||||||
|
return self._voltage
|
||||||
|
|
||||||
|
@voltage.setter
|
||||||
|
def voltage(self, value: float) -> None:
|
||||||
|
self._voltage = value
|
||||||
|
set_voltage(self._voltage)
|
||||||
|
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.my_dict = {"key": OtherService()}
|
||||||
|
|
||||||
|
service = MyService()
|
||||||
|
pydase.Server(service)
|
||||||
|
|
||||||
|
# Changing the _voltage attribute should re-evaluate the voltage property, but avoid
|
||||||
|
# recursion
|
||||||
|
service.my_dict["key"].voltage = 1.2
|
||||||
|
|
||||||
|
|
||||||
|
def test_read_only_dict_property(caplog: pytest.LogCaptureFixture) -> None:
|
||||||
|
class MyObservable(pydase.DataService):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self._dict_attr = {"dotted.key": 1.0}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def dict_attr(self) -> dict[str, Any]:
|
||||||
|
return self._dict_attr
|
||||||
|
|
||||||
|
service_instance = MyObservable()
|
||||||
|
state_manager = StateManager(service=service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
|
||||||
|
service_instance._dict_attr["dotted.key"] = 2.0
|
||||||
|
|
||||||
|
assert "'dict_attr[\"dotted.key\"]' changed to '2.0'" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
def test_dependency_as_function_argument(caplog: pytest.LogCaptureFixture) -> None:
|
||||||
|
class MyObservable(pydase.DataService):
|
||||||
|
some_int = 0
|
||||||
|
|
||||||
|
@property
|
||||||
|
def other_int(self) -> int:
|
||||||
|
return self.add_one(self.some_int)
|
||||||
|
|
||||||
|
def add_one(self, value: int) -> int:
|
||||||
|
return value + 1
|
||||||
|
|
||||||
|
service_instance = MyObservable()
|
||||||
|
state_manager = StateManager(service=service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
|
||||||
|
service_instance.some_int = 1337
|
||||||
|
|
||||||
|
assert "'other_int' changed to '1338'" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
def test_property_starting_with_dependency_name(
|
||||||
|
caplog: pytest.LogCaptureFixture,
|
||||||
|
) -> None:
|
||||||
|
class MyObservable(pydase.DataService):
|
||||||
|
my_int = 0
|
||||||
|
|
||||||
|
@property
|
||||||
|
def my_int_2(self) -> int:
|
||||||
|
return self.my_int + 1
|
||||||
|
|
||||||
|
service_instance = MyObservable()
|
||||||
|
state_manager = StateManager(service=service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
|
||||||
|
service_instance.my_int = 1337
|
||||||
|
|
||||||
|
assert "'my_int_2' changed to '1338'" in caplog.text
|
||||||
|
|||||||
@@ -1,10 +1,13 @@
|
|||||||
|
import asyncio
|
||||||
import json
|
import json
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
|
import anyio
|
||||||
import pydase
|
import pydase
|
||||||
import pydase.components
|
import pydase.components
|
||||||
import pydase.units as u
|
import pydase.units as u
|
||||||
|
import pytest
|
||||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||||
from pydase.data_service.state_manager import (
|
from pydase.data_service.state_manager import (
|
||||||
StateManager,
|
StateManager,
|
||||||
@@ -349,4 +352,24 @@ def test_property_load_state(tmp_path: Path) -> None:
|
|||||||
|
|
||||||
assert service_instance.name == "Some other name"
|
assert service_instance.name == "Some other name"
|
||||||
assert service_instance.not_loadable_attr == "Not loadable"
|
assert service_instance.not_loadable_attr == "Not loadable"
|
||||||
assert not has_load_state_decorator(type(service_instance).property_without_setter)
|
assert not has_load_state_decorator(type(service_instance).property_without_setter) # type: ignore
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_autosave(tmp_path: Path, caplog: LogCaptureFixture) -> None:
|
||||||
|
filename = tmp_path / "state.json"
|
||||||
|
|
||||||
|
service = Service()
|
||||||
|
manager = StateManager(service=service, filename=filename, autosave_interval=0.1)
|
||||||
|
DataServiceObserver(state_manager=manager)
|
||||||
|
|
||||||
|
task = asyncio.create_task(manager.autosave())
|
||||||
|
service.property_attr = 198.0
|
||||||
|
await asyncio.sleep(0.1)
|
||||||
|
task.cancel()
|
||||||
|
|
||||||
|
assert filename.exists(), "Autosave should write to the file"
|
||||||
|
async with await anyio.open_file(filename) as f:
|
||||||
|
data = json.loads(await f.read())
|
||||||
|
|
||||||
|
assert data["property_attr"]["value"] == service.property_attr
|
||||||
|
|||||||
@@ -1,135 +0,0 @@
|
|||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import pydase
|
|
||||||
import pytest
|
|
||||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
|
||||||
from pydase.data_service.state_manager import StateManager
|
|
||||||
from pytest import LogCaptureFixture
|
|
||||||
|
|
||||||
logger = logging.getLogger("pydase")
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio(scope="function")
|
|
||||||
async def test_autostart_task_callback(caplog: LogCaptureFixture) -> None:
|
|
||||||
class MyService(pydase.DataService):
|
|
||||||
def __init__(self) -> None:
|
|
||||||
super().__init__()
|
|
||||||
self._autostart_tasks = { # type: ignore
|
|
||||||
"my_task": (), # type: ignore
|
|
||||||
"my_other_task": (), # type: ignore
|
|
||||||
}
|
|
||||||
|
|
||||||
async def my_task(self) -> None:
|
|
||||||
logger.info("Triggered task.")
|
|
||||||
|
|
||||||
async def my_other_task(self) -> None:
|
|
||||||
logger.info("Triggered other task.")
|
|
||||||
|
|
||||||
# Your test code here
|
|
||||||
service_instance = MyService()
|
|
||||||
state_manager = StateManager(service_instance)
|
|
||||||
DataServiceObserver(state_manager)
|
|
||||||
service_instance._task_manager.start_autostart_tasks()
|
|
||||||
|
|
||||||
assert "'my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
|
||||||
assert "'my_other_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio(scope="function")
|
|
||||||
async def test_DataService_subclass_autostart_task_callback(
|
|
||||||
caplog: LogCaptureFixture,
|
|
||||||
) -> None:
|
|
||||||
class MySubService(pydase.DataService):
|
|
||||||
def __init__(self) -> None:
|
|
||||||
super().__init__()
|
|
||||||
self._autostart_tasks = { # type: ignore
|
|
||||||
"my_task": (),
|
|
||||||
"my_other_task": (),
|
|
||||||
}
|
|
||||||
|
|
||||||
async def my_task(self) -> None:
|
|
||||||
logger.info("Triggered task.")
|
|
||||||
|
|
||||||
async def my_other_task(self) -> None:
|
|
||||||
logger.info("Triggered other task.")
|
|
||||||
|
|
||||||
class MyService(pydase.DataService):
|
|
||||||
sub_service = MySubService()
|
|
||||||
|
|
||||||
service_instance = MyService()
|
|
||||||
state_manager = StateManager(service_instance)
|
|
||||||
DataServiceObserver(state_manager)
|
|
||||||
service_instance._task_manager.start_autostart_tasks()
|
|
||||||
|
|
||||||
assert "'sub_service.my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
|
||||||
assert "'sub_service.my_other_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio(scope="function")
|
|
||||||
async def test_DataService_subclass_list_autostart_task_callback(
|
|
||||||
caplog: LogCaptureFixture,
|
|
||||||
) -> None:
|
|
||||||
class MySubService(pydase.DataService):
|
|
||||||
def __init__(self) -> None:
|
|
||||||
super().__init__()
|
|
||||||
self._autostart_tasks = { # type: ignore
|
|
||||||
"my_task": (),
|
|
||||||
"my_other_task": (),
|
|
||||||
}
|
|
||||||
|
|
||||||
async def my_task(self) -> None:
|
|
||||||
logger.info("Triggered task.")
|
|
||||||
|
|
||||||
async def my_other_task(self) -> None:
|
|
||||||
logger.info("Triggered other task.")
|
|
||||||
|
|
||||||
class MyService(pydase.DataService):
|
|
||||||
sub_services_list = [MySubService() for i in range(2)]
|
|
||||||
|
|
||||||
service_instance = MyService()
|
|
||||||
state_manager = StateManager(service_instance)
|
|
||||||
DataServiceObserver(state_manager)
|
|
||||||
service_instance._task_manager.start_autostart_tasks()
|
|
||||||
|
|
||||||
assert (
|
|
||||||
"'sub_services_list[0].my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
"'sub_services_list[0].my_other_task' changed to 'TaskStatus.RUNNING'"
|
|
||||||
in caplog.text
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
"'sub_services_list[1].my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
"'sub_services_list[1].my_other_task' changed to 'TaskStatus.RUNNING'"
|
|
||||||
in caplog.text
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio(scope="function")
|
|
||||||
async def test_start_and_stop_task_methods(caplog: LogCaptureFixture) -> None:
|
|
||||||
class MyService(pydase.DataService):
|
|
||||||
def __init__(self) -> None:
|
|
||||||
super().__init__()
|
|
||||||
|
|
||||||
async def my_task(self) -> None:
|
|
||||||
while True:
|
|
||||||
logger.debug("Logging message")
|
|
||||||
await asyncio.sleep(0.1)
|
|
||||||
|
|
||||||
# Your test code here
|
|
||||||
service_instance = MyService()
|
|
||||||
state_manager = StateManager(service_instance)
|
|
||||||
DataServiceObserver(state_manager)
|
|
||||||
service_instance.start_my_task()
|
|
||||||
await asyncio.sleep(0.01)
|
|
||||||
|
|
||||||
assert "'my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
|
||||||
assert "Logging message" in caplog.text
|
|
||||||
caplog.clear()
|
|
||||||
|
|
||||||
service_instance.stop_my_task()
|
|
||||||
await asyncio.sleep(0.01)
|
|
||||||
assert "Task 'my_task' was cancelled" in caplog.text
|
|
||||||
@@ -1,8 +1,9 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import threading
|
import threading
|
||||||
|
|
||||||
import pydase
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import pydase
|
||||||
from pydase.observer_pattern.observable.decorators import validate_set
|
from pydase.observer_pattern.observable.decorators import validate_set
|
||||||
|
|
||||||
|
|
||||||
@@ -17,7 +18,10 @@ def linspace(start: float, stop: float, n: int):
|
|||||||
|
|
||||||
def asyncio_loop_thread(loop: asyncio.AbstractEventLoop) -> None:
|
def asyncio_loop_thread(loop: asyncio.AbstractEventLoop) -> None:
|
||||||
asyncio.set_event_loop(loop)
|
asyncio.set_event_loop(loop)
|
||||||
loop.run_forever()
|
try:
|
||||||
|
loop.run_forever()
|
||||||
|
finally:
|
||||||
|
loop.close()
|
||||||
|
|
||||||
|
|
||||||
def test_validate_set_precision(caplog: pytest.LogCaptureFixture) -> None:
|
def test_validate_set_precision(caplog: pytest.LogCaptureFixture) -> None:
|
||||||
@@ -89,10 +93,10 @@ def test_validate_set_timeout(caplog: pytest.LogCaptureFixture) -> None:
|
|||||||
def value(self, value: float) -> None:
|
def value(self, value: float) -> None:
|
||||||
self.loop.create_task(self.set_value(value))
|
self.loop.create_task(self.set_value(value))
|
||||||
|
|
||||||
async def set_value(self, value) -> None:
|
async def set_value(self, value: float) -> None:
|
||||||
for i in linspace(self._value, value, 10):
|
for i in linspace(self._value, value, 10):
|
||||||
self._value = i
|
self._value = i
|
||||||
await asyncio.sleep(0.1)
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
class Service(pydase.DataService):
|
class Service(pydase.DataService):
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
@@ -104,7 +108,7 @@ def test_validate_set_timeout(caplog: pytest.LogCaptureFixture) -> None:
|
|||||||
return self._driver.value
|
return self._driver.value
|
||||||
|
|
||||||
@value_1.setter
|
@value_1.setter
|
||||||
@validate_set(timeout=0.5)
|
@validate_set(timeout=0.01)
|
||||||
def value_1(self, value: float) -> None:
|
def value_1(self, value: float) -> None:
|
||||||
self._driver.value = value
|
self._driver.value = value
|
||||||
|
|
||||||
@@ -113,7 +117,7 @@ def test_validate_set_timeout(caplog: pytest.LogCaptureFixture) -> None:
|
|||||||
return self._driver.value
|
return self._driver.value
|
||||||
|
|
||||||
@value_2.setter
|
@value_2.setter
|
||||||
@validate_set(timeout=1)
|
@validate_set(timeout=0.11)
|
||||||
def value_2(self, value: float) -> None:
|
def value_2(self, value: float) -> None:
|
||||||
self._driver.value = value
|
self._driver.value = value
|
||||||
|
|
||||||
|
|||||||
@@ -4,11 +4,13 @@ from collections.abc import Generator
|
|||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
import pydase
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import pydase
|
||||||
|
from pydase.utils.serialization.deserializer import Deserializer
|
||||||
|
|
||||||
@pytest.fixture()
|
|
||||||
|
@pytest.fixture(scope="module")
|
||||||
def pydase_server() -> Generator[None, None, None]:
|
def pydase_server() -> Generator[None, None, None]:
|
||||||
class SubService(pydase.DataService):
|
class SubService(pydase.DataService):
|
||||||
name = "SubService"
|
name = "SubService"
|
||||||
@@ -40,7 +42,10 @@ def pydase_server() -> Generator[None, None, None]:
|
|||||||
return self._readonly_attr
|
return self._readonly_attr
|
||||||
|
|
||||||
def my_method(self, input_str: str) -> str:
|
def my_method(self, input_str: str) -> str:
|
||||||
return input_str
|
return f"{input_str}: my_method"
|
||||||
|
|
||||||
|
async def my_async_method(self, input_str: str) -> str:
|
||||||
|
return f"{input_str}: my_async_method"
|
||||||
|
|
||||||
server = pydase.Server(MyService(), web_port=9998)
|
server = pydase.Server(MyService(), web_port=9998)
|
||||||
thread = threading.Thread(target=server.run, daemon=True)
|
thread = threading.Thread(target=server.run, daemon=True)
|
||||||
@@ -48,6 +53,9 @@ def pydase_server() -> Generator[None, None, None]:
|
|||||||
|
|
||||||
yield
|
yield
|
||||||
|
|
||||||
|
server.handle_exit()
|
||||||
|
thread.join()
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"access_path, expected",
|
"access_path, expected",
|
||||||
@@ -103,7 +111,7 @@ def pydase_server() -> Generator[None, None, None]:
|
|||||||
),
|
),
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
@pytest.mark.asyncio()
|
@pytest.mark.asyncio(loop_scope="module")
|
||||||
async def test_get_value(
|
async def test_get_value(
|
||||||
access_path: str,
|
access_path: str,
|
||||||
expected: dict[str, Any],
|
expected: dict[str, Any],
|
||||||
@@ -175,12 +183,13 @@ async def test_get_value(
|
|||||||
),
|
),
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
@pytest.mark.asyncio()
|
@pytest.mark.asyncio(loop_scope="module")
|
||||||
async def test_update_value(
|
async def test_update_value(
|
||||||
access_path: str,
|
access_path: str,
|
||||||
new_value: dict[str, Any],
|
new_value: dict[str, Any],
|
||||||
ok: bool,
|
ok: bool,
|
||||||
pydase_server: pydase.DataService,
|
pydase_server: pydase.DataService,
|
||||||
|
caplog: pytest.LogCaptureFixture,
|
||||||
) -> None:
|
) -> None:
|
||||||
async with aiohttp.ClientSession("http://localhost:9998") as session:
|
async with aiohttp.ClientSession("http://localhost:9998") as session:
|
||||||
resp = await session.put(
|
resp = await session.put(
|
||||||
@@ -192,3 +201,97 @@ async def test_update_value(
|
|||||||
resp = await session.get(f"/api/v1/get_value?access_path={access_path}")
|
resp = await session.get(f"/api/v1/get_value?access_path={access_path}")
|
||||||
content = json.loads(await resp.text())
|
content = json.loads(await resp.text())
|
||||||
assert content == new_value
|
assert content == new_value
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"access_path, expected, ok",
|
||||||
|
[
|
||||||
|
(
|
||||||
|
"my_method",
|
||||||
|
"Hello from function: my_method",
|
||||||
|
True,
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"my_async_method",
|
||||||
|
"Hello from function: my_async_method",
|
||||||
|
True,
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"invalid_method",
|
||||||
|
None,
|
||||||
|
False,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@pytest.mark.asyncio(loop_scope="module")
|
||||||
|
async def test_trigger_method(
|
||||||
|
access_path: str,
|
||||||
|
expected: Any,
|
||||||
|
ok: bool,
|
||||||
|
pydase_server: pydase.DataService,
|
||||||
|
) -> None:
|
||||||
|
async with aiohttp.ClientSession("http://localhost:9998") as session:
|
||||||
|
resp = await session.put(
|
||||||
|
"/api/v1/trigger_method",
|
||||||
|
json={
|
||||||
|
"access_path": access_path,
|
||||||
|
"kwargs": {
|
||||||
|
"full_access_path": "",
|
||||||
|
"type": "dict",
|
||||||
|
"value": {
|
||||||
|
"input_str": {
|
||||||
|
"docs": None,
|
||||||
|
"full_access_path": "",
|
||||||
|
"readonly": False,
|
||||||
|
"type": "str",
|
||||||
|
"value": "Hello from function",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert resp.ok == ok
|
||||||
|
|
||||||
|
if resp.ok:
|
||||||
|
content = Deserializer.deserialize(json.loads(await resp.text()))
|
||||||
|
assert content == expected
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"headers, log_id",
|
||||||
|
[
|
||||||
|
({}, "id=None"),
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"X-Client-Id": "client-header",
|
||||||
|
},
|
||||||
|
"id=client-header",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"Remote-User": "Remote User",
|
||||||
|
},
|
||||||
|
"user=Remote User",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"X-Client-Id": "client-header",
|
||||||
|
"Remote-User": "Remote User",
|
||||||
|
},
|
||||||
|
"user=Remote User",
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@pytest.mark.asyncio(loop_scope="module")
|
||||||
|
async def test_client_information_logging(
|
||||||
|
headers: dict[str, str],
|
||||||
|
log_id: str,
|
||||||
|
pydase_server: pydase.DataService,
|
||||||
|
caplog: pytest.LogCaptureFixture,
|
||||||
|
) -> None:
|
||||||
|
async with aiohttp.ClientSession("http://localhost:9998") as session:
|
||||||
|
await session.get(
|
||||||
|
"/api/v1/get_value?access_path=readonly_attr", headers=headers
|
||||||
|
)
|
||||||
|
|
||||||
|
assert log_id in caplog.text
|
||||||
|
|||||||
316
tests/server/web_server/test_sio_setup.py
Normal file
@@ -0,0 +1,316 @@
|
|||||||
|
import threading
|
||||||
|
from collections.abc import Generator
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import socketio
|
||||||
|
|
||||||
|
import pydase
|
||||||
|
from pydase.utils.serialization.deserializer import Deserializer
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module")
|
||||||
|
def pydase_server() -> Generator[None, None, None]:
|
||||||
|
class SubService(pydase.DataService):
|
||||||
|
name = "SubService"
|
||||||
|
|
||||||
|
subservice_instance = SubService()
|
||||||
|
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self._readonly_attr = "MyService"
|
||||||
|
self._my_property = 12.1
|
||||||
|
self.sub_service = SubService()
|
||||||
|
self.list_attr = [1, 2]
|
||||||
|
self.dict_attr = {
|
||||||
|
"foo": subservice_instance,
|
||||||
|
"dotted.key": subservice_instance,
|
||||||
|
}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def my_property(self) -> float:
|
||||||
|
return self._my_property
|
||||||
|
|
||||||
|
@my_property.setter
|
||||||
|
def my_property(self, value: float) -> None:
|
||||||
|
self._my_property = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def readonly_attr(self) -> str:
|
||||||
|
return self._readonly_attr
|
||||||
|
|
||||||
|
def my_method(self, input_str: str) -> str:
|
||||||
|
return f"{input_str}: my_method"
|
||||||
|
|
||||||
|
async def my_async_method(self, input_str: str) -> str:
|
||||||
|
return f"{input_str}: my_async_method"
|
||||||
|
|
||||||
|
server = pydase.Server(MyService(), web_port=9997)
|
||||||
|
thread = threading.Thread(target=server.run, daemon=True)
|
||||||
|
thread.start()
|
||||||
|
|
||||||
|
yield
|
||||||
|
|
||||||
|
server.handle_exit()
|
||||||
|
thread.join()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"access_path, expected",
|
||||||
|
[
|
||||||
|
(
|
||||||
|
"readonly_attr",
|
||||||
|
{
|
||||||
|
"full_access_path": "readonly_attr",
|
||||||
|
"doc": None,
|
||||||
|
"readonly": False,
|
||||||
|
"type": "str",
|
||||||
|
"value": "MyService",
|
||||||
|
},
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"sub_service.name",
|
||||||
|
{
|
||||||
|
"full_access_path": "sub_service.name",
|
||||||
|
"doc": None,
|
||||||
|
"readonly": False,
|
||||||
|
"type": "str",
|
||||||
|
"value": "SubService",
|
||||||
|
},
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"list_attr[0]",
|
||||||
|
{
|
||||||
|
"full_access_path": "list_attr[0]",
|
||||||
|
"doc": None,
|
||||||
|
"readonly": False,
|
||||||
|
"type": "int",
|
||||||
|
"value": 1,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'dict_attr["foo"]',
|
||||||
|
{
|
||||||
|
"full_access_path": 'dict_attr["foo"]',
|
||||||
|
"doc": None,
|
||||||
|
"name": "SubService",
|
||||||
|
"readonly": False,
|
||||||
|
"type": "DataService",
|
||||||
|
"value": {
|
||||||
|
"name": {
|
||||||
|
"doc": None,
|
||||||
|
"full_access_path": 'dict_attr["foo"].name',
|
||||||
|
"readonly": False,
|
||||||
|
"type": "str",
|
||||||
|
"value": "SubService",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@pytest.mark.asyncio(loop_scope="module")
|
||||||
|
async def test_get_value(
|
||||||
|
access_path: str,
|
||||||
|
expected: dict[str, Any],
|
||||||
|
pydase_server: None,
|
||||||
|
) -> None:
|
||||||
|
client = socketio.AsyncClient()
|
||||||
|
await client.connect(
|
||||||
|
"http://localhost:9997", socketio_path="/ws/socket.io", transports=["websocket"]
|
||||||
|
)
|
||||||
|
response = await client.call("get_value", access_path)
|
||||||
|
assert response == expected
|
||||||
|
await client.disconnect()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"access_path, new_value, ok",
|
||||||
|
[
|
||||||
|
(
|
||||||
|
"sub_service.name",
|
||||||
|
{
|
||||||
|
"full_access_path": "sub_service.name",
|
||||||
|
"doc": None,
|
||||||
|
"readonly": False,
|
||||||
|
"type": "str",
|
||||||
|
"value": "New Name",
|
||||||
|
},
|
||||||
|
True,
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"list_attr[0]",
|
||||||
|
{
|
||||||
|
"full_access_path": "list_attr[0]",
|
||||||
|
"doc": None,
|
||||||
|
"readonly": False,
|
||||||
|
"type": "int",
|
||||||
|
"value": 11,
|
||||||
|
},
|
||||||
|
True,
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'dict_attr["foo"].name',
|
||||||
|
{
|
||||||
|
"full_access_path": 'dict_attr["foo"].name',
|
||||||
|
"doc": None,
|
||||||
|
"readonly": False,
|
||||||
|
"type": "str",
|
||||||
|
"value": "foo name",
|
||||||
|
},
|
||||||
|
True,
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"readonly_attr",
|
||||||
|
{
|
||||||
|
"full_access_path": "readonly_attr",
|
||||||
|
"doc": None,
|
||||||
|
"readonly": True,
|
||||||
|
"type": "str",
|
||||||
|
"value": "Other Name",
|
||||||
|
},
|
||||||
|
False,
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"invalid_attribute",
|
||||||
|
{
|
||||||
|
"full_access_path": "invalid_attribute",
|
||||||
|
"doc": None,
|
||||||
|
"readonly": False,
|
||||||
|
"type": "float",
|
||||||
|
"value": 12.0,
|
||||||
|
},
|
||||||
|
False,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@pytest.mark.asyncio(loop_scope="module")
|
||||||
|
async def test_update_value(
|
||||||
|
access_path: str,
|
||||||
|
new_value: dict[str, Any],
|
||||||
|
ok: bool,
|
||||||
|
pydase_server: None,
|
||||||
|
caplog: pytest.LogCaptureFixture,
|
||||||
|
) -> None:
|
||||||
|
client = socketio.AsyncClient()
|
||||||
|
await client.connect(
|
||||||
|
"http://localhost:9997", socketio_path="/ws/socket.io", transports=["websocket"]
|
||||||
|
)
|
||||||
|
response = await client.call(
|
||||||
|
"update_value",
|
||||||
|
{"access_path": access_path, "value": new_value},
|
||||||
|
)
|
||||||
|
|
||||||
|
if ok:
|
||||||
|
assert response is None
|
||||||
|
else:
|
||||||
|
assert response["type"] == "Exception"
|
||||||
|
|
||||||
|
await client.disconnect()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"access_path, expected, ok",
|
||||||
|
[
|
||||||
|
(
|
||||||
|
"my_method",
|
||||||
|
"Hello from function: my_method",
|
||||||
|
True,
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"my_async_method",
|
||||||
|
"Hello from function: my_async_method",
|
||||||
|
True,
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"invalid_method",
|
||||||
|
None,
|
||||||
|
False,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@pytest.mark.asyncio(loop_scope="module")
|
||||||
|
async def test_trigger_method(
|
||||||
|
access_path: str,
|
||||||
|
expected: Any,
|
||||||
|
ok: bool,
|
||||||
|
pydase_server: pydase.DataService,
|
||||||
|
) -> None:
|
||||||
|
client = socketio.AsyncClient()
|
||||||
|
await client.connect(
|
||||||
|
"http://localhost:9997", socketio_path="/ws/socket.io", transports=["websocket"]
|
||||||
|
)
|
||||||
|
response = await client.call(
|
||||||
|
"trigger_method",
|
||||||
|
{
|
||||||
|
"access_path": access_path,
|
||||||
|
"kwargs": {
|
||||||
|
"full_access_path": "",
|
||||||
|
"type": "dict",
|
||||||
|
"value": {
|
||||||
|
"input_str": {
|
||||||
|
"docs": None,
|
||||||
|
"full_access_path": "",
|
||||||
|
"readonly": False,
|
||||||
|
"type": "str",
|
||||||
|
"value": "Hello from function",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
if ok:
|
||||||
|
content = Deserializer.deserialize(response)
|
||||||
|
assert content == expected
|
||||||
|
else:
|
||||||
|
assert response["type"] == "Exception"
|
||||||
|
|
||||||
|
await client.disconnect()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"headers, log_id",
|
||||||
|
[
|
||||||
|
({}, "sid="),
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"X-Client-Id": "client-header",
|
||||||
|
},
|
||||||
|
"id=client-header",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"Remote-User": "Remote User",
|
||||||
|
},
|
||||||
|
"user=Remote User",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"X-Client-Id": "client-header",
|
||||||
|
"Remote-User": "Remote User",
|
||||||
|
},
|
||||||
|
"user=Remote User",
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@pytest.mark.asyncio(loop_scope="module")
|
||||||
|
async def test_client_information_logging(
|
||||||
|
headers: dict[str, str],
|
||||||
|
log_id: str,
|
||||||
|
pydase_server: pydase.DataService,
|
||||||
|
caplog: pytest.LogCaptureFixture,
|
||||||
|
) -> None:
|
||||||
|
client = socketio.AsyncClient()
|
||||||
|
await client.connect(
|
||||||
|
"http://localhost:9997",
|
||||||
|
socketio_path="/ws/socket.io",
|
||||||
|
transports=["websocket"],
|
||||||
|
headers=headers,
|
||||||
|
)
|
||||||
|
await client.call("get_value", "readonly_attr")
|
||||||
|
|
||||||
|
assert log_id in caplog.text
|
||||||
|
|
||||||
|
await client.disconnect()
|
||||||
480
tests/task/test_task.py
Normal file
@@ -0,0 +1,480 @@
|
|||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from pytest import LogCaptureFixture
|
||||||
|
|
||||||
|
import pydase
|
||||||
|
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||||
|
from pydase.data_service.state_manager import StateManager
|
||||||
|
from pydase.task.autostart import autostart_service_tasks
|
||||||
|
from pydase.task.decorator import task
|
||||||
|
from pydase.task.task_status import TaskStatus
|
||||||
|
|
||||||
|
logger = logging.getLogger("pydase")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_start_and_stop_task(caplog: LogCaptureFixture) -> None:
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
@task()
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
logger.info("Triggered task.")
|
||||||
|
while True:
|
||||||
|
await asyncio.sleep(1)
|
||||||
|
|
||||||
|
# Your test code here
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
|
||||||
|
autostart_service_tasks(service_instance)
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
assert service_instance.my_task.status == TaskStatus.NOT_RUNNING
|
||||||
|
|
||||||
|
service_instance.my_task.start()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
assert service_instance.my_task.status == TaskStatus.RUNNING
|
||||||
|
|
||||||
|
assert "'my_task.status' changed to 'TaskStatus.RUNNING'" in caplog.text
|
||||||
|
assert "Triggered task." in caplog.text
|
||||||
|
caplog.clear()
|
||||||
|
|
||||||
|
service_instance.my_task.stop()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
assert service_instance.my_task.status == TaskStatus.NOT_RUNNING
|
||||||
|
assert "Task 'my_task' was cancelled" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_autostart_task(caplog: LogCaptureFixture) -> None:
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
@task(autostart=True)
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
logger.info("Triggered task.")
|
||||||
|
while True:
|
||||||
|
await asyncio.sleep(1)
|
||||||
|
|
||||||
|
# Your test code here
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
|
||||||
|
autostart_service_tasks(service_instance)
|
||||||
|
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
assert service_instance.my_task.status == TaskStatus.RUNNING
|
||||||
|
|
||||||
|
assert "'my_task.status' changed to 'TaskStatus.RUNNING'" in caplog.text
|
||||||
|
|
||||||
|
service_instance.my_task.stop()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_nested_list_autostart_task(
|
||||||
|
caplog: LogCaptureFixture,
|
||||||
|
) -> None:
|
||||||
|
class MySubService(pydase.DataService):
|
||||||
|
@task(autostart=True)
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
logger.info("Triggered task.")
|
||||||
|
while True:
|
||||||
|
await asyncio.sleep(1)
|
||||||
|
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
sub_services_list = [MySubService() for i in range(2)]
|
||||||
|
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
autostart_service_tasks(service_instance)
|
||||||
|
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
assert service_instance.sub_services_list[0].my_task.status == TaskStatus.RUNNING
|
||||||
|
assert service_instance.sub_services_list[1].my_task.status == TaskStatus.RUNNING
|
||||||
|
|
||||||
|
assert (
|
||||||
|
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
in caplog.text
|
||||||
|
)
|
||||||
|
|
||||||
|
service_instance.sub_services_list[0].my_task.stop()
|
||||||
|
service_instance.sub_services_list[1].my_task.stop()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_nested_dict_autostart_task(
|
||||||
|
caplog: LogCaptureFixture,
|
||||||
|
) -> None:
|
||||||
|
class MySubService(pydase.DataService):
|
||||||
|
@task(autostart=True)
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
logger.info("Triggered task.")
|
||||||
|
while True:
|
||||||
|
await asyncio.sleep(1)
|
||||||
|
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
sub_services_dict = {"first": MySubService(), "second": MySubService()}
|
||||||
|
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
|
||||||
|
autostart_service_tasks(service_instance)
|
||||||
|
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
service_instance.sub_services_dict["first"].my_task.status == TaskStatus.RUNNING
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
service_instance.sub_services_dict["second"].my_task.status
|
||||||
|
== TaskStatus.RUNNING
|
||||||
|
)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
in caplog.text
|
||||||
|
)
|
||||||
|
|
||||||
|
service_instance.sub_services_dict["first"].my_task.stop()
|
||||||
|
service_instance.sub_services_dict["second"].my_task.stop()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_manual_start_with_multiple_service_instances(
|
||||||
|
caplog: LogCaptureFixture,
|
||||||
|
) -> None:
|
||||||
|
class MySubService(pydase.DataService):
|
||||||
|
@task()
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
logger.info("Triggered task.")
|
||||||
|
while True:
|
||||||
|
await asyncio.sleep(1)
|
||||||
|
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
sub_services_list = [MySubService() for i in range(2)]
|
||||||
|
sub_services_dict = {"first": MySubService(), "second": MySubService()}
|
||||||
|
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
|
||||||
|
autostart_service_tasks(service_instance)
|
||||||
|
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
service_instance.sub_services_list[0].my_task.status == TaskStatus.NOT_RUNNING
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
service_instance.sub_services_list[1].my_task.status == TaskStatus.NOT_RUNNING
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
service_instance.sub_services_dict["first"].my_task.status
|
||||||
|
== TaskStatus.NOT_RUNNING
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
service_instance.sub_services_dict["second"].my_task.status
|
||||||
|
== TaskStatus.NOT_RUNNING
|
||||||
|
)
|
||||||
|
|
||||||
|
service_instance.sub_services_list[0].my_task.start()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
assert service_instance.sub_services_list[0].my_task.status == TaskStatus.RUNNING
|
||||||
|
assert (
|
||||||
|
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
|
||||||
|
service_instance.sub_services_list[0].my_task.stop()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
assert "Task 'my_task' was cancelled" in caplog.text
|
||||||
|
caplog.clear()
|
||||||
|
|
||||||
|
service_instance.sub_services_list[1].my_task.start()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
assert service_instance.sub_services_list[1].my_task.status == TaskStatus.RUNNING
|
||||||
|
assert (
|
||||||
|
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
|
||||||
|
service_instance.sub_services_list[1].my_task.stop()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
assert "Task 'my_task' was cancelled" in caplog.text
|
||||||
|
caplog.clear()
|
||||||
|
|
||||||
|
service_instance.sub_services_dict["first"].my_task.start()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
service_instance.sub_services_dict["first"].my_task.status == TaskStatus.RUNNING
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
|
||||||
|
service_instance.sub_services_dict["first"].my_task.stop()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
assert "Task 'my_task' was cancelled" in caplog.text
|
||||||
|
caplog.clear()
|
||||||
|
|
||||||
|
service_instance.sub_services_dict["second"].my_task.start()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
service_instance.sub_services_dict["second"].my_task.status
|
||||||
|
== TaskStatus.RUNNING
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
not in caplog.text
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||||
|
in caplog.text
|
||||||
|
)
|
||||||
|
|
||||||
|
service_instance.sub_services_dict["second"].my_task.stop()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
assert "Task 'my_task' was cancelled" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_restart_on_exception(caplog: LogCaptureFixture) -> None:
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
@task(restart_on_exception=True, restart_sec=0.1)
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
logger.info("Triggered task.")
|
||||||
|
raise Exception("Task failure")
|
||||||
|
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
service_instance.my_task.start()
|
||||||
|
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
assert "Task 'my_task' encountered an exception" in caplog.text
|
||||||
|
caplog.clear()
|
||||||
|
await asyncio.sleep(0.1)
|
||||||
|
assert service_instance.my_task.status == TaskStatus.RUNNING
|
||||||
|
assert "Task 'my_task' encountered an exception" in caplog.text
|
||||||
|
assert "Triggered task." in caplog.text
|
||||||
|
|
||||||
|
service_instance.my_task.stop()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_restart_sec(caplog: LogCaptureFixture) -> None:
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
@task(restart_on_exception=True, restart_sec=0.1)
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
logger.info("Triggered task.")
|
||||||
|
raise Exception("Task failure")
|
||||||
|
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
service_instance.my_task.start()
|
||||||
|
|
||||||
|
await asyncio.sleep(0.001)
|
||||||
|
assert "Triggered task." in caplog.text
|
||||||
|
caplog.clear()
|
||||||
|
await asyncio.sleep(0.05)
|
||||||
|
assert "Triggered task." not in caplog.text
|
||||||
|
await asyncio.sleep(0.05)
|
||||||
|
assert "Triggered task." in caplog.text # Ensures the task restarted after 0.2s
|
||||||
|
|
||||||
|
service_instance.my_task.stop()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_exceeding_start_limit_interval_sec_and_burst(
|
||||||
|
caplog: LogCaptureFixture,
|
||||||
|
) -> None:
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
@task(
|
||||||
|
restart_on_exception=True,
|
||||||
|
restart_sec=0.0,
|
||||||
|
start_limit_interval_sec=1.0,
|
||||||
|
start_limit_burst=2,
|
||||||
|
)
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
raise Exception("Task failure")
|
||||||
|
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
service_instance.my_task.start()
|
||||||
|
|
||||||
|
await asyncio.sleep(0.1)
|
||||||
|
assert "Task 'my_task' exceeded restart burst limit" in caplog.text
|
||||||
|
assert service_instance.my_task.status == TaskStatus.NOT_RUNNING
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_non_exceeding_start_limit_interval_sec_and_burst(
|
||||||
|
caplog: LogCaptureFixture,
|
||||||
|
) -> None:
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
@task(
|
||||||
|
restart_on_exception=True,
|
||||||
|
restart_sec=0.1,
|
||||||
|
start_limit_interval_sec=0.1,
|
||||||
|
start_limit_burst=2,
|
||||||
|
)
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
raise Exception("Task failure")
|
||||||
|
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
service_instance.my_task.start()
|
||||||
|
|
||||||
|
await asyncio.sleep(0.5)
|
||||||
|
assert "Task 'my_task' exceeded restart burst limit" not in caplog.text
|
||||||
|
assert service_instance.my_task.status == TaskStatus.RUNNING
|
||||||
|
|
||||||
|
service_instance.my_task.stop()
|
||||||
|
await asyncio.sleep(0.01)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_exit_on_failure(
|
||||||
|
monkeypatch: pytest.MonkeyPatch, caplog: LogCaptureFixture
|
||||||
|
) -> None:
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
@task(restart_on_exception=False, exit_on_failure=True)
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
logger.info("Triggered task.")
|
||||||
|
raise Exception("Critical failure")
|
||||||
|
|
||||||
|
def mock_os_kill(pid: int, signal: int) -> None:
|
||||||
|
logger.critical("os.kill called with signal=%s and pid=%s", signal, pid)
|
||||||
|
|
||||||
|
monkeypatch.setattr("os.kill", mock_os_kill)
|
||||||
|
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
service_instance.my_task.start()
|
||||||
|
|
||||||
|
await asyncio.sleep(0.1)
|
||||||
|
assert "os.kill called with signal=" in caplog.text
|
||||||
|
assert "Task 'my_task' encountered an exception" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_exit_on_failure_exceeding_rate_limit(
|
||||||
|
monkeypatch: pytest.MonkeyPatch, caplog: LogCaptureFixture
|
||||||
|
) -> None:
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
@task(
|
||||||
|
restart_on_exception=True,
|
||||||
|
restart_sec=0.0,
|
||||||
|
start_limit_interval_sec=0.1,
|
||||||
|
start_limit_burst=2,
|
||||||
|
exit_on_failure=True,
|
||||||
|
)
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
raise Exception("Critical failure")
|
||||||
|
|
||||||
|
def mock_os_kill(pid: int, signal: int) -> None:
|
||||||
|
logger.critical("os.kill called with signal=%s and pid=%s", signal, pid)
|
||||||
|
|
||||||
|
monkeypatch.setattr("os.kill", mock_os_kill)
|
||||||
|
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
service_instance.my_task.start()
|
||||||
|
|
||||||
|
await asyncio.sleep(0.5)
|
||||||
|
assert "os.kill called with signal=" in caplog.text
|
||||||
|
assert "Task 'my_task' encountered an exception" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio()
|
||||||
|
async def test_gracefully_finishing_task(
|
||||||
|
monkeypatch: pytest.MonkeyPatch, caplog: LogCaptureFixture
|
||||||
|
) -> None:
|
||||||
|
class MyService(pydase.DataService):
|
||||||
|
@task()
|
||||||
|
async def my_task(self) -> None:
|
||||||
|
print("Hello")
|
||||||
|
await asyncio.sleep(0.1)
|
||||||
|
|
||||||
|
service_instance = MyService()
|
||||||
|
state_manager = StateManager(service_instance)
|
||||||
|
DataServiceObserver(state_manager)
|
||||||
|
service_instance.my_task.start()
|
||||||
|
|
||||||
|
await asyncio.sleep(0.05)
|
||||||
|
assert service_instance.my_task.status == TaskStatus.RUNNING
|
||||||
|
await asyncio.sleep(0.1)
|
||||||
|
assert service_instance.my_task.status == TaskStatus.NOT_RUNNING
|
||||||
@@ -4,5 +4,5 @@ import toml
|
|||||||
|
|
||||||
def test_project_version() -> None:
|
def test_project_version() -> None:
|
||||||
pyproject = toml.load("pyproject.toml")
|
pyproject = toml.load("pyproject.toml")
|
||||||
pydase_pyroject_version = pyproject["tool"]["poetry"]["version"]
|
pydase_pyroject_version = pyproject["project"]["version"]
|
||||||
assert pydase.version.__version__ == pydase_pyroject_version
|
assert pydase.version.__version__ == pydase_pyroject_version
|
||||||
|
|||||||
@@ -1,18 +1,18 @@
|
|||||||
import asyncio
|
|
||||||
import enum
|
import enum
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import Any, ClassVar
|
from typing import Any, ClassVar
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
import pydase
|
import pydase
|
||||||
import pydase.units as u
|
import pydase.units as u
|
||||||
import pytest
|
|
||||||
from pydase.components.coloured_enum import ColouredEnum
|
from pydase.components.coloured_enum import ColouredEnum
|
||||||
from pydase.data_service.task_manager import TaskStatus
|
from pydase.task.task_status import TaskStatus
|
||||||
from pydase.utils.decorators import frontend
|
from pydase.utils.decorators import frontend
|
||||||
from pydase.utils.serialization.serializer import (
|
from pydase.utils.serialization.serializer import (
|
||||||
SerializationPathError,
|
SerializationPathError,
|
||||||
SerializedObject,
|
add_prefix_to_full_access_path,
|
||||||
dump,
|
dump,
|
||||||
generate_serialized_data_paths,
|
generate_serialized_data_paths,
|
||||||
get_container_item_by_key,
|
get_container_item_by_key,
|
||||||
@@ -21,6 +21,7 @@ from pydase.utils.serialization.serializer import (
|
|||||||
serialized_dict_is_nested_object,
|
serialized_dict_is_nested_object,
|
||||||
set_nested_value_by_path,
|
set_nested_value_by_path,
|
||||||
)
|
)
|
||||||
|
from pydase.utils.serialization.types import SerializedObject
|
||||||
|
|
||||||
|
|
||||||
class MyEnum(enum.Enum):
|
class MyEnum(enum.Enum):
|
||||||
@@ -207,18 +208,16 @@ def test_ColouredEnum_serialize() -> None:
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio(scope="module")
|
@pytest.mark.asyncio(loop_scope="module")
|
||||||
async def test_method_serialization() -> None:
|
async def test_method_serialization() -> None:
|
||||||
class ClassWithMethod(pydase.DataService):
|
class ClassWithMethod(pydase.DataService):
|
||||||
def some_method(self) -> str:
|
def some_method(self) -> str:
|
||||||
return "some method"
|
return "some method"
|
||||||
|
|
||||||
async def some_task(self) -> None:
|
async def some_task(self) -> None:
|
||||||
while True:
|
pass
|
||||||
await asyncio.sleep(10)
|
|
||||||
|
|
||||||
instance = ClassWithMethod()
|
instance = ClassWithMethod()
|
||||||
instance.start_some_task() # type: ignore
|
|
||||||
|
|
||||||
assert dump(instance)["value"] == {
|
assert dump(instance)["value"] == {
|
||||||
"some_method": {
|
"some_method": {
|
||||||
@@ -234,7 +233,7 @@ async def test_method_serialization() -> None:
|
|||||||
"some_task": {
|
"some_task": {
|
||||||
"full_access_path": "some_task",
|
"full_access_path": "some_task",
|
||||||
"type": "method",
|
"type": "method",
|
||||||
"value": TaskStatus.RUNNING.name,
|
"value": None,
|
||||||
"readonly": True,
|
"readonly": True,
|
||||||
"doc": None,
|
"doc": None,
|
||||||
"async": True,
|
"async": True,
|
||||||
@@ -254,7 +253,7 @@ def test_methods_with_type_hints() -> None:
|
|||||||
def method_with_type_hint(some_argument: int) -> None:
|
def method_with_type_hint(some_argument: int) -> None:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def method_with_union_type_hint(some_argument: int | float) -> None:
|
def method_with_union_type_hint(some_argument: int | float) -> None: # noqa: PYI041
|
||||||
pass
|
pass
|
||||||
|
|
||||||
assert dump(method_without_type_hint) == {
|
assert dump(method_without_type_hint) == {
|
||||||
@@ -1073,3 +1072,175 @@ def test_get_data_paths_from_serialized_object(obj: Any, expected: list[str]) ->
|
|||||||
)
|
)
|
||||||
def test_generate_serialized_data_paths(obj: Any, expected: list[str]) -> None:
|
def test_generate_serialized_data_paths(obj: Any, expected: list[str]) -> None:
|
||||||
assert generate_serialized_data_paths(dump(obj=obj)["value"]) == expected
|
assert generate_serialized_data_paths(dump(obj=obj)["value"]) == expected
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"serialized_obj, prefix, expected",
|
||||||
|
[
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"full_access_path": "new_attr",
|
||||||
|
"value": {
|
||||||
|
"name": {
|
||||||
|
"full_access_path": "new_attr.name",
|
||||||
|
"value": "MyService",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"prefix",
|
||||||
|
{
|
||||||
|
"full_access_path": "prefix.new_attr",
|
||||||
|
"value": {
|
||||||
|
"name": {
|
||||||
|
"full_access_path": "prefix.new_attr.name",
|
||||||
|
"value": "MyService",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"full_access_path": "new_attr",
|
||||||
|
"value": [
|
||||||
|
{
|
||||||
|
"full_access_path": "new_attr[0]",
|
||||||
|
"value": 1.0,
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"prefix",
|
||||||
|
{
|
||||||
|
"full_access_path": "prefix.new_attr",
|
||||||
|
"value": [
|
||||||
|
{
|
||||||
|
"full_access_path": "prefix.new_attr[0]",
|
||||||
|
"value": 1.0,
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"full_access_path": "new_attr",
|
||||||
|
"value": {
|
||||||
|
"key": {
|
||||||
|
"full_access_path": 'new_attr["key"]',
|
||||||
|
"value": 1.0,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"prefix",
|
||||||
|
{
|
||||||
|
"full_access_path": "prefix.new_attr",
|
||||||
|
"value": {
|
||||||
|
"key": {
|
||||||
|
"full_access_path": 'prefix.new_attr["key"]',
|
||||||
|
"value": 1.0,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"full_access_path": "new_attr",
|
||||||
|
"value": {"magnitude": 10, "unit": "meter"},
|
||||||
|
},
|
||||||
|
"prefix",
|
||||||
|
{
|
||||||
|
"full_access_path": "prefix.new_attr",
|
||||||
|
"value": {"magnitude": 10, "unit": "meter"},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"full_access_path": "quantity_list",
|
||||||
|
"value": [
|
||||||
|
{
|
||||||
|
"full_access_path": "quantity_list[0]",
|
||||||
|
"value": {"magnitude": 1.0, "unit": "A"},
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"prefix",
|
||||||
|
{
|
||||||
|
"full_access_path": "prefix.quantity_list",
|
||||||
|
"value": [
|
||||||
|
{
|
||||||
|
"full_access_path": "prefix.quantity_list[0]",
|
||||||
|
"value": {"magnitude": 1.0, "unit": "A"},
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"full_access_path": "",
|
||||||
|
"value": {
|
||||||
|
"dict_attr": {
|
||||||
|
"type": "dict",
|
||||||
|
"full_access_path": "dict_attr",
|
||||||
|
"value": {
|
||||||
|
"foo": {
|
||||||
|
"full_access_path": 'dict_attr["foo"]',
|
||||||
|
"type": "dict",
|
||||||
|
"value": {
|
||||||
|
"some_int": {
|
||||||
|
"full_access_path": 'dict_attr["foo"].some_int',
|
||||||
|
"type": "int",
|
||||||
|
"value": 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"prefix",
|
||||||
|
{
|
||||||
|
"full_access_path": "prefix",
|
||||||
|
"value": {
|
||||||
|
"dict_attr": {
|
||||||
|
"type": "dict",
|
||||||
|
"full_access_path": "prefix.dict_attr",
|
||||||
|
"value": {
|
||||||
|
"foo": {
|
||||||
|
"full_access_path": 'prefix.dict_attr["foo"]',
|
||||||
|
"type": "dict",
|
||||||
|
"value": {
|
||||||
|
"some_int": {
|
||||||
|
"full_access_path": 'prefix.dict_attr["foo"].some_int',
|
||||||
|
"type": "int",
|
||||||
|
"value": 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_add_prefix_to_full_access_path(
    serialized_obj: SerializedObject, prefix: str, expected: SerializedObject
) -> None:
    """Prefixing every ``full_access_path`` in *serialized_obj* yields *expected*."""
    prefixed = add_prefix_to_full_access_path(serialized_obj, prefix)
    assert prefixed == expected
|
||||||
|
|
||||||
|
|
||||||
|
def test_serialize_exception() -> None:
    """``dump`` serializes exceptions as readonly entries whose value is the message."""

    def expected(message: str) -> dict:
        # Both assertions differ only in the serialized message text.
        return {
            "doc": None,
            "full_access_path": "",
            "name": "Exception",
            "readonly": True,
            "type": "Exception",
            "value": message,
        }

    assert dump(Exception()) == expected("")
    assert dump(Exception("Exception message")) == expected("Exception message")
|
||||||
|
|||||||
@@ -1,9 +1,10 @@
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
from pytest import LogCaptureFixture
|
import pytest
|
||||||
|
from pydase.utils.logging import configure_logging_with_pydase_formatter
|
||||||
|
|
||||||
|
|
||||||
def test_log_error(caplog: LogCaptureFixture):
|
def test_log_error(caplog: pytest.LogCaptureFixture) -> None:
|
||||||
logger = logging.getLogger("pydase")
|
logger = logging.getLogger("pydase")
|
||||||
logger.setLevel(logging.ERROR)
|
logger.setLevel(logging.ERROR)
|
||||||
|
|
||||||
@@ -20,7 +21,7 @@ def test_log_error(caplog: LogCaptureFixture):
|
|||||||
assert any(record.levelname == "ERROR" for record in caplog.records)
|
assert any(record.levelname == "ERROR" for record in caplog.records)
|
||||||
|
|
||||||
|
|
||||||
def test_log_warning(caplog: LogCaptureFixture):
|
def test_log_warning(caplog: pytest.LogCaptureFixture) -> None:
|
||||||
logger = logging.getLogger("pydase")
|
logger = logging.getLogger("pydase")
|
||||||
logger.setLevel(logging.WARNING)
|
logger.setLevel(logging.WARNING)
|
||||||
|
|
||||||
@@ -37,7 +38,7 @@ def test_log_warning(caplog: LogCaptureFixture):
|
|||||||
assert any(record.levelname == "ERROR" for record in caplog.records)
|
assert any(record.levelname == "ERROR" for record in caplog.records)
|
||||||
|
|
||||||
|
|
||||||
def test_log_debug(caplog: LogCaptureFixture):
|
def test_log_debug(caplog: pytest.LogCaptureFixture) -> None:
|
||||||
logger = logging.getLogger("pydase")
|
logger = logging.getLogger("pydase")
|
||||||
logger.setLevel(logging.DEBUG)
|
logger.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
@@ -53,7 +54,7 @@ def test_log_debug(caplog: LogCaptureFixture):
|
|||||||
assert "This is an error message" in caplog.text
|
assert "This is an error message" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
def test_log_info(caplog: LogCaptureFixture):
|
def test_log_info(caplog: pytest.LogCaptureFixture) -> None:
|
||||||
logger = logging.getLogger("pydase")
|
logger = logging.getLogger("pydase")
|
||||||
logger.setLevel(logging.INFO)
|
logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
@@ -67,3 +68,21 @@ def test_log_info(caplog: LogCaptureFixture):
|
|||||||
assert "This is an info message" in caplog.text
|
assert "This is an info message" in caplog.text
|
||||||
assert "This is a warning message" in caplog.text
|
assert "This is a warning message" in caplog.text
|
||||||
assert "This is an error message" in caplog.text
|
assert "This is an error message" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
def test_before_configuring_root_logger(caplog: pytest.LogCaptureFixture) -> None:
    """A record emitted before any pydase logging configuration should not
    show up in ``caplog`` — presumably nothing propagates it to a captured
    handler yet (NOTE(review): depends on surrounding fixture setup).
    """
    logging.getLogger(__name__).info("Hello world")
    assert "Hello world" not in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
def test_configure_root_logger(caplog: pytest.LogCaptureFixture) -> None:
    """After ``configure_logging_with_pydase_formatter()`` a record logged from
    an arbitrary module is captured by ``caplog``.
    """
    configure_logging_with_pydase_formatter()
    logger = logging.getLogger(__name__)
    logger.info("Hello world")

    # Assert on the stable parts of the captured record instead of the full
    # caplog.text line: the old check hard-coded "test_logging.py:83", which
    # broke the test whenever any code above this point moved.
    record = next(
        (r for r in caplog.records if r.getMessage() == "Hello world"), None
    )
    assert record is not None
    assert record.levelname == "INFO"
    assert record.name == __name__
|
||||||
|
|||||||